// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_MIPS)

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
41
static void ProbeTable(Isolate* isolate,
43 MacroAssembler* masm,
44 Code::Flags flags,
45 StubCache::Table table,
46 Register name,
47 Register offset,
48 Register scratch,
49 Register scratch2) {
50 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
51 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
52
53 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
54 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
55
56 // Check the relative positions of the address fields.
57 ASSERT(value_off_addr > key_off_addr);
58 ASSERT((value_off_addr - key_off_addr) % 4 == 0);
59 ASSERT((value_off_addr - key_off_addr) < (256 * 4));
60
61 Label miss;
62 Register offsets_base_addr = scratch;
63
64 // Check that the key in the entry matches the name.
65 __ li(offsets_base_addr, Operand(key_offset));
66 __ sll(scratch2, offset, 1);
67 __ addu(scratch2, offsets_base_addr, scratch2);
68 __ lw(scratch2, MemOperand(scratch2));
69 __ Branch(&miss, ne, name, Operand(scratch2));
70
71 // Get the code entry from the cache.
72 __ Addu(offsets_base_addr, offsets_base_addr,
73 Operand(value_off_addr - key_off_addr));
74 __ sll(scratch2, offset, 1);
75 __ addu(scratch2, offsets_base_addr, scratch2);
76 __ lw(scratch2, MemOperand(scratch2));
77
78 // Check that the flags match what we're looking for.
79 __ lw(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
80 __ And(scratch2, scratch2, Operand(~Code::kFlagsNotUsedInLookup));
81 __ Branch(&miss, ne, scratch2, Operand(flags));
82
83 // Re-load code entry from cache.
84 __ sll(offset, offset, 1);
85 __ addu(offset, offset, offsets_base_addr);
86 __ lw(offset, MemOperand(offset));
87
88 // Jump to the first instruction in the code stub.
89 __ Addu(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
90 __ Jump(offset);
91
92 // Miss: fall through.
93 __ bind(&miss);
94}
95
96
97// Helper function used to check that the dictionary doesn't contain
98// the property. This function may return false negatives, so miss_label
99// must always call a backup property check that is complete.
100// This function is safe to call if the receiver has fast properties.
101// Name must be a symbol and receiver must be a heap object.
102MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup(
103 MacroAssembler* masm,
104 Label* miss_label,
105 Register receiver,
106 String* name,
107 Register scratch0,
108 Register scratch1) {
109 ASSERT(name->IsSymbol());
110 Counters* counters = masm->isolate()->counters();
111 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
112 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
113
114 Label done;
115
116 const int kInterceptorOrAccessCheckNeededMask =
117 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
118
119 // Bail out if the receiver has a named interceptor or requires access checks.
120 Register map = scratch1;
121 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
122 __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
123 __ And(at, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
124 __ Branch(miss_label, ne, at, Operand(zero_reg));
125
126
127 // Check that receiver is a JSObject.
128 __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
129 __ Branch(miss_label, lt, scratch0, Operand(FIRST_JS_OBJECT_TYPE));
130
131 // Load properties array.
132 Register properties = scratch0;
133 __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
134 // Check that the properties array is a dictionary.
135 __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
136 Register tmp = properties;
137 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
138 __ Branch(miss_label, ne, map, Operand(tmp));
139
140 // Restore the temporarily used register.
141 __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
142
143 MaybeObject* result = StringDictionaryLookupStub::GenerateNegativeLookup(
144 masm,
145 miss_label,
146 &done,
147 receiver,
148 properties,
149 name,
150 scratch1);
151 if (result->IsFailure()) return result;
152
153 __ bind(&done);
154 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
155
156 return result;
157}
158
159
void StubCache::GenerateProbe(MacroAssembler* masm,
161 Code::Flags flags,
162 Register receiver,
163 Register name,
164 Register scratch,
                              Register extra,
166 Register extra2) {
  Isolate* isolate = masm->isolate();
168 Label miss;
169
170 // Make sure that code is valid. The shifting code relies on the
171 // entry size being 8.
172 ASSERT(sizeof(Entry) == 8);
173
174 // Make sure the flags does not name a specific type.
175 ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
176
177 // Make sure that there are no register conflicts.
178 ASSERT(!scratch.is(receiver));
179 ASSERT(!scratch.is(name));
180 ASSERT(!extra.is(receiver));
181 ASSERT(!extra.is(name));
182 ASSERT(!extra.is(scratch));
183 ASSERT(!extra2.is(receiver));
184 ASSERT(!extra2.is(name));
185 ASSERT(!extra2.is(scratch));
186 ASSERT(!extra2.is(extra));
187
188 // Check scratch, extra and extra2 registers are valid.
189 ASSERT(!scratch.is(no_reg));
190 ASSERT(!extra.is(no_reg));
191 ASSERT(!extra2.is(no_reg));
192
193 // Check that the receiver isn't a smi.
194 __ JumpIfSmi(receiver, &miss, t0);
195
196 // Get the map of the receiver and compute the hash.
197 __ lw(scratch, FieldMemOperand(name, String::kHashFieldOffset));
198 __ lw(t8, FieldMemOperand(receiver, HeapObject::kMapOffset));
199 __ Addu(scratch, scratch, Operand(t8));
200 __ Xor(scratch, scratch, Operand(flags));
201 __ And(scratch,
202 scratch,
203 Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));
204
205 // Probe the primary table.
206 ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);
207
208 // Primary miss: Compute hash for secondary probe.
209 __ Subu(scratch, scratch, Operand(name));
210 __ Addu(scratch, scratch, Operand(flags));
211 __ And(scratch,
212 scratch,
213 Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));
214
215 // Probe the secondary table.
216 ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);
217
218 // Cache miss: Fall-through and let caller handle the miss by
219 // entering the runtime system.
220 __ bind(&miss);
}
222
223
224void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
225 int index,
226 Register prototype) {
  // Load the global or builtins object from the current context.
228 __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
229 // Load the global context from the global or builtins object.
230 __ lw(prototype,
231 FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
232 // Load the function from the global context.
233 __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
234 // Load the initial map. The global functions all have initial maps.
235 __ lw(prototype,
236 FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
237 // Load the prototype from the initial map.
238 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
240
241
void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
243 MacroAssembler* masm, int index, Register prototype, Label* miss) {
  Isolate* isolate = masm->isolate();
245 // Check we're still in the same context.
246 __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
247 ASSERT(!prototype.is(at));
248 __ li(at, isolate->global());
249 __ Branch(miss, ne, prototype, Operand(at));
250 // Get the global function with the given index.
251 JSFunction* function =
252 JSFunction::cast(isolate->global_context()->get(index));
253 // Load its initial map. The global functions all have initial maps.
254 __ li(prototype, Handle<Map>(function->initial_map()));
255 // Load the prototype from the initial map.
256 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
258
259
// Load a fast property out of a holder object (src). In-object properties
261// are loaded directly otherwise the property is loaded from the properties
262// fixed array.
263void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
264 Register dst, Register src,
265 JSObject* holder, int index) {
  // Adjust for the number of properties stored in the holder.
267 index -= holder->map()->inobject_properties();
268 if (index < 0) {
269 // Get the property straight out of the holder.
270 int offset = holder->map()->instance_size() + (index * kPointerSize);
271 __ lw(dst, FieldMemOperand(src, offset));
272 } else {
273 // Calculate the offset into the properties array.
274 int offset = index * kPointerSize + FixedArray::kHeaderSize;
275 __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
276 __ lw(dst, FieldMemOperand(dst, offset));
277 }
}
279
280
281void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
282 Register receiver,
283 Register scratch,
284 Label* miss_label) {
  // Check that the receiver isn't a smi.
286 __ And(scratch, receiver, Operand(kSmiTagMask));
287 __ Branch(miss_label, eq, scratch, Operand(zero_reg));
288
289 // Check that the object is a JS array.
290 __ GetObjectType(receiver, scratch, scratch);
291 __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));
292
293 // Load length directly from the JS array.
294 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
295 __ Ret();
296}
297
298
299// Generate code to check if an object is a string. If the object is a
300// heap object, its map's instance type is left in the scratch1 register.
301// If this is not needed, scratch1 and scratch2 may be the same register.
302static void GenerateStringCheck(MacroAssembler* masm,
303 Register receiver,
304 Register scratch1,
305 Register scratch2,
306 Label* smi,
307 Label* non_string_object) {
308 // Check that the receiver isn't a smi.
309 __ JumpIfSmi(receiver, smi, t0);
310
311 // Check that the object is a string.
312 __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
313 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
314 __ And(scratch2, scratch1, Operand(kIsNotStringMask));
315 // The cast is to resolve the overload for the argument of 0x0.
316 __ Branch(non_string_object,
317 ne,
318 scratch2,
319 Operand(static_cast<int32_t>(kStringTag)));
}
321
322
// Generate code to load the length from a string object and return the length.
324// If the receiver object is not a string or a wrapped string object the
325// execution continues at the miss label. The register containing the
326// receiver is potentially clobbered.
327void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
328 Register receiver,
329 Register scratch1,
330 Register scratch2,
331 Label* miss,
332 bool support_wrappers) {
  Label check_wrapper;
334
335 // Check if the object is a string leaving the instance type in the
336 // scratch1 register.
337 GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
338 support_wrappers ? &check_wrapper : miss);
339
340 // Load length directly from the string.
341 __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));
342 __ Ret();
343
344 if (support_wrappers) {
345 // Check if the object is a JSValue wrapper.
346 __ bind(&check_wrapper);
347 __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));
348
349 // Unwrap the value and check if the wrapped value is a string.
350 __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
351 GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
352 __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
353 __ Ret();
354 }
}
356
357
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
359 Register receiver,
360 Register scratch1,
361 Register scratch2,
362 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
364 __ mov(v0, scratch1);
365 __ Ret();
}
367
368
// Generate StoreField code, value is passed in a0 register.
// After executing generated code, the receiver_reg and name_reg
371// may be clobbered.
372void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
374 int index,
375 Map* transition,
376 Register receiver_reg,
377 Register name_reg,
378 Register scratch,
379 Label* miss_label) {
  // a0 : value.
381 Label exit;
382
383 // Check that the receiver isn't a smi.
384 __ JumpIfSmi(receiver_reg, miss_label, scratch);
385
386 // Check that the map of the receiver hasn't changed.
387 __ lw(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
388 __ Branch(miss_label, ne, scratch, Operand(Handle<Map>(object->map())));
389
390 // Perform global security token check if needed.
391 if (object->IsJSGlobalProxy()) {
392 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
393 }
394
395 // Stub never generated for non-global objects that require access
396 // checks.
397 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
398
399 // Perform map transition for the receiver if necessary.
400 if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
401 // The properties must be extended before we can store the value.
402 // We jump to a runtime call that extends the properties array.
403 __ push(receiver_reg);
404 __ li(a2, Operand(Handle<Map>(transition)));
405 __ Push(a2, a0);
406 __ TailCallExternalReference(
407 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
408 masm->isolate()),
409 3, 1);
410 return;
411 }
412
413 if (transition != NULL) {
414 // Update the map of the object; no write barrier updating is
415 // needed because the map is never in new space.
416 __ li(t0, Operand(Handle<Map>(transition)));
417 __ sw(t0, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
418 }
419
420 // Adjust for the number of properties stored in the object. Even in the
421 // face of a transition we can use the old map here because the size of the
422 // object and the number of in-object properties is not going to change.
423 index -= object->map()->inobject_properties();
424
425 if (index < 0) {
426 // Set the property straight into the object.
427 int offset = object->map()->instance_size() + (index * kPointerSize);
428 __ sw(a0, FieldMemOperand(receiver_reg, offset));
429
430 // Skip updating write barrier if storing a smi.
431 __ JumpIfSmi(a0, &exit, scratch);
432
433 // Update the write barrier for the array address.
434 // Pass the now unused name_reg as a scratch register.
435 __ RecordWrite(receiver_reg, Operand(offset), name_reg, scratch);
436 } else {
437 // Write to the properties array.
438 int offset = index * kPointerSize + FixedArray::kHeaderSize;
439 // Get the properties array.
440 __ lw(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
441 __ sw(a0, FieldMemOperand(scratch, offset));
442
443 // Skip updating write barrier if storing a smi.
444 __ JumpIfSmi(a0, &exit);
445
446 // Update the write barrier for the array address.
447 // Ok to clobber receiver_reg and name_reg, since we return.
448 __ RecordWrite(scratch, Operand(offset), name_reg, receiver_reg);
449 }
450
451 // Return the value (register v0).
452 __ bind(&exit);
453 __ mov(v0, a0);
454 __ Ret();
}
456
457
458void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
460 Code* code = NULL;
461 if (kind == Code::LOAD_IC) {
462 code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
463 } else {
464 code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
465 }
466
467 Handle<Code> ic(code);
468 __ Jump(ic, RelocInfo::CODE_TARGET);
}
470
471
static void GenerateCallFunction(MacroAssembler* masm,
473 Object* object,
474 const ParameterCount& arguments,
475 Label* miss) {
476 // ----------- S t a t e -------------
477 // -- a0: receiver
478 // -- a1: function to call
479 // -----------------------------------
480 // Check that the function really is a function.
481 __ JumpIfSmi(a1, miss);
482 __ GetObjectType(a1, a3, a3);
483 __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
484
485 // Patch the receiver on the stack with the global proxy if
486 // necessary.
487 if (object->IsGlobalObject()) {
488 __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
489 __ sw(a3, MemOperand(sp, arguments.immediate() * kPointerSize));
490 }
491
492 // Invoke the function.
493 __ InvokeFunction(a1, arguments, JUMP_FUNCTION);
494}
495
496
497static void PushInterceptorArguments(MacroAssembler* masm,
498 Register receiver,
499 Register holder,
500 Register name,
501 JSObject* holder_obj) {
502 __ push(name);
503 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
504 ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
505 Register scratch = name;
506 __ li(scratch, Operand(Handle<Object>(interceptor)));
507 __ Push(scratch, receiver, holder);
508 __ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
509 __ push(scratch);
510}
511
512
513static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
514 Register receiver,
515 Register holder,
516 Register name,
517 JSObject* holder_obj) {
518 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
519
520 ExternalReference ref =
521 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
522 masm->isolate());
523 __ li(a0, Operand(5));
524 __ li(a1, Operand(ref));
525
526 CEntryStub stub(1);
527 __ CallStub(&stub);
528}
529
530
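// Number of extra stack slots reserved for a fast API call: the holder,
// the callee JS function, and the call data (see GenerateFastApiDirectCall).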
531static const int kFastApiCallArguments = 3;
532
533
534// Reserves space for the extra arguments to FastHandleApiCall in the
535// caller's frame.
536//
537// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
538static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
539 Register scratch) {
540 ASSERT(Smi::FromInt(0) == 0);
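  // Pushing zero_reg both reserves the slots and initializes them to Smi 0,
  // so the stack stays GC-safe until the real values are written.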
541 for (int i = 0; i < kFastApiCallArguments; i++) {
542 __ push(zero_reg);
543 }
544}
545
546
547// Undoes the effects of ReserveSpaceForFastApiCall.
548static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
549 __ Drop(kFastApiCallArguments);
550}
551
552
553static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
554 const CallOptimization& optimization,
555 int argc) {
556 // ----------- S t a t e -------------
557 // -- sp[0] : holder (set by CheckPrototypes)
558 // -- sp[4] : callee js function
559 // -- sp[8] : call data
560 // -- sp[12] : last js argument
561 // -- ...
562 // -- sp[(argc + 3) * 4] : first js argument
563 // -- sp[(argc + 4) * 4] : receiver
564 // -----------------------------------
565 // Get the function and setup the context.
566 JSFunction* function = optimization.constant_function();
567 __ li(t1, Operand(Handle<JSFunction>(function)));
568 __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));
569
570 // Pass the additional arguments FastHandleApiCall expects.
571 Object* call_data = optimization.api_call_info()->data();
572 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
573 if (masm->isolate()->heap()->InNewSpace(call_data)) {
574 __ li(a0, api_call_info_handle);
575 __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset));
576 } else {
577 __ li(t2, Operand(Handle<Object>(call_data)));
578 }
579
580 // Store js function and call data.
581 __ sw(t1, MemOperand(sp, 1 * kPointerSize));
582 __ sw(t2, MemOperand(sp, 2 * kPointerSize));
583
584 // a2 points to call data as expected by Arguments
585 // (refer to layout above).
586 __ Addu(a2, sp, Operand(2 * kPointerSize));
587
588 Object* callback = optimization.api_call_info()->callback();
589 Address api_function_address = v8::ToCData<Address>(callback);
590 ApiFunction fun(api_function_address);
591
592 const int kApiStackSpace = 4;
593
594 __ EnterExitFrame(false, kApiStackSpace);
595
596 // NOTE: the O32 abi requires a0 to hold a special pointer when returning a
597 // struct from the function (which is currently the case). This means we pass
598 // the first argument in a1 instead of a0. TryCallApiFunctionAndReturn
599 // will handle setting up a0.
600
601 // a1 = v8::Arguments&
602 // Arguments is built at sp + 1 (sp is a reserved spot for ra).
603 __ Addu(a1, sp, kPointerSize);
604
605 // v8::Arguments::implicit_args = data
606 __ sw(a2, MemOperand(a1, 0 * kPointerSize));
607 // v8::Arguments::values = last argument
608 __ Addu(t0, a2, Operand(argc * kPointerSize));
609 __ sw(t0, MemOperand(a1, 1 * kPointerSize));
610 // v8::Arguments::length_ = argc
611 __ li(t0, Operand(argc));
612 __ sw(t0, MemOperand(a1, 2 * kPointerSize));
613 // v8::Arguments::is_construct_call = 0
614 __ sw(zero_reg, MemOperand(a1, 3 * kPointerSize));
615
616 // Emitting a stub call may try to allocate (if the code is not
617 // already generated). Do not allow the assembler to perform a
618 // garbage collection but instead return the allocation failure
619 // object.
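  // kStackUnwindSpace covers the JS arguments, the receiver, and the
  // reserved fast API call slots below them.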
620 const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
621 ExternalReference ref =
622 ExternalReference(&fun,
623 ExternalReference::DIRECT_API_CALL,
624 masm->isolate());
625 return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
626}
627
class CallInterceptorCompiler BASE_EMBEDDED {
629 public:
630 CallInterceptorCompiler(StubCompiler* stub_compiler,
631 const ParameterCount& arguments,
632 Register name)
633 : stub_compiler_(stub_compiler),
634 arguments_(arguments),
635 name_(name) {}
636
  MaybeObject* Compile(MacroAssembler* masm,
                       JSObject* object,
                       JSObject* holder,
640 String* name,
641 LookupResult* lookup,
                       Register receiver,
643 Register scratch1,
644 Register scratch2,
645 Register scratch3,
                       Label* miss) {
647 ASSERT(holder->HasNamedInterceptor());
648 ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
649
650 // Check that the receiver isn't a smi.
651 __ JumpIfSmi(receiver, miss);
652
653 CallOptimization optimization(lookup);
654
655 if (optimization.is_constant_call()) {
656 return CompileCacheable(masm,
657 object,
658 receiver,
659 scratch1,
660 scratch2,
661 scratch3,
662 holder,
663 lookup,
664 name,
665 optimization,
666 miss);
667 } else {
668 CompileRegular(masm,
669 object,
670 receiver,
671 scratch1,
672 scratch2,
673 scratch3,
674 name,
675 holder,
676 miss);
677 return masm->isolate()->heap()->undefined_value();
678 }
679 }
680
681 private:
682 MaybeObject* CompileCacheable(MacroAssembler* masm,
683 JSObject* object,
684 Register receiver,
685 Register scratch1,
686 Register scratch2,
687 Register scratch3,
688 JSObject* interceptor_holder,
689 LookupResult* lookup,
690 String* name,
691 const CallOptimization& optimization,
692 Label* miss_label) {
693 ASSERT(optimization.is_constant_call());
694 ASSERT(!lookup->holder()->IsGlobalObject());
695
696 Counters* counters = masm->isolate()->counters();
697
698 int depth1 = kInvalidProtoDepth;
699 int depth2 = kInvalidProtoDepth;
700 bool can_do_fast_api_call = false;
701 if (optimization.is_simple_api_call() &&
702 !lookup->holder()->IsGlobalObject()) {
703 depth1 =
704 optimization.GetPrototypeDepthOfExpectedType(object,
705 interceptor_holder);
706 if (depth1 == kInvalidProtoDepth) {
707 depth2 =
708 optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
709 lookup->holder());
710 }
711 can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
712 (depth2 != kInvalidProtoDepth);
713 }
714
715 __ IncrementCounter(counters->call_const_interceptor(), 1,
716 scratch1, scratch2);
717
718 if (can_do_fast_api_call) {
719 __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
720 scratch1, scratch2);
721 ReserveSpaceForFastApiCall(masm, scratch1);
722 }
723
724 // Check that the maps from receiver to interceptor's holder
725 // haven't changed and thus we can invoke interceptor.
726 Label miss_cleanup;
727 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
728 Register holder =
729 stub_compiler_->CheckPrototypes(object, receiver,
730 interceptor_holder, scratch1,
731 scratch2, scratch3, name, depth1, miss);
732
733 // Invoke an interceptor and if it provides a value,
734 // branch to |regular_invoke|.
735 Label regular_invoke;
736 LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
737 &regular_invoke);
738
739 // Interceptor returned nothing for this property. Try to use cached
740 // constant function.
741
742 // Check that the maps from interceptor's holder to constant function's
743 // holder haven't changed and thus we can use cached constant function.
744 if (interceptor_holder != lookup->holder()) {
745 stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
746 lookup->holder(), scratch1,
747 scratch2, scratch3, name, depth2, miss);
748 } else {
749 // CheckPrototypes has a side effect of fetching a 'holder'
750 // for API (object which is instanceof for the signature). It's
751 // safe to omit it here, as if present, it should be fetched
752 // by the previous CheckPrototypes.
753 ASSERT(depth2 == kInvalidProtoDepth);
754 }
755
756 // Invoke function.
757 if (can_do_fast_api_call) {
758 MaybeObject* result = GenerateFastApiDirectCall(masm,
759 optimization,
760 arguments_.immediate());
761 if (result->IsFailure()) return result;
762 } else {
763 __ InvokeFunction(optimization.constant_function(), arguments_,
764 JUMP_FUNCTION);
765 }
766
767 // Deferred code for fast API call case---clean preallocated space.
768 if (can_do_fast_api_call) {
769 __ bind(&miss_cleanup);
770 FreeSpaceForFastApiCall(masm);
771 __ Branch(miss_label);
772 }
773
774 // Invoke a regular function.
775 __ bind(&regular_invoke);
776 if (can_do_fast_api_call) {
777 FreeSpaceForFastApiCall(masm);
778 }
779
780 return masm->isolate()->heap()->undefined_value();
  }
782
783 void CompileRegular(MacroAssembler* masm,
784 JSObject* object,
785 Register receiver,
786 Register scratch1,
787 Register scratch2,
788 Register scratch3,
789 String* name,
790 JSObject* interceptor_holder,
791 Label* miss_label) {
    Register holder =
793 stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
794 scratch1, scratch2, scratch3, name,
795 miss_label);
796
797 // Call a runtime function to load the interceptor property.
798 __ EnterInternalFrame();
799 // Save the name_ register across the call.
800 __ push(name_);
801
802 PushInterceptorArguments(masm,
803 receiver,
804 holder,
805 name_,
806 interceptor_holder);
807
808 __ CallExternalReference(
809 ExternalReference(
810 IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
811 masm->isolate()),
812 5);
813
814 // Restore the name_ register.
815 __ pop(name_);
816 __ LeaveInternalFrame();
  }
818
819 void LoadWithInterceptor(MacroAssembler* masm,
820 Register receiver,
821 Register holder,
822 JSObject* holder_obj,
823 Register scratch,
824 Label* interceptor_succeeded) {
    __ EnterInternalFrame();
826
827 __ Push(holder, name_);
828
829 CompileCallLoadPropertyWithInterceptor(masm,
830 receiver,
831 holder,
832 name_,
833 holder_obj);
834
835 __ pop(name_); // Restore the name.
836 __ pop(receiver); // Restore the holder.
837 __ LeaveInternalFrame();
838
839 // If interceptor returns no-result sentinel, call the constant function.
840 __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
841 __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));
  }
843
844 StubCompiler* stub_compiler_;
845 const ParameterCount& arguments_;
846 Register name_;
847};
848
849
851// Generate code to check that a global property cell is empty. Create
852// the property cell at compilation time if no cell exists for the
853// property.
854MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
855 MacroAssembler* masm,
856 GlobalObject* global,
857 String* name,
858 Register scratch,
859 Label* miss) {
860 Object* probe;
861 { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
862 if (!maybe_probe->ToObject(&probe)) return maybe_probe;
863 }
864 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
865 ASSERT(cell->value()->IsTheHole());
866 __ li(scratch, Operand(Handle<Object>(cell)));
867 __ lw(scratch,
868 FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
869 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
870 __ Branch(miss, ne, scratch, Operand(at));
871 return cell;
872}
873
874
875// Calls GenerateCheckPropertyCell for each global object in the prototype chain
876// from object to (but not including) holder.
877MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
878 MacroAssembler* masm,
879 JSObject* object,
880 JSObject* holder,
881 String* name,
882 Register scratch,
883 Label* miss) {
884 JSObject* current = object;
885 while (current != holder) {
886 if (current->IsGlobalObject()) {
887 // Returns a cell or a failure.
888 MaybeObject* result = GenerateCheckPropertyCell(
889 masm,
890 GlobalObject::cast(current),
891 name,
892 scratch,
893 miss);
894 if (result->IsFailure()) return result;
895 }
896 ASSERT(current->IsJSObject());
897 current = JSObject::cast(current->GetPrototype());
898 }
899 return NULL;
900}
901
902
903// Convert and store int passed in register ival to IEEE 754 single precision
904// floating point value at memory location (dst + 4 * wordoffset)
905// If FPU is available use it for conversion.
906static void StoreIntAsFloat(MacroAssembler* masm,
907 Register dst,
908 Register wordoffset,
909 Register ival,
910 Register fval,
911 Register scratch1,
912 Register scratch2) {
913 if (CpuFeatures::IsSupported(FPU)) {
914 CpuFeatures::Scope scope(FPU);
915 __ mtc1(ival, f0);
916 __ cvt_s_w(f0, f0);
917 __ sll(scratch1, wordoffset, 2);
918 __ addu(scratch1, dst, scratch1);
919 __ swc1(f0, MemOperand(scratch1, 0));
920 } else {
921 // FPU is not available, do manual conversions.
922
923 Label not_special, done;
924 // Move sign bit from source to destination. This works because the sign
925 // bit in the exponent word of the double has the same position and polarity
926 // as the 2's complement sign bit in a Smi.
927 ASSERT(kBinary32SignMask == 0x80000000u);
928
929 __ And(fval, ival, Operand(kBinary32SignMask));
930 // Negate value if it is negative.
931 __ subu(scratch1, zero_reg, ival);
932 __ movn(ival, scratch1, fval);
933
934 // We have -1, 0 or 1, which we treat specially. Register ival contains
935 // absolute value: it is either equal to 1 (special case of -1 and 1),
936 // greater than 1 (not a special case) or less than 1 (special case of 0).
937 __ Branch(&not_special, gt, ival, Operand(1));
938
939 // For 1 or -1 we need to or in the 0 exponent (biased).
940 static const uint32_t exponent_word_for_1 =
941 kBinary32ExponentBias << kBinary32ExponentShift;
942
943 __ Xor(scratch1, ival, Operand(1));
944 __ li(scratch2, exponent_word_for_1);
945 __ or_(scratch2, fval, scratch2);
946 __ movz(fval, scratch2, scratch1); // Only if ival is equal to 1.
947 __ Branch(&done);
948
949 __ bind(&not_special);
950 // Count leading zeros.
951 // Gets the wrong answer for 0, but we already checked for that case above.
952 Register zeros = scratch2;
953 __ clz(zeros, ival);
954
955 // Compute exponent and or it into the exponent register.
956 __ li(scratch1, (kBitsPerInt - 1) + kBinary32ExponentBias);
957 __ subu(scratch1, scratch1, zeros);
958
959 __ sll(scratch1, scratch1, kBinary32ExponentShift);
960 __ or_(fval, fval, scratch1);
961
962 // Shift up the source chopping the top bit off.
963 __ Addu(zeros, zeros, Operand(1));
964 // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
965 __ sllv(ival, ival, zeros);
966 // And the top (top 20 bits).
967 __ srl(scratch1, ival, kBitsPerInt - kBinary32MantissaBits);
968 __ or_(fval, fval, scratch1);
969
970 __ bind(&done);
971
972 __ sll(scratch1, wordoffset, 2);
973 __ addu(scratch1, dst, scratch1);
974 __ sw(fval, MemOperand(scratch1, 0));
975 }
976}
977
978
979// Convert unsigned integer with specified number of leading zeroes in binary
980// representation to IEEE 754 double.
981// Integer to convert is passed in register hiword.
982// Resulting double is returned in registers hiword:loword.
983// This functions does not work correctly for 0.
984static void GenerateUInt2Double(MacroAssembler* masm,
985 Register hiword,
986 Register loword,
987 Register scratch,
988 int leading_zeroes) {
989 const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
990 const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;
991
992 const int mantissa_shift_for_hi_word =
993 meaningful_bits - HeapNumber::kMantissaBitsInTopWord;
994
995 const int mantissa_shift_for_lo_word =
996 kBitsPerInt - mantissa_shift_for_hi_word;
997
998 __ li(scratch, biased_exponent << HeapNumber::kExponentShift);
999 if (mantissa_shift_for_hi_word > 0) {
1000 __ sll(loword, hiword, mantissa_shift_for_lo_word);
1001 __ srl(hiword, hiword, mantissa_shift_for_hi_word);
1002 __ or_(hiword, scratch, hiword);
1003 } else {
1004 __ mov(loword, zero_reg);
1005 __ sll(hiword, hiword, mantissa_shift_for_hi_word);
1006 __ or_(hiword, scratch, hiword);
1007 }
1008
1009 // If least significant bit of biased exponent was not 1 it was corrupted
1010 // by most significant bit of mantissa so we should fix that.
1011 if (!(biased_exponent & 1)) {
1012 __ li(scratch, 1 << HeapNumber::kExponentShift);
1013 __ nor(scratch, scratch, scratch);
1014 __ and_(hiword, hiword, scratch);
1015 }
1016}
1017
1018
#undef __
1020#define __ ACCESS_MASM(masm())
1021
1022
Register StubCompiler::CheckPrototypes(JSObject* object,
1024 Register object_reg,
1025 JSObject* holder,
1026 Register holder_reg,
1027 Register scratch1,
1028 Register scratch2,
1029 String* name,
1030 int save_at_depth,
1031 Label* miss) {
  // Make sure there's no overlap between holder and object registers.
1033 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
1034 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
1035 && !scratch2.is(scratch1));
1036
1037 // Keep track of the current object in register reg.
1038 Register reg = object_reg;
1039 int depth = 0;
1040
1041 if (save_at_depth == depth) {
1042 __ sw(reg, MemOperand(sp));
1043 }
1044
1045 // Check the maps in the prototype chain.
1046 // Traverse the prototype chain from the object and do map checks.
1047 JSObject* current = object;
1048 while (current != holder) {
1049 depth++;
1050
1051 // Only global objects and objects that do not require access
1052 // checks are allowed in stubs.
1053 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
1054
1055 ASSERT(current->GetPrototype()->IsJSObject());
1056 JSObject* prototype = JSObject::cast(current->GetPrototype());
1057 if (!current->HasFastProperties() &&
1058 !current->IsJSGlobalObject() &&
1059 !current->IsJSGlobalProxy()) {
1060 if (!name->IsSymbol()) {
1061 MaybeObject* maybe_lookup_result = heap()->LookupSymbol(name);
1062 Object* lookup_result = NULL; // Initialization to please compiler.
1063 if (!maybe_lookup_result->ToObject(&lookup_result)) {
1064 set_failure(Failure::cast(maybe_lookup_result));
1065 return reg;
1066 }
1067 name = String::cast(lookup_result);
1068 }
1069 ASSERT(current->property_dictionary()->FindEntry(name) ==
1070 StringDictionary::kNotFound);
1071
1072 MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(),
1073 miss,
1074 reg,
1075 name,
1076 scratch1,
1077 scratch2);
1078 if (negative_lookup->IsFailure()) {
1079 set_failure(Failure::cast(negative_lookup));
1080 return reg;
1081 }
1082
1083 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1084 reg = holder_reg; // From now the object is in holder_reg.
1085 __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1086 } else if (heap()->InNewSpace(prototype)) {
1087 // Get the map of the current object.
1088 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1089
1090 // Branch on the result of the map check.
1091 __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));
1092
1093 // Check access rights to the global object. This has to happen
1094 // after the map check so that we know that the object is
1095 // actually a global object.
1096 if (current->IsJSGlobalProxy()) {
1097 __ CheckAccessGlobalProxy(reg, scratch1, miss);
1098 // Restore scratch register to be the map of the object. In the
1099 // new space case below, we load the prototype from the map in
1100 // the scratch register.
1101 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1102 }
1103
1104 reg = holder_reg; // From now the object is in holder_reg.
1105 // The prototype is in new space; we cannot store a reference
1106 // to it in the code. Load it from the map.
1107 __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1108 } else {
1109 // Check the map of the current object.
1110 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1111 // Branch on the result of the map check.
1112 __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));
1113 // Check access rights to the global object. This has to happen
1114 // after the map check so that we know that the object is
1115 // actually a global object.
1116 if (current->IsJSGlobalProxy()) {
1117 __ CheckAccessGlobalProxy(reg, scratch1, miss);
1118 }
1119 // The prototype is in old space; load it directly.
1120 reg = holder_reg; // From now the object is in holder_reg.
1121 __ li(reg, Operand(Handle<JSObject>(prototype)));
1122 }
1123
1124 if (save_at_depth == depth) {
1125 __ sw(reg, MemOperand(sp));
1126 }
1127
1128 // Go to the next object in the prototype chain.
1129 current = prototype;
1130 }
1131
1132 // Check the holder map.
1133 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1134 __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));
1135
1136 // Log the check depth.
1137 LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
1138 // Perform security check for access to the global object.
1139 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
1140 if (holder->IsJSGlobalProxy()) {
1141 __ CheckAccessGlobalProxy(reg, scratch1, miss);
1142 };
1143
1144 // If we've skipped any global objects, it's not enough to verify
1145 // that their maps haven't changed. We also need to check that the
1146 // property cell for the property is still empty.
1147
1148 MaybeObject* result = GenerateCheckPropertyCells(masm(),
1149 object,
1150 holder,
1151 name,
1152 scratch1,
1153 miss);
1154 if (result->IsFailure()) set_failure(Failure::cast(result));
1155
1156 // Return the register containing the holder.
1157 return reg;
}
1159
1160
void StubCompiler::GenerateLoadField(JSObject* object,
1162 JSObject* holder,
1163 Register receiver,
1164 Register scratch1,
1165 Register scratch2,
                                     Register scratch3,
                                     int index,
1168 String* name,
1169 Label* miss) {
  // Check that the receiver isn't a smi.
1171 __ And(scratch1, receiver, Operand(kSmiTagMask));
1172 __ Branch(miss, eq, scratch1, Operand(zero_reg));
1173
1174 // Check that the maps haven't changed.
1175 Register reg =
1176 CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
1177 name, miss);
1178 GenerateFastPropertyLoad(masm(), v0, reg, holder, index);
1179 __ Ret();
}
1181
1182
1183void StubCompiler::GenerateLoadConstant(JSObject* object,
1184 JSObject* holder,
1185 Register receiver,
1186 Register scratch1,
1187 Register scratch2,
                                        Register scratch3,
                                        Object* value,
1190 String* name,
1191 Label* miss) {
  // Check that the receiver isn't a smi.
1193 __ JumpIfSmi(receiver, miss, scratch1);
1194
1195 // Check that the maps haven't changed.
1196 Register reg =
1197 CheckPrototypes(object, receiver, holder,
1198 scratch1, scratch2, scratch3, name, miss);
1199
1200 // Return the constant value.
1201 __ li(v0, Operand(Handle<Object>(value)));
1202 __ Ret();
}
1204
1205
MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
1207 JSObject* holder,
1208 Register receiver,
1209 Register name_reg,
1210 Register scratch1,
1211 Register scratch2,
1212 Register scratch3,
1213 AccessorInfo* callback,
1214 String* name,
1215 Label* miss) {
  // Check that the receiver isn't a smi.
1217 __ JumpIfSmi(receiver, miss, scratch1);
1218
1219 // Check that the maps haven't changed.
1220 Register reg =
1221 CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
1222 name, miss);
1223
1224 // Build AccessorInfo::args_ list on the stack and push property name below
1225 // the exit frame to make GC aware of them and store pointers to them.
1226 __ push(receiver);
1227 __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_
1228 Handle<AccessorInfo> callback_handle(callback);
1229 if (heap()->InNewSpace(callback_handle->data())) {
1230 __ li(scratch3, callback_handle);
1231 __ lw(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
1232 } else {
1233 __ li(scratch3, Handle<Object>(callback_handle->data()));
1234 }
1235 __ Push(reg, scratch3, name_reg);
1236 __ mov(a2, scratch2); // Saved in case scratch2 == a1.
1237 __ mov(a1, sp); // a1 (first argument - see note below) = Handle<String>
1238
1239 Address getter_address = v8::ToCData<Address>(callback->getter());
1240 ApiFunction fun(getter_address);
1241
1242 // NOTE: the O32 abi requires a0 to hold a special pointer when returning a
1243 // struct from the function (which is currently the case). This means we pass
1244 // the arguments in a1-a2 instead of a0-a1. TryCallApiFunctionAndReturn
1245 // will handle setting up a0.
1246
1247 const int kApiStackSpace = 1;
1248
1249 __ EnterExitFrame(false, kApiStackSpace);
1250 // Create AccessorInfo instance on the stack above the exit frame with
1251 // scratch2 (internal::Object **args_) as the data.
1252 __ sw(a2, MemOperand(sp, kPointerSize));
1253 // a2 (second argument - see note above) = AccessorInfo&
1254 __ Addu(a2, sp, kPointerSize);
1255
1256 // Emitting a stub call may try to allocate (if the code is not
1257 // already generated). Do not allow the assembler to perform a
1258 // garbage collection but instead return the allocation failure
1259 // object.
1260 ExternalReference ref =
1261 ExternalReference(&fun,
1262 ExternalReference::DIRECT_GETTER_CALL,
1263 masm()->isolate());
1264 // 4 args - will be freed later by LeaveExitFrame.
1265 return masm()->TryCallApiFunctionAndReturn(ref, 4);
}
1267
1268
1269void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
1272 Register receiver,
1273 Register name_reg,
1274 Register scratch1,
1275 Register scratch2,
                                           Register scratch3,
                                           String* name,
1278 Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
1280 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1281
1282 // Check that the receiver isn't a smi.
1283 __ JumpIfSmi(receiver, miss);
1284
1285 // So far the most popular follow ups for interceptor loads are FIELD
1286 // and CALLBACKS, so inline only them, other cases may be added
1287 // later.
1288 bool compile_followup_inline = false;
1289 if (lookup->IsProperty() && lookup->IsCacheable()) {
1290 if (lookup->type() == FIELD) {
1291 compile_followup_inline = true;
1292 } else if (lookup->type() == CALLBACKS &&
1293 lookup->GetCallbackObject()->IsAccessorInfo() &&
1294 AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
1295 compile_followup_inline = true;
1296 }
1297 }
1298
1299 if (compile_followup_inline) {
1300 // Compile the interceptor call, followed by inline code to load the
1301 // property from further up the prototype chain if the call fails.
1302 // Check that the maps haven't changed.
1303 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1304 scratch1, scratch2, scratch3,
1305 name, miss);
1306 ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
1307
1308 // Save necessary data before invoking an interceptor.
1309 // Requires a frame to make GC aware of pushed pointers.
1310 __ EnterInternalFrame();
1311
1312 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1313 // CALLBACKS case needs a receiver to be passed into C++ callback.
1314 __ Push(receiver, holder_reg, name_reg);
1315 } else {
1316 __ Push(holder_reg, name_reg);
1317 }
1318
1319 // Invoke an interceptor. Note: map checks from receiver to
1320 // interceptor's holder has been compiled before (see a caller
1321 // of this method).
1322 CompileCallLoadPropertyWithInterceptor(masm(),
1323 receiver,
1324 holder_reg,
1325 name_reg,
1326 interceptor_holder);
1327
1328 // Check if interceptor provided a value for property. If it's
1329 // the case, return immediately.
1330 Label interceptor_failed;
1331 __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
1332 __ Branch(&interceptor_failed, eq, v0, Operand(scratch1));
1333 __ LeaveInternalFrame();
1334 __ Ret();
1335
1336 __ bind(&interceptor_failed);
1337 __ pop(name_reg);
1338 __ pop(holder_reg);
1339 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1340 __ pop(receiver);
1341 }
1342
1343 __ LeaveInternalFrame();
1344
1345 // Check that the maps from interceptor's holder to lookup's holder
1346 // haven't changed. And load lookup's holder into |holder| register.
1347 if (interceptor_holder != lookup->holder()) {
1348 holder_reg = CheckPrototypes(interceptor_holder,
1349 holder_reg,
1350 lookup->holder(),
1351 scratch1,
1352 scratch2,
1353 scratch3,
1354 name,
1355 miss);
1356 }
1357
1358 if (lookup->type() == FIELD) {
1359 // We found FIELD property in prototype chain of interceptor's holder.
1360 // Retrieve a field from field's holder.
1361 GenerateFastPropertyLoad(masm(), v0, holder_reg,
1362 lookup->holder(), lookup->GetFieldIndex());
1363 __ Ret();
1364 } else {
1365 // We found CALLBACKS property in prototype chain of interceptor's
1366 // holder.
1367 ASSERT(lookup->type() == CALLBACKS);
1368 ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
1369 AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
1370 ASSERT(callback != NULL);
1371 ASSERT(callback->getter() != NULL);
1372
1373 // Tail call to runtime.
1374 // Important invariant in CALLBACKS case: the code above must be
1375 // structured to never clobber |receiver| register.
1376 __ li(scratch2, Handle<AccessorInfo>(callback));
1377 // holder_reg is either receiver or scratch1.
1378 if (!receiver.is(holder_reg)) {
1379 ASSERT(scratch1.is(holder_reg));
1380 __ Push(receiver, holder_reg);
1381 __ lw(scratch3,
1382 FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
1383 __ Push(scratch3, scratch2, name_reg);
1384 } else {
1385 __ push(receiver);
1386 __ lw(scratch3,
1387 FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
1388 __ Push(holder_reg, scratch3, scratch2, name_reg);
1389 }
1390
1391 ExternalReference ref =
1392 ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
1393 masm()->isolate());
1394 __ TailCallExternalReference(ref, 5, 1);
1395 }
1396 } else { // !compile_followup_inline
1397 // Call the runtime system to load the interceptor.
1398 // Check that the maps haven't changed.
1399 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1400 scratch1, scratch2, scratch3,
1401 name, miss);
1402 PushInterceptorArguments(masm(), receiver, holder_reg,
1403 name_reg, interceptor_holder);
1404
1405 ExternalReference ref = ExternalReference(
1406 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
1407 __ TailCallExternalReference(ref, 5, 1);
1408 }
}
1410
1411
void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
  if (kind_ == Code::KEYED_CALL_IC) {
1414 __ Branch(miss, ne, a2, Operand(Handle<String>(name)));
1415 }
}
1417
1418
void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
1420 JSObject* holder,
1421 String* name,
1422 Label* miss) {
  ASSERT(holder->IsGlobalObject());
1424
1425 // Get the number of arguments.
1426 const int argc = arguments().immediate();
1427
1428 // Get the receiver from the stack.
1429 __ lw(a0, MemOperand(sp, argc * kPointerSize));
1430
1431 // If the object is the holder then we know that it's a global
1432 // object which can only happen for contextual calls. In this case,
1433 // the receiver cannot be a smi.
1434 if (object != holder) {
1435 __ JumpIfSmi(a0, miss);
1436 }
1437
1438 // Check that the maps haven't changed.
1439 CheckPrototypes(object, a0, holder, a3, a1, t0, name, miss);
}
1441
1442
void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
1444 JSFunction* function,
1445 Label* miss) {
  // Get the value from the cell.
1447 __ li(a3, Operand(Handle<JSGlobalPropertyCell>(cell)));
1448 __ lw(a1, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
1449
1450 // Check that the cell contains the same function.
1451 if (heap()->InNewSpace(function)) {
1452 // We can't embed a pointer to a function in new space so we have
1453 // to verify that the shared function info is unchanged. This has
1454 // the nice side effect that multiple closures based on the same
1455 // function can all use this call IC. Before we load through the
1456 // function, we have to verify that it still is a function.
1457 __ JumpIfSmi(a1, miss);
1458 __ GetObjectType(a1, a3, a3);
1459 __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
1460
1461 // Check the shared function info. Make sure it hasn't changed.
1462 __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
1463 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1464 __ Branch(miss, ne, t0, Operand(a3));
1465 } else {
1466 __ Branch(miss, ne, a1, Operand(Handle<JSFunction>(function)));
1467 }
}
1469
1470
MaybeObject* CallStubCompiler::GenerateMissBranch() {
  MaybeObject* maybe_obj =
1473 isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1474 kind_,
1475 extra_ic_state_);
  Object* obj;
1477 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1478 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
1479 return obj;
}
1481
1482
MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
1484 JSObject* holder,
1485 int index,
                                                String* name) {
  // ----------- S t a t e -------------
1488 // -- a2 : name
1489 // -- ra : return address
1490 // -----------------------------------
1491 Label miss;
1492
1493 GenerateNameCheck(name, &miss);
1494
1495 const int argc = arguments().immediate();
1496
1497 // Get the receiver of the function from the stack into a0.
1498 __ lw(a0, MemOperand(sp, argc * kPointerSize));
1499 // Check that the receiver isn't a smi.
1500 __ JumpIfSmi(a0, &miss, t0);
1501
1502 // Do the right check and compute the holder register.
1503 Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
1504 GenerateFastPropertyLoad(masm(), a1, reg, holder, index);
1505
1506 GenerateCallFunction(masm(), object, arguments(), &miss);
1507
1508 // Handle call cache miss.
1509 __ bind(&miss);
1510 MaybeObject* maybe_result = GenerateMissBranch();
1511 if (maybe_result->IsFailure()) return maybe_result;
1512
1513 // Return the generated code.
1514 return GetCode(FIELD, name);
1515}
1516
1517
1518MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1519 JSObject* holder,
1520 JSGlobalPropertyCell* cell,
1521 JSFunction* function,
1522 String* name) {
1523 // ----------- S t a t e -------------
1524 // -- a2 : name
1525 // -- ra : return address
1526 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1527 // -- ...
1528 // -- sp[argc * 4] : receiver
1529 // -----------------------------------
1530
1531 // If object is not an array, bail out to regular call.
1532 if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1533
1534 Label miss;
1535
1536 GenerateNameCheck(name, &miss);
1537
1538 Register receiver = a1;
1539
1540 // Get the receiver from the stack.
1541 const int argc = arguments().immediate();
1542 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1543
1544 // Check that the receiver isn't a smi.
1545 __ JumpIfSmi(receiver, &miss);
1546
1547 // Check that the maps haven't changed.
1548 CheckPrototypes(JSObject::cast(object), receiver,
1549 holder, a3, v0, t0, name, &miss);
1550
1551 if (argc == 0) {
1552 // Nothing to do, just return the length.
1553 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1554 __ Drop(argc + 1);
1555 __ Ret();
1556 } else {
1557 Label call_builtin;
1558
1559 Register elements = a3;
1560 Register end_elements = t1;
1561
1562 // Get the elements array of the object.
1563 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1564
1565 // Check that the elements are in fast mode and writable.
1566 __ CheckMap(elements,
1567 v0,
1568 Heap::kFixedArrayMapRootIndex,
1569 &call_builtin,
1570 DONT_DO_SMI_CHECK);
1571
1572 if (argc == 1) { // Otherwise fall through to call the builtin.
1573 Label exit, with_write_barrier, attempt_to_grow_elements;
1574
1575 // Get the array's length into v0 and calculate new length.
1576 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1577 STATIC_ASSERT(kSmiTagSize == 1);
1578 STATIC_ASSERT(kSmiTag == 0);
1579 __ Addu(v0, v0, Operand(Smi::FromInt(argc)));
1580
1581 // Get the elements' length.
1582 __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1583
1584 // Check if we could survive without allocation.
1585 __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0));
1586
1587 // Save new length.
1588 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1589
1590 // Push the element.
1591 __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
1592 // We may need a register containing the address end_elements below,
1593 // so write back the value in end_elements.
1594 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1595 __ Addu(end_elements, elements, end_elements);
1596 const int kEndElementsOffset =
1597 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
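 // With the new length (old length + argc) already in v0, end_elements plus
 // kEndElementsOffset is the address of the first free element slot, i.e.
 // element[old length], so the store below appends the pushed value.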
1598 __ sw(t0, MemOperand(end_elements, kEndElementsOffset));
1599 __ Addu(end_elements, end_elements, kPointerSize);
1600
1601 // Check for a smi.
1602 __ JumpIfNotSmi(t0, &with_write_barrier);
1603 __ bind(&exit);
1604 __ Drop(argc + 1);
1605 __ Ret();
1606
1607 __ bind(&with_write_barrier);
1608 __ InNewSpace(elements, t0, eq, &exit);
1609 __ RecordWriteHelper(elements, end_elements, t0);
1610 __ Drop(argc + 1);
1611 __ Ret();
1612
1613 __ bind(&attempt_to_grow_elements);
1614 // v0: array's length + 1.
1615 // t0: elements' length.
1616
1617 if (!FLAG_inline_new) {
1618 __ Branch(&call_builtin);
1619 }
1620
1621 ExternalReference new_space_allocation_top =
1622 ExternalReference::new_space_allocation_top_address(
1623 masm()->isolate());
1624 ExternalReference new_space_allocation_limit =
1625 ExternalReference::new_space_allocation_limit_address(
1626 masm()->isolate());
1627
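 // The elements backing store can only be grown in place when it is the most
 // recently allocated object in new space, i.e. when its end coincides with
 // the current allocation top; in that case the top is simply bumped by
 // kAllocationDelta words and the new slots are filled with the hole.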
1628 const int kAllocationDelta = 4;
1629 // Load top and check if it is the end of elements.
1630 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1631 __ Addu(end_elements, elements, end_elements);
1632 __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
1633 __ li(t3, Operand(new_space_allocation_top));
1634 __ lw(t2, MemOperand(t3));
1635 __ Branch(&call_builtin, ne, end_elements, Operand(t2));
1636
1637 __ li(t5, Operand(new_space_allocation_limit));
1638 __ lw(t5, MemOperand(t5));
1639 __ Addu(t2, t2, Operand(kAllocationDelta * kPointerSize));
1640 __ Branch(&call_builtin, hi, t2, Operand(t5));
1641
1642 // We fit and could grow elements.
1643 // Update new_space_allocation_top.
1644 __ sw(t2, MemOperand(t3));
1645 // Push the argument.
1646 __ lw(t2, MemOperand(sp, (argc - 1) * kPointerSize));
1647 __ sw(t2, MemOperand(end_elements));
1648 // Fill the rest with holes.
1649 __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1650 for (int i = 1; i < kAllocationDelta; i++) {
1651 __ sw(t2, MemOperand(end_elements, i * kPointerSize));
1652 }
1653
1654 // Update elements' and array's sizes.
1655 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1656 __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta)));
1657 __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1658
1659 // Elements are in new space, so write barrier is not required.
1660 __ Drop(argc + 1);
1661 __ Ret();
1662 }
1663 __ bind(&call_builtin);
1664 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1665 masm()->isolate()),
1666 argc + 1,
1667 1);
1668 }
1669
1670 // Handle call cache miss.
1671 __ bind(&miss);
1672 MaybeObject* maybe_result = GenerateMissBranch();
1673 if (maybe_result->IsFailure()) return maybe_result;
1674
1675 // Return the generated code.
1676 return GetCode(function);
1677}
1678
1679
1680MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
1681 JSObject* holder,
1682 JSGlobalPropertyCell* cell,
1683 JSFunction* function,
1684 String* name) {
1685 // ----------- S t a t e -------------
1686 // -- a2 : name
1687 // -- ra : return address
1688 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1689 // -- ...
1690 // -- sp[argc * 4] : receiver
1691 // -----------------------------------
1692
1693 // If object is not an array, bail out to regular call.
1694 if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1695
1696 Label miss, return_undefined, call_builtin;
1697
1698 Register receiver = a1;
1699 Register elements = a3;
1700
1701 GenerateNameCheck(name, &miss);
1702
1703 // Get the receiver from the stack.
1704 const int argc = arguments().immediate();
1705 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1706
1707 // Check that the receiver isn't a smi.
1708 __ JumpIfSmi(receiver, &miss);
1709
1710 // Check that the maps haven't changed.
1711 CheckPrototypes(JSObject::cast(object),
1712 receiver, holder, elements, t0, v0, name, &miss);
1713
1714 // Get the elements array of the object.
1715 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1716
1717 // Check that the elements are in fast mode and writable.
1718 __ CheckMap(elements,
1719 v0,
1720 Heap::kFixedArrayMapRootIndex,
1721 &call_builtin,
1722 DONT_DO_SMI_CHECK);
1723
1724 // Get the array's length into t0 and calculate new length.
1725 __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1726 __ Subu(t0, t0, Operand(Smi::FromInt(1)));
1727 __ Branch(&return_undefined, lt, t0, Operand(zero_reg));
1728
1729 // Get the last element.
1730 __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1731 STATIC_ASSERT(kSmiTagSize == 1);
1732 STATIC_ASSERT(kSmiTag == 0);
1733 // We can't address the last element in one operation. Compute the more
1734 // expensive shift first, and use an offset later on.
1735 __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
1736 __ Addu(elements, elements, t1);
1737 __ lw(v0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1738 __ Branch(&call_builtin, eq, v0, Operand(t2));
1739
1740 // Set the array's length.
1741 __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1742
1743 // Fill with the hole.
1744 __ sw(t2, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1745 __ Drop(argc + 1);
1746 __ Ret();
1747
1748 __ bind(&return_undefined);
1749 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
1750 __ Drop(argc + 1);
1751 __ Ret();
1752
1753 __ bind(&call_builtin);
1754 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
1755 masm()->isolate()),
1756 argc + 1,
1757 1);
1758
1759 // Handle call cache miss.
1760 __ bind(&miss);
1761 MaybeObject* maybe_result = GenerateMissBranch();
1762 if (maybe_result->IsFailure()) return maybe_result;
1763
1764 // Return the generated code.
1765 return GetCode(function);
1766}
1767
1768
1769MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
1770 Object* object,
1771 JSObject* holder,
1772 JSGlobalPropertyCell* cell,
1773 JSFunction* function,
1774 String* name) {
1775 // ----------- S t a t e -------------
1776 // -- a2 : function name
1777 // -- ra : return address
1778 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1779 // -- ...
1780 // -- sp[argc * 4] : receiver
1781 // -----------------------------------
1782
1783 // If object is not a string, bail out to regular call.
1784 if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1785
1786 const int argc = arguments().immediate();
1787
1788 Label miss;
1789 Label name_miss;
1790 Label index_out_of_range;
1791
1792 Label* index_out_of_range_label = &index_out_of_range;
1793
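 // For the default CALL_IC string stub an out-of-range index is treated as a
 // miss instead of returning NaN directly below.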
1794 if (kind_ == Code::CALL_IC &&
1795 (CallICBase::StringStubState::decode(extra_ic_state_) ==
1796 DEFAULT_STRING_STUB)) {
1797 index_out_of_range_label = &miss;
1798 }
1799
1800 GenerateNameCheck(name, &name_miss);
1801
1802 // Check that the maps starting from the prototype haven't changed.
1803 GenerateDirectLoadGlobalFunctionPrototype(masm(),
1804 Context::STRING_FUNCTION_INDEX,
1805 v0,
1806 &miss);
1807 ASSERT(object != holder);
1808 CheckPrototypes(JSObject::cast(object->GetPrototype()), v0, holder,
1809 a1, a3, t0, name, &miss);
1810
1811 Register receiver = a1;
1812 Register index = t1;
1813 Register scratch = a3;
1814 Register result = v0;
1815 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1816 if (argc > 0) {
1817 __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1818 } else {
1819 __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1820 }
1821
1822 StringCharCodeAtGenerator char_code_at_generator(receiver,
1823 index,
1824 scratch,
1825 result,
1826 &miss, // When not a string.
1827 &miss, // When not a number.
1828 index_out_of_range_label,
1829 STRING_INDEX_IS_NUMBER);
1830 char_code_at_generator.GenerateFast(masm());
1831 __ Drop(argc + 1);
1832 __ Ret();
1833
1834 StubRuntimeCallHelper call_helper;
1835 char_code_at_generator.GenerateSlow(masm(), call_helper);
1836
1837 if (index_out_of_range.is_linked()) {
1838 __ bind(&index_out_of_range);
1839 __ LoadRoot(v0, Heap::kNanValueRootIndex);
1840 __ Drop(argc + 1);
1841 __ Ret();
1842 }
1843
1844 __ bind(&miss);
1845 // Restore function name in a2.
1846 __ li(a2, Handle<String>(name));
1847 __ bind(&name_miss);
1848 MaybeObject* maybe_result = GenerateMissBranch();
1849 if (maybe_result->IsFailure()) return maybe_result;
1850
1851 // Return the generated code.
1852 return GetCode(function);
1853}
1854
1855
1856MaybeObject* CallStubCompiler::CompileStringCharAtCall(
1857 Object* object,
1858 JSObject* holder,
1859 JSGlobalPropertyCell* cell,
1860 JSFunction* function,
1861 String* name) {
1862 // ----------- S t a t e -------------
1863 // -- a2 : function name
1864 // -- ra : return address
1865 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1866 // -- ...
1867 // -- sp[argc * 4] : receiver
1868 // -----------------------------------
1869
1870 // If object is not a string, bail out to regular call.
1871 if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1872
1873 const int argc = arguments().immediate();
1874
1875 Label miss;
1876 Label name_miss;
1877 Label index_out_of_range;
1878 Label* index_out_of_range_label = &index_out_of_range;
1879
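 // As in the charCodeAt stub above, the default CALL_IC string stub treats an
 // out-of-range index as a miss instead of returning the empty string below.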
1880 if (kind_ == Code::CALL_IC &&
1881 (CallICBase::StringStubState::decode(extra_ic_state_) ==
1882 DEFAULT_STRING_STUB)) {
1883 index_out_of_range_label = &miss;
1884 }
1885
1886 GenerateNameCheck(name, &name_miss);
1887
1888 // Check that the maps starting from the prototype haven't changed.
1889 GenerateDirectLoadGlobalFunctionPrototype(masm(),
1890 Context::STRING_FUNCTION_INDEX,
1891 v0,
1892 &miss);
1893 ASSERT(object != holder);
1894 CheckPrototypes(JSObject::cast(object->GetPrototype()), v0, holder,
1895 a1, a3, t0, name, &miss);
1896
1897 Register receiver = v0;
1898 Register index = t1;
1899 Register scratch1 = a1;
1900 Register scratch2 = a3;
1901 Register result = v0;
1902 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1903 if (argc > 0) {
1904 __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1905 } else {
1906 __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1907 }
1908
1909 StringCharAtGenerator char_at_generator(receiver,
1910 index,
1911 scratch1,
1912 scratch2,
1913 result,
1914 &miss, // When not a string.
1915 &miss, // When not a number.
1916 index_out_of_range_label,
1917 STRING_INDEX_IS_NUMBER);
1918 char_at_generator.GenerateFast(masm());
1919 __ Drop(argc + 1);
1920 __ Ret();
1921
1922 StubRuntimeCallHelper call_helper;
1923 char_at_generator.GenerateSlow(masm(), call_helper);
1924
1925 if (index_out_of_range.is_linked()) {
1926 __ bind(&index_out_of_range);
1927 __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
1928 __ Drop(argc + 1);
1929 __ Ret();
1930 }
1931
1932 __ bind(&miss);
1933 // Restore function name in a2.
1934 __ li(a2, Handle<String>(name));
1935 __ bind(&name_miss);
1936 MaybeObject* maybe_result = GenerateMissBranch();
1937 if (maybe_result->IsFailure()) return maybe_result;
1938
1939 // Return the generated code.
1940 return GetCode(function);
1941}
1942
1943
1944MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
1945 Object* object,
1946 JSObject* holder,
1947 JSGlobalPropertyCell* cell,
1948 JSFunction* function,
1949 String* name) {
1950 // ----------- S t a t e -------------
1951 // -- a2 : function name
1952 // -- ra : return address
1953 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1954 // -- ...
1955 // -- sp[argc * 4] : receiver
1956 // -----------------------------------
1957
1958 const int argc = arguments().immediate();
1959
1960 // If the object is not a JSObject or we got an unexpected number of
1961 // arguments, bail out to the regular call.
1962 if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
1963
1964 Label miss;
1965 GenerateNameCheck(name, &miss);
1966
1967 if (cell == NULL) {
1968 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1969
1970 STATIC_ASSERT(kSmiTag == 0);
1971 __ JumpIfSmi(a1, &miss);
1972
1973 CheckPrototypes(JSObject::cast(object), a1, holder, v0, a3, t0, name,
1974 &miss);
1975 } else {
1976 ASSERT(cell->value() == function);
1977 GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
1978 GenerateLoadFunctionFromCell(cell, function, &miss);
1979 }
1980
1981 // Load the char code argument.
1982 Register code = a1;
1983 __ lw(code, MemOperand(sp, 0 * kPointerSize));
1984
1985 // Check the code is a smi.
1986 Label slow;
1987 STATIC_ASSERT(kSmiTag == 0);
1988 __ JumpIfNotSmi(code, &slow);
1989
1990 // Convert the smi code to uint16.
1991 __ And(code, code, Operand(Smi::FromInt(0xffff)));
1992
1993 StringCharFromCodeGenerator char_from_code_generator(code, v0);
1994 char_from_code_generator.GenerateFast(masm());
1995 __ Drop(argc + 1);
1996 __ Ret();
1997
1998 StubRuntimeCallHelper call_helper;
1999 char_from_code_generator.GenerateSlow(masm(), call_helper);
2000
2001 // Tail call the full function. We do not have to patch the receiver
2002 // because the function makes no use of it.
2003 __ bind(&slow);
2004 __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2005
2006 __ bind(&miss);
2007 // a2: function name.
2008 MaybeObject* maybe_result = GenerateMissBranch();
2009 if (maybe_result->IsFailure()) return maybe_result;
2010
2011 // Return the generated code.
2012 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
2013}
2014
2015
2016MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
2017 JSObject* holder,
2018 JSGlobalPropertyCell* cell,
2019 JSFunction* function,
2020 String* name) {
2021 // ----------- S t a t e -------------
2022 // -- a2 : function name
2023 // -- ra : return address
2024 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2025 // -- ...
2026 // -- sp[argc * 4] : receiver
2027 // -----------------------------------
2028
2029 if (!CpuFeatures::IsSupported(FPU))
2030 return heap()->undefined_value();
2031 CpuFeatures::Scope scope_fpu(FPU);
2032
2033 const int argc = arguments().immediate();
2034
2035 // If the object is not a JSObject or we got an unexpected number of
2036 // arguments, bail out to the regular call.
2037 if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
2038
2039 Label miss, slow;
2040 GenerateNameCheck(name, &miss);
2041
2042 if (cell == NULL) {
2043 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
2044
2045 STATIC_ASSERT(kSmiTag == 0);
2046 __ JumpIfSmi(a1, &miss);
2047
2048 CheckPrototypes(JSObject::cast(object), a1, holder, a0, a3, t0, name,
2049 &miss);
2050 } else {
2051 ASSERT(cell->value() == function);
2052 GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
2053 GenerateLoadFunctionFromCell(cell, function, &miss);
2054 }
2055
2056 // Load the (only) argument into v0.
2057 __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2058
2059 // If the argument is a smi, just return.
2060 STATIC_ASSERT(kSmiTag == 0);
2061 __ And(t0, v0, Operand(kSmiTagMask));
2062 __ Drop(argc + 1, eq, t0, Operand(zero_reg));
2063 __ Ret(eq, t0, Operand(zero_reg));
2064
2065 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2066
2067 Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return;
2068
2069 // If fpu is enabled, we use the floor instruction.
2070
2071 // Load the HeapNumber value.
2072 __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
2073
2074 // Backup FCSR.
2075 __ cfc1(a3, FCSR);
2076 // Clearing FCSR clears the exception mask with no side-effects.
2077 __ ctc1(zero_reg, FCSR);
2078 // Convert the argument to an integer.
2079 __ floor_w_d(f0, f0);
2080
2081 // Start checking for special cases.
2082 // Get the argument exponent and clear the sign bit.
2083 __ lw(t1, FieldMemOperand(v0, HeapNumber::kValueOffset + kPointerSize));
2084 __ And(t2, t1, Operand(~HeapNumber::kSignMask));
2085 __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord);
2086
2087 // Retrieve FCSR and check for fpu errors.
2088 __ cfc1(t5, FCSR);
2089 __ srl(t5, t5, kFCSRFlagShift);
2090 // Flag 1 marks an inaccurate but still good result so we ignore it.
2091 __ And(t5, t5, Operand(kFCSRFlagMask ^ 1));
2092 __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg));
2093
2094 // Check for NaN, Infinity, and -Infinity.
2095 // They are invariant through a Math.Floor call, so just
2096 // return the original argument.
2097 __ Subu(t3, t2, Operand(HeapNumber::kExponentMask
2098 >> HeapNumber::kMantissaBitsInTopWord));
2099 __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg));
2100 // We had an overflow or underflow in the conversion. Check if we
2101 // have a big exponent.
2102 // If greater or equal, the argument is already rounded and in v0.
2103 __ Branch(&restore_fcsr_and_return, ge, t3,
2104 Operand(HeapNumber::kMantissaBits));
2105 __ Branch(&wont_fit_smi);
2106
2107 __ bind(&no_fpu_error);
2108 // Move the result back to v0.
2109 __ mfc1(v0, f0);
2110 // Check if the result fits into a smi.
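 // Adding 0x40000000 overflows into the sign bit exactly when the value lies
 // outside the 31-bit smi range, so a negative result means it won't fit.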
2111 __ Addu(a1, v0, Operand(0x40000000));
2112 __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg));
2113 // Tag the result.
2114 STATIC_ASSERT(kSmiTag == 0);
2115 __ sll(v0, v0, kSmiTagSize);
2116
2117 // Check for -0.
2118 __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg));
2119 // t1 already holds the HeapNumber exponent.
2120 __ And(t0, t1, Operand(HeapNumber::kSignMask));
2121 // If our HeapNumber is negative it was -0, so load its address and return.
2122 // Else v0 is loaded with 0, so we can also just return.
2123 __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg));
2124 __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2125
2126 __ bind(&restore_fcsr_and_return);
2127 // Restore FCSR and return.
2128 __ ctc1(a3, FCSR);
2129
2130 __ Drop(argc + 1);
2131 __ Ret();
2132
2133 __ bind(&wont_fit_smi);
2134 // Restore FCSR and fall to slow case.
2135 __ ctc1(a3, FCSR);
2136
2137 __ bind(&slow);
2138 // Tail call the full function. We do not have to patch the receiver
2139 // because the function makes no use of it.
2140 __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2141
2142 __ bind(&miss);
2143 // a2: function name.
2144 MaybeObject* obj = GenerateMissBranch();
2145 if (obj->IsFailure()) return obj;
2146
2147 // Return the generated code.
2148 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
2149}
2150
2151
2152MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
2153 JSObject* holder,
2154 JSGlobalPropertyCell* cell,
2155 JSFunction* function,
2156 String* name) {
2157 // ----------- S t a t e -------------
2158 // -- a2 : function name
2159 // -- ra : return address
2160 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2161 // -- ...
2162 // -- sp[argc * 4] : receiver
2163 // -----------------------------------
2164
2165 const int argc = arguments().immediate();
2166
2167 // If the object is not a JSObject or we got an unexpected number of
2168 // arguments, bail out to the regular call.
2169 if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
2170
2171 Label miss;
2172 GenerateNameCheck(name, &miss);
2173
2174 if (cell == NULL) {
2175 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
2176
2177 STATIC_ASSERT(kSmiTag == 0);
2178 __ JumpIfSmi(a1, &miss);
2179
2180 CheckPrototypes(JSObject::cast(object), a1, holder, v0, a3, t0, name,
2181 &miss);
2182 } else {
2183 ASSERT(cell->value() == function);
2184 GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
2185 GenerateLoadFunctionFromCell(cell, function, &miss);
2186 }
2187
2188 // Load the (only) argument into v0.
2189 __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2190
2191 // Check if the argument is a smi.
2192 Label not_smi;
2193 STATIC_ASSERT(kSmiTag == 0);
2194 __ JumpIfNotSmi(v0, &not_smi);
2195
2196 // Do bitwise not or do nothing depending on the sign of the
2197 // argument.
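 // Branchless abs on the tagged smi: with mask = x >> 31 (all ones for a
 // negative x, zero otherwise), abs(x) = (x ^ mask) - mask.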
2198 __ sra(t0, v0, kBitsPerInt - 1);
2199 __ Xor(a1, v0, t0);
2200
2201 // Add 1 or do nothing depending on the sign of the argument.
2202 __ Subu(v0, a1, t0);
2203
2204 // If the result is still negative, go to the slow case.
2205 // This only happens for the most negative smi.
2206 Label slow;
2207 __ Branch(&slow, lt, v0, Operand(zero_reg));
2208
2209 // Smi case done.
2210 __ Drop(argc + 1);
2211 __ Ret();
2212
2213 // Check if the argument is a heap number and load its exponent and
2214 // sign.
2215 __ bind(&not_smi);
2216 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2217 __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
2218
2219 // Check the sign of the argument. If the argument is positive,
2220 // just return it.
2221 Label negative_sign;
2222 __ And(t0, a1, Operand(HeapNumber::kSignMask));
2223 __ Branch(&negative_sign, ne, t0, Operand(zero_reg));
2224 __ Drop(argc + 1);
2225 __ Ret();
2226
2227 // If the argument is negative, clear the sign, and return a new
2228 // number.
2229 __ bind(&negative_sign);
2230 __ Xor(a1, a1, Operand(HeapNumber::kSignMask));
2231 __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
2232 __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
2233 __ AllocateHeapNumber(v0, t0, t1, t2, &slow);
2234 __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
2235 __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
2236 __ Drop(argc + 1);
2237 __ Ret();
2238
2239 // Tail call the full function. We do not have to patch the receiver
2240 // because the function makes no use of it.
2241 __ bind(&slow);
2242 __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2243
2244 __ bind(&miss);
2245 // a2: function name.
2246 MaybeObject* maybe_result = GenerateMissBranch();
2247 if (maybe_result->IsFailure()) return maybe_result;
2248
2249 // Return the generated code.
2250 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
2251}
2252
2253
2254MaybeObject* CallStubCompiler::CompileFastApiCall(
2255 const CallOptimization& optimization,
2256 Object* object,
2257 JSObject* holder,
2258 JSGlobalPropertyCell* cell,
2259 JSFunction* function,
2260 String* name) {
2261
2262 Counters* counters = isolate()->counters();
2263
2264 ASSERT(optimization.is_simple_api_call());
2265 // Bail out if object is a global object as we don't want to
2266 // repatch it to global receiver.
2267 if (object->IsGlobalObject()) return heap()->undefined_value();
2268 if (cell != NULL) return heap()->undefined_value();
2269 if (!object->IsJSObject()) return heap()->undefined_value();
2270 int depth = optimization.GetPrototypeDepthOfExpectedType(
2271 JSObject::cast(object), holder);
2272 if (depth == kInvalidProtoDepth) return heap()->undefined_value();
2273
2274 Label miss, miss_before_stack_reserved;
2275
2276 GenerateNameCheck(name, &miss_before_stack_reserved);
2277
2278 // Get the receiver from the stack.
2279 const int argc = arguments().immediate();
2280 __ lw(a1, MemOperand(sp, argc * kPointerSize));
2281
2282 // Check that the receiver isn't a smi.
2283 __ JumpIfSmi(a1, &miss_before_stack_reserved);
2284
2285 __ IncrementCounter(counters->call_const(), 1, a0, a3);
2286 __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);
2287
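 // Reserve the stack slots needed by the fast API call; they are released
 // again by FreeSpaceForFastApiCall on the miss path below.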
2288 ReserveSpaceForFastApiCall(masm(), a0);
2289
2290 // Check that the maps haven't changed and find a Holder as a side effect.
2291 CheckPrototypes(JSObject::cast(object), a1, holder, a0, a3, t0, name,
2292 depth, &miss);
2293
2294 MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc);
2295 if (result->IsFailure()) return result;
2296
2297 __ bind(&miss);
2298 FreeSpaceForFastApiCall(masm());
2299
2300 __ bind(&miss_before_stack_reserved);
2301 MaybeObject* maybe_result = GenerateMissBranch();
2302 if (maybe_result->IsFailure()) return maybe_result;
2303
2304 // Return the generated code.
2305 return GetCode(function);
2306}
2307
2308
2309MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
2310 JSObject* holder,
2311 JSFunction* function,
2312 String* name,
2313 CheckType check) {
2314 // ----------- S t a t e -------------
2315 // -- a2 : name
2316 // -- ra : return address
2317 // -----------------------------------
2318 if (HasCustomCallGenerator(function)) {
2319 MaybeObject* maybe_result = CompileCustomCall(
2320 object, holder, NULL, function, name);
2321 Object* result;
2322 if (!maybe_result->ToObject(&result)) return maybe_result;
2323 // Undefined means bail out to regular compiler.
2324 if (!result->IsUndefined()) return result;
2325 }
2326
2327 Label miss;
2328
2329 GenerateNameCheck(name, &miss);
2330
2331 // Get the receiver from the stack.
2332 const int argc = arguments().immediate();
2333 __ lw(a1, MemOperand(sp, argc * kPointerSize));
2334
2335 // Check that the receiver isn't a smi.
2336 if (check != NUMBER_CHECK) {
2337 __ And(t1, a1, Operand(kSmiTagMask));
2338 __ Branch(&miss, eq, t1, Operand(zero_reg));
2339 }
2340
2341 // Make sure that it's okay not to patch the on stack receiver
2342 // unless we're doing a receiver map check.
2343 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2344
2345 SharedFunctionInfo* function_info = function->shared();
2346 switch (check) {
2347 case RECEIVER_MAP_CHECK:
2348 __ IncrementCounter(masm()->isolate()->counters()->call_const(),
2349 1, a0, a3);
2350
2351 // Check that the maps haven't changed.
2352 CheckPrototypes(JSObject::cast(object), a1, holder, a0, a3, t0, name,
2353 &miss);
2354
2355 // Patch the receiver on the stack with the global proxy if
2356 // necessary.
2357 if (object->IsGlobalObject()) {
2358 __ lw(a3, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
2359 __ sw(a3, MemOperand(sp, argc * kPointerSize));
2360 }
2361 break;
2362
2363 case STRING_CHECK:
2364 if (!function->IsBuiltin() && !function_info->strict_mode()) {
2365 // Calling non-strict non-builtins with a value as the receiver
2366 // requires boxing.
2367 __ jmp(&miss);
2368 } else {
2369 // Check that the object is a string or a symbol.
2370 __ GetObjectType(a1, a3, a3);
2371 __ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
2372 // Check that the maps starting from the prototype haven't changed.
2373 GenerateDirectLoadGlobalFunctionPrototype(
2374 masm(), Context::STRING_FUNCTION_INDEX, a0, &miss);
2375 CheckPrototypes(JSObject::cast(object->GetPrototype()), a0, holder, a3,
2376 a1, t0, name, &miss);
2377 }
2378 break;
2379
2380 case NUMBER_CHECK: {
2381 if (!function->IsBuiltin() && !function_info->strict_mode()) {
2382 // Calling non-strict non-builtins with a value as the receiver
2383 // requires boxing.
2384 __ jmp(&miss);
2385 } else {
2386 Label fast;
2387 // Check that the object is a smi or a heap number.
2388 __ And(t1, a1, Operand(kSmiTagMask));
2389 __ Branch(&fast, eq, t1, Operand(zero_reg));
2390 __ GetObjectType(a1, a0, a0);
2391 __ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
2392 __ bind(&fast);
2393 // Check that the maps starting from the prototype haven't changed.
2394 GenerateDirectLoadGlobalFunctionPrototype(
2395 masm(), Context::NUMBER_FUNCTION_INDEX, a0, &miss);
2396 CheckPrototypes(JSObject::cast(object->GetPrototype()), a0, holder, a3,
2397 a1, t0, name, &miss);
2398 }
2399 break;
2400 }
2401
2402 case BOOLEAN_CHECK: {
2403 if (!function->IsBuiltin() && !function_info->strict_mode()) {
2404 // Calling non-strict non-builtins with a value as the receiver
2405 // requires boxing.
2406 __ jmp(&miss);
2407 } else {
2408 Label fast;
2409 // Check that the object is a boolean.
2410 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
2411 __ Branch(&fast, eq, a1, Operand(t0));
2412 __ LoadRoot(t0, Heap::kFalseValueRootIndex);
2413 __ Branch(&miss, ne, a1, Operand(t0));
2414 __ bind(&fast);
2415 // Check that the maps starting from the prototype haven't changed.
2416 GenerateDirectLoadGlobalFunctionPrototype(
2417 masm(), Context::BOOLEAN_FUNCTION_INDEX, a0, &miss);
2418 CheckPrototypes(JSObject::cast(object->GetPrototype()), a0, holder, a3,
2419 a1, t0, name, &miss);
2420 }
2421 break;
2422 }
2423
2424 default:
2425 UNREACHABLE();
2426 }
2427
2428 __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2429
2430 // Handle call cache miss.
2431 __ bind(&miss);
2432
2433 MaybeObject* maybe_result = GenerateMissBranch();
2434 if (maybe_result->IsFailure()) return maybe_result;
2435
2436 // Return the generated code.
2437 return GetCode(function);
2438}
2439
2440
2441MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
2442 JSObject* holder,
2443 String* name) {
2444 // ----------- S t a t e -------------
2445 // -- a2 : name
2446 // -- ra : return address
2447 // -----------------------------------
2448
2449 Label miss;
2450
2451 GenerateNameCheck(name, &miss);
2452
2453 // Get the number of arguments.
2454 const int argc = arguments().immediate();
2455
2456 LookupResult lookup;
2457 LookupPostInterceptor(holder, name, &lookup);
2458
2459 // Get the receiver from the stack.
2460 __ lw(a1, MemOperand(sp, argc * kPointerSize));
2461
2462 CallInterceptorCompiler compiler(this, arguments(), a2);
2463 MaybeObject* result = compiler.Compile(masm(),
2464 object,
2465 holder,
2466 name,
2467 &lookup,
2468 a1,
2469 a3,
2470 t0,
2471 a0,
2472 &miss);
2473 if (result->IsFailure()) {
2474 return result;
2475 }
2476
2477 // Move returned value, the function to call, to a1.
2478 __ mov(a1, v0);
2479 // Restore receiver.
2480 __ lw(a0, MemOperand(sp, argc * kPointerSize));
2481
2482 GenerateCallFunction(masm(), object, arguments(), &miss);
2483
2484 // Handle call cache miss.
2485 __ bind(&miss);
2486 MaybeObject* maybe_result = GenerateMissBranch();
2487 if (maybe_result->IsFailure()) return maybe_result;
2488
2489 // Return the generated code.
2490 return GetCode(INTERCEPTOR, name);
2491}
2492
2493
2494MaybeObject* CallStubCompiler::CompileCallGlobal(
2495 JSObject* object,
2496 GlobalObject* holder,
2497 JSGlobalPropertyCell* cell,
2498 JSFunction* function,
2499 String* name,
2500 Code::ExtraICState extra_ic_state) {
2501 // ----------- S t a t e -------------
2502 // -- a2 : name
2503 // -- ra : return address
2504 // -----------------------------------
2505
2506 if (HasCustomCallGenerator(function)) {
2507 MaybeObject* maybe_result = CompileCustomCall(
2508 object, holder, cell, function, name);
2509 Object* result;
2510 if (!maybe_result->ToObject(&result)) return maybe_result;
2511 // Undefined means bail out to regular compiler.
2512 if (!result->IsUndefined()) return result;
2513 }
2514
2515 Label miss;
2516
2517 GenerateNameCheck(name, &miss);
2518
2519 // Get the number of arguments.
2520 const int argc = arguments().immediate();
2521
2522 GenerateGlobalReceiverCheck(object, holder, name, &miss);
2523 GenerateLoadFunctionFromCell(cell, function, &miss);
2524
2525 // Patch the receiver on the stack with the global proxy if
2526 // necessary.
2527 if (object->IsGlobalObject()) {
2528 __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
2529 __ sw(a3, MemOperand(sp, argc * kPointerSize));
2530 }
2531
2532 // Set up the context (the function is already in a1).
2533 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
2534
2535 // Jump to the cached code (tail call).
2536 Counters* counters = masm()->isolate()->counters();
2537 __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
2538 ASSERT(function->is_compiled());
2539 Handle<Code> code(function->code());
2540 ParameterCount expected(function->shared()->formal_parameter_count());
2541 CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
2542 ? CALL_AS_FUNCTION
2543 : CALL_AS_METHOD;
2544 if (V8::UseCrankshaft()) {
2545 UNIMPLEMENTED_MIPS();
2546 } else {
2547 __ InvokeCode(code, expected, arguments(), RelocInfo::CODE_TARGET,
2548 JUMP_FUNCTION, call_kind);
2549 }
2550
2551 // Handle call cache miss.
2552 __ bind(&miss);
2553 __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3);
2554 MaybeObject* maybe_result = GenerateMissBranch();
2555 if (maybe_result->IsFailure()) return maybe_result;
2556
2557 // Return the generated code.
2558 return GetCode(NORMAL, name);
2559}
2560
2561
2562MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
2563 int index,
2564 Map* transition,
2565 String* name) {
2566 // ----------- S t a t e -------------
2567 // -- a0 : value
2568 // -- a1 : receiver
2569 // -- a2 : name
2570 // -- ra : return address
2571 // -----------------------------------
2572 Label miss;
2573
2574 // Name register might be clobbered.
2575 GenerateStoreField(masm(),
2576 object,
2577 index,
2578 transition,
2579 a1, a2, a3,
2580 &miss);
2581 __ bind(&miss);
2582 __ li(a2, Operand(Handle<String>(name))); // Restore name.
2583 Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2584 __ Jump(ic, RelocInfo::CODE_TARGET);
2585
2586 // Return the generated code.
2587 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2588}
2589
2590
2591MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2592 AccessorInfo* callback,
2593 String* name) {
2594 // ----------- S t a t e -------------
2595 // -- a0 : value
2596 // -- a1 : receiver
2597 // -- a2 : name
2598 // -- ra : return address
2599 // -----------------------------------
2600 Label miss;
2601
2602 // Check that the object isn't a smi.
2603 __ JumpIfSmi(a1, &miss);
2604
2605 // Check that the map of the object hasn't changed.
2606 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2607 __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));
2608
2609 // Perform global security token check if needed.
2610 if (object->IsJSGlobalProxy()) {
2611 __ CheckAccessGlobalProxy(a1, a3, &miss);
2612 }
2613
2614 // Stub never generated for non-global objects that require access
2615 // checks.
2616 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2617
2618 __ push(a1); // Receiver.
2619 __ li(a3, Operand(Handle<AccessorInfo>(callback))); // Callback info.
2620 __ Push(a3, a2, a0);
2621
2622 // Do tail-call to the runtime system.
2623 ExternalReference store_callback_property =
2624 ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
2625 masm()->isolate());
2626 __ TailCallExternalReference(store_callback_property, 4, 1);
2627
2628 // Handle store cache miss.
2629 __ bind(&miss);
2630 Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2631 __ Jump(ic, RelocInfo::CODE_TARGET);
2632
2633 // Return the generated code.
2634 return GetCode(CALLBACKS, name);
2635}
2636
2637
2638MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
2639 String* name) {
2640 // ----------- S t a t e -------------
2641 // -- a0 : value
2642 // -- a1 : receiver
2643 // -- a2 : name
2644 // -- ra : return address
2645 // -----------------------------------
2646 Label miss;
2647
2648 // Check that the object isn't a smi.
2649 __ JumpIfSmi(a1, &miss);
2650
2651 // Check that the map of the object hasn't changed.
2652 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2653 __ Branch(&miss, ne, a3, Operand(Handle<Map>(receiver->map())));
2654
2655 // Perform global security token check if needed.
2656 if (receiver->IsJSGlobalProxy()) {
2657 __ CheckAccessGlobalProxy(a1, a3, &miss);
2658 }
2659
2660 // Stub is never generated for non-global objects that require access
2661 // checks.
2662 ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2663
2664 __ Push(a1, a2, a0); // Receiver, name, value.
2665
2666 __ li(a0, Operand(Smi::FromInt(strict_mode_)));
2667 __ push(a0); // Strict mode.
2668
2669 // Do tail-call to the runtime system.
2670 ExternalReference store_ic_property =
2671 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
2672 masm()->isolate());
2673 __ TailCallExternalReference(store_ic_property, 4, 1);
2674
2675 // Handle store cache miss.
2676 __ bind(&miss);
2677 Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2678 __ Jump(ic, RelocInfo::CODE_TARGET);
2679
2680 // Return the generated code.
2681 return GetCode(INTERCEPTOR, name);
2682}
2683
2684
2685MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
2686 JSGlobalPropertyCell* cell,
2687 String* name) {
2688 // ----------- S t a t e -------------
2689 // -- a0 : value
2690 // -- a1 : receiver
2691 // -- a2 : name
2692 // -- ra : return address
2693 // -----------------------------------
2694 Label miss;
2695
2696 // Check that the map of the global has not changed.
2697 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2698 __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));
2699
2700 // Check that the value in the cell is not the hole. If it is, this
2701 // cell could have been deleted and reintroducing the global needs
2702 // to update the property details in the property dictionary of the
2703 // global object. We bail out to the runtime system to do that.
2704 __ li(t0, Operand(Handle<JSGlobalPropertyCell>(cell)));
2705 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
2706 __ lw(t2, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
2707 __ Branch(&miss, eq, t1, Operand(t2));
2708
2709 // Store the value in the cell.
2710 __ sw(a0, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
2711 __ mov(v0, a0); // Stored value must be returned in v0.
2712 Counters* counters = masm()->isolate()->counters();
2713 __ IncrementCounter(counters->named_store_global_inline(), 1, a1, a3);
2714 __ Ret();
2715
2716 // Handle store cache miss.
2717 __ bind(&miss);
2718 __ IncrementCounter(counters->named_store_global_inline_miss(), 1, a1, a3);
2719 Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2720 __ Jump(ic, RelocInfo::CODE_TARGET);
2721
2722 // Return the generated code.
2723 return GetCode(NORMAL, name);
2724}
2725
2726
2727MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
2728 JSObject* object,
2729 JSObject* last) {
2730 // ----------- S t a t e -------------
2731 // -- a0 : receiver
2732 // -- ra : return address
2733 // -----------------------------------
2734 Label miss;
2735
2736 // Check that the receiver is not a smi.
2737 __ JumpIfSmi(a0, &miss);
2738
2739 // Check the maps of the full prototype chain.
2740 CheckPrototypes(object, a0, last, a3, a1, t0, name, &miss);
2741
2742 // If the last object in the prototype chain is a global object,
2743 // check that the global property cell is empty.
2744 if (last->IsGlobalObject()) {
2745 MaybeObject* cell = GenerateCheckPropertyCell(masm(),
2746 GlobalObject::cast(last),
2747 name,
2748 a1,
2749 &miss);
2750 if (cell->IsFailure()) {
2751 miss.Unuse();
2752 return cell;
2753 }
2754 }
2755
2756 // Return undefined if the maps of the full prototype chain are still the same.
2757 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2758 __ Ret();
2759
2760 __ bind(&miss);
2761 GenerateLoadMiss(masm(), Code::LOAD_IC);
2762
2763 // Return the generated code.
2764 return GetCode(NONEXISTENT, heap()->empty_string());
2765}
2766
2767
2768MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
2769 JSObject* holder,
2770 int index,
2771 String* name) {
2772 // ----------- S t a t e -------------
2773 // -- a0 : receiver
2774 // -- a2 : name
2775 // -- ra : return address
2776 // -----------------------------------
2777 Label miss;
2778
2779 __ mov(v0, a0);
2780
2781 GenerateLoadField(object, holder, v0, a3, a1, t0, index, name, &miss);
2782 __ bind(&miss);
2783 GenerateLoadMiss(masm(), Code::LOAD_IC);
2784
2785 // Return the generated code.
2786 return GetCode(FIELD, name);
2787}
2788
2789
2790MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
2791 JSObject* object,
2792 JSObject* holder,
2793 AccessorInfo* callback) {
2794 // ----------- S t a t e -------------
2795 // -- a0 : receiver
2796 // -- a2 : name
2797 // -- ra : return address
2798 // -----------------------------------
2799 Label miss;
2800
2801 MaybeObject* result = GenerateLoadCallback(object, holder, a0, a2, a3, a1, t0,
2802 callback, name, &miss);
2803 if (result->IsFailure()) {
2804 miss.Unuse();
2805 return result;
2806 }
2807
2808 __ bind(&miss);
2809 GenerateLoadMiss(masm(), Code::LOAD_IC);
2810
2811 // Return the generated code.
2812 return GetCode(CALLBACKS, name);
2813}
2814
2815
2816MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
2817 JSObject* holder,
2818 Object* value,
2819 String* name) {
2820 // ----------- S t a t e -------------
2821 // -- a0 : receiver
2822 // -- a2 : name
2823 // -- ra : return address
2824 // -----------------------------------
2825 Label miss;
2826
2827 GenerateLoadConstant(object, holder, a0, a3, a1, t0, value, name, &miss);
2828 __ bind(&miss);
2829 GenerateLoadMiss(masm(), Code::LOAD_IC);
2830
2831 // Return the generated code.
2832 return GetCode(CONSTANT_FUNCTION, name);
2833}
2834
2835
2836MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
2837 JSObject* holder,
2838 String* name) {
2839 // ----------- S t a t e -------------
2840 // -- a0 : receiver
2841 // -- a2 : name
2842 // -- ra : return address
2843 // -- [sp] : receiver
2844 // -----------------------------------
2845 Label miss;
2846
2847 LookupResult lookup;
2848 LookupPostInterceptor(holder, name, &lookup);
2849 GenerateLoadInterceptor(object,
2850 holder,
2851 &lookup,
2852 a0,
2853 a2,
2854 a3,
2855 a1,
2856 t0,
2857 name,
2858 &miss);
2859 __ bind(&miss);
2860 GenerateLoadMiss(masm(), Code::LOAD_IC);
2861
2862 // Return the generated code.
2863 return GetCode(INTERCEPTOR, name);
2864}
2865
2866
2867MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
2868 GlobalObject* holder,
2869 JSGlobalPropertyCell* cell,
2870 String* name,
2871 bool is_dont_delete) {
2872 // ----------- S t a t e -------------
2873 // -- a0 : receiver
2874 // -- a2 : name
2875 // -- ra : return address
2876 // -----------------------------------
2877 Label miss;
2878
2879 // If the object is the holder then we know that it's a global
2880 // object which can only happen for contextual calls. In this case,
2881 // the receiver cannot be a smi.
2882 if (object != holder) {
2883 __ And(t0, a0, Operand(kSmiTagMask));
2884 __ Branch(&miss, eq, t0, Operand(zero_reg));
2885 }
2886
2887 // Check that the map of the global has not changed.
2888 CheckPrototypes(object, a0, holder, a3, t0, a1, name, &miss);
2889
2890 // Get the value from the cell.
2891 __ li(a3, Operand(Handle<JSGlobalPropertyCell>(cell)));
2892 __ lw(t0, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
2893
2894 // Check for deleted property if property can actually be deleted.
2895 if (!is_dont_delete) {
2896 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2897 __ Branch(&miss, eq, t0, Operand(at));
2898 }
2899
2900 __ mov(v0, t0);
2901 Counters* counters = masm()->isolate()->counters();
2902 __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
2903 __ Ret();
2904
2905 __ bind(&miss);
2906 __ IncrementCounter(counters->named_load_global_stub_miss(), 1, a1, a3);
2907 GenerateLoadMiss(masm(), Code::LOAD_IC);
2908
2909 // Return the generated code.
2910 return GetCode(NORMAL, name);
2911}
2912
2913
2914MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2915 JSObject* receiver,
2916 JSObject* holder,
2917 int index) {
2918 // ----------- S t a t e -------------
2919 // -- ra : return address
2920 // -- a0 : key
2921 // -- a1 : receiver
2922 // -----------------------------------
2923 Label miss;
2924
2925 // Check the key is the cached one.
2926 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2927
2928 GenerateLoadField(receiver, holder, a1, a2, a3, t0, index, name, &miss);
2929 __ bind(&miss);
2930 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2931
2932 return GetCode(FIELD, name);
2933}
2934
2935
2936MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2937 String* name,
2938 JSObject* receiver,
2939 JSObject* holder,
2940 AccessorInfo* callback) {
2941 // ----------- S t a t e -------------
2942 // -- ra : return address
2943 // -- a0 : key
2944 // -- a1 : receiver
2945 // -----------------------------------
2946 Label miss;
2947
2948 // Check the key is the cached one.
2949 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2950
2951 MaybeObject* result = GenerateLoadCallback(receiver, holder, a1, a0, a2, a3,
2952 t0, callback, name, &miss);
2953 if (result->IsFailure()) {
2954 miss.Unuse();
2955 return result;
2956 }
2957
2958 __ bind(&miss);
2959 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2960
2961 return GetCode(CALLBACKS, name);
2962}
2963
2964
2965MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
2966 JSObject* receiver,
2967 JSObject* holder,
2968 Object* value) {
2969 // ----------- S t a t e -------------
2970 // -- ra : return address
2971 // -- a0 : key
2972 // -- a1 : receiver
2973 // -----------------------------------
2974 Label miss;
2975
2976 // Check the key is the cached one.
2977 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2978
2979 GenerateLoadConstant(receiver, holder, a1, a2, a3, t0, value, name, &miss);
2980 __ bind(&miss);
2981 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2982
2983 // Return the generated code.
2984 return GetCode(CONSTANT_FUNCTION, name);
2985}
2986
2987
2988MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2989 JSObject* holder,
2990 String* name) {
2991 // ----------- S t a t e -------------
2992 // -- ra : return address
2993 // -- a0 : key
2994 // -- a1 : receiver
2995 // -----------------------------------
2996 Label miss;
2997
2998 // Check the key is the cached one.
2999 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3000
3001 LookupResult lookup;
3002 LookupPostInterceptor(holder, name, &lookup);
3003 GenerateLoadInterceptor(receiver,
3004 holder,
3005 &lookup,
3006 a1,
3007 a0,
3008 a2,
3009 a3,
3010 t0,
3011 name,
3012 &miss);
3013 __ bind(&miss);
3014 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3015
3016 return GetCode(INTERCEPTOR, name);
3017}
3018
3019
3020MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
3021 // ----------- S t a t e -------------
3022 // -- ra : return address
3023 // -- a0 : key
3024 // -- a1 : receiver
3025 // -----------------------------------
3026 Label miss;
3027
3028 // Check the key is the cached one.
3029 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3030
3031 GenerateLoadArrayLength(masm(), a1, a2, &miss);
3032 __ bind(&miss);
3033 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3034
3035 return GetCode(CALLBACKS, name);
3036}
3037
3038
3039MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
3040 // ----------- S t a t e -------------
3041 // -- ra : return address
3042 // -- a0 : key
3043 // -- a1 : receiver
3044 // -----------------------------------
3045 Label miss;
3046
3047 Counters* counters = masm()->isolate()->counters();
3048 __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
3049
3050 // Check the key is the cached one.
3051 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3052
3053 GenerateLoadStringLength(masm(), a1, a2, a3, &miss, true);
3054 __ bind(&miss);
3055 __ DecrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
3056
3057 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3058
3059 return GetCode(CALLBACKS, name);
3060}
3061
3062
3063MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
3064 // ----------- S t a t e -------------
3065 // -- ra : return address
3066 // -- a0 : key
3067 // -- a1 : receiver
3068 // -----------------------------------
3069 Label miss;
3070
3071 Counters* counters = masm()->isolate()->counters();
3072 __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3073
3074 // Check the name hasn't changed.
3075 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3076
3077 GenerateLoadFunctionPrototype(masm(), a1, a2, a3, &miss);
3078 __ bind(&miss);
3079 __ DecrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3080 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3081
3082 return GetCode(CALLBACKS, name);
3083}
3084
3085
3086MaybeObject* KeyedLoadStubCompiler::CompileLoadFastElement(Map* receiver_map) {
3087 // ----------- S t a t e -------------
3088 // -- ra : return address
3089 // -- a0 : key
3090 // -- a1 : receiver
3091 // -----------------------------------
3092 MaybeObject* maybe_stub = KeyedLoadFastElementStub().TryGetCode();
3093 Code* stub;
3094 if (!maybe_stub->To(&stub)) return maybe_stub;
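 // DispatchMap tail-calls the fast-element stub when the receiver's map
 // matches; otherwise control falls through to the miss jump below.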
3095 __ DispatchMap(a1,
3096 a2,
3097 Handle<Map>(receiver_map),
3098 Handle<Code>(stub),
3099 DO_SMI_CHECK);
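      // DispatchMap tail-calls the stub above when the receiver's map matches
      // receiver_map; on a smi receiver or a map mismatch it falls through to
      // the KeyedLoadIC_Miss jump below.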
3100
3101 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
3102 __ Jump(ic, RelocInfo::CODE_TARGET);
3103
3104 // Return the generated code.
3105 return GetCode(NORMAL, NULL);
3106}
3107
3108
3109MaybeObject* KeyedLoadStubCompiler::CompileLoadMegamorphic(
3110 MapList* receiver_maps,
3111 CodeList* handler_ics) {
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003112 // ----------- S t a t e -------------
3113 // -- ra : return address
3114 // -- a0 : key
3115 // -- a1 : receiver
3116 // -----------------------------------
3117 Label miss;
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003118 __ JumpIfSmi(a1, &miss);
3119
danno@chromium.org40cb8782011-05-25 07:58:50 +00003120 int receiver_count = receiver_maps->length();
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003121 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
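      // Polymorphic dispatch: compare the receiver map against each handled
      // map and tail-call the corresponding handler IC; fall through to the
      // miss handler if none of them match.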
danno@chromium.org40cb8782011-05-25 07:58:50 +00003122 for (int current = 0; current < receiver_count; ++current) {
3123 Handle<Map> map(receiver_maps->at(current));
3124 Handle<Code> code(handler_ics->at(current));
3125 __ Jump(code, RelocInfo::CODE_TARGET, eq, a2, Operand(map));
3126 }
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003127
3128 __ bind(&miss);
danno@chromium.org40cb8782011-05-25 07:58:50 +00003129 Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
3130 __ Jump(miss_ic, RelocInfo::CODE_TARGET);
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003131
3132 // Return the generated code.
danno@chromium.org40cb8782011-05-25 07:58:50 +00003133 return GetCode(NORMAL, NULL, MEGAMORPHIC);
lrn@chromium.org7516f052011-03-30 08:52:27 +00003134}
3135
3136
3137MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
3138 int index,
3139 Map* transition,
3140 String* name) {
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003141 // ----------- S t a t e -------------
3142 // -- a0 : value
3143 // -- a1 : key
3144 // -- a2 : receiver
3145 // -- ra : return address
3146 // -----------------------------------
3147
3148 Label miss;
3149
3150 Counters* counters = masm()->isolate()->counters();
3151 __ IncrementCounter(counters->keyed_store_field(), 1, a3, t0);
3152
3153 // Check that the name has not changed.
3154 __ Branch(&miss, ne, a1, Operand(Handle<String>(name)));
3155
 3156  // a3 is used as a scratch register. a1 and a2 keep their values if a
 3157  // jump to the miss label is generated.
3158 GenerateStoreField(masm(),
3159 object,
3160 index,
3161 transition,
3162 a2, a1, a3,
3163 &miss);
3164 __ bind(&miss);
3165
3166 __ DecrementCounter(counters->keyed_store_field(), 1, a3, t0);
3167 Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3168 __ Jump(ic, RelocInfo::CODE_TARGET);
3169
3170 // Return the generated code.
3171 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
lrn@chromium.org7516f052011-03-30 08:52:27 +00003172}
3173
3174
danno@chromium.org40cb8782011-05-25 07:58:50 +00003175MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement(
3176 Map* receiver_map) {
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003177 // ----------- S t a t e -------------
3178 // -- a0 : value
3179 // -- a1 : key
3180 // -- a2 : receiver
3181 // -- ra : return address
3182 // -- a3 : scratch
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003183 // -----------------------------------
danno@chromium.org40cb8782011-05-25 07:58:50 +00003184 bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
3185 MaybeObject* maybe_stub =
3186 KeyedStoreFastElementStub(is_js_array).TryGetCode();
3187 Code* stub;
3188 if (!maybe_stub->To(&stub)) return maybe_stub;
3189 __ DispatchMap(a2,
3190 a3,
3191 Handle<Map>(receiver_map),
3192 Handle<Code>(stub),
3193 DO_SMI_CHECK);
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003194
danno@chromium.org40cb8782011-05-25 07:58:50 +00003195 Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003196 __ Jump(ic, RelocInfo::CODE_TARGET);
3197
3198 // Return the generated code.
3199 return GetCode(NORMAL, NULL);
lrn@chromium.org7516f052011-03-30 08:52:27 +00003200}
3201
3202
danno@chromium.org40cb8782011-05-25 07:58:50 +00003203MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic(
3204 MapList* receiver_maps,
3205 CodeList* handler_ics) {
3206 // ----------- S t a t e -------------
3207 // -- a0 : value
3208 // -- a1 : key
3209 // -- a2 : receiver
3210 // -- ra : return address
3211 // -- a3 : scratch
3212 // -----------------------------------
3213 Label miss;
3214 __ JumpIfSmi(a2, &miss);
3215
3216 int receiver_count = receiver_maps->length();
3217 __ lw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
3218 for (int current = 0; current < receiver_count; ++current) {
3219 Handle<Map> map(receiver_maps->at(current));
3220 Handle<Code> code(handler_ics->at(current));
3221 __ Jump(code, RelocInfo::CODE_TARGET, eq, a3, Operand(map));
3222 }
3223
3224 __ bind(&miss);
3225 Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
3226 __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3227
3228 // Return the generated code.
3229 return GetCode(NORMAL, NULL, MEGAMORPHIC);
3230}
3231
3232
lrn@chromium.org7516f052011-03-30 08:52:27 +00003233MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003234 // a0 : argc
3235 // a1 : constructor
3236 // ra : return address
3237 // [sp] : last argument
3238 Label generic_stub_call;
3239
3240 // Use t7 for holding undefined which is used in several places below.
3241 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
3242
3243#ifdef ENABLE_DEBUGGER_SUPPORT
 3244  // Check to see whether there are any break points in the function code. If
 3245  // there are, jump to the generic constructor stub, which calls the actual
 3246  // code for the function, thereby hitting the break points.
3247 __ lw(t5, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
3248 __ lw(a2, FieldMemOperand(t5, SharedFunctionInfo::kDebugInfoOffset));
3249 __ Branch(&generic_stub_call, ne, a2, Operand(t7));
3250#endif
3251
3252 // Load the initial map and verify that it is in fact a map.
3253 // a1: constructor function
3254 // t7: undefined
3255 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
3256 __ And(t0, a2, Operand(kSmiTagMask));
3257 __ Branch(&generic_stub_call, eq, t0, Operand(zero_reg));
3258 __ GetObjectType(a2, a3, t0);
3259 __ Branch(&generic_stub_call, ne, t0, Operand(MAP_TYPE));
3260
3261#ifdef DEBUG
3262 // Cannot construct functions this way.
3263 // a0: argc
3264 // a1: constructor function
3265 // a2: initial map
3266 // t7: undefined
3267 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3268 __ Check(ne, "Function constructed by construct stub.",
3269 a3, Operand(JS_FUNCTION_TYPE));
3270#endif
3271
3272 // Now allocate the JSObject in new space.
3273 // a0: argc
3274 // a1: constructor function
3275 // a2: initial map
3276 // t7: undefined
3277 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
3278 __ AllocateInNewSpace(a3,
3279 t4,
3280 t5,
3281 t6,
3282 &generic_stub_call,
3283 SIZE_IN_WORDS);
3284
 3285  // The JSObject has been allocated; now initialize the fields. The map is
 3286  // set to the initial map, and properties and elements to the empty fixed array.
3287 // a0: argc
3288 // a1: constructor function
3289 // a2: initial map
3290 // a3: object size (in words)
3291 // t4: JSObject (not tagged)
3292 // t7: undefined
3293 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
3294 __ mov(t5, t4);
3295 __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
3296 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
3297 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
3298 __ Addu(t5, t5, Operand(3 * kPointerSize));
3299 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
3300 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
3301 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
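      // t5 now points just past the three header words, i.e. at the first
      // in-object property of the new object.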
3302
3303
3304 // Calculate the location of the first argument. The stack contains only the
3305 // argc arguments.
3306 __ sll(a1, a0, kPointerSizeLog2);
3307 __ Addu(a1, a1, sp);
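      // a1 now points one word above the first (deepest) argument, so
      // argument i is located at a1 - (i + 1) * kPointerSize.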
3308
3309 // Fill all the in-object properties with undefined.
3310 // a0: argc
3311 // a1: first argument
3312 // a3: object size (in words)
3313 // t4: JSObject (not tagged)
3314 // t5: First in-object property of JSObject (not tagged)
3315 // t7: undefined
3316 // Fill the initialized properties with a constant value or a passed argument
3317 // depending on the this.x = ...; assignment in the function.
3318 SharedFunctionInfo* shared = function->shared();
3319 for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3320 if (shared->IsThisPropertyAssignmentArgument(i)) {
3321 Label not_passed, next;
3322 // Check if the argument assigned to the property is actually passed.
3323 int arg_number = shared->GetThisPropertyAssignmentArgument(i);
3324 __ Branch(&not_passed, less_equal, a0, Operand(arg_number));
3325 // Argument passed - find it on the stack.
3326 __ lw(a2, MemOperand(a1, (arg_number + 1) * -kPointerSize));
3327 __ sw(a2, MemOperand(t5));
3328 __ Addu(t5, t5, kPointerSize);
3329 __ jmp(&next);
3330 __ bind(&not_passed);
3331 // Set the property to undefined.
3332 __ sw(t7, MemOperand(t5));
3333 __ Addu(t5, t5, Operand(kPointerSize));
3334 __ bind(&next);
3335 } else {
3336 // Set the property to the constant value.
3337 Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3338 __ li(a2, Operand(constant));
3339 __ sw(a2, MemOperand(t5));
3340 __ Addu(t5, t5, kPointerSize);
3341 }
3342 }
3343
3344 // Fill the unused in-object property fields with undefined.
3345 ASSERT(function->has_initial_map());
3346 for (int i = shared->this_property_assignments_count();
3347 i < function->initial_map()->inobject_properties();
3348 i++) {
3349 __ sw(t7, MemOperand(t5));
3350 __ Addu(t5, t5, kPointerSize);
3351 }
3352
3353 // a0: argc
3354 // t4: JSObject (not tagged)
 3355  // Move argc to a1, move the JSObject to be returned to v0, and tag it.
3356 __ mov(a1, a0);
3357 __ mov(v0, t4);
3358 __ Or(v0, v0, Operand(kHeapObjectTag));
3359
3360 // v0: JSObject
3361 // a1: argc
3362 // Remove caller arguments and receiver from the stack and return.
3363 __ sll(t0, a1, kPointerSizeLog2);
3364 __ Addu(sp, sp, t0);
3365 __ Addu(sp, sp, Operand(kPointerSize));
3366 Counters* counters = masm()->isolate()->counters();
3367 __ IncrementCounter(counters->constructed_objects(), 1, a1, a2);
3368 __ IncrementCounter(counters->constructed_objects_stub(), 1, a1, a2);
3369 __ Ret();
3370
3371 // Jump to the generic stub in case the specialized code cannot handle the
3372 // construction.
3373 __ bind(&generic_stub_call);
3374 Handle<Code> generic_construct_stub =
3375 masm()->isolate()->builtins()->JSConstructStubGeneric();
3376 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
3377
3378 // Return the generated code.
3379 return GetCode();
3380}
3381
3382
danno@chromium.org40cb8782011-05-25 07:58:50 +00003383MaybeObject* ExternalArrayLoadStubCompiler::CompileLoad(
 3384    JSObject* receiver, ExternalArrayType array_type) {
3385 // ----------- S t a t e -------------
3386 // -- ra : return address
3387 // -- a0 : key
3388 // -- a1 : receiver
3389 // -----------------------------------
3390 MaybeObject* maybe_stub =
3391 KeyedLoadExternalArrayStub(array_type).TryGetCode();
3392 Code* stub;
3393 if (!maybe_stub->To(&stub)) return maybe_stub;
3394 __ DispatchMap(a1,
3395 a2,
3396 Handle<Map>(receiver->map()),
3397 Handle<Code>(stub),
3398 DO_SMI_CHECK);
3399
3400 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
3401 __ Jump(ic, RelocInfo::CODE_TARGET);
3402
3403 // Return the generated code.
3404 return GetCode();
3405}
3406
3407
3408MaybeObject* ExternalArrayStoreStubCompiler::CompileStore(
3409 JSObject* receiver, ExternalArrayType array_type) {
3410 // ----------- S t a t e -------------
3411 // -- a0 : value
3412 // -- a1 : name
3413 // -- a2 : receiver
3414 // -- ra : return address
3415 // -----------------------------------
3416 MaybeObject* maybe_stub =
3417 KeyedStoreExternalArrayStub(array_type).TryGetCode();
3418 Code* stub;
3419 if (!maybe_stub->To(&stub)) return maybe_stub;
3420 __ DispatchMap(a2,
3421 a3,
3422 Handle<Map>(receiver->map()),
3423 Handle<Code>(stub),
3424 DO_SMI_CHECK);
3425
3426 Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
3427 __ Jump(ic, RelocInfo::CODE_TARGET);
3428
3429 return GetCode();
3430}
3431
3432
3433#undef __
3434#define __ ACCESS_MASM(masm)
3435
3436
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003437static bool IsElementTypeSigned(ExternalArrayType array_type) {
3438 switch (array_type) {
3439 case kExternalByteArray:
3440 case kExternalShortArray:
3441 case kExternalIntArray:
3442 return true;
3443
3444 case kExternalUnsignedByteArray:
3445 case kExternalUnsignedShortArray:
3446 case kExternalUnsignedIntArray:
3447 return false;
3448
3449 default:
3450 UNREACHABLE();
3451 return false;
3452 }
lrn@chromium.org7516f052011-03-30 08:52:27 +00003453}
3454
3455
danno@chromium.org40cb8782011-05-25 07:58:50 +00003456void KeyedLoadStubCompiler::GenerateLoadExternalArray(
3457 MacroAssembler* masm,
3458 ExternalArrayType array_type) {
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003459 // ---------- S t a t e --------------
3460 // -- ra : return address
3461 // -- a0 : key
3462 // -- a1 : receiver
3463 // -----------------------------------
danno@chromium.org40cb8782011-05-25 07:58:50 +00003464 Label miss_force_generic, slow, failed_allocation;
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003465
3466 Register key = a0;
3467 Register receiver = a1;
3468
danno@chromium.org40cb8782011-05-25 07:58:50 +00003469  // This stub is meant to be tail-jumped to; the receiver must already
 3470  // have been verified by the caller not to be a smi.
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003471
3472 // Check that the key is a smi.
danno@chromium.org40cb8782011-05-25 07:58:50 +00003473 __ JumpIfNotSmi(key, &miss_force_generic);
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003474
3475 __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3476 // a3: elements array
3477
3478 // Check that the index is in range.
3479 __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3480 __ sra(t2, key, kSmiTagSize);
3481 // Unsigned comparison catches both negative and too-large values.
danno@chromium.org40cb8782011-05-25 07:58:50 +00003482 __ Branch(&miss_force_generic, Uless, t1, Operand(t2));
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003483
3484 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3485 // a3: base pointer of external storage
3486
 3487  // We do not untag the smi key; instead we work with it
 3488  // as if it were premultiplied by 2.
3489 ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
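      // Because the key equals index * 2, the shifts below scale it to a byte
      // offset: >> 1 for 1-byte elements, no shift for 2-byte elements,
      // << 1 (or the untagged index << 2) for 4-byte, and << 2 for 8-byte.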
3490
3491 Register value = a2;
3492 switch (array_type) {
3493 case kExternalByteArray:
3494 __ srl(t2, key, 1);
3495 __ addu(t3, a3, t2);
3496 __ lb(value, MemOperand(t3, 0));
3497 break;
3498 case kExternalPixelArray:
3499 case kExternalUnsignedByteArray:
3500 __ srl(t2, key, 1);
3501 __ addu(t3, a3, t2);
3502 __ lbu(value, MemOperand(t3, 0));
3503 break;
3504 case kExternalShortArray:
3505 __ addu(t3, a3, key);
3506 __ lh(value, MemOperand(t3, 0));
3507 break;
3508 case kExternalUnsignedShortArray:
3509 __ addu(t3, a3, key);
3510 __ lhu(value, MemOperand(t3, 0));
3511 break;
3512 case kExternalIntArray:
3513 case kExternalUnsignedIntArray:
3514 __ sll(t2, key, 1);
3515 __ addu(t3, a3, t2);
3516 __ lw(value, MemOperand(t3, 0));
3517 break;
3518 case kExternalFloatArray:
3519 __ sll(t3, t2, 2);
3520 __ addu(t3, a3, t3);
3521 if (CpuFeatures::IsSupported(FPU)) {
3522 CpuFeatures::Scope scope(FPU);
3523 __ lwc1(f0, MemOperand(t3, 0));
3524 } else {
3525 __ lw(value, MemOperand(t3, 0));
3526 }
3527 break;
3528 case kExternalDoubleArray:
3529 __ sll(t2, key, 2);
3530 __ addu(t3, a3, t2);
3531 if (CpuFeatures::IsSupported(FPU)) {
3532 CpuFeatures::Scope scope(FPU);
3533 __ ldc1(f0, MemOperand(t3, 0));
3534 } else {
3535 // t3: pointer to the beginning of the double we want to load.
3536 __ lw(a2, MemOperand(t3, 0));
3537 __ lw(a3, MemOperand(t3, Register::kSizeInBytes));
3538 }
3539 break;
3540 default:
3541 UNREACHABLE();
3542 break;
3543 }
3544
3545 // For integer array types:
3546 // a2: value
3547 // For float array type:
3548 // f0: value (if FPU is supported)
3549 // a2: value (if FPU is not supported)
3550 // For double array type:
3551 // f0: value (if FPU is supported)
3552 // a2/a3: value (if FPU is not supported)
3553
3554 if (array_type == kExternalIntArray) {
3555 // For the Int and UnsignedInt array types, we need to see whether
3556 // the value can be represented in a Smi. If not, we need to convert
3557 // it to a HeapNumber.
3558 Label box_int;
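        // Smi range check: subtracting 0xC0000000 (i.e. adding 2^30 with
        // 32-bit wrap-around) yields a negative result exactly when the value
        // lies outside the smi range [-2^30, 2^30).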
3559 __ Subu(t3, value, Operand(0xC0000000)); // Non-smi value gives neg result.
3560 __ Branch(&box_int, lt, t3, Operand(zero_reg));
3561 // Tag integer as smi and return it.
3562 __ sll(v0, value, kSmiTagSize);
3563 __ Ret();
3564
3565 __ bind(&box_int);
3566 // Allocate a HeapNumber for the result and perform int-to-double
3567 // conversion.
3568 // The arm version uses a temporary here to save r0, but we don't need to
3569 // (a0 is not modified).
3570 __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
3571 __ AllocateHeapNumber(v0, a3, t0, t1, &slow);
3572
3573 if (CpuFeatures::IsSupported(FPU)) {
3574 CpuFeatures::Scope scope(FPU);
3575 __ mtc1(value, f0);
3576 __ cvt_d_w(f0, f0);
3577 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3578 __ Ret();
3579 } else {
danno@chromium.org40cb8782011-05-25 07:58:50 +00003580 Register dst1 = t2;
3581 Register dst2 = t3;
3582 FloatingPointHelper::Destination dest =
3583 FloatingPointHelper::kCoreRegisters;
3584 FloatingPointHelper::ConvertIntToDouble(masm,
3585 value,
3586 dest,
3587 f0,
3588 dst1,
3589 dst2,
3590 t1,
3591 f2);
3592 __ sw(dst1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3593 __ sw(dst2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3594 __ Ret();
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003595 }
3596 } else if (array_type == kExternalUnsignedIntArray) {
3597 // The test is different for unsigned int values. Since we need
3598 // the value to be in the range of a positive smi, we can't
3599 // handle either of the top two bits being set in the value.
3600 if (CpuFeatures::IsSupported(FPU)) {
3601 CpuFeatures::Scope scope(FPU);
3602 Label pl_box_int;
3603 __ And(t2, value, Operand(0xC0000000));
3604 __ Branch(&pl_box_int, ne, t2, Operand(zero_reg));
3605
 3606      // It can fit in a Smi.
3607 // Tag integer as smi and return it.
3608 __ sll(v0, value, kSmiTagSize);
3609 __ Ret();
3610
3611 __ bind(&pl_box_int);
3612 // Allocate a HeapNumber for the result and perform int-to-double
3613 // conversion. Don't use a0 and a1 as AllocateHeapNumber clobbers all
3614 // registers - also when jumping due to exhausted young space.
3615 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3616 __ AllocateHeapNumber(v0, t2, t3, t6, &slow);
3617
3618 // This is replaced by a macro:
3619 // __ mtc1(value, f0); // LS 32-bits.
3620 // __ mtc1(zero_reg, f1); // MS 32-bits are all zero.
3621 // __ cvt_d_l(f0, f0); // Use 64 bit conv to get correct unsigned 32-bit.
3622
3623 __ Cvt_d_uw(f0, value);
3624
3625 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3626
3627 __ Ret();
3628 } else {
3629 // Check whether unsigned integer fits into smi.
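          // An unsigned value fits in a smi only if both of its top two bits
          // are clear, i.e. only if it is less than 2^30.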
3630 Label box_int_0, box_int_1, done;
3631 __ And(t2, value, Operand(0x80000000));
3632 __ Branch(&box_int_0, ne, t2, Operand(zero_reg));
3633 __ And(t2, value, Operand(0x40000000));
3634 __ Branch(&box_int_1, ne, t2, Operand(zero_reg));
3635
3636 // Tag integer as smi and return it.
3637 __ sll(v0, value, kSmiTagSize);
3638 __ Ret();
3639
3640 Register hiword = value; // a2.
3641 Register loword = a3;
3642
3643 __ bind(&box_int_0);
3644 // Integer does not have leading zeros.
danno@chromium.org40cb8782011-05-25 07:58:50 +00003645 GenerateUInt2Double(masm, hiword, loword, t0, 0);
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003646 __ Branch(&done);
3647
3648 __ bind(&box_int_1);
3649 // Integer has one leading zero.
danno@chromium.org40cb8782011-05-25 07:58:50 +00003650 GenerateUInt2Double(masm, hiword, loword, t0, 1);
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003651
3652
3653 __ bind(&done);
3654 // Integer was converted to double in registers hiword:loword.
3655 // Wrap it into a HeapNumber. Don't use a0 and a1 as AllocateHeapNumber
3656 // clobbers all registers - also when jumping due to exhausted young
3657 // space.
3658 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3659 __ AllocateHeapNumber(t2, t3, t5, t6, &slow);
3660
3661 __ sw(hiword, FieldMemOperand(t2, HeapNumber::kExponentOffset));
3662 __ sw(loword, FieldMemOperand(t2, HeapNumber::kMantissaOffset));
3663
3664 __ mov(v0, t2);
3665 __ Ret();
3666 }
3667 } else if (array_type == kExternalFloatArray) {
3668 // For the floating-point array type, we need to always allocate a
3669 // HeapNumber.
3670 if (CpuFeatures::IsSupported(FPU)) {
3671 CpuFeatures::Scope scope(FPU);
3672 // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3673 // AllocateHeapNumber clobbers all registers - also when jumping due to
3674 // exhausted young space.
3675 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3676 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3677 // The float (single) value is already in fpu reg f0 (if we use float).
3678 __ cvt_d_s(f0, f0);
3679 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3680 __ Ret();
3681 } else {
3682 // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3683 // AllocateHeapNumber clobbers all registers - also when jumping due to
3684 // exhausted young space.
3685 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3686 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3687 // FPU is not available, do manual single to double conversion.
3688
3689 // a2: floating point value (binary32).
3690 // v0: heap number for result
3691
3692 // Extract mantissa to t4.
3693 __ And(t4, value, Operand(kBinary32MantissaMask));
3694
3695 // Extract exponent to t5.
3696 __ srl(t5, value, kBinary32MantissaBits);
3697 __ And(t5, t5, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
3698
3699 Label exponent_rebiased;
3700 __ Branch(&exponent_rebiased, eq, t5, Operand(zero_reg));
3701
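          // A binary32 exponent of 0xFF denotes NaN or Infinity; map it
          // straight to the binary64 exponent 0x7FF and skip the rebiasing
          // below.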
3702 __ li(t0, 0x7ff);
3703 __ Xor(t1, t5, Operand(0xFF));
3704 __ movz(t5, t0, t1); // Set t5 to 0x7ff only if t5 is equal to 0xff.
 3705      __ Branch(&exponent_rebiased, eq, t1, Operand(zero_reg));  // Exponent was 0xff.
3706
3707 // Rebias exponent.
3708 __ Addu(t5,
3709 t5,
3710 Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
3711
3712 __ bind(&exponent_rebiased);
3713 __ And(a2, value, Operand(kBinary32SignMask));
3714 value = no_reg;
3715 __ sll(t0, t5, HeapNumber::kMantissaBitsInTopWord);
3716 __ or_(a2, a2, t0);
3717
3718 // Shift mantissa.
3719 static const int kMantissaShiftForHiWord =
3720 kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3721
3722 static const int kMantissaShiftForLoWord =
3723 kBitsPerInt - kMantissaShiftForHiWord;
3724
3725 __ srl(t0, t4, kMantissaShiftForHiWord);
3726 __ or_(a2, a2, t0);
3727 __ sll(a0, t4, kMantissaShiftForLoWord);
3728
3729 __ sw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3730 __ sw(a0, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3731 __ Ret();
3732 }
3733
3734 } else if (array_type == kExternalDoubleArray) {
3735 if (CpuFeatures::IsSupported(FPU)) {
3736 CpuFeatures::Scope scope(FPU);
3737 // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3738 // AllocateHeapNumber clobbers all registers - also when jumping due to
3739 // exhausted young space.
3740 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3741 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3742 // The double value is already in f0
3743 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3744 __ Ret();
3745 } else {
3746 // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3747 // AllocateHeapNumber clobbers all registers - also when jumping due to
3748 // exhausted young space.
3749 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3750 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3751
3752 __ sw(a2, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3753 __ sw(a3, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3754 __ Ret();
3755 }
3756
3757 } else {
3758 // Tag integer as smi and return it.
3759 __ sll(v0, value, kSmiTagSize);
3760 __ Ret();
3761 }
3762
3763 // Slow case, key and receiver still in a0 and a1.
3764 __ bind(&slow);
3765 __ IncrementCounter(
danno@chromium.org40cb8782011-05-25 07:58:50 +00003766 masm->isolate()->counters()->keyed_load_external_array_slow(),
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003767 1, a2, a3);
3768
3769 // ---------- S t a t e --------------
3770 // -- ra : return address
3771 // -- a0 : key
3772 // -- a1 : receiver
3773 // -----------------------------------
3774
3775 __ Push(a1, a0);
3776
3777 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3778
danno@chromium.org40cb8782011-05-25 07:58:50 +00003779 __ bind(&miss_force_generic);
3780 Code* stub = masm->isolate()->builtins()->builtin(
3781 Builtins::kKeyedLoadIC_MissForceGeneric);
3782 __ Jump(Handle<Code>(stub), RelocInfo::CODE_TARGET);
lrn@chromium.org7516f052011-03-30 08:52:27 +00003783}
3784
3785
danno@chromium.org40cb8782011-05-25 07:58:50 +00003786void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3787 MacroAssembler* masm,
3788 ExternalArrayType array_type) {
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003789 // ---------- S t a t e --------------
3790 // -- a0 : value
3791 // -- a1 : key
3792 // -- a2 : receiver
3793 // -- ra : return address
3794 // -----------------------------------
3795
danno@chromium.org40cb8782011-05-25 07:58:50 +00003796 Label slow, check_heap_number, miss_force_generic;
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003797
3798 // Register usage.
3799 Register value = a0;
3800 Register key = a1;
3801 Register receiver = a2;
3802 // a3 mostly holds the elements array or the destination external array.
3803
danno@chromium.org40cb8782011-05-25 07:58:50 +00003804  // This stub is meant to be tail-jumped to; the receiver must already
 3805  // have been verified by the caller not to be a smi.
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003806
3807 __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3808
3809 // Check that the key is a smi.
danno@chromium.org40cb8782011-05-25 07:58:50 +00003810 __ JumpIfNotSmi(key, &miss_force_generic);
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003811
3812 // Check that the index is in range.
3813 __ SmiUntag(t0, key);
3814 __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3815 // Unsigned comparison catches both negative and too-large values.
danno@chromium.org40cb8782011-05-25 07:58:50 +00003816 __ Branch(&miss_force_generic, Ugreater_equal, t0, Operand(t1));
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003817
3818 // Handle both smis and HeapNumbers in the fast path. Go to the
3819 // runtime for all other kinds of values.
3820 // a3: external array.
3821 // t0: key (integer).
3822
3823 if (array_type == kExternalPixelArray) {
3824 // Double to pixel conversion is only implemented in the runtime for now.
3825 __ JumpIfNotSmi(value, &slow);
3826 } else {
3827 __ JumpIfNotSmi(value, &check_heap_number);
3828 }
3829 __ SmiUntag(t1, value);
3830 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3831
3832 // a3: base pointer of external storage.
3833 // t0: key (integer).
3834 // t1: value (integer).
3835
3836 switch (array_type) {
3837 case kExternalPixelArray: {
3838 // Clamp the value to [0..255].
3839 // v0 is used as a scratch register here.
3840 Label done;
3841 __ li(v0, Operand(255));
3842 // Normal branch: nop in delay slot.
3843 __ Branch(&done, gt, t1, Operand(v0));
3844 // Use delay slot in this branch.
3845 __ Branch(USE_DELAY_SLOT, &done, lt, t1, Operand(zero_reg));
3846 __ mov(v0, zero_reg); // In delay slot.
3847 __ mov(v0, t1); // Value is in range 0..255.
3848 __ bind(&done);
3849 __ mov(t1, v0);
3850 __ addu(t8, a3, t0);
3851 __ sb(t1, MemOperand(t8, 0));
3852 }
3853 break;
3854 case kExternalByteArray:
3855 case kExternalUnsignedByteArray:
3856 __ addu(t8, a3, t0);
3857 __ sb(t1, MemOperand(t8, 0));
3858 break;
3859 case kExternalShortArray:
3860 case kExternalUnsignedShortArray:
3861 __ sll(t8, t0, 1);
3862 __ addu(t8, a3, t8);
3863 __ sh(t1, MemOperand(t8, 0));
3864 break;
3865 case kExternalIntArray:
3866 case kExternalUnsignedIntArray:
3867 __ sll(t8, t0, 2);
3868 __ addu(t8, a3, t8);
3869 __ sw(t1, MemOperand(t8, 0));
3870 break;
3871 case kExternalFloatArray:
3872 // Perform int-to-float conversion and store to memory.
danno@chromium.org40cb8782011-05-25 07:58:50 +00003873 StoreIntAsFloat(masm, a3, t0, t1, t2, t3, t4);
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003874 break;
3875 case kExternalDoubleArray:
3876 __ sll(t8, t0, 3);
3877 __ addu(a3, a3, t8);
3878 // a3: effective address of the double element
3879 FloatingPointHelper::Destination destination;
3880 if (CpuFeatures::IsSupported(FPU)) {
3881 destination = FloatingPointHelper::kFPURegisters;
3882 } else {
3883 destination = FloatingPointHelper::kCoreRegisters;
3884 }
3885 FloatingPointHelper::ConvertIntToDouble(
danno@chromium.org40cb8782011-05-25 07:58:50 +00003886 masm, t1, destination,
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00003887 f0, t2, t3, // These are: double_dst, dst1, dst2.
3888 t0, f2); // These are: scratch2, single_scratch.
3889 if (destination == FloatingPointHelper::kFPURegisters) {
3890 CpuFeatures::Scope scope(FPU);
3891 __ sdc1(f0, MemOperand(a3, 0));
3892 } else {
3893 __ sw(t2, MemOperand(a3, 0));
3894 __ sw(t3, MemOperand(a3, Register::kSizeInBytes));
3895 }
3896 break;
3897 default:
3898 UNREACHABLE();
3899 break;
3900 }
3901
3902 // Entry registers are intact, a0 holds the value which is the return value.
3903 __ mov(v0, value);
3904 __ Ret();
3905
3906 if (array_type != kExternalPixelArray) {
3907 // a3: external array.
3908 // t0: index (integer).
3909 __ bind(&check_heap_number);
3910 __ GetObjectType(value, t1, t2);
3911 __ Branch(&slow, ne, t2, Operand(HEAP_NUMBER_TYPE));
3912
3913 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3914
3915 // a3: base pointer of external storage.
3916 // t0: key (integer).
3917
3918 // The WebGL specification leaves the behavior of storing NaN and
3919 // +/-Infinity into integer arrays basically undefined. For more
3920 // reproducible behavior, convert these to zero.
3921
3922 if (CpuFeatures::IsSupported(FPU)) {
3923 CpuFeatures::Scope scope(FPU);
3924
3925 __ ldc1(f0, FieldMemOperand(a0, HeapNumber::kValueOffset));
3926
3927 if (array_type == kExternalFloatArray) {
3928 __ cvt_s_d(f0, f0);
3929 __ sll(t8, t0, 2);
3930 __ addu(t8, a3, t8);
3931 __ swc1(f0, MemOperand(t8, 0));
3932 } else if (array_type == kExternalDoubleArray) {
3933 __ sll(t8, t0, 3);
3934 __ addu(t8, a3, t8);
3935 __ sdc1(f0, MemOperand(t8, 0));
3936 } else {
3937 Label done;
3938
3939 // Need to perform float-to-int conversion.
3940 // Test whether exponent equal to 0x7FF (infinity or NaN).
3941
3942 __ mfc1(t3, f1); // Move exponent word of double to t3 (as raw bits).
3943 __ li(t1, Operand(0x7FF00000));
3944 __ And(t3, t3, Operand(t1));
3945 __ Branch(USE_DELAY_SLOT, &done, eq, t3, Operand(t1));
3946 __ mov(t3, zero_reg); // In delay slot.
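              // The delay-slot mov above executes whether or not the branch is
              // taken, so t3 is 0 for NaN/Infinity and is overwritten by the
              // conversion below for every other value.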
3947
 3948          // Not infinity or NaN: simply convert to int.
3949 if (IsElementTypeSigned(array_type)) {
3950 __ trunc_w_d(f0, f0);
3951 __ mfc1(t3, f0);
3952 } else {
3953 __ Trunc_uw_d(f0, t3);
3954 }
3955
3956 // t3: HeapNumber converted to integer
3957 __ bind(&done);
3958 switch (array_type) {
3959 case kExternalByteArray:
3960 case kExternalUnsignedByteArray:
3961 __ addu(t8, a3, t0);
3962 __ sb(t3, MemOperand(t8, 0));
3963 break;
3964 case kExternalShortArray:
3965 case kExternalUnsignedShortArray:
3966 __ sll(t8, t0, 1);
3967 __ addu(t8, a3, t8);
3968 __ sh(t3, MemOperand(t8, 0));
3969 break;
3970 case kExternalIntArray:
3971 case kExternalUnsignedIntArray:
3972 __ sll(t8, t0, 2);
3973 __ addu(t8, a3, t8);
3974 __ sw(t3, MemOperand(t8, 0));
3975 break;
3976 default:
3977 UNREACHABLE();
3978 break;
3979 }
3980 }
3981
3982 // Entry registers are intact, a0 holds the value
3983 // which is the return value.
3984 __ mov(v0, value);
3985 __ Ret();
3986 } else {
3987 // FPU is not available, do manual conversions.
3988
3989 __ lw(t3, FieldMemOperand(value, HeapNumber::kExponentOffset));
3990 __ lw(t4, FieldMemOperand(value, HeapNumber::kMantissaOffset));
3991
3992 if (array_type == kExternalFloatArray) {
3993 Label done, nan_or_infinity_or_zero;
3994 static const int kMantissaInHiWordShift =
3995 kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3996
3997 static const int kMantissaInLoWordShift =
3998 kBitsPerInt - kMantissaInHiWordShift;
3999
4000 // Test for all special exponent values: zeros, subnormal numbers, NaNs
4001 // and infinities. All these should be converted to 0.
4002 __ li(t5, HeapNumber::kExponentMask);
4003 __ and_(t6, t3, t5);
4004 __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(zero_reg));
4005
4006 __ xor_(t1, t6, t5);
4007 __ li(t2, kBinary32ExponentMask);
4008 __ movz(t6, t2, t1); // Only if t6 is equal to t5.
 4009        __ Branch(&nan_or_infinity_or_zero, eq, t1, Operand(zero_reg));  // Exponent was all ones.
4010
4011 // Rebias exponent.
4012 __ srl(t6, t6, HeapNumber::kExponentShift);
4013 __ Addu(t6,
4014 t6,
4015 Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
4016
4017 __ li(t1, Operand(kBinary32MaxExponent));
4018 __ Slt(t1, t1, t6);
4019 __ And(t2, t3, Operand(HeapNumber::kSignMask));
4020 __ Or(t2, t2, Operand(kBinary32ExponentMask));
4021 __ movn(t3, t2, t1); // Only if t6 is gt kBinary32MaxExponent.
4022 __ Branch(&done, gt, t6, Operand(kBinary32MaxExponent));
4023
4024 __ Slt(t1, t6, Operand(kBinary32MinExponent));
4025 __ And(t2, t3, Operand(HeapNumber::kSignMask));
4026 __ movn(t3, t2, t1); // Only if t6 is lt kBinary32MinExponent.
4027 __ Branch(&done, lt, t6, Operand(kBinary32MinExponent));
4028
4029 __ And(t7, t3, Operand(HeapNumber::kSignMask));
4030 __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
4031 __ sll(t3, t3, kMantissaInHiWordShift);
4032 __ or_(t7, t7, t3);
4033 __ srl(t4, t4, kMantissaInLoWordShift);
4034 __ or_(t7, t7, t4);
4035 __ sll(t6, t6, kBinary32ExponentShift);
4036 __ or_(t3, t7, t6);
4037
4038 __ bind(&done);
4039 __ sll(t9, a1, 2);
4040 __ addu(t9, a2, t9);
4041 __ sw(t3, MemOperand(t9, 0));
4042
4043 // Entry registers are intact, a0 holds the value which is the return
4044 // value.
4045 __ mov(v0, value);
4046 __ Ret();
4047
4048 __ bind(&nan_or_infinity_or_zero);
4049 __ And(t7, t3, Operand(HeapNumber::kSignMask));
4050 __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
4051 __ or_(t6, t6, t7);
4052 __ sll(t3, t3, kMantissaInHiWordShift);
4053 __ or_(t6, t6, t3);
4054 __ srl(t4, t4, kMantissaInLoWordShift);
4055 __ or_(t3, t6, t4);
4056 __ Branch(&done);
4057 } else if (array_type == kExternalDoubleArray) {
4058 __ sll(t8, t0, 3);
4059 __ addu(t8, a3, t8);
4060 // t8: effective address of destination element.
4061 __ sw(t4, MemOperand(t8, 0));
4062 __ sw(t3, MemOperand(t8, Register::kSizeInBytes));
4063 __ Ret();
4064 } else {
4065 bool is_signed_type = IsElementTypeSigned(array_type);
 4066      int meaningful_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
4067 int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
4068
4069 Label done, sign;
4070
4071 // Test for all special exponent values: zeros, subnormal numbers, NaNs
4072 // and infinities. All these should be converted to 0.
4073 __ li(t5, HeapNumber::kExponentMask);
4074 __ and_(t6, t3, t5);
4075 __ movz(t3, zero_reg, t6); // Only if t6 is equal to zero.
4076 __ Branch(&done, eq, t6, Operand(zero_reg));
4077
4078 __ xor_(t2, t6, t5);
4079 __ movz(t3, zero_reg, t2); // Only if t6 is equal to t5.
4080 __ Branch(&done, eq, t6, Operand(t5));
4081
4082 // Unbias exponent.
4083 __ srl(t6, t6, HeapNumber::kExponentShift);
4084 __ Subu(t6, t6, Operand(HeapNumber::kExponentBias));
4085 // If exponent is negative then result is 0.
4086 __ slt(t2, t6, zero_reg);
4087 __ movn(t3, zero_reg, t2); // Only if exponent is negative.
4088 __ Branch(&done, lt, t6, Operand(zero_reg));
4089
 4090      // If the exponent is too big, the result is the minimal value.
 4091      __ slti(t1, t6, meaningful_bits - 1);
 4092      __ li(t2, min_value);
 4093      __ movz(t3, t2, t1);  // Only if t6 is ge meaningful_bits - 1.
 4094      __ Branch(&done, ge, t6, Operand(meaningful_bits - 1));
4095
4096 __ And(t5, t3, Operand(HeapNumber::kSignMask));
4097 __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
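      // Re-attach the implicit leading 1 of the mantissa before shifting.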
4098 __ Or(t3, t3, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
4099
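      // Align the mantissa so that the integer part of the number ends up in
      // t3: shift right when the exponent is at most kMantissaBitsInTopWord,
      // otherwise shift left and pull the remaining bits in from the low
      // mantissa word (t4).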
4100 __ li(t9, HeapNumber::kMantissaBitsInTopWord);
4101 __ subu(t6, t9, t6);
4102 __ slt(t1, t6, zero_reg);
4103 __ srlv(t2, t3, t6);
4104 __ movz(t3, t2, t1); // Only if t6 is positive.
4105 __ Branch(&sign, ge, t6, Operand(zero_reg));
4106
4107 __ subu(t6, zero_reg, t6);
4108 __ sllv(t3, t3, t6);
 4109      __ li(t9, meaningful_bits);
4110 __ subu(t6, t9, t6);
4111 __ srlv(t4, t4, t6);
4112 __ or_(t3, t3, t4);
4113
4114 __ bind(&sign);
4115 __ subu(t2, t3, zero_reg);
4116 __ movz(t3, t2, t5); // Only if t5 is zero.
4117
4118 __ bind(&done);
4119
4120 // Result is in t3.
4121 // This switch block should be exactly the same as above (FPU mode).
4122 switch (array_type) {
4123 case kExternalByteArray:
4124 case kExternalUnsignedByteArray:
4125 __ addu(t8, a3, t0);
4126 __ sb(t3, MemOperand(t8, 0));
4127 break;
4128 case kExternalShortArray:
4129 case kExternalUnsignedShortArray:
4130 __ sll(t8, t0, 1);
4131 __ addu(t8, a3, t8);
4132 __ sh(t3, MemOperand(t8, 0));
4133 break;
4134 case kExternalIntArray:
4135 case kExternalUnsignedIntArray:
4136 __ sll(t8, t0, 2);
4137 __ addu(t8, a3, t8);
4138 __ sw(t3, MemOperand(t8, 0));
4139 break;
4140 default:
4141 UNREACHABLE();
4142 break;
4143 }
4144 }
4145 }
4146 }
4147
danno@chromium.org40cb8782011-05-25 07:58:50 +00004148  // Slow case: value, key, and receiver are still intact in a0, a1, and a2.
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00004149  __ bind(&slow);
danno@chromium.org40cb8782011-05-25 07:58:50 +00004150  __ IncrementCounter(
 4151      masm->isolate()->counters()->keyed_load_external_array_slow(),
 4152      1, t0, t1);
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00004153  // ---------- S t a t e --------------
 4154  // -- a0     : value
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00004155  // -- a1     : key
danno@chromium.org40cb8782011-05-25 07:58:50 +00004156  // -- a2     : receiver
 4157  // -- ra     : return address
 4158  // -----------------------------------
4159 Handle<Code> slow_ic =
4160 masm->isolate()->builtins()->KeyedStoreIC_Slow();
4161 __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4162
4163 // Miss case, call the runtime.
4164 __ bind(&miss_force_generic);
4165
4166 // ---------- S t a t e --------------
 4167  // -- a0     : value
 4168  // -- a1     : key
 4169  // -- a2     : receiver
       // -- ra     : return address
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00004170 // -----------------------------------
4171
danno@chromium.org40cb8782011-05-25 07:58:50 +00004172 Handle<Code> miss_ic =
4173 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4174 __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4175}
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00004176
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00004177
danno@chromium.org40cb8782011-05-25 07:58:50 +00004178void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
4179 // ----------- S t a t e -------------
4180 // -- ra : return address
4181 // -- a0 : key
4182 // -- a1 : receiver
4183 // -----------------------------------
4184 Label miss_force_generic;
kmillikin@chromium.orgc53e10d2011-05-18 09:12:58 +00004185
danno@chromium.org40cb8782011-05-25 07:58:50 +00004186  // This stub is meant to be tail-jumped to; the receiver must already
 4187  // have been verified by the caller not to be a smi.
4188
4189 // Check that the key is a smi.
4190 __ JumpIfNotSmi(a0, &miss_force_generic);
4191
4192 // Get the elements array.
4193 __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
4194 __ AssertFastElements(a2);
4195
4196 // Check that the key is within bounds.
4197 __ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset));
4198 __ Branch(&miss_force_generic, hs, a0, Operand(a3));
4199
4200 // Load the result and make sure it's not the hole.
4201 __ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4202 ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
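      // The key is a smi (index * 2), so scaling it by
      // kPointerSizeLog2 - kSmiTagSize produces an offset of index * kPointerSize.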
4203 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
4204 __ Addu(t0, t0, a3);
4205 __ lw(t0, MemOperand(t0));
4206 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
4207 __ Branch(&miss_force_generic, eq, t0, Operand(t1));
4208 __ mov(v0, t0);
4209 __ Ret();
4210
4211 __ bind(&miss_force_generic);
4212 Code* stub = masm->isolate()->builtins()->builtin(
4213 Builtins::kKeyedLoadIC_MissForceGeneric);
4214 __ Jump(Handle<Code>(stub), RelocInfo::CODE_TARGET);
4215}
4216
4217
4218void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
4219 bool is_js_array) {
4220 // ----------- S t a t e -------------
4221 // -- a0 : value
4222 // -- a1 : key
4223 // -- a2 : receiver
4224 // -- ra : return address
4225 // -- a3 : scratch
 4226  // -- t0    : scratch (elements)
4227 // -----------------------------------
4228 Label miss_force_generic;
4229
4230 Register value_reg = a0;
4231 Register key_reg = a1;
4232 Register receiver_reg = a2;
4233 Register scratch = a3;
4234 Register elements_reg = t0;
4235 Register scratch2 = t1;
4236 Register scratch3 = t2;
4237
 4238  // This stub is meant to be tail-jumped to; the receiver must already
 4239  // have been verified by the caller not to be a smi.
4240
4241 // Check that the key is a smi.
4242 __ JumpIfNotSmi(a0, &miss_force_generic);
4243
4244 // Get the elements array and make sure it is a fast element array, not 'cow'.
4245 __ lw(elements_reg,
4246 FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4247 __ CheckMap(elements_reg,
4248 scratch,
4249 Heap::kFixedArrayMapRootIndex,
4250 &miss_force_generic,
4251 DONT_DO_SMI_CHECK);
4252
4253 // Check that the key is within bounds.
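      // For JSArrays the bound is the array's length, which can be smaller
      // than the capacity of the backing FixedArray; for other objects it is
      // the length of the elements array itself.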
4254 if (is_js_array) {
4255 __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4256 } else {
4257 __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4258 }
4259 // Compare smis.
4260 __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
4261
4262 __ Addu(scratch,
4263 elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4264 ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4265 __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize);
4266 __ Addu(scratch3, scratch2, scratch);
4267 __ sw(value_reg, MemOperand(scratch3));
 4268  __ RecordWrite(scratch, Operand(scratch2), receiver_reg, elements_reg);
4269
4270 // value_reg (a0) is preserved.
4271 // Done.
4272 __ Ret();
4273
4274 __ bind(&miss_force_generic);
4275 Handle<Code> ic =
4276 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4277 __ Jump(ic, RelocInfo::CODE_TARGET);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00004278}
4279
4280
ager@chromium.org5c838252010-02-19 08:53:10 +00004281#undef __
4282
4283} } // namespace v8::internal
4284
erik.corry@gmail.com9dfbea42010-05-21 12:58:28 +00004285#endif // V8_TARGET_ARCH_MIPS