// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_STUB_CACHE_H_
#define V8_STUB_CACHE_H_

#include "macro-assembler.h"

namespace v8 {
namespace internal {

// The stub cache is used for megamorphic calls and property accesses.
// It maps (map, name, type) -> Code*.

// The design of the table uses the inline cache stubs used for
// monomorphic calls.  The beauty of this design is that we do not have
// to invalidate the cache whenever a prototype map is changed.  The
// stub validates the map chain as in the monomorphic case.
45class SCTableReference;
46
47class StubCache : public AllStatic {
48 public:
49 struct Entry {
50 String* key;
51 Code* value;
52 };
53
54
55 static void Initialize(bool create_heap_objects);
56
57 // Computes the right stub matching. Inserts the result in the
58 // cache before returning. This might compile a stub if needed.
59 static Object* ComputeLoadField(String* name,
60 JSObject* receiver,
61 JSObject* holder,
62 int field_index);
63
64 static Object* ComputeLoadCallback(String* name,
65 JSObject* receiver,
66 JSObject* holder,
67 AccessorInfo* callback);
68
69 static Object* ComputeLoadConstant(String* name,
70 JSObject* receiver,
71 JSObject* holder,
72 Object* value);
73
74 static Object* ComputeLoadInterceptor(String* name,
75 JSObject* receiver,
76 JSObject* holder);
77
78 static Object* ComputeLoadNormal(String* name, JSObject* receiver);
79
80
81 static Object* ComputeLoadGlobal(String* name,
82 JSObject* receiver,
83 GlobalObject* holder,
84 JSGlobalPropertyCell* cell,
85 bool is_dont_delete);
86
87
88 // ---
89
90 static Object* ComputeKeyedLoadField(String* name,
91 JSObject* receiver,
92 JSObject* holder,
93 int field_index);
94
95 static Object* ComputeKeyedLoadCallback(String* name,
96 JSObject* receiver,
97 JSObject* holder,
98 AccessorInfo* callback);
99
100 static Object* ComputeKeyedLoadConstant(String* name, JSObject* receiver,
101 JSObject* holder, Object* value);
102
103 static Object* ComputeKeyedLoadInterceptor(String* name,
104 JSObject* receiver,
105 JSObject* holder);
106
107 static Object* ComputeKeyedLoadArrayLength(String* name, JSArray* receiver);
108
109 static Object* ComputeKeyedLoadStringLength(String* name,
110 String* receiver);
111
112 static Object* ComputeKeyedLoadFunctionPrototype(String* name,
113 JSFunction* receiver);
114
115 // ---
116
117 static Object* ComputeStoreField(String* name,
118 JSObject* receiver,
119 int field_index,
120 Map* transition = NULL);
121
122 static Object* ComputeStoreGlobal(String* name,
123 GlobalObject* receiver,
124 JSGlobalPropertyCell* cell);
125
126 static Object* ComputeStoreCallback(String* name,
127 JSObject* receiver,
128 AccessorInfo* callback);
129
130 static Object* ComputeStoreInterceptor(String* name, JSObject* receiver);
131
132 // ---
133
134 static Object* ComputeKeyedStoreField(String* name,
135 JSObject* receiver,
136 int field_index,
137 Map* transition = NULL);
138
139 // ---
140
141 static Object* ComputeCallField(int argc,
142 InLoopFlag in_loop,
143 String* name,
144 Object* object,
145 JSObject* holder,
146 int index);
147
148 static Object* ComputeCallConstant(int argc,
149 InLoopFlag in_loop,
150 String* name,
151 Object* object,
152 JSObject* holder,
153 JSFunction* function);
154
155 static Object* ComputeCallNormal(int argc,
156 InLoopFlag in_loop,
157 String* name,
158 JSObject* receiver);
159
160 static Object* ComputeCallInterceptor(int argc,
161 String* name,
162 Object* object,
163 JSObject* holder);
164
165 static Object* ComputeCallGlobal(int argc,
166 InLoopFlag in_loop,
167 String* name,
168 JSObject* receiver,
169 GlobalObject* holder,
170 JSGlobalPropertyCell* cell,
171 JSFunction* function);
172
173 // ---
174
175 static Object* ComputeCallInitialize(int argc, InLoopFlag in_loop);
176 static Object* ComputeCallPreMonomorphic(int argc, InLoopFlag in_loop);
177 static Object* ComputeCallNormal(int argc, InLoopFlag in_loop);
178 static Object* ComputeCallMegamorphic(int argc, InLoopFlag in_loop);
179 static Object* ComputeCallMiss(int argc);
180
181 // Finds the Code object stored in the Heap::non_monomorphic_cache().
182 static Code* FindCallInitialize(int argc, InLoopFlag in_loop);
183
184#ifdef ENABLE_DEBUGGER_SUPPORT
185 static Object* ComputeCallDebugBreak(int argc);
186 static Object* ComputeCallDebugPrepareStepIn(int argc);
187#endif
188
189 static Object* ComputeLazyCompile(int argc);
190
191
192 // Update cache for entry hash(name, map).
193 static Code* Set(String* name, Map* map, Code* code);
194
195 // Clear the lookup table (@ mark compact collection).
196 static void Clear();
197
198 // Functions for generating stubs at startup.
199 static void GenerateMiss(MacroAssembler* masm);
200
201 // Generate code for probing the stub cache table.
202 // If extra != no_reg it might be used as am extra scratch register.
203 static void GenerateProbe(MacroAssembler* masm,
204 Code::Flags flags,
205 Register receiver,
206 Register name,
207 Register scratch,
208 Register extra);
209
210 enum Table {
211 kPrimary,
212 kSecondary
213 };
214
215 private:
216 friend class SCTableReference;
217 static const int kPrimaryTableSize = 2048;
218 static const int kSecondaryTableSize = 512;
219 static Entry primary_[];
220 static Entry secondary_[];
221
222 // Computes the hashed offsets for primary and secondary caches.
223 static int PrimaryOffset(String* name, Code::Flags flags, Map* map) {
224 // This works well because the heap object tag size and the hash
225 // shift are equal. Shifting down the length field to get the
226 // hash code would effectively throw away two bits of the hash
227 // code.
228 ASSERT(kHeapObjectTagSize == String::kHashShift);
Steve Blockd0582a62009-12-15 09:54:21 +0000229 // Compute the hash of the name (use entire hash field).
Steve Blocka7e24c12009-10-30 11:49:00 +0000230 ASSERT(name->HasHashCode());
Steve Blockd0582a62009-12-15 09:54:21 +0000231 uint32_t field = name->hash_field();
Steve Blocka7e24c12009-10-30 11:49:00 +0000232 // Using only the low bits in 64-bit mode is unlikely to increase the
233 // risk of collision even if the heap is spread over an area larger than
234 // 4Gb (and not at all if it isn't).
235 uint32_t map_low32bits =
236 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map));
237 // We always set the in_loop bit to zero when generating the lookup code
238 // so do it here too so the hash codes match.
239 uint32_t iflags =
240 (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
241 // Base the offset on a simple combination of name, flags, and map.
242 uint32_t key = (map_low32bits + field) ^ iflags;
243 return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
244 }
245
246 static int SecondaryOffset(String* name, Code::Flags flags, int seed) {
247 // Use the seed from the primary cache in the secondary cache.
248 uint32_t string_low32bits =
249 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
250 // We always set the in_loop bit to zero when generating the lookup code
251 // so do it here too so the hash codes match.
252 uint32_t iflags =
253 (static_cast<uint32_t>(flags) & ~Code::kFlagsICInLoopMask);
254 uint32_t key = seed - string_low32bits + iflags;
255 return key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
256 }
257
258 // Compute the entry for a given offset in exactly the same way as
259 // we do in generated code. We generate an hash code that already
260 // ends in String::kHashShift 0s. Then we shift it so it is a multiple
261 // of sizeof(Entry). This makes it easier to avoid making mistakes
262 // in the hashed offset computations.
263 static Entry* entry(Entry* table, int offset) {
264 const int shift_amount = kPointerSizeLog2 + 1 - String::kHashShift;
265 return reinterpret_cast<Entry*>(
266 reinterpret_cast<Address>(table) + (offset << shift_amount));
267 }
268};
269
270
271class SCTableReference {
272 public:
273 static SCTableReference keyReference(StubCache::Table table) {
274 return SCTableReference(
275 reinterpret_cast<Address>(&first_entry(table)->key));
276 }
277
278
279 static SCTableReference valueReference(StubCache::Table table) {
280 return SCTableReference(
281 reinterpret_cast<Address>(&first_entry(table)->value));
282 }
283
284 Address address() const { return address_; }
285
286 private:
287 explicit SCTableReference(Address address) : address_(address) {}
288
289 static StubCache::Entry* first_entry(StubCache::Table table) {
290 switch (table) {
291 case StubCache::kPrimary: return StubCache::primary_;
292 case StubCache::kSecondary: return StubCache::secondary_;
293 }
294 UNREACHABLE();
295 return NULL;
296 }
297
298 Address address_;
299};
300
301// ------------------------------------------------------------------------
302
303
304// Support functions for IC stubs for callbacks.
305Object* LoadCallbackProperty(Arguments args);
306Object* StoreCallbackProperty(Arguments args);
307
308
309// Support functions for IC stubs for interceptors.
310Object* LoadPropertyWithInterceptorOnly(Arguments args);
311Object* LoadPropertyWithInterceptorForLoad(Arguments args);
312Object* LoadPropertyWithInterceptorForCall(Arguments args);
313Object* StoreInterceptorProperty(Arguments args);
314Object* CallInterceptorProperty(Arguments args);
Andrei Popescu402d9372010-02-26 13:31:12 +0000315Object* KeyedLoadPropertyWithInterceptor(Arguments args);
Steve Blocka7e24c12009-10-30 11:49:00 +0000316
317
318// Support function for computing call IC miss stubs.
319Handle<Code> ComputeCallMiss(int argc);
320
321
322// The stub compiler compiles stubs for the stub cache.
323class StubCompiler BASE_EMBEDDED {
324 public:
325 enum CheckType {
326 RECEIVER_MAP_CHECK,
327 STRING_CHECK,
328 NUMBER_CHECK,
329 BOOLEAN_CHECK,
330 JSARRAY_HAS_FAST_ELEMENTS_CHECK
331 };
332
333 StubCompiler() : scope_(), masm_(NULL, 256), failure_(NULL) { }
334
335 Object* CompileCallInitialize(Code::Flags flags);
336 Object* CompileCallPreMonomorphic(Code::Flags flags);
337 Object* CompileCallNormal(Code::Flags flags);
338 Object* CompileCallMegamorphic(Code::Flags flags);
339 Object* CompileCallMiss(Code::Flags flags);
340#ifdef ENABLE_DEBUGGER_SUPPORT
341 Object* CompileCallDebugBreak(Code::Flags flags);
342 Object* CompileCallDebugPrepareStepIn(Code::Flags flags);
343#endif
344 Object* CompileLazyCompile(Code::Flags flags);
345
346 // Static functions for generating parts of stubs.
347 static void GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
348 int index,
349 Register prototype);
Andrei Popescu402d9372010-02-26 13:31:12 +0000350
Steve Blocka7e24c12009-10-30 11:49:00 +0000351 static void GenerateFastPropertyLoad(MacroAssembler* masm,
352 Register dst, Register src,
353 JSObject* holder, int index);
354
355 static void GenerateLoadArrayLength(MacroAssembler* masm,
356 Register receiver,
357 Register scratch,
358 Label* miss_label);
Andrei Popescu402d9372010-02-26 13:31:12 +0000359
Steve Blocka7e24c12009-10-30 11:49:00 +0000360 static void GenerateLoadStringLength(MacroAssembler* masm,
361 Register receiver,
Andrei Popescu402d9372010-02-26 13:31:12 +0000362 Register scratch1,
363 Register scratch2,
Steve Blocka7e24c12009-10-30 11:49:00 +0000364 Label* miss_label);
Andrei Popescu402d9372010-02-26 13:31:12 +0000365
Steve Blocka7e24c12009-10-30 11:49:00 +0000366 static void GenerateLoadFunctionPrototype(MacroAssembler* masm,
367 Register receiver,
368 Register scratch1,
369 Register scratch2,
370 Label* miss_label);
Andrei Popescu402d9372010-02-26 13:31:12 +0000371
Steve Blocka7e24c12009-10-30 11:49:00 +0000372 static void GenerateStoreField(MacroAssembler* masm,
Steve Blocka7e24c12009-10-30 11:49:00 +0000373 JSObject* object,
374 int index,
375 Map* transition,
376 Register receiver_reg,
377 Register name_reg,
378 Register scratch,
379 Label* miss_label);
Andrei Popescu402d9372010-02-26 13:31:12 +0000380
Steve Blocka7e24c12009-10-30 11:49:00 +0000381 static void GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind);
382
383 // Check the integrity of the prototype chain to make sure that the
384 // current IC is still valid.
Andrei Popescu402d9372010-02-26 13:31:12 +0000385
Steve Blocka7e24c12009-10-30 11:49:00 +0000386 Register CheckPrototypes(JSObject* object,
387 Register object_reg,
388 JSObject* holder,
389 Register holder_reg,
390 Register scratch,
391 String* name,
Andrei Popescu402d9372010-02-26 13:31:12 +0000392 Label* miss) {
393 return CheckPrototypes(object, object_reg, holder, holder_reg, scratch,
394 name, kInvalidProtoDepth, miss);
395 }
396
397 Register CheckPrototypes(JSObject* object,
398 Register object_reg,
399 JSObject* holder,
400 Register holder_reg,
401 Register scratch,
402 String* name,
403 int save_at_depth,
Steve Blocka7e24c12009-10-30 11:49:00 +0000404 Label* miss);
405
406 protected:
407 Object* GetCodeWithFlags(Code::Flags flags, const char* name);
408 Object* GetCodeWithFlags(Code::Flags flags, String* name);
409
410 MacroAssembler* masm() { return &masm_; }
411 void set_failure(Failure* failure) { failure_ = failure; }
412
413 void GenerateLoadField(JSObject* object,
414 JSObject* holder,
415 Register receiver,
416 Register scratch1,
417 Register scratch2,
418 int index,
419 String* name,
420 Label* miss);
421
Leon Clarkee46be812010-01-19 14:06:41 +0000422 bool GenerateLoadCallback(JSObject* object,
Steve Blocka7e24c12009-10-30 11:49:00 +0000423 JSObject* holder,
424 Register receiver,
425 Register name_reg,
426 Register scratch1,
427 Register scratch2,
428 AccessorInfo* callback,
429 String* name,
Leon Clarkee46be812010-01-19 14:06:41 +0000430 Label* miss,
431 Failure** failure);
Steve Blocka7e24c12009-10-30 11:49:00 +0000432
433 void GenerateLoadConstant(JSObject* object,
434 JSObject* holder,
435 Register receiver,
436 Register scratch1,
437 Register scratch2,
438 Object* value,
439 String* name,
440 Label* miss);
441
442 void GenerateLoadInterceptor(JSObject* object,
443 JSObject* holder,
444 LookupResult* lookup,
445 Register receiver,
446 Register name_reg,
447 Register scratch1,
448 Register scratch2,
449 String* name,
450 Label* miss);
451
Leon Clarke4515c472010-02-03 11:58:03 +0000452 static void LookupPostInterceptor(JSObject* holder,
453 String* name,
454 LookupResult* lookup);
455
Steve Blocka7e24c12009-10-30 11:49:00 +0000456 private:
457 HandleScope scope_;
458 MacroAssembler masm_;
459 Failure* failure_;
460};
461
462
463class LoadStubCompiler: public StubCompiler {
464 public:
465 Object* CompileLoadField(JSObject* object,
466 JSObject* holder,
467 int index,
468 String* name);
Leon Clarkee46be812010-01-19 14:06:41 +0000469 Object* CompileLoadCallback(String* name,
470 JSObject* object,
Steve Blocka7e24c12009-10-30 11:49:00 +0000471 JSObject* holder,
Leon Clarkee46be812010-01-19 14:06:41 +0000472 AccessorInfo* callback);
Steve Blocka7e24c12009-10-30 11:49:00 +0000473 Object* CompileLoadConstant(JSObject* object,
474 JSObject* holder,
475 Object* value,
476 String* name);
477 Object* CompileLoadInterceptor(JSObject* object,
478 JSObject* holder,
479 String* name);
480
481 Object* CompileLoadGlobal(JSObject* object,
482 GlobalObject* holder,
483 JSGlobalPropertyCell* cell,
484 String* name,
485 bool is_dont_delete);
486
487 private:
488 Object* GetCode(PropertyType type, String* name);
489};
490
491
492class KeyedLoadStubCompiler: public StubCompiler {
493 public:
494 Object* CompileLoadField(String* name,
495 JSObject* object,
496 JSObject* holder,
497 int index);
498 Object* CompileLoadCallback(String* name,
499 JSObject* object,
500 JSObject* holder,
501 AccessorInfo* callback);
502 Object* CompileLoadConstant(String* name,
503 JSObject* object,
504 JSObject* holder,
505 Object* value);
506 Object* CompileLoadInterceptor(JSObject* object,
507 JSObject* holder,
508 String* name);
509 Object* CompileLoadArrayLength(String* name);
510 Object* CompileLoadStringLength(String* name);
511 Object* CompileLoadFunctionPrototype(String* name);
512
513 private:
514 Object* GetCode(PropertyType type, String* name);
515};
516
517
518class StoreStubCompiler: public StubCompiler {
519 public:
520 Object* CompileStoreField(JSObject* object,
521 int index,
522 Map* transition,
523 String* name);
524 Object* CompileStoreCallback(JSObject* object,
525 AccessorInfo* callbacks,
526 String* name);
527 Object* CompileStoreInterceptor(JSObject* object, String* name);
528 Object* CompileStoreGlobal(GlobalObject* object,
529 JSGlobalPropertyCell* holder,
530 String* name);
531
532
533 private:
534 Object* GetCode(PropertyType type, String* name);
535};
536
537
538class KeyedStoreStubCompiler: public StubCompiler {
539 public:
540 Object* CompileStoreField(JSObject* object,
541 int index,
542 Map* transition,
543 String* name);
544
545 private:
546 Object* GetCode(PropertyType type, String* name);
547};
548
549
550class CallStubCompiler: public StubCompiler {
551 public:
552 explicit CallStubCompiler(int argc, InLoopFlag in_loop)
553 : arguments_(argc), in_loop_(in_loop) { }
554
Andrei Popescu402d9372010-02-26 13:31:12 +0000555 Object* CompileCallField(JSObject* object,
Steve Blocka7e24c12009-10-30 11:49:00 +0000556 JSObject* holder,
557 int index,
558 String* name);
559 Object* CompileCallConstant(Object* object,
560 JSObject* holder,
561 JSFunction* function,
562 String* name,
563 CheckType check);
Andrei Popescu402d9372010-02-26 13:31:12 +0000564 Object* CompileCallInterceptor(JSObject* object,
Steve Blocka7e24c12009-10-30 11:49:00 +0000565 JSObject* holder,
566 String* name);
567 Object* CompileCallGlobal(JSObject* object,
568 GlobalObject* holder,
569 JSGlobalPropertyCell* cell,
570 JSFunction* function,
571 String* name);
572
573 private:
574 const ParameterCount arguments_;
575 const InLoopFlag in_loop_;
576
577 const ParameterCount& arguments() { return arguments_; }
578
579 Object* GetCode(PropertyType type, String* name);
580};
581
582
583class ConstructStubCompiler: public StubCompiler {
584 public:
585 explicit ConstructStubCompiler() {}
586
587 Object* CompileConstructStub(SharedFunctionInfo* shared);
588
589 private:
590 Object* GetCode();
591};
592
593
} }  // namespace v8::internal

#endif  // V8_STUB_CACHE_H_