// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Review notes:
//
// - The use of macros in these inline functions may seem superfluous
//   but it is absolutely needed to make sure gcc generates optimal
//   code. gcc is not happy when attempting to inline too deep.
//

#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_

#include "objects.h"
#include "contexts.h"
#include "conversions-inl.h"
#include "heap.h"
#include "memory.h"
#include "property.h"
#include "spaces.h"

namespace v8 {
namespace internal {

PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}


Smi* PropertyDetails::AsSmi() {
  return Smi::FromInt(value_);
}


PropertyDetails PropertyDetails::AsDeleted() {
  PropertyDetails d(DONT_ENUM, NORMAL);
  Smi* smi = Smi::FromInt(AsSmi()->value() | DeletedField::encode(1));
  return PropertyDetails(smi);
}


#define CAST_ACCESSOR(type)                     \
  type* type::cast(Object* object) {            \
    ASSERT(object->Is##type());                 \
    return reinterpret_cast<type*>(object);     \
  }


#define INT_ACCESSORS(holder, name, offset)                     \
  int holder::name() { return READ_INT_FIELD(this, offset); }   \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


#define ACCESSORS(holder, name, type, offset)                           \
  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {         \
    WRITE_FIELD(this, offset, value);                                   \
    CONDITIONAL_WRITE_BARRIER(this, offset, mode);                      \
  }


#define SMI_ACCESSORS(holder, name, offset)           \
  int holder::name() {                                \
    Object* value = READ_FIELD(this, offset);         \
    return Smi::cast(value)->value();                 \
  }                                                   \
  void holder::set_##name(int value) {                \
    WRITE_FIELD(this, offset, Smi::FromInt(value));   \
  }


#define BOOL_GETTER(holder, field, name, offset)      \
  bool holder::name() {                               \
    return BooleanBit::get(field(), offset);          \
  }                                                   \


#define BOOL_ACCESSORS(holder, field, name, offset)          \
  bool holder::name() {                                      \
    return BooleanBit::get(field(), offset);                 \
  }                                                          \
  void holder::set_##name(bool value) {                      \
    set_##field(BooleanBit::set(field(), offset, value));    \
  }

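// For illustration, SMI_ACCESSORS(String, length, kLengthOffset), used further
// down in this file, expands to roughly:
//
//   int String::length() {
//     Object* value = READ_FIELD(this, kLengthOffset);
//     return Smi::cast(value)->value();
//   }
//   void String::set_length(int value) {
//     WRITE_FIELD(this, kLengthOffset, Smi::FromInt(value));
//   }
//
// i.e. the field is read and written as a tagged smi rather than a raw
// integer.
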
bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
  // There is a constraint on the object; check.
  if (!this->IsJSObject()) return false;
  // Fetch the constructor function of the object.
  Object* cons_obj = JSObject::cast(this)->map()->constructor();
  if (!cons_obj->IsJSFunction()) return false;
  JSFunction* fun = JSFunction::cast(cons_obj);
  // Iterate through the chain of inheriting function templates to
  // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == expected) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}


bool Object::IsSmi() {
  return HAS_SMI_TAG(this);
}


bool Object::IsHeapObject() {
  return Internals::HasHeapObjectTag(this);
}


bool Object::IsHeapNumber() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
}


bool Object::IsString() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}


bool Object::IsSymbol() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  // Because the symbol tag is non-zero and no non-string types have the
  // symbol bit set we can test for symbols with a very simple test
  // operation.
  ASSERT(kSymbolTag != 0);
  ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
  return (type & kIsSymbolMask) != 0;
}


bool Object::IsConsString() {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  return (type & (kIsNotStringMask | kStringRepresentationMask)) ==
         (kStringTag | kConsStringTag);
}


bool Object::IsSeqString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}


bool Object::IsSeqAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsAsciiRepresentation();
}


bool Object::IsSeqTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


bool Object::IsExternalString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalAsciiString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsAsciiRepresentation();
}


bool Object::IsExternalTwoByteString() {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}


StringShape::StringShape(String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  ASSERT((type_ & kIsNotStringMask) == kStringTag);
}


bool StringShape::IsSymbol() {
  ASSERT(valid());
  ASSERT(kSymbolTag != 0);
  return (type_ & kIsSymbolMask) != 0;
}


bool String::IsAsciiRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kAsciiStringTag;
}


bool String::IsTwoByteRepresentation() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}

bool String::HasOnlyAsciiChars() {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kAsciiStringTag ||
         (type & kAsciiDataHintMask) == kAsciiDataHintTag;
}


bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


STATIC_CHECK((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);


bool StringShape::IsSequentialAscii() {
  return full_representation_tag() == (kSeqStringTag | kAsciiStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalAscii() {
  return full_representation_tag() == (kExternalStringTag | kAsciiStringTag);
}


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_CHECK((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);


uc32 FlatStringReader::Get(int index) {
  ASSERT(0 <= index && index <= length_);
  if (is_ascii_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}


bool Object::IsNumber() {
  return IsSmi() || IsHeapNumber();
}


bool Object::IsByteArray() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == BYTE_ARRAY_TYPE;
}


bool Object::IsPixelArray() {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() == PIXEL_ARRAY_TYPE;
}


bool Object::IsExternalArray() {
  if (!Object::IsHeapObject())
    return false;
  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
          instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
}


bool Object::IsExternalByteArray() {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() ==
         EXTERNAL_BYTE_ARRAY_TYPE;
}


bool Object::IsExternalUnsignedByteArray() {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() ==
         EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE;
}


bool Object::IsExternalShortArray() {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() ==
         EXTERNAL_SHORT_ARRAY_TYPE;
}


bool Object::IsExternalUnsignedShortArray() {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() ==
         EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE;
}


bool Object::IsExternalIntArray() {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() ==
         EXTERNAL_INT_ARRAY_TYPE;
}


bool Object::IsExternalUnsignedIntArray() {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() ==
         EXTERNAL_UNSIGNED_INT_ARRAY_TYPE;
}


bool Object::IsExternalFloatArray() {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() ==
         EXTERNAL_FLOAT_ARRAY_TYPE;
}

bool MaybeObject::IsFailure() {
  return HAS_FAILURE_TAG(this);
}


bool MaybeObject::IsRetryAfterGC() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC;
}


bool MaybeObject::IsOutOfMemory() {
  return HAS_FAILURE_TAG(this)
      && Failure::cast(this)->IsOutOfMemoryException();
}


bool MaybeObject::IsException() {
  return this == Failure::Exception();
}


bool MaybeObject::IsTheHole() {
  return this == Heap::the_hole_value();
}


Failure* Failure::cast(MaybeObject* obj) {
  ASSERT(HAS_FAILURE_TAG(obj));
  return reinterpret_cast<Failure*>(obj);
}


bool Object::IsJSObject() {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}


bool Object::IsJSContextExtensionObject() {
  return IsHeapObject()
      && (HeapObject::cast(this)->map()->instance_type() ==
          JS_CONTEXT_EXTENSION_OBJECT_TYPE);
}


bool Object::IsMap() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == MAP_TYPE;
}


bool Object::IsFixedArray() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == FIXED_ARRAY_TYPE;
}


bool Object::IsDescriptorArray() {
  return IsFixedArray();
}


bool Object::IsDeoptimizationInputData() {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 &&
      length % DeoptimizationInputData::kDeoptEntrySize == 0;
}


bool Object::IsDeoptimizationOutputData() {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}


bool Object::IsContext() {
  return Object::IsHeapObject()
      && (HeapObject::cast(this)->map() == Heap::context_map() ||
          HeapObject::cast(this)->map() == Heap::catch_context_map() ||
          HeapObject::cast(this)->map() == Heap::global_context_map());
}


bool Object::IsCatchContext() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map() == Heap::catch_context_map();
}


bool Object::IsGlobalContext() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map() == Heap::global_context_map();
}


bool Object::IsJSFunction() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE;
}


template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


bool Object::IsCode() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE;
}


bool Object::IsOddball() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
}


bool Object::IsJSGlobalPropertyCell() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type()
      == JS_GLOBAL_PROPERTY_CELL_TYPE;
}


bool Object::IsSharedFunctionInfo() {
  return Object::IsHeapObject() &&
         (HeapObject::cast(this)->map()->instance_type() ==
          SHARED_FUNCTION_INFO_TYPE);
}

bool Object::IsJSValue() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_VALUE_TYPE;
}


bool Object::IsStringWrapper() {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


bool Object::IsProxy() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == PROXY_TYPE;
}


bool Object::IsBoolean() {
  return IsTrue() || IsFalse();
}


bool Object::IsJSArray() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE;
}


bool Object::IsJSRegExp() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE;
}


template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}


bool Object::IsHashTable() {
  return Object::IsHeapObject()
      && HeapObject::cast(this)->map() == Heap::hash_table_map();
}


bool Object::IsDictionary() {
  return IsHashTable() && this != Heap::symbol_table();
}


bool Object::IsSymbolTable() {
  return IsHashTable() && this == Heap::raw_unchecked_symbol_table();
}


bool Object::IsJSFunctionResultCache() {
  if (!IsFixedArray()) return false;
  FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef DEBUG
  reinterpret_cast<JSFunctionResultCache*>(this)->JSFunctionResultCacheVerify();
#endif
  return true;
}


bool Object::IsNormalizedMapCache() {
  if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef DEBUG
  reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
#endif
  return true;
}


bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() {
  return IsHashTable();
}


bool Object::IsMapCache() {
  return IsHashTable();
}


bool Object::IsPrimitive() {
  return IsOddball() || IsNumber() || IsString();
}


bool Object::IsJSGlobalProxy() {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  ASSERT(!result || IsAccessCheckNeeded());
  return result;
}


bool Object::IsGlobalObject() {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}


bool Object::IsJSGlobalObject() {
  return IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       JS_GLOBAL_OBJECT_TYPE);
}


bool Object::IsJSBuiltinsObject() {
  return IsHeapObject() &&
      (HeapObject::cast(this)->map()->instance_type() ==
       JS_BUILTINS_OBJECT_TYPE);
}


bool Object::IsUndetectableObject() {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_undetectable();
}


bool Object::IsAccessCheckNeeded() {
  return IsHeapObject()
      && HeapObject::cast(this)->map()->is_access_check_needed();
}


bool Object::IsStruct() {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                         \
  bool Object::Is##Name() {                                             \
    return Object::IsHeapObject()                                       \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE

bool Object::IsUndefined() {
  return this == Heap::undefined_value();
}


bool Object::IsNull() {
  return this == Heap::null_value();
}


bool Object::IsTrue() {
  return this == Heap::true_value();
}


bool Object::IsFalse() {
  return this == Heap::false_value();
}


bool Object::IsArgumentsMarker() {
  return this == Heap::arguments_marker();
}


double Object::Number() {
  ASSERT(IsNumber());
  return IsSmi()
      ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
      : reinterpret_cast<HeapNumber*>(this)->value();
}


MaybeObject* Object::ToSmi() {
  if (IsSmi()) return this;
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    int int_value = FastD2I(value);
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return Smi::FromInt(int_value);
    }
  }
  return Failure::Exception();
}

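// For example, ToSmi() on a HeapNumber holding 3.0 yields Smi::FromInt(3),
// while 3.5 (not an integer) or a value outside the smi range fails the
// round-trip check above and returns Failure::Exception().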

bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


MaybeObject* Object::GetElement(uint32_t index) {
  return GetElementWithReceiver(this, index);
}


Object* Object::GetElementNoExceptionThrown(uint32_t index) {
  MaybeObject* maybe = GetElementWithReceiver(this, index);
  ASSERT(!maybe->IsFailure());
  Object* result = NULL;  // Initialization to please compiler.
  maybe->ToObject(&result);
  return result;
}


MaybeObject* Object::GetProperty(String* key) {
  PropertyAttributes attributes;
  return GetPropertyWithReceiver(this, key, &attributes);
}


MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
  return GetPropertyWithReceiver(this, key, attributes);
}


#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)


#define WRITE_BARRIER(object, offset) \
  Heap::RecordWrite(object->address(), offset);

// CONDITIONAL_WRITE_BARRIER must be issued after the actual
// write due to the assert validating the written value.
#define CONDITIONAL_WRITE_BARRIER(object, offset, mode) \
  if (mode == UPDATE_WRITE_BARRIER) { \
    Heap::RecordWrite(object->address(), offset); \
  } else { \
    ASSERT(mode == SKIP_WRITE_BARRIER); \
    ASSERT(Heap::InNewSpace(object) || \
           !Heap::InNewSpace(READ_FIELD(object, offset)) || \
           Page::FromAddress(object->address())-> \
               IsRegionDirty(object->address() + offset)); \
  }

#define READ_DOUBLE_FIELD(p, offset) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))

#define WRITE_DOUBLE_FIELD(p, offset, value) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

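// For illustration (assuming the usual V8 tagging scheme where kHeapObjectTag
// is 1): for a HeapObject whose tagged pointer is p, FIELD_ADDR(p, kMapOffset)
// is the untagged address p - 1 + kMapOffset, so READ_FIELD and WRITE_FIELD
// operate on the raw word stored at that location.
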
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return &READ_FIELD(obj, byte_offset);
}


int Smi::value() {
  return Internals::SmiValue(this);
}


Smi* Smi::FromInt(int value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  intptr_t tagged_value =
      (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
  return reinterpret_cast<Smi*>(tagged_value);
}


Smi* Smi::FromIntptr(intptr_t value) {
  ASSERT(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}

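// Worked example (assuming the 32-bit layout: kSmiTag == 0, kSmiTagSize == 1,
// kSmiShiftSize == 0): Smi::FromInt(5) produces the word (5 << 1) | 0 == 0xA,
// whose low tag bit is 0, so HAS_SMI_TAG() recognizes it and Smi::value()
// recovers 5 by shifting the tag back out.
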
Failure::Type Failure::type() const {
  return static_cast<Type>(value() & kFailureTypeTagMask);
}


bool Failure::IsInternalError() const {
  return type() == INTERNAL_ERROR;
}


bool Failure::IsOutOfMemoryException() const {
  return type() == OUT_OF_MEMORY_EXCEPTION;
}


AllocationSpace Failure::allocation_space() const {
  ASSERT_EQ(RETRY_AFTER_GC, type());
  return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize)
                                      & kSpaceTagMask);
}


Failure* Failure::InternalError() {
  return Construct(INTERNAL_ERROR);
}


Failure* Failure::Exception() {
  return Construct(EXCEPTION);
}


Failure* Failure::OutOfMemoryException() {
  return Construct(OUT_OF_MEMORY_EXCEPTION);
}


intptr_t Failure::value() const {
  return static_cast<intptr_t>(
      reinterpret_cast<uintptr_t>(this) >> kFailureTagSize);
}


Failure* Failure::RetryAfterGC() {
  return RetryAfterGC(NEW_SPACE);
}


Failure* Failure::RetryAfterGC(AllocationSpace space) {
  ASSERT((space & ~kSpaceTagMask) == 0);
  return Construct(RETRY_AFTER_GC, space);
}


Failure* Failure::Construct(Type type, intptr_t value) {
  uintptr_t info =
      (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type;
  ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
  return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
}

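// Layout sketch of a Failure word, as implied by Construct() and the
// accessors above: the low kFailureTagSize bits hold kFailureTag, the next
// kFailureTypeTagSize bits hold the Type, and the remaining high bits hold
// the payload (e.g. the allocation space for RETRY_AFTER_GC). HAS_FAILURE_TAG
// can therefore classify a MaybeObject from its low bits alone, without
// dereferencing it.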

bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG
  bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif

#ifdef V8_TARGET_ARCH_X64
  // To be representable as a long smi, the value must be a 32-bit integer.
  bool result = (value == static_cast<int32_t>(value));
#else
  // To be representable as a tagged small integer, the two
  // most-significant bits of 'value' must be either 00 or 11 due to
  // sign-extension. To check this we add 01 to the two
  // most-significant bits, and check if the most-significant bit is 0.
  //
  // CAUTION: The original code below:
  // bool result = ((value + 0x40000000) & 0x80000000) == 0;
  // may lead to incorrect results according to the C language spec, and
  // in fact doesn't work correctly with gcc4.1.1 in some cases: the
  // compiler may produce undefined results in case of signed integer
  // overflow. The computation must be done with unsigned ints.
  bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
#endif
  ASSERT(result == in_range);
  return result;
}

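// Worked example for the 32-bit branch: 0x3FFFFFFF + 0x40000000 == 0x7FFFFFFF,
// which is below 0x80000000, so 0x3FFFFFFF is a valid smi; 0x40000000 sums to
// exactly 0x80000000 and is rejected, matching kMaxValue == 2^30 - 1 on
// 32-bit targets.
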
MapWord MapWord::FromMap(Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  ASSERT(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}


bool MapWord::IsMarked() {
  return (value_ & kMarkingMask) == 0;
}


void MapWord::SetMark() {
  value_ &= ~kMarkingMask;
}


void MapWord::ClearMark() {
  value_ |= kMarkingMask;
}


bool MapWord::IsOverflowed() {
  return (value_ & kOverflowMask) != 0;
}


void MapWord::SetOverflow() {
  value_ |= kOverflowMask;
}


void MapWord::ClearOverflow() {
  value_ &= ~kOverflowMask;
}


MapWord MapWord::EncodeAddress(Address map_address, int offset) {
  // Offset is the distance in live bytes from the first live object in the
  // same page. The offset between two objects in the same page should not
  // exceed the object area size of a page.
  ASSERT(0 <= offset && offset < Page::kObjectAreaSize);

  uintptr_t compact_offset = offset >> kObjectAlignmentBits;
  ASSERT(compact_offset < (1 << kForwardingOffsetBits));

  Page* map_page = Page::FromAddress(map_address);
  ASSERT_MAP_PAGE_INDEX(map_page->mc_page_index);

  uintptr_t map_page_offset =
      map_page->Offset(map_address) >> kMapAlignmentBits;

  uintptr_t encoding =
      (compact_offset << kForwardingOffsetShift) |
      (map_page_offset << kMapPageOffsetShift) |
      (map_page->mc_page_index << kMapPageIndexShift);
  return MapWord(encoding);
}

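// In other words, the encoded map word packs three bit fields: the
// alignment-compressed forwarding offset at kForwardingOffsetShift, the map's
// offset within its page at kMapPageOffsetShift, and the map page index at
// kMapPageIndexShift; DecodeMapAddress() and DecodeOffset() below simply
// reverse this packing.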

Address MapWord::DecodeMapAddress(MapSpace* map_space) {
  int map_page_index =
      static_cast<int>((value_ & kMapPageIndexMask) >> kMapPageIndexShift);
  ASSERT_MAP_PAGE_INDEX(map_page_index);

  int map_page_offset = static_cast<int>(
      ((value_ & kMapPageOffsetMask) >> kMapPageOffsetShift) <<
      kMapAlignmentBits);

  return (map_space->PageAddress(map_page_index) + map_page_offset);
}


int MapWord::DecodeOffset() {
  // The offset field is represented in the kForwardingOffsetBits
  // most-significant bits.
  uintptr_t offset = (value_ >> kForwardingOffsetShift) << kObjectAlignmentBits;
  ASSERT(offset < static_cast<uintptr_t>(Page::kObjectAreaSize));
  return static_cast<int>(offset);
}


MapWord MapWord::FromEncodedAddress(Address address) {
  return MapWord(reinterpret_cast<uintptr_t>(address));
}


Address MapWord::ToEncodedAddress() {
  return reinterpret_cast<Address>(value_);
}


#ifdef DEBUG
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  ASSERT(READ_FIELD(this, offset)->IsSmi());
}
#endif


Map* HeapObject::map() {
  return map_word().ToMap();
}


void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


MapWord HeapObject::map_word() {
  return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  // WRITE_FIELD does not invoke the write barrier, but no barrier is needed
  // here.
  WRITE_FIELD(this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


HeapObject* HeapObject::FromAddress(Address address) {
  ASSERT_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


int HeapObject::Size() {
  return SizeFromMap(map());
}


void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


bool HeapObject::IsMarked() {
  return map_word().IsMarked();
}


void HeapObject::SetMark() {
  ASSERT(!IsMarked());
  MapWord first_word = map_word();
  first_word.SetMark();
  set_map_word(first_word);
}


void HeapObject::ClearMark() {
  ASSERT(IsMarked());
  MapWord first_word = map_word();
  first_word.ClearMark();
  set_map_word(first_word);
}


bool HeapObject::IsOverflowed() {
  return map_word().IsOverflowed();
}


void HeapObject::SetOverflow() {
  MapWord first_word = map_word();
  first_word.SetOverflow();
  set_map_word(first_word);
}


void HeapObject::ClearOverflow() {
  ASSERT(IsOverflowed());
  MapWord first_word = map_word();
  first_word.ClearOverflow();
  set_map_word(first_word);
}


double HeapNumber::value() {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}

ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


HeapObject* JSObject::elements() {
  Object* array = READ_FIELD(this, kElementsOffset);
  // In the assert below Dictionary is covered under FixedArray.
  ASSERT(array->IsFixedArray() || array->IsPixelArray() ||
         array->IsExternalArray());
  return reinterpret_cast<HeapObject*>(array);
}


void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) {
  ASSERT(map()->has_fast_elements() ==
         (value->map() == Heap::fixed_array_map() ||
          value->map() == Heap::fixed_cow_array_map()));
  // In the assert below Dictionary is covered under FixedArray.
  ASSERT(value->IsFixedArray() || value->IsPixelArray() ||
         value->IsExternalArray());
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(this, kElementsOffset, mode);
}


void JSObject::initialize_properties() {
  ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, Heap::empty_fixed_array());
}


void JSObject::initialize_elements() {
  ASSERT(map()->has_fast_elements());
  ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
  WRITE_FIELD(this, kElementsOffset, Heap::empty_fixed_array());
}


MaybeObject* JSObject::ResetElements() {
  Object* obj;
  { MaybeObject* maybe_obj = map()->GetFastElementsMap();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  set_map(Map::cast(obj));
  initialize_elements();
  return this;
}


ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


Object* JSGlobalPropertyCell::value() {
  return READ_FIELD(this, kValueOffset);
}


void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  ASSERT(!val->IsJSGlobalPropertyCell());
  WRITE_FIELD(this, kValueOffset, val);
}


int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_ARRAY_TYPE:
      return JSValue::kSize;
    case JS_REGEXP_TYPE:
      return JSValue::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    default:
      UNREACHABLE();
      return 0;
  }
}


int JSObject::GetInternalFieldCount() {
  ASSERT(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}


Object* JSObject::GetInternalField(int index) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


void JSObject::SetInternalField(int index, Object* value) {
  ASSERT(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}


// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::FastPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    return READ_FIELD(this, offset);
  } else {
    ASSERT(index < properties()->length());
    return properties()->get(index);
  }
}
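
// For example, with map()->inobject_properties() == 4, index 2 becomes -2 and
// the value is read from the object itself at instance_size() - 2 *
// kPointerSize, whereas index 5 becomes 1 and is read from slot 1 of the
// out-of-line properties() array. FastPropertyAtPut() below mirrors this for
// stores.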

Object* JSObject::FastPropertyAtPut(int index, Object* value) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  if (index < 0) {
    int offset = map()->instance_size() + (index * kPointerSize);
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(this, offset);
  } else {
    ASSERT(index < properties()->length());
    properties()->set(index, value);
  }
  return value;
}


Object* JSObject::InObjectPropertyAt(int index) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  return READ_FIELD(this, offset);
}


Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  index -= map()->inobject_properties();
  ASSERT(index < 0);
  int offset = map()->instance_size() + (index * kPointerSize);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(this, offset, mode);
  return value;
}


void JSObject::InitializeBody(int object_size, Object* value) {
  ASSERT(!value->IsHeapObject() || !Heap::InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


bool JSObject::HasFastProperties() {
  return !properties()->IsDictionary();
}


int JSObject::MaxFastProperties() {
  // Allow extra fast properties if the object has more than
  // kMaxFastProperties in-object properties. When this is the case,
  // it is very unlikely that the object is being used as a dictionary
  // and there is a good chance that allowing more map transitions
  // will be worth it.
  return Max(map()->inobject_properties(), kMaxFastProperties);
}


void Struct::InitializeBody(int object_size) {
  Object* value = Heap::undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}


bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}


bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= (uint32_t)str->length()) return false;

  return true;
}


Object* FixedArray::get(int index) {
  ASSERT(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}


void FixedArray::set(int index, Smi* value) {
  ASSERT(map() != Heap::fixed_cow_array_map());
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set(int index, Object* value) {
  ASSERT(map() != Heap::fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(this, offset);
}


WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
  if (Heap::InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}

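// Typical usage sketch: code that performs many stores first takes an
// AssertNoAllocation scope, asks the target object for its barrier mode once,
// and then passes that mode to the setters so stores into new-space objects
// can skip the barrier:
//
//   AssertNoAllocation no_gc;
//   WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
//   array->set(i, value, mode);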

void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  ASSERT(map() != Heap::fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(this, offset, mode);
}


void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
  ASSERT(array->map() != Heap::raw_unchecked_fixed_cow_array_map());
  ASSERT(index >= 0 && index < array->length());
  ASSERT(!Heap::InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}


void FixedArray::set_undefined(int index) {
  ASSERT(map() != Heap::fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!Heap::InNewSpace(Heap::undefined_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
              Heap::undefined_value());
}


void FixedArray::set_null(int index) {
  ASSERT(map() != Heap::fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!Heap::InNewSpace(Heap::null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value());
}


void FixedArray::set_the_hole(int index) {
  ASSERT(map() != Heap::fixed_cow_array_map());
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!Heap::InNewSpace(Heap::the_hole_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::the_hole_value());
}


void FixedArray::set_unchecked(int index, Smi* value) {
  ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


void FixedArray::set_unchecked(int index,
                               Object* value,
                               WriteBarrierMode mode) {
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(this, offset, mode);
}


void FixedArray::set_null_unchecked(int index) {
  ASSERT(index >= 0 && index < this->length());
  ASSERT(!Heap::InNewSpace(Heap::null_value()));
  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value());
}


Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}


bool DescriptorArray::IsEmpty() {
  ASSERT(this == Heap::empty_descriptor_array() ||
         this->length() > 2);
  return this == Heap::empty_descriptor_array();
}


void DescriptorArray::fast_swap(FixedArray* array, int first, int second) {
  Object* tmp = array->get(first);
  fast_set(array, first, array->get(second));
  fast_set(array, second, tmp);
}


int DescriptorArray::Search(String* name) {
  SLOW_ASSERT(IsSortedNoDuplicates());

  // Check for empty descriptor array.
  int nof = number_of_descriptors();
  if (nof == 0) return kNotFound;

  // Fast case: do linear search for small arrays.
  const int kMaxElementsForLinearSearch = 8;
  if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
    return LinearSearch(name, nof);
  }

  // Slow case: perform binary search.
  return BinarySearch(name, 0, nof - 1);
}


int DescriptorArray::SearchWithCache(String* name) {
  int number = DescriptorLookupCache::Lookup(this, name);
  if (number == DescriptorLookupCache::kAbsent) {
    number = Search(name);
    DescriptorLookupCache::Update(this, name, number);
  }
  return number;
}


String* DescriptorArray::GetKey(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return String::cast(get(ToKeyIndex(descriptor_number)));
}


Object* DescriptorArray::GetValue(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return GetContentArray()->get(ToValueIndex(descriptor_number));
}


Smi* DescriptorArray::GetDetails(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
}


PropertyType DescriptorArray::GetType(int descriptor_number) {
  ASSERT(descriptor_number < number_of_descriptors());
  return PropertyDetails(GetDetails(descriptor_number)).type();
}


int DescriptorArray::GetFieldIndex(int descriptor_number) {
  return Descriptor::IndexFromValue(GetValue(descriptor_number));
}


JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) {
  return JSFunction::cast(GetValue(descriptor_number));
}

Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}


AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  ASSERT(GetType(descriptor_number) == CALLBACKS);
  Proxy* p = Proxy::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->proxy());
}


bool DescriptorArray::IsProperty(int descriptor_number) {
  return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE;
}


bool DescriptorArray::IsTransition(int descriptor_number) {
  PropertyType t = GetType(descriptor_number);
  return t == MAP_TRANSITION || t == CONSTANT_TRANSITION;
}


bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
  return GetType(descriptor_number) == NULL_DESCRIPTOR;
}


bool DescriptorArray::IsDontEnum(int descriptor_number) {
  return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
}


void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(GetKey(descriptor_number),
             GetValue(descriptor_number),
             GetDetails(descriptor_number));
}


void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  ASSERT(descriptor_number < number_of_descriptors());

  // Make sure none of the elements in desc are in new space.
  ASSERT(!Heap::InNewSpace(desc->GetKey()));
  ASSERT(!Heap::InNewSpace(desc->GetValue()));

  fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey());
  FixedArray* content_array = GetContentArray();
  fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue());
  fast_set(content_array, ToDetailsIndex(descriptor_number),
           desc->GetDetails().AsSmi());
}


void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int src_index) {
  Descriptor desc;
  src->Get(src_index, &desc);
  Set(index, &desc);
}


void DescriptorArray::Swap(int first, int second) {
  fast_swap(this, ToKeyIndex(first), ToKeyIndex(second));
  FixedArray* content_array = GetContentArray();
  fast_swap(content_array, ToValueIndex(first), ToValueIndex(second));
  fast_swap(content_array, ToDetailsIndex(first), ToDetailsIndex(second));
}


bool NumberDictionary::requires_slow_elements() {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return false;
  return 0 !=
      (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}

uint32_t NumberDictionary::max_number_key() {
  ASSERT(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}

void NumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}


// ------------------------------------
// Cast operations


CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(SymbolTable)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(MapCache)
CAST_ACCESSOR(String)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqAsciiString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalAsciiString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(HeapNumber)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(JSGlobalPropertyCell)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(Proxy)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(PixelArray)
CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalByteArray)
CAST_ACCESSOR(ExternalUnsignedByteArray)
CAST_ACCESSOR(ExternalShortArray)
CAST_ACCESSOR(ExternalUnsignedShortArray)
CAST_ACCESSOR(ExternalIntArray)
CAST_ACCESSOR(ExternalUnsignedIntArray)
CAST_ACCESSOR(ExternalFloatArray)
CAST_ACCESSOR(Struct)


#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST

1767
1768template <typename Shape, typename Key>
1769HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
1770 ASSERT(obj->IsHashTable());
1771 return reinterpret_cast<HashTable*>(obj);
1772}
1773
1774
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001775SMI_ACCESSORS(FixedArray, length, kLengthOffset)
1776SMI_ACCESSORS(ByteArray, length, kLengthOffset)
1777
1778INT_ACCESSORS(PixelArray, length, kLengthOffset)
1779INT_ACCESSORS(ExternalArray, length, kLengthOffset)
Steve Blocka7e24c12009-10-30 11:49:00 +00001780
1781
Steve Block6ded16b2010-05-10 14:33:55 +01001782SMI_ACCESSORS(String, length, kLengthOffset)
Steve Blockd0582a62009-12-15 09:54:21 +00001783
1784
1785uint32_t String::hash_field() {
1786 return READ_UINT32_FIELD(this, kHashFieldOffset);
1787}
1788
1789
1790void String::set_hash_field(uint32_t value) {
1791 WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001792#if V8_HOST_ARCH_64_BIT
1793 WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
1794#endif
Steve Blockd0582a62009-12-15 09:54:21 +00001795}
1796
1797
Steve Blocka7e24c12009-10-30 11:49:00 +00001798bool String::Equals(String* other) {
1799 if (other == this) return true;
1800 if (StringShape(this).IsSymbol() && StringShape(other).IsSymbol()) {
1801 return false;
1802 }
1803 return SlowEquals(other);
1804}
1805
1806
John Reck59135872010-11-02 12:39:01 -07001807MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
Leon Clarkef7060e22010-06-03 12:02:55 +01001808 if (!StringShape(this).IsCons()) return this;
1809 ConsString* cons = ConsString::cast(this);
1810 if (cons->second()->length() == 0) return cons->first();
Steve Block6ded16b2010-05-10 14:33:55 +01001811 return SlowTryFlatten(pretenure);
Steve Blocka7e24c12009-10-30 11:49:00 +00001812}
1813
1814
Leon Clarkef7060e22010-06-03 12:02:55 +01001815String* String::TryFlattenGetString(PretenureFlag pretenure) {
John Reck59135872010-11-02 12:39:01 -07001816 MaybeObject* flat = TryFlatten(pretenure);
1817 Object* successfully_flattened;
1818 if (flat->ToObject(&successfully_flattened)) {
1819 return String::cast(successfully_flattened);
1820 }
1821 return this;
Leon Clarkef7060e22010-06-03 12:02:55 +01001822}
1823
1824
Steve Blocka7e24c12009-10-30 11:49:00 +00001825uint16_t String::Get(int index) {
1826 ASSERT(index >= 0 && index < length());
1827 switch (StringShape(this).full_representation_tag()) {
1828 case kSeqStringTag | kAsciiStringTag:
1829 return SeqAsciiString::cast(this)->SeqAsciiStringGet(index);
1830 case kSeqStringTag | kTwoByteStringTag:
1831 return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
1832 case kConsStringTag | kAsciiStringTag:
1833 case kConsStringTag | kTwoByteStringTag:
1834 return ConsString::cast(this)->ConsStringGet(index);
Steve Blocka7e24c12009-10-30 11:49:00 +00001835 case kExternalStringTag | kAsciiStringTag:
1836 return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
1837 case kExternalStringTag | kTwoByteStringTag:
1838 return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
1839 default:
1840 break;
1841 }
1842
1843 UNREACHABLE();
1844 return 0;
1845}
1846
1847
1848void String::Set(int index, uint16_t value) {
1849 ASSERT(index >= 0 && index < length());
1850 ASSERT(StringShape(this).IsSequential());
1851
1852 return this->IsAsciiRepresentation()
1853 ? SeqAsciiString::cast(this)->SeqAsciiStringSet(index, value)
1854 : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
1855}
1856
1857
1858bool String::IsFlat() {
1859 switch (StringShape(this).representation_tag()) {
1860 case kConsStringTag: {
1861 String* second = ConsString::cast(this)->second();
1862 // Only flattened strings have second part empty.
1863 return second->length() == 0;
1864 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001865 default:
1866 return true;
1867 }
1868}
1869
1870
1871uint16_t SeqAsciiString::SeqAsciiStringGet(int index) {
1872 ASSERT(index >= 0 && index < length());
1873 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
1874}
1875
1876
1877void SeqAsciiString::SeqAsciiStringSet(int index, uint16_t value) {
1878 ASSERT(index >= 0 && index < length() && value <= kMaxAsciiCharCode);
1879 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
1880 static_cast<byte>(value));
1881}
1882
1883
1884Address SeqAsciiString::GetCharsAddress() {
1885 return FIELD_ADDR(this, kHeaderSize);
1886}
1887
1888
1889char* SeqAsciiString::GetChars() {
1890 return reinterpret_cast<char*>(GetCharsAddress());
1891}
1892
1893
1894Address SeqTwoByteString::GetCharsAddress() {
1895 return FIELD_ADDR(this, kHeaderSize);
1896}
1897
1898
1899uc16* SeqTwoByteString::GetChars() {
1900 return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
1901}
1902
1903
1904uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
1905 ASSERT(index >= 0 && index < length());
1906 return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
1907}
1908
1909
1910void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
1911 ASSERT(index >= 0 && index < length());
1912 WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
1913}
1914
1915
1916int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
Steve Block6ded16b2010-05-10 14:33:55 +01001917 return SizeFor(length());
Steve Blocka7e24c12009-10-30 11:49:00 +00001918}
1919
1920
1921int SeqAsciiString::SeqAsciiStringSize(InstanceType instance_type) {
Steve Block6ded16b2010-05-10 14:33:55 +01001922 return SizeFor(length());
Steve Blocka7e24c12009-10-30 11:49:00 +00001923}
1924
1925
1926String* ConsString::first() {
1927 return String::cast(READ_FIELD(this, kFirstOffset));
1928}
1929
1930
1931Object* ConsString::unchecked_first() {
1932 return READ_FIELD(this, kFirstOffset);
1933}
1934
1935
1936void ConsString::set_first(String* value, WriteBarrierMode mode) {
1937 WRITE_FIELD(this, kFirstOffset, value);
1938 CONDITIONAL_WRITE_BARRIER(this, kFirstOffset, mode);
1939}
1940
1941
1942String* ConsString::second() {
1943 return String::cast(READ_FIELD(this, kSecondOffset));
1944}
1945
1946
1947Object* ConsString::unchecked_second() {
1948 return READ_FIELD(this, kSecondOffset);
1949}
1950
1951
1952void ConsString::set_second(String* value, WriteBarrierMode mode) {
1953 WRITE_FIELD(this, kSecondOffset, value);
1954 CONDITIONAL_WRITE_BARRIER(this, kSecondOffset, mode);
1955}
1956
1957
Steve Blocka7e24c12009-10-30 11:49:00 +00001958ExternalAsciiString::Resource* ExternalAsciiString::resource() {
1959 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
1960}
1961
1962
1963void ExternalAsciiString::set_resource(
1964 ExternalAsciiString::Resource* resource) {
1965 *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
1966}
1967
1968
Steve Blocka7e24c12009-10-30 11:49:00 +00001969ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
1970 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
1971}
1972
1973
1974void ExternalTwoByteString::set_resource(
1975 ExternalTwoByteString::Resource* resource) {
1976 *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
1977}
1978
1979
Steve Block6ded16b2010-05-10 14:33:55 +01001980void JSFunctionResultCache::MakeZeroSize() {
1981 set(kFingerIndex, Smi::FromInt(kEntriesIndex));
1982 set(kCacheSizeIndex, Smi::FromInt(kEntriesIndex));
1983}
1984
1985
1986void JSFunctionResultCache::Clear() {
1987 int cache_size = Smi::cast(get(kCacheSizeIndex))->value();
1988 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08001989 MemsetPointer(entries_start,
1990 Heap::the_hole_value(),
1991 cache_size - kEntriesIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01001992 MakeZeroSize();
1993}
1994
1995
Steve Blocka7e24c12009-10-30 11:49:00 +00001996byte ByteArray::get(int index) {
1997 ASSERT(index >= 0 && index < this->length());
1998 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
1999}
2000
2001
2002void ByteArray::set(int index, byte value) {
2003 ASSERT(index >= 0 && index < this->length());
2004 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
2005}
2006
2007
2008int ByteArray::get_int(int index) {
2009 ASSERT(index >= 0 && (index * kIntSize) < this->length());
2010 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
2011}
2012
2013
2014ByteArray* ByteArray::FromDataStartAddress(Address address) {
2015 ASSERT_TAG_ALIGNED(address);
2016 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
2017}
2018
2019
2020Address ByteArray::GetDataStartAddress() {
2021 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
2022}
2023
2024
2025uint8_t* PixelArray::external_pointer() {
2026 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2027 return reinterpret_cast<uint8_t*>(ptr);
2028}
2029
2030
2031void PixelArray::set_external_pointer(uint8_t* value, WriteBarrierMode mode) {
2032 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2033 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2034}
2035
2036
2037uint8_t PixelArray::get(int index) {
2038 ASSERT((index >= 0) && (index < this->length()));
2039 uint8_t* ptr = external_pointer();
2040 return ptr[index];
2041}
2042
2043
2044void PixelArray::set(int index, uint8_t value) {
2045 ASSERT((index >= 0) && (index < this->length()));
2046 uint8_t* ptr = external_pointer();
2047 ptr[index] = value;
2048}
2049
2050
Steve Block3ce2e202009-11-05 08:53:23 +00002051void* ExternalArray::external_pointer() {
2052 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
2053 return reinterpret_cast<void*>(ptr);
2054}
2055
2056
2057void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
2058 intptr_t ptr = reinterpret_cast<intptr_t>(value);
2059 WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
2060}
2061
2062
2063int8_t ExternalByteArray::get(int index) {
2064 ASSERT((index >= 0) && (index < this->length()));
2065 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2066 return ptr[index];
2067}
2068
2069
2070void ExternalByteArray::set(int index, int8_t value) {
2071 ASSERT((index >= 0) && (index < this->length()));
2072 int8_t* ptr = static_cast<int8_t*>(external_pointer());
2073 ptr[index] = value;
2074}
2075
2076
2077uint8_t ExternalUnsignedByteArray::get(int index) {
2078 ASSERT((index >= 0) && (index < this->length()));
2079 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2080 return ptr[index];
2081}
2082
2083
2084void ExternalUnsignedByteArray::set(int index, uint8_t value) {
2085 ASSERT((index >= 0) && (index < this->length()));
2086 uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
2087 ptr[index] = value;
2088}
2089
2090
2091int16_t ExternalShortArray::get(int index) {
2092 ASSERT((index >= 0) && (index < this->length()));
2093 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2094 return ptr[index];
2095}
2096
2097
2098void ExternalShortArray::set(int index, int16_t value) {
2099 ASSERT((index >= 0) && (index < this->length()));
2100 int16_t* ptr = static_cast<int16_t*>(external_pointer());
2101 ptr[index] = value;
2102}
2103
2104
2105uint16_t ExternalUnsignedShortArray::get(int index) {
2106 ASSERT((index >= 0) && (index < this->length()));
2107 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2108 return ptr[index];
2109}
2110
2111
2112void ExternalUnsignedShortArray::set(int index, uint16_t value) {
2113 ASSERT((index >= 0) && (index < this->length()));
2114 uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
2115 ptr[index] = value;
2116}
2117
2118
2119int32_t ExternalIntArray::get(int index) {
2120 ASSERT((index >= 0) && (index < this->length()));
2121 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2122 return ptr[index];
2123}
2124
2125
2126void ExternalIntArray::set(int index, int32_t value) {
2127 ASSERT((index >= 0) && (index < this->length()));
2128 int32_t* ptr = static_cast<int32_t*>(external_pointer());
2129 ptr[index] = value;
2130}
2131
2132
2133uint32_t ExternalUnsignedIntArray::get(int index) {
2134 ASSERT((index >= 0) && (index < this->length()));
2135 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2136 return ptr[index];
2137}
2138
2139
2140void ExternalUnsignedIntArray::set(int index, uint32_t value) {
2141 ASSERT((index >= 0) && (index < this->length()));
2142 uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
2143 ptr[index] = value;
2144}
2145
2146
2147float ExternalFloatArray::get(int index) {
2148 ASSERT((index >= 0) && (index < this->length()));
2149 float* ptr = static_cast<float*>(external_pointer());
2150 return ptr[index];
2151}
2152
2153
2154void ExternalFloatArray::set(int index, float value) {
2155 ASSERT((index >= 0) && (index < this->length()));
2156 float* ptr = static_cast<float*>(external_pointer());
2157 ptr[index] = value;
2158}
2159
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002160
Iain Merrick9ac36c92010-09-13 15:29:50 +01002161int Map::visitor_id() {
2162 return READ_BYTE_FIELD(this, kVisitorIdOffset);
2163}
2164
2165
2166void Map::set_visitor_id(int id) {
2167 ASSERT(0 <= id && id < 256);
2168 WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
2169}
2170
Steve Block3ce2e202009-11-05 08:53:23 +00002171
Steve Blocka7e24c12009-10-30 11:49:00 +00002172int Map::instance_size() {
2173 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2174}
2175
2176
2177int Map::inobject_properties() {
2178 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
2179}
2180
2181
2182int Map::pre_allocated_property_fields() {
2183 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
2184}
2185
2186
2187int HeapObject::SizeFromMap(Map* map) {
Steve Block791712a2010-08-27 10:21:07 +01002188 int instance_size = map->instance_size();
2189 if (instance_size != kVariableSizeSentinel) return instance_size;
2190 // We can ignore the "symbol" bit becase it is only set for symbols
2191 // and implies a string type.
2192 int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask;
Steve Blocka7e24c12009-10-30 11:49:00 +00002193 // Only inline the most frequent cases.
Steve Blocka7e24c12009-10-30 11:49:00 +00002194 if (instance_type == FIXED_ARRAY_TYPE) {
Iain Merrick75681382010-08-19 15:07:18 +01002195 return FixedArray::BodyDescriptor::SizeOf(map, this);
Steve Blocka7e24c12009-10-30 11:49:00 +00002196 }
Steve Block791712a2010-08-27 10:21:07 +01002197 if (instance_type == ASCII_STRING_TYPE) {
2198 return SeqAsciiString::SizeFor(
2199 reinterpret_cast<SeqAsciiString*>(this)->length());
2200 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002201 if (instance_type == BYTE_ARRAY_TYPE) {
2202 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2203 }
Steve Block791712a2010-08-27 10:21:07 +01002204 if (instance_type == STRING_TYPE) {
2205 return SeqTwoByteString::SizeFor(
2206 reinterpret_cast<SeqTwoByteString*>(this)->length());
2207 }
2208 ASSERT(instance_type == CODE_TYPE);
2209 return reinterpret_cast<Code*>(this)->CodeSize();
Steve Blocka7e24c12009-10-30 11:49:00 +00002210}
2211
2212
2213void Map::set_instance_size(int value) {
2214 ASSERT_EQ(0, value & (kPointerSize - 1));
2215 value >>= kPointerSizeLog2;
2216 ASSERT(0 <= value && value < 256);
2217 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
2218}
2219
2220
2221void Map::set_inobject_properties(int value) {
2222 ASSERT(0 <= value && value < 256);
2223 WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
2224}
2225
2226
2227void Map::set_pre_allocated_property_fields(int value) {
2228 ASSERT(0 <= value && value < 256);
2229 WRITE_BYTE_FIELD(this,
2230 kPreAllocatedPropertyFieldsOffset,
2231 static_cast<byte>(value));
2232}
2233
2234
2235InstanceType Map::instance_type() {
2236 return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
2237}
2238
2239
2240void Map::set_instance_type(InstanceType value) {
2241 ASSERT(0 <= value && value < 256);
2242 WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
2243}
2244
2245
2246int Map::unused_property_fields() {
2247 return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
2248}
2249
2250
2251void Map::set_unused_property_fields(int value) {
2252 WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
2253}
2254
2255
2256byte Map::bit_field() {
2257 return READ_BYTE_FIELD(this, kBitFieldOffset);
2258}
2259
2260
2261void Map::set_bit_field(byte value) {
2262 WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
2263}
2264
2265
2266byte Map::bit_field2() {
2267 return READ_BYTE_FIELD(this, kBitField2Offset);
2268}
2269
2270
2271void Map::set_bit_field2(byte value) {
2272 WRITE_BYTE_FIELD(this, kBitField2Offset, value);
2273}
2274
2275
2276void Map::set_non_instance_prototype(bool value) {
2277 if (value) {
2278 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
2279 } else {
2280 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
2281 }
2282}
2283
2284
2285bool Map::has_non_instance_prototype() {
2286 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
2287}
2288
2289
Steve Block6ded16b2010-05-10 14:33:55 +01002290void Map::set_function_with_prototype(bool value) {
2291 if (value) {
2292 set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
2293 } else {
2294 set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
2295 }
2296}
2297
2298
2299bool Map::function_with_prototype() {
2300 return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
2301}
2302
2303
Steve Blocka7e24c12009-10-30 11:49:00 +00002304void Map::set_is_access_check_needed(bool access_check_needed) {
2305 if (access_check_needed) {
2306 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
2307 } else {
2308 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
2309 }
2310}
2311
2312
2313bool Map::is_access_check_needed() {
2314 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
2315}
2316
2317
Steve Block8defd9f2010-07-08 12:39:36 +01002318void Map::set_is_extensible(bool value) {
2319 if (value) {
2320 set_bit_field2(bit_field2() | (1 << kIsExtensible));
2321 } else {
2322 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
2323 }
2324}
2325
2326bool Map::is_extensible() {
2327 return ((1 << kIsExtensible) & bit_field2()) != 0;
2328}
2329
2330
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002331void Map::set_attached_to_shared_function_info(bool value) {
2332 if (value) {
2333 set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
2334 } else {
2335 set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
2336 }
2337}
2338
2339bool Map::attached_to_shared_function_info() {
2340 return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
2341}
2342
2343
2344void Map::set_is_shared(bool value) {
2345 if (value) {
2346 set_bit_field2(bit_field2() | (1 << kIsShared));
2347 } else {
2348 set_bit_field2(bit_field2() & ~(1 << kIsShared));
2349 }
2350}
2351
2352bool Map::is_shared() {
2353 return ((1 << kIsShared) & bit_field2()) != 0;
2354}
2355
2356
2357JSFunction* Map::unchecked_constructor() {
2358 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
2359}
2360
Steve Block8defd9f2010-07-08 12:39:36 +01002361
Steve Blocka7e24c12009-10-30 11:49:00 +00002362Code::Flags Code::flags() {
2363 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
2364}
2365
2366
2367void Code::set_flags(Code::Flags flags) {
2368 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= (kFlagsKindMask >> kFlagsKindShift)+1);
2369 // Make sure that all call stubs have an arguments count.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002370 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
2371 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
Steve Blocka7e24c12009-10-30 11:49:00 +00002372 ExtractArgumentsCountFromFlags(flags) >= 0);
2373 WRITE_INT_FIELD(this, kFlagsOffset, flags);
2374}
2375
2376
2377Code::Kind Code::kind() {
2378 return ExtractKindFromFlags(flags());
2379}
2380
2381
2382InLoopFlag Code::ic_in_loop() {
2383 return ExtractICInLoopFromFlags(flags());
2384}
2385
2386
2387InlineCacheState Code::ic_state() {
2388 InlineCacheState result = ExtractICStateFromFlags(flags());
2389 // Only allow uninitialized or debugger states for non-IC code
2390 // objects. This is used in the debugger to determine whether or not
2391 // a call to code object has been replaced with a debug break call.
2392 ASSERT(is_inline_cache_stub() ||
2393 result == UNINITIALIZED ||
2394 result == DEBUG_BREAK ||
2395 result == DEBUG_PREPARE_STEP_IN);
2396 return result;
2397}
2398
2399
2400PropertyType Code::type() {
2401 ASSERT(ic_state() == MONOMORPHIC);
2402 return ExtractTypeFromFlags(flags());
2403}
2404
2405
2406int Code::arguments_count() {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002407 ASSERT(is_call_stub() || is_keyed_call_stub() || kind() == STUB);
Steve Blocka7e24c12009-10-30 11:49:00 +00002408 return ExtractArgumentsCountFromFlags(flags());
2409}
2410
2411
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002412int Code::major_key() {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002413 ASSERT(kind() == STUB ||
2414 kind() == BINARY_OP_IC ||
2415 kind() == TYPE_RECORDING_BINARY_OP_IC ||
2416 kind() == COMPARE_IC);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002417 return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
Steve Blocka7e24c12009-10-30 11:49:00 +00002418}
2419
2420
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002421void Code::set_major_key(int major) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002422 ASSERT(kind() == STUB ||
2423 kind() == BINARY_OP_IC ||
2424 kind() == TYPE_RECORDING_BINARY_OP_IC ||
2425 kind() == COMPARE_IC);
Steve Blocka7e24c12009-10-30 11:49:00 +00002426 ASSERT(0 <= major && major < 256);
2427 WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
2428}
2429
2430
Ben Murdochb0fe1622011-05-05 13:52:32 +01002431bool Code::optimizable() {
2432 ASSERT(kind() == FUNCTION);
2433 return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
2434}
2435
2436
2437void Code::set_optimizable(bool value) {
2438 ASSERT(kind() == FUNCTION);
2439 WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
2440}
2441
2442
2443bool Code::has_deoptimization_support() {
2444 ASSERT(kind() == FUNCTION);
2445 return READ_BYTE_FIELD(this, kHasDeoptimizationSupportOffset) == 1;
2446}
2447
2448
2449void Code::set_has_deoptimization_support(bool value) {
2450 ASSERT(kind() == FUNCTION);
2451 WRITE_BYTE_FIELD(this, kHasDeoptimizationSupportOffset, value ? 1 : 0);
2452}
2453
2454
2455int Code::allow_osr_at_loop_nesting_level() {
2456 ASSERT(kind() == FUNCTION);
2457 return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
2458}
2459
2460
2461void Code::set_allow_osr_at_loop_nesting_level(int level) {
2462 ASSERT(kind() == FUNCTION);
2463 ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
2464 WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
2465}
2466
2467
2468unsigned Code::stack_slots() {
2469 ASSERT(kind() == OPTIMIZED_FUNCTION);
2470 return READ_UINT32_FIELD(this, kStackSlotsOffset);
2471}
2472
2473
2474void Code::set_stack_slots(unsigned slots) {
2475 ASSERT(kind() == OPTIMIZED_FUNCTION);
2476 WRITE_UINT32_FIELD(this, kStackSlotsOffset, slots);
2477}
2478
2479
2480unsigned Code::safepoint_table_start() {
2481 ASSERT(kind() == OPTIMIZED_FUNCTION);
2482 return READ_UINT32_FIELD(this, kSafepointTableStartOffset);
2483}
2484
2485
2486void Code::set_safepoint_table_start(unsigned offset) {
2487 ASSERT(kind() == OPTIMIZED_FUNCTION);
2488 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
2489 WRITE_UINT32_FIELD(this, kSafepointTableStartOffset, offset);
2490}
2491
2492
2493unsigned Code::stack_check_table_start() {
2494 ASSERT(kind() == FUNCTION);
2495 return READ_UINT32_FIELD(this, kStackCheckTableStartOffset);
2496}
2497
2498
2499void Code::set_stack_check_table_start(unsigned offset) {
2500 ASSERT(kind() == FUNCTION);
2501 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
2502 WRITE_UINT32_FIELD(this, kStackCheckTableStartOffset, offset);
2503}
2504
2505
2506CheckType Code::check_type() {
2507 ASSERT(is_call_stub() || is_keyed_call_stub());
2508 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset);
2509 return static_cast<CheckType>(type);
2510}
2511
2512
2513void Code::set_check_type(CheckType value) {
2514 ASSERT(is_call_stub() || is_keyed_call_stub());
2515 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value);
2516}
2517
2518
2519byte Code::binary_op_type() {
2520 ASSERT(is_binary_op_stub());
2521 return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
2522}
2523
2524
2525void Code::set_binary_op_type(byte value) {
2526 ASSERT(is_binary_op_stub());
2527 WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
2528}
2529
2530
2531byte Code::type_recording_binary_op_type() {
2532 ASSERT(is_type_recording_binary_op_stub());
2533 return READ_BYTE_FIELD(this, kBinaryOpTypeOffset);
2534}
2535
2536
2537void Code::set_type_recording_binary_op_type(byte value) {
2538 ASSERT(is_type_recording_binary_op_stub());
2539 WRITE_BYTE_FIELD(this, kBinaryOpTypeOffset, value);
2540}
2541
2542
2543byte Code::type_recording_binary_op_result_type() {
2544 ASSERT(is_type_recording_binary_op_stub());
2545 return READ_BYTE_FIELD(this, kBinaryOpReturnTypeOffset);
2546}
2547
2548
2549void Code::set_type_recording_binary_op_result_type(byte value) {
2550 ASSERT(is_type_recording_binary_op_stub());
2551 WRITE_BYTE_FIELD(this, kBinaryOpReturnTypeOffset, value);
2552}
2553
2554
2555byte Code::compare_state() {
2556 ASSERT(is_compare_ic_stub());
2557 return READ_BYTE_FIELD(this, kCompareStateOffset);
2558}
2559
2560
2561void Code::set_compare_state(byte value) {
2562 ASSERT(is_compare_ic_stub());
2563 WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
2564}
2565
2566
Steve Blocka7e24c12009-10-30 11:49:00 +00002567bool Code::is_inline_cache_stub() {
2568 Kind kind = this->kind();
2569 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
2570}
2571
2572
2573Code::Flags Code::ComputeFlags(Kind kind,
2574 InLoopFlag in_loop,
2575 InlineCacheState ic_state,
2576 PropertyType type,
Steve Block8defd9f2010-07-08 12:39:36 +01002577 int argc,
2578 InlineCacheHolderFlag holder) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002579 // Compute the bit mask.
2580 int bits = kind << kFlagsKindShift;
2581 if (in_loop) bits |= kFlagsICInLoopMask;
2582 bits |= ic_state << kFlagsICStateShift;
2583 bits |= type << kFlagsTypeShift;
2584 bits |= argc << kFlagsArgumentsCountShift;
Steve Block8defd9f2010-07-08 12:39:36 +01002585 if (holder == PROTOTYPE_MAP) bits |= kFlagsCacheInPrototypeMapMask;
Steve Blocka7e24c12009-10-30 11:49:00 +00002586 // Cast to flags and validate result before returning it.
2587 Flags result = static_cast<Flags>(bits);
2588 ASSERT(ExtractKindFromFlags(result) == kind);
2589 ASSERT(ExtractICStateFromFlags(result) == ic_state);
2590 ASSERT(ExtractICInLoopFromFlags(result) == in_loop);
2591 ASSERT(ExtractTypeFromFlags(result) == type);
2592 ASSERT(ExtractArgumentsCountFromFlags(result) == argc);
2593 return result;
2594}
2595
2596
2597Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
2598 PropertyType type,
Steve Block8defd9f2010-07-08 12:39:36 +01002599 InlineCacheHolderFlag holder,
Steve Blocka7e24c12009-10-30 11:49:00 +00002600 InLoopFlag in_loop,
2601 int argc) {
Steve Block8defd9f2010-07-08 12:39:36 +01002602 return ComputeFlags(kind, in_loop, MONOMORPHIC, type, argc, holder);
Steve Blocka7e24c12009-10-30 11:49:00 +00002603}
2604
2605
2606Code::Kind Code::ExtractKindFromFlags(Flags flags) {
2607 int bits = (flags & kFlagsKindMask) >> kFlagsKindShift;
2608 return static_cast<Kind>(bits);
2609}
2610
2611
2612InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
2613 int bits = (flags & kFlagsICStateMask) >> kFlagsICStateShift;
2614 return static_cast<InlineCacheState>(bits);
2615}
2616
2617
2618InLoopFlag Code::ExtractICInLoopFromFlags(Flags flags) {
2619 int bits = (flags & kFlagsICInLoopMask);
2620 return bits != 0 ? IN_LOOP : NOT_IN_LOOP;
2621}
2622
2623
2624PropertyType Code::ExtractTypeFromFlags(Flags flags) {
2625 int bits = (flags & kFlagsTypeMask) >> kFlagsTypeShift;
2626 return static_cast<PropertyType>(bits);
2627}
2628
2629
2630int Code::ExtractArgumentsCountFromFlags(Flags flags) {
2631 return (flags & kFlagsArgumentsCountMask) >> kFlagsArgumentsCountShift;
2632}
2633
2634
Steve Block8defd9f2010-07-08 12:39:36 +01002635InlineCacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
2636 int bits = (flags & kFlagsCacheInPrototypeMapMask);
2637 return bits != 0 ? PROTOTYPE_MAP : OWN_MAP;
2638}
2639
2640
Steve Blocka7e24c12009-10-30 11:49:00 +00002641Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
2642 int bits = flags & ~kFlagsTypeMask;
2643 return static_cast<Flags>(bits);
2644}
2645
2646
2647Code* Code::GetCodeFromTargetAddress(Address address) {
2648 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
2649 // GetCodeFromTargetAddress might be called when marking objects during mark
2650 // sweep. reinterpret_cast is therefore used instead of the more appropriate
2651 // Code::cast. Code::cast does not work when the object's map is
2652 // marked.
2653 Code* result = reinterpret_cast<Code*>(code);
2654 return result;
2655}
2656
2657
Steve Block791712a2010-08-27 10:21:07 +01002658Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
2659 return HeapObject::
2660 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
2661}
2662
2663
Steve Blocka7e24c12009-10-30 11:49:00 +00002664Object* Map::prototype() {
2665 return READ_FIELD(this, kPrototypeOffset);
2666}
2667
2668
2669void Map::set_prototype(Object* value, WriteBarrierMode mode) {
2670 ASSERT(value->IsNull() || value->IsJSObject());
2671 WRITE_FIELD(this, kPrototypeOffset, value);
2672 CONDITIONAL_WRITE_BARRIER(this, kPrototypeOffset, mode);
2673}
2674
2675
John Reck59135872010-11-02 12:39:01 -07002676MaybeObject* Map::GetFastElementsMap() {
Steve Block8defd9f2010-07-08 12:39:36 +01002677 if (has_fast_elements()) return this;
John Reck59135872010-11-02 12:39:01 -07002678 Object* obj;
2679 { MaybeObject* maybe_obj = CopyDropTransitions();
2680 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2681 }
Steve Block8defd9f2010-07-08 12:39:36 +01002682 Map* new_map = Map::cast(obj);
2683 new_map->set_has_fast_elements(true);
Iain Merrick75681382010-08-19 15:07:18 +01002684 Counters::map_slow_to_fast_elements.Increment();
Steve Block8defd9f2010-07-08 12:39:36 +01002685 return new_map;
2686}
2687
2688
John Reck59135872010-11-02 12:39:01 -07002689MaybeObject* Map::GetSlowElementsMap() {
Steve Block8defd9f2010-07-08 12:39:36 +01002690 if (!has_fast_elements()) return this;
John Reck59135872010-11-02 12:39:01 -07002691 Object* obj;
2692 { MaybeObject* maybe_obj = CopyDropTransitions();
2693 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2694 }
Steve Block8defd9f2010-07-08 12:39:36 +01002695 Map* new_map = Map::cast(obj);
2696 new_map->set_has_fast_elements(false);
Iain Merrick75681382010-08-19 15:07:18 +01002697 Counters::map_fast_to_slow_elements.Increment();
Steve Block8defd9f2010-07-08 12:39:36 +01002698 return new_map;
2699}
2700
2701
Steve Blocka7e24c12009-10-30 11:49:00 +00002702ACCESSORS(Map, instance_descriptors, DescriptorArray,
2703 kInstanceDescriptorsOffset)
Steve Block6ded16b2010-05-10 14:33:55 +01002704ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
Steve Blocka7e24c12009-10-30 11:49:00 +00002705ACCESSORS(Map, constructor, Object, kConstructorOffset)
2706
2707ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
2708ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
Ben Murdochb0fe1622011-05-05 13:52:32 +01002709ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
Steve Blocka7e24c12009-10-30 11:49:00 +00002710
2711ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
2712ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
2713ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
2714
2715ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
2716
2717ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
2718ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
2719ACCESSORS(AccessorInfo, data, Object, kDataOffset)
2720ACCESSORS(AccessorInfo, name, Object, kNameOffset)
2721ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)
2722
2723ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
2724ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
2725ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)
2726
2727ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
2728ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
2729ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
2730ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
2731ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
2732ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
2733
2734ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
2735ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
2736
2737ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
2738ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
2739
2740ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
2741ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
2742ACCESSORS(FunctionTemplateInfo, property_accessors, Object,
2743 kPropertyAccessorsOffset)
2744ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
2745 kPrototypeTemplateOffset)
2746ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
2747ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
2748 kNamedPropertyHandlerOffset)
2749ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
2750 kIndexedPropertyHandlerOffset)
2751ACCESSORS(FunctionTemplateInfo, instance_template, Object,
2752 kInstanceTemplateOffset)
2753ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
2754ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
2755ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
2756 kInstanceCallHandlerOffset)
2757ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
2758 kAccessCheckInfoOffset)
2759ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)
2760
2761ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
2762ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
2763 kInternalFieldCountOffset)
2764
2765ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
2766ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
2767
2768ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
2769
2770ACCESSORS(Script, source, Object, kSourceOffset)
2771ACCESSORS(Script, name, Object, kNameOffset)
2772ACCESSORS(Script, id, Object, kIdOffset)
2773ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
2774ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
2775ACCESSORS(Script, data, Object, kDataOffset)
2776ACCESSORS(Script, context_data, Object, kContextOffset)
2777ACCESSORS(Script, wrapper, Proxy, kWrapperOffset)
2778ACCESSORS(Script, type, Smi, kTypeOffset)
2779ACCESSORS(Script, compilation_type, Smi, kCompilationTypeOffset)
2780ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
Steve Blockd0582a62009-12-15 09:54:21 +00002781ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
Steve Blocka7e24c12009-10-30 11:49:00 +00002782ACCESSORS(Script, eval_from_instructions_offset, Smi,
2783 kEvalFrominstructionsOffsetOffset)
2784
2785#ifdef ENABLE_DEBUGGER_SUPPORT
2786ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
2787ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
2788ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
2789ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
2790
2791ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
2792ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
2793ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
2794ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
2795#endif
2796
Steve Blocka7e24c12009-10-30 11:49:00 +00002797ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
Steve Block6ded16b2010-05-10 14:33:55 +01002798ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002799ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
Steve Blocka7e24c12009-10-30 11:49:00 +00002800ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
2801 kInstanceClassNameOffset)
Steve Block6ded16b2010-05-10 14:33:55 +01002802ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
Steve Blocka7e24c12009-10-30 11:49:00 +00002803ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
2804ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
2805ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
2806ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
2807 kThisPropertyAssignmentsOffset)
2808
2809BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
2810 kHiddenPrototypeBit)
2811BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
2812BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
2813 kNeedsAccessCheckBit)
2814BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
2815 kIsExpressionBit)
2816BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
2817 kIsTopLevelBit)
2818BOOL_GETTER(SharedFunctionInfo, compiler_hints,
Steve Blocka7e24c12009-10-30 11:49:00 +00002819 has_only_simple_this_property_assignments,
2820 kHasOnlySimpleThisPropertyAssignments)
Steve Blockd0582a62009-12-15 09:54:21 +00002821BOOL_ACCESSORS(SharedFunctionInfo,
2822 compiler_hints,
Leon Clarked91b9f72010-01-27 17:25:45 +00002823 try_full_codegen,
2824 kTryFullCodegen)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002825BOOL_ACCESSORS(SharedFunctionInfo,
2826 compiler_hints,
2827 allows_lazy_compilation,
2828 kAllowLazyCompilation)
Steve Blocka7e24c12009-10-30 11:49:00 +00002829
Iain Merrick75681382010-08-19 15:07:18 +01002830
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002831#if V8_HOST_ARCH_32_BIT
2832SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
2833SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
Steve Blocka7e24c12009-10-30 11:49:00 +00002834 kFormalParameterCountOffset)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002835SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
Steve Blocka7e24c12009-10-30 11:49:00 +00002836 kExpectedNofPropertiesOffset)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002837SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
2838SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
Steve Blocka7e24c12009-10-30 11:49:00 +00002839 kStartPositionAndTypeOffset)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002840SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
2841SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
Steve Blocka7e24c12009-10-30 11:49:00 +00002842 kFunctionTokenPositionOffset)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002843SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
Steve Blocka7e24c12009-10-30 11:49:00 +00002844 kCompilerHintsOffset)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002845SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
Steve Blocka7e24c12009-10-30 11:49:00 +00002846 kThisPropertyAssignmentsCountOffset)
Ben Murdochb0fe1622011-05-05 13:52:32 +01002847SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002848#else
Steve Blocka7e24c12009-10-30 11:49:00 +00002849
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002850#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002851 STATIC_ASSERT(holder::offset % kPointerSize == 0); \
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002852 int holder::name() { \
2853 int value = READ_INT_FIELD(this, offset); \
2854 ASSERT(kHeapObjectTag == 1); \
2855 ASSERT((value & kHeapObjectTag) == 0); \
2856 return value >> 1; \
2857 } \
2858 void holder::set_##name(int value) { \
2859 ASSERT(kHeapObjectTag == 1); \
2860 ASSERT((value & 0xC0000000) == 0xC0000000 || \
2861 (value & 0xC0000000) == 0x000000000); \
2862 WRITE_INT_FIELD(this, \
2863 offset, \
2864 (value << 1) & ~kHeapObjectTag); \
2865 }
2866
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002867#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
2868 STATIC_ASSERT(holder::offset % kPointerSize == kIntSize); \
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002869 INT_ACCESSORS(holder, name, offset)
2870
2871
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002872PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002873PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
2874 formal_parameter_count,
2875 kFormalParameterCountOffset)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002876
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002877PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
2878 expected_nof_properties,
2879 kExpectedNofPropertiesOffset)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002880PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
2881
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002882PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
2883PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
2884 start_position_and_type,
2885 kStartPositionAndTypeOffset)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002886
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002887PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
2888 function_token_position,
2889 kFunctionTokenPositionOffset)
2890PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
2891 compiler_hints,
2892 kCompilerHintsOffset)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002893
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002894PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
2895 this_property_assignments_count,
2896 kThisPropertyAssignmentsCountOffset)
Ben Murdochb0fe1622011-05-05 13:52:32 +01002897PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002898#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002899
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002900
2901int SharedFunctionInfo::construction_count() {
2902 return READ_BYTE_FIELD(this, kConstructionCountOffset);
2903}
2904
2905
2906void SharedFunctionInfo::set_construction_count(int value) {
2907 ASSERT(0 <= value && value < 256);
2908 WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
2909}
2910
2911
2912bool SharedFunctionInfo::live_objects_may_exist() {
2913 return (compiler_hints() & (1 << kLiveObjectsMayExist)) != 0;
2914}
2915
2916
2917void SharedFunctionInfo::set_live_objects_may_exist(bool value) {
2918 if (value) {
2919 set_compiler_hints(compiler_hints() | (1 << kLiveObjectsMayExist));
2920 } else {
2921 set_compiler_hints(compiler_hints() & ~(1 << kLiveObjectsMayExist));
2922 }
2923}
2924
2925
2926bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
2927 return initial_map() != Heap::undefined_value();
2928}
2929
2930
Ben Murdochb0fe1622011-05-05 13:52:32 +01002931bool SharedFunctionInfo::optimization_disabled() {
2932 return BooleanBit::get(compiler_hints(), kOptimizationDisabled);
2933}
2934
2935
2936void SharedFunctionInfo::set_optimization_disabled(bool disable) {
2937 set_compiler_hints(BooleanBit::set(compiler_hints(),
2938 kOptimizationDisabled,
2939 disable));
2940 // If disabling optimizations we reflect that in the code object so
2941 // it will not be counted as optimizable code.
2942 if ((code()->kind() == Code::FUNCTION) && disable) {
2943 code()->set_optimizable(false);
2944 }
2945}
2946
2947
Steve Block6ded16b2010-05-10 14:33:55 +01002948ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
2949ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
2950
Steve Block3ce2e202009-11-05 08:53:23 +00002951bool Script::HasValidSource() {
2952 Object* src = this->source();
2953 if (!src->IsString()) return true;
2954 String* src_str = String::cast(src);
2955 if (!StringShape(src_str).IsExternal()) return true;
2956 if (src_str->IsAsciiRepresentation()) {
2957 return ExternalAsciiString::cast(src)->resource() != NULL;
2958 } else if (src_str->IsTwoByteRepresentation()) {
2959 return ExternalTwoByteString::cast(src)->resource() != NULL;
2960 }
2961 return true;
2962}
2963
2964
Steve Blocka7e24c12009-10-30 11:49:00 +00002965void SharedFunctionInfo::DontAdaptArguments() {
2966 ASSERT(code()->kind() == Code::BUILTIN);
2967 set_formal_parameter_count(kDontAdaptArgumentsSentinel);
2968}
2969
2970
2971int SharedFunctionInfo::start_position() {
2972 return start_position_and_type() >> kStartPositionShift;
2973}
2974
2975
2976void SharedFunctionInfo::set_start_position(int start_position) {
2977 set_start_position_and_type((start_position << kStartPositionShift)
2978 | (start_position_and_type() & ~kStartPositionMask));
2979}
2980
2981
2982Code* SharedFunctionInfo::code() {
2983 return Code::cast(READ_FIELD(this, kCodeOffset));
2984}
2985
2986
Iain Merrick75681382010-08-19 15:07:18 +01002987Code* SharedFunctionInfo::unchecked_code() {
2988 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
2989}
2990
2991
Steve Blocka7e24c12009-10-30 11:49:00 +00002992void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002993 // If optimization has been disabled for the shared function info,
2994 // reflect that in the code object so it will not be counted as
2995 // optimizable code.
2996 ASSERT(value->kind() != Code::FUNCTION ||
2997 !value->optimizable() ||
2998 this->code() == Builtins::builtin(Builtins::Illegal) ||
2999 this->allows_lazy_compilation());
Steve Blocka7e24c12009-10-30 11:49:00 +00003000 WRITE_FIELD(this, kCodeOffset, value);
3001 CONDITIONAL_WRITE_BARRIER(this, kCodeOffset, mode);
3002}
3003
3004
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003005SerializedScopeInfo* SharedFunctionInfo::scope_info() {
3006 return reinterpret_cast<SerializedScopeInfo*>(
3007 READ_FIELD(this, kScopeInfoOffset));
3008}
3009
3010
3011void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
3012 WriteBarrierMode mode) {
3013 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3014 CONDITIONAL_WRITE_BARRIER(this, kScopeInfoOffset, mode);
3015}
3016
3017
Ben Murdochb0fe1622011-05-05 13:52:32 +01003018Smi* SharedFunctionInfo::deopt_counter() {
3019 return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
3020}
3021
3022
3023void SharedFunctionInfo::set_deopt_counter(Smi* value) {
3024 WRITE_FIELD(this, kDeoptCounterOffset, value);
3025}
3026
3027
Steve Blocka7e24c12009-10-30 11:49:00 +00003028bool SharedFunctionInfo::is_compiled() {
Iain Merrick75681382010-08-19 15:07:18 +01003029 return code() != Builtins::builtin(Builtins::LazyCompile);
Steve Blocka7e24c12009-10-30 11:49:00 +00003030}
3031
3032
Steve Block6ded16b2010-05-10 14:33:55 +01003033bool SharedFunctionInfo::IsApiFunction() {
3034 return function_data()->IsFunctionTemplateInfo();
3035}
3036
3037
3038FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3039 ASSERT(IsApiFunction());
3040 return FunctionTemplateInfo::cast(function_data());
3041}
3042
3043
Ben Murdochb0fe1622011-05-05 13:52:32 +01003044bool SharedFunctionInfo::HasBuiltinFunctionId() {
Kristian Monsen25f61362010-05-21 11:50:48 +01003045 return function_data()->IsSmi();
3046}
3047
3048
Ben Murdochb0fe1622011-05-05 13:52:32 +01003049bool SharedFunctionInfo::IsBuiltinMathFunction() {
3050 return HasBuiltinFunctionId() &&
3051 builtin_function_id() >= kFirstMathFunctionId;
3052}
3053
3054
3055BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
3056 ASSERT(HasBuiltinFunctionId());
3057 return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
Steve Blocka7e24c12009-10-30 11:49:00 +00003058}
3059
3060
Iain Merrick75681382010-08-19 15:07:18 +01003061int SharedFunctionInfo::code_age() {
3062 return (compiler_hints() >> kCodeAgeShift) & kCodeAgeMask;
3063}
3064
3065
3066void SharedFunctionInfo::set_code_age(int code_age) {
3067 set_compiler_hints(compiler_hints() |
3068 ((code_age & kCodeAgeMask) << kCodeAgeShift));
3069}
3070
3071
Ben Murdochb0fe1622011-05-05 13:52:32 +01003072bool SharedFunctionInfo::has_deoptimization_support() {
3073 Code* code = this->code();
3074 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
3075}
3076
3077
Steve Blocka7e24c12009-10-30 11:49:00 +00003078bool JSFunction::IsBuiltin() {
3079 return context()->global()->IsJSBuiltinsObject();
3080}
3081
3082
Ben Murdochb0fe1622011-05-05 13:52:32 +01003083bool JSFunction::NeedsArgumentsAdaption() {
3084 return shared()->formal_parameter_count() !=
3085 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3086}
3087
3088
3089bool JSFunction::IsOptimized() {
3090 return code()->kind() == Code::OPTIMIZED_FUNCTION;
3091}
3092
3093
3094bool JSFunction::IsMarkedForLazyRecompilation() {
3095 return code() == Builtins::builtin(Builtins::LazyRecompile);
3096}
3097
3098
Steve Blocka7e24c12009-10-30 11:49:00 +00003099Code* JSFunction::code() {
Steve Block791712a2010-08-27 10:21:07 +01003100 return Code::cast(unchecked_code());
Iain Merrick75681382010-08-19 15:07:18 +01003101}
3102
3103
3104Code* JSFunction::unchecked_code() {
Steve Block791712a2010-08-27 10:21:07 +01003105 return reinterpret_cast<Code*>(
3106 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
Steve Blocka7e24c12009-10-30 11:49:00 +00003107}
3108
3109
3110void JSFunction::set_code(Code* value) {
Iain Merrick75681382010-08-19 15:07:18 +01003111 // Skip the write barrier because code is never in new space.
3112 ASSERT(!Heap::InNewSpace(value));
Steve Block791712a2010-08-27 10:21:07 +01003113 Address entry = value->entry();
3114 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
Steve Blocka7e24c12009-10-30 11:49:00 +00003115}
3116
3117
Ben Murdochb0fe1622011-05-05 13:52:32 +01003118void JSFunction::ReplaceCode(Code* code) {
3119 bool was_optimized = IsOptimized();
3120 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3121
3122 set_code(code);
3123
3124 // Add/remove the function from the list of optimized functions for this
3125 // context based on the state change.
3126 if (!was_optimized && is_optimized) {
3127 context()->global_context()->AddOptimizedFunction(this);
3128 }
3129 if (was_optimized && !is_optimized) {
3130 context()->global_context()->RemoveOptimizedFunction(this);
3131 }
3132}
3133
3134
Steve Blocka7e24c12009-10-30 11:49:00 +00003135Context* JSFunction::context() {
3136 return Context::cast(READ_FIELD(this, kContextOffset));
3137}
3138
3139
3140Object* JSFunction::unchecked_context() {
3141 return READ_FIELD(this, kContextOffset);
3142}
3143
3144
Iain Merrick75681382010-08-19 15:07:18 +01003145SharedFunctionInfo* JSFunction::unchecked_shared() {
3146 return reinterpret_cast<SharedFunctionInfo*>(
3147 READ_FIELD(this, kSharedFunctionInfoOffset));
3148}
3149
3150
Steve Blocka7e24c12009-10-30 11:49:00 +00003151void JSFunction::set_context(Object* value) {
3152 ASSERT(value == Heap::undefined_value() || value->IsContext());
3153 WRITE_FIELD(this, kContextOffset, value);
3154 WRITE_BARRIER(this, kContextOffset);
3155}
3156
3157ACCESSORS(JSFunction, prototype_or_initial_map, Object,
3158 kPrototypeOrInitialMapOffset)
3159
3160
3161Map* JSFunction::initial_map() {
3162 return Map::cast(prototype_or_initial_map());
3163}
3164
3165
3166void JSFunction::set_initial_map(Map* value) {
3167 set_prototype_or_initial_map(value);
3168}
3169
3170
3171bool JSFunction::has_initial_map() {
3172 return prototype_or_initial_map()->IsMap();
3173}
3174
3175
3176bool JSFunction::has_instance_prototype() {
3177 return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
3178}
3179
3180
3181bool JSFunction::has_prototype() {
3182 return map()->has_non_instance_prototype() || has_instance_prototype();
3183}
3184
3185
3186Object* JSFunction::instance_prototype() {
3187 ASSERT(has_instance_prototype());
3188 if (has_initial_map()) return initial_map()->prototype();
3189 // When there is no initial map and the prototype is a JSObject, the
3190 // initial map field is used for the prototype field.
3191 return prototype_or_initial_map();
3192}
3193
3194
3195Object* JSFunction::prototype() {
3196 ASSERT(has_prototype());
3197 // If the function's prototype property has been set to a non-JSObject
3198 // value, that value is stored in the constructor field of the map.
3199 if (map()->has_non_instance_prototype()) return map()->constructor();
3200 return instance_prototype();
3201}
3202
bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


bool JSFunction::is_compiled() {
  return code() != Builtins::builtin(Builtins::LazyCompile);
}


int JSFunction::NumberOfLiterals() {
  return literals()->length();
}


Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  ASSERT(0 <= id && id < kJSBuiltinsCount);
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  ASSERT(0 <= id && id < kJSBuiltinsCount);
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(this, OffsetOfFunctionWithId(id));
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(0 <= id && id < kJSBuiltinsCount);
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(0 <= id && id < kJSBuiltinsCount);
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!Heap::InNewSpace(value));
}


Address Proxy::proxy() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kProxyOffset));
}


void Proxy::set_proxy(Address value) {
  WRITE_INTPTR_FIELD(this, kProxyOffset, OffsetFrom(value));
}


ACCESSORS(JSValue, value, Object, kValueOffset)


JSValue* JSValue::cast(Object* obj) {
  ASSERT(obj->IsJSValue());
  ASSERT(HeapObject::cast(obj)->Size() == JSValue::kSize);
  return reinterpret_cast<JSValue*>(obj);
}


INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)


byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}


int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


FixedArray* Code::unchecked_deoptimization_data() {
  return reinterpret_cast<FixedArray*>(
      READ_FIELD(this, kDeoptimizationDataOffset));
}


ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


byte* Code::entry() {
  return instruction_start();
}


bool Code::contains(byte* pc) {
  return (instruction_start() <= pc) &&
      (pc <= instruction_start() + instruction_size());
}


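// Illustrative sketch of the Code object layout implied by the accessors
// above: the instruction stream starts at kHeaderSize and the relocation
// info lives in a separate ByteArray. The helper below is hypothetical and
// only combines existing accessors.
//
//   void DescribeCodeForExample(Code* code) {
//     byte* begin = code->instruction_start();  // FIELD_ADDR(code, kHeaderSize)
//     byte* end = code->instruction_end();      // begin + instruction_size()
//     int padded = code->body_size();           // instruction_size() rounded up
//     byte* reloc = code->relocation_start();   // data of unchecked_relocation_info()
//     ASSERT(code->contains(begin) && code->contains(end));
//     (void) padded; (void) reloc;
//   }
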
ACCESSORS(JSArray, length, Object, kLengthOffset)


ACCESSORS(JSRegExp, data, Object, kDataOffset)


JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}


JSRegExp::Flags JSRegExp::GetFlags() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


String* JSRegExp::Pattern() {
  ASSERT(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


Object* JSRegExp::DataAt(int index) {
  ASSERT(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}


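// Illustrative sketch: reading the regexp metadata through the accessors
// above. Indices such as kTagIndex, kSourceIndex, kFlagsIndex and
// kIrregexpCaptureCountIndex refer to slots of the FixedArray held in
// data(); the helper name is hypothetical.
//
//   void DumpRegExpForExample(JSRegExp* re) {
//     if (re->TypeTag() == JSRegExp::NOT_COMPILED) return;  // data() unusable
//     String* pattern = re->Pattern();         // data()[kSourceIndex]
//     JSRegExp::Flags flags = re->GetFlags();  // data()[kFlagsIndex]
//     int captures = re->CaptureCount();       // 0 for ATOM, else Irregexp count
//     (void) pattern; (void) flags; (void) captures;
//   }
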
JSObject::ElementsKind JSObject::GetElementsKind() {
  if (map()->has_fast_elements()) {
    ASSERT(elements()->map() == Heap::fixed_array_map() ||
           elements()->map() == Heap::fixed_cow_array_map());
    return FAST_ELEMENTS;
  }
  HeapObject* array = elements();
  if (array->IsFixedArray()) {
    // FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a
    // FixedArray, but FAST_ELEMENTS is already handled above.
    ASSERT(array->IsDictionary());
    return DICTIONARY_ELEMENTS;
  }
  if (array->IsExternalArray()) {
    switch (array->map()->instance_type()) {
      case EXTERNAL_BYTE_ARRAY_TYPE:
        return EXTERNAL_BYTE_ELEMENTS;
      case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
        return EXTERNAL_UNSIGNED_BYTE_ELEMENTS;
      case EXTERNAL_SHORT_ARRAY_TYPE:
        return EXTERNAL_SHORT_ELEMENTS;
      case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
        return EXTERNAL_UNSIGNED_SHORT_ELEMENTS;
      case EXTERNAL_INT_ARRAY_TYPE:
        return EXTERNAL_INT_ELEMENTS;
      case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
        return EXTERNAL_UNSIGNED_INT_ELEMENTS;
      default:
        ASSERT(array->map()->instance_type() == EXTERNAL_FLOAT_ARRAY_TYPE);
        return EXTERNAL_FLOAT_ELEMENTS;
    }
  }
  ASSERT(array->IsPixelArray());
  return PIXEL_ELEMENTS;
}


bool JSObject::HasFastElements() {
  return GetElementsKind() == FAST_ELEMENTS;
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasPixelElements() {
  return GetElementsKind() == PIXEL_ELEMENTS;
}


bool JSObject::HasExternalArrayElements() {
  return (HasExternalByteElements() ||
          HasExternalUnsignedByteElements() ||
          HasExternalShortElements() ||
          HasExternalUnsignedShortElements() ||
          HasExternalIntElements() ||
          HasExternalUnsignedIntElements() ||
          HasExternalFloatElements());
}


bool JSObject::HasExternalByteElements() {
  return GetElementsKind() == EXTERNAL_BYTE_ELEMENTS;
}


bool JSObject::HasExternalUnsignedByteElements() {
  return GetElementsKind() == EXTERNAL_UNSIGNED_BYTE_ELEMENTS;
}


bool JSObject::HasExternalShortElements() {
  return GetElementsKind() == EXTERNAL_SHORT_ELEMENTS;
}


bool JSObject::HasExternalUnsignedShortElements() {
  return GetElementsKind() == EXTERNAL_UNSIGNED_SHORT_ELEMENTS;
}


bool JSObject::HasExternalIntElements() {
  return GetElementsKind() == EXTERNAL_INT_ELEMENTS;
}


bool JSObject::HasExternalUnsignedIntElements() {
  return GetElementsKind() == EXTERNAL_UNSIGNED_INT_ELEMENTS;
}


bool JSObject::HasExternalFloatElements() {
  return GetElementsKind() == EXTERNAL_FLOAT_ELEMENTS;
}


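// Illustrative sketch: dispatching on the elements kind computed above. Only
// the broad categories handled by GetElementsKind() are shown; the helper
// name is hypothetical.
//
//   const char* ElementsKindNameForExample(JSObject* obj) {
//     switch (obj->GetElementsKind()) {
//       case JSObject::FAST_ELEMENTS:       return "fast (FixedArray)";
//       case JSObject::DICTIONARY_ELEMENTS: return "dictionary (NumberDictionary)";
//       case JSObject::PIXEL_ELEMENTS:      return "pixel array";
//       default:                            return "external array";
//     }
//   }
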
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


bool JSObject::AllowsSetElementsLength() {
  bool result = elements()->IsFixedArray();
  ASSERT(result == (!HasPixelElements() && !HasExternalArrayElements()));
  return result;
}


MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastElements());
  FixedArray* elems = FixedArray::cast(elements());
  if (elems->map() != Heap::fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems =
        Heap::CopyFixedArrayWithMap(elems, Heap::fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  Counters::cow_arrays_converted.Increment();
  return writable_elems;
}


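// Illustrative sketch of the copy-on-write protocol enforced by
// EnsureWritableFastElements(): fast elements may share a backing store
// tagged with fixed_cow_array_map(), so a caller that intends to mutate the
// backing store copies it first. The call sequence below is hypothetical and
// assumes a MaybeObject-aware caller.
//
//   Object* writable;
//   { MaybeObject* maybe = obj->EnsureWritableFastElements();
//     if (!maybe->ToObject(&writable)) return maybe;  // allocation failure
//   }
//   FixedArray::cast(writable)->set(0, value);  // now safe to write in place
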
StringDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return StringDictionary::cast(properties());
}


NumberDictionary* JSObject::element_dictionary() {
  ASSERT(HasDictionaryElements());
  return NumberDictionary::cast(elements());
}


bool String::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool String::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


uint32_t String::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it.
  return ComputeAndSetHash();
}


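// Illustrative sketch of how the hash_field() bits are consumed by the
// accessors above: kHashNotComputedMask marks a missing hash,
// kIsNotArrayIndexMask marks strings that cannot be array indices, and the
// cached hash sits in the bits above kHashShift. The helper name is
// hypothetical.
//
//   bool TryGetCachedHashForExample(String* str, uint32_t* out) {
//     uint32_t field = str->hash_field();
//     if (!String::IsHashFieldComputed(field)) return false;  // not cached yet
//     *out = field >> String::kHashShift;
//     return true;
//   }
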
StringHasher::StringHasher(int length)
  : length_(length),
    raw_running_hash_(0),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true),
    is_valid_(true) { }


bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}


void StringHasher::AddCharacter(uc32 c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
  // Incremental array index computation.
  if (is_array_index_) {
    if (c < '0' || c > '9') {
      is_array_index_ = false;
    } else {
      int d = c - '0';
      if (is_first_char_) {
        is_first_char_ = false;
        if (c == '0' && length_ > 1) {
          is_array_index_ = false;
          return;
        }
      }
      if (array_index_ > 429496729U - ((d + 2) >> 3)) {
        is_array_index_ = false;
      } else {
        array_index_ = array_index_ * 10 + d;
      }
    }
  }
}


void StringHasher::AddCharacterNoIndex(uc32 c) {
  ASSERT(!is_array_index());
  raw_running_hash_ += c;
  raw_running_hash_ += (raw_running_hash_ << 10);
  raw_running_hash_ ^= (raw_running_hash_ >> 6);
}


uint32_t StringHasher::GetHash() {
  // Get the calculated raw hash value and do some more bit ops to distribute
  // the hash further. Ensure that we never return zero as the hash value.
  uint32_t result = raw_running_hash_;
  result += (result << 3);
  result ^= (result >> 11);
  result += (result << 15);
  if (result == 0) {
    result = 27;
  }
  return result;
}


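// Illustrative sketch (standalone, not part of V8): the Jenkins
// one-at-a-time hash used by AddCharacter()/GetHash() above, written as a
// single free function over a byte buffer for clarity.
//
//   uint32_t JenkinsOneAtATimeForExample(const char* data, int length) {
//     uint32_t hash = 0;
//     for (int i = 0; i < length; i++) {
//       hash += static_cast<uint32_t>(data[i]);
//       hash += hash << 10;
//       hash ^= hash >> 6;
//     }
//     hash += hash << 3;  // finalization, as in GetHash()
//     hash ^= hash >> 11;
//     hash += hash << 15;
//     return hash == 0 ? 27 : hash;  // never report a zero hash
//   }
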
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}


Object* JSObject::GetPrototype() {
  return JSObject::cast(this)->map()->prototype();
}


PropertyAttributes JSObject::GetPropertyAttribute(String* key) {
  return GetPropertyAttributeWithReceiver(this, key);
}

// TODO(504): this may be useful in other places too where JSGlobalProxy
// is used.
Object* JSObject::BypassGlobalProxy() {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return Heap::undefined_value();
    ASSERT(proto->IsJSGlobalObject());
    return proto;
  }
  return this;
}


bool JSObject::HasHiddenPropertiesObject() {
  ASSERT(!IsJSGlobalProxy());
  return GetPropertyAttributePostInterceptor(this,
                                             Heap::hidden_symbol(),
                                             false) != ABSENT;
}


Object* JSObject::GetHiddenPropertiesObject() {
  ASSERT(!IsJSGlobalProxy());
  PropertyAttributes attributes;
  // You can't install a getter on a property indexed by the hidden symbol,
  // so we can be sure that GetLocalPropertyPostInterceptor returns a real
  // object.
  Object* result =
      GetLocalPropertyPostInterceptor(this,
                                      Heap::hidden_symbol(),
                                      &attributes)->ToObjectUnchecked();
  return result;
}


MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
  ASSERT(!IsJSGlobalProxy());
  return SetPropertyPostInterceptor(Heap::hidden_symbol(),
                                    hidden_obj,
                                    DONT_ENUM);
}


bool JSObject::HasElement(uint32_t index) {
  return HasElementWithReceiver(this, index);
}


bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::prohibits_overwriting() {
  return BooleanBit::get(flag(), kProhibitsOverwritingBit);
}


void AccessorInfo::set_prohibits_overwriting(bool value) {
  set_flag(BooleanBit::set(flag(), kProhibitsOverwritingBit, value));
}


PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  ASSERT(AttributesField::is_valid(attributes));
  int rest_value = flag()->value() & ~AttributesField::mask();
  set_flag(Smi::FromInt(rest_value | AttributesField::encode(attributes)));
}

template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
                                      Object* key,
                                      Object* value,
                                      PropertyDetails details) {
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  FixedArray::fast_set(this, index+2, details.AsSmi());
}


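// Illustrative note on the entry layout written by SetEntry() above: each
// dictionary entry occupies three consecutive FixedArray slots starting at
// HashTable<Shape, Key>::EntryToIndex(entry).
//
//   index + 0: key
//   index + 1: value
//   index + 2: PropertyDetails, stored as a Smi (details.AsSmi())
//
// The details slot can use fast_set() because a Smi never needs a write
// barrier; the key and value slots go through the mode chosen by
// GetWriteBarrierMode().
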
void Map::ClearCodeCache() {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  ASSERT(!Heap::InNewSpace(Heap::raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, Heap::raw_unchecked_empty_fixed_array());
}


void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!Heap::new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}


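// Illustrative arithmetic for the growth policy above: EnsureSize() grows to
// required_size + required_size / 8, i.e. about 12.5% slack rather than
// doubling. For example (hypothetical numbers), a request for 64 elements on
// a shorter backing store expands to 64 + 8 = 72 slots, and a request for
// 200 elements expands to 200 + 25 = 225.
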
void JSArray::set_length(Smi* length) {
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}


void JSArray::SetContent(FixedArray* storage) {
  set_length(Smi::FromInt(storage->length()));
  set_elements(storage);
}


MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return Heap::CopyFixedArray(this);
}


int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}


void Proxy::ProxyIterateBody(ObjectVisitor* v) {
  v->VisitExternalReference(
      reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
}


template<typename StaticVisitor>
void Proxy::ProxyIterateBody() {
  StaticVisitor::VisitExternalReference(
      reinterpret_cast<Address *>(FIELD_ADDR(this, kProxyOffset)));
}


void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalAsciiStringResource Resource;
  v->VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalAsciiString::ExternalAsciiStringIterateBody() {
  typedef v8::String::ExternalAsciiStringResource Resource;
  StaticVisitor::VisitExternalAsciiString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
  typedef v8::String::ExternalStringResource Resource;
  v->VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}


template<typename StaticVisitor>
void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
  typedef v8::String::ExternalStringResource Resource;
  StaticVisitor::VisitExternalTwoByteString(
      reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
}

#define SLOT_ADDR(obj, offset) \
  reinterpret_cast<Object**>((obj)->address() + offset)

template<int start_offset, int end_offset, int size>
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
    HeapObject* obj,
    ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, end_offset));
}


template<int start_offset>
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                       int object_size,
                                                       ObjectVisitor* v) {
  v->VisitPointers(SLOT_ADDR(obj, start_offset), SLOT_ADDR(obj, object_size));
}

#undef SLOT_ADDR


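// Illustrative sketch: how a heap object class would typically plug into the
// body descriptor templates above. The Foo class and its offsets are
// hypothetical; only the template usage mirrors the definitions above.
//
//   class Foo: public HeapObject {
//    public:
//     static const int kPointerFieldsBeginOffset = HeapObject::kHeaderSize;
//     static const int kPointerFieldsEndOffset =
//         kPointerFieldsBeginOffset + 2 * kPointerSize;
//     static const int kSize = kPointerFieldsEndOffset;
//     typedef FixedBodyDescriptor<kPointerFieldsBeginOffset,
//                                 kPointerFieldsEndOffset,
//                                 kSize> BodyDescriptor;
//   };
//
// FixedBodyDescriptor visits the pointer slots between the two compile-time
// offsets, while FlexibleBodyDescriptor takes the object size at run time.
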
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
#undef SMI_ACCESSORS
#undef ACCESSORS
#undef FIELD_ADDR
#undef READ_FIELD
#undef WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef READ_MEMADDR_FIELD
#undef WRITE_MEMADDR_FIELD
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
#undef WRITE_BYTE_FIELD


} }  // namespace v8::internal

#endif  // V8_OBJECTS_INL_H_