Version 1.3.1.
Speed improvements to accessors and interceptors.
Added support for capturing stack information on custom errors.
Added support for morphing an object into a pixel array where its indexed properties are stored in an external byte array. Values written are always clamped to the 0..255 interval.
Profiler on x64 now handles C/C++ functions from shared libraries.
Changed the debugger to avoid stepping into function.call/apply if the function is a built-in.
Initial implementation of constructor heap profile for JS objects.
More fine grained control of profiling aspects through the API.
Optimized the called as constructor check for API calls.
git-svn-id: http://v8.googlecode.com/svn/trunk@2592 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/api.cc b/src/api.cc
index 9e3ca9b..0828101 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -2194,6 +2194,25 @@
}
+void v8::Object::SetIndexedPropertiesToPixelData(uint8_t* data, int length) {
+ ON_BAILOUT("v8::SetElementsToPixelData()", return);
+ ENTER_V8;
+ if (!ApiCheck(i::Smi::IsValid(length),
+ "v8::Object::SetIndexedPropertiesToPixelData()",
+ "length exceeds max acceptable value")) {
+ return;
+ }
+ i::Handle<i::JSObject> self = Utils::OpenHandle(this);
+ if (!ApiCheck(!self->IsJSArray(),
+ "v8::Object::SetIndexedPropertiesToPixelData()",
+ "JSArray is not supported")) {
+ return;
+ }
+ i::Handle<i::PixelArray> pixels = i::Factory::NewPixelArray(length, data);
+ self->set_elements(*pixels);
+}
+
+
Local<v8::Object> Function::NewInstance() const {
return NewInstance(0, NULL);
}
@@ -3057,7 +3076,7 @@
if (!self->HasFastElements()) {
return Local<Object>();
}
- i::FixedArray* elms = self->elements();
+ i::FixedArray* elms = i::FixedArray::cast(self->elements());
i::Object* paragon = elms->get(index);
if (!paragon->IsJSObject()) {
return Local<Object>();
@@ -3216,6 +3235,46 @@
}
+void V8::ResumeProfilerEx(int flags) {
+#ifdef ENABLE_LOGGING_AND_PROFILING
+ if (flags & PROFILER_MODULE_CPU) {
+ i::Logger::ResumeProfiler();
+ }
+ if (flags & (PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
+ i::FLAG_log_gc = true;
+ }
+#endif
+}
+
+
+void V8::PauseProfilerEx(int flags) {
+#ifdef ENABLE_LOGGING_AND_PROFILING
+ if (flags & PROFILER_MODULE_CPU) {
+ i::Logger::PauseProfiler();
+ }
+ if (flags & (PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
+ i::FLAG_log_gc = false;
+ }
+#endif
+}
+
+
+int V8::GetActiveProfilerModules() {
+#ifdef ENABLE_LOGGING_AND_PROFILING
+ int result = PROFILER_MODULE_NONE;
+ if (!i::Logger::IsProfilerPaused()) {
+ result |= PROFILER_MODULE_CPU;
+ }
+ if (i::FLAG_log_gc) {
+ result |= PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS;
+ }
+ return result;
+#else
+ return PROFILER_MODULE_NONE;
+#endif
+}
+
+
int V8::GetLogLines(int from_pos, char* dest_buf, int max_size) {
#ifdef ENABLE_LOGGING_AND_PROFILING
return i::Logger::GetLogLines(from_pos, dest_buf, max_size);
diff --git a/src/arm/codegen-arm.h b/src/arm/codegen-arm.h
index 6391a8e..42c5908 100644
--- a/src/arm/codegen-arm.h
+++ b/src/arm/codegen-arm.h
@@ -215,7 +215,7 @@
#define DEF_VISIT(type) \
void Visit##type(type* node);
- NODE_LIST(DEF_VISIT)
+ AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
// Visit a statement and then spill the virtual frame if control flow can
@@ -374,7 +374,7 @@
// information.
void CodeForFunctionPosition(FunctionLiteral* fun);
void CodeForReturnPosition(FunctionLiteral* fun);
- void CodeForStatementPosition(Node* node);
+ void CodeForStatementPosition(AstNode* node);
void CodeForSourcePosition(int pos);
#ifdef DEBUG
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index 82a2bec..8781256 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -582,8 +582,8 @@
__ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary).
__ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
- __ cmp(r3, Operand(Factory::hash_table_map()));
- __ b(eq, &slow);
+ __ cmp(r3, Operand(Factory::fixed_array_map()));
+ __ b(ne, &slow);
// Check that the key (index) is within bounds.
__ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
__ cmp(r0, Operand(r3));
@@ -661,8 +661,8 @@
__ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary).
__ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
- __ cmp(r2, Operand(Factory::hash_table_map()));
- __ b(eq, &slow);
+ __ cmp(r2, Operand(Factory::fixed_array_map()));
+ __ b(ne, &slow);
// Untag the key (for checking against untagged length in the fixed array).
__ mov(r1, Operand(r1, ASR, kSmiTagSize));
// Compute address to store into and check array bounds.
@@ -710,8 +710,8 @@
__ bind(&array);
__ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
__ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
- __ cmp(r1, Operand(Factory::hash_table_map()));
- __ b(eq, &slow);
+ __ cmp(r1, Operand(Factory::fixed_array_map()));
+ __ b(ne, &slow);
// Check the key against the length in the array, compute the
// address to store into and fall through to fast case.
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index d6650c9..393db59 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -467,21 +467,23 @@
// Push the arguments on the JS stack of the caller.
__ push(receiver); // receiver
+ __ push(reg); // holder
__ mov(ip, Operand(Handle<AccessorInfo>(callback))); // callback data
__ push(ip);
+ __ ldr(reg, FieldMemOperand(ip, AccessorInfo::kDataOffset));
+ __ push(reg);
__ push(name_reg); // name
- __ push(reg); // holder
// Do tail-call to the runtime system.
ExternalReference load_callback_property =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
- __ TailCallRuntime(load_callback_property, 4);
+ __ TailCallRuntime(load_callback_property, 5);
}
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
JSObject* holder,
- Smi* lookup_hint,
+ LookupResult* lookup,
Register receiver,
Register name_reg,
Register scratch1,
@@ -500,13 +502,18 @@
__ push(receiver); // receiver
__ push(reg); // holder
__ push(name_reg); // name
- __ mov(scratch1, Operand(lookup_hint));
+
+ InterceptorInfo* interceptor = holder->GetNamedInterceptor();
+ ASSERT(!Heap::InNewSpace(interceptor));
+ __ mov(scratch1, Operand(Handle<Object>(interceptor)));
__ push(scratch1);
+ __ ldr(scratch2, FieldMemOperand(scratch1, InterceptorInfo::kDataOffset));
+ __ push(scratch2);
// Do tail-call to the runtime system.
ExternalReference load_ic_property =
- ExternalReference(IC_Utility(IC::kLoadInterceptorProperty));
- __ TailCallRuntime(load_ic_property, 4);
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
+ __ TailCallRuntime(load_ic_property, 5);
}
@@ -676,13 +683,13 @@
case JSARRAY_HAS_FAST_ELEMENTS_CHECK:
CheckPrototypes(JSObject::cast(object), r1, holder, r3, r2, name, &miss);
- // Make sure object->elements()->map() != Heap::hash_table_map()
+ // Make sure object->HasFastElements().
// Get the elements array of the object.
__ ldr(r3, FieldMemOperand(r1, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary).
__ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
- __ cmp(r2, Operand(Factory::hash_table_map()));
- __ b(eq, &miss);
+ __ cmp(r2, Operand(Factory::fixed_array_map()));
+ __ b(ne, &miss);
break;
default:
@@ -744,8 +751,6 @@
// -----------------------------------
Label miss;
- __ IncrementCounter(&Counters::call_global_inline, 1, r1, r3);
-
// Get the number of arguments.
const int argc = arguments().immediate();
@@ -782,6 +787,7 @@
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
+ __ IncrementCounter(&Counters::call_global_inline, 1, r1, r3);
ASSERT(function->is_compiled());
Handle<Code> code(function->code());
ParameterCount expected(function->shared()->formal_parameter_count());
@@ -790,7 +796,6 @@
// Handle call cache miss.
__ bind(&miss);
- __ DecrementCounter(&Counters::call_global_inline, 1, r1, r3);
__ IncrementCounter(&Counters::call_global_inline_miss, 1, r1, r3);
Handle<Code> ic = ComputeCallMiss(arguments().immediate());
__ Jump(ic, RelocInfo::CODE_TARGET);
@@ -951,8 +956,6 @@
// -----------------------------------
Label miss;
- __ IncrementCounter(&Counters::named_store_global_inline, 1, r1, r3);
-
// Check that the map of the global has not changed.
__ ldr(r1, MemOperand(sp, 0 * kPointerSize));
__ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
@@ -963,11 +966,11 @@
__ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell)));
__ str(r0, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+ __ IncrementCounter(&Counters::named_store_global_inline, 1, r1, r3);
__ Ret();
// Handle store cache miss.
__ bind(&miss);
- __ DecrementCounter(&Counters::named_store_global_inline, 1, r1, r3);
__ IncrementCounter(&Counters::named_store_global_inline_miss, 1, r1, r3);
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
__ Jump(ic, RelocInfo::CODE_TARGET);
@@ -1054,9 +1057,11 @@
__ ldr(r0, MemOperand(sp, 0));
+ LookupResult lookup;
+ holder->LocalLookupRealNamedProperty(name, &lookup);
GenerateLoadInterceptor(object,
holder,
- holder->InterceptorPropertyLookupHint(name),
+ &lookup,
r0,
r2,
r3,
@@ -1083,8 +1088,6 @@
// -----------------------------------
Label miss;
- __ IncrementCounter(&Counters::named_load_global_inline, 1, r1, r3);
-
// Get the receiver from the stack.
__ ldr(r1, MemOperand(sp, 0 * kPointerSize));
@@ -1109,10 +1112,10 @@
__ b(eq, &miss);
}
+ __ IncrementCounter(&Counters::named_load_global_inline, 1, r1, r3);
__ Ret();
__ bind(&miss);
- __ DecrementCounter(&Counters::named_load_global_inline, 1, r1, r3);
__ IncrementCounter(&Counters::named_load_global_inline_miss, 1, r1, r3);
GenerateLoadMiss(masm(), Code::LOAD_IC);
@@ -1215,9 +1218,11 @@
__ cmp(r2, Operand(Handle<String>(name)));
__ b(ne, &miss);
+ LookupResult lookup;
+ holder->LocalLookupRealNamedProperty(name, &lookup);
GenerateLoadInterceptor(receiver,
holder,
- Smi::FromInt(JSObject::kLookupInHolder),
+ &lookup,
r0,
r2,
r3,
diff --git a/src/ast.cc b/src/ast.cc
index d8a3232..2b60742 100644
--- a/src/ast.cc
+++ b/src/ast.cc
@@ -51,7 +51,7 @@
if (v->CheckStackOverflow()) return; \
v->Visit##type(this); \
}
-NODE_LIST(DECL_ACCEPT)
+AST_NODE_LIST(DECL_ACCEPT)
#undef DECL_ACCEPT
diff --git a/src/ast.h b/src/ast.h
index 64d61cc..406d43d 100644
--- a/src/ast.h
+++ b/src/ast.h
@@ -53,9 +53,8 @@
// Nodes of the abstract syntax tree. Only concrete classes are
// enumerated here.
-#define NODE_LIST(V) \
+#define STATEMENT_NODE_LIST(V) \
V(Block) \
- V(Declaration) \
V(ExpressionStatement) \
V(EmptyStatement) \
V(IfStatement) \
@@ -69,7 +68,9 @@
V(ForInStatement) \
V(TryCatch) \
V(TryFinally) \
- V(DebuggerStatement) \
+ V(DebuggerStatement)
+
+#define EXPRESSION_NODE_LIST(V) \
V(FunctionLiteral) \
V(FunctionBoilerplateLiteral) \
V(Conditional) \
@@ -93,13 +94,17 @@
V(CompareOperation) \
V(ThisFunction)
+#define AST_NODE_LIST(V) \
+ V(Declaration) \
+ STATEMENT_NODE_LIST(V) \
+ EXPRESSION_NODE_LIST(V)
// Forward declarations
class TargetCollector;
class MaterializedLiteral;
#define DEF_FORWARD_DECLARATION(type) class type;
-NODE_LIST(DEF_FORWARD_DECLARATION)
+AST_NODE_LIST(DEF_FORWARD_DECLARATION)
#undef DEF_FORWARD_DECLARATION
@@ -108,10 +113,10 @@
typedef ZoneList<Handle<String> > ZoneStringList;
-class Node: public ZoneObject {
+class AstNode: public ZoneObject {
public:
- Node(): statement_pos_(RelocInfo::kNoPosition) { }
- virtual ~Node() { }
+ AstNode(): statement_pos_(RelocInfo::kNoPosition) { }
+ virtual ~AstNode() { }
virtual void Accept(AstVisitor* v) = 0;
// Type testing & conversion.
@@ -143,7 +148,7 @@
};
-class Statement: public Node {
+class Statement: public AstNode {
public:
virtual Statement* AsStatement() { return this; }
virtual ReturnStatement* AsReturnStatement() { return NULL; }
@@ -152,7 +157,7 @@
};
-class Expression: public Node {
+class Expression: public AstNode {
public:
virtual Expression* AsExpression() { return this; }
@@ -240,7 +245,7 @@
};
-class Declaration: public Node {
+class Declaration: public AstNode {
public:
Declaration(VariableProxy* proxy, Variable::Mode mode, FunctionLiteral* fun)
: proxy_(proxy),
@@ -523,7 +528,7 @@
// NOTE: TargetCollectors are represented as nodes to fit in the target
// stack in the compiler; this should probably be reworked.
-class TargetCollector: public Node {
+class TargetCollector: public AstNode {
public:
explicit TargetCollector(ZoneList<BreakTarget*>* targets)
: targets_(targets) {
@@ -1678,7 +1683,7 @@
virtual ~AstVisitor() { }
// Dispatch
- void Visit(Node* node) { node->Accept(this); }
+ void Visit(AstNode* node) { node->Accept(this); }
// Iteration
virtual void VisitStatements(ZoneList<Statement*>* statements);
@@ -1702,7 +1707,7 @@
// Individual nodes
#define DEF_VISIT(type) \
virtual void Visit##type(type* node) = 0;
- NODE_LIST(DEF_VISIT)
+ AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
private:
diff --git a/src/builtins.cc b/src/builtins.cc
index 0648e54..1ea0245 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -87,18 +87,34 @@
}
-// TODO(1238487): Get rid of this function that determines if the
-// builtin is called as a constructor. This may be a somewhat slow
-// operation due to the stack frame iteration.
static inline bool CalledAsConstructor() {
+#ifdef DEBUG
+ // Calculate the result using a full stack frame iterator and check
+ // that the state of the stack is as we assume it to be in the
+ // code below.
StackFrameIterator it;
ASSERT(it.frame()->is_exit());
it.Advance();
StackFrame* frame = it.frame();
- return frame->is_construct();
+ bool reference_result = frame->is_construct();
+#endif
+ Address fp = Top::c_entry_fp(Top::GetCurrentThread());
+ // Because we know fp points to an exit frame we can use the relevant
+ // part of ExitFrame::ComputeCallerState directly.
+ const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
+ Address caller_fp = Memory::Address_at(fp + kCallerOffset);
+ // This inlines the part of StackFrame::ComputeType that grabs the
+ // type of the current frame. Note that StackFrame::ComputeType
+ // has been specialized for each architecture so if any one of them
+ // changes this code has to be changed as well.
+ const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
+ const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
+ Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
+ bool result = (marker == kConstructMarker);
+ ASSERT_EQ(result, reference_result);
+ return result;
}
-
// ----------------------------------------------------------------------------
diff --git a/src/codegen.cc b/src/codegen.cc
index b7297d7..7a4bb12 100644
--- a/src/codegen.cc
+++ b/src/codegen.cc
@@ -496,7 +496,7 @@
}
-void CodeGenerator::CodeForStatementPosition(Node* node) {
+void CodeGenerator::CodeForStatementPosition(AstNode* node) {
if (FLAG_debug_info) {
int pos = node->statement_pos();
if (pos != RelocInfo::kNoPosition) {
diff --git a/src/debug-agent.cc b/src/debug-agent.cc
index 62cc251..3dba53a 100644
--- a/src/debug-agent.cc
+++ b/src/debug-agent.cc
@@ -254,8 +254,8 @@
// Check that key is Content-Length.
if (strcmp(key, kContentLength) == 0) {
- // Get the content length value if within a sensible range.
- if (strlen(value) > 7) {
+ // Get the content length value if present and within a sensible range.
+ if (value == NULL || strlen(value) > 7) {
return SmartPointer<char>();
}
for (int i = 0; value[i] != '\0'; i++) {
diff --git a/src/debug.cc b/src/debug.cc
index 64f98c7..18536f5 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -1301,7 +1301,7 @@
// step into was requested.
if (fp == Debug::step_in_fp()) {
// Don't allow step into functions in the native context.
- if (function->context()->global() != Top::context()->builtins()) {
+ if (!function->IsBuiltin()) {
if (function->shared()->code() ==
Builtins::builtin(Builtins::FunctionApply) ||
function->shared()->code() ==
@@ -1310,7 +1310,8 @@
// function to be called and not the code for Builtins::FunctionApply or
// Builtins::FunctionCall. The receiver of call/apply is the target
// function.
- if (!holder.is_null() && holder->IsJSFunction()) {
+ if (!holder.is_null() && holder->IsJSFunction() &&
+ !JSFunction::cast(*holder)->IsBuiltin()) {
Handle<SharedFunctionInfo> shared_info(
JSFunction::cast(*holder)->shared());
Debug::FloodWithOneShot(shared_info);
diff --git a/src/execution.cc b/src/execution.cc
index 7d163cb..eb68f46 100644
--- a/src/execution.cc
+++ b/src/execution.cc
@@ -592,14 +592,14 @@
ASSERT(!it.done());
Object* fun = it.frame()->function();
if (fun && fun->IsJSFunction()) {
- GlobalObject* global = JSFunction::cast(fun)->context()->global();
// Don't stop in builtin functions.
- if (global == Top::context()->builtins()) {
- return Heap::undefined_value();
+ if (JSFunction::cast(fun)->IsBuiltin()) {
+ return Heap::undefined_value();
}
+ GlobalObject* global = JSFunction::cast(fun)->context()->global();
// Don't stop in debugger functions.
if (Debug::IsDebugGlobal(global)) {
- return Heap::undefined_value();
+ return Heap::undefined_value();
}
}
}
diff --git a/src/factory.cc b/src/factory.cc
index 1045a4c..36554df 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -210,6 +210,16 @@
}
+Handle<PixelArray> Factory::NewPixelArray(int length,
+ uint8_t* external_pointer,
+ PretenureFlag pretenure) {
+ ASSERT(0 <= length);
+ CALL_HEAP_FUNCTION(Heap::AllocatePixelArray(length,
+ external_pointer,
+ pretenure), PixelArray);
+}
+
+
Handle<Map> Factory::NewMap(InstanceType type, int instance_size) {
CALL_HEAP_FUNCTION(Heap::AllocateMap(type, instance_size), Map);
}
diff --git a/src/factory.h b/src/factory.h
index 0afdd76..4db5d4e 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -154,6 +154,10 @@
static Handle<ByteArray> NewByteArray(int length,
PretenureFlag pretenure = NOT_TENURED);
+ static Handle<PixelArray> NewPixelArray(int length,
+ uint8_t* external_pointer,
+ PretenureFlag pretenure = NOT_TENURED);
+
static Handle<Map> NewMap(InstanceType type, int instance_size);
static Handle<JSObject> NewFunctionPrototype(Handle<JSFunction> function);
diff --git a/src/globals.h b/src/globals.h
index 44bd527..195a2e2 100644
--- a/src/globals.h
+++ b/src/globals.h
@@ -207,6 +207,7 @@
class IC;
class InterceptorInfo;
class IterationStatement;
+class Array;
class JSArray;
class JSFunction;
class JSObject;
diff --git a/src/handles.cc b/src/handles.cc
index 510ea95..6345d41 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -164,8 +164,11 @@
void NormalizeProperties(Handle<JSObject> object,
- PropertyNormalizationMode mode) {
- CALL_HEAP_FUNCTION_VOID(object->NormalizeProperties(mode));
+ PropertyNormalizationMode mode,
+ int expected_additional_properties) {
+ CALL_HEAP_FUNCTION_VOID(object->NormalizeProperties(
+ mode,
+ expected_additional_properties));
}
@@ -341,6 +344,14 @@
Handle<Object> SetElement(Handle<JSObject> object,
uint32_t index,
Handle<Object> value) {
+ if (object->HasPixelElements()) {
+ if (!value->IsSmi() && !value->IsHeapNumber() && !value->IsUndefined()) {
+ bool has_exception;
+ Handle<Object> number = Execution::ToNumber(value, &has_exception);
+ if (has_exception) return Handle<Object>();
+ value = number;
+ }
+ }
CALL_HEAP_FUNCTION(object->SetElement(index, *value), Object);
}
@@ -643,13 +654,17 @@
OptimizedObjectForAddingMultipleProperties::
OptimizedObjectForAddingMultipleProperties(Handle<JSObject> object,
+ int expected_additional_properties,
bool condition) {
object_ = object;
if (condition && object_->HasFastProperties()) {
// Normalize the properties of object to avoid n^2 behavior
- // when extending the object multiple properties.
+ // when extending the object with multiple properties. Indicate the number of
+ // properties to be added.
unused_property_fields_ = object->map()->unused_property_fields();
- NormalizeProperties(object_, KEEP_INOBJECT_PROPERTIES);
+ NormalizeProperties(object_,
+ KEEP_INOBJECT_PROPERTIES,
+ expected_additional_properties);
has_been_transformed_ = true;
} else {
diff --git a/src/handles.h b/src/handles.h
index a86dc96..ba2694f 100644
--- a/src/handles.h
+++ b/src/handles.h
@@ -181,7 +181,8 @@
// of space or encountering an internal error.
void NormalizeProperties(Handle<JSObject> object,
- PropertyNormalizationMode mode);
+ PropertyNormalizationMode mode,
+ int expected_additional_properties);
void NormalizeElements(Handle<JSObject> object);
void TransformToFastProperties(Handle<JSObject> object,
int unused_property_fields);
@@ -336,6 +337,7 @@
class OptimizedObjectForAddingMultipleProperties BASE_EMBEDDED {
public:
OptimizedObjectForAddingMultipleProperties(Handle<JSObject> object,
+ int expected_property_count,
bool condition = true);
~OptimizedObjectForAddingMultipleProperties();
private:
diff --git a/src/heap.cc b/src/heap.cc
index 213eec5..ebd0e1e 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -1191,6 +1191,10 @@
if (obj->IsFailure()) return false;
set_byte_array_map(Map::cast(obj));
+ obj = AllocateMap(PIXEL_ARRAY_TYPE, PixelArray::kAlignedSize);
+ if (obj->IsFailure()) return false;
+ set_pixel_array_map(Map::cast(obj));
+
obj = AllocateMap(CODE_TYPE, Code::kHeaderSize);
if (obj->IsFailure()) return false;
set_code_map(Map::cast(obj));
@@ -1407,6 +1411,12 @@
if (obj->IsFailure()) return false;
set_the_hole_value(obj);
+ obj = CreateOddball(
+ oddball_map(), "no_interceptor_result_sentinel", Smi::FromInt(-2));
+ if (obj->IsFailure()) return false;
+ set_no_interceptor_result_sentinel(obj);
+
+
// Allocate the empty string.
obj = AllocateRawAsciiString(0, TENURED);
if (obj->IsFailure()) return false;
@@ -1433,13 +1443,15 @@
if (obj->IsFailure()) return false;
set_prototype_accessors(Proxy::cast(obj));
- // Allocate the code_stubs dictionary.
- obj = NumberDictionary::Allocate(4);
+ // Allocate the code_stubs dictionary. The initial size is set to avoid
+ // expanding the dictionary during bootstrapping.
+ obj = NumberDictionary::Allocate(128);
if (obj->IsFailure()) return false;
set_code_stubs(NumberDictionary::cast(obj));
- // Allocate the non_monomorphic_cache used in stub-cache.cc
- obj = NumberDictionary::Allocate(4);
+ // Allocate the non_monomorphic_cache used in stub-cache.cc. The initial size
+ // is set to avoid expanding the dictionary during bootstrapping.
+ obj = NumberDictionary::Allocate(64);
if (obj->IsFailure()) return false;
set_non_monomorphic_cache(NumberDictionary::cast(obj));
@@ -1576,8 +1588,7 @@
Object* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) {
// Statically ensure that it is safe to allocate proxies in paged spaces.
STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize);
- AllocationSpace space =
- (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+ AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
Object* result = Allocate(proxy_map(), space);
if (result->IsFailure()) return result;
@@ -1859,6 +1870,23 @@
}
+Object* Heap::AllocatePixelArray(int length,
+ uint8_t* external_pointer,
+ PretenureFlag pretenure) {
+ AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+
+ Object* result = AllocateRaw(PixelArray::kAlignedSize, space, OLD_DATA_SPACE);
+
+ if (result->IsFailure()) return result;
+
+ reinterpret_cast<PixelArray*>(result)->set_map(pixel_array_map());
+ reinterpret_cast<PixelArray*>(result)->set_length(length);
+ reinterpret_cast<PixelArray*>(result)->set_external_pointer(external_pointer);
+
+ return result;
+}
+
+
Object* Heap::CreateCode(const CodeDesc& desc,
ZoneScopeInfo* sinfo,
Code::Flags flags,
@@ -2077,6 +2105,11 @@
// properly initialized.
ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
+ // Both types of global objects should be allocated using
+ // AllocateGlobalObject to be properly initialized.
+ ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
+ ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
+
// Allocate the backing storage for the properties.
int prop_size = map->unused_property_fields() - map->inobject_properties();
Object* properties = AllocateFixedArray(prop_size, pretenure);
@@ -2117,24 +2150,62 @@
Object* Heap::AllocateGlobalObject(JSFunction* constructor) {
ASSERT(constructor->has_initial_map());
+ Map* map = constructor->initial_map();
+
// Make sure no field properties are described in the initial map.
// This guarantees us that normalizing the properties does not
// require us to change property values to JSGlobalPropertyCells.
- ASSERT(constructor->initial_map()->NextFreePropertyIndex() == 0);
+ ASSERT(map->NextFreePropertyIndex() == 0);
// Make sure we don't have a ton of pre-allocated slots in the
// global objects. They will be unused once we normalize the object.
- ASSERT(constructor->initial_map()->unused_property_fields() == 0);
- ASSERT(constructor->initial_map()->inobject_properties() == 0);
+ ASSERT(map->unused_property_fields() == 0);
+ ASSERT(map->inobject_properties() == 0);
- // Allocate the object based on the constructors initial map.
- Object* result = AllocateJSObjectFromMap(constructor->initial_map(), TENURED);
- if (result->IsFailure()) return result;
+ // Initial size of the backing store to avoid resize of the storage during
+ // bootstrapping. The size differs between the JS global object and the
+ // builtins object.
+ int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;
- // Normalize the result.
- JSObject* global = JSObject::cast(result);
- result = global->NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
- if (result->IsFailure()) return result;
+ // Allocate a dictionary object for backing storage.
+ Object* obj =
+ StringDictionary::Allocate(
+ map->NumberOfDescribedProperties() * 2 + initial_size);
+ if (obj->IsFailure()) return obj;
+ StringDictionary* dictionary = StringDictionary::cast(obj);
+
+ // The global object might be created from an object template with accessors.
+ // Fill these accessors into the dictionary.
+ DescriptorArray* descs = map->instance_descriptors();
+ for (int i = 0; i < descs->number_of_descriptors(); i++) {
+ PropertyDetails details = descs->GetDetails(i);
+ ASSERT(details.type() == CALLBACKS); // Only accessors are expected.
+ PropertyDetails d =
+ PropertyDetails(details.attributes(), CALLBACKS, details.index());
+ Object* value = descs->GetCallbacksObject(i);
+ value = Heap::AllocateJSGlobalPropertyCell(value);
+ if (value->IsFailure()) return value;
+
+ Object* result = dictionary->Add(descs->GetKey(i), value, d);
+ if (result->IsFailure()) return result;
+ dictionary = StringDictionary::cast(result);
+ }
+
+ // Allocate the global object and initialize it with the backing store.
+ obj = Allocate(map, OLD_POINTER_SPACE);
+ if (obj->IsFailure()) return obj;
+ JSObject* global = JSObject::cast(obj);
+ InitializeJSObjectFromMap(global, dictionary, map);
+
+ // Create a new map for the global object.
+ obj = map->CopyDropDescriptors();
+ if (obj->IsFailure()) return obj;
+ Map* new_map = Map::cast(obj);
+
+ // Setup the global object as a normalized object.
+ global->set_map(new_map);
+ global->map()->set_instance_descriptors(Heap::empty_descriptor_array());
+ global->set_properties(dictionary);
// Make sure result is a global object with properties in dictionary.
ASSERT(global->IsGlobalObject());
@@ -3391,6 +3462,100 @@
}
+#ifdef ENABLE_LOGGING_AND_PROFILING
+namespace {
+
+// JSConstructorProfile is responsible for gathering and logging
+// "constructor profile" of JS object allocated on heap.
+// It is run during garbage collection cycle, thus it doesn't need
+// to use handles.
+class JSConstructorProfile BASE_EMBEDDED {
+ public:
+ JSConstructorProfile() : zscope_(DELETE_ON_EXIT) {}
+ void CollectStats(JSObject* obj);
+ void PrintStats();
+ // Used by ZoneSplayTree::ForEach.
+ void Call(String* name, const NumberAndSizeInfo& number_and_size);
+ private:
+ struct TreeConfig {
+ typedef String* Key;
+ typedef NumberAndSizeInfo Value;
+ static const Key kNoKey;
+ static const Value kNoValue;
+ // Strings are unique, so it is sufficient to compare their pointers.
+ static int Compare(const Key& a, const Key& b) {
+ return a == b ? 0 : (a < b ? -1 : 1);
+ }
+ };
+
+ typedef ZoneSplayTree<TreeConfig> JSObjectsInfoTree;
+ static int CalculateJSObjectNetworkSize(JSObject* obj);
+
+ ZoneScope zscope_;
+ JSObjectsInfoTree js_objects_info_tree_;
+};
+
+const JSConstructorProfile::TreeConfig::Key
+ JSConstructorProfile::TreeConfig::kNoKey = NULL;
+const JSConstructorProfile::TreeConfig::Value
+ JSConstructorProfile::TreeConfig::kNoValue;
+
+
+int JSConstructorProfile::CalculateJSObjectNetworkSize(JSObject* obj) {
+ int size = obj->Size();
+ // If 'properties' and 'elements' are non-empty (thus, non-shared),
+ // take their size into account.
+ if (FixedArray::cast(obj->properties())->length() != 0) {
+ size += obj->properties()->Size();
+ }
+ if (FixedArray::cast(obj->elements())->length() != 0) {
+ size += obj->elements()->Size();
+ }
+ return size;
+}
+
+
+void JSConstructorProfile::Call(String* name,
+ const NumberAndSizeInfo& number_and_size) {
+ SmartPointer<char> s_name;
+ if (name != NULL) {
+ s_name = name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
+ }
+ LOG(HeapSampleJSConstructorEvent(*s_name,
+ number_and_size.number(),
+ number_and_size.bytes()));
+}
+
+
+void JSConstructorProfile::CollectStats(JSObject* obj) {
+ String* constructor_func = NULL;
+ if (obj->map()->constructor()->IsJSFunction()) {
+ JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
+ SharedFunctionInfo* sfi = constructor->shared();
+ String* name = String::cast(sfi->name());
+ constructor_func = name->length() > 0 ? name : sfi->inferred_name();
+ } else if (obj->IsJSFunction()) {
+ constructor_func = Heap::function_class_symbol();
+ }
+ JSObjectsInfoTree::Locator loc;
+ if (!js_objects_info_tree_.Find(constructor_func, &loc)) {
+ js_objects_info_tree_.Insert(constructor_func, &loc);
+ }
+ NumberAndSizeInfo number_and_size = loc.value();
+ number_and_size.increment_number(1);
+ number_and_size.increment_bytes(CalculateJSObjectNetworkSize(obj));
+ loc.set_value(number_and_size);
+}
+
+
+void JSConstructorProfile::PrintStats() {
+ js_objects_info_tree_.ForEach(this);
+}
+
+} // namespace
+#endif
+
+
//
// HeapProfiler class implementation.
//
@@ -3415,9 +3580,14 @@
INSTANCE_TYPE_LIST(DEF_TYPE_NAME)
#undef DEF_TYPE_NAME
+ JSConstructorProfile js_cons_profile;
HeapIterator iterator;
while (iterator.has_next()) {
- CollectStats(iterator.next(), info);
+ HeapObject* obj = iterator.next();
+ CollectStats(obj, info);
+ if (obj->IsJSObject()) {
+ js_cons_profile.CollectStats(JSObject::cast(obj));
+ }
}
// Lump all the string types together.
@@ -3439,6 +3609,8 @@
}
}
+ js_cons_profile.PrintStats();
+
LOG(HeapSampleEndEvent("Heap", "allocated"));
}
diff --git a/src/heap.h b/src/heap.h
index 4e2c64c..69d9ff0 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -94,6 +94,7 @@
UndetectableMediumAsciiStringMap) \
V(Map, undetectable_long_ascii_string_map, UndetectableLongAsciiStringMap) \
V(Map, byte_array_map, ByteArrayMap) \
+ V(Map, pixel_array_map, PixelArrayMap) \
V(Map, fixed_array_map, FixedArrayMap) \
V(Map, hash_table_map, HashTableMap) \
V(Map, context_map, ContextMap) \
@@ -109,6 +110,7 @@
V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
V(Object, nan_value, NanValue) \
V(Object, undefined_value, UndefinedValue) \
+ V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
V(Object, minus_zero_value, MinusZeroValue) \
V(Object, null_value, NullValue) \
V(Object, true_value, TrueValue) \
@@ -418,6 +420,14 @@
// Please note this does not perform a garbage collection.
static Object* AllocateByteArray(int length);
+ // Allocate a pixel array of the specified length
+ // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
+ // failed.
+ // Please note this does not perform a garbage collection.
+ static Object* AllocatePixelArray(int length,
+ uint8_t* external_pointer,
+ PretenureFlag pretenure);
+
// Allocate a tenured JS global property cell.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index 457b22f..6648277 100644
--- a/src/ia32/codegen-ia32.cc
+++ b/src/ia32/codegen-ia32.cc
@@ -6297,8 +6297,8 @@
__ mov(elements.reg(),
FieldOperand(receiver.reg(), JSObject::kElementsOffset));
__ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
- Immediate(Factory::hash_table_map()));
- deferred->Branch(equal);
+ Immediate(Factory::fixed_array_map()));
+ deferred->Branch(not_equal);
// Shift the key to get the actual index value and check that
// it is within bounds.
diff --git a/src/ia32/codegen-ia32.h b/src/ia32/codegen-ia32.h
index 5cd50b8..4bb006f 100644
--- a/src/ia32/codegen-ia32.h
+++ b/src/ia32/codegen-ia32.h
@@ -359,7 +359,7 @@
#define DEF_VISIT(type) \
void Visit##type(type* node);
- NODE_LIST(DEF_VISIT)
+ AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
// Visit a statement and then spill the virtual frame if control flow can
@@ -558,7 +558,7 @@
// information.
void CodeForFunctionPosition(FunctionLiteral* fun);
void CodeForReturnPosition(FunctionLiteral* fun);
- void CodeForStatementPosition(Node* node);
+ void CodeForStatementPosition(AstNode* node);
void CodeForSourcePosition(int pos);
#ifdef DEBUG
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index f7d0797..08ffe2f 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -234,7 +234,7 @@
// -- esp[4] : name
// -- esp[8] : receiver
// -----------------------------------
- Label slow, fast, check_string, index_int, index_string;
+ Label slow, check_string, index_int, index_string, check_pixel_array;
// Load name and receiver.
__ mov(eax, Operand(esp, kPointerSize));
@@ -269,11 +269,36 @@
__ mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary).
__ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
- Immediate(Factory::hash_table_map()));
- __ j(equal, &slow, not_taken);
+ Immediate(Factory::fixed_array_map()));
+ __ j(not_equal, &check_pixel_array);
// Check that the key (index) is within bounds.
__ cmp(eax, FieldOperand(ecx, FixedArray::kLengthOffset));
- __ j(below, &fast, taken);
+ __ j(above_equal, &slow);
+ // Fast case: Do the load.
+ __ mov(eax,
+ Operand(ecx, eax, times_4, FixedArray::kHeaderSize - kHeapObjectTag));
+ __ cmp(Operand(eax), Immediate(Factory::the_hole_value()));
+ // In case the loaded value is the_hole we have to consult GetProperty
+ // to ensure the prototype chain is searched.
+ __ j(equal, &slow);
+ __ IncrementCounter(&Counters::keyed_load_generic_smi, 1);
+ __ ret(0);
+
+ // Check whether the elements object is a pixel array.
+ // eax: untagged index
+ // ecx: elements array
+ __ bind(&check_pixel_array);
+ __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
+ Immediate(Factory::pixel_array_map()));
+ __ j(not_equal, &slow);
+ __ cmp(eax, FieldOperand(ecx, PixelArray::kLengthOffset));
+ __ j(above_equal, &slow);
+ __ mov(ecx, FieldOperand(ecx, PixelArray::kExternalPointerOffset));
+ __ movzx_b(eax, Operand(ecx, eax, times_1, 0));
+ __ shl(eax, kSmiTagSize);
+ __ ret(0);
+
+
// Slow case: Load name and receiver from stack and jump to runtime.
__ bind(&slow);
__ IncrementCounter(&Counters::keyed_load_generic_slow, 1);
@@ -315,16 +340,6 @@
__ and_(eax, (1 << String::kShortLengthShift) - 1);
__ shr(eax, String::kLongLengthShift);
__ jmp(&index_int);
- // Fast case: Do the load.
- __ bind(&fast);
- __ mov(eax,
- Operand(ecx, eax, times_4, FixedArray::kHeaderSize - kHeapObjectTag));
- __ cmp(Operand(eax), Immediate(Factory::the_hole_value()));
- // In case the loaded value is the_hole we have to consult GetProperty
- // to ensure the prototype chain is searched.
- __ j(equal, &slow, not_taken);
- __ IncrementCounter(&Counters::keyed_load_generic_smi, 1);
- __ ret(0);
}
@@ -335,7 +350,7 @@
// -- esp[4] : key
// -- esp[8] : receiver
// -----------------------------------
- Label slow, fast, array, extra;
+ Label slow, fast, array, extra, check_pixel_array;
// Get the receiver from the stack.
__ mov(edx, Operand(esp, 2 * kPointerSize)); // 2 ~ return address, key
@@ -370,8 +385,8 @@
__ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary).
__ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
- Immediate(Factory::hash_table_map()));
- __ j(equal, &slow, not_taken);
+ Immediate(Factory::fixed_array_map()));
+ __ j(not_equal, &check_pixel_array, not_taken);
// Untag the key (for checking against untagged length in the fixed array).
__ mov(edx, Operand(ebx));
__ sar(edx, kSmiTagSize); // untag the index and use it for the comparison
@@ -381,7 +396,6 @@
// ebx: index (as a smi)
__ j(below, &fast, taken);
-
// Slow case: Push extra copies of the arguments (3).
__ bind(&slow);
__ pop(ecx);
@@ -392,6 +406,37 @@
// Do tail-call to runtime routine.
__ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);
+ // Check whether the elements object is a pixel array.
+ // eax: value
+ // ecx: elements array
+ // ebx: index (as a smi)
+ __ bind(&check_pixel_array);
+ __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
+ Immediate(Factory::pixel_array_map()));
+ __ j(not_equal, &slow);
+ // Check that the value is a smi. If a conversion is needed call into the
+ // runtime to convert and clamp.
+ __ test(eax, Immediate(kSmiTagMask));
+ __ j(not_zero, &slow);
+ __ sar(ebx, kSmiTagSize); // Untag the index.
+ __ cmp(ebx, FieldOperand(ecx, PixelArray::kLengthOffset));
+ __ j(above_equal, &slow);
+ __ sar(eax, kSmiTagSize); // Untag the value.
+ { // Clamp the value to [0..255].
+ Label done, check_255;
+ __ cmp(eax, 0);
+ __ j(greater_equal, &check_255);
+ __ mov(eax, Immediate(0));
+ __ jmp(&done);
+ __ bind(&check_255);
+ __ cmp(eax, 255);
+ __ j(less_equal, &done);
+ __ mov(eax, Immediate(255));
+ __ bind(&done);
+ }
+ __ mov(ecx, FieldOperand(ecx, PixelArray::kExternalPointerOffset));
+ __ mov_b(Operand(ecx, ebx, times_1, 0), eax);
+ __ ret(0);
// Extra capacity case: Check if there is extra capacity to
// perform the store and update the length. Used for adding one
@@ -422,15 +467,14 @@
// ebx: index (as a smi)
__ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
__ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
- Immediate(Factory::hash_table_map()));
- __ j(equal, &slow, not_taken);
+ Immediate(Factory::fixed_array_map()));
+ __ j(not_equal, &check_pixel_array);
// Check the key against the length in the array, compute the
// address to store into and fall through to fast case.
__ cmp(ebx, FieldOperand(edx, JSArray::kLengthOffset));
__ j(above_equal, &extra, not_taken);
-
// Fast case: Do the store.
__ bind(&fast);
// eax: value
@@ -749,12 +793,10 @@
// -----------------------------------
__ mov(eax, Operand(esp, kPointerSize));
-
- // Move the return address below the arguments.
__ pop(ebx);
- __ push(eax);
- __ push(ecx);
- __ push(ebx);
+ __ push(eax); // receiver
+ __ push(ecx); // name
+ __ push(ebx); // return address
// Perform tail call to the entry.
__ TailCallRuntime(f, 2);
@@ -797,7 +839,8 @@
bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
// The address of the instruction following the call.
- Address test_instruction_address = address + 4;
+ Address test_instruction_address =
+ address + Assembler::kTargetAddrToReturnAddrDist;
// If the instruction following the call is not a test eax, nothing
// was inlined.
if (*test_instruction_address != kTestEaxByte) return false;
@@ -823,7 +866,8 @@
static bool PatchInlinedMapCheck(Address address, Object* map) {
- Address test_instruction_address = address + 4; // 4 = stub address
+ Address test_instruction_address =
+ address + Assembler::kTargetAddrToReturnAddrDist;
// The keyed load has a fast inlined case if the IC call instruction
// is immediately followed by a test instruction.
if (*test_instruction_address != kTestEaxByte) return false;
@@ -877,12 +921,10 @@
__ mov(eax, Operand(esp, kPointerSize));
__ mov(ecx, Operand(esp, 2 * kPointerSize));
-
- // Move the return address below the arguments.
__ pop(ebx);
- __ push(ecx);
- __ push(eax);
- __ push(ebx);
+ __ push(ecx); // receiver
+ __ push(eax); // name
+ __ push(ebx); // return address
// Perform tail call to the entry.
__ TailCallRuntime(f, 2);
@@ -917,12 +959,12 @@
// -- esp[4] : receiver
// -----------------------------------
- // Move the return address below the arguments.
__ pop(ebx);
- __ push(Operand(esp, 0));
- __ push(ecx);
- __ push(eax);
- __ push(ebx);
+ __ push(Operand(esp, 0)); // receiver
+ __ push(ecx); // transition map
+ __ push(eax); // value
+ __ push(ebx); // return address
+
// Perform tail call to the entry.
__ TailCallRuntime(
ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 4f5b3e0..a626377 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -152,6 +152,22 @@
}
+template <typename Pushable>
+static void PushInterceptorArguments(MacroAssembler* masm,
+ Register receiver,
+ Register holder,
+ Pushable name,
+ JSObject* holder_obj) {
+ __ push(receiver);
+ __ push(holder);
+ __ push(name);
+ InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
+ __ mov(receiver, Immediate(Handle<Object>(interceptor)));
+ __ push(receiver);
+ __ push(FieldOperand(receiver, InterceptorInfo::kDataOffset));
+}
+
+
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
int index,
Register prototype) {
@@ -273,6 +289,322 @@
}
+template <class Pushable>
+static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
+ Register receiver,
+ Register holder,
+ Pushable name,
+ JSObject* holder_obj) {
+ PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
+
+ ExternalReference ref =
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
+ __ mov(eax, Immediate(5));
+ __ mov(ebx, Immediate(ref));
+
+ CEntryStub stub;
+ __ CallStub(&stub);
+}
+
+
+template <class Compiler>
+static void CompileLoadInterceptor(Compiler* compiler,
+ StubCompiler* stub_compiler,
+ MacroAssembler* masm,
+ JSObject* object,
+ JSObject* holder,
+ String* name,
+ LookupResult* lookup,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ Label* miss) {
+ ASSERT(holder->HasNamedInterceptor());
+ ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
+
+ // Check that the receiver isn't a smi.
+ __ test(receiver, Immediate(kSmiTagMask));
+ __ j(zero, miss, not_taken);
+
+ // Check that the maps haven't changed.
+ Register reg =
+ stub_compiler->CheckPrototypes(object, receiver, holder,
+ scratch1, scratch2, name, miss);
+
+ if (lookup->IsValid() && lookup->IsCacheable()) {
+ compiler->CompileCacheable(masm,
+ stub_compiler,
+ receiver,
+ reg,
+ scratch1,
+ scratch2,
+ holder,
+ lookup,
+ name,
+ miss);
+ } else {
+ compiler->CompileRegular(masm,
+ receiver,
+ reg,
+ scratch2,
+ holder,
+ miss);
+ }
+}
+
+
+static void LookupPostInterceptor(JSObject* holder,
+ String* name,
+ LookupResult* lookup) {
+ holder->LocalLookupRealNamedProperty(name, lookup);
+ if (lookup->IsNotFound()) {
+ Object* proto = holder->GetPrototype();
+ if (proto != Heap::null_value()) {
+ proto->Lookup(name, lookup);
+ }
+ }
+}
+
+
+class LoadInterceptorCompiler BASE_EMBEDDED {
+ public:
+ explicit LoadInterceptorCompiler(Register name) : name_(name) {}
+
+ void CompileCacheable(MacroAssembler* masm,
+ StubCompiler* stub_compiler,
+ Register receiver,
+ Register holder,
+ Register scratch1,
+ Register scratch2,
+ JSObject* holder_obj,
+ LookupResult* lookup,
+ String* name,
+ Label* miss_label) {
+ AccessorInfo* callback = 0;
+ bool optimize = false;
+ // So far the most popular follow ups for interceptor loads are FIELD
+ // and CALLBACKS, so inline only them, other cases may be added
+ // later.
+ if (lookup->type() == FIELD) {
+ optimize = true;
+ } else if (lookup->type() == CALLBACKS) {
+ Object* callback_object = lookup->GetCallbackObject();
+ if (callback_object->IsAccessorInfo()) {
+ callback = AccessorInfo::cast(callback_object);
+ optimize = callback->getter() != NULL;
+ }
+ }
+
+ if (!optimize) {
+ CompileRegular(masm, receiver, holder, scratch2, holder_obj, miss_label);
+ return;
+ }
+
+ // Note: starting a frame here makes GC aware of pointers pushed below.
+ __ EnterInternalFrame();
+
+ if (lookup->type() == CALLBACKS) {
+ __ push(receiver);
+ }
+ __ push(holder);
+ __ push(name_);
+
+ CompileCallLoadPropertyWithInterceptor(masm,
+ receiver,
+ holder,
+ name_,
+ holder_obj);
+
+ Label interceptor_failed;
+ __ cmp(eax, Factory::no_interceptor_result_sentinel());
+ __ j(equal, &interceptor_failed);
+ __ LeaveInternalFrame();
+ __ ret(0);
+
+ __ bind(&interceptor_failed);
+ __ pop(name_);
+ __ pop(holder);
+ if (lookup->type() == CALLBACKS) {
+ __ pop(receiver);
+ }
+
+ __ LeaveInternalFrame();
+
+ if (lookup->type() == FIELD) {
+ holder = stub_compiler->CheckPrototypes(holder_obj, holder,
+ lookup->holder(), scratch1,
+ scratch2,
+ name,
+ miss_label);
+ stub_compiler->GenerateFastPropertyLoad(masm, eax,
+ holder, lookup->holder(),
+ lookup->GetFieldIndex());
+ __ ret(0);
+ } else {
+ ASSERT(lookup->type() == CALLBACKS);
+ ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
+ ASSERT(callback != NULL);
+ ASSERT(callback->getter() != NULL);
+
+ Label cleanup;
+ __ pop(scratch2);
+ __ push(receiver);
+ __ push(scratch2);
+
+ holder = stub_compiler->CheckPrototypes(holder_obj, holder,
+ lookup->holder(), scratch1,
+ scratch2,
+ name,
+ &cleanup);
+
+ __ pop(scratch2); // save old return address
+ __ push(holder);
+ __ mov(holder, Immediate(Handle<AccessorInfo>(callback)));
+ __ push(holder);
+ __ push(FieldOperand(holder, AccessorInfo::kDataOffset));
+ __ push(name_);
+ __ push(scratch2); // restore old return address
+
+ ExternalReference ref =
+ ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
+ __ TailCallRuntime(ref, 5);
+
+ __ bind(&cleanup);
+ __ pop(scratch1);
+ __ pop(scratch2);
+ __ push(scratch1);
+ }
+ }
+
+
+ void CompileRegular(MacroAssembler* masm,
+ Register receiver,
+ Register holder,
+ Register scratch,
+ JSObject* holder_obj,
+ Label* miss_label) {
+ __ pop(scratch); // save old return address
+ PushInterceptorArguments(masm, receiver, holder, name_, holder_obj);
+ __ push(scratch); // restore old return address
+
+ ExternalReference ref = ExternalReference(
+ IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
+ __ TailCallRuntime(ref, 5);
+ }
+
+ private:
+ Register name_;
+};
+
+
+class CallInterceptorCompiler BASE_EMBEDDED {
+ public:
+ explicit CallInterceptorCompiler(const ParameterCount& arguments)
+ : arguments_(arguments), argc_(arguments.immediate()) {}
+
+ void CompileCacheable(MacroAssembler* masm,
+ StubCompiler* stub_compiler,
+ Register receiver,
+ Register holder,
+ Register scratch1,
+ Register scratch2,
+ JSObject* holder_obj,
+ LookupResult* lookup,
+ String* name,
+ Label* miss_label) {
+ JSFunction* function = 0;
+ bool optimize = false;
+ // So far the most popular case for failed interceptor is
+ // CONSTANT_FUNCTION sitting below.
+ if (lookup->type() == CONSTANT_FUNCTION) {
+ function = lookup->GetConstantFunction();
+ // JSArray holder is a special case for call constant function
+ // (see the corresponding code).
+ if (function->is_compiled() && !holder_obj->IsJSArray()) {
+ optimize = true;
+ }
+ }
+
+ if (!optimize) {
+ CompileRegular(masm, receiver, holder, scratch2, holder_obj, miss_label);
+ return;
+ }
+
+ __ EnterInternalFrame();
+ __ push(holder); // save the holder
+
+ CompileCallLoadPropertyWithInterceptor(
+ masm,
+ receiver,
+ holder,
+ // Under EnterInternalFrame this refers to name.
+ Operand(ebp, (argc_ + 3) * kPointerSize),
+ holder_obj);
+
+ __ pop(receiver); // restore holder
+ __ LeaveInternalFrame();
+
+ __ cmp(eax, Factory::no_interceptor_result_sentinel());
+ Label invoke;
+ __ j(not_equal, &invoke);
+
+ stub_compiler->CheckPrototypes(holder_obj, receiver,
+ lookup->holder(), scratch1,
+ scratch2,
+ name,
+ miss_label);
+ if (lookup->holder()->IsGlobalObject()) {
+ __ mov(edx, Operand(esp, (argc_ + 1) * kPointerSize));
+ __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
+ __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edx);
+ }
+
+ ASSERT(function->is_compiled());
+ // Get the function and setup the context.
+ __ mov(edi, Immediate(Handle<JSFunction>(function)));
+ __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
+
+ // Jump to the cached code (tail call).
+ ASSERT(function->is_compiled());
+ Handle<Code> code(function->code());
+ ParameterCount expected(function->shared()->formal_parameter_count());
+ __ InvokeCode(code, expected, arguments_,
+ RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+
+ __ bind(&invoke);
+ }
+
+ void CompileRegular(MacroAssembler* masm,
+ Register receiver,
+ Register holder,
+ Register scratch,
+ JSObject* holder_obj,
+ Label* miss_label) {
+ __ EnterInternalFrame();
+
+ PushInterceptorArguments(masm,
+ receiver,
+ holder,
+ Operand(ebp, (argc_ + 3) * kPointerSize),
+ holder_obj);
+
+ ExternalReference ref = ExternalReference(
+ IC_Utility(IC::kLoadPropertyWithInterceptorForCall));
+ __ mov(eax, Immediate(5));
+ __ mov(ebx, Immediate(ref));
+
+ CEntryStub stub;
+ __ CallStub(&stub);
+
+ __ LeaveInternalFrame();
+ }
+
+ private:
+ const ParameterCount& arguments_;
+ int argc_;
+};
+
+
void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
Code* code = NULL;
@@ -447,15 +779,17 @@
// Push the arguments on the JS stack of the caller.
__ pop(scratch2); // remove return address
__ push(receiver); // receiver
- __ push(Immediate(Handle<AccessorInfo>(callback))); // callback data
- __ push(name_reg); // name
__ push(reg); // holder
+ __ mov(reg, Immediate(Handle<AccessorInfo>(callback))); // callback data
+ __ push(reg);
+ __ push(FieldOperand(reg, AccessorInfo::kDataOffset));
+ __ push(name_reg); // name
__ push(scratch2); // restore return address
// Do tail-call to the runtime system.
ExternalReference load_callback_property =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
- __ TailCallRuntime(load_callback_property, 4);
+ __ TailCallRuntime(load_callback_property, 5);
}
@@ -484,36 +818,25 @@
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
JSObject* holder,
- Smi* lookup_hint,
+ LookupResult* lookup,
Register receiver,
Register name_reg,
Register scratch1,
Register scratch2,
String* name,
Label* miss) {
- // Check that the receiver isn't a smi.
- __ test(receiver, Immediate(kSmiTagMask));
- __ j(zero, miss, not_taken);
-
- // Check that the maps haven't changed.
- Register reg =
- CheckPrototypes(object, receiver, holder,
- scratch1, scratch2, name, miss);
-
- // Push the arguments on the JS stack of the caller.
- __ pop(scratch2); // remove return address
- __ push(receiver); // receiver
- __ push(reg); // holder
- __ push(name_reg); // name
- // TODO(367): Maybe don't push lookup_hint for LOOKUP_IN_HOLDER and/or
- // LOOKUP_IN_PROTOTYPE, but use a special version of lookup method?
- __ push(Immediate(lookup_hint));
- __ push(scratch2); // restore return address
-
- // Do tail-call to the runtime system.
- ExternalReference load_ic_property =
- ExternalReference(IC_Utility(IC::kLoadInterceptorProperty));
- __ TailCallRuntime(load_ic_property, 4);
+ LoadInterceptorCompiler compiler(name_reg);
+ CompileLoadInterceptor(&compiler,
+ this,
+ masm(),
+ object,
+ holder,
+ name,
+ lookup,
+ receiver,
+ scratch1,
+ scratch2,
+ miss);
}
@@ -678,13 +1001,13 @@
case JSARRAY_HAS_FAST_ELEMENTS_CHECK:
CheckPrototypes(JSObject::cast(object), edx, holder,
ebx, ecx, name, &miss);
- // Make sure object->elements()->map() != Heap::dictionary_array_map()
+ // Make sure object->HasFastElements().
// Get the elements array of the object.
__ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary).
__ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
- Immediate(Factory::hash_table_map()));
- __ j(equal, &miss, not_taken);
+ Immediate(Factory::fixed_array_map()));
+ __ j(not_equal, &miss, not_taken);
break;
default:
@@ -726,47 +1049,32 @@
// Get the number of arguments.
const int argc = arguments().immediate();
+ LookupResult lookup;
+ LookupPostInterceptor(holder, name, &lookup);
+
// Get the receiver from the stack.
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
- // Check that the receiver isn't a smi.
- __ test(edx, Immediate(kSmiTagMask));
- __ j(zero, &miss, not_taken);
+ CallInterceptorCompiler compiler(arguments());
+ CompileLoadInterceptor(&compiler,
+ this,
+ masm(),
+ JSObject::cast(object),
+ holder,
+ name,
+ &lookup,
+ edx,
+ ebx,
+ ecx,
+ &miss);
- // Check that maps have not changed and compute the holder register.
- Register reg =
- CheckPrototypes(JSObject::cast(object), edx, holder,
- ebx, ecx, name, &miss);
-
- // Enter an internal frame.
- __ EnterInternalFrame();
-
- // Push arguments on the expression stack.
- __ push(edx); // receiver
- __ push(reg); // holder
- __ push(Operand(ebp, (argc + 3) * kPointerSize)); // name
- __ push(Immediate(holder->InterceptorPropertyLookupHint(name)));
-
- // Perform call.
- ExternalReference load_interceptor =
- ExternalReference(IC_Utility(IC::kLoadInterceptorProperty));
- __ mov(eax, Immediate(4));
- __ mov(ebx, Immediate(load_interceptor));
-
- CEntryStub stub;
- __ CallStub(&stub);
-
- // Move result to edi and restore receiver.
- __ mov(edi, eax);
- __ mov(edx, Operand(ebp, (argc + 2) * kPointerSize)); // receiver
-
- // Exit frame.
- __ LeaveInternalFrame();
+ // Restore receiver.
+ __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
// Check that the function really is a function.
- __ test(edi, Immediate(kSmiTagMask));
+ __ test(eax, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken);
- __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
+ __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
__ j(not_equal, &miss, not_taken);
// Patch the receiver on the stack with the global proxy if
@@ -777,6 +1085,7 @@
}
// Invoke the function.
+ __ mov(edi, eax);
__ InvokeFunction(edi, arguments(), JUMP_FUNCTION);
// Handle load cache miss.
@@ -798,8 +1107,6 @@
// -----------------------------------
Label miss;
- __ IncrementCounter(&Counters::call_global_inline, 1);
-
// Get the number of arguments.
const int argc = arguments().immediate();
@@ -835,6 +1142,7 @@
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
+ __ IncrementCounter(&Counters::call_global_inline, 1);
ASSERT(function->is_compiled());
Handle<Code> code(function->code());
ParameterCount expected(function->shared()->formal_parameter_count());
@@ -843,7 +1151,6 @@
// Handle call cache miss.
__ bind(&miss);
- __ DecrementCounter(&Counters::call_global_inline, 1);
__ IncrementCounter(&Counters::call_global_inline_miss, 1);
Handle<Code> ic = ComputeCallMiss(arguments().immediate());
__ jmp(ic, RelocInfo::CODE_TARGET);
@@ -1007,8 +1314,6 @@
// -----------------------------------
Label miss;
- __ IncrementCounter(&Counters::named_store_global_inline, 1);
-
// Check that the map of the global has not changed.
__ mov(ebx, Operand(esp, kPointerSize));
__ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
@@ -1020,11 +1325,11 @@
__ mov(FieldOperand(ecx, JSGlobalPropertyCell::kValueOffset), eax);
// Return the value (register eax).
+ __ IncrementCounter(&Counters::named_store_global_inline, 1);
__ ret(0);
// Handle store cache miss.
__ bind(&miss);
- __ DecrementCounter(&Counters::named_store_global_inline, 1);
__ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
__ jmp(ic, RelocInfo::CODE_TARGET);
@@ -1152,12 +1457,15 @@
// -----------------------------------
Label miss;
+ LookupResult lookup;
+ LookupPostInterceptor(holder, name, &lookup);
+
__ mov(eax, Operand(esp, kPointerSize));
// TODO(368): Compile in the whole chain: all the interceptors in
// prototypes and ultimate answer.
GenerateLoadInterceptor(receiver,
holder,
- holder->InterceptorPropertyLookupHint(name),
+ &lookup,
eax,
ecx,
edx,
@@ -1185,8 +1493,6 @@
// -----------------------------------
Label miss;
- __ IncrementCounter(&Counters::named_load_global_inline, 1);
-
// Get the receiver from the stack.
__ mov(eax, Operand(esp, kPointerSize));
@@ -1214,10 +1520,10 @@
__ Check(not_equal, "DontDelete cells can't contain the hole");
}
+ __ IncrementCounter(&Counters::named_load_global_inline, 1);
__ ret(0);
__ bind(&miss);
- __ DecrementCounter(&Counters::named_load_global_inline, 1);
__ IncrementCounter(&Counters::named_load_global_inline_miss, 1);
GenerateLoadMiss(masm(), Code::LOAD_IC);
@@ -1334,9 +1640,11 @@
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
__ j(not_equal, &miss, not_taken);
+ LookupResult lookup;
+ LookupPostInterceptor(holder, name, &lookup);
GenerateLoadInterceptor(receiver,
holder,
- Smi::FromInt(JSObject::kLookupInHolder),
+ &lookup,
ecx,
eax,
edx,
diff --git a/src/ic.cc b/src/ic.cc
index b02556a..f4d74c9 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -737,8 +737,6 @@
return TypeError("non_object_property_load", object, name);
}
- // TODO(X64): Enable specialized stubs for length and prototype lookup.
-#ifndef V8_TARGET_ARCH_X64
if (FLAG_use_ic) {
// Use specialized code for getting the length of strings.
if (object->IsString() && name->Equals(Heap::length_symbol())) {
@@ -778,7 +776,6 @@
return Accessors::FunctionGetPrototype(*object, 0);
}
}
-#endif // !V8_TARGET_ARCH_X64
// Check if the name is trivially convertible to an index and get
// the element or char if so.
@@ -801,13 +798,9 @@
}
}
- // TODO(X64): Enable inline caching for load.
-#ifndef V8_TARGET_ARCH_X64
- // Update the inline cache.
if (FLAG_use_ic && lookup.IsLoaded()) {
UpdateCaches(&lookup, state, object, name);
}
-#endif
PropertyAttributes attr;
if (lookup.IsValid() && lookup.type() == INTERCEPTOR) {
@@ -978,10 +971,6 @@
return *value;
}
- // TODO(X64): Enable inline cache for StoreIC.
-#ifdef V8_TARGET_ARCH_X64
- USE(&LookupForWrite); // The compiler complains otherwise.
-#else
// Lookup the property locally in the receiver.
if (FLAG_use_ic && !receiver->IsJSGlobalProxy()) {
LookupResult lookup;
@@ -989,7 +978,6 @@
UpdateCaches(&lookup, state, receiver, name, value);
}
}
-#endif
// Set the property.
return receiver->SetProperty(*name, *value, NONE);
@@ -1108,13 +1096,10 @@
LookupResult lookup;
receiver->LocalLookup(*name, &lookup);
- // TODO(X64): Enable inline cache for KeyedStoreIC.
-#ifndef V8_TARGET_ARCH_X64
// Update inline cache and stub cache.
if (FLAG_use_ic && lookup.IsLoaded()) {
UpdateCaches(&lookup, state, receiver, name, value);
}
-#endif
// Set the property.
return receiver->SetProperty(*name, *value, NONE);
diff --git a/src/ic.h b/src/ic.h
index 593519b..860b7e6 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -35,17 +35,19 @@
// IC_UTIL_LIST defines all utility functions called from generated
// inline caching code. The argument for the macro, ICU, is the function name.
-#define IC_UTIL_LIST(ICU) \
- ICU(LoadIC_Miss) \
- ICU(KeyedLoadIC_Miss) \
- ICU(CallIC_Miss) \
- ICU(StoreIC_Miss) \
- ICU(SharedStoreIC_ExtendStorage) \
- ICU(KeyedStoreIC_Miss) \
- /* Utilities for IC stubs. */ \
- ICU(LoadCallbackProperty) \
- ICU(StoreCallbackProperty) \
- ICU(LoadInterceptorProperty) \
+#define IC_UTIL_LIST(ICU) \
+ ICU(LoadIC_Miss) \
+ ICU(KeyedLoadIC_Miss) \
+ ICU(CallIC_Miss) \
+ ICU(StoreIC_Miss) \
+ ICU(SharedStoreIC_ExtendStorage) \
+ ICU(KeyedStoreIC_Miss) \
+ /* Utilities for IC stubs. */ \
+ ICU(LoadCallbackProperty) \
+ ICU(StoreCallbackProperty) \
+ ICU(LoadPropertyWithInterceptorOnly) \
+ ICU(LoadPropertyWithInterceptorForLoad) \
+ ICU(LoadPropertyWithInterceptorForCall) \
ICU(StoreInterceptorProperty)
//
@@ -387,6 +389,10 @@
// Support for patching the map that is checked in an inlined
// version of keyed store.
+ // The address is the patch point for the IC call
+ // (Assembler::kTargetAddrToReturnAddrDist before the end of
+ // the call/return address).
+ // The map is the new map that the inlined code should check against.
static bool PatchInlinedStore(Address address, Object* map);
friend class IC;
diff --git a/src/jsregexp-inl.h b/src/jsregexp-inl.h
deleted file mode 100644
index cc90bd1..0000000
--- a/src/jsregexp-inl.h
+++ /dev/null
@@ -1,260 +0,0 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef V8_JSREGEXP_INL_H_
-#define V8_JSREGEXP_INL_H_
-
-
-#include "jsregexp.h"
-#include "regexp-macro-assembler.h"
-
-
-namespace v8 {
-namespace internal {
-
-
-template <typename C>
-bool ZoneSplayTree<C>::Insert(const Key& key, Locator* locator) {
- if (is_empty()) {
- // If the tree is empty, insert the new node.
- root_ = new Node(key, C::kNoValue);
- } else {
- // Splay on the key to move the last node on the search path
- // for the key to the root of the tree.
- Splay(key);
- // Ignore repeated insertions with the same key.
- int cmp = C::Compare(key, root_->key_);
- if (cmp == 0) {
- locator->bind(root_);
- return false;
- }
- // Insert the new node.
- Node* node = new Node(key, C::kNoValue);
- if (cmp > 0) {
- node->left_ = root_;
- node->right_ = root_->right_;
- root_->right_ = NULL;
- } else {
- node->right_ = root_;
- node->left_ = root_->left_;
- root_->left_ = NULL;
- }
- root_ = node;
- }
- locator->bind(root_);
- return true;
-}
-
-
-template <typename C>
-bool ZoneSplayTree<C>::Find(const Key& key, Locator* locator) {
- if (is_empty())
- return false;
- Splay(key);
- if (C::Compare(key, root_->key_) == 0) {
- locator->bind(root_);
- return true;
- } else {
- return false;
- }
-}
-
-
-template <typename C>
-bool ZoneSplayTree<C>::FindGreatestLessThan(const Key& key,
- Locator* locator) {
- if (is_empty())
- return false;
- // Splay on the key to move the node with the given key or the last
- // node on the search path to the top of the tree.
- Splay(key);
- // Now the result is either the root node or the greatest node in
- // the left subtree.
- int cmp = C::Compare(root_->key_, key);
- if (cmp <= 0) {
- locator->bind(root_);
- return true;
- } else {
- Node* temp = root_;
- root_ = root_->left_;
- bool result = FindGreatest(locator);
- root_ = temp;
- return result;
- }
-}
-
-
-template <typename C>
-bool ZoneSplayTree<C>::FindLeastGreaterThan(const Key& key,
- Locator* locator) {
- if (is_empty())
- return false;
- // Splay on the key to move the node with the given key or the last
- // node on the search path to the top of the tree.
- Splay(key);
- // Now the result is either the root node or the least node in
- // the right subtree.
- int cmp = C::Compare(root_->key_, key);
- if (cmp >= 0) {
- locator->bind(root_);
- return true;
- } else {
- Node* temp = root_;
- root_ = root_->right_;
- bool result = FindLeast(locator);
- root_ = temp;
- return result;
- }
-}
-
-
-template <typename C>
-bool ZoneSplayTree<C>::FindGreatest(Locator* locator) {
- if (is_empty())
- return false;
- Node* current = root_;
- while (current->right_ != NULL)
- current = current->right_;
- locator->bind(current);
- return true;
-}
-
-
-template <typename C>
-bool ZoneSplayTree<C>::FindLeast(Locator* locator) {
- if (is_empty())
- return false;
- Node* current = root_;
- while (current->left_ != NULL)
- current = current->left_;
- locator->bind(current);
- return true;
-}
-
-
-template <typename C>
-bool ZoneSplayTree<C>::Remove(const Key& key) {
- // Bail if the tree is empty
- if (is_empty())
- return false;
- // Splay on the key to move the node with the given key to the top.
- Splay(key);
- // Bail if the key is not in the tree
- if (C::Compare(key, root_->key_) != 0)
- return false;
- if (root_->left_ == NULL) {
- // No left child, so the new tree is just the right child.
- root_ = root_->right_;
- } else {
- // Left child exists.
- Node* right = root_->right_;
- // Make the original left child the new root.
- root_ = root_->left_;
- // Splay to make sure that the new root has an empty right child.
- Splay(key);
- // Insert the original right child as the right child of the new
- // root.
- root_->right_ = right;
- }
- return true;
-}
-
-
-template <typename C>
-void ZoneSplayTree<C>::Splay(const Key& key) {
- if (is_empty())
- return;
- Node dummy_node(C::kNoKey, C::kNoValue);
- // Create a dummy node. The use of the dummy node is a bit
- // counter-intuitive: The right child of the dummy node will hold
- // the L tree of the algorithm. The left child of the dummy node
- // will hold the R tree of the algorithm. Using a dummy node, left
- // and right will always be nodes and we avoid special cases.
- Node* dummy = &dummy_node;
- Node* left = dummy;
- Node* right = dummy;
- Node* current = root_;
- while (true) {
- int cmp = C::Compare(key, current->key_);
- if (cmp < 0) {
- if (current->left_ == NULL)
- break;
- if (C::Compare(key, current->left_->key_) < 0) {
- // Rotate right.
- Node* temp = current->left_;
- current->left_ = temp->right_;
- temp->right_ = current;
- current = temp;
- if (current->left_ == NULL)
- break;
- }
- // Link right.
- right->left_ = current;
- right = current;
- current = current->left_;
- } else if (cmp > 0) {
- if (current->right_ == NULL)
- break;
- if (C::Compare(key, current->right_->key_) > 0) {
- // Rotate left.
- Node* temp = current->right_;
- current->right_ = temp->left_;
- temp->left_ = current;
- current = temp;
- if (current->right_ == NULL)
- break;
- }
- // Link left.
- left->right_ = current;
- left = current;
- current = current->right_;
- } else {
- break;
- }
- }
- // Assemble.
- left->right_ = current->left_;
- right->left_ = current->right_;
- current->left_ = dummy->right_;
- current->right_ = dummy->left_;
- root_ = current;
-}
-
-
-template <typename Node, class Callback>
-static void DoForEach(Node* node, Callback* callback) {
- if (node == NULL) return;
- DoForEach<Node, Callback>(node->left(), callback);
- callback->Call(node->key(), node->value());
- DoForEach<Node, Callback>(node->right(), callback);
-}
-
-
-}} // namespace v8::internal
-
-
-#endif // V8_JSREGEXP_INL_H_
diff --git a/src/jsregexp.cc b/src/jsregexp.cc
index 852d431..bd51102 100644
--- a/src/jsregexp.cc
+++ b/src/jsregexp.cc
@@ -31,7 +31,7 @@
#include "compiler.h"
#include "execution.h"
#include "factory.h"
-#include "jsregexp-inl.h"
+#include "jsregexp.h"
#include "platform.h"
#include "runtime.h"
#include "top.h"
@@ -254,7 +254,7 @@
{
NoHandleAllocation no_handles;
- FixedArray* array = last_match_info->elements();
+ FixedArray* array = FixedArray::cast(last_match_info->elements());
SetAtomLastCapture(array, *subject, value, value + needle->length());
}
return last_match_info;
@@ -442,7 +442,7 @@
if (res != RegExpMacroAssemblerIA32::SUCCESS) return Factory::null_value();
- array = Handle<FixedArray>(last_match_info->elements());
+ array = Handle<FixedArray>(FixedArray::cast(last_match_info->elements()));
ASSERT(array->length() >= number_of_capture_registers + kLastMatchOverhead);
// The captures come in (start, end+1) pairs.
for (int i = 0; i < number_of_capture_registers; i += 2) {
@@ -475,7 +475,7 @@
return Factory::null_value();
}
- array = Handle<FixedArray>(last_match_info->elements());
+ array = Handle<FixedArray>(FixedArray::cast(last_match_info->elements()));
ASSERT(array->length() >= number_of_capture_registers + kLastMatchOverhead);
// The captures come in (start, end+1) pairs.
for (int i = 0; i < number_of_capture_registers; i += 2) {
diff --git a/src/jsregexp.h b/src/jsregexp.h
index 0e7965c..3bc30b6 100644
--- a/src/jsregexp.h
+++ b/src/jsregexp.h
@@ -214,108 +214,6 @@
};
-template <typename Node, class Callback>
-static void DoForEach(Node* node, Callback* callback);
-
-
-// A zone splay tree. The config type parameter encapsulates the
-// different configurations of a concrete splay tree:
-//
-// typedef Key: the key type
-// typedef Value: the value type
-// static const kNoKey: the dummy key used when no key is set
-// static const kNoValue: the dummy value used to initialize nodes
-// int (Compare)(Key& a, Key& b) -> {-1, 0, 1}: comparison function
-//
-template <typename Config>
-class ZoneSplayTree : public ZoneObject {
- public:
- typedef typename Config::Key Key;
- typedef typename Config::Value Value;
-
- class Locator;
-
- ZoneSplayTree() : root_(NULL) { }
-
- // Inserts the given key in this tree with the given value. Returns
- // true if a node was inserted, otherwise false. If found the locator
- // is enabled and provides access to the mapping for the key.
- bool Insert(const Key& key, Locator* locator);
-
- // Looks up the key in this tree and returns true if it was found,
- // otherwise false. If the node is found the locator is enabled and
- // provides access to the mapping for the key.
- bool Find(const Key& key, Locator* locator);
-
- // Finds the mapping with the greatest key less than or equal to the
- // given key.
- bool FindGreatestLessThan(const Key& key, Locator* locator);
-
- // Find the mapping with the greatest key in this tree.
- bool FindGreatest(Locator* locator);
-
- // Finds the mapping with the least key greater than or equal to the
- // given key.
- bool FindLeastGreaterThan(const Key& key, Locator* locator);
-
- // Find the mapping with the least key in this tree.
- bool FindLeast(Locator* locator);
-
- // Remove the node with the given key from the tree.
- bool Remove(const Key& key);
-
- bool is_empty() { return root_ == NULL; }
-
- // Perform the splay operation for the given key. Moves the node with
- // the given key to the top of the tree. If no node has the given
- // key, the last node on the search path is moved to the top of the
- // tree.
- void Splay(const Key& key);
-
- class Node : public ZoneObject {
- public:
- Node(const Key& key, const Value& value)
- : key_(key),
- value_(value),
- left_(NULL),
- right_(NULL) { }
- Key key() { return key_; }
- Value value() { return value_; }
- Node* left() { return left_; }
- Node* right() { return right_; }
- private:
- friend class ZoneSplayTree;
- friend class Locator;
- Key key_;
- Value value_;
- Node* left_;
- Node* right_;
- };
-
- // A locator provides access to a node in the tree without actually
- // exposing the node.
- class Locator {
- public:
- explicit Locator(Node* node) : node_(node) { }
- Locator() : node_(NULL) { }
- const Key& key() { return node_->key_; }
- Value& value() { return node_->value_; }
- void set_value(const Value& value) { node_->value_ = value; }
- inline void bind(Node* node) { node_ = node; }
- private:
- Node* node_;
- };
-
- template <class Callback>
- void ForEach(Callback* c) {
- DoForEach<typename ZoneSplayTree<Config>::Node, Callback>(root_, c);
- }
-
- private:
- Node* root_;
-};
-
-
// A set of unsigned integers that behaves especially well on small
// integers (< 32). May do zone-allocation.
class OutSet: public ZoneObject {
diff --git a/src/log.cc b/src/log.cc
index 33cf8e2..0c1b76d 100644
--- a/src/log.cc
+++ b/src/log.cc
@@ -884,6 +884,21 @@
}
+void Logger::HeapSampleJSConstructorEvent(const char* constructor,
+ int number, int bytes) {
+#ifdef ENABLE_LOGGING_AND_PROFILING
+ if (!Log::IsEnabled() || !FLAG_log_gc) return;
+ LogMessageBuilder msg;
+ msg.Append("heap-js-cons-item,%s,%d,%d\n",
+ constructor != NULL ?
+ (constructor[0] != '\0' ? constructor : "(anonymous)") :
+ "(no_constructor)",
+ number, bytes);
+ msg.WriteToLogFile();
+#endif
+}
+
+
void Logger::DebugTag(const char* call_site_tag) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!Log::IsEnabled() || !FLAG_log) return;
diff --git a/src/log.h b/src/log.h
index 95c9cde..1692e77 100644
--- a/src/log.h
+++ b/src/log.h
@@ -219,6 +219,8 @@
static void HeapSampleBeginEvent(const char* space, const char* kind);
static void HeapSampleEndEvent(const char* space, const char* kind);
static void HeapSampleItemEvent(const char* type, int number, int bytes);
+ static void HeapSampleJSConstructorEvent(const char* constructor,
+ int number, int bytes);
static void HeapSampleStats(const char* space, const char* kind,
int capacity, int used);
diff --git a/src/messages.js b/src/messages.js
index 870c969..fd505ff 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -561,20 +561,24 @@
var kAddMessageAccessorsMarker = { };
// Defines accessors for a property that is calculated the first time
-// the property is read and then replaces the accessor with the value.
-// Also, setting the property causes the accessors to be deleted.
+// the property is read.
function DefineOneShotAccessor(obj, name, fun) {
// Note that the accessors consistently operate on 'obj', not 'this'.
// Since the object may occur in someone else's prototype chain we
// can't rely on 'this' being the same as 'obj'.
+ var hasBeenSet = false;
+ var value;
obj.__defineGetter__(name, function () {
- var value = fun(obj);
- obj[name] = value;
+ if (hasBeenSet) {
+ return value;
+ }
+ hasBeenSet = true;
+ value = fun(obj);
return value;
});
obj.__defineSetter__(name, function (v) {
- delete obj[name];
- obj[name] = v;
+ hasBeenSet = true;
+ value = v;
});
}
@@ -833,22 +837,25 @@
} else if (!IS_UNDEFINED(m)) {
this.message = ToString(m);
}
- var stackTraceLimit = $Error.stackTraceLimit;
- if (stackTraceLimit) {
- // Cap the limit to avoid extremely big traces
- if (stackTraceLimit < 0 || stackTraceLimit > 10000)
- stackTraceLimit = 10000;
- var raw_stack = %CollectStackTrace(f, stackTraceLimit);
- DefineOneShotAccessor(this, 'stack', function (obj) {
- return FormatRawStackTrace(obj, raw_stack);
- });
- }
+ captureStackTrace(this, f);
} else {
return new f(m);
}
});
}
+function captureStackTrace(obj, cons_opt) {
+ var stackTraceLimit = $Error.stackTraceLimit;
+ if (!stackTraceLimit) return;
+ if (stackTraceLimit < 0 || stackTraceLimit > 10000)
+ stackTraceLimit = 10000;
+ var raw_stack = %CollectStackTrace(cons_opt ? cons_opt : captureStackTrace,
+ stackTraceLimit);
+ DefineOneShotAccessor(obj, 'stack', function (obj) {
+ return FormatRawStackTrace(obj, raw_stack);
+ });
+};
+
$Math.__proto__ = global.Object.prototype;
DefineError(function Error() { });
@@ -859,6 +866,8 @@
DefineError(function EvalError() { });
DefineError(function URIError() { });
+$Error.captureStackTrace = captureStackTrace;
+
// Setup extra properties of the Error.prototype object.
$Error.prototype.message = '';
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index 8c57afd..40001f9 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -115,6 +115,9 @@
case BYTE_ARRAY_TYPE:
ByteArray::cast(this)->ByteArrayPrint();
break;
+ case PIXEL_ARRAY_TYPE:
+ PixelArray::cast(this)->PixelArrayPrint();
+ break;
case FILLER_TYPE:
PrintF("filler");
break;
@@ -191,6 +194,9 @@
case BYTE_ARRAY_TYPE:
ByteArray::cast(this)->ByteArrayVerify();
break;
+ case PIXEL_ARRAY_TYPE:
+ PixelArray::cast(this)->PixelArrayVerify();
+ break;
case CODE_TYPE:
Code::cast(this)->CodeVerify();
break;
@@ -264,11 +270,21 @@
}
+void PixelArray::PixelArrayPrint() {
+ PrintF("pixel array");
+}
+
+
void ByteArray::ByteArrayVerify() {
ASSERT(IsByteArray());
}
+void PixelArray::PixelArrayVerify() {
+ ASSERT(IsPixelArray());
+}
+
+
void JSObject::PrintProperties() {
if (HasFastProperties()) {
DescriptorArray* descs = map()->instance_descriptors();
@@ -312,15 +328,30 @@
void JSObject::PrintElements() {
- if (HasFastElements()) {
- FixedArray* p = FixedArray::cast(elements());
- for (int i = 0; i < p->length(); i++) {
- PrintF(" %d: ", i);
- p->get(i)->ShortPrint();
- PrintF("\n");
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ // Print in array notation for non-sparse arrays.
+ FixedArray* p = FixedArray::cast(elements());
+ for (int i = 0; i < p->length(); i++) {
+ PrintF(" %d: ", i);
+ p->get(i)->ShortPrint();
+ PrintF("\n");
+ }
+ break;
}
- } else {
- elements()->Print();
+ case PIXEL_ELEMENTS: {
+ PixelArray* p = PixelArray::cast(elements());
+ for (int i = 0; i < p->length(); i++) {
+ PrintF(" %d: %d\n", i, p->get(i));
+ }
+ break;
+ }
+ case DICTIONARY_ELEMENTS:
+ elements()->Print();
+ break;
+ default:
+ UNREACHABLE();
+ break;
}
}
@@ -402,6 +433,7 @@
case LONG_EXTERNAL_STRING_TYPE: return "EXTERNAL_STRING";
case FIXED_ARRAY_TYPE: return "FIXED_ARRAY";
case BYTE_ARRAY_TYPE: return "BYTE_ARRAY";
+ case PIXEL_ARRAY_TYPE: return "PIXEL_ARRAY";
case FILLER_TYPE: return "FILLER";
case JS_OBJECT_TYPE: return "JS_OBJECT";
case JS_CONTEXT_EXTENSION_OBJECT_TYPE: return "JS_CONTEXT_EXTENSION_OBJECT";
@@ -666,7 +698,7 @@
} else {
ASSERT(number->IsSmi());
int value = Smi::cast(number)->value();
- ASSERT(value == 0 || value == 1 || value == -1);
+ ASSERT(value == 0 || value == 1 || value == -1 || value == -2);
}
}
@@ -1015,21 +1047,35 @@
dict->Capacity() - dict->NumberOfElements();
}
// Indexed properties
- if (HasFastElements()) {
- info->number_of_objects_with_fast_elements_++;
- int holes = 0;
- FixedArray* e = FixedArray::cast(elements());
- int len = e->length();
- for (int i = 0; i < len; i++) {
- if (e->get(i) == Heap::the_hole_value()) holes++;
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ info->number_of_objects_with_fast_elements_++;
+ int holes = 0;
+ FixedArray* e = FixedArray::cast(elements());
+ int len = e->length();
+ for (int i = 0; i < len; i++) {
+ if (e->get(i) == Heap::the_hole_value()) holes++;
+ }
+ info->number_of_fast_used_elements_ += len - holes;
+ info->number_of_fast_unused_elements_ += holes;
+ break;
}
- info->number_of_fast_used_elements_ += len - holes;
- info->number_of_fast_unused_elements_ += holes;
- } else {
- NumberDictionary* dict = element_dictionary();
- info->number_of_slow_used_elements_ += dict->NumberOfElements();
- info->number_of_slow_unused_elements_ +=
- dict->Capacity() - dict->NumberOfElements();
+ case PIXEL_ELEMENTS: {
+ info->number_of_objects_with_fast_elements_++;
+ PixelArray* e = PixelArray::cast(elements());
+ info->number_of_fast_used_elements_ += e->length();
+ break;
+ }
+ case DICTIONARY_ELEMENTS: {
+ NumberDictionary* dict = element_dictionary();
+ info->number_of_slow_used_elements_ += dict->NumberOfElements();
+ info->number_of_slow_unused_elements_ +=
+ dict->Capacity() - dict->NumberOfElements();
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
}
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 7abc7c3..c7f791c 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -321,6 +321,12 @@
}
+bool Object::IsPixelArray() {
+ return Object::IsHeapObject() &&
+ HeapObject::cast(this)->map()->instance_type() == PIXEL_ARRAY_TYPE;
+}
+
+
bool Object::IsFailure() {
return HAS_FAILURE_TAG(this);
}
@@ -1043,7 +1049,22 @@
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
-ACCESSORS(JSObject, elements, FixedArray, kElementsOffset)
+
+
+Array* JSObject::elements() {
+ Object* array = READ_FIELD(this, kElementsOffset);
+ // In the assert below Dictionary is covered under FixedArray.
+ ASSERT(array->IsFixedArray() || array->IsPixelArray());
+ return reinterpret_cast<Array*>(array);
+}
+
+
+void JSObject::set_elements(Array* value, WriteBarrierMode mode) {
+ // In the assert below Dictionary is covered under FixedArray.
+ ASSERT(value->IsFixedArray() || value->IsPixelArray());
+ WRITE_FIELD(this, kElementsOffset, value);
+ CONDITIONAL_WRITE_BARRIER(this, kElementsOffset, mode);
+}
void JSObject::initialize_properties() {
@@ -1502,6 +1523,7 @@
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(Proxy)
CAST_ACCESSOR(ByteArray)
+CAST_ACCESSOR(PixelArray)
CAST_ACCESSOR(Struct)
@@ -1860,6 +1882,32 @@
}
+uint8_t* PixelArray::external_pointer() {
+ intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
+ return reinterpret_cast<uint8_t*>(ptr);
+}
+
+
+void PixelArray::set_external_pointer(uint8_t* value, WriteBarrierMode mode) {
+ intptr_t ptr = reinterpret_cast<intptr_t>(value);
+ WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
+}
+
+
+uint8_t PixelArray::get(int index) {
+ ASSERT((index >= 0) && (index < this->length()));
+ uint8_t* ptr = external_pointer();
+ return ptr[index];
+}
+
+
+void PixelArray::set(int index, uint8_t value) {
+ ASSERT((index >= 0) && (index < this->length()));
+ uint8_t* ptr = external_pointer();
+ ptr[index] = value;
+}
+
+
int Map::instance_size() {
return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}
@@ -2293,6 +2341,11 @@
}
+bool JSFunction::IsBuiltin() {
+ return context()->global()->IsJSBuiltinsObject();
+}
+
+
bool JSObject::IsLoaded() {
return !map()->needs_loading();
}
@@ -2523,8 +2576,33 @@
}
+JSObject::ElementsKind JSObject::GetElementsKind() {
+ Array* array = elements();
+ if (array->IsFixedArray()) {
+ // FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a FixedArray.
+ if (array->map() == Heap::fixed_array_map()) {
+ return FAST_ELEMENTS;
+ }
+ ASSERT(array->IsDictionary());
+ return DICTIONARY_ELEMENTS;
+ }
+ ASSERT(array->IsPixelArray());
+ return PIXEL_ELEMENTS;
+}
+
+
bool JSObject::HasFastElements() {
- return !elements()->IsDictionary();
+ return GetElementsKind() == FAST_ELEMENTS;
+}
+
+
+bool JSObject::HasDictionaryElements() {
+ return GetElementsKind() == DICTIONARY_ELEMENTS;
+}
+
+
+bool JSObject::HasPixelElements() {
+ return GetElementsKind() == PIXEL_ELEMENTS;
}
@@ -2545,7 +2623,7 @@
NumberDictionary* JSObject::element_dictionary() {
- ASSERT(!HasFastElements());
+ ASSERT(HasDictionaryElements());
return NumberDictionary::cast(elements());
}
@@ -2651,24 +2729,6 @@
}
-Smi* JSObject::InterceptorPropertyLookupHint(String* name) {
- // TODO(antonm): Do we want to do any shortcuts for global object?
- if (HasFastProperties()) {
- LookupResult lookup;
- LocalLookupRealNamedProperty(name, &lookup);
- if (lookup.IsValid()) {
- if (lookup.type() == FIELD && lookup.IsCacheable()) {
- return Smi::FromInt(lookup.GetFieldIndex());
- }
- } else {
- return Smi::FromInt(kLookupInPrototype);
- }
- }
-
- return Smi::FromInt(kLookupInHolder);
-}
-
-
bool AccessorInfo::all_can_read() {
return BooleanBit::get(flag(), kAllCanReadBit);
}
diff --git a/src/objects.cc b/src/objects.cc
index 72412c1..b3b290e 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -1006,6 +1006,9 @@
case BYTE_ARRAY_TYPE:
accumulator->Add("<ByteArray[%u]>", ByteArray::cast(this)->length());
break;
+ case PIXEL_ARRAY_TYPE:
+ accumulator->Add("<PixelArray[%u]>", PixelArray::cast(this)->length());
+ break;
case SHARED_FUNCTION_INFO_TYPE:
accumulator->Add("<SharedFunctionInfo>");
break;
@@ -1147,6 +1150,7 @@
case HEAP_NUMBER_TYPE:
case FILLER_TYPE:
case BYTE_ARRAY_TYPE:
+ case PIXEL_ARRAY_TYPE:
break;
case SHARED_FUNCTION_INFO_TYPE: {
SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(this);
@@ -1240,7 +1244,7 @@
// hidden symbols) and is not a real identifier.
StringInputBuffer buffer(name);
if (!Scanner::IsIdentifier(&buffer) && name != Heap::hidden_symbol()) {
- Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
+ Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
if (obj->IsFailure()) return obj;
return AddSlowProperty(name, value, attributes);
}
@@ -1278,7 +1282,7 @@
if (map()->unused_property_fields() == 0) {
if (properties()->length() > kMaxFastProperties) {
- Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
+ Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
if (obj->IsFailure()) return obj;
return AddSlowProperty(name, value, attributes);
}
@@ -1399,7 +1403,7 @@
} else {
// Normalize the object to prevent very large instance descriptors.
// This eliminates unwanted N^2 allocation and lookup behavior.
- Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
+ Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
if (obj->IsFailure()) return obj;
}
}
@@ -1469,7 +1473,7 @@
PropertyAttributes attributes) {
if (map()->unused_property_fields() == 0 &&
properties()->length() > kMaxFastProperties) {
- Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
+ Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
if (obj->IsFailure()) return obj;
return ReplaceSlowProperty(name, new_value, attributes);
}
@@ -1669,7 +1673,9 @@
for (Object* pt = GetPrototype();
pt != Heap::null_value();
pt = pt->GetPrototype()) {
- if (JSObject::cast(pt)->HasFastElements()) continue;
+ if (!JSObject::cast(pt)->HasDictionaryElements()) {
+ continue;
+ }
NumberDictionary* dictionary = JSObject::cast(pt)->element_dictionary();
int entry = dictionary->FindEntry(index);
if (entry != NumberDictionary::kNotFound) {
@@ -2118,12 +2124,22 @@
}
-Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode) {
+Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
+ int expected_additional_properties) {
if (!HasFastProperties()) return this;
- // Allocate new content
+ // The global object is always normalized.
+ ASSERT(!IsGlobalObject());
+
+ // Allocate new content.
+ int property_count = map()->NumberOfDescribedProperties();
+ if (expected_additional_properties > 0) {
+ property_count += expected_additional_properties;
+ } else {
+ property_count += 2; // Make space for two more properties.
+ }
Object* obj =
- StringDictionary::Allocate(map()->NumberOfDescribedProperties() * 2 + 4);
+ StringDictionary::Allocate(property_count * 2);
if (obj->IsFailure()) return obj;
StringDictionary* dictionary = StringDictionary::cast(obj);
@@ -2135,10 +2151,6 @@
PropertyDetails d =
PropertyDetails(details.attributes(), NORMAL, details.index());
Object* value = descs->GetConstantFunction(i);
- if (IsGlobalObject()) {
- value = Heap::AllocateJSGlobalPropertyCell(value);
- if (value->IsFailure()) return value;
- }
Object* result = dictionary->Add(descs->GetKey(i), value, d);
if (result->IsFailure()) return result;
dictionary = StringDictionary::cast(result);
@@ -2148,10 +2160,6 @@
PropertyDetails d =
PropertyDetails(details.attributes(), NORMAL, details.index());
Object* value = FastPropertyAt(descs->GetFieldIndex(i));
- if (IsGlobalObject()) {
- value = Heap::AllocateJSGlobalPropertyCell(value);
- if (value->IsFailure()) return value;
- }
Object* result = dictionary->Add(descs->GetKey(i), value, d);
if (result->IsFailure()) return result;
dictionary = StringDictionary::cast(result);
@@ -2161,10 +2169,6 @@
PropertyDetails d =
PropertyDetails(details.attributes(), CALLBACKS, details.index());
Object* value = descs->GetCallbacksObject(i);
- if (IsGlobalObject()) {
- value = Heap::AllocateJSGlobalPropertyCell(value);
- if (value->IsFailure()) return value;
- }
Object* result = dictionary->Add(descs->GetKey(i), value, d);
if (result->IsFailure()) return result;
dictionary = StringDictionary::cast(result);
@@ -2176,9 +2180,7 @@
case INTERCEPTOR:
break;
default:
- case NORMAL:
UNREACHABLE();
- break;
}
}
@@ -2231,7 +2233,8 @@
Object* JSObject::NormalizeElements() {
- if (!HasFastElements()) return this;
+ ASSERT(!HasPixelElements());
+ if (HasDictionaryElements()) return this;
// Get number of entries.
FixedArray* array = FixedArray::cast(elements());
@@ -2276,7 +2279,7 @@
if (!result.IsValid()) return Heap::true_value();
// Normalize object if needed.
- Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
+ Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
if (obj->IsFailure()) return obj;
return DeleteNormalizedProperty(name, mode);
@@ -2317,20 +2320,28 @@
Object* JSObject::DeleteElementPostInterceptor(uint32_t index,
DeleteMode mode) {
- if (HasFastElements()) {
- uint32_t length = IsJSArray() ?
+ ASSERT(!HasPixelElements());
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ uint32_t length = IsJSArray() ?
static_cast<uint32_t>(Smi::cast(JSArray::cast(this)->length())->value()) :
static_cast<uint32_t>(FixedArray::cast(elements())->length());
- if (index < length) {
- FixedArray::cast(elements())->set_the_hole(index);
+ if (index < length) {
+ FixedArray::cast(elements())->set_the_hole(index);
+ }
+ break;
}
- return Heap::true_value();
- }
- ASSERT(!HasFastElements());
- NumberDictionary* dictionary = element_dictionary();
- int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
- return dictionary->DeleteProperty(entry, mode);
+ case DICTIONARY_ELEMENTS: {
+ NumberDictionary* dictionary = element_dictionary();
+ int entry = dictionary->FindEntry(index);
+ if (entry != NumberDictionary::kNotFound) {
+ return dictionary->DeleteProperty(entry, mode);
+ }
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
return Heap::true_value();
}
@@ -2392,20 +2403,31 @@
return DeleteElementWithInterceptor(index);
}
- if (HasFastElements()) {
- uint32_t length = IsJSArray() ?
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ uint32_t length = IsJSArray() ?
static_cast<uint32_t>(Smi::cast(JSArray::cast(this)->length())->value()) :
static_cast<uint32_t>(FixedArray::cast(elements())->length());
- if (index < length) {
- FixedArray::cast(elements())->set_the_hole(index);
+ if (index < length) {
+ FixedArray::cast(elements())->set_the_hole(index);
+ }
+ break;
}
- return Heap::true_value();
- } else {
- NumberDictionary* dictionary = element_dictionary();
- int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
- return dictionary->DeleteProperty(entry, mode);
+ case PIXEL_ELEMENTS: {
+ // Pixel elements cannot be deleted. Just silently ignore here.
+ break;
}
+ case DICTIONARY_ELEMENTS: {
+ NumberDictionary* dictionary = element_dictionary();
+ int entry = dictionary->FindEntry(index);
+ if (entry != NumberDictionary::kNotFound) {
+ return dictionary->DeleteProperty(entry, mode);
+ }
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
return Heap::true_value();
}
@@ -2454,7 +2476,7 @@
mode);
}
// Normalize object if needed.
- Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
+ Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
if (obj->IsFailure()) return obj;
// Make sure the properties are normalized before removing the entry.
return DeleteNormalizedProperty(name, mode);
@@ -2483,21 +2505,32 @@
}
// Check if the object is among the indexed properties.
- if (HasFastElements()) {
- int length = IsJSArray()
- ? Smi::cast(JSArray::cast(this)->length())->value()
- : FixedArray::cast(elements())->length();
- for (int i = 0; i < length; i++) {
- Object* element = FixedArray::cast(elements())->get(i);
- if (!element->IsTheHole() && element == obj) {
+ switch (GetElementsKind()) {
+ case PIXEL_ELEMENTS:
+ // Raw pixels do not reference other objects.
+ break;
+ case FAST_ELEMENTS: {
+ int length = IsJSArray() ?
+ Smi::cast(JSArray::cast(this)->length())->value() :
+ FixedArray::cast(elements())->length();
+ for (int i = 0; i < length; i++) {
+ Object* element = FixedArray::cast(elements())->get(i);
+ if (!element->IsTheHole() && element == obj) {
+ return true;
+ }
+ }
+ break;
+ }
+ case DICTIONARY_ELEMENTS: {
+ key = element_dictionary()->SlowReverseLookup(obj);
+ if (key != Heap::undefined_value()) {
return true;
}
+ break;
}
- } else {
- key = element_dictionary()->SlowReverseLookup(obj);
- if (key != Heap::undefined_value()) {
- return true;
- }
+ default:
+ UNREACHABLE();
+ break;
}
// For functions check the context. Boilerplate functions do
@@ -2715,20 +2748,31 @@
if (is_element && IsJSArray()) return Heap::undefined_value();
if (is_element) {
- // Lookup the index.
- if (!HasFastElements()) {
- NumberDictionary* dictionary = element_dictionary();
- int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
- Object* result = dictionary->ValueAt(entry);
- PropertyDetails details = dictionary->DetailsAt(entry);
- if (details.IsReadOnly()) return Heap::undefined_value();
- if (details.type() == CALLBACKS) {
- // Only accessors allowed as elements.
- ASSERT(result->IsFixedArray());
- return result;
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS:
+ break;
+ case PIXEL_ELEMENTS:
+ // Ignore getters and setters on pixel elements.
+ return Heap::undefined_value();
+ case DICTIONARY_ELEMENTS: {
+ // Lookup the index.
+ NumberDictionary* dictionary = element_dictionary();
+ int entry = dictionary->FindEntry(index);
+ if (entry != NumberDictionary::kNotFound) {
+ Object* result = dictionary->ValueAt(entry);
+ PropertyDetails details = dictionary->DetailsAt(entry);
+ if (details.IsReadOnly()) return Heap::undefined_value();
+ if (details.type() == CALLBACKS) {
+ // Only accessors allowed as elements.
+ ASSERT(result->IsFixedArray());
+ return result;
+ }
}
+ break;
}
+ default:
+ UNREACHABLE();
+ break;
}
} else {
// Lookup the name.
@@ -2765,7 +2809,7 @@
set_elements(NumberDictionary::cast(dict));
} else {
// Normalize object to make this operation simple.
- Object* ok = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
+ Object* ok = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
if (ok->IsFailure()) return ok;
// For the global object allocate a new map to invalidate the global inline
@@ -2827,9 +2871,9 @@
for (Object* obj = this;
obj != Heap::null_value();
obj = JSObject::cast(obj)->GetPrototype()) {
- JSObject* jsObject = JSObject::cast(obj);
- if (!jsObject->HasFastElements()) {
- NumberDictionary* dictionary = jsObject->element_dictionary();
+ JSObject* js_object = JSObject::cast(obj);
+ if (js_object->HasDictionaryElements()) {
+ NumberDictionary* dictionary = js_object->element_dictionary();
int entry = dictionary->FindEntry(index);
if (entry != NumberDictionary::kNotFound) {
Object* element = dictionary->ValueAt(entry);
@@ -3029,28 +3073,35 @@
Object* FixedArray::AddKeysFromJSArray(JSArray* array) {
- if (array->HasFastElements()) {
- return UnionOfKeys(array->elements());
- }
- ASSERT(!array->HasFastElements());
- NumberDictionary* dict = array->element_dictionary();
- int size = dict->NumberOfElements();
+ ASSERT(!array->HasPixelElements());
+ switch (array->GetElementsKind()) {
+ case JSObject::FAST_ELEMENTS:
+ return UnionOfKeys(FixedArray::cast(array->elements()));
+ case JSObject::DICTIONARY_ELEMENTS: {
+ NumberDictionary* dict = array->element_dictionary();
+ int size = dict->NumberOfElements();
- // Allocate a temporary fixed array.
- Object* object = Heap::AllocateFixedArray(size);
- if (object->IsFailure()) return object;
- FixedArray* key_array = FixedArray::cast(object);
+ // Allocate a temporary fixed array.
+ Object* object = Heap::AllocateFixedArray(size);
+ if (object->IsFailure()) return object;
+ FixedArray* key_array = FixedArray::cast(object);
- int capacity = dict->Capacity();
- int pos = 0;
- // Copy the elements from the JSArray to the temporary fixed array.
- for (int i = 0; i < capacity; i++) {
- if (dict->IsKey(dict->KeyAt(i))) {
- key_array->set(pos++, dict->ValueAt(i));
+ int capacity = dict->Capacity();
+ int pos = 0;
+ // Copy the elements from the JSArray to the temporary fixed array.
+ for (int i = 0; i < capacity; i++) {
+ if (dict->IsKey(dict->KeyAt(i))) {
+ key_array->set(pos++, dict->ValueAt(i));
+ }
+ }
+ // Compute the union of this and the temporary fixed array.
+ return UnionOfKeys(key_array);
}
+ default:
+ UNREACHABLE();
}
- // Compute the union of this and the temporary fixed array.
- return UnionOfKeys(key_array);
+ UNREACHABLE();
+ return Heap::null_value(); // Failure case needs to "return" a value.
}
@@ -5089,54 +5140,74 @@
void JSObject::SetFastElements(FixedArray* elems) {
+ // We should never end in here with a pixel array.
+ ASSERT(!HasPixelElements());
#ifdef DEBUG
// Check the provided array is filled with the_hole.
uint32_t len = static_cast<uint32_t>(elems->length());
for (uint32_t i = 0; i < len; i++) ASSERT(elems->get(i)->IsTheHole());
#endif
WriteBarrierMode mode = elems->GetWriteBarrierMode();
- if (HasFastElements()) {
- FixedArray* old_elements = FixedArray::cast(elements());
- uint32_t old_length = static_cast<uint32_t>(old_elements->length());
- // Fill out the new array with this content and array holes.
- for (uint32_t i = 0; i < old_length; i++) {
- elems->set(i, old_elements->get(i), mode);
- }
- } else {
- NumberDictionary* dictionary = NumberDictionary::cast(elements());
- for (int i = 0; i < dictionary->Capacity(); i++) {
- Object* key = dictionary->KeyAt(i);
- if (key->IsNumber()) {
- uint32_t entry = static_cast<uint32_t>(key->Number());
- elems->set(entry, dictionary->ValueAt(i), mode);
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ FixedArray* old_elements = FixedArray::cast(elements());
+ uint32_t old_length = static_cast<uint32_t>(old_elements->length());
+ // Fill out the new array with this content and array holes.
+ for (uint32_t i = 0; i < old_length; i++) {
+ elems->set(i, old_elements->get(i), mode);
}
+ break;
}
+ case DICTIONARY_ELEMENTS: {
+ NumberDictionary* dictionary = NumberDictionary::cast(elements());
+ for (int i = 0; i < dictionary->Capacity(); i++) {
+ Object* key = dictionary->KeyAt(i);
+ if (key->IsNumber()) {
+ uint32_t entry = static_cast<uint32_t>(key->Number());
+ elems->set(entry, dictionary->ValueAt(i), mode);
+ }
+ }
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
set_elements(elems);
}
Object* JSObject::SetSlowElements(Object* len) {
+ // We should never end in here with a pixel array.
+ ASSERT(!HasPixelElements());
+
uint32_t new_length = static_cast<uint32_t>(len->Number());
- if (!HasFastElements()) {
- if (IsJSArray()) {
- uint32_t old_length =
- static_cast<uint32_t>(JSArray::cast(this)->length()->Number());
- element_dictionary()->RemoveNumberEntries(new_length, old_length),
- JSArray::cast(this)->set_length(len);
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ // Make sure we never try to shrink dense arrays into sparse arrays.
+ ASSERT(static_cast<uint32_t>(FixedArray::cast(elements())->length()) <=
+ new_length);
+ Object* obj = NormalizeElements();
+ if (obj->IsFailure()) return obj;
+
+ // Update length for JSArrays.
+ if (IsJSArray()) JSArray::cast(this)->set_length(len);
+ break;
}
- return this;
+ case DICTIONARY_ELEMENTS: {
+ if (IsJSArray()) {
+ uint32_t old_length =
+ static_cast<uint32_t>(JSArray::cast(this)->length()->Number());
+ element_dictionary()->RemoveNumberEntries(new_length, old_length);
+ JSArray::cast(this)->set_length(len);
+ }
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
-
- // Make sure we never try to shrink dense arrays into sparse arrays.
- ASSERT(static_cast<uint32_t>(FixedArray::cast(elements())->length()) <=
- new_length);
- Object* obj = NormalizeElements();
- if (obj->IsFailure()) return obj;
-
- // Update length for JSArrays.
- if (IsJSArray()) JSArray::cast(this)->set_length(len);
return this;
}
@@ -5159,7 +5230,7 @@
void JSArray::Expand(int required_size) {
Handle<JSArray> self(this);
- Handle<FixedArray> old_backing(elements());
+ Handle<FixedArray> old_backing(FixedArray::cast(elements()));
int old_size = old_backing->length();
// Doubling in size would be overkill, but leave some slack to avoid
// constantly growing.
@@ -5186,52 +5257,62 @@
Object* JSObject::SetElementsLength(Object* len) {
+ // We should never end in here with a pixel array.
+ ASSERT(!HasPixelElements());
+
Object* smi_length = len->ToSmi();
if (smi_length->IsSmi()) {
int value = Smi::cast(smi_length)->value();
if (value < 0) return ArrayLengthRangeError();
- if (HasFastElements()) {
- int old_capacity = FixedArray::cast(elements())->length();
- if (value <= old_capacity) {
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ int old_capacity = FixedArray::cast(elements())->length();
+ if (value <= old_capacity) {
+ if (IsJSArray()) {
+ int old_length = FastD2I(JSArray::cast(this)->length()->Number());
+ // NOTE: We may be able to optimize this by removing the
+ // last part of the elements backing storage array and
+ // setting the capacity to the new size.
+ for (int i = value; i < old_length; i++) {
+ FixedArray::cast(elements())->set_the_hole(i);
+ }
+ JSArray::cast(this)->set_length(smi_length, SKIP_WRITE_BARRIER);
+ }
+ return this;
+ }
+ int min = NewElementsCapacity(old_capacity);
+ int new_capacity = value > min ? value : min;
+ if (new_capacity <= kMaxFastElementsLength ||
+ !ShouldConvertToSlowElements(new_capacity)) {
+ Object* obj = Heap::AllocateFixedArrayWithHoles(new_capacity);
+ if (obj->IsFailure()) return obj;
+ if (IsJSArray()) JSArray::cast(this)->set_length(smi_length,
+ SKIP_WRITE_BARRIER);
+ SetFastElements(FixedArray::cast(obj));
+ return this;
+ }
+ break;
+ }
+ case DICTIONARY_ELEMENTS: {
if (IsJSArray()) {
- int old_length = FastD2I(JSArray::cast(this)->length()->Number());
- // NOTE: We may be able to optimize this by removing the
- // last part of the elements backing storage array and
- // setting the capacity to the new size.
- for (int i = value; i < old_length; i++) {
- FixedArray::cast(elements())->set_the_hole(i);
+ if (value == 0) {
+ // If the length of a slow array is reset to zero, we clear
+ // the array and flush backing storage. This has the added
+ // benefit that the array returns to fast mode.
+ initialize_elements();
+ } else {
+ // Remove deleted elements.
+ uint32_t old_length =
+ static_cast<uint32_t>(JSArray::cast(this)->length()->Number());
+ element_dictionary()->RemoveNumberEntries(value, old_length);
}
JSArray::cast(this)->set_length(smi_length, SKIP_WRITE_BARRIER);
}
return this;
}
- int min = NewElementsCapacity(old_capacity);
- int new_capacity = value > min ? value : min;
- if (new_capacity <= kMaxFastElementsLength ||
- !ShouldConvertToSlowElements(new_capacity)) {
- Object* obj = Heap::AllocateFixedArrayWithHoles(new_capacity);
- if (obj->IsFailure()) return obj;
- if (IsJSArray()) JSArray::cast(this)->set_length(smi_length,
- SKIP_WRITE_BARRIER);
- SetFastElements(FixedArray::cast(obj));
- return this;
- }
- } else {
- if (IsJSArray()) {
- if (value == 0) {
- // If the length of a slow array is reset to zero, we clear
- // the array and flush backing storage. This has the added
- // benefit that the array returns to fast mode.
- initialize_elements();
- } else {
- // Remove deleted elements.
- uint32_t old_length =
- static_cast<uint32_t>(JSArray::cast(this)->length()->Number());
- element_dictionary()->RemoveNumberEntries(value, old_length);
- }
- JSArray::cast(this)->set_length(smi_length, SKIP_WRITE_BARRIER);
- }
- return this;
+ default:
+ UNREACHABLE();
+ break;
}
}
@@ -5258,20 +5339,36 @@
bool JSObject::HasElementPostInterceptor(JSObject* receiver, uint32_t index) {
- if (HasFastElements()) {
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>(
- Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedArray::cast(elements())->length());
- if ((index < length) &&
- !FixedArray::cast(elements())->get(index)->IsTheHole()) {
- return true;
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ uint32_t length = IsJSArray() ?
+ static_cast<uint32_t>
+ (Smi::cast(JSArray::cast(this)->length())->value()) :
+ static_cast<uint32_t>(FixedArray::cast(elements())->length());
+ if ((index < length) &&
+ !FixedArray::cast(elements())->get(index)->IsTheHole()) {
+ return true;
+ }
+ break;
}
- } else {
- if (element_dictionary()->FindEntry(index)
- != NumberDictionary::kNotFound) {
- return true;
+ case PIXEL_ELEMENTS: {
+ // TODO(iposva): Add testcase.
+ PixelArray* pixels = PixelArray::cast(elements());
+ if (index < static_cast<uint32_t>(pixels->length())) {
+ return true;
+ }
+ break;
}
+ case DICTIONARY_ELEMENTS: {
+ if (element_dictionary()->FindEntry(index)
+ != NumberDictionary::kNotFound) {
+ return true;
+ }
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
// Handle [] on String objects.
@@ -5338,17 +5435,29 @@
// Handle [] on String objects.
if (this->IsStringObjectWithCharacterAt(index)) return true;
- if (HasFastElements()) {
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>(
- Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedArray::cast(elements())->length());
- return (index < length) &&
- !FixedArray::cast(elements())->get(index)->IsTheHole();
- } else {
- return element_dictionary()->FindEntry(index)
- != NumberDictionary::kNotFound;
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ uint32_t length = IsJSArray() ?
+ static_cast<uint32_t>
+ (Smi::cast(JSArray::cast(this)->length())->value()) :
+ static_cast<uint32_t>(FixedArray::cast(elements())->length());
+ return (index < length) &&
+ !FixedArray::cast(elements())->get(index)->IsTheHole();
+ }
+ case PIXEL_ELEMENTS: {
+ PixelArray* pixels = PixelArray::cast(elements());
+ return (index < static_cast<uint32_t>(pixels->length()));
+ }
+ case DICTIONARY_ELEMENTS: {
+ return element_dictionary()->FindEntry(index)
+ != NumberDictionary::kNotFound;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
+ UNREACHABLE();
+ return false;
}
@@ -5365,18 +5474,33 @@
return HasElementWithInterceptor(receiver, index);
}
- if (HasFastElements()) {
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>(
- Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedArray::cast(elements())->length());
- if ((index < length) &&
- !FixedArray::cast(elements())->get(index)->IsTheHole()) return true;
- } else {
- if (element_dictionary()->FindEntry(index)
- != NumberDictionary::kNotFound) {
- return true;
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ uint32_t length = IsJSArray() ?
+ static_cast<uint32_t>
+ (Smi::cast(JSArray::cast(this)->length())->value()) :
+ static_cast<uint32_t>(FixedArray::cast(elements())->length());
+ if ((index < length) &&
+ !FixedArray::cast(elements())->get(index)->IsTheHole()) return true;
+ break;
}
+ case PIXEL_ELEMENTS: {
+ PixelArray* pixels = PixelArray::cast(elements());
+ if (index < static_cast<uint32_t>(pixels->length())) {
+ return true;
+ }
+ break;
+ }
+ case DICTIONARY_ELEMENTS: {
+ if (element_dictionary()->FindEntry(index)
+ != NumberDictionary::kNotFound) {
+ return true;
+ }
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
// Handle [] on String objects.
@@ -5472,7 +5596,7 @@
// Otherwise default to slow case.
Object* obj = NormalizeElements();
if (obj->IsFailure()) return obj;
- ASSERT(!HasFastElements());
+ ASSERT(HasDictionaryElements());
return SetElement(index, value);
}
@@ -5501,80 +5625,95 @@
Object* JSObject::SetElementWithoutInterceptor(uint32_t index, Object* value) {
- // Fast case.
- if (HasFastElements()) return SetFastElement(index, value);
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS:
+ // Fast case.
+ return SetFastElement(index, value);
+ case PIXEL_ELEMENTS: {
+ PixelArray* pixels = PixelArray::cast(elements());
+ return pixels->SetValue(index, value);
+ }
+ case DICTIONARY_ELEMENTS: {
+ // Insert element in the dictionary.
+ FixedArray* elms = FixedArray::cast(elements());
+ NumberDictionary* dictionary = NumberDictionary::cast(elms);
- // Dictionary case.
- ASSERT(!HasFastElements());
-
- // Insert element in the dictionary.
- FixedArray* elms = FixedArray::cast(elements());
- NumberDictionary* dictionary = NumberDictionary::cast(elms);
-
- int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
- Object* element = dictionary->ValueAt(entry);
- PropertyDetails details = dictionary->DetailsAt(entry);
- if (details.type() == CALLBACKS) {
- // Only accessors allowed as elements.
- FixedArray* structure = FixedArray::cast(element);
- if (structure->get(kSetterIndex)->IsJSFunction()) {
- JSFunction* setter = JSFunction::cast(structure->get(kSetterIndex));
- return SetPropertyWithDefinedSetter(setter, value);
+ int entry = dictionary->FindEntry(index);
+ if (entry != NumberDictionary::kNotFound) {
+ Object* element = dictionary->ValueAt(entry);
+ PropertyDetails details = dictionary->DetailsAt(entry);
+ if (details.type() == CALLBACKS) {
+ // Only accessors allowed as elements.
+ FixedArray* structure = FixedArray::cast(element);
+ if (structure->get(kSetterIndex)->IsJSFunction()) {
+ JSFunction* setter = JSFunction::cast(structure->get(kSetterIndex));
+ return SetPropertyWithDefinedSetter(setter, value);
+ } else {
+ Handle<Object> self(this);
+ Handle<Object> key(Factory::NewNumberFromUint(index));
+ Handle<Object> args[2] = { key, self };
+ return Top::Throw(*Factory::NewTypeError("no_setter_in_callback",
+ HandleVector(args, 2)));
+ }
+ } else {
+ dictionary->UpdateMaxNumberKey(index);
+ dictionary->ValueAtPut(entry, value);
+ }
} else {
- Handle<Object> self(this);
- Handle<Object> key(Factory::NewNumberFromUint(index));
- Handle<Object> args[2] = { key, self };
- return Top::Throw(*Factory::NewTypeError("no_setter_in_callback",
- HandleVector(args, 2)));
+ // Index not already used. Look for an accessor in the prototype chain.
+ if (!IsJSArray()) {
+ Object* setter = LookupCallbackSetterInPrototypes(index);
+ if (setter->IsJSFunction()) {
+ return SetPropertyWithDefinedSetter(JSFunction::cast(setter),
+ value);
+ }
+ }
+ Object* result = dictionary->AtNumberPut(index, value);
+ if (result->IsFailure()) return result;
+ if (elms != FixedArray::cast(result)) {
+ set_elements(FixedArray::cast(result));
+ }
}
- } else {
- dictionary->UpdateMaxNumberKey(index);
- dictionary->ValueAtPut(entry, value);
- }
- } else {
- // Index not already used. Look for an accessor in the prototype chain.
- if (!IsJSArray()) {
- Object* setter = LookupCallbackSetterInPrototypes(index);
- if (setter->IsJSFunction()) {
- return SetPropertyWithDefinedSetter(JSFunction::cast(setter), value);
+
+ // Update the array length if this JSObject is an array.
+ if (IsJSArray()) {
+ JSArray* array = JSArray::cast(this);
+ Object* return_value = array->JSArrayUpdateLengthFromIndex(index,
+ value);
+ if (return_value->IsFailure()) return return_value;
}
- }
- Object* result = dictionary->AtNumberPut(index, value);
- if (result->IsFailure()) return result;
- if (elms != FixedArray::cast(result)) {
- set_elements(FixedArray::cast(result));
- }
- }
- // Update the array length if this JSObject is an array.
- if (IsJSArray()) {
- JSArray* array = JSArray::cast(this);
- Object* return_value = array->JSArrayUpdateLengthFromIndex(index, value);
- if (return_value->IsFailure()) return return_value;
- }
-
- // Attempt to put this object back in fast case.
- if (ShouldConvertToFastElements()) {
- uint32_t new_length = 0;
- if (IsJSArray()) {
- CHECK(Array::IndexFromObject(JSArray::cast(this)->length(), &new_length));
- JSArray::cast(this)->set_length(Smi::FromInt(new_length));
- } else {
- new_length = NumberDictionary::cast(elements())->max_number_key() + 1;
- }
- Object* obj = Heap::AllocateFixedArrayWithHoles(new_length);
- if (obj->IsFailure()) return obj;
- SetFastElements(FixedArray::cast(obj));
+ // Attempt to put this object back in fast case.
+ if (ShouldConvertToFastElements()) {
+ uint32_t new_length = 0;
+ if (IsJSArray()) {
+ CHECK(Array::IndexFromObject(JSArray::cast(this)->length(),
+ &new_length));
+ JSArray::cast(this)->set_length(Smi::FromInt(new_length));
+ } else {
+ new_length = NumberDictionary::cast(elements())->max_number_key() + 1;
+ }
+ Object* obj = Heap::AllocateFixedArrayWithHoles(new_length);
+ if (obj->IsFailure()) return obj;
+ SetFastElements(FixedArray::cast(obj));
#ifdef DEBUG
- if (FLAG_trace_normalization) {
- PrintF("Object elements are fast case again:\n");
- Print();
- }
+ if (FLAG_trace_normalization) {
+ PrintF("Object elements are fast case again:\n");
+ Print();
+ }
#endif
- }
+ }
- return value;
+ return value;
+ }
+ default:
+ UNREACHABLE();
+ break;
+ }
+ // All possible cases have been handled above. Add a return to avoid the
+ // complaints from the compiler.
+ UNREACHABLE();
+ return Heap::null_value();
}
@@ -5597,32 +5736,45 @@
uint32_t index) {
// Get element works for both JSObject and JSArray since
// JSArray::length cannot change.
- if (HasFastElements()) {
- FixedArray* elms = FixedArray::cast(elements());
- if (index < static_cast<uint32_t>(elms->length())) {
- Object* value = elms->get(index);
- if (!value->IsTheHole()) return value;
- }
- } else {
- NumberDictionary* dictionary = element_dictionary();
- int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
- Object* element = dictionary->ValueAt(entry);
- PropertyDetails details = dictionary->DetailsAt(entry);
- if (details.type() == CALLBACKS) {
- // Only accessors allowed as elements.
- FixedArray* structure = FixedArray::cast(element);
- Object* getter = structure->get(kGetterIndex);
- if (getter->IsJSFunction()) {
- return GetPropertyWithDefinedGetter(receiver,
- JSFunction::cast(getter));
- } else {
- // Getter is not a function.
- return Heap::undefined_value();
- }
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ FixedArray* elms = FixedArray::cast(elements());
+ if (index < static_cast<uint32_t>(elms->length())) {
+ Object* value = elms->get(index);
+ if (!value->IsTheHole()) return value;
}
- return element;
+ break;
}
+ case PIXEL_ELEMENTS: {
+ // TODO(iposva): Add testcase and implement.
+ UNIMPLEMENTED();
+ break;
+ }
+ case DICTIONARY_ELEMENTS: {
+ NumberDictionary* dictionary = element_dictionary();
+ int entry = dictionary->FindEntry(index);
+ if (entry != NumberDictionary::kNotFound) {
+ Object* element = dictionary->ValueAt(entry);
+ PropertyDetails details = dictionary->DetailsAt(entry);
+ if (details.type() == CALLBACKS) {
+ // Only accessors allowed as elements.
+ FixedArray* structure = FixedArray::cast(element);
+ Object* getter = structure->get(kGetterIndex);
+ if (getter->IsJSFunction()) {
+ return GetPropertyWithDefinedGetter(receiver,
+ JSFunction::cast(getter));
+ } else {
+ // Getter is not a function.
+ return Heap::undefined_value();
+ }
+ }
+ return element;
+ }
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
// Continue searching via the prototype chain.
@@ -5681,31 +5833,44 @@
// Get element works for both JSObject and JSArray since
// JSArray::length cannot change.
- if (HasFastElements()) {
- FixedArray* elms = FixedArray::cast(elements());
- if (index < static_cast<uint32_t>(elms->length())) {
- Object* value = elms->get(index);
- if (!value->IsTheHole()) return value;
- }
- } else {
- NumberDictionary* dictionary = element_dictionary();
- int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
- Object* element = dictionary->ValueAt(entry);
- PropertyDetails details = dictionary->DetailsAt(entry);
- if (details.type() == CALLBACKS) {
- // Only accessors allowed as elements.
- FixedArray* structure = FixedArray::cast(element);
- Object* getter = structure->get(kGetterIndex);
- if (getter->IsJSFunction()) {
- return GetPropertyWithDefinedGetter(receiver,
- JSFunction::cast(getter));
- } else {
- // Getter is not a function.
- return Heap::undefined_value();
- }
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ FixedArray* elms = FixedArray::cast(elements());
+ if (index < static_cast<uint32_t>(elms->length())) {
+ Object* value = elms->get(index);
+ if (!value->IsTheHole()) return value;
}
- return element;
+ break;
+ }
+ case PIXEL_ELEMENTS: {
+ PixelArray* pixels = PixelArray::cast(elements());
+ if (index < static_cast<uint32_t>(pixels->length())) {
+ uint8_t value = pixels->get(index);
+ return Smi::FromInt(value);
+ }
+ break;
+ }
+ case DICTIONARY_ELEMENTS: {
+ NumberDictionary* dictionary = element_dictionary();
+ int entry = dictionary->FindEntry(index);
+ if (entry != NumberDictionary::kNotFound) {
+ Object* element = dictionary->ValueAt(entry);
+ PropertyDetails details = dictionary->DetailsAt(entry);
+ if (details.type() == CALLBACKS) {
+ // Only accessors allowed as elements.
+ FixedArray* structure = FixedArray::cast(element);
+ Object* getter = structure->get(kGetterIndex);
+ if (getter->IsJSFunction()) {
+ return GetPropertyWithDefinedGetter(receiver,
+ JSFunction::cast(getter));
+ } else {
+ // Getter is not a function.
+ return Heap::undefined_value();
+ }
+ }
+ return element;
+ }
+ break;
}
}
@@ -5719,16 +5884,27 @@
int capacity = 0;
int number_of_elements = 0;
- if (HasFastElements()) {
- FixedArray* elms = FixedArray::cast(elements());
- capacity = elms->length();
- for (int i = 0; i < capacity; i++) {
- if (!elms->get(i)->IsTheHole()) number_of_elements++;
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ FixedArray* elms = FixedArray::cast(elements());
+ capacity = elms->length();
+ for (int i = 0; i < capacity; i++) {
+ if (!elms->get(i)->IsTheHole()) number_of_elements++;
+ }
+ break;
}
- } else {
- NumberDictionary* dictionary = NumberDictionary::cast(elements());
- capacity = dictionary->Capacity();
- number_of_elements = dictionary->NumberOfElements();
+ case PIXEL_ELEMENTS: {
+ return true;
+ }
+ case DICTIONARY_ELEMENTS: {
+ NumberDictionary* dictionary = NumberDictionary::cast(elements());
+ capacity = dictionary->Capacity();
+ number_of_elements = dictionary->NumberOfElements();
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
if (capacity == 0) return true;
@@ -5747,7 +5923,7 @@
bool JSObject::ShouldConvertToFastElements() {
- ASSERT(!HasFastElements());
+ ASSERT(HasDictionaryElements());
NumberDictionary* dictionary = NumberDictionary::cast(elements());
// If the elements are sparse, we should not go back to fast case.
if (!HasDenseElements()) return false;
@@ -5848,12 +6024,12 @@
}
-Object* JSObject::GetPropertyWithInterceptorProper(
+Object* JSObject::GetPropertyWithInterceptor(
JSObject* receiver,
String* name,
PropertyAttributes* attributes) {
+ InterceptorInfo* interceptor = GetNamedInterceptor();
HandleScope scope;
- Handle<InterceptorInfo> interceptor(GetNamedInterceptor());
Handle<JSObject> receiver_handle(receiver);
Handle<JSObject> holder_handle(this);
Handle<String> name_handle(name);
@@ -5872,85 +6048,14 @@
VMState state(EXTERNAL);
result = getter(v8::Utils::ToLocal(name_handle), info);
}
- if (!Top::has_scheduled_exception() && !result.IsEmpty()) {
+ RETURN_IF_SCHEDULED_EXCEPTION();
+ if (!result.IsEmpty()) {
*attributes = NONE;
return *v8::Utils::OpenHandle(*result);
}
}
- *attributes = ABSENT;
- return Heap::undefined_value();
-}
-
-
-Object* JSObject::GetInterceptorPropertyWithLookupHint(
- JSObject* receiver,
- Smi* lookup_hint,
- String* name,
- PropertyAttributes* attributes) {
- HandleScope scope;
- Handle<JSObject> receiver_handle(receiver);
- Handle<JSObject> holder_handle(this);
- Handle<String> name_handle(name);
-
- Object* result = GetPropertyWithInterceptorProper(receiver,
- name,
- attributes);
- if (*attributes != ABSENT) {
- return result;
- }
- RETURN_IF_SCHEDULED_EXCEPTION();
-
- int property_index = lookup_hint->value();
- if (property_index >= 0) {
- result = holder_handle->FastPropertyAt(property_index);
- } else {
- switch (property_index) {
- case kLookupInPrototype: {
- Object* pt = holder_handle->GetPrototype();
- *attributes = ABSENT;
- if (pt == Heap::null_value()) return Heap::undefined_value();
- result = pt->GetPropertyWithReceiver(
- *receiver_handle,
- *name_handle,
- attributes);
- RETURN_IF_SCHEDULED_EXCEPTION();
- }
- break;
-
- case kLookupInHolder:
- result = holder_handle->GetPropertyPostInterceptor(
- *receiver_handle,
- *name_handle,
- attributes);
- RETURN_IF_SCHEDULED_EXCEPTION();
- break;
-
- default:
- UNREACHABLE();
- }
- }
-
- return result;
-}
-
-
-Object* JSObject::GetPropertyWithInterceptor(
- JSObject* receiver,
- String* name,
- PropertyAttributes* attributes) {
- HandleScope scope;
- Handle<JSObject> receiver_handle(receiver);
- Handle<JSObject> holder_handle(this);
- Handle<String> name_handle(name);
-
- Object* result = GetPropertyWithInterceptorProper(receiver, name, attributes);
- if (*attributes != ABSENT) {
- return result;
- }
- RETURN_IF_SCHEDULED_EXCEPTION();
-
- result = holder_handle->GetPropertyPostInterceptor(
+ Object* result = holder_handle->GetPropertyPostInterceptor(
*receiver_handle,
*name_handle,
attributes);
@@ -6001,16 +6106,30 @@
// Handle [] on String objects.
if (this->IsStringObjectWithCharacterAt(index)) return true;
- if (HasFastElements()) {
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>(
- Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedArray::cast(elements())->length());
- return (index < length) &&
- !FixedArray::cast(elements())->get(index)->IsTheHole();
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ uint32_t length = IsJSArray() ?
+ static_cast<uint32_t>(
+ Smi::cast(JSArray::cast(this)->length())->value()) :
+ static_cast<uint32_t>(FixedArray::cast(elements())->length());
+ return (index < length) &&
+ !FixedArray::cast(elements())->get(index)->IsTheHole();
+ }
+ case PIXEL_ELEMENTS: {
+ PixelArray* pixels = PixelArray::cast(elements());
+ return index < static_cast<uint32_t>(pixels->length());
+ }
+ case DICTIONARY_ELEMENTS: {
+ return element_dictionary()->FindEntry(index)
+ != NumberDictionary::kNotFound;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
- return element_dictionary()->FindEntry(index)
- != NumberDictionary::kNotFound;
+ // All possibilities have been handled above already.
+ UNREACHABLE();
+ return false;
}
@@ -6193,24 +6312,43 @@
int JSObject::GetLocalElementKeys(FixedArray* storage,
PropertyAttributes filter) {
int counter = 0;
- if (HasFastElements()) {
- int length = IsJSArray()
- ? Smi::cast(JSArray::cast(this)->length())->value()
- : FixedArray::cast(elements())->length();
- for (int i = 0; i < length; i++) {
- if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
- if (storage) {
- storage->set(counter, Smi::FromInt(i), SKIP_WRITE_BARRIER);
+ switch (GetElementsKind()) {
+ case FAST_ELEMENTS: {
+ int length = IsJSArray() ?
+ Smi::cast(JSArray::cast(this)->length())->value() :
+ FixedArray::cast(elements())->length();
+ for (int i = 0; i < length; i++) {
+ if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
+ if (storage != NULL) {
+ storage->set(counter, Smi::FromInt(i), SKIP_WRITE_BARRIER);
+ }
+ counter++;
+ }
+ }
+ ASSERT(!storage || storage->length() >= counter);
+ break;
+ }
+ case PIXEL_ELEMENTS: {
+ int length = PixelArray::cast(elements())->length();
+ while (counter < length) {
+ if (storage != NULL) {
+ storage->set(counter, Smi::FromInt(counter), SKIP_WRITE_BARRIER);
}
counter++;
}
+ ASSERT(!storage || storage->length() >= counter);
+ break;
}
- ASSERT(!storage || storage->length() >= counter);
- } else {
- if (storage) {
- element_dictionary()->CopyKeysTo(storage, filter);
+ case DICTIONARY_ELEMENTS: {
+ if (storage != NULL) {
+ element_dictionary()->CopyKeysTo(storage, filter);
+ }
+ counter = element_dictionary()->NumberOfElementsFilterAttributes(filter);
+ break;
}
- counter = element_dictionary()->NumberOfElementsFilterAttributes(filter);
+ default:
+ UNREACHABLE();
+ break;
}
if (this->IsJSValue()) {
@@ -6669,7 +6807,7 @@
// Collates undefined and unexisting elements below limit from position
// zero of the elements. The object stays in Dictionary mode.
Object* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
- ASSERT(!HasFastElements());
+ ASSERT(HasDictionaryElements());
// Must stay in dictionary mode, either because of requires_slow_elements,
// or because we are not going to sort (and therefore compact) all of the
// elements.
@@ -6743,7 +6881,9 @@
// If the object is in dictionary mode, it is converted to fast elements
// mode.
Object* JSObject::PrepareElementsForSort(uint32_t limit) {
- if (!HasFastElements()) {
+ ASSERT(!HasPixelElements());
+
+ if (HasDictionaryElements()) {
// Convert to fast elements containing only the existing properties.
// Ordering is irrelevant, since we are going to sort anyway.
NumberDictionary* dict = element_dictionary();
@@ -6768,7 +6908,7 @@
// Collect holes at the end, undefined before that and the rest at the
// start, and return the number of non-hole, non-undefined values.
- FixedArray* elements = this->elements();
+ FixedArray* elements = FixedArray::cast(this->elements());
uint32_t elements_length = static_cast<uint32_t>(elements->length());
if (limit > elements_length) {
limit = elements_length ;
@@ -6838,6 +6978,41 @@
}
+Object* PixelArray::SetValue(uint32_t index, Object* value) {
+ uint8_t clamped_value = 0;
+ if (index < static_cast<uint32_t>(length())) {
+ if (value->IsSmi()) {
+ int int_value = Smi::cast(value)->value();
+ if (int_value < 0) {
+ clamped_value = 0;
+ } else if (int_value > 255) {
+ clamped_value = 255;
+ } else {
+ clamped_value = static_cast<uint8_t>(int_value);
+ }
+ } else if (value->IsHeapNumber()) {
+ double double_value = HeapNumber::cast(value)->value();
+ if (!(double_value > 0)) {
+ // NaN and less than zero clamp to zero.
+ clamped_value = 0;
+ } else if (double_value > 255) {
+ // Greater than 255 clamp to 255.
+ clamped_value = 255;
+ } else {
+ // Other doubles are rounded to the nearest integer.
+ clamped_value = static_cast<uint8_t>(double_value + 0.5);
+ }
+ } else {
+ // Clamp undefined to zero (default). All other types have been
+ // converted to a number type further up in the call chain.
+ ASSERT(value->IsUndefined());
+ }
+ set(index, clamped_value);
+ }
+ return Smi::FromInt(clamped_value);
+}
+
+
Object* GlobalObject::GetPropertyCell(LookupResult* result) {
ASSERT(!HasFastProperties());
Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
diff --git a/src/objects.h b/src/objects.h
index 5e5eb6b..d367f81 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -52,6 +52,7 @@
// - JSValue
// - Array
// - ByteArray
+// - PixelArray
// - FixedArray
// - DescriptorArray
// - HashTable
@@ -95,7 +96,6 @@
// HeapObject: [32 bit direct pointer] (4 byte aligned) | 01
// Failure: [30 bit signed int] 11
-
// Ecma-262 3rd 8.6.1
enum PropertyAttributes {
NONE = v8::None,
@@ -270,6 +270,7 @@
V(ODDBALL_TYPE) \
V(PROXY_TYPE) \
V(BYTE_ARRAY_TYPE) \
+ V(PIXEL_ARRAY_TYPE) \
V(FILLER_TYPE) \
\
V(ACCESSOR_INFO_TYPE) \
@@ -659,6 +660,7 @@
JS_GLOBAL_PROPERTY_CELL_TYPE,
PROXY_TYPE,
BYTE_ARRAY_TYPE,
+ PIXEL_ARRAY_TYPE,
FILLER_TYPE,
SMI_TYPE,
@@ -760,6 +762,7 @@
inline bool IsNumber();
inline bool IsByteArray();
+ inline bool IsPixelArray();
inline bool IsFailure();
inline bool IsRetryAfterGC();
inline bool IsOutOfMemoryFailure();
@@ -1302,6 +1305,11 @@
class JSObject: public HeapObject {
public:
enum DeleteMode { NORMAL_DELETION, FORCE_DELETION };
+ enum ElementsKind {
+ FAST_ELEMENTS,
+ DICTIONARY_ELEMENTS,
+ PIXEL_ELEMENTS
+ };
// [properties]: Backing storage for properties.
// properties is a FixedArray in the fast case, and a Dictionary in the
@@ -1313,10 +1321,13 @@
// [elements]: The elements (properties with names that are integers).
// elements is a FixedArray in the fast case, and a Dictionary in the slow
- // case.
- DECL_ACCESSORS(elements, FixedArray) // Get and set fast elements.
+ // case or a PixelArray in a special case.
+ DECL_ACCESSORS(elements, Array) // Get and set fast elements.
inline void initialize_elements();
+ inline ElementsKind GetElementsKind();
inline bool HasFastElements();
+ inline bool HasDictionaryElements();
+ inline bool HasPixelElements();
inline NumberDictionary* element_dictionary(); // Gets slow elements.
// Collects elements starting at index 0.
@@ -1496,14 +1507,6 @@
Object* LookupCallbackSetterInPrototypes(uint32_t index);
void LookupCallback(String* name, LookupResult* result);
- inline Smi* InterceptorPropertyLookupHint(String* name);
- Object* GetInterceptorPropertyWithLookupHint(JSObject* receiver,
- Smi* lookup_hint,
- String* name,
- PropertyAttributes* attributes);
- static const int kLookupInHolder = -1;
- static const int kLookupInPrototype = -2;
-
// Returns the number of properties on this object filtering out properties
// with the specified attributes (ignoring interceptors).
int NumberOfLocalProperties(PropertyAttributes filter);
@@ -1581,8 +1584,11 @@
PropertyAttributes attributes);
// Convert the object to use the canonical dictionary
- // representation.
- Object* NormalizeProperties(PropertyNormalizationMode mode);
+ // representation. If the object is expected to have additional properties
+ // added, this number can be supplied so that the backing store is
+ // allocated with an initial capacity for holding these properties.
+ Object* NormalizeProperties(PropertyNormalizationMode mode,
+ int expected_additional_properties);
Object* NormalizeElements();
// Transform slow named properties to fast variants.
@@ -1695,12 +1701,6 @@
void LookupInDescriptor(String* name, LookupResult* result);
- // Attempts to get property with a named interceptor getter.
- // Sets |attributes| to ABSENT if interceptor didn't return anything
- Object* GetPropertyWithInterceptorProper(JSObject* receiver,
- String* name,
- PropertyAttributes* attributes);
-
DISALLOW_IMPLICIT_CONSTRUCTORS(JSObject);
};
@@ -2440,6 +2440,45 @@
};
+// A PixelArray represents a fixed-size byte array with special semantics
+// used for implementing the CanvasPixelArray object. Please see the
+// specification at:
+// http://www.whatwg.org/specs/web-apps/current-work/
+// multipage/the-canvas-element.html#canvaspixelarray
+// In particular, write access clamps the value written to 0 or 255 if the
+// value written is outside this range.
+class PixelArray: public Array {
+ public:
+ // [external_pointer]: The pointer to the external memory area backing this
+ // pixel array.
+ DECL_ACCESSORS(external_pointer, uint8_t) // Pointer to the data store.
+
+ // Setter and getter.
+ inline uint8_t get(int index);
+ inline void set(int index, uint8_t value);
+
+ // This accessor applies the correct conversion from Smi, HeapNumber and
+ // undefined and clamps the converted value between 0 and 255.
+ Object* SetValue(uint32_t index, Object* value);
+
+ // Casting.
+ static inline PixelArray* cast(Object* obj);
+
+#ifdef DEBUG
+ void PixelArrayPrint();
+ void PixelArrayVerify();
+#endif // DEBUG
+
+ // PixelArray headers are not quadword aligned.
+ static const int kExternalPointerOffset = Array::kAlignedSize;
+ static const int kHeaderSize = kExternalPointerOffset + kPointerSize;
+ static const int kAlignedSize = OBJECT_SIZE_ALIGN(kHeaderSize);
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(PixelArray);
+};
+
+
// Code describes objects with on-the-fly generated machine code.
class Code: public HeapObject {
public:
@@ -3137,6 +3176,9 @@
// function.
inline bool IsBoilerplate();
+ // Tells whether this function is builtin.
+ inline bool IsBuiltin();
+
// [literals]: Fixed array holding the materialized literals.
//
// If the function contains object, regexp or array literals, the
diff --git a/src/parser.cc b/src/parser.cc
index da2b286..348c12a 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -1059,7 +1059,7 @@
class Target BASE_EMBEDDED {
public:
- Target(Parser* parser, Node* node)
+ Target(Parser* parser, AstNode* node)
: parser_(parser), node_(node), previous_(parser_->target_stack_) {
parser_->target_stack_ = this;
}
@@ -1069,11 +1069,11 @@
}
Target* previous() { return previous_; }
- Node* node() { return node_; }
+ AstNode* node() { return node_; }
private:
Parser* parser_;
- Node* node_;
+ AstNode* node_;
Target* previous_;
};
diff --git a/src/platform-linux.cc b/src/platform-linux.cc
index bccf9e6..57c884f 100644
--- a/src/platform-linux.cc
+++ b/src/platform-linux.cc
@@ -223,62 +223,36 @@
}
-#ifdef ENABLE_LOGGING_AND_PROFILING
-static uintptr_t StringToULong(char* buffer) {
- return strtoul(buffer, NULL, 16); // NOLINT
-}
-#endif
-
-
void OS::LogSharedLibraryAddresses() {
#ifdef ENABLE_LOGGING_AND_PROFILING
- static const int MAP_LENGTH = 1024;
- int fd = open("/proc/self/maps", O_RDONLY);
- if (fd < 0) return;
+ FILE *fp;
+ fp = fopen("/proc/self/maps", "r");
+ if (fp == NULL) return;
while (true) {
- char addr_buffer[11];
- addr_buffer[0] = '0';
- addr_buffer[1] = 'x';
- addr_buffer[10] = 0;
- int result = read(fd, addr_buffer + 2, 8);
- if (result < 8) break;
- uintptr_t start = StringToULong(addr_buffer);
- result = read(fd, addr_buffer + 2, 1);
- if (result < 1) break;
- if (addr_buffer[2] != '-') break;
- result = read(fd, addr_buffer + 2, 8);
- if (result < 8) break;
- uintptr_t end = StringToULong(addr_buffer);
- char buffer[MAP_LENGTH];
- int bytes_read = -1;
- do {
- bytes_read++;
- if (bytes_read >= MAP_LENGTH - 1)
- break;
- result = read(fd, buffer + bytes_read, 1);
- if (result < 1) break;
- } while (buffer[bytes_read] != '\n');
- buffer[bytes_read] = 0;
- // Ignore mappings that are not executable.
- if (buffer[3] != 'x') continue;
- char* start_of_path = index(buffer, '/');
- // If there is no filename for this line then log it as an anonymous
- // mapping and use the address as its name.
- if (start_of_path == NULL) {
- // 40 is enough to print a 64 bit address range.
- ASSERT(sizeof(buffer) > 40);
- snprintf(buffer,
- sizeof(buffer),
- "%08" V8PRIxPTR "-%08" V8PRIxPTR,
- start,
- end);
- LOG(SharedLibraryEvent(buffer, start, end));
- } else {
- buffer[bytes_read] = 0;
- LOG(SharedLibraryEvent(start_of_path, start, end));
+ uintptr_t start, end;
+ char attr_r, attr_w, attr_x, attr_p;
+ if (fscanf(fp, "%" V8PRIxPTR "-%" V8PRIxPTR, &start, &end) != 2) break;
+ if (fscanf(fp, " %c%c%c%c", &attr_r, &attr_w, &attr_x, &attr_p) != 4) break;
+ int c;
+ if (attr_r == 'r' && attr_x == 'x') {
+ while (c = getc(fp), (c != EOF) && (c != '\n') && (c != '/'));
+ char lib_name[1024];
+ bool lib_has_name = false;
+ if (c == '/') {
+ ungetc(c, fp);
+ lib_has_name = fgets(lib_name, sizeof(lib_name), fp) != NULL;
+ }
+ if (lib_has_name && strlen(lib_name) > 0) {
+ lib_name[strlen(lib_name) - 1] = '\0';
+ } else {
+ snprintf(lib_name, sizeof(lib_name),
+ "%08" V8PRIxPTR "-%08" V8PRIxPTR, start, end);
+ }
+ LOG(SharedLibraryEvent(lib_name, start, end));
}
+ while (c = getc(fp), (c != EOF) && (c != '\n'));
}
- close(fd);
+ fclose(fp);
#endif
}
diff --git a/src/prettyprinter.cc b/src/prettyprinter.cc
index 79f1883..7a8af40 100644
--- a/src/prettyprinter.cc
+++ b/src/prettyprinter.cc
@@ -417,7 +417,7 @@
}
-const char* PrettyPrinter::Print(Node* node) {
+const char* PrettyPrinter::Print(AstNode* node) {
Init();
Visit(node);
return output_;
@@ -441,7 +441,7 @@
}
-void PrettyPrinter::PrintOut(Node* node) {
+void PrettyPrinter::PrintOut(AstNode* node) {
PrettyPrinter printer;
PrintF("%s", printer.Print(node));
}
@@ -700,7 +700,7 @@
}
-void AstPrinter::PrintIndentedVisit(const char* s, Node* node) {
+void AstPrinter::PrintIndentedVisit(const char* s, AstNode* node) {
IndentedScope indent(s);
Visit(node);
}
@@ -934,6 +934,9 @@
case ObjectLiteral::Property::COMPUTED:
prop_kind = "PROPERTY - COMPUTED";
break;
+ case ObjectLiteral::Property::MATERIALIZED_LITERAL:
+ prop_kind = "PROPERTY - MATERIALIZED_LITERAL";
+ break;
case ObjectLiteral::Property::PROTOTYPE:
prop_kind = "PROPERTY - PROTOTYPE";
break;
@@ -945,7 +948,6 @@
break;
default:
UNREACHABLE();
- break;
}
IndentedScope prop(prop_kind);
PrintIndentedVisit("KEY", node->properties()->at(i)->key());
diff --git a/src/prettyprinter.h b/src/prettyprinter.h
index bfce9b0..8a6d1fb 100644
--- a/src/prettyprinter.h
+++ b/src/prettyprinter.h
@@ -42,17 +42,17 @@
// The following routines print a node into a string.
// The result string is alive as long as the PrettyPrinter is alive.
- const char* Print(Node* node);
+ const char* Print(AstNode* node);
const char* PrintExpression(FunctionLiteral* program);
const char* PrintProgram(FunctionLiteral* program);
// Print a node to stdout.
- static void PrintOut(Node* node);
+ static void PrintOut(AstNode* node);
// Individual nodes
#define DEF_VISIT(type) \
virtual void Visit##type(type* node);
- NODE_LIST(DEF_VISIT)
+ AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
private:
@@ -87,12 +87,12 @@
// Individual nodes
#define DEF_VISIT(type) \
virtual void Visit##type(type* node);
- NODE_LIST(DEF_VISIT)
+ AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
private:
friend class IndentedScope;
void PrintIndented(const char* txt);
- void PrintIndentedVisit(const char* s, Node* node);
+ void PrintIndentedVisit(const char* s, AstNode* node);
void PrintStatements(ZoneList<Statement*>* statements);
void PrintDeclarations(ZoneList<Declaration*>* declarations);
diff --git a/src/rewriter.cc b/src/rewriter.cc
index 8a7267a..d6ea68e 100644
--- a/src/rewriter.cc
+++ b/src/rewriter.cc
@@ -59,7 +59,7 @@
// Node visitors.
#define DEF_VISIT(type) \
virtual void Visit##type(type* node);
- NODE_LIST(DEF_VISIT)
+ AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
DISALLOW_COPY_AND_ASSIGN(AstOptimizer);
@@ -557,7 +557,7 @@
// Node visitors.
#define DEF_VISIT(type) \
virtual void Visit##type(type* node);
- NODE_LIST(DEF_VISIT)
+ AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
};
diff --git a/src/runtime.cc b/src/runtime.cc
index 350d391..0b98167 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -155,33 +155,43 @@
}
// Deep copy local elements.
- if (copy->HasFastElements()) {
- FixedArray* elements = copy->elements();
- WriteBarrierMode mode = elements->GetWriteBarrierMode();
- for (int i = 0; i < elements->length(); i++) {
- Object* value = elements->get(i);
- if (value->IsJSObject()) {
- JSObject* jsObject = JSObject::cast(value);
- result = DeepCopyBoilerplate(jsObject);
- if (result->IsFailure()) return result;
- elements->set(i, result, mode);
- }
- }
- } else {
- NumberDictionary* element_dictionary = copy->element_dictionary();
- int capacity = element_dictionary->Capacity();
- for (int i = 0; i < capacity; i++) {
- Object* k = element_dictionary->KeyAt(i);
- if (element_dictionary->IsKey(k)) {
- Object* value = element_dictionary->ValueAt(i);
+ // Pixel elements cannot be created using an object literal.
+ ASSERT(!copy->HasPixelElements());
+ switch (copy->GetElementsKind()) {
+ case JSObject::FAST_ELEMENTS: {
+ FixedArray* elements = FixedArray::cast(copy->elements());
+ WriteBarrierMode mode = elements->GetWriteBarrierMode();
+ for (int i = 0; i < elements->length(); i++) {
+ Object* value = elements->get(i);
if (value->IsJSObject()) {
JSObject* jsObject = JSObject::cast(value);
result = DeepCopyBoilerplate(jsObject);
if (result->IsFailure()) return result;
- element_dictionary->ValueAtPut(i, result);
+ elements->set(i, result, mode);
}
}
+ break;
}
+ case JSObject::DICTIONARY_ELEMENTS: {
+ NumberDictionary* element_dictionary = copy->element_dictionary();
+ int capacity = element_dictionary->Capacity();
+ for (int i = 0; i < capacity; i++) {
+ Object* k = element_dictionary->KeyAt(i);
+ if (element_dictionary->IsKey(k)) {
+ Object* value = element_dictionary->ValueAt(i);
+ if (value->IsJSObject()) {
+ JSObject* jsObject = JSObject::cast(value);
+ result = DeepCopyBoilerplate(jsObject);
+ if (result->IsFailure()) return result;
+ element_dictionary->ValueAtPut(i, result);
+ }
+ }
+ }
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
return copy;
}
@@ -258,6 +268,7 @@
{ // Add the constant properties to the boilerplate.
int length = constant_properties->length();
OptimizedObjectForAddingMultipleProperties opt(boilerplate,
+ length / 2,
!is_result_from_cache);
for (int index = 0; index < length; index +=2) {
Handle<Object> key(constant_properties->get(index+0));
@@ -1637,7 +1648,7 @@
}
case SUBJECT_CAPTURE: {
int capture = part.data;
- FixedArray* match_info = last_match_info->elements();
+ FixedArray* match_info = FixedArray::cast(last_match_info->elements());
int from = RegExpImpl::GetCapture(match_info, capture * 2);
int to = RegExpImpl::GetCapture(match_info, capture * 2 + 1);
if (from >= 0 && to > from) {
@@ -1717,7 +1728,8 @@
int start, end;
{
AssertNoAllocation match_info_array_is_not_in_a_handle;
- FixedArray* match_info_array = last_match_info_handle->elements();
+ FixedArray* match_info_array =
+ FixedArray::cast(last_match_info_handle->elements());
ASSERT_EQ(capture_count * 2 + 2,
RegExpImpl::GetLastCaptureCount(match_info_array));
@@ -2345,7 +2357,7 @@
int end;
{
AssertNoAllocation no_alloc;
- FixedArray* elements = regexp_info->elements();
+ FixedArray* elements = FixedArray::cast(regexp_info->elements());
start = Smi::cast(elements->get(RegExpImpl::kFirstCapture))->value();
end = Smi::cast(elements->get(RegExpImpl::kFirstCapture + 1))->value();
}
@@ -3022,7 +3034,7 @@
Handle<Object> object = args.at<Object>(0);
if (object->IsJSObject()) {
Handle<JSObject> js_object = Handle<JSObject>::cast(object);
- js_object->NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
+ js_object->NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
}
return *object;
}
@@ -4885,7 +4897,7 @@
AssertNoAllocation no_allocation;
- FixedArray* output_array = output->elements();
+ FixedArray* output_array = FixedArray::cast(output->elements());
RUNTIME_ASSERT(output_array->length() >= DateParser::OUTPUT_SIZE);
bool result;
if (str->IsAsciiRepresentation()) {
@@ -5173,37 +5185,62 @@
ArrayConcatVisitor* visitor) {
uint32_t num_of_elements = 0;
- if (receiver->HasFastElements()) {
- Handle<FixedArray> elements(FixedArray::cast(receiver->elements()));
- uint32_t len = elements->length();
- if (range < len) len = range;
-
- for (uint32_t j = 0; j < len; j++) {
- Handle<Object> e(elements->get(j));
- if (!e->IsTheHole()) {
- num_of_elements++;
- if (visitor)
- visitor->visit(j, e);
+ switch (receiver->GetElementsKind()) {
+ case JSObject::FAST_ELEMENTS: {
+ Handle<FixedArray> elements(FixedArray::cast(receiver->elements()));
+ uint32_t len = elements->length();
+ if (range < len) {
+ len = range;
}
- }
- } else {
- Handle<NumberDictionary> dict(receiver->element_dictionary());
- uint32_t capacity = dict->Capacity();
- for (uint32_t j = 0; j < capacity; j++) {
- Handle<Object> k(dict->KeyAt(j));
- if (dict->IsKey(*k)) {
- ASSERT(k->IsNumber());
- uint32_t index = static_cast<uint32_t>(k->Number());
- if (index < range) {
+ for (uint32_t j = 0; j < len; j++) {
+ Handle<Object> e(elements->get(j));
+ if (!e->IsTheHole()) {
num_of_elements++;
if (visitor) {
- visitor->visit(index,
- Handle<Object>(dict->ValueAt(j)));
+ visitor->visit(j, e);
}
}
}
+ break;
}
+ case JSObject::PIXEL_ELEMENTS: {
+ Handle<PixelArray> pixels(PixelArray::cast(receiver->elements()));
+ uint32_t len = pixels->length();
+ if (range < len) {
+ len = range;
+ }
+
+ for (uint32_t j = 0; j < len; j++) {
+ num_of_elements++;
+ if (visitor != NULL) {
+ Handle<Smi> e(Smi::FromInt(pixels->get(j)));
+ visitor->visit(j, e);
+ }
+ }
+ break;
+ }
+ case JSObject::DICTIONARY_ELEMENTS: {
+ Handle<NumberDictionary> dict(receiver->element_dictionary());
+ uint32_t capacity = dict->Capacity();
+ for (uint32_t j = 0; j < capacity; j++) {
+ Handle<Object> k(dict->KeyAt(j));
+ if (dict->IsKey(*k)) {
+ ASSERT(k->IsNumber());
+ uint32_t index = static_cast<uint32_t>(k->Number());
+ if (index < range) {
+ num_of_elements++;
+ if (visitor) {
+ visitor->visit(index, Handle<Object>(dict->ValueAt(j)));
+ }
+ }
+ }
+ }
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
}
return num_of_elements;
@@ -7408,14 +7445,15 @@
// Not sure when this can happen but skip it just in case.
if (!raw_fun->IsJSFunction())
return false;
- if ((raw_fun == caller) && !(*seen_caller) && frame->IsConstructor()) {
+ if ((raw_fun == caller) && !(*seen_caller)) {
*seen_caller = true;
return false;
}
- // Skip the most obvious builtin calls. Some builtin calls (such as
- // Number.ADD which is invoked using 'call') are very difficult to
- // recognize so we're leaving them in for now.
- return !frame->receiver()->IsJSBuiltinsObject();
+ // Skip all frames until we've seen the caller. Also, skip the most
+ // obvious builtin calls. Some builtin calls (such as Number.ADD
+ // which is invoked using 'call') are very difficult to recognize
+ // so we're leaving them in for now.
+ return *seen_caller && !frame->receiver()->IsJSBuiltinsObject();
}
@@ -7424,7 +7462,7 @@
// code offset.
static Object* Runtime_CollectStackTrace(Arguments args) {
ASSERT_EQ(args.length(), 2);
- Object* caller = args[0];
+ Handle<Object> caller = args.at<Object>(0);
CONVERT_NUMBER_CHECKED(int32_t, limit, Int32, args[1]);
HandleScope scope;
@@ -7433,12 +7471,14 @@
Handle<JSArray> result = Factory::NewJSArray(initial_size * 3);
StackFrameIterator iter;
- bool seen_caller = false;
+ // If the caller parameter is a function we skip frames until we're
+ // under it before starting to collect.
+ bool seen_caller = !caller->IsJSFunction();
int cursor = 0;
int frames_seen = 0;
while (!iter.done() && frames_seen < limit) {
StackFrame* raw_frame = iter.frame();
- if (ShowFrameInStackTrace(raw_frame, caller, &seen_caller)) {
+ if (ShowFrameInStackTrace(raw_frame, *caller, &seen_caller)) {
frames_seen++;
JavaScriptFrame* frame = JavaScriptFrame::cast(raw_frame);
Object* recv = frame->receiver();
@@ -7446,15 +7486,17 @@
Address pc = frame->pc();
Address start = frame->code()->address();
Smi* offset = Smi::FromInt(pc - start);
- FixedArray* elements = result->elements();
+ FixedArray* elements = FixedArray::cast(result->elements());
if (cursor + 2 < elements->length()) {
elements->set(cursor++, recv);
elements->set(cursor++, fun);
elements->set(cursor++, offset, SKIP_WRITE_BARRIER);
} else {
HandleScope scope;
- SetElement(result, cursor++, Handle<Object>(recv));
- SetElement(result, cursor++, Handle<Object>(fun));
+ Handle<Object> recv_handle(recv);
+ Handle<Object> fun_handle(fun);
+ SetElement(result, cursor++, recv_handle);
+ SetElement(result, cursor++, fun_handle);
SetElement(result, cursor++, Handle<Smi>(offset));
}
}
diff --git a/src/spaces.h b/src/spaces.h
index 94f7a91..9841a5f 100644
--- a/src/spaces.h
+++ b/src/spaces.h
@@ -931,34 +931,41 @@
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-// HistogramInfo class for recording a single "bar" of a histogram. This
-// class is used for collecting statistics to print to stdout (when compiled
-// with DEBUG) or to the log file (when compiled with
-// ENABLE_LOGGING_AND_PROFILING).
-class HistogramInfo BASE_EMBEDDED {
+class NumberAndSizeInfo BASE_EMBEDDED {
public:
- HistogramInfo() : number_(0), bytes_(0) {}
+ NumberAndSizeInfo() : number_(0), bytes_(0) {}
- const char* name() { return name_; }
- void set_name(const char* name) { name_ = name; }
-
- int number() { return number_; }
+ int number() const { return number_; }
void increment_number(int num) { number_ += num; }
- int bytes() { return bytes_; }
+ int bytes() const { return bytes_; }
void increment_bytes(int size) { bytes_ += size; }
- // Clear the number of objects and size fields, but not the name.
void clear() {
number_ = 0;
bytes_ = 0;
}
private:
- const char* name_;
int number_;
int bytes_;
};
+
+
+// HistogramInfo class for recording a single "bar" of a histogram. This
+// class is used for collecting statistics to print to stdout (when compiled
+// with DEBUG) or to the log file (when compiled with
+// ENABLE_LOGGING_AND_PROFILING).
+class HistogramInfo: public NumberAndSizeInfo {
+ public:
+ HistogramInfo() : NumberAndSizeInfo() {}
+
+ const char* name() { return name_; }
+ void set_name(const char* name) { name_ = name; }
+
+ private:
+ const char* name_;
+};
#endif
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 7ca2677..b25f5b4 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -736,22 +736,22 @@
Object* LoadCallbackProperty(Arguments args) {
Handle<JSObject> recv = args.at<JSObject>(0);
- AccessorInfo* callback = AccessorInfo::cast(args[1]);
+ Handle<JSObject> holder = args.at<JSObject>(1);
+ AccessorInfo* callback = AccessorInfo::cast(args[2]);
+ Handle<Object> data = args.at<Object>(3);
Address getter_address = v8::ToCData<Address>(callback->getter());
v8::AccessorGetter fun = FUNCTION_CAST<v8::AccessorGetter>(getter_address);
ASSERT(fun != NULL);
- Handle<String> name = args.at<String>(2);
- Handle<JSObject> holder = args.at<JSObject>(3);
- HandleScope scope;
- Handle<Object> data(callback->data());
- LOG(ApiNamedPropertyAccess("load", *recv, *name));
+ Handle<String> name = args.at<String>(4);
// NOTE: If we can align the structure of an AccessorInfo with the
// locations of the arguments to this function maybe we don't have
// to explicitly create the structure but can just pass a pointer
// into the stack.
+ LOG(ApiNamedPropertyAccess("load", *recv, *name));
v8::AccessorInfo info(v8::Utils::ToLocal(recv),
v8::Utils::ToLocal(data),
v8::Utils::ToLocal(holder));
+ HandleScope scope;
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
@@ -787,49 +787,129 @@
return *value;
}
+/**
+ * Attempts to load a property with an interceptor (which must be present),
+ * but doesn't search the prototype chain.
+ *
+ * Returns |Heap::no_interceptor_result_sentinel()| if interceptor doesn't
+ * provide any value for the given name.
+ */
+Object* LoadPropertyWithInterceptorOnly(Arguments args) {
+ Handle<JSObject> receiver_handle = args.at<JSObject>(0);
+ Handle<JSObject> holder_handle = args.at<JSObject>(1);
+ Handle<String> name_handle = args.at<String>(2);
+ Handle<InterceptorInfo> interceptor_info = args.at<InterceptorInfo>(3);
+ Handle<Object> data_handle = args.at<Object>(4);
-Object* LoadInterceptorProperty(Arguments args) {
- JSObject* recv = JSObject::cast(args[0]);
- JSObject* holder = JSObject::cast(args[1]);
- String* name = String::cast(args[2]);
- Smi* lookup_hint = Smi::cast(args[3]);
- ASSERT(holder->HasNamedInterceptor());
- PropertyAttributes attr = NONE;
+ Address getter_address = v8::ToCData<Address>(interceptor_info->getter());
+ v8::NamedPropertyGetter getter =
+ FUNCTION_CAST<v8::NamedPropertyGetter>(getter_address);
+ ASSERT(getter != NULL);
- Object* result = holder->GetInterceptorPropertyWithLookupHint(
- recv, lookup_hint, name, &attr);
- if (result->IsFailure()) return result;
+ {
+ // Use the interceptor getter.
+ v8::AccessorInfo info(v8::Utils::ToLocal(receiver_handle),
+ v8::Utils::ToLocal(data_handle),
+ v8::Utils::ToLocal(holder_handle));
+ HandleScope scope;
+ v8::Handle<v8::Value> r;
+ {
+ // Leaving JavaScript.
+ VMState state(EXTERNAL);
+ r = getter(v8::Utils::ToLocal(name_handle), info);
+ }
+ RETURN_IF_SCHEDULED_EXCEPTION();
+ if (!r.IsEmpty()) {
+ return *v8::Utils::OpenHandle(*r);
+ }
+ }
- // If the property is present, return it.
- if (attr != ABSENT) return result;
+ return Heap::no_interceptor_result_sentinel();
+}
- // If the top frame is an internal frame, this is really a call
- // IC. In this case, we simply return the undefined result which
- // will lead to an exception when trying to invoke the result as a
- // function.
- StackFrameIterator it;
- it.Advance(); // skip exit frame
- if (it.frame()->is_internal()) return result;
+static Object* ThrowReferenceError(String* name) {
// If the load is non-contextual, just return the undefined result.
// Note that both keyed and non-keyed loads may end up here, so we
// can't use either LoadIC or KeyedLoadIC constructors.
IC ic(IC::NO_EXTRA_FRAME);
ASSERT(ic.target()->is_load_stub() || ic.target()->is_keyed_load_stub());
- if (!ic.is_contextual()) return result;
+ if (!ic.is_contextual()) return Heap::undefined_value();
// Throw a reference error.
+ HandleScope scope;
+ Handle<String> name_handle(name);
+ Handle<Object> error =
+ Factory::NewReferenceError("not_defined",
+ HandleVector(&name_handle, 1));
+ return Top::Throw(*error);
+}
+
+
+static Object* LoadWithInterceptor(Arguments* args,
+ PropertyAttributes* attrs) {
+ Handle<JSObject> receiver_handle = args->at<JSObject>(0);
+ Handle<JSObject> holder_handle = args->at<JSObject>(1);
+ Handle<String> name_handle = args->at<String>(2);
+ Handle<InterceptorInfo> interceptor_info = args->at<InterceptorInfo>(3);
+ Handle<Object> data_handle = args->at<Object>(4);
+
+ Address getter_address = v8::ToCData<Address>(interceptor_info->getter());
+ v8::NamedPropertyGetter getter =
+ FUNCTION_CAST<v8::NamedPropertyGetter>(getter_address);
+ ASSERT(getter != NULL);
+
{
+ // Use the interceptor getter.
+ v8::AccessorInfo info(v8::Utils::ToLocal(receiver_handle),
+ v8::Utils::ToLocal(data_handle),
+ v8::Utils::ToLocal(holder_handle));
HandleScope scope;
- // We cannot use the raw name pointer here since getting the
- // property might cause a GC. However, we can get the name from
- // the stack using the arguments object.
- Handle<String> name_handle = args.at<String>(2);
- Handle<Object> error =
- Factory::NewReferenceError("not_defined",
- HandleVector(&name_handle, 1));
- return Top::Throw(*error);
+ v8::Handle<v8::Value> r;
+ {
+ // Leaving JavaScript.
+ VMState state(EXTERNAL);
+ r = getter(v8::Utils::ToLocal(name_handle), info);
+ }
+ RETURN_IF_SCHEDULED_EXCEPTION();
+ if (!r.IsEmpty()) {
+ *attrs = NONE;
+ return *v8::Utils::OpenHandle(*r);
+ }
}
+
+ Object* result = holder_handle->GetPropertyPostInterceptor(
+ *receiver_handle,
+ *name_handle,
+ attrs);
+ RETURN_IF_SCHEDULED_EXCEPTION();
+ return result;
+}
+
+
+/**
+ * Loads a property with an interceptor performing post interceptor
+ * lookup if interceptor failed.
+ */
+Object* LoadPropertyWithInterceptorForLoad(Arguments args) {
+ PropertyAttributes attr = NONE;
+ Object* result = LoadWithInterceptor(&args, &attr);
+ if (result->IsFailure()) return result;
+
+ // If the property is present, return it.
+ if (attr != ABSENT) return result;
+ return ThrowReferenceError(String::cast(args[2]));
+}
+
+
+Object* LoadPropertyWithInterceptorForCall(Arguments args) {
+ PropertyAttributes attr;
+ Object* result = LoadWithInterceptor(&args, &attr);
+ RETURN_IF_SCHEDULED_EXCEPTION();
+ // This is a call IC. In this case, we simply return the undefined result which
+ // will lead to an exception when trying to invoke the result as a
+ // function.
+ return result;
}
diff --git a/src/stub-cache.h b/src/stub-cache.h
index c6b002b..3b3caad 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -307,7 +307,9 @@
// Support functions for IC stubs for interceptors.
-Object* LoadInterceptorProperty(Arguments args);
+Object* LoadPropertyWithInterceptorOnly(Arguments args);
+Object* LoadPropertyWithInterceptorForLoad(Arguments args);
+Object* LoadPropertyWithInterceptorForCall(Arguments args);
Object* StoreInterceptorProperty(Arguments args);
Object* CallInterceptorProperty(Arguments args);
@@ -377,13 +379,6 @@
Label* miss_label);
static void GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind);
- protected:
- Object* GetCodeWithFlags(Code::Flags flags, const char* name);
- Object* GetCodeWithFlags(Code::Flags flags, String* name);
-
- MacroAssembler* masm() { return &masm_; }
- void set_failure(Failure* failure) { failure_ = failure; }
-
// Check the integrity of the prototype chain to make sure that the
// current IC is still valid.
Register CheckPrototypes(JSObject* object,
@@ -394,6 +389,13 @@
String* name,
Label* miss);
+ protected:
+ Object* GetCodeWithFlags(Code::Flags flags, const char* name);
+ Object* GetCodeWithFlags(Code::Flags flags, String* name);
+
+ MacroAssembler* masm() { return &masm_; }
+ void set_failure(Failure* failure) { failure_ = failure; }
+
void GenerateLoadField(JSObject* object,
JSObject* holder,
Register receiver,
@@ -424,7 +426,7 @@
void GenerateLoadInterceptor(JSObject* object,
JSObject* holder,
- Smi* lookup_hint,
+ LookupResult* lookup,
Register receiver,
Register name_reg,
Register scratch1,
diff --git a/src/usage-analyzer.cc b/src/usage-analyzer.cc
index 36464fa..5514f40 100644
--- a/src/usage-analyzer.cc
+++ b/src/usage-analyzer.cc
@@ -42,7 +42,7 @@
class UsageComputer: public AstVisitor {
public:
- static bool Traverse(Node* node);
+ static bool Traverse(AstNode* node);
void VisitBlock(Block* node);
void VisitDeclaration(Declaration* node);
@@ -116,7 +116,7 @@
// ----------------------------------------------------------------------------
// Implementation of UsageComputer
-bool UsageComputer::Traverse(Node* node) {
+bool UsageComputer::Traverse(AstNode* node) {
UsageComputer uc(InitialWeight, false);
uc.Visit(node);
return !uc.HasStackOverflow();
diff --git a/src/version.cc b/src/version.cc
index d281151..8fcbf8e 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,8 +34,8 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 1
#define MINOR_VERSION 3
-#define BUILD_NUMBER 0
-#define PATCH_LEVEL 1
+#define BUILD_NUMBER 1
+#define PATCH_LEVEL 0
#define CANDIDATE_VERSION false
// Define SONAME to have the SCons build the put a specific SONAME into the
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index f511145..5e70f9e 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -687,6 +687,13 @@
}
+void Assembler::cdq() {
+ EnsureSpace ensure_space(this);
+ last_pc_ = pc_;
+ emit(0x99);
+}
+
+
void Assembler::cmovq(Condition cc, Register dst, Register src) {
// No need to check CpuInfo for CMOV support, it's a required part of the
// 64-bit architecture.
@@ -807,7 +814,7 @@
}
-void Assembler::idiv(Register src) {
+void Assembler::idivq(Register src) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
emit_rex_64(src);
@@ -816,6 +823,15 @@
}
+void Assembler::idivl(Register src) {
+ EnsureSpace ensure_space(this);
+ last_pc_ = pc_;
+ emit_optional_rex_32(src);
+ emit(0xF7);
+ emit_modrm(0x7, src);
+}
+
+
void Assembler::imul(Register src) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
@@ -1124,6 +1140,9 @@
void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) {
+ // This method must not be used with heap object references. The stored
+ // address is not GC safe. Use the handle version instead.
+ ASSERT(rmode > RelocInfo::LAST_GCED_ENUM);
EnsureSpace ensure_space(this);
last_pc_ = pc_;
emit_rex_64(dst);
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index 4b1eb7a..ad4721d 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -292,6 +292,7 @@
times_4 = 2,
times_8 = 3,
times_int_size = times_4,
+ times_half_pointer_size = times_4,
times_pointer_size = times_8
};
@@ -632,9 +633,13 @@
// Sign-extends rax into rdx:rax.
void cqo();
+ // Sign-extends eax into edx:eax.
+ void cdq();
// Divide rdx:rax by src. Quotient in rax, remainder in rdx.
- void idiv(Register src);
+ void idivq(Register src);
+ // Divide edx:eax by lower 32 bits of src. Quotient in eax, rem. in edx.
+ void idivl(Register src);
// Signed multiply instructions.
void imul(Register src); // rdx:rax = rax * src.
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index 66e4d39..3112ecc 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -389,6 +389,112 @@
#endif
+class DeferredReferenceGetKeyedValue: public DeferredCode {
+ public:
+ explicit DeferredReferenceGetKeyedValue(Register dst,
+ Register receiver,
+ Register key,
+ bool is_global)
+ : dst_(dst), receiver_(receiver), key_(key), is_global_(is_global) {
+ set_comment("[ DeferredReferenceGetKeyedValue");
+ }
+
+ virtual void Generate();
+
+ Label* patch_site() { return &patch_site_; }
+
+ private:
+ Label patch_site_;
+ Register dst_;
+ Register receiver_;
+ Register key_;
+ bool is_global_;
+};
+
+
+void DeferredReferenceGetKeyedValue::Generate() {
+ __ push(receiver_); // First IC argument.
+ __ push(key_); // Second IC argument.
+
+ // Calculate the delta from the IC call instruction to the map check
+ // movq instruction in the inlined version. This delta is stored in
+ // a test(rax, delta) instruction after the call so that we can find
+ // it in the IC initialization code and patch the movq instruction.
+ // This means that we cannot allow test instructions after calls to
+ // KeyedLoadIC stubs in other places.
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ RelocInfo::Mode mode = is_global_
+ ? RelocInfo::CODE_TARGET_CONTEXT
+ : RelocInfo::CODE_TARGET;
+ __ Call(ic, mode);
+ // The delta from the start of the map-compare instruction to the
+ // test instruction. We use masm_-> directly here instead of the __
+ // macro because the macro sometimes uses macro expansion to turn
+ // into something that can't return a value. This is encountered
+ // when doing generated code coverage tests.
+ int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
+ // Here we use masm_-> instead of the __ macro because this is the
+ // instruction that gets patched and coverage code gets in the way.
+ // TODO(X64): Consider whether it's worth switching the test to a
+ // 7-byte NOP with non-zero immediate (0f 1f 80 xxxxxxxx) which won't
+ // be generated normally.
+ masm_->testl(rax, Immediate(-delta_to_patch_site));
+ __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
+
+ if (!dst_.is(rax)) __ movq(dst_, rax);
+ __ pop(key_);
+ __ pop(receiver_);
+}
+
+
+class DeferredReferenceSetKeyedValue: public DeferredCode {
+ public:
+ DeferredReferenceSetKeyedValue(Register value,
+ Register key,
+ Register receiver)
+ : value_(value), key_(key), receiver_(receiver) {
+ set_comment("[ DeferredReferenceSetKeyedValue");
+ }
+
+ virtual void Generate();
+
+ Label* patch_site() { return &patch_site_; }
+
+ private:
+ Register value_;
+ Register key_;
+ Register receiver_;
+ Label patch_site_;
+};
+
+
+void DeferredReferenceSetKeyedValue::Generate() {
+ __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
+ // Push receiver and key arguments on the stack.
+ __ push(receiver_);
+ __ push(key_);
+ // Move value argument to eax as expected by the IC stub.
+ if (!value_.is(rax)) __ movq(rax, value_);
+ // Call the IC stub.
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ __ Call(ic, RelocInfo::CODE_TARGET);
+ // The delta from the start of the map-compare instructions (initial movq)
+ // to the test instruction. We use masm_-> directly here instead of the
+ // __ macro because the macro sometimes uses macro expansion to turn
+ // into something that can't return a value. This is encountered
+ // when doing generated code coverage tests.
+ int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
+ // Here we use masm_-> instead of the __ macro because this is the
+ // instruction that gets patched and coverage code gets in the way.
+ masm_->testl(rax, Immediate(-delta_to_patch_site));
+ // Restore value (returned from store IC), key and receiver
+ // registers.
+ if (!value_.is(rax)) __ movq(value_, rax);
+ __ pop(key_);
+ __ pop(receiver_);
+}
+
+
class DeferredStackCheck: public DeferredCode {
public:
DeferredStackCheck() {
@@ -2193,9 +2299,8 @@
// The receiver is the argument to the runtime call. It is the
// first value pushed when the reference was loaded to the
// frame.
- // TODO(X64): Enable this and the switch back to fast, once they work.
- // frame_->PushElementAt(target.size() - 1);
- // Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
+ frame_->PushElementAt(target.size() - 1);
+ Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
}
if (node->op() == Token::ASSIGN ||
node->op() == Token::INIT_VAR ||
@@ -2203,20 +2308,18 @@
Load(node->value());
} else {
- // Literal* literal = node->value()->AsLiteral();
+ Literal* literal = node->value()->AsLiteral();
bool overwrite_value =
(node->value()->AsBinaryOperation() != NULL &&
node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
- // Variable* right_var = node->value()->AsVariableProxy()->AsVariable();
+ Variable* right_var = node->value()->AsVariableProxy()->AsVariable();
// There are two cases where the target is not read in the right hand
// side, that are easy to test for: the right hand side is a literal,
// or the right hand side is a different variable. TakeValue invalidates
// the target, with an implicit promise that it will be written to again
// before it is read.
- // TODO(X64): Implement TakeValue optimization. Check issue 150016.
- if (false) {
- // if (literal != NULL || (right_var != NULL && right_var != var)) {
- // target.TakeValue(NOT_INSIDE_TYPEOF);
+ if (literal != NULL || (right_var != NULL && right_var != var)) {
+ target.TakeValue(NOT_INSIDE_TYPEOF);
} else {
target.GetValue(NOT_INSIDE_TYPEOF);
}
@@ -2247,9 +2350,8 @@
// argument to the runtime call is the receiver, which is the
// first value pushed as part of the reference, which is below
// the lhs value.
- // TODO(X64): Enable this once ToFastProperties works.
- // frame_->PushElementAt(target.size());
- // Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
+ frame_->PushElementAt(target.size());
+ Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
}
}
}
@@ -3645,7 +3747,7 @@
// Smi => false iff zero.
ASSERT(kSmiTag == 0);
- __ testq(value.reg(), value.reg());
+ __ testl(value.reg(), value.reg());
dest->false_target()->Branch(zero);
__ testl(value.reg(), Immediate(kSmiTagMask));
dest->true_target()->Branch(zero);
@@ -3728,7 +3830,7 @@
} else {
// Anything else is a runtime error.
Load(e);
- // frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
+ frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
}
in_spilled_code_ = was_in_spilled_code;
@@ -4130,7 +4232,7 @@
// A test rax instruction following the call signals that the inobject
// property case was inlined. Ensure that there is not a test eax
// instruction here.
- __ nop();
+ masm_->nop();
// Discard the global object. The result is in answer.
frame_->Drop();
return answer;
@@ -4700,7 +4802,7 @@
int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
// Here we use masm_-> instead of the __ macro because this is the
// instruction that gets patched and coverage code gets in the way.
- masm_->testq(rax, Immediate(-delta_to_patch_site));
+ masm_->testl(rax, Immediate(-delta_to_patch_site));
__ IncrementCounter(&Counters::named_load_inline_miss, 1);
if (!dst_.is(rax)) __ movq(dst_, rax);
@@ -4851,10 +4953,8 @@
Label add_success;
__ j(no_overflow, &add_success);
__ subl(operand->reg(), Immediate(smi_value));
- __ movsxlq(operand->reg(), operand->reg());
deferred->Jump();
__ bind(&add_success);
- __ movsxlq(operand->reg(), operand->reg());
deferred->BindExit();
frame_->Push(operand);
break;
@@ -4965,35 +5065,36 @@
}
deferred->Branch(not_zero);
- if (!left_is_in_rax) __ movq(rax, left->reg());
- // Sign extend rax into rdx:rax.
- __ cqo();
+ // All operations on the smi values are on 32-bit registers, which are
+ // zero-extended into 64-bits by all 32-bit operations.
+ if (!left_is_in_rax) __ movl(rax, left->reg());
+ // Sign extend eax into edx:eax.
+ __ cdq();
// Check for 0 divisor.
- __ testq(right->reg(), right->reg());
+ __ testl(right->reg(), right->reg());
deferred->Branch(zero);
// Divide rdx:rax by the right operand.
- __ idiv(right->reg());
+ __ idivl(right->reg());
// Complete the operation.
if (op == Token::DIV) {
- // Check for negative zero result. If result is zero, and divisor
- // is negative, return a floating point negative zero. The
- // virtual frame is unchanged in this block, so local control flow
- // can use a Label rather than a JumpTarget.
+ // Check for negative zero result. If the result is zero, and the
+ // divisor is negative, return a floating point negative zero.
Label non_zero_result;
- __ testq(left->reg(), left->reg());
+ __ testl(left->reg(), left->reg());
__ j(not_zero, &non_zero_result);
- __ testq(right->reg(), right->reg());
+ __ testl(right->reg(), right->reg());
deferred->Branch(negative);
+ // The frame is identical on all paths reaching this label.
__ bind(&non_zero_result);
// Check for the corner case of dividing the most negative smi by
// -1. We cannot use the overflow flag, since it is not set by
// idiv instruction.
ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
- __ cmpq(rax, Immediate(0x40000000));
+ __ cmpl(rax, Immediate(0x40000000));
deferred->Branch(equal);
// Check that the remainder is zero.
- __ testq(rdx, rdx);
+ __ testl(rdx, rdx);
deferred->Branch(not_zero);
// Tag the result and store it in the quotient register.
ASSERT(kSmiTagSize == times_2); // adjust code if not the case
@@ -5004,15 +5105,14 @@
frame_->Push("ient);
} else {
ASSERT(op == Token::MOD);
- // Check for a negative zero result. If the result is zero, and
- // the dividend is negative, return a floating point negative
- // zero. The frame is unchanged in this block, so local control
- // flow can use a Label rather than a JumpTarget.
+ // Check for a negative zero result. If the result is zero, and the
+ // dividend is negative, return a floating point negative zero.
Label non_zero_result;
- __ testq(rdx, rdx);
+ __ testl(rdx, rdx);
__ j(not_zero, &non_zero_result);
- __ testq(left->reg(), left->reg());
+ __ testl(left->reg(), left->reg());
deferred->Branch(negative);
+ // The frame is identical on all paths reaching this label.
__ bind(&non_zero_result);
deferred->BindExit();
left->Unuse();
@@ -5056,9 +5156,9 @@
deferred->Branch(not_zero);
// Untag both operands.
- __ movq(answer.reg(), left->reg());
- __ sar(answer.reg(), Immediate(kSmiTagSize));
- __ sar(rcx, Immediate(kSmiTagSize));
+ __ movl(answer.reg(), left->reg());
+ __ sarl(answer.reg(), Immediate(kSmiTagSize));
+ __ sarl(rcx, Immediate(kSmiTagSize));
// Perform the operation.
switch (op) {
case Token::SAR:
@@ -5154,7 +5254,7 @@
ASSERT(kSmiTag == 0); // Adjust code below if not the case.
// Remove smi tag from the left operand (but keep sign).
// Left-hand operand has been copied into answer.
- __ sar(answer.reg(), Immediate(kSmiTagSize));
+ __ sarl(answer.reg(), Immediate(kSmiTagSize));
// Do multiplication of smis, leaving result in answer.
__ imull(answer.reg(), right->reg());
// Go slow on overflows.
@@ -5164,7 +5264,7 @@
// in this block, so local control flow can use a Label rather
// than a JumpTarget.
Label non_zero_result;
- __ testq(answer.reg(), answer.reg());
+ __ testl(answer.reg(), answer.reg());
__ j(not_zero, &non_zero_result);
__ movq(answer.reg(), left->reg());
__ or_(answer.reg(), right->reg());
@@ -5183,6 +5283,7 @@
break;
case Token::BIT_XOR:
+ ASSERT(kSmiTag == 0); // Adjust code below if not the case.
__ xor_(answer.reg(), right->reg());
break;
@@ -5288,7 +5389,8 @@
kScratchRegister);
// This branch is always a forwards branch so it's always a fixed
// size which allows the assert below to succeed and patching to work.
- deferred->Branch(not_equal);
+ // Don't use deferred->Branch(...), since that might add coverage code.
+ masm->j(not_equal, deferred->entry_label());
// The delta from the patch label to the load offset must be
// statically known.
@@ -5315,26 +5417,118 @@
Variable* var = expression_->AsVariableProxy()->AsVariable();
bool is_global = var != NULL;
ASSERT(!is_global || var->is_global());
+
// Inline array load code if inside of a loop. We do not know
// the receiver map yet, so we initially generate the code with
// a check against an invalid map. In the inline cache code, we
// patch the map check if appropriate.
+ if (cgen_->loop_nesting() > 0) {
+ Comment cmnt(masm, "[ Inlined load from keyed Property");
- // TODO(x64): Implement inlined loads for keyed properties.
- // Make sure to load length field as a 32-bit quantity.
- // Comment cmnt(masm, "[ Load from keyed Property");
+ Result key = cgen_->frame()->Pop();
+ Result receiver = cgen_->frame()->Pop();
+ key.ToRegister();
+ receiver.ToRegister();
- RelocInfo::Mode mode = is_global
- ? RelocInfo::CODE_TARGET_CONTEXT
- : RelocInfo::CODE_TARGET;
- Result answer = cgen_->frame()->CallKeyedLoadIC(mode);
- // Make sure that we do not have a test instruction after the
- // call. A test instruction after the call is used to
- // indicate that we have generated an inline version of the
- // keyed load. The explicit nop instruction is here because
- // the push that follows might be peep-hole optimized away.
- __ nop();
- cgen_->frame()->Push(&answer);
+ // Use a fresh temporary to load the elements without destroying
+ // the receiver which is needed for the deferred slow case.
+ Result elements = cgen_->allocator()->Allocate();
+ ASSERT(elements.is_valid());
+
+ // Use a fresh temporary for the index and later the loaded
+ // value.
+ Result index = cgen_->allocator()->Allocate();
+ ASSERT(index.is_valid());
+
+ DeferredReferenceGetKeyedValue* deferred =
+ new DeferredReferenceGetKeyedValue(index.reg(),
+ receiver.reg(),
+ key.reg(),
+ is_global);
+
+ // Check that the receiver is not a smi (only needed if this
+ // is not a load from the global context) and that it has the
+ // expected map.
+ if (!is_global) {
+ __ testl(receiver.reg(), Immediate(kSmiTagMask));
+ deferred->Branch(zero);
+ }
+
+ // Initially, use an invalid map. The map is patched in the IC
+ // initialization code.
+ __ bind(deferred->patch_site());
+ // Use masm-> here instead of the double underscore macro since extra
+ // coverage code can interfere with the patching.
+ masm->movq(kScratchRegister, Factory::null_value(),
+ RelocInfo::EMBEDDED_OBJECT);
+ masm->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
+ kScratchRegister);
+ deferred->Branch(not_equal);
+
+ // Check that the key is a non-negative smi.
+ __ testl(key.reg(),
+ Immediate(static_cast<int32_t>(kSmiTagMask | 0x80000000u)));
+ deferred->Branch(not_zero);
+
+ // Get the elements array from the receiver and check that it
+ // is not a dictionary.
+ __ movq(elements.reg(),
+ FieldOperand(receiver.reg(), JSObject::kElementsOffset));
+ __ Cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
+ Factory::fixed_array_map());
+ deferred->Branch(not_equal);
+
+ // Shift the key to get the actual index value and check that
+ // it is within bounds.
+ __ movl(index.reg(), key.reg());
+ __ shrl(index.reg(), Immediate(kSmiTagSize));
+ __ cmpl(index.reg(),
+ FieldOperand(elements.reg(), FixedArray::kLengthOffset));
+ deferred->Branch(above_equal);
+
+ // The index register holds the un-smi-tagged key. It has been
+ // zero-extended to 64-bits, so it can be used directly as index in the
+ // operand below.
+ // Load and check that the result is not the hole. We could
+ // reuse the index or elements register for the value.
+ //
+ // TODO(206): Consider whether it makes sense to try some
+ // heuristic about which register to reuse. For example, if
+ // one is rax, the we can reuse that one because the value
+ // coming from the deferred code will be in rax.
+ Result value = index;
+ __ movq(value.reg(),
+ Operand(elements.reg(),
+ index.reg(),
+ times_pointer_size,
+ FixedArray::kHeaderSize - kHeapObjectTag));
+ elements.Unuse();
+ index.Unuse();
+ __ Cmp(value.reg(), Factory::the_hole_value());
+ deferred->Branch(equal);
+ __ IncrementCounter(&Counters::keyed_load_inline, 1);
+
+ deferred->BindExit();
+ // Restore the receiver and key to the frame and push the
+ // result on top of it.
+ cgen_->frame()->Push(&receiver);
+ cgen_->frame()->Push(&key);
+ cgen_->frame()->Push(&value);
+
+ } else {
+ Comment cmnt(masm, "[ Load from keyed Property");
+ RelocInfo::Mode mode = is_global
+ ? RelocInfo::CODE_TARGET_CONTEXT
+ : RelocInfo::CODE_TARGET;
+ Result answer = cgen_->frame()->CallKeyedLoadIC(mode);
+ // Make sure that we do not have a test instruction after the
+ // call. A test instruction after the call is used to
+ // indicate that we have generated an inline version of the
+ // keyed load. The explicit nop instruction is here because
+ // the push that follows might be peep-hole optimized away.
+ __ nop();
+ cgen_->frame()->Push(&answer);
+ }
break;
}
@@ -5401,15 +5595,105 @@
case KEYED: {
Comment cmnt(masm, "[ Store to keyed Property");
- // TODO(x64): Implement inlined version of keyed stores.
+ // Generate inlined version of the keyed store if the code is in
+ // a loop and the key is likely to be a smi.
+ Property* property = expression()->AsProperty();
+ ASSERT(property != NULL);
+ SmiAnalysis* key_smi_analysis = property->key()->type();
- Result answer = cgen_->frame()->CallKeyedStoreIC();
- // Make sure that we do not have a test instruction after the
- // call. A test instruction after the call is used to
- // indicate that we have generated an inline version of the
- // keyed store.
- __ nop();
- cgen_->frame()->Push(&answer);
+ if (cgen_->loop_nesting() > 0 && key_smi_analysis->IsLikelySmi()) {
+ Comment cmnt(masm, "[ Inlined store to keyed Property");
+
+ // Get the receiver, key and value into registers.
+ Result value = cgen_->frame()->Pop();
+ Result key = cgen_->frame()->Pop();
+ Result receiver = cgen_->frame()->Pop();
+
+ Result tmp = cgen_->allocator_->Allocate();
+ ASSERT(tmp.is_valid());
+
+ // Determine whether the value is a constant before putting it
+ // in a register.
+ bool value_is_constant = value.is_constant();
+
+ // Make sure that value, key and receiver are in registers.
+ value.ToRegister();
+ key.ToRegister();
+ receiver.ToRegister();
+
+ DeferredReferenceSetKeyedValue* deferred =
+ new DeferredReferenceSetKeyedValue(value.reg(),
+ key.reg(),
+ receiver.reg());
+
+ // Check that the value is a smi if it is not a constant.
+ // We can skip the write barrier for smis and constants.
+ if (!value_is_constant) {
+ __ testl(value.reg(), Immediate(kSmiTagMask));
+ deferred->Branch(not_zero);
+ }
+
+ // Check that the key is a non-negative smi.
+ __ testl(key.reg(),
+ Immediate(static_cast<uint32_t>(kSmiTagMask | 0x80000000U)));
+ deferred->Branch(not_zero);
+
+ // Check that the receiver is not a smi.
+ __ testl(receiver.reg(), Immediate(kSmiTagMask));
+ deferred->Branch(zero);
+
+ // Check that the receiver is a JSArray.
+ __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, kScratchRegister);
+ deferred->Branch(not_equal);
+
+ // Check that the key is within bounds. Both the key and the
+ // length of the JSArray are smis, so compare only low 32 bits.
+ __ cmpl(key.reg(),
+ FieldOperand(receiver.reg(), JSArray::kLengthOffset));
+ deferred->Branch(greater_equal);
+
+ // Get the elements array from the receiver and check that it
+ // is a flat array (not a dictionary).
+ __ movq(tmp.reg(),
+ FieldOperand(receiver.reg(), JSObject::kElementsOffset));
+ // Bind the deferred code patch site to be able to locate the
+ // fixed array map comparison. When debugging, we patch this
+ // comparison to always fail so that we will hit the IC call
+ // in the deferred code which will allow the debugger to
+ // break for fast case stores.
+ __ bind(deferred->patch_site());
+ // Avoid using __ to ensure the distance from patch_site
+ // to the map address is always the same.
+ masm->movq(kScratchRegister, Factory::fixed_array_map(),
+ RelocInfo::EMBEDDED_OBJECT);
+ __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
+ kScratchRegister);
+ deferred->Branch(not_equal);
+
+ // Store the value.
+ ASSERT_EQ(1, kSmiTagSize);
+ ASSERT_EQ(0, kSmiTag);
+ __ movq(Operand(tmp.reg(),
+ key.reg(),
+ times_half_pointer_size,
+ FixedArray::kHeaderSize - kHeapObjectTag),
+ value.reg());
+ __ IncrementCounter(&Counters::keyed_store_inline, 1);
+
+ deferred->BindExit();
+
+ cgen_->frame()->Push(&receiver);
+ cgen_->frame()->Push(&key);
+ cgen_->frame()->Push(&value);
+ } else {
+ Result answer = cgen_->frame()->CallKeyedStoreIC();
+ // Make sure that we do not have a test instruction after the
+ // call. A test instruction after the call is used to
+ // indicate that we have generated an inline version of the
+ // keyed store.
+ masm->nop();
+ cgen_->frame()->Push(&answer);
+ }
break;
}
@@ -6430,7 +6714,7 @@
__ jmp(&done);
__ bind(&load_smi);
- __ sar(src, Immediate(kSmiTagSize));
+ __ sarl(src, Immediate(kSmiTagSize));
__ cvtlsi2sd(dst, src);
__ bind(&done);
@@ -6563,7 +6847,7 @@
// Smi check both operands.
__ movq(rcx, rbx);
- __ or_(rcx, rax);
+ __ or_(rcx, rax); // The value in ecx is used for negative zero test later.
__ testl(rcx, Immediate(kSmiTagMask));
__ j(not_zero, slow);
@@ -6571,14 +6855,12 @@
case Token::ADD: {
__ addl(rax, rbx);
__ j(overflow, slow); // The slow case rereads operands from the stack.
- __ movsxlq(rax, rax); // Sign extend eax into rax.
break;
}
case Token::SUB: {
__ subl(rax, rbx);
__ j(overflow, slow); // The slow case rereads operands from the stack.
- __ movsxlq(rax, rax); // Sign extend eax into rax.
break;
}
@@ -6586,27 +6868,25 @@
// If the smi tag is 0 we can just leave the tag on one operand.
ASSERT(kSmiTag == 0); // adjust code below if not the case
// Remove tag from one of the operands (but keep sign).
- __ sar(rax, Immediate(kSmiTagSize));
+ __ sarl(rax, Immediate(kSmiTagSize));
// Do multiplication.
__ imull(rax, rbx); // multiplication of smis; result in eax
// Go slow on overflows.
__ j(overflow, slow);
// Check for negative zero result.
- __ movsxlq(rax, rax); // Sign extend eax into rax.
- __ NegativeZeroTest(rax, rcx, slow); // use rcx = x | y
+ __ NegativeZeroTest(rax, rcx, slow); // ecx (not rcx) holds x | y.
break;
case Token::DIV:
- // Sign extend rax into rdx:rax
- // (also sign extends eax into edx if eax is Smi).
- __ cqo();
+ // Sign extend eax into edx:eax.
+ __ cdq();
// Check for 0 divisor.
- __ testq(rbx, rbx);
+ __ testl(rbx, rbx);
__ j(zero, slow);
- // Divide rdx:rax by rbx (where rdx:rax is equivalent to the smi in eax).
- __ idiv(rbx);
+ // Divide edx:eax by ebx (where edx:eax is equivalent to the smi in eax).
+ __ idivl(rbx);
// Check that the remainder is zero.
- __ testq(rdx, rdx);
+ __ testl(rdx, rdx);
__ j(not_zero, slow);
// Check for the corner case of dividing the most negative smi
// by -1. We cannot use the overflow flag, since it is not set
@@ -6614,28 +6894,27 @@
ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
// TODO(X64): TODO(Smi): Smi implementation dependent constant.
// Value is Smi::fromInt(-(1<<31)) / Smi::fromInt(-1)
- __ cmpq(rax, Immediate(0x40000000));
+ __ cmpl(rax, Immediate(0x40000000));
__ j(equal, slow);
// Check for negative zero result.
- __ NegativeZeroTest(rax, rcx, slow); // use ecx = x | y
+ __ NegativeZeroTest(rax, rcx, slow); // ecx (not rcx) holds x | y.
// Tag the result and store it in register rax.
ASSERT(kSmiTagSize == times_2); // adjust code if not the case
__ lea(rax, Operand(rax, rax, times_1, kSmiTag));
break;
case Token::MOD:
- // Sign extend rax into rdx:rax
- // (also sign extends eax into edx if eax is Smi).
- __ cqo();
+ // Sign extend eax into edx:eax
+ __ cdq();
// Check for 0 divisor.
- __ testq(rbx, rbx);
+ __ testl(rbx, rbx);
__ j(zero, slow);
- // Divide rdx:rax by rbx.
- __ idiv(rbx);
+ // Divide edx:eax by ebx.
+ __ idivl(rbx);
// Check for negative zero result.
- __ NegativeZeroTest(rdx, rcx, slow); // use ecx = x | y
+ __ NegativeZeroTest(rdx, rcx, slow); // ecx (not rcx) holds x | y.
// Move remainder to register rax.
- __ movq(rax, rdx);
+ __ movl(rax, rdx);
break;
case Token::BIT_OR:
@@ -6655,7 +6934,7 @@
case Token::SHR:
case Token::SAR:
// Move the second operand into register ecx.
- __ movq(rcx, rbx);
+ __ movl(rcx, rbx);
// Remove tags from operands (but keep sign).
__ sarl(rax, Immediate(kSmiTagSize));
__ sarl(rcx, Immediate(kSmiTagSize));
diff --git a/src/x64/codegen-x64.h b/src/x64/codegen-x64.h
index bb4b538..9e69007 100644
--- a/src/x64/codegen-x64.h
+++ b/src/x64/codegen-x64.h
@@ -361,7 +361,7 @@
#define DEF_VISIT(type) \
void Visit##type(type* node);
- NODE_LIST(DEF_VISIT)
+ AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
// Visit a statement and then spill the virtual frame if control flow can
@@ -548,7 +548,7 @@
// information.
void CodeForFunctionPosition(FunctionLiteral* fun);
void CodeForReturnPosition(FunctionLiteral* fun);
- void CodeForStatementPosition(Node* node);
+ void CodeForStatementPosition(AstNode* node);
void CodeForSourcePosition(int pos);
#ifdef DEBUG
diff --git a/src/x64/disasm-x64.cc b/src/x64/disasm-x64.cc
index 8b746c4..83e3149 100644
--- a/src/x64/disasm-x64.cc
+++ b/src/x64/disasm-x64.cc
@@ -1343,6 +1343,39 @@
data += 2;
break;
+ case 0xA1: // Fall through.
+ case 0xA3:
+ switch (operand_size()) {
+ case DOUBLEWORD_SIZE: {
+ const char* memory_location = NameOfAddress(
+ reinterpret_cast<byte*>(
+ *reinterpret_cast<int32_t*>(data + 1)));
+ if (*data == 0xA3) { // Opcode 0xA3
+ AppendToBuffer("movzxlq rax,(%s)", memory_location);
+ } else { // Opcode 0xA1
+ AppendToBuffer("movzxlq (%s),rax", memory_location);
+ }
+ data += 5;
+ break;
+ }
+ case QUADWORD_SIZE: {
+ // New x64 instruction mov rax,(imm_64).
+ const char* memory_location = NameOfAddress(
+ *reinterpret_cast<byte**>(data + 1));
+ if (*data == 0xA3) { // Opcode 0xA3
+ AppendToBuffer("movq rax,(%s)", memory_location);
+ } else { // Opcode 0xA1
+ AppendToBuffer("movq (%s),rax", memory_location);
+ }
+ data += 9;
+ break;
+ }
+ default:
+ UnimplementedInstruction();
+ data += 2;
+ }
+ break;
+
case 0xA8:
AppendToBuffer("test al,0x%x", *reinterpret_cast<uint8_t*>(data + 1));
data += 2;
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index 247e9e6..0ef75f8 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -87,8 +87,7 @@
// Check that the properties array is a dictionary.
__ movq(r0, FieldOperand(r1, JSObject::kPropertiesOffset));
- __ Cmp(FieldOperand(r0, HeapObject::kMapOffset),
- Factory::hash_table_map());
+ __ Cmp(FieldOperand(r0, HeapObject::kMapOffset), Factory::hash_table_map());
__ j(not_equal, miss_label);
// Compute the capacity mask.
@@ -160,16 +159,64 @@
}
-void KeyedLoadIC::ClearInlinedVersion(Address address) {
- // TODO(X64): Implement this when LoadIC is enabled.
+// One byte opcode for test eax,0xXXXXXXXX.
+static const byte kTestEaxByte = 0xA9;
+
+
+static bool PatchInlinedMapCheck(Address address, Object* map) {
+ // Arguments are address of start of call sequence that called
+ // the IC,
+ Address test_instruction_address =
+ address + Assembler::kTargetAddrToReturnAddrDist;
+ // The keyed load has a fast inlined case if the IC call instruction
+ // is immediately followed by a test instruction.
+ if (*test_instruction_address != kTestEaxByte) return false;
+
+ // Fetch the offset from the test instruction to the map compare
+ // instructions (starting with the 64-bit immediate mov of the map
+ // address). This offset is stored in the last 4 bytes of the 5
+ // byte test instruction.
+ Address delta_address = test_instruction_address + 1;
+ int delta = *reinterpret_cast<int*>(delta_address);
+ // Compute the map address. The map address is in the last 8 bytes
+ // of the 10-byte immediate mov instruction (incl. REX prefix), so we add 2
+ // to the offset to get the map address.
+ Address map_address = test_instruction_address + delta + 2;
+ // Patch the map check.
+ *(reinterpret_cast<Object**>(map_address)) = map;
+ return true;
}
+
+bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
+ return PatchInlinedMapCheck(address, map);
+}
+
+
+bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
+ return PatchInlinedMapCheck(address, map);
+}
+
+
+void KeyedLoadIC::ClearInlinedVersion(Address address) {
+ // Insert null as the map to check for to make sure the map check fails
+ // sending control flow to the IC instead of the inlined version.
+ PatchInlinedLoad(address, Heap::null_value());
+}
+
+
void KeyedStoreIC::ClearInlinedVersion(Address address) {
- // TODO(X64): Implement this when LoadIC is enabled.
+ // Insert null as the elements map to check for. This will make
+ // sure that the elements fast-case map check fails so that control
+ // flows to the IC instead of the inlined version.
+ PatchInlinedStore(address, Heap::null_value());
}
+
void KeyedStoreIC::RestoreInlinedVersion(Address address) {
- UNIMPLEMENTED();
+ // Restore the fast-case elements map check so that the inlined
+ // version can be used again.
+ PatchInlinedStore(address, Heap::fixed_array_map());
}
@@ -183,12 +230,10 @@
__ movq(rax, Operand(rsp, kPointerSize));
__ movq(rcx, Operand(rsp, 2 * kPointerSize));
-
- // Move the return address below the arguments.
__ pop(rbx);
- __ push(rcx);
- __ push(rax);
- __ push(rbx);
+ __ push(rcx); // receiver
+ __ push(rax); // name
+ __ push(rbx); // return address
// Perform tail call to the entry.
__ TailCallRuntime(f, 2);
@@ -245,8 +290,8 @@
__ bind(&index_int);
__ movq(rcx, FieldOperand(rcx, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary).
- __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::hash_table_map());
- __ j(equal, &slow);
+ __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::fixed_array_map());
+ __ j(not_equal, &slow);
// Check that the key (index) is within bounds.
__ cmpl(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
__ j(below, &fast); // Unsigned comparison rejects negative indices.
@@ -302,64 +347,16 @@
__ ret(0);
}
+
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rsp[0] : return address
// -- rsp[8] : name
// -- rsp[16] : receiver
// -----------------------------------
-
- Generate(masm, ExternalReference(Runtime::kKeyedGetProperty));
+ Generate(masm, ExternalReference(IC_Utility(kKeyedLoadIC_Miss)));
}
-bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
- // Never patch the map in the map check, so the check always fails.
- return false;
-}
-
-bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
- // Never patch the map in the map check, so the check always fails.
- return false;
-}
-
-Object* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
- UNIMPLEMENTED();
- return NULL;
-}
-
-Object* KeyedLoadStubCompiler::CompileLoadCallback(String* name,
- JSObject* object,
- JSObject* holder,
- AccessorInfo* callback) {
- UNIMPLEMENTED();
- return NULL;
-}
-
-Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
- JSObject* object,
- JSObject* holder,
- Object* callback) {
- UNIMPLEMENTED();
- return NULL;
-}
-
-
-Object* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
- UNIMPLEMENTED();
- return NULL;
-}
-
-Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* object,
- JSObject* holder,
- String* name) {
- UNIMPLEMENTED();
- return NULL;
-}
-
-Object* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
- UNIMPLEMENTED();
- return NULL;
-}
void KeyedStoreIC::Generate(MacroAssembler* masm, ExternalReference const& f) {
// ----------- S t a t e -------------
@@ -369,19 +366,35 @@
// -- rsp[16] : receiver
// -----------------------------------
- // Move the return address below the arguments.
__ pop(rcx);
- __ push(Operand(rsp, 1 * kPointerSize));
- __ push(Operand(rsp, 1 * kPointerSize));
- __ push(rax);
- __ push(rcx);
+ __ push(Operand(rsp, 1 * kPointerSize)); // receiver
+ __ push(Operand(rsp, 1 * kPointerSize)); // key
+ __ push(rax); // value
+ __ push(rcx); // return address
// Do tail-call to runtime routine.
__ TailCallRuntime(f, 3);
}
+
void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
- Generate(masm, ExternalReference(IC_Utility(kKeyedStoreIC_Miss)));
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : transition map
+ // -- rsp[0] : return address
+ // -- rsp[8] : key
+ // -- rsp[16] : receiver
+ // -----------------------------------
+
+ __ pop(rbx);
+ __ push(Operand(rsp, 1 * kPointerSize)); // receiver
+ __ push(rcx); // transition map
+ __ push(rax); // value
+ __ push(rbx); // return address
+
+ // Do tail-call to runtime routine.
+ __ TailCallRuntime(
+ ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
}
@@ -424,8 +437,8 @@
// rbx: index (as a smi)
__ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary).
- __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::hash_table_map());
- __ j(equal, &slow);
+ __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::fixed_array_map());
+ __ j(not_equal, &slow);
// Untag the key (for checking against untagged length in the fixed array).
__ movl(rdx, rbx);
__ sarl(rdx, Immediate(kSmiTagSize));
@@ -475,8 +488,8 @@
// rdx: JSArray
// rbx: index (as a smi)
__ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
- __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::hash_table_map());
- __ j(equal, &slow);
+ __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::fixed_array_map());
+ __ j(not_equal, &slow);
// Check the key against the length in the array, compute the
// address to store into and fall through to fast case.
@@ -498,15 +511,6 @@
}
-Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
- int index,
- Map* transition,
- String* name) {
- UNIMPLEMENTED();
- return NULL;
-}
-
-
void CallIC::Generate(MacroAssembler* masm,
int argc,
ExternalReference const& f) {
@@ -571,7 +575,10 @@
void LoadIC::ClearInlinedVersion(Address address) {
- // TODO(X64): Implement this when LoadIC is enabled.
+ // Reset the map check of the inlined inobject property load (if
+ // present) to guarantee failure by holding an invalid map (the null
+ // value). The offset can be patched to anything.
+ PatchInlinedLoad(address, Heap::null_value(), kMaxInt);
}
@@ -584,11 +591,10 @@
__ movq(rax, Operand(rsp, kPointerSize));
- // Move the return address below the arguments.
__ pop(rbx);
- __ push(rax);
- __ push(rcx);
- __ push(rbx);
+ __ push(rax); // receiver
+ __ push(rcx); // name
+ __ push(rbx); // return address
// Perform tail call to the entry.
__ TailCallRuntime(f, 2);
@@ -638,13 +644,37 @@
Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
+
void LoadIC::GenerateStringLength(MacroAssembler* masm) {
Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
-bool LoadIC::PatchInlinedLoad(Address address, Object* map, int index) {
- // TODO(X64): Implement this function. Until then, the code is not patched.
- return false;
+
+bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
+ // The address of the instruction following the call.
+ Address test_instruction_address =
+ address + Assembler::kTargetAddrToReturnAddrDist;
+ // If the instruction following the call is not a test eax, nothing
+ // was inlined.
+ if (*test_instruction_address != kTestEaxByte) return false;
+
+ Address delta_address = test_instruction_address + 1;
+ // The delta to the start of the map check instruction.
+ int delta = *reinterpret_cast<int*>(delta_address);
+
+ // The map address is the last 8 bytes of the 10-byte
+ // immediate move instruction, so we add 2 to get the
+ // offset to the last 8 bytes.
+ Address map_address = test_instruction_address + delta + 2;
+ *(reinterpret_cast<Object**>(map_address)) = map;
+
+ // The offset is in the 32-bit displacement of a seven byte
+ // memory-to-register move instruction (REX.W 0x88 ModR/M disp32),
+ // so we add 3 to get the offset of the displacement.
+ Address offset_address =
+ test_instruction_address + delta + kOffsetToLoadInstruction + 3;
+ *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
+ return true;
}
void StoreIC::Generate(MacroAssembler* masm, ExternalReference const& f) {
@@ -654,19 +684,33 @@
// -- rsp[0] : return address
// -- rsp[8] : receiver
// -----------------------------------
- // Move the return address below the arguments.
__ pop(rbx);
- __ push(Operand(rsp, 0));
- __ push(rcx);
- __ push(rax);
- __ push(rbx);
+ __ push(Operand(rsp, 0)); // receiver
+ __ push(rcx); // name
+ __ push(rax); // value
+ __ push(rbx); // return address
// Perform tail call to the entry.
__ TailCallRuntime(f, 3);
}
void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
- Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : Map (target of map transition)
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+
+ __ pop(rbx);
+ __ push(Operand(rsp, 0)); // receiver
+ __ push(rcx); // transition map
+ __ push(rax); // value
+ __ push(rbx); // return address
+
+ // Perform tail call to the entry.
+ __ TailCallRuntime(
+ ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
}
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 457011b..f58e1cd 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -71,9 +71,9 @@
Register op,
Label* then_label) {
Label ok;
- testq(result, result);
+ testl(result, result);
j(not_zero, &ok);
- testq(op, op);
+ testl(op, op);
j(sign, then_label);
bind(&ok);
}
@@ -151,6 +151,13 @@
void MacroAssembler::TailCallRuntime(ExternalReference const& ext,
int num_arguments) {
+ // ----------- S t a t e -------------
+ // -- rsp[0] : return address
+ // -- rsp[8] : argument num_arguments - 1
+ // ...
+ // -- rsp[8 * num_arguments] : argument 0 (receiver)
+ // -----------------------------------
+
// TODO(1236192): Most runtime routines don't need the number of
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
@@ -311,6 +318,17 @@
}
+void MacroAssembler::Push(Smi* source) {
+ if (IsUnsafeSmi(source)) {
+ LoadUnsafeSmi(kScratchRegister, source);
+ push(kScratchRegister);
+ } else {
+ int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(source));
+ push(Immediate(smi));
+ }
+}
+
+
void MacroAssembler::Jump(ExternalReference ext) {
movq(kScratchRegister, ext);
jmp(kScratchRegister);
@@ -356,6 +374,7 @@
ASSERT(RelocInfo::IsCodeTarget(rmode));
movq(kScratchRegister, code_object, rmode);
#ifdef DEBUG
+ // Patch target is kPointer size bytes *before* target label.
Label target;
bind(&target);
#endif
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 44a76a4..cba55eb 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -164,6 +164,7 @@
void Cmp(Register dst, Handle<Object> source);
void Cmp(const Operand& dst, Handle<Object> source);
void Push(Handle<Object> source);
+ void Push(Smi* smi);
// Control Flow
void Jump(Address destination, RelocInfo::Mode rmode);
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index ba13996..091c826 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -36,584 +36,6 @@
namespace v8 {
namespace internal {
-#define __ ACCESS_MASM((masm()))
-
-
-Object* CallStubCompiler::CompileCallConstant(Object* object,
- JSObject* holder,
- JSFunction* function,
- String* name,
- StubCompiler::CheckType check) {
- // ----------- S t a t e -------------
- // -----------------------------------
- // rsp[0] return address
- // rsp[8] argument argc
- // rsp[16] argument argc - 1
- // ...
- // rsp[argc * 8] argument 1
- // rsp[(argc + 1) * 8] argument 0 = reciever
- // rsp[(argc + 2) * 8] function name
-
- Label miss;
-
- // Get the receiver from the stack.
- const int argc = arguments().immediate();
- __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
-
- // Check that the receiver isn't a smi.
- if (check != NUMBER_CHECK) {
- __ testl(rdx, Immediate(kSmiTagMask));
- __ j(zero, &miss);
- }
-
- // Make sure that it's okay not to patch the on stack receiver
- // unless we're doing a receiver map check.
- ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
-
- switch (check) {
- case RECEIVER_MAP_CHECK:
- // Check that the maps haven't changed.
- CheckPrototypes(JSObject::cast(object), rdx, holder,
- rbx, rcx, name, &miss);
-
- // Patch the receiver on the stack with the global proxy if
- // necessary.
- if (object->IsGlobalObject()) {
- __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
- __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
- }
- break;
-
- case STRING_CHECK:
- // Check that the object is a two-byte string or a symbol.
- __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rcx);
- __ j(above_equal, &miss);
- // Check that the maps starting from the prototype haven't changed.
- GenerateLoadGlobalFunctionPrototype(masm(),
- Context::STRING_FUNCTION_INDEX,
- rcx);
- CheckPrototypes(JSObject::cast(object->GetPrototype()), rcx, holder,
- rbx, rdx, name, &miss);
- break;
-
- case NUMBER_CHECK: {
- Label fast;
- // Check that the object is a smi or a heap number.
- __ testl(rdx, Immediate(kSmiTagMask));
- __ j(zero, &fast);
- __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
- __ j(not_equal, &miss);
- __ bind(&fast);
- // Check that the maps starting from the prototype haven't changed.
- GenerateLoadGlobalFunctionPrototype(masm(),
- Context::NUMBER_FUNCTION_INDEX,
- rcx);
- CheckPrototypes(JSObject::cast(object->GetPrototype()), rcx, holder,
- rbx, rdx, name, &miss);
- break;
- }
-
- case BOOLEAN_CHECK: {
- Label fast;
- // Check that the object is a boolean.
- __ Cmp(rdx, Factory::true_value());
- __ j(equal, &fast);
- __ Cmp(rdx, Factory::false_value());
- __ j(not_equal, &miss);
- __ bind(&fast);
- // Check that the maps starting from the prototype haven't changed.
- GenerateLoadGlobalFunctionPrototype(masm(),
- Context::BOOLEAN_FUNCTION_INDEX,
- rcx);
- CheckPrototypes(JSObject::cast(object->GetPrototype()), rcx, holder,
- rbx, rdx, name, &miss);
- break;
- }
-
- case JSARRAY_HAS_FAST_ELEMENTS_CHECK:
- CheckPrototypes(JSObject::cast(object), rdx, holder,
- rbx, rcx, name, &miss);
- // Make sure object->elements()->map() != Heap::dictionary_array_map()
- // Get the elements array of the object.
- __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
- // Check that the object is in fast mode (not dictionary).
- __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
- Factory::hash_table_map());
- __ j(equal, &miss);
- break;
-
- default:
- UNREACHABLE();
- }
-
- // Get the function and setup the context.
- __ Move(rdi, Handle<JSFunction>(function));
- __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
-
- // Jump to the cached code (tail call).
- ASSERT(function->is_compiled());
- Handle<Code> code(function->code());
- ParameterCount expected(function->shared()->formal_parameter_count());
- __ InvokeCode(code, expected, arguments(),
- RelocInfo::CODE_TARGET, JUMP_FUNCTION);
-
- // Handle call cache miss.
- __ bind(&miss);
- Handle<Code> ic = ComputeCallMiss(arguments().immediate());
- __ Jump(ic, RelocInfo::CODE_TARGET);
-
- // Return the generated code.
- String* function_name = NULL;
- if (function->shared()->name()->IsString()) {
- function_name = String::cast(function->shared()->name());
- }
- return GetCode(CONSTANT_FUNCTION, function_name);
-}
-
-
-Object* CallStubCompiler::CompileCallField(Object* object,
- JSObject* holder,
- int index,
- String* name) {
- // ----------- S t a t e -------------
- // -----------------------------------
- // rsp[0] return address
- // rsp[8] argument argc
- // rsp[16] argument argc - 1
- // ...
- // rsp[argc * 8] argument 1
- // rsp[(argc + 1) * 8] argument 0 = receiver
- // rsp[(argc + 2) * 8] function name
- Label miss;
-
- // Get the receiver from the stack.
- const int argc = arguments().immediate();
- __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
-
- // Check that the receiver isn't a smi.
- __ testl(rdx, Immediate(kSmiTagMask));
- __ j(zero, &miss);
-
- // Do the right check and compute the holder register.
- Register reg =
- CheckPrototypes(JSObject::cast(object), rdx, holder,
- rbx, rcx, name, &miss);
-
- GenerateFastPropertyLoad(masm(), rdi, reg, holder, index);
-
- // Check that the function really is a function.
- __ testl(rdi, Immediate(kSmiTagMask));
- __ j(zero, &miss);
- __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx);
- __ j(not_equal, &miss);
-
- // Patch the receiver on the stack with the global proxy if
- // necessary.
- if (object->IsGlobalObject()) {
- __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
- __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
- }
-
- // Invoke the function.
- __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION);
-
- // Handle call cache miss.
- __ bind(&miss);
- Handle<Code> ic = ComputeCallMiss(arguments().immediate());
- __ Jump(ic, RelocInfo::CODE_TARGET);
-
- // Return the generated code.
- return GetCode(FIELD, name);
-}
-
-
-Object* CallStubCompiler::CompileCallInterceptor(Object* a,
- JSObject* b,
- String* c) {
- // TODO(X64): Implement a real stub.
- return Failure::InternalError();
-}
-
-
-
-Object* CallStubCompiler::CompileCallGlobal(JSObject* object,
- GlobalObject* holder,
- JSGlobalPropertyCell* cell,
- JSFunction* function,
- String* name) {
- // ----------- S t a t e -------------
- // -----------------------------------
- // rsp[0] return address
- // rsp[8] argument argc
- // rsp[16] argument argc - 1
- // ...
- // rsp[argc * 8] argument 1
- // rsp[(argc + 1) * 8] argument 0 = receiver
- // rsp[(argc + 2) * 8] function name
- Label miss;
-
- __ IncrementCounter(&Counters::call_global_inline, 1);
-
- // Get the number of arguments.
- const int argc = arguments().immediate();
-
- // Get the receiver from the stack.
- __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
-
- // If the object is the holder then we know that it's a global
- // object which can only happen for contextual calls. In this case,
- // the receiver cannot be a smi.
- if (object != holder) {
- __ testl(rdx, Immediate(kSmiTagMask));
- __ j(zero, &miss);
- }
-
- // Check that the maps haven't changed.
- CheckPrototypes(object, rdx, holder, rbx, rcx, name, &miss);
-
- // Get the value from the cell.
- __ Move(rdi, Handle<JSGlobalPropertyCell>(cell));
- __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));
-
- // Check that the cell contains the same function.
- __ Cmp(rdi, Handle<JSFunction>(function));
- __ j(not_equal, &miss);
-
- // Patch the receiver on the stack with the global proxy.
- if (object->IsGlobalObject()) {
- __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
- __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
- }
-
- // Setup the context (function already in edi).
- __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
-
- // Jump to the cached code (tail call).
- ASSERT(function->is_compiled());
- Handle<Code> code(function->code());
- ParameterCount expected(function->shared()->formal_parameter_count());
- __ InvokeCode(code, expected, arguments(),
- RelocInfo::CODE_TARGET, JUMP_FUNCTION);
-
- // Handle call cache miss.
- __ bind(&miss);
- __ DecrementCounter(&Counters::call_global_inline, 1);
- __ IncrementCounter(&Counters::call_global_inline_miss, 1);
- Handle<Code> ic = ComputeCallMiss(arguments().immediate());
- __ Jump(ic, RelocInfo::CODE_TARGET);
-
- // Return the generated code.
- return GetCode(NORMAL, name);
-}
-
-
-Object* LoadStubCompiler::CompileLoadCallback(JSObject* a,
- JSObject* b,
- AccessorInfo* c,
- String* d) {
- // TODO(X64): Implement a real stub.
- return Failure::InternalError();
-}
-
-
-Object* LoadStubCompiler::CompileLoadConstant(JSObject* object,
- JSObject* holder,
- Object* value,
- String* name) {
- // ----------- S t a t e -------------
- // -- rcx : name
- // -- rsp[0] : return address
- // -- rsp[8] : receiver
- // -----------------------------------
- Label miss;
-
- __ movq(rax, Operand(rsp, kPointerSize));
- GenerateLoadConstant(object, holder, rax, rbx, rdx, value, name, &miss);
- __ bind(&miss);
- GenerateLoadMiss(masm(), Code::LOAD_IC);
-
- // Return the generated code.
- return GetCode(CONSTANT_FUNCTION, name);
-}
-
-
-Object* LoadStubCompiler::CompileLoadField(JSObject* object,
- JSObject* holder,
- int index,
- String* name) {
- // ----------- S t a t e -------------
- // -- rcx : name
- // -- rsp[0] : return address
- // -- rsp[8] : receiver
- // -----------------------------------
- Label miss;
-
- __ movq(rax, Operand(rsp, kPointerSize));
- GenerateLoadField(object, holder, rax, rbx, rdx, index, name, &miss);
- __ bind(&miss);
- GenerateLoadMiss(masm(), Code::LOAD_IC);
-
- // Return the generated code.
- return GetCode(FIELD, name);
-}
-
-
-Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* a,
- JSObject* b,
- String* c) {
- // TODO(X64): Implement a real stub.
- return Failure::InternalError();
-}
-
-
-Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
- GlobalObject* holder,
- JSGlobalPropertyCell* cell,
- String* name,
- bool is_dont_delete) {
- // ----------- S t a t e -------------
- // -- rcx : name
- // -- rsp[0] : return address
- // -- rsp[8] : receiver
- // -----------------------------------
- Label miss;
-
- __ IncrementCounter(&Counters::named_load_global_inline, 1);
-
- // Get the receiver from the stack.
- __ movq(rax, Operand(rsp, kPointerSize));
-
- // If the object is the holder then we know that it's a global
- // object which can only happen for contextual loads. In this case,
- // the receiver cannot be a smi.
- if (object != holder) {
- __ testl(rax, Immediate(kSmiTagMask));
- __ j(zero, &miss);
- }
-
- // Check that the maps haven't changed.
- CheckPrototypes(object, rax, holder, rbx, rdx, name, &miss);
-
- // Get the value from the cell.
- __ Move(rax, Handle<JSGlobalPropertyCell>(cell));
- __ movq(rax, FieldOperand(rax, JSGlobalPropertyCell::kValueOffset));
-
- // Check for deleted property if property can actually be deleted.
- if (!is_dont_delete) {
- __ Cmp(rax, Factory::the_hole_value());
- __ j(equal, &miss);
- } else if (FLAG_debug_code) {
- __ Cmp(rax, Factory::the_hole_value());
- __ Check(not_equal, "DontDelete cells can't contain the hole");
- }
-
- __ ret(0);
-
- __ bind(&miss);
- __ DecrementCounter(&Counters::named_load_global_inline, 1);
- __ IncrementCounter(&Counters::named_load_global_inline_miss, 1);
- GenerateLoadMiss(masm(), Code::LOAD_IC);
-
- // Return the generated code.
- return GetCode(NORMAL, name);
-}
-
-
-Object* StoreStubCompiler::CompileStoreCallback(JSObject* a,
- AccessorInfo* b,
- String* c) {
- UNIMPLEMENTED();
- return NULL;
-}
-
-
-Object* StoreStubCompiler::CompileStoreField(JSObject* object,
- int index,
- Map* transition,
- String* name) {
- // ----------- S t a t e -------------
- // -- rax : value
- // -- rcx : name
- // -- rsp[0] : return address
- // -- rsp[8] : receiver
- // -----------------------------------
- Label miss;
-
- // Get the object from the stack.
- __ movq(rbx, Operand(rsp, 1 * kPointerSize));
-
- // Generate store field code. Trashes the name register.
- GenerateStoreField(masm(),
- Builtins::StoreIC_ExtendStorage,
- object,
- index,
- transition,
- rbx, rcx, rdx,
- &miss);
-
- // Handle store cache miss.
- __ bind(&miss);
- __ Move(rcx, Handle<String>(name)); // restore name
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
- __ Jump(ic, RelocInfo::CODE_TARGET);
-
- // Return the generated code.
- return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
-}
-
-
-Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* a, String* b) {
- UNIMPLEMENTED();
- return NULL;
-}
-
-
-Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
- JSGlobalPropertyCell* cell,
- String* name) {
- UNIMPLEMENTED();
- return NULL;
-}
-
-
-Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
- JSObject* receiver,
- JSObject* holder,
- int index) {
- // ----------- S t a t e -------------
- // -- rsp[0] : return address
- // -- rsp[8] : name
- // -- rsp[16] : receiver
- // -----------------------------------
- Label miss;
-
- __ movq(rax, Operand(rsp, kPointerSize));
- __ movq(rcx, Operand(rsp, 2 * kPointerSize));
- __ IncrementCounter(&Counters::keyed_load_field, 1);
-
- // Check that the name has not changed.
- __ Cmp(rax, Handle<String>(name));
- __ j(not_equal, &miss);
-
- GenerateLoadField(receiver, holder, rcx, rbx, rdx, index, name, &miss);
-
- __ bind(&miss);
- __ DecrementCounter(&Counters::keyed_load_field, 1);
- GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
-
- // Return the generated code.
- return GetCode(FIELD, name);
-}
-
-
-// TODO(1241006): Avoid having lazy compile stubs specialized by the
-// number of arguments. It is not needed anymore.
-Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
- // Enter an internal frame.
- __ EnterInternalFrame();
-
- // Push a copy of the function onto the stack.
- __ push(rdi);
-
- __ push(rdi); // function is also the parameter to the runtime call
- __ CallRuntime(Runtime::kLazyCompile, 1);
- __ pop(rdi);
-
- // Tear down temporary frame.
- __ LeaveInternalFrame();
-
- // Do a tail-call of the compiled function.
- __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
- __ jmp(rcx);
-
- return GetCodeWithFlags(flags, "LazyCompileStub");
-}
-
-
-Register StubCompiler::CheckPrototypes(JSObject* object,
- Register object_reg,
- JSObject* holder,
- Register holder_reg,
- Register scratch,
- String* name,
- Label* miss) {
- // Check that the maps haven't changed.
- Register result =
- __ CheckMaps(object, object_reg, holder, holder_reg, scratch, miss);
-
- // If we've skipped any global objects, it's not enough to verify
- // that their maps haven't changed.
- while (object != holder) {
- if (object->IsGlobalObject()) {
- GlobalObject* global = GlobalObject::cast(object);
- Object* probe = global->EnsurePropertyCell(name);
- if (probe->IsFailure()) {
- set_failure(Failure::cast(probe));
- return result;
- }
- JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
- ASSERT(cell->value()->IsTheHole());
- __ Move(scratch, Handle<Object>(cell));
- __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
- Factory::the_hole_value());
- __ j(not_equal, miss);
- }
- object = JSObject::cast(object->GetPrototype());
- }
-
- // Return the register containing the holder.
- return result;
-}
-
-
-void StubCompiler::GenerateLoadField(JSObject* object,
- JSObject* holder,
- Register receiver,
- Register scratch1,
- Register scratch2,
- int index,
- String* name,
- Label* miss) {
- // Check that the receiver isn't a smi.
- __ testl(receiver, Immediate(kSmiTagMask));
- __ j(zero, miss);
-
- // Check the prototype chain.
- Register reg =
- CheckPrototypes(object, receiver, holder,
- scratch1, scratch2, name, miss);
-
- // Get the value from the properties.
- GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
- __ ret(0);
-}
-
-
-void StubCompiler::GenerateLoadConstant(JSObject* object,
- JSObject* holder,
- Register receiver,
- Register scratch1,
- Register scratch2,
- Object* value,
- String* name,
- Label* miss) {
- // Check that the receiver isn't a smi.
- __ testl(receiver, Immediate(kSmiTagMask));
- __ j(zero, miss);
-
- // Check that the maps haven't changed.
- Register reg =
- CheckPrototypes(object, receiver, holder,
- scratch1, scratch2, name, miss);
-
- // Return the constant value.
- __ Move(rax, Handle<Object>(value));
- __ ret(0);
-}
-
-
-#undef __
-
//-----------------------------------------------------------------------------
// StubCompiler static helper functions
@@ -704,6 +126,23 @@
}
+template <typename Pushable>
+static void PushInterceptorArguments(MacroAssembler* masm,
+ Register receiver,
+ Register holder,
+ Pushable name,
+ JSObject* holder_obj) {
+ __ push(receiver);
+ __ push(holder);
+ __ push(name);
+ InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
+ __ movq(kScratchRegister, Handle<Object>(interceptor),
+ RelocInfo::EMBEDDED_OBJECT);
+ __ push(kScratchRegister);
+ __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
+}
+
+
void StubCache::GenerateProbe(MacroAssembler* masm,
Code::Flags flags,
Register receiver,
@@ -831,7 +270,1474 @@
}
+void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
+ Register receiver,
+ Register scratch,
+ Label* miss_label) {
+ // Check that the receiver isn't a smi.
+ __ testl(receiver, Immediate(kSmiTagMask));
+ __ j(zero, miss_label);
+
+ // Check that the object is a JS array.
+ __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
+ __ j(not_equal, miss_label);
+
+ // Load length directly from the JS array.
+ __ movq(rax, FieldOperand(receiver, JSArray::kLengthOffset));
+ __ ret(0);
+}
+
+
+// Generate code to check if an object is a string. If the object is
+// a string, the map's instance type is left in the scratch register.
+static void GenerateStringCheck(MacroAssembler* masm,
+ Register receiver,
+ Register scratch,
+ Label* smi,
+ Label* non_string_object) {
+ // Check that the object isn't a smi.
+ __ testl(receiver, Immediate(kSmiTagMask));
+ __ j(zero, smi);
+
+ // Check that the object is a string.
+ __ movq(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
+ __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
+ ASSERT(kNotStringTag != 0);
+ __ testl(scratch, Immediate(kNotStringTag));
+ __ j(not_zero, non_string_object);
+}
+
+
+void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
+ Register receiver,
+ Register scratch,
+ Label* miss) {
+ Label load_length, check_wrapper;
+
+ // Check if the object is a string leaving the instance type in the
+ // scratch register.
+ GenerateStringCheck(masm, receiver, scratch, miss, &check_wrapper);
+
+ // Load length directly from the string.
+ __ bind(&load_length);
+ __ and_(scratch, Immediate(kStringSizeMask));
+ __ movl(rax, FieldOperand(receiver, String::kLengthOffset));
+ // rcx is also the receiver.
+ __ lea(rcx, Operand(scratch, String::kLongLengthShift));
+ __ shr(rax); // rcx is implicit shift register.
+ __ shl(rax, Immediate(kSmiTagSize));
+ __ ret(0);
+
+ // Check if the object is a JSValue wrapper.
+ __ bind(&check_wrapper);
+ __ cmpl(scratch, Immediate(JS_VALUE_TYPE));
+ __ j(not_equal, miss);
+
+ // Check if the wrapped value is a string and load the length
+ // directly if it is.
+ __ movq(receiver, FieldOperand(receiver, JSValue::kValueOffset));
+ GenerateStringCheck(masm, receiver, scratch, miss, miss);
+ __ jmp(&load_length);
+}
+
+
+template <class Pushable>
+static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
+ Register receiver,
+ Register holder,
+ Pushable name,
+ JSObject* holder_obj) {
+ PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
+
+ ExternalReference ref =
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
+ __ movq(rax, Immediate(5));
+ __ movq(rbx, ref);
+
+ CEntryStub stub;
+ __ CallStub(&stub);
+}
+
+
+
+void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
+ Register receiver,
+ Register result,
+ Register scratch,
+ Label* miss_label) {
+ __ TryGetFunctionPrototype(receiver, result, miss_label);
+ if (!result.is(rax)) __ movq(rax, result);
+ __ ret(0);
+}
+
+
+static void LookupPostInterceptor(JSObject* holder,
+ String* name,
+ LookupResult* lookup) {
+ holder->LocalLookupRealNamedProperty(name, lookup);
+ if (lookup->IsNotFound()) {
+ Object* proto = holder->GetPrototype();
+ if (proto != Heap::null_value()) {
+ proto->Lookup(name, lookup);
+ }
+ }
+}
+
+
+class LoadInterceptorCompiler BASE_EMBEDDED {
+ public:
+ explicit LoadInterceptorCompiler(Register name) : name_(name) {}
+
+ void CompileCacheable(MacroAssembler* masm,
+ StubCompiler* stub_compiler,
+ Register receiver,
+ Register holder,
+ Register scratch1,
+ Register scratch2,
+ JSObject* holder_obj,
+ LookupResult* lookup,
+ String* name,
+ Label* miss_label) {
+ AccessorInfo* callback = 0;
+ bool optimize = false;
+ // So far the most popular follow ups for interceptor loads are FIELD
+ // and CALLBACKS, so inline only them, other cases may be added
+ // later.
+ if (lookup->type() == FIELD) {
+ optimize = true;
+ } else if (lookup->type() == CALLBACKS) {
+ Object* callback_object = lookup->GetCallbackObject();
+ if (callback_object->IsAccessorInfo()) {
+ callback = AccessorInfo::cast(callback_object);
+ optimize = callback->getter() != NULL;
+ }
+ }
+
+ if (!optimize) {
+ CompileRegular(masm, receiver, holder, scratch2, holder_obj, miss_label);
+ return;
+ }
+
+ // Note: starting a frame here makes GC aware of pointers pushed below.
+ __ EnterInternalFrame();
+
+ if (lookup->type() == CALLBACKS) {
+ __ push(receiver);
+ }
+ __ push(holder);
+ __ push(name_);
+
+ CompileCallLoadPropertyWithInterceptor(masm,
+ receiver,
+ holder,
+ name_,
+ holder_obj);
+
+ Label interceptor_failed;
+ __ Cmp(rax, Factory::no_interceptor_result_sentinel());
+ __ j(equal, &interceptor_failed);
+ __ LeaveInternalFrame();
+ __ ret(0);
+
+ __ bind(&interceptor_failed);
+ __ pop(name_);
+ __ pop(holder);
+ if (lookup->type() == CALLBACKS) {
+ __ pop(receiver);
+ }
+
+ __ LeaveInternalFrame();
+
+ if (lookup->type() == FIELD) {
+ holder = stub_compiler->CheckPrototypes(holder_obj,
+ holder,
+ lookup->holder(),
+ scratch1,
+ scratch2,
+ name,
+ miss_label);
+ stub_compiler->GenerateFastPropertyLoad(masm,
+ rax,
+ holder,
+ lookup->holder(),
+ lookup->GetFieldIndex());
+ __ ret(0);
+ } else {
+ ASSERT(lookup->type() == CALLBACKS);
+ ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
+ ASSERT(callback != NULL);
+ ASSERT(callback->getter() != NULL);
+
+ Label cleanup;
+ __ pop(scratch2);
+ __ push(receiver);
+ __ push(scratch2);
+
+ holder = stub_compiler->CheckPrototypes(holder_obj, holder,
+ lookup->holder(), scratch1,
+ scratch2,
+ name,
+ &cleanup);
+
+ __ pop(scratch2); // save old return address
+ __ push(holder);
+ __ Move(holder, Handle<AccessorInfo>(callback));
+ __ push(holder);
+ __ push(FieldOperand(holder, AccessorInfo::kDataOffset));
+ __ push(name_);
+ __ push(scratch2); // restore old return address
+
+ ExternalReference ref =
+ ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
+ __ TailCallRuntime(ref, 5);
+
+ __ bind(&cleanup);
+ __ pop(scratch1);
+ __ pop(scratch2);
+ __ push(scratch1);
+ }
+ }
+
+
+ void CompileRegular(MacroAssembler* masm,
+ Register receiver,
+ Register holder,
+ Register scratch,
+ JSObject* holder_obj,
+ Label* miss_label) {
+ __ pop(scratch); // save old return address
+ PushInterceptorArguments(masm, receiver, holder, name_, holder_obj);
+ __ push(scratch); // restore old return address
+
+ ExternalReference ref = ExternalReference(
+ IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
+ __ TailCallRuntime(ref, 5);
+ }
+
+ private:
+ Register name_;
+};
+
+
+template <class Compiler>
+static void CompileLoadInterceptor(Compiler* compiler,
+ StubCompiler* stub_compiler,
+ MacroAssembler* masm,
+ JSObject* object,
+ JSObject* holder,
+ String* name,
+ LookupResult* lookup,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ Label* miss) {
+ ASSERT(holder->HasNamedInterceptor());
+ ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
+
+ // Check that the receiver isn't a smi.
+ __ testl(receiver, Immediate(kSmiTagMask));
+ __ j(zero, miss);
+
+ // Check that the maps haven't changed.
+ Register reg =
+ stub_compiler->CheckPrototypes(object, receiver, holder,
+ scratch1, scratch2, name, miss);
+
+ if (lookup->IsValid() && lookup->IsCacheable()) {
+ compiler->CompileCacheable(masm,
+ stub_compiler,
+ receiver,
+ reg,
+ scratch1,
+ scratch2,
+ holder,
+ lookup,
+ name,
+ miss);
+ } else {
+ compiler->CompileRegular(masm,
+ receiver,
+ reg,
+ scratch2,
+ holder,
+ miss);
+ }
+}
+
+
+class CallInterceptorCompiler BASE_EMBEDDED {
+ public:
+ explicit CallInterceptorCompiler(const ParameterCount& arguments)
+ : arguments_(arguments), argc_(arguments.immediate()) {}
+
+ void CompileCacheable(MacroAssembler* masm,
+ StubCompiler* stub_compiler,
+ Register receiver,
+ Register holder,
+ Register scratch1,
+ Register scratch2,
+ JSObject* holder_obj,
+ LookupResult* lookup,
+ String* name,
+ Label* miss_label) {
+ JSFunction* function = 0;
+ bool optimize = false;
+ // So far the most popular case for failed interceptor is
+ // CONSTANT_FUNCTION sitting below.
+ if (lookup->type() == CONSTANT_FUNCTION) {
+ function = lookup->GetConstantFunction();
+ // JSArray holder is a special case for call constant function
+ // (see the corresponding code).
+ if (function->is_compiled() && !holder_obj->IsJSArray()) {
+ optimize = true;
+ }
+ }
+
+ if (!optimize) {
+ CompileRegular(masm, receiver, holder, scratch2, holder_obj, miss_label);
+ return;
+ }
+
+ __ EnterInternalFrame();
+ __ push(holder); // save the holder
+
+ CompileCallLoadPropertyWithInterceptor(
+ masm,
+ receiver,
+ holder,
+ // Under EnterInternalFrame this refers to name.
+ Operand(rbp, (argc_ + 3) * kPointerSize),
+ holder_obj);
+
+ __ pop(receiver); // restore holder
+ __ LeaveInternalFrame();
+
+ __ Cmp(rax, Factory::no_interceptor_result_sentinel());
+ Label invoke;
+ __ j(not_equal, &invoke);
+
+ stub_compiler->CheckPrototypes(holder_obj, receiver,
+ lookup->holder(), scratch1,
+ scratch2,
+ name,
+ miss_label);
+ if (lookup->holder()->IsGlobalObject()) {
+ __ movq(rdx, Operand(rsp, (argc_ + 1) * kPointerSize));
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
+ __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdx);
+ }
+
+ ASSERT(function->is_compiled());
+ // Get the function and setup the context.
+ __ Move(rdi, Handle<JSFunction>(function));
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+
+ // Jump to the cached code (tail call).
+ ASSERT(function->is_compiled());
+ Handle<Code> code(function->code());
+ ParameterCount expected(function->shared()->formal_parameter_count());
+ __ InvokeCode(code, expected, arguments_,
+ RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+
+ __ bind(&invoke);
+ }
+
+ void CompileRegular(MacroAssembler* masm,
+ Register receiver,
+ Register holder,
+ Register scratch,
+ JSObject* holder_obj,
+ Label* miss_label) {
+ __ EnterInternalFrame();
+
+ PushInterceptorArguments(masm,
+ receiver,
+ holder,
+ Operand(rbp, (argc_ + 3) * kPointerSize),
+ holder_obj);
+
+ ExternalReference ref = ExternalReference(
+ IC_Utility(IC::kLoadPropertyWithInterceptorForCall));
+ __ movq(rax, Immediate(5));
+ __ movq(rbx, ref);
+
+ CEntryStub stub;
+ __ CallStub(&stub);
+
+ __ LeaveInternalFrame();
+ }
+
+ private:
+ const ParameterCount& arguments_;
+ int argc_;
+};
+
+
#undef __
+#define __ ACCESS_MASM((masm()))
+
+
+Object* CallStubCompiler::CompileCallConstant(Object* object,
+ JSObject* holder,
+ JSFunction* function,
+ String* name,
+ StubCompiler::CheckType check) {
+ // ----------- S t a t e -------------
+ // -----------------------------------
+ // rsp[0] return address
+ // rsp[8] argument argc
+ // rsp[16] argument argc - 1
+ // ...
+ // rsp[argc * 8] argument 1
+ // rsp[(argc + 1) * 8] argument 0 = reciever
+ // rsp[(argc + 2) * 8] function name
+
+ Label miss;
+
+ // Get the receiver from the stack.
+ const int argc = arguments().immediate();
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+
+ // Check that the receiver isn't a smi.
+ if (check != NUMBER_CHECK) {
+ __ testl(rdx, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+ }
+
+ // Make sure that it's okay not to patch the on stack receiver
+ // unless we're doing a receiver map check.
+ ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
+
+ switch (check) {
+ case RECEIVER_MAP_CHECK:
+ // Check that the maps haven't changed.
+ CheckPrototypes(JSObject::cast(object), rdx, holder,
+ rbx, rcx, name, &miss);
+
+ // Patch the receiver on the stack with the global proxy if
+ // necessary.
+ if (object->IsGlobalObject()) {
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
+ }
+ break;
+
+ case STRING_CHECK:
+ // Check that the object is a two-byte string or a symbol.
+ __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rcx);
+ __ j(above_equal, &miss);
+ // Check that the maps starting from the prototype haven't changed.
+ GenerateLoadGlobalFunctionPrototype(masm(),
+ Context::STRING_FUNCTION_INDEX,
+ rcx);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rcx, holder,
+ rbx, rdx, name, &miss);
+ break;
+
+ case NUMBER_CHECK: {
+ Label fast;
+ // Check that the object is a smi or a heap number.
+ __ testl(rdx, Immediate(kSmiTagMask));
+ __ j(zero, &fast);
+ __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
+ __ j(not_equal, &miss);
+ __ bind(&fast);
+ // Check that the maps starting from the prototype haven't changed.
+ GenerateLoadGlobalFunctionPrototype(masm(),
+ Context::NUMBER_FUNCTION_INDEX,
+ rcx);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rcx, holder,
+ rbx, rdx, name, &miss);
+ break;
+ }
+
+ case BOOLEAN_CHECK: {
+ Label fast;
+ // Check that the object is a boolean.
+ __ Cmp(rdx, Factory::true_value());
+ __ j(equal, &fast);
+ __ Cmp(rdx, Factory::false_value());
+ __ j(not_equal, &miss);
+ __ bind(&fast);
+ // Check that the maps starting from the prototype haven't changed.
+ GenerateLoadGlobalFunctionPrototype(masm(),
+ Context::BOOLEAN_FUNCTION_INDEX,
+ rcx);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rcx, holder,
+ rbx, rdx, name, &miss);
+ break;
+ }
+
+ case JSARRAY_HAS_FAST_ELEMENTS_CHECK:
+ CheckPrototypes(JSObject::cast(object), rdx, holder,
+ rbx, rcx, name, &miss);
+ // Make sure object->HasFastElements().
+ // Get the elements array of the object.
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
+ // Check that the object is in fast mode (not dictionary).
+ __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+ Factory::fixed_array_map());
+ __ j(not_equal, &miss);
+ break;
+
+ default:
+ UNREACHABLE();
+ }
+
+ // Get the function and setup the context.
+ __ Move(rdi, Handle<JSFunction>(function));
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+
+ // Jump to the cached code (tail call).
+ ASSERT(function->is_compiled());
+ Handle<Code> code(function->code());
+ ParameterCount expected(function->shared()->formal_parameter_count());
+ __ InvokeCode(code, expected, arguments(),
+ RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+
+ // Handle call cache miss.
+ __ bind(&miss);
+ Handle<Code> ic = ComputeCallMiss(arguments().immediate());
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ String* function_name = NULL;
+ if (function->shared()->name()->IsString()) {
+ function_name = String::cast(function->shared()->name());
+ }
+ return GetCode(CONSTANT_FUNCTION, function_name);
+}
+
+
+Object* CallStubCompiler::CompileCallField(Object* object,
+ JSObject* holder,
+ int index,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -----------------------------------
+ // rsp[0] return address
+ // rsp[8] argument argc
+ // rsp[16] argument argc - 1
+ // ...
+ // rsp[argc * 8] argument 1
+ // rsp[(argc + 1) * 8] argument 0 = receiver
+ // rsp[(argc + 2) * 8] function name
+ Label miss;
+
+ // Get the receiver from the stack.
+ const int argc = arguments().immediate();
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+
+ // Check that the receiver isn't a smi.
+ __ testl(rdx, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+
+ // Do the right check and compute the holder register.
+ Register reg =
+ CheckPrototypes(JSObject::cast(object), rdx, holder,
+ rbx, rcx, name, &miss);
+
+ GenerateFastPropertyLoad(masm(), rdi, reg, holder, index);
+
+ // Check that the function really is a function.
+ __ testl(rdi, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+ __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx);
+ __ j(not_equal, &miss);
+
+ // Patch the receiver on the stack with the global proxy if
+ // necessary.
+ if (object->IsGlobalObject()) {
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
+ }
+
+ // Invoke the function.
+ __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION);
+
+ // Handle call cache miss.
+ __ bind(&miss);
+ Handle<Code> ic = ComputeCallMiss(arguments().immediate());
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(FIELD, name);
+}
+
+
+Object* CallStubCompiler::CompileCallInterceptor(Object* object,
+ JSObject* holder,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -----------------------------------
+ Label miss;
+
+ // Get the number of arguments.
+ const int argc = arguments().immediate();
+
+ LookupResult lookup;
+ LookupPostInterceptor(holder, name, &lookup);
+
+ // Get the receiver from the stack.
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+
+ CallInterceptorCompiler compiler(arguments());
+ CompileLoadInterceptor(&compiler,
+ this,
+ masm(),
+ JSObject::cast(object),
+ holder,
+ name,
+ &lookup,
+ rdx,
+ rbx,
+ rcx,
+ &miss);
+
+ // Restore receiver.
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+
+ // Check that the function really is a function.
+ __ testl(rax, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+ __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
+ __ j(not_equal, &miss);
+
+ // Patch the receiver on the stack with the global proxy if
+ // necessary.
+ if (object->IsGlobalObject()) {
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
+ }
+
+ // Invoke the function.
+ __ movq(rdi, rax);
+ __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION);
+
+ // Handle load cache miss.
+ __ bind(&miss);
+ Handle<Code> ic = ComputeCallMiss(argc);
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(INTERCEPTOR, name);
+}
+
+
+
+Object* CallStubCompiler::CompileCallGlobal(JSObject* object,
+ GlobalObject* holder,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -----------------------------------
+ // rsp[0] return address
+ // rsp[8] argument argc
+ // rsp[16] argument argc - 1
+ // ...
+ // rsp[argc * 8] argument 1
+ // rsp[(argc + 1) * 8] argument 0 = receiver
+ // rsp[(argc + 2) * 8] function name
+ Label miss;
+
+ // Get the number of arguments.
+ const int argc = arguments().immediate();
+
+ // Get the receiver from the stack.
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+
+ // If the object is the holder then we know that it's a global
+ // object which can only happen for contextual calls. In this case,
+ // the receiver cannot be a smi.
+ if (object != holder) {
+ __ testl(rdx, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+ }
+
+ // Check that the maps haven't changed.
+ CheckPrototypes(object, rdx, holder, rbx, rcx, name, &miss);
+
+ // Get the value from the cell.
+ __ Move(rdi, Handle<JSGlobalPropertyCell>(cell));
+ __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));
+
+ // Check that the cell contains the same function.
+ __ Cmp(rdi, Handle<JSFunction>(function));
+ __ j(not_equal, &miss);
+
+ // Patch the receiver on the stack with the global proxy.
+ if (object->IsGlobalObject()) {
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
+ }
+
+ // Setup the context (function already in edi).
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+
+ // Jump to the cached code (tail call).
+ __ IncrementCounter(&Counters::call_global_inline, 1);
+ ASSERT(function->is_compiled());
+ Handle<Code> code(function->code());
+ ParameterCount expected(function->shared()->formal_parameter_count());
+ __ InvokeCode(code, expected, arguments(),
+ RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+
+ // Handle call cache miss.
+ __ bind(&miss);
+ __ IncrementCounter(&Counters::call_global_inline_miss, 1);
+ Handle<Code> ic = ComputeCallMiss(arguments().immediate());
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
+}
+
+
+Object* LoadStubCompiler::CompileLoadCallback(JSObject* object,
+ JSObject* holder,
+ AccessorInfo* callback,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ movq(rax, Operand(rsp, kPointerSize));
+ GenerateLoadCallback(object, holder, rax, rcx, rbx, rdx,
+ callback, name, &miss);
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
+Object* LoadStubCompiler::CompileLoadConstant(JSObject* object,
+ JSObject* holder,
+ Object* value,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ movq(rax, Operand(rsp, kPointerSize));
+ GenerateLoadConstant(object, holder, rax, rbx, rdx, value, name, &miss);
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CONSTANT_FUNCTION, name);
+}
+
+
+Object* LoadStubCompiler::CompileLoadField(JSObject* object,
+ JSObject* holder,
+ int index,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ movq(rax, Operand(rsp, kPointerSize));
+ GenerateLoadField(object, holder, rax, rbx, rdx, index, name, &miss);
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(FIELD, name);
+}
+
+
+Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
+ JSObject* holder,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ LookupResult lookup;
+ LookupPostInterceptor(holder, name, &lookup);
+
+ __ movq(rax, Operand(rsp, kPointerSize));
+ // TODO(368): Compile in the whole chain: all the interceptors in
+ // prototypes and ultimate answer.
+ GenerateLoadInterceptor(receiver,
+ holder,
+ &lookup,
+ rax,
+ rcx,
+ rdx,
+ rbx,
+ name,
+ &miss);
+
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(INTERCEPTOR, name);
+}
+
+
+Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
+ GlobalObject* holder,
+ JSGlobalPropertyCell* cell,
+ String* name,
+ bool is_dont_delete) {
+ // ----------- S t a t e -------------
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Get the receiver from the stack.
+ __ movq(rax, Operand(rsp, kPointerSize));
+
+ // If the object is the holder then we know that it's a global
+ // object which can only happen for contextual loads. In this case,
+ // the receiver cannot be a smi.
+ if (object != holder) {
+ __ testl(rax, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+ }
+
+ // Check that the maps haven't changed.
+ CheckPrototypes(object, rax, holder, rbx, rdx, name, &miss);
+
+ // Get the value from the cell.
+ __ Move(rax, Handle<JSGlobalPropertyCell>(cell));
+ __ movq(rax, FieldOperand(rax, JSGlobalPropertyCell::kValueOffset));
+
+ // Check for deleted property if property can actually be deleted.
+ if (!is_dont_delete) {
+ __ Cmp(rax, Factory::the_hole_value());
+ __ j(equal, &miss);
+ } else if (FLAG_debug_code) {
+ __ Cmp(rax, Factory::the_hole_value());
+ __ Check(not_equal, "DontDelete cells can't contain the hole");
+ }
+
+ __ IncrementCounter(&Counters::named_load_global_inline, 1);
+ __ ret(0);
+
+ __ bind(&miss);
+ __ IncrementCounter(&Counters::named_load_global_inline_miss, 1);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
+}
+
+
+Object* KeyedLoadStubCompiler::CompileLoadCallback(String* name,
+ JSObject* receiver,
+ JSObject* holder,
+ AccessorInfo* callback) {
+ // ----------- S t a t e -------------
+ // -- rsp[0] : return address
+ // -- rsp[8] : name
+ // -- rsp[16] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ movq(rax, Operand(rsp, kPointerSize));
+ __ movq(rcx, Operand(rsp, 2 * kPointerSize));
+ __ IncrementCounter(&Counters::keyed_load_callback, 1);
+
+ // Check that the name has not changed.
+ __ Cmp(rax, Handle<String>(name));
+ __ j(not_equal, &miss);
+
+ GenerateLoadCallback(receiver, holder, rcx, rax, rbx, rdx,
+ callback, name, &miss);
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::keyed_load_callback, 1);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
+Object* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
+ // ----------- S t a t e -------------
+ // -- rsp[0] : return address
+ // -- rsp[8] : name
+ // -- rsp[16] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ movq(rax, Operand(rsp, kPointerSize));
+ __ movq(rcx, Operand(rsp, 2 * kPointerSize));
+ __ IncrementCounter(&Counters::keyed_load_array_length, 1);
+
+ // Check that the name has not changed.
+ __ Cmp(rax, Handle<String>(name));
+ __ j(not_equal, &miss);
+
+ GenerateLoadArrayLength(masm(), rcx, rdx, &miss);
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::keyed_load_array_length, 1);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
+Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
+ JSObject* receiver,
+ JSObject* holder,
+ Object* value) {
+ // ----------- S t a t e -------------
+ // -- rsp[0] : return address
+ // -- rsp[8] : name
+ // -- rsp[16] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ movq(rax, Operand(rsp, kPointerSize));
+ __ movq(rcx, Operand(rsp, 2 * kPointerSize));
+ __ IncrementCounter(&Counters::keyed_load_constant_function, 1);
+
+ // Check that the name has not changed.
+ __ Cmp(rax, Handle<String>(name));
+ __ j(not_equal, &miss);
+
+ GenerateLoadConstant(receiver, holder, rcx, rbx, rdx,
+ value, name, &miss);
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::keyed_load_constant_function, 1);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CONSTANT_FUNCTION, name);
+}
+
+
+Object* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
+ // ----------- S t a t e -------------
+ // -- rsp[0] : return address
+ // -- rsp[8] : name
+ // -- rsp[16] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ movq(rax, Operand(rsp, kPointerSize));
+ __ movq(rcx, Operand(rsp, 2 * kPointerSize));
+ __ IncrementCounter(&Counters::keyed_load_function_prototype, 1);
+
+ // Check that the name has not changed.
+ __ Cmp(rax, Handle<String>(name));
+ __ j(not_equal, &miss);
+
+ GenerateLoadFunctionPrototype(masm(), rcx, rdx, rbx, &miss);
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::keyed_load_function_prototype, 1);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
+Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
+ JSObject* holder,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rsp[0] : return address
+ // -- rsp[8] : name
+ // -- rsp[16] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ movq(rax, Operand(rsp, kPointerSize));
+ __ movq(rcx, Operand(rsp, 2 * kPointerSize));
+ __ IncrementCounter(&Counters::keyed_load_interceptor, 1);
+
+ // Check that the name has not changed.
+ __ Cmp(rax, Handle<String>(name));
+ __ j(not_equal, &miss);
+
+ LookupResult lookup;
+ LookupPostInterceptor(holder, name, &lookup);
+ GenerateLoadInterceptor(receiver,
+ holder,
+ &lookup,
+ rcx,
+ rax,
+ rdx,
+ rbx,
+ name,
+ &miss);
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::keyed_load_interceptor, 1);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(INTERCEPTOR, name);
+}
+
+
+Object* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
+ // ----------- S t a t e -------------
+ // -- rsp[0] : return address
+ // -- rsp[8] : name
+ // -- rsp[16] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ movq(rax, Operand(rsp, kPointerSize));
+ __ movq(rcx, Operand(rsp, 2 * kPointerSize));
+ __ IncrementCounter(&Counters::keyed_load_string_length, 1);
+
+ // Check that the name has not changed.
+ __ Cmp(rax, Handle<String>(name));
+ __ j(not_equal, &miss);
+
+ GenerateLoadStringLength(masm(), rcx, rdx, &miss);
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::keyed_load_string_length, 1);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
+Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
+ AccessorInfo* callback,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Get the object from the stack.
+ __ movq(rbx, Operand(rsp, 1 * kPointerSize));
+
+ // Check that the object isn't a smi.
+ __ testl(rbx, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+
+ // Check that the map of the object hasn't changed.
+ __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+ Handle<Map>(object->map()));
+ __ j(not_equal, &miss);
+
+ // Perform global security token check if needed.
+ if (object->IsJSGlobalProxy()) {
+ __ CheckAccessGlobalProxy(rbx, rdx, &miss);
+ }
+
+ // Stub never generated for non-global objects that require access
+ // checks.
+ ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
+
+ __ pop(rbx); // remove the return address
+ __ push(Operand(rsp, 0)); // receiver
+ __ Push(Handle<AccessorInfo>(callback)); // callback info
+ __ push(rcx); // name
+ __ push(rax); // value
+ __ push(rbx); // restore return address
+
+ // Do tail-call to the runtime system.
+ ExternalReference store_callback_property =
+ ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
+ __ TailCallRuntime(store_callback_property, 4);
+
+ // Handle store cache miss.
+ __ bind(&miss);
+ __ Move(rcx, Handle<String>(name)); // restore name
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
+Object* StoreStubCompiler::CompileStoreField(JSObject* object,
+ int index,
+ Map* transition,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Get the object from the stack.
+ __ movq(rbx, Operand(rsp, 1 * kPointerSize));
+
+ // Generate store field code. Trashes the name register.
+ GenerateStoreField(masm(),
+ Builtins::StoreIC_ExtendStorage,
+ object,
+ index,
+ transition,
+ rbx, rcx, rdx,
+ &miss);
+
+ // Handle store cache miss.
+ __ bind(&miss);
+ __ Move(rcx, Handle<String>(name)); // restore name
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
+}
+
+
+// Compiles a StoreIC stub for a receiver with a store interceptor.
+// After validating the receiver (non-smi, unchanged map, global proxy
+// security check) it tail-calls the kStoreInterceptorProperty runtime
+// entry, which invokes the interceptor on the C++ side.
+Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Get the object from the stack.
+ __ movq(rbx, Operand(rsp, 1 * kPointerSize))
+
+ // Check that the object isn't a smi.
+ __ testl(rbx, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+
+ // Check that the map of the object hasn't changed.
+ __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+ Handle<Map>(receiver->map()));
+ __ j(not_equal, &miss);
+
+ // Perform global security token check if needed.
+ if (receiver->IsJSGlobalProxy()) {
+ __ CheckAccessGlobalProxy(rbx, rdx, &miss);
+ }
+
+ // Stub never generated for non-global objects that require access
+ // checks.
+ ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
+
+ // Rebuild the runtime-call frame: receiver, name, value, then the
+ // saved return address back on top.
+ __ pop(rbx); // remove the return address
+ __ push(Operand(rsp, 0)); // receiver
+ __ push(rcx); // name
+ __ push(rax); // value
+ __ push(rbx); // restore return address
+
+ // Do tail-call to the runtime system with the three arguments
+ // pushed above (receiver, name, value).
+ ExternalReference store_ic_property =
+ ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
+ __ TailCallRuntime(store_ic_property, 3);
+
+ // Handle store cache miss.
+ __ bind(&miss);
+ __ Move(rcx, Handle<String>(name)); // restore name
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(INTERCEPTOR, name);
+}
+
+
+// Compiles a StoreIC stub that writes directly into the property
+// |cell| of a global object, guarded only by a map check on the
+// receiver (a global's map changes when its property layout does).
+Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* cell,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Check that the map of the global has not changed.
+ __ movq(rbx, Operand(rsp, kPointerSize));
+ __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+ Handle<Map>(object->map()));
+ __ j(not_equal, &miss);
+
+ // Store the value in the cell.  rcx (the name) is no longer needed
+ // on this path and is reused to hold the cell.
+ __ Move(rcx, Handle<JSGlobalPropertyCell>(cell));
+ __ movq(FieldOperand(rcx, JSGlobalPropertyCell::kValueOffset), rax);
+
+ // Return the value (register rax).
+ __ IncrementCounter(&Counters::named_store_global_inline, 1);
+ __ ret(0);
+
+ // Handle store cache miss.  Note rcx still holds the name here; the
+ // fast path above only clobbers it after the map check succeeds.
+ __ bind(&miss);
+ __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
+}
+
+
+// Compiles a KeyedLoadIC stub specialized for a known property name:
+// it first verifies that the key on the stack is exactly |name| and
+// then performs a fast in-object/properties field load.
+Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
+ JSObject* receiver,
+ JSObject* holder,
+ int index) {
+ // ----------- S t a t e -------------
+ // -- rsp[0] : return address
+ // -- rsp[8] : name
+ // -- rsp[16] : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Load the key into rax and the receiver into rcx.
+ __ movq(rax, Operand(rsp, kPointerSize));
+ __ movq(rcx, Operand(rsp, 2 * kPointerSize));
+ __ IncrementCounter(&Counters::keyed_load_field, 1);
+
+ // Check that the name has not changed.
+ __ Cmp(rax, Handle<String>(name));
+ __ j(not_equal, &miss);
+
+ GenerateLoadField(receiver, holder, rcx, rbx, rdx, index, name, &miss);
+
+ // The counter was incremented above on entry; undo it on the miss
+ // path so it only counts successful fast-path loads.
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::keyed_load_field, 1);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(FIELD, name);
+}
+
+
+// Compiles a KeyedStoreIC stub specialized for a known property name:
+// verifies that the key on the stack equals |name| and then performs
+// the same fast field store (with optional map transition) as the
+// named StoreIC variant.
+Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
+ int index,
+ Map* transition,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rsp[0] : return address
+ // -- rsp[8] : key
+ // -- rsp[16] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ IncrementCounter(&Counters::keyed_store_field, 1);
+
+ // Get the name from the stack.
+ __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+ // Check that the name has not changed.
+ __ Cmp(rcx, Handle<String>(name));
+ __ j(not_equal, &miss);
+
+ // Get the object from the stack.
+ __ movq(rbx, Operand(rsp, 2 * kPointerSize));
+
+ // Generate store field code. Trashes the name register.
+ GenerateStoreField(masm(),
+ Builtins::KeyedStoreIC_ExtendStorage,
+ object,
+ index,
+ transition,
+ rbx, rcx, rdx,
+ &miss);
+
+ // Handle store cache miss.  Undo the optimistic counter increment
+ // from above before handing off to the generic IC.
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::keyed_store_field, 1);
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
+}
+
+
+// TODO(1241006): Avoid having lazy compile stubs specialized by the
+// number of arguments. It is not needed anymore.
+Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
+ // Enter an internal frame.
+ __ EnterInternalFrame();
+
+ // Push a copy of the function onto the stack.
+ __ push(rdi);
+
+ __ push(rdi); // function is also the parameter to the runtime call
+ __ CallRuntime(Runtime::kLazyCompile, 1);
+ __ pop(rdi);
+
+ // Tear down temporary frame.
+ __ LeaveInternalFrame();
+
+ // Do a tail-call of the compiled function.
+ __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
+ __ jmp(rcx);
+
+ return GetCodeWithFlags(flags, "LazyCompileStub");
+}
+
+
+
+// Emits code for a named load through an interceptor.  All the real
+// work is delegated to the shared CompileLoadInterceptor helper via a
+// platform LoadInterceptorCompiler; |lookup| describes the property
+// behind the interceptor, if any.
+void StubCompiler::GenerateLoadInterceptor(JSObject* object,
+ JSObject* holder,
+ LookupResult* lookup,
+ Register receiver,
+ Register name_reg,
+ Register scratch1,
+ Register scratch2,
+ String* name,
+ Label* miss) {
+ LoadInterceptorCompiler compiler(name_reg);
+ CompileLoadInterceptor(&compiler,
+ this,
+ masm(),
+ object,
+ holder,
+ name,
+ lookup,
+ receiver,
+ scratch1,
+ scratch2,
+ miss);
+}
+
+
+// Emits code for a named load through an AccessorInfo callback.  After
+// the receiver/prototype-chain checks it tail-calls the
+// kLoadCallbackProperty runtime entry, which invokes the C++ getter.
+void StubCompiler::GenerateLoadCallback(JSObject* object,
+ JSObject* holder,
+ Register receiver,
+ Register name_reg,
+ Register scratch1,
+ Register scratch2,
+ AccessorInfo* callback,
+ String* name,
+ Label* miss) {
+ // Check that the receiver isn't a smi.
+ __ testl(receiver, Immediate(kSmiTagMask));
+ __ j(zero, miss);
+
+ // Check that the maps haven't changed.
+ Register reg =
+ CheckPrototypes(object, receiver, holder,
+ scratch1, scratch2, name, miss);
+
+ // Push the arguments on the JS stack of the caller:
+ // receiver, holder, callback object, callback data, name.
+ __ pop(scratch2); // remove return address
+ __ push(receiver); // receiver
+ __ push(reg); // holder
+ __ Move(reg, Handle<AccessorInfo>(callback)); // callback data
+ __ push(reg);
+ __ push(FieldOperand(reg, AccessorInfo::kDataOffset));
+ __ push(name_reg); // name
+ __ push(scratch2); // restore return address
+
+ // Do tail-call to the runtime system with the five arguments
+ // pushed above.
+ ExternalReference load_callback_property =
+ ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
+ __ TailCallRuntime(load_callback_property, 5);
+}
+
+
+// Verifies the prototype chain from |object| to |holder|.  Map checks
+// are emitted by MacroAssembler::CheckMaps; on top of that, every
+// global object on the chain gets a check that its property cell for
+// |name| still contains the hole, i.e. the property has not been
+// added to the global since this stub was compiled (a map check alone
+// would not catch that for dictionary-mode globals).
+// Returns the register holding |holder| on the fast path.
+Register StubCompiler::CheckPrototypes(JSObject* object,
+ Register object_reg,
+ JSObject* holder,
+ Register holder_reg,
+ Register scratch,
+ String* name,
+ Label* miss) {
+ // Check that the maps haven't changed.
+ Register result =
+ __ CheckMaps(object, object_reg, holder, holder_reg, scratch, miss);
+
+ // If we've skipped any global objects, it's not enough to verify
+ // that their maps haven't changed.
+ while (object != holder) {
+ if (object->IsGlobalObject()) {
+ GlobalObject* global = GlobalObject::cast(object);
+ // EnsurePropertyCell may allocate and can therefore fail.
+ Object* probe = global->EnsurePropertyCell(name);
+ if (probe->IsFailure()) {
+ set_failure(Failure::cast(probe));
+ return result;
+ }
+ JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
+ ASSERT(cell->value()->IsTheHole());
+ __ Move(scratch, Handle<Object>(cell));
+ __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
+ Factory::the_hole_value());
+ __ j(not_equal, miss);
+ }
+ object = JSObject::cast(object->GetPrototype());
+ }
+
+ // Return the register containing the holder.
+ return result;
+}
+
+
+// Emits a fast field load: receiver smi/map checks, prototype chain
+// validation, then a direct read of the field at |index| from the
+// holder.  The result is returned in rax.
+void StubCompiler::GenerateLoadField(JSObject* object,
+ JSObject* holder,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ int index,
+ String* name,
+ Label* miss) {
+ // Check that the receiver isn't a smi.
+ __ testl(receiver, Immediate(kSmiTagMask));
+ __ j(zero, miss);
+
+ // Check the prototype chain.
+ Register reg =
+ CheckPrototypes(object, receiver, holder,
+ scratch1, scratch2, name, miss);
+
+ // Get the value from the properties.
+ GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
+ __ ret(0);
+}
+
+
+// Emits a constant-function load: after the same receiver and
+// prototype-chain checks as a field load, the known constant |value|
+// is materialized directly into rax — no memory load is needed.
+void StubCompiler::GenerateLoadConstant(JSObject* object,
+ JSObject* holder,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ Object* value,
+ String* name,
+ Label* miss) {
+ // Check that the receiver isn't a smi.
+ __ testl(receiver, Immediate(kSmiTagMask));
+ __ j(zero, miss);
+
+ // Check that the maps haven't changed.
+ Register reg =
+ CheckPrototypes(object, receiver, holder,
+ scratch1, scratch2, name, miss);
+
+ // Return the constant value.
+ __ Move(rax, Handle<Object>(value));
+ __ ret(0);
+}
+
+
+#undef __
} } // namespace v8::internal
diff --git a/src/zone-inl.h b/src/zone-inl.h
index 9af6251..b3141a4 100644
--- a/src/zone-inl.h
+++ b/src/zone-inl.h
@@ -68,6 +68,223 @@
}
+// Inserts |key| mapped to the dummy value C::kNoValue and binds
+// |locator| to the (new or pre-existing) node; callers set the real
+// value through the locator.  Returns false if the key was already
+// present.  Nodes are zone-allocated, so `new Node` needs no delete.
+template <typename C>
+bool ZoneSplayTree<C>::Insert(const Key& key, Locator* locator) {
+ if (is_empty()) {
+ // If the tree is empty, insert the new node.
+ root_ = new Node(key, C::kNoValue);
+ } else {
+ // Splay on the key to move the last node on the search path
+ // for the key to the root of the tree.
+ Splay(key);
+ // Ignore repeated insertions with the same key.
+ int cmp = C::Compare(key, root_->key_);
+ if (cmp == 0) {
+ locator->bind(root_);
+ return false;
+ }
+ // Insert the new node above the splayed root, splitting the old
+ // root's subtrees between the two sides of the new node.
+ Node* node = new Node(key, C::kNoValue);
+ if (cmp > 0) {
+ node->left_ = root_;
+ node->right_ = root_->right_;
+ root_->right_ = NULL;
+ } else {
+ node->right_ = root_;
+ node->left_ = root_->left_;
+ root_->left_ = NULL;
+ }
+ root_ = node;
+ }
+ locator->bind(root_);
+ return true;
+}
+
+
+// Looks up |key|; on success binds |locator| to the node.  The splay
+// moves the accessed node to the root, so repeated lookups of hot
+// keys are cheap.
+template <typename C>
+bool ZoneSplayTree<C>::Find(const Key& key, Locator* locator) {
+ if (is_empty())
+ return false;
+ Splay(key);
+ if (C::Compare(key, root_->key_) == 0) {
+ locator->bind(root_);
+ return true;
+ } else {
+ return false;
+ }
+}
+
+
+// Finds the node with the greatest key less than OR EQUAL to |key|
+// (despite the name — see the cmp <= 0 test below) and binds
+// |locator| to it.  Returns false if all keys are greater than |key|.
+template <typename C>
+bool ZoneSplayTree<C>::FindGreatestLessThan(const Key& key,
+ Locator* locator) {
+ if (is_empty())
+ return false;
+ // Splay on the key to move the node with the given key or the last
+ // node on the search path to the top of the tree.
+ Splay(key);
+ // Now the result is either the root node or the greatest node in
+ // the left subtree.
+ int cmp = C::Compare(root_->key_, key);
+ if (cmp <= 0) {
+ locator->bind(root_);
+ return true;
+ } else {
+ // Temporarily descend into the left subtree and reuse
+ // FindGreatest; the root pointer is restored afterwards.
+ Node* temp = root_;
+ root_ = root_->left_;
+ bool result = FindGreatest(locator);
+ root_ = temp;
+ return result;
+ }
+}
+
+
+// Finds the node with the least key greater than OR EQUAL to |key|
+// (mirror image of FindGreatestLessThan) and binds |locator| to it.
+// Returns false if all keys are less than |key|.
+template <typename C>
+bool ZoneSplayTree<C>::FindLeastGreaterThan(const Key& key,
+ Locator* locator) {
+ if (is_empty())
+ return false;
+ // Splay on the key to move the node with the given key or the last
+ // node on the search path to the top of the tree.
+ Splay(key);
+ // Now the result is either the root node or the least node in
+ // the right subtree.
+ int cmp = C::Compare(root_->key_, key);
+ if (cmp >= 0) {
+ locator->bind(root_);
+ return true;
+ } else {
+ // Temporarily descend into the right subtree and reuse
+ // FindLeast; the root pointer is restored afterwards.
+ Node* temp = root_;
+ root_ = root_->right_;
+ bool result = FindLeast(locator);
+ root_ = temp;
+ return result;
+ }
+}
+
+
+// Binds |locator| to the node with the maximum key (the rightmost
+// node).  Does not splay; the tree structure is left unchanged.
+template <typename C>
+bool ZoneSplayTree<C>::FindGreatest(Locator* locator) {
+ if (is_empty())
+ return false;
+ Node* current = root_;
+ while (current->right_ != NULL)
+ current = current->right_;
+ locator->bind(current);
+ return true;
+}
+
+
+// Binds |locator| to the node with the minimum key (the leftmost
+// node).  Does not splay; the tree structure is left unchanged.
+template <typename C>
+bool ZoneSplayTree<C>::FindLeast(Locator* locator) {
+ if (is_empty())
+ return false;
+ Node* current = root_;
+ while (current->left_ != NULL)
+ current = current->left_;
+ locator->bind(current);
+ return true;
+}
+
+
+// Removes the node with |key|, returning false if it is not present.
+// The unlinked node is not freed — it is zone memory, reclaimed when
+// the zone dies.
+template <typename C>
+bool ZoneSplayTree<C>::Remove(const Key& key) {
+ // Bail if the tree is empty
+ if (is_empty())
+ return false;
+ // Splay on the key to move the node with the given key to the top.
+ Splay(key);
+ // Bail if the key is not in the tree
+ if (C::Compare(key, root_->key_) != 0)
+ return false;
+ if (root_->left_ == NULL) {
+ // No left child, so the new tree is just the right child.
+ root_ = root_->right_;
+ } else {
+ // Left child exists.
+ Node* right = root_->right_;
+ // Make the original left child the new root.
+ root_ = root_->left_;
+ // Splay to make sure that the new root has an empty right child.
+ // (All keys in the left subtree are less than |key|, so splaying
+ // on the removed key brings the greatest of them to the root.)
+ Splay(key);
+ // Insert the original right child as the right child of the new
+ // root.
+ root_->right_ = right;
+ }
+ return true;
+}
+
+
+// Top-down splay: restructures the tree in a single pass so that the
+// node with |key| — or, if absent, the last node on its search path —
+// becomes the root.  This is the classic top-down formulation
+// (Sleator & Tarjan) using a dummy node to collect the split-off
+// L and R trees.
+template <typename C>
+void ZoneSplayTree<C>::Splay(const Key& key) {
+ if (is_empty())
+ return;
+ Node dummy_node(C::kNoKey, C::kNoValue);
+ // Create a dummy node. The use of the dummy node is a bit
+ // counter-intuitive: The right child of the dummy node will hold
+ // the L tree of the algorithm. The left child of the dummy node
+ // will hold the R tree of the algorithm. Using a dummy node, left
+ // and right will always be nodes and we avoid special cases.
+ Node* dummy = &dummy_node;
+ Node* left = dummy;
+ Node* right = dummy;
+ Node* current = root_;
+ while (true) {
+ int cmp = C::Compare(key, current->key_);
+ if (cmp < 0) {
+ if (current->left_ == NULL)
+ break;
+ if (C::Compare(key, current->left_->key_) < 0) {
+ // Rotate right ("zig-zig" case going left twice).
+ Node* temp = current->left_;
+ current->left_ = temp->right_;
+ temp->right_ = current;
+ current = temp;
+ if (current->left_ == NULL)
+ break;
+ }
+ // Link right: move |current| into the R tree.
+ right->left_ = current;
+ right = current;
+ current = current->left_;
+ } else if (cmp > 0) {
+ if (current->right_ == NULL)
+ break;
+ if (C::Compare(key, current->right_->key_) > 0) {
+ // Rotate left ("zig-zig" case going right twice).
+ Node* temp = current->right_;
+ current->right_ = temp->left_;
+ temp->left_ = current;
+ current = temp;
+ if (current->right_ == NULL)
+ break;
+ }
+ // Link left: move |current| into the L tree.
+ left->right_ = current;
+ left = current;
+ current = current->right_;
+ } else {
+ break;
+ }
+ }
+ // Assemble: reattach the collected L and R trees around the final
+ // node, which becomes the new root.
+ left->right_ = current->left_;
+ right->left_ = current->right_;
+ current->left_ = dummy->right_;
+ current->right_ = dummy->left_;
+ root_ = current;
+}
+
+
+// In-order (ascending key) traversal invoking callback->Call(key,
+// value) for every node.  Recursive, so stack usage is proportional
+// to tree height.
+template <typename Node, class Callback>
+static void DoForEach(Node* node, Callback* callback) {
+ if (node == NULL) return;
+ DoForEach<Node, Callback>(node->left(), callback);
+ callback->Call(node->key(), node->value());
+ DoForEach<Node, Callback>(node->right(), callback);
+}
+
+
} } // namespace v8::internal
#endif // V8_ZONE_INL_H_
diff --git a/src/zone.h b/src/zone.h
index a8b26e9..cdbab32 100644
--- a/src/zone.h
+++ b/src/zone.h
@@ -204,6 +204,108 @@
};
+// Forward declaration; defined in zone-inl.h (in-order traversal).
+template <typename Node, class Callback>
+static void DoForEach(Node* node, Callback* callback);
+
+
+// A zone splay tree. The config type parameter encapsulates the
+// different configurations of a concrete splay tree:
+//
+// typedef Key: the key type
+// typedef Value: the value type
+// static const kNoKey: the dummy key used when no key is set
+// static const kNoValue: the dummy value used to initialize nodes
+// int (Compare)(Key& a, Key& b) -> {-1, 0, 1}: comparison function
+//
+// All nodes are allocated in the current zone; Remove only unlinks a
+// node, the memory is reclaimed when the zone is deleted.
+template <typename Config>
+class ZoneSplayTree : public ZoneObject {
+ public:
+ typedef typename Config::Key Key;
+ typedef typename Config::Value Value;
+
+ class Locator;
+
+ ZoneSplayTree() : root_(NULL) { }
+
+ // Inserts the given key in this tree, initially mapped to
+ // Config::kNoValue; set the real value through the locator. Returns
+ // true if a node was inserted, otherwise false. In both cases the
+ // locator is bound to the node for the key.
+ bool Insert(const Key& key, Locator* locator);
+
+ // Looks up the key in this tree and returns true if it was found,
+ // otherwise false. If the node is found the locator is enabled and
+ // provides access to the mapping for the key.
+ bool Find(const Key& key, Locator* locator);
+
+ // Finds the mapping with the greatest key less than or equal to the
+ // given key.
+ bool FindGreatestLessThan(const Key& key, Locator* locator);
+
+ // Find the mapping with the greatest key in this tree.
+ bool FindGreatest(Locator* locator);
+
+ // Finds the mapping with the least key greater than or equal to the
+ // given key.
+ bool FindLeastGreaterThan(const Key& key, Locator* locator);
+
+ // Find the mapping with the least key in this tree.
+ bool FindLeast(Locator* locator);
+
+ // Remove the node with the given key from the tree. The node's
+ // zone memory is not reclaimed until the zone is deleted.
+ bool Remove(const Key& key);
+
+ bool is_empty() { return root_ == NULL; }
+
+ // Perform the splay operation for the given key. Moves the node with
+ // the given key to the top of the tree. If no node has the given
+ // key, the last node on the search path is moved to the top of the
+ // tree.
+ void Splay(const Key& key);
+
+ // Internal tree node; zone-allocated, never individually freed.
+ class Node : public ZoneObject {
+ public:
+ Node(const Key& key, const Value& value)
+ : key_(key),
+ value_(value),
+ left_(NULL),
+ right_(NULL) { }
+ Key key() { return key_; }
+ Value value() { return value_; }
+ Node* left() { return left_; }
+ Node* right() { return right_; }
+ private:
+ friend class ZoneSplayTree;
+ friend class Locator;
+ Key key_;
+ Value value_;
+ Node* left_;
+ Node* right_;
+ };
+
+ // A locator provides access to a node in the tree without actually
+ // exposing the node.  A locator stays valid as long as the node it
+ // is bound to remains in the tree.
+ class Locator {
+ public:
+ explicit Locator(Node* node) : node_(node) { }
+ Locator() : node_(NULL) { }
+ const Key& key() { return node_->key_; }
+ Value& value() { return node_->value_; }
+ void set_value(const Value& value) { node_->value_ = value; }
+ inline void bind(Node* node) { node_ = node; }
+ private:
+ Node* node_;
+ };
+
+ // Calls c->Call(key, value) for every mapping, in ascending key
+ // order (in-order traversal via DoForEach).
+ template <class Callback>
+ void ForEach(Callback* c) {
+ DoForEach<typename ZoneSplayTree<Config>::Node, Callback>(root_, c);
+ }
+
+ private:
+ Node* root_;
+};
+
+
} } // namespace v8::internal
#endif // V8_ZONE_H_