Version 3.19.3
Performance and stability improvements on all platforms.
git-svn-id: http://v8.googlecode.com/svn/trunk@14749 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/api.cc b/src/api.cc
index f0505cd..7099ca8 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -983,8 +983,12 @@
}
-Local<FunctionTemplate> FunctionTemplate::New(InvocationCallback callback,
- v8::Handle<Value> data, v8::Handle<Signature> signature, int length) {
+template<typename Callback>
+static Local<FunctionTemplate> FunctionTemplateNew(
+ Callback callback_in,
+ v8::Handle<Value> data,
+ v8::Handle<Signature> signature,
+ int length) {
i::Isolate* isolate = i::Isolate::Current();
EnsureInitializedForIsolate(isolate, "v8::FunctionTemplate::New()");
LOG_API(isolate, "FunctionTemplate::New");
@@ -997,8 +1001,10 @@
int next_serial_number = isolate->next_serial_number();
isolate->set_next_serial_number(next_serial_number + 1);
obj->set_serial_number(i::Smi::FromInt(next_serial_number));
- if (callback != 0) {
+ if (callback_in != 0) {
if (data.IsEmpty()) data = v8::Undefined();
+ InvocationCallback callback =
+ i::CallbackTable::Register(isolate, callback_in);
Utils::ToLocal(obj)->SetCallHandler(callback, data);
}
obj->set_length(length);
@@ -1011,6 +1017,24 @@
}
+Local<FunctionTemplate> FunctionTemplate::New(
+ InvocationCallback callback,
+ v8::Handle<Value> data,
+ v8::Handle<Signature> signature,
+ int length) {
+ return FunctionTemplateNew(callback, data, signature, length);
+}
+
+
+Local<FunctionTemplate> FunctionTemplate::New(
+ FunctionCallback callback,
+ v8::Handle<Value> data,
+ v8::Handle<Signature> signature,
+ int length) {
+ return FunctionTemplateNew(callback, data, signature, length);
+}
+
+
Local<Signature> Signature::New(Handle<FunctionTemplate> receiver,
int argc, Handle<FunctionTemplate> argv[]) {
i::Isolate* isolate = i::Isolate::Current();
@@ -1202,9 +1226,11 @@
} while (false)
-void FunctionTemplate::SetCallHandler(InvocationCallback callback,
- v8::Handle<Value> data) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+template<typename Callback>
+static void FunctionTemplateSetCallHandler(FunctionTemplate* function_template,
+ Callback callback,
+ v8::Handle<Value> data) {
+ i::Isolate* isolate = Utils::OpenHandle(function_template)->GetIsolate();
if (IsDeadCheck(isolate, "v8::FunctionTemplate::SetCallHandler()")) return;
ENTER_V8(isolate);
i::HandleScope scope(isolate);
@@ -1215,9 +1241,18 @@
SET_FIELD_WRAPPED(obj, set_callback, callback);
if (data.IsEmpty()) data = v8::Undefined();
obj->set_data(*Utils::OpenHandle(*data));
- Utils::OpenHandle(this)->set_call_code(*obj);
+ Utils::OpenHandle(function_template)->set_call_code(*obj);
}
+void FunctionTemplate::SetCallHandler(InvocationCallback callback,
+ v8::Handle<Value> data) {
+ FunctionTemplateSetCallHandler(this, callback, data);
+}
+
+void FunctionTemplate::SetCallHandler(FunctionCallback callback,
+ v8::Handle<Value> data) {
+ FunctionTemplateSetCallHandler(this, callback, data);
+}
static i::Handle<i::AccessorInfo> SetAccessorInfoProperties(
i::Handle<i::AccessorInfo> obj,
@@ -1237,10 +1272,11 @@
}
+template<typename Getter, typename Setter>
static i::Handle<i::AccessorInfo> MakeAccessorInfo(
v8::Handle<String> name,
- AccessorGetter getter,
- AccessorSetter setter,
+ Getter getter_in,
+ Setter setter_in,
v8::Handle<Value> data,
v8::AccessControl settings,
v8::PropertyAttribute attributes,
@@ -1248,7 +1284,9 @@
i::Isolate* isolate = Utils::OpenHandle(*name)->GetIsolate();
i::Handle<i::ExecutableAccessorInfo> obj =
isolate->factory()->NewExecutableAccessorInfo();
+ AccessorGetter getter = i::CallbackTable::Register(isolate, getter_in);
SET_FIELD_WRAPPED(obj, set_getter, getter);
+ AccessorSetter setter = i::CallbackTable::Register(isolate, setter_in);
SET_FIELD_WRAPPED(obj, set_setter, setter);
if (data.IsEmpty()) data = v8::Undefined();
obj->set_data(*Utils::OpenHandle(*data));
@@ -1259,6 +1297,8 @@
static i::Handle<i::AccessorInfo> MakeAccessorInfo(
v8::Handle<String> name,
v8::Handle<v8::DeclaredAccessorDescriptor> descriptor,
+ void* setter_ignored,
+ void* data_ignored,
v8::AccessControl settings,
v8::PropertyAttribute attributes,
v8::Handle<AccessorSignature> signature) {
@@ -1323,15 +1363,21 @@
Utils::OpenHandle(this)->set_read_only_prototype(true);
}
-
-void FunctionTemplate::SetNamedInstancePropertyHandler(
- NamedPropertyGetter getter,
- NamedPropertySetter setter,
- NamedPropertyQuery query,
- NamedPropertyDeleter remover,
- NamedPropertyEnumerator enumerator,
+template<
+ typename Getter,
+ typename Setter,
+ typename Query,
+ typename Deleter,
+ typename Enumerator>
+static void SetNamedInstancePropertyHandler(
+ i::Handle<i::FunctionTemplateInfo> function_template,
+ Getter getter_in,
+ Setter setter_in,
+ Query query_in,
+ Deleter remover_in,
+ Enumerator enumerator_in,
Handle<Value> data) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+ i::Isolate* isolate = function_template->GetIsolate();
if (IsDeadCheck(isolate,
"v8::FunctionTemplate::SetNamedInstancePropertyHandler()")) {
return;
@@ -1343,26 +1389,40 @@
i::Handle<i::InterceptorInfo> obj =
i::Handle<i::InterceptorInfo>::cast(struct_obj);
+ NamedPropertyGetter getter = i::CallbackTable::Register(isolate, getter_in);
if (getter != 0) SET_FIELD_WRAPPED(obj, set_getter, getter);
+ NamedPropertySetter setter = i::CallbackTable::Register(isolate, setter_in);
if (setter != 0) SET_FIELD_WRAPPED(obj, set_setter, setter);
+ NamedPropertyQuery query = i::CallbackTable::Register(isolate, query_in);
if (query != 0) SET_FIELD_WRAPPED(obj, set_query, query);
+ NamedPropertyDeleter remover =
+ i::CallbackTable::Register(isolate, remover_in);
if (remover != 0) SET_FIELD_WRAPPED(obj, set_deleter, remover);
+ NamedPropertyEnumerator enumerator =
+ i::CallbackTable::Register(isolate, enumerator_in);
if (enumerator != 0) SET_FIELD_WRAPPED(obj, set_enumerator, enumerator);
if (data.IsEmpty()) data = v8::Undefined();
obj->set_data(*Utils::OpenHandle(*data));
- Utils::OpenHandle(this)->set_named_property_handler(*obj);
+ function_template->set_named_property_handler(*obj);
}
-void FunctionTemplate::SetIndexedInstancePropertyHandler(
- IndexedPropertyGetter getter,
- IndexedPropertySetter setter,
- IndexedPropertyQuery query,
- IndexedPropertyDeleter remover,
- IndexedPropertyEnumerator enumerator,
+template<
+ typename Getter,
+ typename Setter,
+ typename Query,
+ typename Deleter,
+ typename Enumerator>
+static void SetIndexedInstancePropertyHandler(
+ i::Handle<i::FunctionTemplateInfo> function_template,
+ Getter getter_in,
+ Setter setter_in,
+ Query query_in,
+ Deleter remover_in,
+ Enumerator enumerator_in,
Handle<Value> data) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+ i::Isolate* isolate = function_template->GetIsolate();
if (IsDeadCheck(isolate,
"v8::FunctionTemplate::SetIndexedInstancePropertyHandler()")) {
return;
@@ -1374,22 +1434,33 @@
i::Handle<i::InterceptorInfo> obj =
i::Handle<i::InterceptorInfo>::cast(struct_obj);
+ IndexedPropertyGetter getter =
+ i::CallbackTable::Register(isolate, getter_in);
if (getter != 0) SET_FIELD_WRAPPED(obj, set_getter, getter);
+ IndexedPropertySetter setter =
+ i::CallbackTable::Register(isolate, setter_in);
if (setter != 0) SET_FIELD_WRAPPED(obj, set_setter, setter);
+ IndexedPropertyQuery query = i::CallbackTable::Register(isolate, query_in);
if (query != 0) SET_FIELD_WRAPPED(obj, set_query, query);
+ IndexedPropertyDeleter remover =
+ i::CallbackTable::Register(isolate, remover_in);
if (remover != 0) SET_FIELD_WRAPPED(obj, set_deleter, remover);
+ IndexedPropertyEnumerator enumerator =
+ i::CallbackTable::Register(isolate, enumerator_in);
if (enumerator != 0) SET_FIELD_WRAPPED(obj, set_enumerator, enumerator);
if (data.IsEmpty()) data = v8::Undefined();
obj->set_data(*Utils::OpenHandle(*data));
- Utils::OpenHandle(this)->set_indexed_property_handler(*obj);
+ function_template->set_indexed_property_handler(*obj);
}
-void FunctionTemplate::SetInstanceCallAsFunctionHandler(
- InvocationCallback callback,
+template<typename Callback>
+static void SetInstanceCallAsFunctionHandler(
+ i::Handle<i::FunctionTemplateInfo> function_template,
+ Callback callback_in,
Handle<Value> data) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+ i::Isolate* isolate = function_template->GetIsolate();
if (IsDeadCheck(isolate,
"v8::FunctionTemplate::SetInstanceCallAsFunctionHandler()")) {
return;
@@ -1400,10 +1471,12 @@
isolate->factory()->NewStruct(i::CALL_HANDLER_INFO_TYPE);
i::Handle<i::CallHandlerInfo> obj =
i::Handle<i::CallHandlerInfo>::cast(struct_obj);
+ InvocationCallback callback =
+ i::CallbackTable::Register(isolate, callback_in);
SET_FIELD_WRAPPED(obj, set_callback, callback);
if (data.IsEmpty()) data = v8::Undefined();
obj->set_data(*Utils::OpenHandle(*data));
- Utils::OpenHandle(this)->set_instance_call_handler(*obj);
+ function_template->set_instance_call_handler(*obj);
}
@@ -1461,6 +1534,32 @@
}
+template<typename Setter, typename Getter, typename Data>
+static bool ObjectTemplateSetAccessor(
+ ObjectTemplate* object_template,
+ v8::Handle<String> name,
+ Getter getter,
+ Setter setter,
+ Data data,
+ AccessControl settings,
+ PropertyAttribute attribute,
+ v8::Handle<AccessorSignature> signature) {
+ i::Isolate* isolate = Utils::OpenHandle(object_template)->GetIsolate();
+ if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetAccessor()")) return false;
+ ENTER_V8(isolate);
+ i::HandleScope scope(isolate);
+ EnsureConstructor(object_template);
+ i::FunctionTemplateInfo* constructor = i::FunctionTemplateInfo::cast(
+ Utils::OpenHandle(object_template)->constructor());
+ i::Handle<i::FunctionTemplateInfo> cons(constructor);
+ i::Handle<i::AccessorInfo> obj = MakeAccessorInfo(
+ name, getter, setter, data, settings, attribute, signature);
+ if (obj.is_null()) return false;
+ AddPropertyToFunctionTemplate(cons, obj);
+ return true;
+}
+
+
void ObjectTemplate::SetAccessor(v8::Handle<String> name,
AccessorGetter getter,
AccessorSetter setter,
@@ -1468,18 +1567,20 @@
AccessControl settings,
PropertyAttribute attribute,
v8::Handle<AccessorSignature> signature) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
- if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetAccessor()")) return;
- ENTER_V8(isolate);
- i::HandleScope scope(isolate);
- EnsureConstructor(this);
- i::FunctionTemplateInfo* constructor =
- i::FunctionTemplateInfo::cast(Utils::OpenHandle(this)->constructor());
- i::Handle<i::FunctionTemplateInfo> cons(constructor);
- i::Handle<i::AccessorInfo> obj = MakeAccessorInfo(name, getter, setter, data,
- settings, attribute,
- signature);
- AddPropertyToFunctionTemplate(cons, obj);
+ ObjectTemplateSetAccessor(
+ this, name, getter, setter, data, settings, attribute, signature);
+}
+
+
+void ObjectTemplate::SetAccessor(v8::Handle<String> name,
+ AccessorGetterCallback getter,
+ AccessorSetterCallback setter,
+ v8::Handle<Value> data,
+ AccessControl settings,
+ PropertyAttribute attribute,
+ v8::Handle<AccessorSignature> signature) {
+ ObjectTemplateSetAccessor(
+ this, name, getter, setter, data, settings, attribute, signature);
}
@@ -1488,44 +1589,67 @@
AccessControl settings,
PropertyAttribute attribute,
Handle<AccessorSignature> signature) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
- if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetAccessor()")) return false;
- ENTER_V8(isolate);
- i::HandleScope scope(isolate);
- EnsureConstructor(this);
- i::FunctionTemplateInfo* constructor =
- i::FunctionTemplateInfo::cast(Utils::OpenHandle(this)->constructor());
- i::Handle<i::FunctionTemplateInfo> cons(constructor);
- i::Handle<i::AccessorInfo> obj = MakeAccessorInfo(
- name, descriptor, settings, attribute, signature);
- if (obj.is_null()) return false;
- AddPropertyToFunctionTemplate(cons, obj);
- return true;
+ void* null = NULL;
+ return ObjectTemplateSetAccessor(
+ this, name, descriptor, null, null, settings, attribute, signature);
}
-void ObjectTemplate::SetNamedPropertyHandler(NamedPropertyGetter getter,
- NamedPropertySetter setter,
- NamedPropertyQuery query,
- NamedPropertyDeleter remover,
- NamedPropertyEnumerator enumerator,
- Handle<Value> data) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+template<
+ typename Getter,
+ typename Setter,
+ typename Query,
+ typename Deleter,
+ typename Enumerator>
+static void ObjectTemplateSetNamedPropertyHandler(
+ ObjectTemplate* object_template,
+ Getter getter,
+ Setter setter,
+ Query query,
+ Deleter remover,
+ Enumerator enumerator,
+ Handle<Value> data) {
+ i::Isolate* isolate = Utils::OpenHandle(object_template)->GetIsolate();
if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetNamedPropertyHandler()")) {
return;
}
ENTER_V8(isolate);
i::HandleScope scope(isolate);
- EnsureConstructor(this);
- i::FunctionTemplateInfo* constructor =
- i::FunctionTemplateInfo::cast(Utils::OpenHandle(this)->constructor());
+ EnsureConstructor(object_template);
+ i::FunctionTemplateInfo* constructor = i::FunctionTemplateInfo::cast(
+ Utils::OpenHandle(object_template)->constructor());
i::Handle<i::FunctionTemplateInfo> cons(constructor);
- Utils::ToLocal(cons)->SetNamedInstancePropertyHandler(getter,
- setter,
- query,
- remover,
- enumerator,
- data);
+ SetNamedInstancePropertyHandler(cons,
+ getter,
+ setter,
+ query,
+ remover,
+ enumerator,
+ data);
+}
+
+
+void ObjectTemplate::SetNamedPropertyHandler(
+ NamedPropertyGetter getter,
+ NamedPropertySetter setter,
+ NamedPropertyQuery query,
+ NamedPropertyDeleter remover,
+ NamedPropertyEnumerator enumerator,
+ Handle<Value> data) {
+ ObjectTemplateSetNamedPropertyHandler(
+ this, getter, setter, query, remover, enumerator, data);
+}
+
+
+void ObjectTemplate::SetNamedPropertyHandler(
+ NamedPropertyGetterCallback getter,
+ NamedPropertySetterCallback setter,
+ NamedPropertyQueryCallback query,
+ NamedPropertyDeleterCallback remover,
+ NamedPropertyEnumeratorCallback enumerator,
+ Handle<Value> data) {
+ ObjectTemplateSetNamedPropertyHandler(
+ this, getter, setter, query, remover, enumerator, data);
}
@@ -1574,6 +1698,40 @@
}
+template<
+ typename Getter,
+ typename Setter,
+ typename Query,
+ typename Deleter,
+ typename Enumerator>
+void ObjectTemplateSetIndexedPropertyHandler(
+ ObjectTemplate* object_template,
+ Getter getter,
+ Setter setter,
+ Query query,
+ Deleter remover,
+ Enumerator enumerator,
+ Handle<Value> data) {
+ i::Isolate* isolate = Utils::OpenHandle(object_template)->GetIsolate();
+ if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetIndexedPropertyHandler()")) {
+ return;
+ }
+ ENTER_V8(isolate);
+ i::HandleScope scope(isolate);
+ EnsureConstructor(object_template);
+ i::FunctionTemplateInfo* constructor = i::FunctionTemplateInfo::cast(
+ Utils::OpenHandle(object_template)->constructor());
+ i::Handle<i::FunctionTemplateInfo> cons(constructor);
+ SetIndexedInstancePropertyHandler(cons,
+ getter,
+ setter,
+ query,
+ remover,
+ enumerator,
+ data);
+}
+
+
void ObjectTemplate::SetIndexedPropertyHandler(
IndexedPropertyGetter getter,
IndexedPropertySetter setter,
@@ -1581,39 +1739,52 @@
IndexedPropertyDeleter remover,
IndexedPropertyEnumerator enumerator,
Handle<Value> data) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
- if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetIndexedPropertyHandler()")) {
- return;
- }
- ENTER_V8(isolate);
- i::HandleScope scope(isolate);
- EnsureConstructor(this);
- i::FunctionTemplateInfo* constructor =
- i::FunctionTemplateInfo::cast(Utils::OpenHandle(this)->constructor());
- i::Handle<i::FunctionTemplateInfo> cons(constructor);
- Utils::ToLocal(cons)->SetIndexedInstancePropertyHandler(getter,
- setter,
- query,
- remover,
- enumerator,
- data);
+ ObjectTemplateSetIndexedPropertyHandler(
+ this, getter, setter, query, remover, enumerator, data);
}
-void ObjectTemplate::SetCallAsFunctionHandler(InvocationCallback callback,
- Handle<Value> data) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
+void ObjectTemplate::SetIndexedPropertyHandler(
+ IndexedPropertyGetterCallback getter,
+ IndexedPropertySetterCallback setter,
+ IndexedPropertyQueryCallback query,
+ IndexedPropertyDeleterCallback remover,
+ IndexedPropertyEnumeratorCallback enumerator,
+ Handle<Value> data) {
+ ObjectTemplateSetIndexedPropertyHandler(
+ this, getter, setter, query, remover, enumerator, data);
+}
+
+
+template<typename Callback>
+static void ObjectTemplateSetCallAsFunctionHandler(
+ ObjectTemplate* object_template,
+ Callback callback,
+ Handle<Value> data) {
+ i::Isolate* isolate = Utils::OpenHandle(object_template)->GetIsolate();
if (IsDeadCheck(isolate,
"v8::ObjectTemplate::SetCallAsFunctionHandler()")) {
return;
}
ENTER_V8(isolate);
i::HandleScope scope(isolate);
- EnsureConstructor(this);
- i::FunctionTemplateInfo* constructor =
- i::FunctionTemplateInfo::cast(Utils::OpenHandle(this)->constructor());
+ EnsureConstructor(object_template);
+ i::FunctionTemplateInfo* constructor = i::FunctionTemplateInfo::cast(
+ Utils::OpenHandle(object_template)->constructor());
i::Handle<i::FunctionTemplateInfo> cons(constructor);
- Utils::ToLocal(cons)->SetInstanceCallAsFunctionHandler(callback, data);
+ SetInstanceCallAsFunctionHandler(cons, callback, data);
+}
+
+
+void ObjectTemplate::SetCallAsFunctionHandler(InvocationCallback callback,
+ Handle<Value> data) {
+ return ObjectTemplateSetCallAsFunctionHandler(this, callback, data);
+}
+
+
+void ObjectTemplate::SetCallAsFunctionHandler(FunctionCallback callback,
+ Handle<Value> data) {
+ return ObjectTemplateSetCallAsFunctionHandler(this, callback, data);
}
@@ -3446,7 +3617,21 @@
}
-static inline bool SetAccessor(Object* obj, i::Handle<i::AccessorInfo> info) {
+template<typename Setter, typename Getter, typename Data>
+static inline bool ObjectSetAccessor(Object* obj,
+ Handle<String> name,
+ Setter getter,
+ Getter setter,
+ Data data,
+ AccessControl settings,
+ PropertyAttribute attributes) {
+ i::Isolate* isolate = Utils::OpenHandle(obj)->GetIsolate();
+ ON_BAILOUT(isolate, "v8::Object::SetAccessor()", return false);
+ ENTER_V8(isolate);
+ i::HandleScope scope(isolate);
+ v8::Handle<AccessorSignature> signature;
+ i::Handle<i::AccessorInfo> info = MakeAccessorInfo(
+ name, getter, setter, data, settings, attributes, signature);
if (info.is_null()) return false;
bool fast = Utils::OpenHandle(obj)->HasFastProperties();
i::Handle<i::Object> result = i::SetAccessor(Utils::OpenHandle(obj), info);
@@ -3462,15 +3647,19 @@
v8::Handle<Value> data,
AccessControl settings,
PropertyAttribute attributes) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
- ON_BAILOUT(isolate, "v8::Object::SetAccessor()", return false);
- ENTER_V8(isolate);
- i::HandleScope scope(isolate);
- v8::Handle<AccessorSignature> signature;
- i::Handle<i::AccessorInfo> info = MakeAccessorInfo(name, getter, setter, data,
- settings, attributes,
- signature);
- return v8::SetAccessor(this, info);
+ return ObjectSetAccessor(
+ this, name, getter, setter, data, settings, attributes);
+}
+
+
+bool Object::SetAccessor(Handle<String> name,
+ AccessorGetterCallback getter,
+ AccessorSetterCallback setter,
+ v8::Handle<Value> data,
+ AccessControl settings,
+ PropertyAttribute attributes) {
+ return ObjectSetAccessor(
+ this, name, getter, setter, data, settings, attributes);
}
@@ -3478,14 +3667,9 @@
Handle<DeclaredAccessorDescriptor> descriptor,
AccessControl settings,
PropertyAttribute attributes) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
- ON_BAILOUT(isolate, "v8::Object::SetAccessor()", return false);
- ENTER_V8(isolate);
- i::HandleScope scope(isolate);
- v8::Handle<AccessorSignature> signature;
- i::Handle<i::AccessorInfo> info = MakeAccessorInfo(
- name, descriptor, settings, attributes, signature);
- return v8::SetAccessor(this, info);
+ void* null = NULL;
+ return ObjectSetAccessor(
+ this, name, descriptor, null, null, settings, attributes);
}
@@ -6023,12 +6207,19 @@
Local<Number> v8::Number::New(double value) {
i::Isolate* isolate = i::Isolate::Current();
EnsureInitializedForIsolate(isolate, "v8::Number::New()");
+ return Number::New(reinterpret_cast<Isolate*>(isolate), value);
+}
+
+
+Local<Number> v8::Number::New(Isolate* isolate, double value) {
+ i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate);
+ ASSERT(internal_isolate->IsInitialized());
if (std::isnan(value)) {
// Introduce only canonical NaN value into the VM, to avoid signaling NaNs.
value = i::OS::nan_value();
}
- ENTER_V8(isolate);
- i::Handle<i::Object> result = isolate->factory()->NewNumber(value);
+ ENTER_V8(internal_isolate);
+ i::Handle<i::Object> result = internal_isolate->factory()->NewNumber(value);
return Utils::NumberToLocal(result);
}
diff --git a/src/api.h b/src/api.h
index 686abf7..12d6e3d 100644
--- a/src/api.h
+++ b/src/api.h
@@ -149,12 +149,10 @@
static void UnregisterAll();
Extension* extension() { return extension_; }
RegisteredExtension* next() { return next_; }
- RegisteredExtension* next_auto() { return next_auto_; }
static RegisteredExtension* first_extension() { return first_extension_; }
private:
Extension* extension_;
RegisteredExtension* next_;
- RegisteredExtension* next_auto_;
static RegisteredExtension* first_extension_;
};
diff --git a/src/apiutils.h b/src/apiutils.h
index 9831f08..0765585 100644
--- a/src/apiutils.h
+++ b/src/apiutils.h
@@ -39,31 +39,6 @@
return that->names_;
}
- // Packs additional parameters for the NewArguments function. |implicit_args|
- // is a pointer to the last element of 4-elements array controlled by GC.
- static void PrepareArgumentsData(internal::Object** implicit_args,
- internal::Isolate* isolate,
- internal::Object* data,
- internal::JSFunction* callee,
- internal::Object* holder) {
- implicit_args[v8::Arguments::kDataIndex] = data;
- implicit_args[v8::Arguments::kCalleeIndex] = callee;
- implicit_args[v8::Arguments::kHolderIndex] = holder;
- implicit_args[v8::Arguments::kIsolateIndex] =
- reinterpret_cast<internal::Object*>(isolate);
- }
-
- static v8::Arguments NewArguments(internal::Object** implicit_args,
- internal::Object** argv, int argc,
- bool is_construct_call) {
- ASSERT(implicit_args[v8::Arguments::kCalleeIndex]->IsJSFunction());
- ASSERT(implicit_args[v8::Arguments::kHolderIndex]->IsHeapObject());
- // The implicit isolate argument is not tagged and looks like a SMI.
- ASSERT(implicit_args[v8::Arguments::kIsolateIndex]->IsSmi());
-
- return v8::Arguments(implicit_args, argv, argc, is_construct_call);
- }
-
// Introduce an alias for the handle scope data to allow non-friends
// to access the HandleScope data.
typedef v8::HandleScope::Data HandleScopeData;
diff --git a/src/arguments.cc b/src/arguments.cc
new file mode 100644
index 0000000..091d0b9
--- /dev/null
+++ b/src/arguments.cc
@@ -0,0 +1,195 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+#include "arguments.h"
+
+namespace v8 {
+namespace internal {
+
+
+static bool Match(void* a, void* b) {
+ return a == b;
+}
+
+
+static uint32_t Hash(void* function) {
+ uintptr_t as_int = reinterpret_cast<uintptr_t>(function);
+ if (sizeof(function) == 4) return static_cast<uint32_t>(as_int);
+ uint64_t as_64 = static_cast<uint64_t>(as_int);
+ return
+ static_cast<uint32_t>(as_64 >> 32) ^
+ static_cast<uint32_t>(as_64);
+}
+
+
+CallbackTable::CallbackTable(): map_(Match, 64) {}
+
+
+bool CallbackTable::Contains(void* function) {
+ ASSERT(function != NULL);
+ return map_.Lookup(function, Hash(function), false) != NULL;
+}
+
+
+void CallbackTable::InsertCallback(Isolate* isolate,
+ void* function,
+ bool returns_void) {
+ if (function == NULL) return;
+ // Don't store for performance.
+ if (kStoreVoidFunctions != returns_void) return;
+ CallbackTable* table = isolate->callback_table();
+ if (table == NULL) {
+ table = new CallbackTable();
+ isolate->set_callback_table(table);
+ }
+ typedef HashMap::Entry Entry;
+ Entry* entry = table->map_.Lookup(function, Hash(function), true);
+ ASSERT(entry != NULL);
+ ASSERT(entry->value == NULL || entry->value == function);
+ entry->value = function;
+}
+
+
+template<typename T>
+template<typename V>
+v8::Handle<V> CustomArguments<T>::GetReturnValue(Isolate* isolate) {
+ // Check the ReturnValue.
+ Object** handle = &this->end()[kReturnValueOffset];
+ // Nothing was set, return empty handle as per previous behaviour.
+ if ((*handle)->IsTheHole()) return v8::Handle<V>();
+ return v8::Handle<V>(reinterpret_cast<V*>(handle));
+}
+
+
+v8::Handle<v8::Value> FunctionCallbackArguments::Call(InvocationCallback f) {
+ Isolate* isolate = this->isolate();
+ void* f_as_void = CallbackTable::FunctionToVoidPtr(f);
+ bool new_style = CallbackTable::ReturnsVoid(isolate, f_as_void);
+ if (new_style) {
+ FunctionCallback c = reinterpret_cast<FunctionCallback>(f);
+ FunctionCallbackInfo<v8::Value> info(end(),
+ argv_,
+ argc_,
+ is_construct_call_);
+ c(info);
+ } else {
+ v8::Arguments args(end(),
+ argv_,
+ argc_,
+ is_construct_call_);
+ v8::Handle<v8::Value> return_value = f(args);
+ if (!return_value.IsEmpty()) return return_value;
+ }
+ return GetReturnValue<v8::Value>(isolate);
+}
+
+
+#define WRITE_CALL_0(OldFunction, NewFunction, ReturnValue) \
+v8::Handle<ReturnValue> PropertyCallbackArguments::Call(OldFunction f) { \
+ Isolate* isolate = this->isolate(); \
+ void* f_as_void = CallbackTable::FunctionToVoidPtr(f); \
+ bool new_style = CallbackTable::ReturnsVoid(isolate, f_as_void); \
+ if (new_style) { \
+ NewFunction c = reinterpret_cast<NewFunction>(f); \
+ PropertyCallbackInfo<ReturnValue> info(end()); \
+ c(info); \
+ } else { \
+ v8::AccessorInfo info(end()); \
+ v8::Handle<ReturnValue> return_value = f(info); \
+ if (!return_value.IsEmpty()) return return_value; \
+ } \
+ return GetReturnValue<ReturnValue>(isolate); \
+}
+
+#define WRITE_CALL_1(OldFunction, NewFunction, ReturnValue, Arg1) \
+v8::Handle<ReturnValue> PropertyCallbackArguments::Call(OldFunction f, \
+ Arg1 arg1) { \
+ Isolate* isolate = this->isolate(); \
+ void* f_as_void = CallbackTable::FunctionToVoidPtr(f); \
+ bool new_style = CallbackTable::ReturnsVoid(isolate, f_as_void); \
+ if (new_style) { \
+ NewFunction c = reinterpret_cast<NewFunction>(f); \
+ PropertyCallbackInfo<ReturnValue> info(end()); \
+ c(arg1, info); \
+ } else { \
+ v8::AccessorInfo info(end()); \
+ v8::Handle<ReturnValue> return_value = f(arg1, info); \
+ if (!return_value.IsEmpty()) return return_value; \
+ } \
+ return GetReturnValue<ReturnValue>(isolate); \
+}
+
+#define WRITE_CALL_2(OldFunction, NewFunction, ReturnValue, Arg1, Arg2) \
+v8::Handle<ReturnValue> PropertyCallbackArguments::Call(OldFunction f, \
+ Arg1 arg1, \
+ Arg2 arg2) { \
+ Isolate* isolate = this->isolate(); \
+ void* f_as_void = CallbackTable::FunctionToVoidPtr(f); \
+ bool new_style = CallbackTable::ReturnsVoid(isolate, f_as_void); \
+ if (new_style) { \
+ NewFunction c = reinterpret_cast<NewFunction>(f); \
+ PropertyCallbackInfo<ReturnValue> info(end()); \
+ c(arg1, arg2, info); \
+ } else { \
+ v8::AccessorInfo info(end()); \
+ v8::Handle<ReturnValue> return_value = f(arg1, arg2, info); \
+ if (!return_value.IsEmpty()) return return_value; \
+ } \
+ return GetReturnValue<ReturnValue>(isolate); \
+}
+
+#define WRITE_CALL_2_VOID(OldFunction, NewFunction, ReturnValue, Arg1, Arg2) \
+void PropertyCallbackArguments::Call(OldFunction f, \
+ Arg1 arg1, \
+ Arg2 arg2) { \
+ Isolate* isolate = this->isolate(); \
+ void* f_as_void = CallbackTable::FunctionToVoidPtr(f); \
+ bool new_style = CallbackTable::ReturnsVoid(isolate, f_as_void); \
+ if (new_style) { \
+ NewFunction c = reinterpret_cast<NewFunction>(f); \
+ PropertyCallbackInfo<ReturnValue> info(end()); \
+ c(arg1, arg2, info); \
+ } else { \
+ v8::AccessorInfo info(end()); \
+ f(arg1, arg2, info); \
+ } \
+}
+
+FOR_EACH_CALLBACK_TABLE_MAPPING_0(WRITE_CALL_0)
+FOR_EACH_CALLBACK_TABLE_MAPPING_1(WRITE_CALL_1)
+FOR_EACH_CALLBACK_TABLE_MAPPING_2(WRITE_CALL_2)
+FOR_EACH_CALLBACK_TABLE_MAPPING_2_VOID_RETURN(WRITE_CALL_2_VOID)
+
+#undef WRITE_CALL_0
+#undef WRITE_CALL_1
+#undef WRITE_CALL_2
+#undef WRITE_CALL_2_VOID
+
+
+} } // namespace v8::internal
+
diff --git a/src/arguments.h b/src/arguments.h
index 1423d56..a80b613 100644
--- a/src/arguments.h
+++ b/src/arguments.h
@@ -82,35 +82,258 @@
};
+// mappings from old property callbacks to new ones
+// F(old name, new name, return value, parameters...)
+//
+// These aren't included in the list as they have duplicate signatures
+// F(NamedPropertyEnumerator, NamedPropertyEnumeratorCallback, ...)
+// F(NamedPropertyGetter, NamedPropertyGetterCallback, ...)
+
+#define FOR_EACH_CALLBACK_TABLE_MAPPING_0(F) \
+ F(IndexedPropertyEnumerator, IndexedPropertyEnumeratorCallback, v8::Array) \
+
+#define FOR_EACH_CALLBACK_TABLE_MAPPING_1(F) \
+ F(AccessorGetter, AccessorGetterCallback, v8::Value, v8::Local<v8::String>) \
+ F(NamedPropertyQuery, \
+ NamedPropertyQueryCallback, \
+ v8::Integer, \
+ v8::Local<v8::String>) \
+ F(NamedPropertyDeleter, \
+ NamedPropertyDeleterCallback, \
+ v8::Boolean, \
+ v8::Local<v8::String>) \
+ F(IndexedPropertyGetter, \
+ IndexedPropertyGetterCallback, \
+ v8::Value, \
+ uint32_t) \
+ F(IndexedPropertyQuery, \
+ IndexedPropertyQueryCallback, \
+ v8::Integer, \
+ uint32_t) \
+ F(IndexedPropertyDeleter, \
+ IndexedPropertyDeleterCallback, \
+ v8::Boolean, \
+ uint32_t) \
+
+#define FOR_EACH_CALLBACK_TABLE_MAPPING_2(F) \
+ F(NamedPropertySetter, \
+ NamedPropertySetterCallback, \
+ v8::Value, \
+ v8::Local<v8::String>, \
+ v8::Local<v8::Value>) \
+ F(IndexedPropertySetter, \
+ IndexedPropertySetterCallback, \
+ v8::Value, \
+ uint32_t, \
+ v8::Local<v8::Value>) \
+
+#define FOR_EACH_CALLBACK_TABLE_MAPPING_2_VOID_RETURN(F) \
+ F(AccessorSetter, \
+ AccessorSetterCallback, \
+ void, \
+ v8::Local<v8::String>, \
+ v8::Local<v8::Value>) \
+
+// All property callbacks as well as invocation callbacks
+#define FOR_EACH_CALLBACK_TABLE_MAPPING(F) \
+ F(InvocationCallback, FunctionCallback) \
+ F(AccessorGetter, AccessorGetterCallback) \
+ F(AccessorSetter, AccessorSetterCallback) \
+ F(NamedPropertySetter, NamedPropertySetterCallback) \
+ F(NamedPropertyQuery, NamedPropertyQueryCallback) \
+ F(NamedPropertyDeleter, NamedPropertyDeleterCallback) \
+ F(IndexedPropertyGetter, IndexedPropertyGetterCallback) \
+ F(IndexedPropertySetter, IndexedPropertySetterCallback) \
+ F(IndexedPropertyQuery, IndexedPropertyQueryCallback) \
+ F(IndexedPropertyDeleter, IndexedPropertyDeleterCallback) \
+ F(IndexedPropertyEnumerator, IndexedPropertyEnumeratorCallback) \
+
+
+// TODO(dcarney): Remove this class when old callbacks are gone.
+class CallbackTable {
+ public:
+ // TODO(dcarney): Flip this when it makes sense for performance.
+ static const bool kStoreVoidFunctions = true;
+ static inline bool ReturnsVoid(Isolate* isolate, void* function) {
+ CallbackTable* table = isolate->callback_table();
+ bool contains =
+ table != NULL &&
+ table->map_.occupancy() != 0 &&
+ table->Contains(function);
+ return contains == kStoreVoidFunctions;
+ }
+
+ STATIC_ASSERT(sizeof(intptr_t) == sizeof(AccessorGetterCallback));
+
+ template<typename F>
+ static inline void* FunctionToVoidPtr(F function) {
+ return reinterpret_cast<void*>(reinterpret_cast<intptr_t>(function));
+ }
+
+#define WRITE_REGISTER(OldFunction, NewFunction) \
+ static OldFunction Register(Isolate* isolate, NewFunction f) { \
+ InsertCallback(isolate, FunctionToVoidPtr(f), true); \
+ return reinterpret_cast<OldFunction>(f); \
+ } \
+ \
+ static OldFunction Register(Isolate* isolate, OldFunction f) { \
+ InsertCallback(isolate, FunctionToVoidPtr(f), false); \
+ return f; \
+ }
+ FOR_EACH_CALLBACK_TABLE_MAPPING(WRITE_REGISTER)
+#undef WRITE_REGISTER
+
+ private:
+ CallbackTable();
+ bool Contains(void* function);
+ static void InsertCallback(Isolate* isolate,
+ void* function,
+ bool returns_void);
+ HashMap map_;
+ DISALLOW_COPY_AND_ASSIGN(CallbackTable);
+};
+
+
// Custom arguments replicate a small segment of stack that can be
// accessed through an Arguments object the same way the actual stack
// can.
-class CustomArguments : public Relocatable {
+template<int kArrayLength>
+class CustomArgumentsBase : public Relocatable {
public:
- inline CustomArguments(Isolate* isolate,
- Object* data,
- Object* self,
- JSObject* holder) : Relocatable(isolate) {
- ASSERT(reinterpret_cast<Object*>(isolate)->IsSmi());
- values_[3] = self;
- values_[2] = holder;
- values_[1] = data;
- values_[0] = reinterpret_cast<Object*>(isolate);
+ virtual inline void IterateInstance(ObjectVisitor* v) {
+ v->VisitPointers(values_, values_ + kArrayLength);
+ }
+ protected:
+ inline Object** end() { return values_ + kArrayLength - 1; }
+ explicit inline CustomArgumentsBase(Isolate* isolate)
+ : Relocatable(isolate) {}
+ Object* values_[kArrayLength];
+};
+
+
+template<typename T>
+class CustomArguments : public CustomArgumentsBase<T::kArgsLength> {
+ public:
+ static const int kReturnValueOffset = T::kReturnValueIndex;
+
+ typedef CustomArgumentsBase<T::kArgsLength> Super;
+ ~CustomArguments() {
+ // TODO(dcarney): create a new zap value for this.
+ this->end()[kReturnValueOffset] =
+ reinterpret_cast<Object*>(kHandleZapValue);
}
- inline explicit CustomArguments(Isolate* isolate) : Relocatable(isolate) {
-#ifdef DEBUG
- for (size_t i = 0; i < ARRAY_SIZE(values_); i++) {
- values_[i] = reinterpret_cast<Object*>(kZapValue);
- }
-#endif
+ protected:
+ explicit inline CustomArguments(Isolate* isolate) : Super(isolate) {}
+
+ template<typename V>
+ v8::Handle<V> GetReturnValue(Isolate* isolate);
+
+ inline Isolate* isolate() {
+ return reinterpret_cast<Isolate*>(this->end()[T::kIsolateIndex]);
+ }
+};
+
+
+class PropertyCallbackArguments
+ : public CustomArguments<PropertyCallbackInfo<Value> > {
+ public:
+ typedef PropertyCallbackInfo<Value> T;
+ typedef CustomArguments<T> Super;
+ static const int kArgsLength = T::kArgsLength;
+ static const int kThisIndex = T::kThisIndex;
+ static const int kHolderIndex = T::kHolderIndex;
+
+ PropertyCallbackArguments(Isolate* isolate,
+ Object* data,
+ Object* self,
+ JSObject* holder)
+ : Super(isolate) {
+ Object** values = this->end();
+ values[T::kThisIndex] = self;
+ values[T::kHolderIndex] = holder;
+ values[T::kDataIndex] = data;
+ values[T::kIsolateIndex] = reinterpret_cast<Object*>(isolate);
+ values[T::kReturnValueIndex] = isolate->heap()->the_hole_value();
+ ASSERT(values[T::kHolderIndex]->IsHeapObject());
+ ASSERT(values[T::kIsolateIndex]->IsSmi());
}
- void IterateInstance(ObjectVisitor* v);
- Object** end() { return values_ + ARRAY_SIZE(values_) - 1; }
+ /*
+ * The following Call functions wrap the calling of all callbacks to handle
+ * calling either the old or the new style callbacks depending on which one
+ * has been registered.
+ * For old callbacks which return an empty handle, the ReturnValue is checked
+ * and used if it's been set to anything inside the callback.
+ * New style callbacks always use the return value.
+ */
+#define WRITE_CALL_0(OldFunction, NewFunction, ReturnValue) \
+ v8::Handle<ReturnValue> Call(OldFunction f); \
+
+#define WRITE_CALL_1(OldFunction, NewFunction, ReturnValue, Arg1) \
+ v8::Handle<ReturnValue> Call(OldFunction f, Arg1 arg1); \
+
+#define WRITE_CALL_2(OldFunction, NewFunction, ReturnValue, Arg1, Arg2) \
+ v8::Handle<ReturnValue> Call(OldFunction f, Arg1 arg1, Arg2 arg2); \
+
+#define WRITE_CALL_2_VOID(OldFunction, NewFunction, ReturnValue, Arg1, Arg2) \
+ void Call(OldFunction f, Arg1 arg1, Arg2 arg2); \
+
+FOR_EACH_CALLBACK_TABLE_MAPPING_0(WRITE_CALL_0)
+FOR_EACH_CALLBACK_TABLE_MAPPING_1(WRITE_CALL_1)
+FOR_EACH_CALLBACK_TABLE_MAPPING_2(WRITE_CALL_2)
+FOR_EACH_CALLBACK_TABLE_MAPPING_2_VOID_RETURN(WRITE_CALL_2_VOID)
+
+#undef WRITE_CALL_0
+#undef WRITE_CALL_1
+#undef WRITE_CALL_2
+#undef WRITE_CALL_2_VOID
+};
+
+
+class FunctionCallbackArguments
+ : public CustomArguments<FunctionCallbackInfo<Value> > {
+ public:
+ typedef FunctionCallbackInfo<Value> T;
+ typedef CustomArguments<T> Super;
+ static const int kArgsLength = T::kArgsLength;
+
+ FunctionCallbackArguments(internal::Isolate* isolate,
+ internal::Object* data,
+ internal::JSFunction* callee,
+ internal::Object* holder,
+ internal::Object** argv,
+ int argc,
+ bool is_construct_call)
+ : Super(isolate),
+ argv_(argv),
+ argc_(argc),
+ is_construct_call_(is_construct_call) {
+ Object** values = end();
+ values[T::kDataIndex] = data;
+ values[T::kCalleeIndex] = callee;
+ values[T::kHolderIndex] = holder;
+ values[T::kIsolateIndex] = reinterpret_cast<internal::Object*>(isolate);
+ values[T::kReturnValueIndex] = isolate->heap()->the_hole_value();
+ ASSERT(values[T::kCalleeIndex]->IsJSFunction());
+ ASSERT(values[T::kHolderIndex]->IsHeapObject());
+ ASSERT(values[T::kIsolateIndex]->IsSmi());
+ }
+
+ /*
+ * The following Call function wraps the calling of all callbacks to handle
+ * calling either the old or the new style callbacks depending on which one
+ * has been registered.
+ * For old callbacks which return an empty handle, the ReturnValue is checked
+ * and used if it's been set to anything inside the callback.
+ * New style callbacks always use the return value.
+ */
+ v8::Handle<v8::Value> Call(InvocationCallback f);
private:
- Object* values_[4];
+ internal::Object** argv_;
+ int argc_;
+ bool is_construct_call_;
};
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index 2678a5a..0102f33 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -2473,6 +2473,23 @@
}
+void Assembler::vcvt_f64_s32(const DwVfpRegister dst,
+ int fraction_bits,
+ const Condition cond) {
+ // Instruction details available in ARM DDI 0406C.b, A8-874.
+ // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | 1010(19-16) | Vd(15-12) |
+ // 101(11-9) | sf=1(8) | sx=1(7) | 1(6) | i(5) | 0(4) | imm4(3-0)
+ ASSERT(fraction_bits > 0 && fraction_bits <= 32);
+ ASSERT(CpuFeatures::IsSupported(VFP3));
+ int vd, d;
+ dst.split_code(&vd, &d);
+ int i = ((32 - fraction_bits) >> 4) & 1;
+ int imm4 = (32 - fraction_bits) & 0xf;
+ emit(cond | 0xE*B24 | B23 | d*B22 | 0x3*B20 | B19 | 0x2*B16 |
+ vd*B12 | 0x5*B9 | B8 | B7 | B6 | i*B5 | imm4);
+}
+
+
void Assembler::vneg(const DwVfpRegister dst,
const DwVfpRegister src,
const Condition cond) {
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index 0fd5186..3000860 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -459,6 +459,17 @@
// rm <shift_op> shift_imm
explicit Operand(Register rm, ShiftOp shift_op, int shift_imm);
+ INLINE(static Operand SmiUntag(Register rm)) {
+ return Operand(rm, ASR, kSmiTagSize);
+ }
+ INLINE(static Operand PointerOffsetFromSmiKey(Register key)) {
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+ return Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize);
+ }
+ INLINE(static Operand DoubleOffsetFromSmiKey(Register key)) {
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kDoubleSizeLog2);
+ return Operand(key, LSL, kDoubleSizeLog2 - kSmiTagSize);
+ }
// rm <shift_op> rs
explicit Operand(Register rm, ShiftOp shift_op, Register rs);
@@ -515,6 +526,12 @@
// [rn], +/- rm <shift_op> shift_imm PostIndex/NegPostIndex
explicit MemOperand(Register rn, Register rm,
ShiftOp shift_op, int shift_imm, AddrMode am = Offset);
+ INLINE(static MemOperand PointerAddressFromSmiKey(Register array,
+ Register key,
+ AddrMode am = Offset)) {
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+ return MemOperand(array, key, LSL, kPointerSizeLog2 - kSmiTagSize, am);
+ }
void set_offset(int32_t offset) {
ASSERT(rm_.is(no_reg));
@@ -1032,6 +1049,9 @@
const DwVfpRegister src,
VFPConversionMode mode = kDefaultRoundToZero,
const Condition cond = al);
+ void vcvt_f64_s32(const DwVfpRegister dst,
+ int fraction_bits,
+ const Condition cond = al);
void vneg(const DwVfpRegister dst,
const DwVfpRegister src,
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index 121f3c2..6333924 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -215,12 +215,9 @@
// Allocate the JSArray object together with space for a FixedArray with the
// requested number of elements.
- STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ mov(elements_array_end,
Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
- __ add(elements_array_end,
- elements_array_end,
- Operand(array_size, ASR, kSmiTagSize));
+ __ add(elements_array_end, elements_array_end, Operand::SmiUntag(array_size));
__ Allocate(elements_array_end,
result,
scratch1,
@@ -249,7 +246,6 @@
FieldMemOperand(result, JSArray::kElementsOffset));
// Clear the heap tag on the elements array.
- STATIC_ASSERT(kSmiTag == 0);
__ sub(elements_array_storage,
elements_array_storage,
Operand(kHeapObjectTag));
@@ -261,7 +257,6 @@
__ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
__ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
- STATIC_ASSERT(kSmiTag == 0);
ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
__ str(array_size,
MemOperand(elements_array_storage, kPointerSize, PostIndex));
@@ -270,10 +265,9 @@
// result: JSObject
// elements_array_storage: elements array element storage
// array_size: smi-tagged size of elements array
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
__ add(elements_array_end,
elements_array_storage,
- Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize));
+ Operand::PointerOffsetFromSmiKey(array_size));
// Fill the allocated FixedArray with the hole value if requested.
// result: JSObject
@@ -335,7 +329,6 @@
__ bind(&argc_one_or_more);
__ cmp(r0, Operand(1));
__ b(ne, &argc_two_or_more);
- STATIC_ASSERT(kSmiTag == 0);
__ ldr(r2, MemOperand(sp)); // Get the argument from the stack.
__ tst(r2, r2);
__ b(ne, &not_empty_array);
@@ -344,6 +337,7 @@
__ b(&empty_array);
__ bind(&not_empty_array);
+ STATIC_ASSERT(kSmiTag == 0);
__ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
__ b(ne, call_generic_code);
@@ -375,7 +369,7 @@
// Handle construction of an array from a list of arguments.
__ bind(&argc_two_or_more);
- __ mov(r2, Operand(r0, LSL, kSmiTagSize)); // Convet argc to a smi.
+ __ SmiTag(r2, r0);
// r0: argc
// r1: constructor
@@ -478,7 +472,7 @@
if (FLAG_debug_code) {
// Initial map for the builtin InternalArray functions should be maps.
__ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
- __ tst(r2, Operand(kSmiTagMask));
+ __ SmiTst(r2);
__ Assert(ne, "Unexpected initial map for InternalArray function");
__ CompareObjectType(r2, r3, r4, MAP_TYPE);
__ Assert(eq, "Unexpected initial map for InternalArray function");
@@ -512,7 +506,7 @@
if (FLAG_debug_code) {
// Initial map for the builtin Array functions should be maps.
__ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
- __ tst(r2, Operand(kSmiTagMask));
+ __ SmiTst(r2);
__ Assert(ne, "Unexpected initial map for Array function");
__ CompareObjectType(r2, r3, r4, MAP_TYPE);
__ Assert(eq, "Unexpected initial map for Array function");
@@ -545,7 +539,7 @@
// Array functions which always have a map.
// Initial map for the builtin Array function should be a map.
__ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
- __ tst(r3, Operand(kSmiTagMask));
+ __ SmiTst(r3);
__ Assert(ne, "Unexpected initial map for Array function");
__ CompareObjectType(r3, r3, r4, MAP_TYPE);
__ Assert(eq, "Unexpected initial map for Array function");
@@ -778,7 +772,7 @@
FrameScope scope(masm, StackFrame::CONSTRUCT);
// Preserve the two incoming parameters on the stack.
- __ mov(r0, Operand(r0, LSL, kSmiTagSize));
+ __ SmiTag(r0);
__ push(r0); // Smi-tagged arguments count.
__ push(r1); // Constructor function.
@@ -931,7 +925,7 @@
ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
__ str(r6, MemOperand(r2, kPointerSize, PostIndex));
ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
- __ mov(r0, Operand(r3, LSL, kSmiTagSize));
+ __ SmiTag(r0, r3);
__ str(r0, MemOperand(r2, kPointerSize, PostIndex));
// Initialize the fields to undefined.
@@ -1004,7 +998,7 @@
__ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
// Set up number of arguments for function call below
- __ mov(r0, Operand(r3, LSR, kSmiTagSize));
+ __ SmiUntag(r0, r3);
// Copy arguments and receiver to the expression stack.
// r0: number of arguments
@@ -1459,7 +1453,7 @@
{
// Enter an internal frame in order to preserve argument count.
FrameScope scope(masm, StackFrame::INTERNAL);
- __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged.
+ __ SmiTag(r0);
__ push(r0);
__ push(r2);
@@ -1467,7 +1461,7 @@
__ mov(r2, r0);
__ pop(r0);
- __ mov(r0, Operand(r0, ASR, kSmiTagSize));
+ __ SmiUntag(r0);
// Exit the internal frame.
}
@@ -1570,7 +1564,7 @@
__ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2,
FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
- __ mov(r2, Operand(r2, ASR, kSmiTagSize));
+ __ SmiUntag(r2);
__ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
__ SetCallKind(r5, CALL_AS_METHOD);
__ cmp(r2, r0); // Check formal and actual parameter counts.
@@ -1609,7 +1603,7 @@
// here which will cause r2 to become negative.
__ sub(r2, sp, r2);
// Check if the arguments will overflow the stack.
- __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ cmp(r2, Operand::PointerOffsetFromSmiKey(r0));
__ b(gt, &okay); // Signed comparison.
// Out of stack space.
@@ -1719,7 +1713,7 @@
// Invoke the function.
Label call_proxy;
ParameterCount actual(r0);
- __ mov(r0, Operand(r0, ASR, kSmiTagSize));
+ __ SmiUntag(r0);
__ ldr(r1, MemOperand(fp, kFunctionOffset));
__ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
__ b(ne, &call_proxy);
@@ -1748,7 +1742,7 @@
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
- __ mov(r0, Operand(r0, LSL, kSmiTagSize));
+ __ SmiTag(r0);
__ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
__ add(fp, sp, Operand(3 * kPointerSize));
@@ -1764,7 +1758,7 @@
__ ldr(r1, MemOperand(fp, -3 * kPointerSize));
__ mov(sp, fp);
__ ldm(ia_w, sp, fp.bit() | lr.bit());
- __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
__ add(sp, sp, Operand(kPointerSize)); // adjust for receiver
}
@@ -1795,7 +1789,7 @@
// r1: function
// r2: expected number of arguments
// r3: code entry to call
- __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
// adjust for return address and receiver
__ add(r0, r0, Operand(2 * kPointerSize));
__ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));
@@ -1826,7 +1820,7 @@
// r1: function
// r2: expected number of arguments
// r3: code entry to call
- __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
// Copy the arguments (including the receiver) to the new stack frame.
// r0: copy start address
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 3521589..c667c90 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -321,13 +321,13 @@
__ b(eq, &install_unoptimized);
__ sub(r4, r4, Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
__ add(r5, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ add(r5, r5, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r5, r5, Operand::PointerOffsetFromSmiKey(r4));
__ ldr(r5, MemOperand(r5));
__ cmp(r2, r5);
__ b(ne, &loop);
// Hit: fetch the optimized code.
__ add(r5, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ add(r5, r5, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r5, r5, Operand::PointerOffsetFromSmiKey(r4));
__ add(r5, r5, Operand(kPointerSize));
__ ldr(r4, MemOperand(r5));
@@ -519,8 +519,7 @@
Register mantissa = result2_;
Label not_special;
- // Convert from Smi to integer.
- __ mov(source_, Operand(source_, ASR, kSmiTagSize));
+ __ SmiUntag(source_);
// Move sign bit from source to destination. This works because the sign bit
// in the exponent word of the double has the same position and polarity as
// the 2's complement sign bit in a Smi.
@@ -770,7 +769,7 @@
// Lhs is a smi, rhs is a number.
// Convert lhs to a double in d7.
- __ SmiToDoubleVFPRegister(lhs, d7, r7, s15);
+ __ SmiToDouble(d7, lhs);
// Load the double from rhs, tagged HeapNumber r0, to d6.
__ sub(r7, rhs, Operand(kHeapObjectTag));
__ vldr(d6, r7, HeapNumber::kValueOffset);
@@ -801,7 +800,7 @@
__ sub(r7, lhs, Operand(kHeapObjectTag));
__ vldr(d7, r7, HeapNumber::kValueOffset);
// Convert rhs to a double in d6 .
- __ SmiToDoubleVFPRegister(rhs, d6, r7, s13);
+ __ SmiToDouble(d6, rhs);
// Fall through to both_loaded_as_doubles.
}
@@ -1228,7 +1227,7 @@
if (types_.Contains(SMI)) {
// Smis: 0 -> false, all other -> true
- __ tst(tos_, Operand(kSmiTagMask));
+ __ SmiTst(tos_);
// tos_ contains the correct return value already
__ Ret(eq);
} else if (types_.NeedsMap()) {
@@ -1533,7 +1532,7 @@
__ b(mi, &try_float);
// Tag the result as a smi and we're done.
- __ mov(r0, Operand(r1, LSL, kSmiTagSize));
+ __ SmiTag(r0, r1);
__ Ret();
// Try to store the result in a heap number.
@@ -1880,9 +1879,7 @@
__ GetLeastBitsFromSmi(scratch2, right, 5);
__ mov(scratch1, Operand(scratch1, LSL, scratch2));
// Check that the signed result fits in a Smi.
- __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
- __ b(mi, ¬_smi_result);
- __ SmiTag(right, scratch1);
+ __ TrySmiTag(right, scratch1, ¬_smi_result);
__ Ret();
break;
default:
@@ -1944,12 +1941,8 @@
// Load left and right operands into d0 and d1.
if (smi_operands) {
- __ SmiUntag(scratch1, right);
- __ vmov(d1.high(), scratch1);
- __ vcvt_f64_s32(d1, d1.high());
- __ SmiUntag(scratch1, left);
- __ vmov(d0.high(), scratch1);
- __ vcvt_f64_s32(d0, d0.high());
+ __ SmiToDouble(d1, right);
+ __ SmiToDouble(d0, left);
} else {
// Load right operand into d1.
if (right_type == BinaryOpIC::INT32) {
@@ -2060,9 +2053,7 @@
}
// Check that the *signed* result fits in a smi.
- __ add(r3, r2, Operand(0x40000000), SetCC);
- __ b(mi, &result_not_a_smi);
- __ SmiTag(r0, r2);
+ __ TrySmiTag(r0, r2, &result_not_a_smi);
__ Ret();
// Allocate new heap number for result.
@@ -2122,7 +2113,6 @@
// Perform combined smi check on both operands.
__ orr(scratch1, left, Operand(right));
- STATIC_ASSERT(kSmiTag == 0);
__ JumpIfNotSmi(scratch1, ¬_smis);
// If the smi-smi operation results in a smi return is generated.
@@ -2411,12 +2401,9 @@
UNREACHABLE();
}
- // Check if the result fits in a smi.
- __ add(scratch1, r2, Operand(0x40000000), SetCC);
- // If not try to return a heap number. (We know the result is an int32.)
- __ b(mi, &return_heap_number);
- // Tag the result and return.
- __ SmiTag(r0, r2);
+ // Check if the result fits in a smi. If not try to return a heap number.
+ // (We know the result is an int32).
+ __ TrySmiTag(r0, r2, &return_heap_number);
__ Ret();
__ bind(&return_heap_number);
@@ -2644,7 +2631,8 @@
// Input is a smi. Convert to double and load the low and high words
// of the double into r2, r3.
- __ IntegerToDoubleConversionWithVFP3(r0, r3, r2);
+ __ SmiToDouble(d7, r0);
+ __ vmov(r2, r3, d7);
__ b(&loaded);
__ bind(&input_not_smi);
@@ -3842,7 +3830,7 @@
// Read the argument from the stack and return it.
__ sub(r3, r0, r1);
- __ add(r3, fp, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r3, fp, Operand::PointerOffsetFromSmiKey(r3));
__ ldr(r0, MemOperand(r3, kDisplacement));
__ Jump(lr);
@@ -3856,7 +3844,7 @@
// Read the argument from the adaptor frame and return it.
__ sub(r3, r0, r1);
- __ add(r3, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r3));
__ ldr(r0, MemOperand(r3, kDisplacement));
__ Jump(lr);
@@ -4109,7 +4097,7 @@
__ bind(&adaptor_frame);
__ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ str(r1, MemOperand(sp, 0));
- __ add(r3, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r1));
__ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
__ str(r3, MemOperand(sp, 1 * kPointerSize));
@@ -4117,9 +4105,8 @@
// of the arguments object and the elements array in words.
Label add_arguments_object;
__ bind(&try_allocate);
- __ cmp(r1, Operand::Zero());
+ __ SmiUntag(r1, SetCC);
__ b(eq, &add_arguments_object);
- __ mov(r1, Operand(r1, LSR, kSmiTagSize));
__ add(r1, r1, Operand(FixedArray::kHeaderSize / kPointerSize));
__ bind(&add_arguments_object);
__ add(r1, r1, Operand(Heap::kArgumentsObjectSizeStrict / kPointerSize));
@@ -4158,8 +4145,7 @@
__ LoadRoot(r3, Heap::kFixedArrayMapRootIndex);
__ str(r3, FieldMemOperand(r4, FixedArray::kMapOffset));
__ str(r1, FieldMemOperand(r4, FixedArray::kLengthOffset));
- // Untag the length for the loop.
- __ mov(r1, Operand(r1, LSR, kSmiTagSize));
+ __ SmiUntag(r1);
// Copy the fixed array slots.
Label loop;
@@ -4228,7 +4214,6 @@
// Check that the first argument is a JSRegExp object.
__ ldr(r0, MemOperand(sp, kJSRegExpOffset));
- STATIC_ASSERT(kSmiTag == 0);
__ JumpIfSmi(r0, &runtime);
__ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
__ b(ne, &runtime);
@@ -4236,7 +4221,7 @@
// Check that the RegExp has been compiled (data contains a fixed array).
__ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset));
if (FLAG_debug_code) {
- __ tst(regexp_data, Operand(kSmiTagMask));
+ __ SmiTst(regexp_data);
__ Check(ne, "Unexpected type for RegExp data, FixedArray expected");
__ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE);
__ Check(eq, "Unexpected type for RegExp data, FixedArray expected");
@@ -4341,7 +4326,7 @@
__ ldr(r3, FieldMemOperand(r3, String::kLengthOffset));
__ cmp(r3, Operand(r1));
__ b(ls, &runtime);
- __ mov(r1, Operand(r1, ASR, kSmiTagSize));
+ __ SmiUntag(r1);
STATIC_ASSERT(4 == kOneByteStringTag);
STATIC_ASSERT(kTwoByteStringTag == 0);
@@ -4416,7 +4401,7 @@
__ add(r2, r9, Operand(r1, LSL, r3));
__ ldr(r8, FieldMemOperand(subject, String::kLengthOffset));
- __ mov(r8, Operand(r8, ASR, kSmiTagSize));
+ __ SmiUntag(r8);
__ add(r3, r9, Operand(r8, LSL, r3));
// Argument 2 (r1): Previous index.
@@ -4503,13 +4488,13 @@
__ ldr(r0,
FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset));
__ add(r2, r1, Operand(RegExpImpl::kLastMatchOverhead));
- __ cmp(r2, Operand(r0, ASR, kSmiTagSize));
+ __ cmp(r2, Operand::SmiUntag(r0));
__ b(gt, &runtime);
// r1: number of capture registers
// r4: subject string
// Store the capture count.
- __ mov(r2, Operand(r1, LSL, kSmiTagSize + kSmiShiftSize)); // To smi.
+ __ SmiTag(r2, r1);
__ str(r2, FieldMemOperand(last_match_info_elements,
RegExpImpl::kLastCaptureCountOffset));
// Store last subject and last input.
@@ -4553,7 +4538,7 @@
// Read the value from the static offsets vector buffer.
__ ldr(r3, MemOperand(r2, kPointerSize, PostIndex));
// Store the smi value in the last match info.
- __ mov(r3, Operand(r3, LSL, kSmiTagSize));
+ __ SmiTag(r3);
__ str(r3, MemOperand(r0, kPointerSize, PostIndex));
__ jmp(&next_capture);
__ bind(&done);
@@ -4601,7 +4586,7 @@
// (9) Sliced string. Replace subject with parent. Go to (4).
// Load offset into r9 and replace subject string with parent.
__ ldr(r9, FieldMemOperand(subject, SlicedString::kOffsetOffset));
- __ mov(r9, Operand(r9, ASR, kSmiTagSize));
+ __ SmiUntag(r9);
__ ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
__ jmp(&check_underlying); // Go to (4).
#endif // V8_INTERPRETED_REGEXP
@@ -4628,7 +4613,7 @@
// FixedArray.
int objects_size =
(JSRegExpResult::kSize + FixedArray::kHeaderSize) / kPointerSize;
- __ mov(r5, Operand(r1, LSR, kSmiTagSize + kSmiShiftSize));
+ __ SmiUntag(r5, r1);
__ add(r2, r5, Operand(objects_size));
__ Allocate(
r2, // In: Size, in words.
@@ -4671,7 +4656,7 @@
__ mov(r2, Operand(factory->fixed_array_map()));
__ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
// Set FixedArray length.
- __ mov(r6, Operand(r5, LSL, kSmiTagSize));
+ __ SmiTag(r6, r5);
__ str(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
// Fill contents of fixed-array with undefined.
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
@@ -4988,7 +4973,7 @@
__ cmp(ip, Operand(index_));
__ b(ls, index_out_of_range_);
- __ mov(index_, Operand(index_, ASR, kSmiTagSize));
+ __ SmiUntag(index_);
StringCharLoadGenerator::Generate(masm,
object_,
@@ -4996,7 +4981,7 @@
result_,
&call_runtime_);
- __ mov(result_, Operand(result_, LSL, kSmiTagSize));
+ __ SmiTag(result_);
__ bind(&exit_);
}
@@ -5042,7 +5027,7 @@
// is too complex (e.g., when the string needs to be flattened).
__ bind(&call_runtime_);
call_helper.BeforeCall(masm);
- __ mov(index_, Operand(index_, LSL, kSmiTagSize));
+ __ SmiTag(index_);
__ Push(object_, index_);
__ CallRuntime(Runtime::kStringCharCodeAt, 2);
__ Move(result_, r0);
@@ -5068,8 +5053,7 @@
__ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
// At this point code register contains smi tagged ASCII char code.
- STATIC_ASSERT(kSmiTag == 0);
- __ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(result_, result_, Operand::PointerOffsetFromSmiKey(code_));
__ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
__ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
__ b(eq, &slow_case_);
@@ -5494,9 +5478,8 @@
// Make sure first argument is a string.
__ ldr(r0, MemOperand(sp, kStringOffset));
- STATIC_ASSERT(kSmiTag == 0);
// Do a JumpIfSmi, but fold its jump into the subsequent string test.
- __ tst(r0, Operand(kSmiTagMask));
+ __ SmiTst(r0);
Condition is_string = masm->IsObjectStringType(r0, r1, ne);
ASSERT(is_string == eq);
__ b(NegateCondition(is_string), &runtime);
@@ -5893,8 +5876,8 @@
__ bind(&strings_not_empty);
}
- __ mov(r2, Operand(r2, ASR, kSmiTagSize));
- __ mov(r3, Operand(r3, ASR, kSmiTagSize));
+ __ SmiUntag(r2);
+ __ SmiUntag(r3);
// Both strings are non-empty.
// r0: first string
// r1: second string
@@ -6236,7 +6219,7 @@
} else {
// Untag before subtracting to avoid handling overflow.
__ SmiUntag(r1);
- __ sub(r0, r1, SmiUntagOperand(r0));
+ __ sub(r0, r1, Operand::SmiUntag(r0));
}
__ Ret();
@@ -6270,10 +6253,7 @@
__ vldr(d1, r2, HeapNumber::kValueOffset);
__ b(&left);
__ bind(&right_smi);
- __ SmiUntag(r2, r0); // Can't clobber r0 yet.
- SwVfpRegister single_scratch = d2.low();
- __ vmov(single_scratch, r2);
- __ vcvt_f64_s32(d1, single_scratch);
+ __ SmiToDouble(d1, r0);
__ bind(&left);
__ JumpIfSmi(r1, &left_smi);
@@ -6283,10 +6263,7 @@
__ vldr(d0, r2, HeapNumber::kValueOffset);
__ b(&done);
__ bind(&left_smi);
- __ SmiUntag(r2, r1); // Can't clobber r1 yet.
- single_scratch = d3.low();
- __ vmov(single_scratch, r2);
- __ vcvt_f64_s32(d0, single_scratch);
+ __ SmiToDouble(d0, r1);
__ bind(&done);
// Compare operands.
@@ -6697,7 +6674,7 @@
// Compute the capacity mask.
__ ldr(scratch1, FieldMemOperand(elements, kCapacityOffset));
- __ mov(scratch1, Operand(scratch1, ASR, kSmiTagSize)); // convert smi to int
+ __ SmiUntag(scratch1);
__ sub(scratch1, scratch1, Operand(1));
// Generate an unrolled loop that performs a few probes before
@@ -6778,7 +6755,7 @@
Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
__ ldr(mask, FieldMemOperand(dictionary, kCapacityOffset));
- __ mov(mask, Operand(mask, ASR, kSmiTagSize));
+ __ SmiUntag(mask);
__ sub(mask, mask, Operand(1));
__ ldr(hash, FieldMemOperand(key, Name::kHashFieldOffset));
@@ -7176,7 +7153,7 @@
// Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
__ bind(&fast_elements);
__ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
- __ add(r6, r5, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r6, r5, Operand::PointerOffsetFromSmiKey(r3));
__ add(r6, r6, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ str(r0, MemOperand(r6, 0));
// Update the write barrier for the array store.
@@ -7188,7 +7165,7 @@
// and value is Smi.
__ bind(&smi_element);
__ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
- __ add(r6, r5, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r6, r5, Operand::PointerOffsetFromSmiKey(r3));
__ str(r0, FieldMemOperand(r6, FixedArray::kHeaderSize));
__ Ret();
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index 9d773d4..7bf253a 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -440,7 +440,7 @@
Label indirect_string_loaded;
__ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
__ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset));
- __ add(index, index, Operand(result, ASR, kSmiTagSize));
+ __ add(index, index, Operand::SmiUntag(result));
__ jmp(&indirect_string_loaded);
// Handle cons strings.
@@ -510,9 +510,9 @@
Register index,
Register value) {
if (FLAG_debug_code) {
- __ tst(index, Operand(kSmiTagMask));
+ __ SmiTst(index);
__ Check(eq, "Non-smi index");
- __ tst(value, Operand(kSmiTagMask));
+ __ SmiTst(value);
__ Check(eq, "Non-smi value");
__ ldr(ip, FieldMemOperand(string, String::kLengthOffset));
@@ -540,10 +540,10 @@
STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
if (encoding == String::ONE_BYTE_ENCODING) {
// Smis are tagged by left shift by 1, thus LSR by 1 to smi-untag inline.
- __ strb(value, MemOperand(ip, index, LSR, 1));
+ __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
} else {
// No need to untag a smi for two-byte addressing.
- __ strh(value, MemOperand(ip, index));
+ __ strh(value, MemOperand(ip, index)); // LSL(1 - kSmiTagSize).
}
}
diff --git a/src/arm/debug-arm.cc b/src/arm/debug-arm.cc
index 6bfaf41..2f0a7c4 100644
--- a/src/arm/debug-arm.cc
+++ b/src/arm/debug-arm.cc
@@ -132,7 +132,7 @@
__ tst(reg, Operand(0xc0000000));
__ Assert(eq, "Unable to encode value as smi");
}
- __ mov(reg, Operand(reg, LSL, kSmiTagSize));
+ __ SmiTag(reg);
}
}
__ stm(db_w, sp, object_regs | non_object_regs);
@@ -154,7 +154,7 @@
int r = JSCallerSavedCode(i);
Register reg = { r };
if ((non_object_regs & (1 << r)) != 0) {
- __ mov(reg, Operand(reg, LSR, kSmiTagSize));
+ __ SmiUntag(reg);
}
if (FLAG_debug_code &&
(((object_regs |non_object_regs) & (1 << r)) == 0)) {
diff --git a/src/arm/disasm-arm.cc b/src/arm/disasm-arm.cc
index b84d355..f55552d 100644
--- a/src/arm/disasm-arm.cc
+++ b/src/arm/disasm-arm.cc
@@ -1102,6 +1102,7 @@
// vmov: Rt = Sn
// vcvt: Dd = Sm
// vcvt: Sd = Dm
+// vcvt.f64.s32 Dd, Dd, #<fbits>
// Dd = vabs(Dm)
// Dd = vneg(Dm)
// Dd = vadd(Dn, Dm)
@@ -1138,6 +1139,13 @@
DecodeVCVTBetweenDoubleAndSingle(instr);
} else if ((instr->Opc2Value() == 0x8) && (instr->Opc3Value() & 0x1)) {
DecodeVCVTBetweenFloatingPointAndInteger(instr);
+ } else if ((instr->Opc2Value() == 0xA) && (instr->Opc3Value() == 0x3) &&
+ (instr->Bit(8) == 1)) {
+ // vcvt.f64.s32 Dd, Dd, #<fbits>
+ int fraction_bits = 32 - ((instr->Bit(5) << 4) | instr->Bits(3, 0));
+ Format(instr, "vcvt'cond.f64.s32 'Dd, 'Dd");
+ out_buffer_pos_ += OS::SNPrintF(out_buffer_ + out_buffer_pos_,
+ ", #%d", fraction_bits);
} else if (((instr->Opc2Value() >> 1) == 0x6) &&
(instr->Opc3Value() & 0x1)) {
DecodeVCVTBetweenFloatingPointAndInteger(instr);
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 5b6ce4c..33a499c 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -1198,7 +1198,7 @@
// Get the current entry of the array into register r3.
__ ldr(r2, MemOperand(sp, 2 * kPointerSize));
__ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));
// Get the expected map from the stack or a smi in the
// permanent slow case into register r2.
@@ -2263,23 +2263,18 @@
// BinaryOpStub::GenerateSmiSmiOperation for comments.
switch (op) {
case Token::SAR:
- __ b(&stub_call);
__ GetLeastBitsFromSmi(scratch1, right, 5);
__ mov(right, Operand(left, ASR, scratch1));
__ bic(right, right, Operand(kSmiTagMask));
break;
case Token::SHL: {
- __ b(&stub_call);
__ SmiUntag(scratch1, left);
__ GetLeastBitsFromSmi(scratch2, right, 5);
__ mov(scratch1, Operand(scratch1, LSL, scratch2));
- __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
- __ b(mi, &stub_call);
- __ SmiTag(right, scratch1);
+ __ TrySmiTag(right, scratch1, &stub_call);
break;
}
case Token::SHR: {
- __ b(&stub_call);
__ SmiUntag(scratch1, left);
__ GetLeastBitsFromSmi(scratch2, right, 5);
__ mov(scratch1, Operand(scratch1, LSR, scratch2));
@@ -2858,7 +2853,7 @@
&if_true, &if_false, &fall_through);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
- __ tst(r0, Operand(kSmiTagMask));
+ __ SmiTst(r0);
Split(eq, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2879,7 +2874,7 @@
&if_true, &if_false, &fall_through);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
- __ tst(r0, Operand(kSmiTagMask | 0x80000000));
+ __ NonNegativeSmiTst(r0);
Split(eq, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -3006,16 +3001,13 @@
__ LoadInstanceDescriptors(r1, r4);
// r4: descriptor array.
// r3: valid entries in the descriptor array.
- STATIC_ASSERT(kSmiTag == 0);
- STATIC_ASSERT(kSmiTagSize == 1);
- STATIC_ASSERT(kPointerSize == 4);
__ mov(ip, Operand(DescriptorArray::kDescriptorSize));
__ mul(r3, r3, ip);
// Calculate location of the first key name.
__ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
// Calculate the end of the descriptor array.
__ mov(r2, r4);
- __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
// Loop through all the keys in the descriptor array. If one of these is the
// string "valueOf" the result is false.
@@ -3783,12 +3775,11 @@
Label done, not_found;
// tmp now holds finger offset as a smi.
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
__ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
// r2 now holds finger offset as a smi.
__ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
// r3 now points to the start of fixed array elements.
- __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
+ __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
// Note side effect of PreIndex: r3 now points to the key of the pair.
__ cmp(key, r2);
__ b(ne, ¬_found);
@@ -4751,9 +4742,7 @@
__ push(result_register());
// Cook return address in link register to stack (smi encoded Code* delta)
__ sub(r1, lr, Operand(masm_->CodeObject()));
- ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
- STATIC_ASSERT(kSmiTag == 0);
- __ add(r1, r1, Operand(r1)); // Convert to smi.
+ __ SmiTag(r1);
// Store result register while executing finally block.
__ push(r1);
@@ -4807,8 +4796,7 @@
// Uncook return address and return.
__ pop(result_register());
- ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
- __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value.
+ __ SmiUntag(r1);
__ add(pc, r1, Operand(masm_->CodeObject()));
}
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index acccadc..14c4794 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -290,10 +290,7 @@
__ b(hs, out_of_range);
// Fast case: Do the load.
__ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- // The key is a smi.
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
- __ ldr(scratch2,
- MemOperand(scratch1, key, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ ldr(scratch2, MemOperand::PointerAddressFromSmiKey(scratch1, key));
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(scratch2, ip);
// In case the loaded value is the_hole we have to consult GetProperty
@@ -567,7 +564,7 @@
__ LoadRoot(ip, Heap::kHashTableMapRootIndex);
__ cmp(r3, ip);
__ b(ne, &slow_load);
- __ mov(r0, Operand(r2, ASR, kSmiTagSize));
+ __ SmiUntag(r0, r2);
// r0: untagged index
__ LoadFromNumberDictionary(&slow_load, r4, r2, r1, r0, r3, r5);
__ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, r0, r3);
@@ -960,7 +957,7 @@
__ LoadRoot(ip, Heap::kHashTableMapRootIndex);
__ cmp(r3, ip);
__ b(ne, &slow);
- __ mov(r2, Operand(r0, ASR, kSmiTagSize));
+ __ SmiUntag(r2, r0);
__ LoadFromNumberDictionary(&slow, r4, r0, r0, r2, r3, r5);
__ Ret();
@@ -1133,7 +1130,7 @@
__ JumpIfSmi(r1, &slow);
// Check that the key is an array index, that is Uint32.
- __ tst(r0, Operand(kSmiTagMask | kSmiSignMask));
+ __ NonNegativeSmiTst(r0);
__ b(ne, &slow);
// Get the map of the receiver.
@@ -1321,8 +1318,7 @@
}
// It's irrelevant whether array is smi-only or not when writing a smi.
__ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ str(value, MemOperand(address));
+ __ str(value, MemOperand::PointerAddressFromSmiKey(address, key));
__ Ret();
__ bind(&non_smi_value);
@@ -1338,7 +1334,7 @@
__ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
}
__ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(address, address, Operand::PointerOffsetFromSmiKey(key));
__ str(value, MemOperand(address));
// Update write barrier for the elements array address.
__ mov(scratch_value, value); // Preserve the value which is returned.
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 96adb2f..09a0e9c 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -1448,7 +1448,6 @@
const Register left = ToRegister(instr->left());
const Register right = ToRegister(instr->right());
- const Register scratch = scratch0();
const Register result = ToRegister(instr->result());
// Check for x / 0.
@@ -1497,8 +1496,8 @@
// to be tagged to Smis. If that is not possible, deoptimize.
DeferredDivI* deferred = new(zone()) DeferredDivI(this, instr);
- __ TrySmiTag(left, &deoptimize, scratch);
- __ TrySmiTag(right, &deoptimize, scratch);
+ __ TrySmiTag(left, &deoptimize);
+ __ TrySmiTag(right, &deoptimize);
__ b(al, deferred->entry());
__ bind(deferred->exit());
@@ -1950,7 +1949,7 @@
Label done;
// If the object is a smi return the object.
- __ tst(input, Operand(kSmiTagMask));
+ __ SmiTst(input);
__ Move(result, input, eq);
__ b(eq, &done);
@@ -1975,7 +1974,7 @@
ASSERT(!scratch.is(scratch0()));
ASSERT(!scratch.is(object));
- __ tst(object, Operand(kSmiTagMask));
+ __ SmiTst(object);
DeoptimizeIf(eq, instr->environment());
__ CompareObjectType(object, scratch, scratch, JS_DATE_TYPE);
DeoptimizeIf(ne, instr->environment());
@@ -2261,7 +2260,7 @@
__ JumpIfSmi(reg, true_label);
} else if (expected.NeedsMap()) {
// If we need a map later and have a Smi -> deopt.
- __ tst(reg, Operand(kSmiTagMask));
+ __ SmiTst(reg);
DeoptimizeIf(eq, instr->environment());
}
@@ -2497,7 +2496,7 @@
int false_block = chunk_->LookupDestination(instr->false_block_id());
Register input_reg = EmitLoadRegister(instr->value(), ip);
- __ tst(input_reg, Operand(kSmiTagMask));
+ __ SmiTst(input_reg);
EmitBranch(true_block, false_block, eq);
}
@@ -3368,8 +3367,7 @@
// during bound check elimination with the index argument to the bounds
// check, which can be tagged, so that case must be handled here, too.
if (instr->hydrogen()->key()->representation().IsTagged()) {
- __ add(scratch, elements,
- Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(scratch, elements, Operand::PointerOffsetFromSmiKey(key));
} else {
__ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
}
@@ -3380,7 +3378,7 @@
// Check for the hole value.
if (instr->hydrogen()->RequiresHoleCheck()) {
if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
- __ tst(result, Operand(kSmiTagMask));
+ __ SmiTst(result);
DeoptimizeIf(ne, instr->environment());
} else {
__ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
@@ -3523,7 +3521,7 @@
__ b(eq, &global_object);
// Deoptimize if the receiver is not a JS object.
- __ tst(receiver, Operand(kSmiTagMask));
+ __ SmiTst(receiver);
DeoptimizeIf(eq, instr->environment());
__ CompareObjectType(receiver, scratch, scratch, FIRST_SPEC_OBJECT_TYPE);
DeoptimizeIf(lt, instr->environment());
@@ -4221,7 +4219,7 @@
} else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
Register value = ToRegister(instr->value());
if (!instr->hydrogen()->value()->type().IsHeapObject()) {
- __ tst(value, Operand(kSmiTagMask));
+ __ SmiTst(value);
DeoptimizeIf(eq, instr->environment());
}
} else if (FLAG_track_double_fields && representation.IsDouble()) {
@@ -4458,8 +4456,7 @@
// during bound check elimination with the index argument to the bounds
// check, which can be tagged, so that case must be handled here, too.
if (instr->hydrogen()->key()->representation().IsTagged()) {
- __ add(scratch, elements,
- Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(scratch, elements, Operand::PointerOffsetFromSmiKey(key));
} else {
__ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
}
@@ -5144,14 +5141,14 @@
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
LOperand* input = instr->value();
- __ tst(ToRegister(input), Operand(kSmiTagMask));
+ __ SmiTst(ToRegister(input));
DeoptimizeIf(ne, instr->environment());
}
void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
LOperand* input = instr->value();
- __ tst(ToRegister(input), Operand(kSmiTagMask));
+ __ SmiTst(ToRegister(input));
DeoptimizeIf(eq, instr->environment());
}
@@ -5830,7 +5827,7 @@
__ cmp(r0, null_value);
DeoptimizeIf(eq, instr->environment());
- __ tst(r0, Operand(kSmiTagMask));
+ __ SmiTst(r0);
DeoptimizeIf(eq, instr->environment());
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
@@ -5898,8 +5895,7 @@
__ cmp(index, Operand::Zero());
__ b(lt, &out_of_object);
- STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);
- __ add(scratch, object, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(scratch, object, Operand::PointerOffsetFromSmiKey(index));
__ ldr(result, FieldMemOperand(scratch, JSObject::kHeaderSize));
__ b(&done);
@@ -5907,7 +5903,8 @@
__ bind(&out_of_object);
__ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
// Index is equal to negated out of object property index plus 1.
- __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize));
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+ __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index));
__ ldr(result, FieldMemOperand(scratch,
FixedArray::kHeaderSize - kPointerSize));
__ bind(&done);
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 6e0b4a7..a3b21a2 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -495,9 +495,7 @@
Label done;
if (smi_check == INLINE_SMI_CHECK) {
- ASSERT_EQ(0, kSmiTag);
- tst(value, Operand(kSmiTagMask));
- b(eq, &done);
+ JumpIfSmi(value, &done);
}
CheckPageFlag(value,
@@ -978,7 +976,7 @@
Heap::RootListIndex map_index,
Register scratch1,
Register scratch2) {
- mov(scratch1, Operand(length, LSL, kSmiTagSize));
+ SmiTag(scratch1, length);
LoadRoot(scratch2, map_index);
str(scratch1, FieldMemOperand(string, String::kLengthOffset));
mov(scratch1, Operand(String::kEmptyHashField));
@@ -1221,7 +1219,7 @@
ldr(expected_reg,
FieldMemOperand(code_reg,
SharedFunctionInfo::kFormalParameterCountOffset));
- mov(expected_reg, Operand(expected_reg, ASR, kSmiTagSize));
+ SmiUntag(expected_reg);
ldr(code_reg,
FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
@@ -1359,7 +1357,7 @@
mov(r2, Operand(r2, LSR, StackHandler::kKindWidth)); // Handler index.
ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2)); // Smi-tagged offset.
add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start.
- add(pc, r1, Operand(r2, ASR, kSmiTagSize)); // Jump.
+ add(pc, r1, Operand::SmiUntag(r2)); // Jump.
}
@@ -1575,7 +1573,7 @@
// Compute the capacity mask.
ldr(t1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
- mov(t1, Operand(t1, ASR, kSmiTagSize)); // convert smi to int
+ SmiUntag(t1);
sub(t1, t1, Operand(1));
// Generate an unrolled loop that performs a few probes before giving up.
@@ -2095,14 +2093,10 @@
b(&store);
bind(&smi_value);
- Register untagged_value = scratch1;
- SmiUntag(untagged_value, value_reg);
- vmov(s2, untagged_value);
- vcvt_f64_s32(d0, s2);
+ SmiToDouble(d0, value_reg);
bind(&store);
- add(scratch1, elements_reg,
- Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize));
+ add(scratch1, elements_reg, Operand::DoubleOffsetFromSmiKey(key_reg));
vstr(d0, FieldMemOperand(scratch1,
FixedDoubleArray::kHeaderSize - elements_offset));
}
@@ -2268,7 +2262,9 @@
void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
- int stack_space) {
+ int stack_space,
+ bool returns_handle,
+ int return_value_offset) {
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate());
const int kNextOffset = 0;
@@ -2314,13 +2310,20 @@
Label promote_scheduled_exception;
Label delete_allocated_handles;
Label leave_exit_frame;
+ Label return_value_loaded;
- // If result is non-zero, dereference to get the result value
- // otherwise set it to undefined.
- cmp(r0, Operand::Zero());
- LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
- ldr(r0, MemOperand(r0), ne);
-
+ if (returns_handle) {
+ Label load_return_value;
+ cmp(r0, Operand::Zero());
+ b(eq, &load_return_value);
+ // Dereference returned value.
+ ldr(r0, MemOperand(r0));
+ b(&return_value_loaded);
+ bind(&load_return_value);
+ }
+ // Load value from ReturnValue.
+ ldr(r0, MemOperand(fp, return_value_offset*kPointerSize));
+ bind(&return_value_loaded);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
str(r4, MemOperand(r7, kNextOffset));
@@ -2390,70 +2393,21 @@
(1 << String::kArrayIndexValueBits));
// We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
// the low kHashShift bits.
- STATIC_ASSERT(kSmiTag == 0);
Ubfx(hash, hash, String::kHashShift, String::kArrayIndexValueBits);
- mov(index, Operand(hash, LSL, kSmiTagSize));
+ SmiTag(index, hash);
}
-void MacroAssembler::IntegerToDoubleConversionWithVFP3(Register inReg,
- Register outHighReg,
- Register outLowReg) {
- // ARMv7 VFP3 instructions to implement integer to double conversion.
- mov(r7, Operand(inReg, ASR, kSmiTagSize));
- vmov(s15, r7);
- vcvt_f64_s32(d7, s15);
- vmov(outLowReg, outHighReg, d7);
-}
-
-
-void MacroAssembler::ObjectToDoubleVFPRegister(Register object,
- DwVfpRegister result,
- Register scratch1,
- Register scratch2,
- Register heap_number_map,
- SwVfpRegister scratch3,
- Label* not_number,
- ObjectToDoubleFlags flags) {
- Label done;
- if ((flags & OBJECT_NOT_SMI) == 0) {
- Label not_smi;
- JumpIfNotSmi(object, ¬_smi);
- // Remove smi tag and convert to double.
- mov(scratch1, Operand(object, ASR, kSmiTagSize));
- vmov(scratch3, scratch1);
- vcvt_f64_s32(result, scratch3);
- b(&done);
- bind(¬_smi);
+void MacroAssembler::SmiToDouble(DwVfpRegister value, Register smi) {
+ ASSERT(value.code() < 16);
+ if (CpuFeatures::IsSupported(VFP3)) {
+ vmov(value.low(), smi);
+ vcvt_f64_s32(value, 1);
+ } else {
+ SmiUntag(ip, smi);
+ vmov(value.low(), ip);
+ vcvt_f64_s32(value, value.low());
}
- // Check for heap number and load double value from it.
- ldr(scratch1, FieldMemOperand(object, HeapObject::kMapOffset));
- sub(scratch2, object, Operand(kHeapObjectTag));
- cmp(scratch1, heap_number_map);
- b(ne, not_number);
- if ((flags & AVOID_NANS_AND_INFINITIES) != 0) {
- // If exponent is all ones the number is either a NaN or +/-Infinity.
- ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
- Sbfx(scratch1,
- scratch1,
- HeapNumber::kExponentShift,
- HeapNumber::kExponentBits);
- // All-one value sign extend to -1.
- cmp(scratch1, Operand(-1));
- b(eq, not_number);
- }
- vldr(result, scratch2, HeapNumber::kValueOffset);
- bind(&done);
-}
-
-
-void MacroAssembler::SmiToDoubleVFPRegister(Register smi,
- DwVfpRegister value,
- Register scratch1,
- SwVfpRegister scratch2) {
- mov(scratch1, Operand(smi, ASR, kSmiTagSize));
- vmov(scratch2, scratch1);
- vcvt_f64_s32(value, scratch2);
}
@@ -2610,7 +2564,7 @@
if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
ubfx(dst, src, kSmiTagSize, num_least_bits);
} else {
- mov(dst, Operand(src, ASR, kSmiTagSize));
+ SmiUntag(dst, src);
and_(dst, dst, Operand((1 << num_least_bits) - 1));
}
}
@@ -3005,7 +2959,7 @@
void MacroAssembler::UntagAndJumpIfSmi(
Register dst, Register src, Label* smi_case) {
STATIC_ASSERT(kSmiTag == 0);
- mov(dst, Operand(src, ASR, kSmiTagSize), SetCC);
+ SmiUntag(dst, src, SetCC);
b(cc, smi_case); // Shifter carry is not set for a smi.
}
@@ -3013,7 +2967,7 @@
void MacroAssembler::UntagAndJumpIfNotSmi(
Register dst, Register src, Label* non_smi_case) {
STATIC_ASSERT(kSmiTag == 0);
- mov(dst, Operand(src, ASR, kSmiTagSize), SetCC);
+ SmiUntag(dst, src, SetCC);
b(cs, non_smi_case); // Shifter carry is set for a non-smi.
}
@@ -3120,7 +3074,6 @@
Register scratch2,
Label* failure) {
// Check that neither is a smi.
- STATIC_ASSERT(kSmiTag == 0);
and_(scratch1, first, Operand(second));
JumpIfSmi(scratch1, failure);
JumpIfNonSmisNotBothSequentialAsciiStrings(first,
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index 9027291..50f53b3 100644
--- a/src/arm/macro-assembler-arm.h
+++ b/src/arm/macro-assembler-arm.h
@@ -44,12 +44,6 @@
}
-inline Operand SmiUntagOperand(Register object) {
- return Operand(object, ASR, kSmiTagSize);
-}
-
-
-
// Give alias names to registers
const Register cp = { 8 }; // JavaScript context pointer
const Register kRootRegister = { 10 }; // Roots array pointer.
@@ -62,16 +56,6 @@
DONT_TAG_RESULT
};
-// Flags used for the ObjectToDoubleVFPRegister function.
-enum ObjectToDoubleFlags {
- // No special flags.
- NO_OBJECT_TO_DOUBLE_FLAGS = 0,
- // Object is known to be a non smi.
- OBJECT_NOT_SMI = 1 << 0,
- // Don't load NaNs or infinities, branch to the non number case instead.
- AVOID_NANS_AND_INFINITIES = 1 << 1
-};
-
enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };
enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };
@@ -974,31 +958,9 @@
void GetLeastBitsFromSmi(Register dst, Register src, int num_least_bits);
void GetLeastBitsFromInt32(Register dst, Register src, int mun_least_bits);
- // Uses VFP instructions to Convert a Smi to a double.
- void IntegerToDoubleConversionWithVFP3(Register inReg,
- Register outHighReg,
- Register outLowReg);
-
- // Load the value of a number object into a VFP double register. If the object
- // is not a number a jump to the label not_number is performed and the VFP
- // double register is unchanged.
- void ObjectToDoubleVFPRegister(
- Register object,
- DwVfpRegister value,
- Register scratch1,
- Register scratch2,
- Register heap_number_map,
- SwVfpRegister scratch3,
- Label* not_number,
- ObjectToDoubleFlags flags = NO_OBJECT_TO_DOUBLE_FLAGS);
-
- // Load the value of a smi object into a VFP double register. The register
- // scratch1 can be the same register as smi in which case smi will hold the
- // untagged value afterwards.
- void SmiToDoubleVFPRegister(Register smi,
- DwVfpRegister value,
- Register scratch1,
- SwVfpRegister scratch2);
+ // Load the value of a smi object into a double register.
+ // The register value must be between d0 and d15.
+ void SmiToDouble(DwVfpRegister value, Register smi);
// Check if a double can be exactly represented as a signed 32-bit integer.
// Z flag set to one if true.
@@ -1125,7 +1087,10 @@
// from handle and propagates exceptions. Restores context. stack_space
// - space to be unwound on exit (includes the call JS arguments space and
// the additional space allocated for the fast call).
- void CallApiFunctionAndReturn(ExternalReference function, int stack_space);
+ void CallApiFunctionAndReturn(ExternalReference function,
+ int stack_space,
+ bool returns_handle,
+ int return_value_offset_from_fp);
// Jump to a runtime routine.
void JumpToExternalReference(const ExternalReference& builtin);
@@ -1228,18 +1193,21 @@
// Try to convert int32 to smi. If the value is to large, preserve
// the original value and jump to not_a_smi. Destroys scratch and
// sets flags.
- void TrySmiTag(Register reg, Label* not_a_smi, Register scratch) {
- mov(scratch, reg);
- SmiTag(scratch, SetCC);
+ void TrySmiTag(Register reg, Label* not_a_smi) {
+ TrySmiTag(reg, reg, not_a_smi);
+ }
+ void TrySmiTag(Register reg, Register src, Label* not_a_smi) {
+ SmiTag(ip, src, SetCC);
b(vs, not_a_smi);
- mov(reg, scratch);
+ mov(reg, ip);
}
+
void SmiUntag(Register reg, SBit s = LeaveCC) {
- mov(reg, Operand(reg, ASR, kSmiTagSize), s);
+ mov(reg, Operand::SmiUntag(reg), s);
}
void SmiUntag(Register dst, Register src, SBit s = LeaveCC) {
- mov(dst, Operand(src, ASR, kSmiTagSize), s);
+ mov(dst, Operand::SmiUntag(src), s);
}
// Untag the source value into destination and jump if source is a smi.
@@ -1250,6 +1218,13 @@
// Souce and destination can be the same register.
void UntagAndJumpIfNotSmi(Register dst, Register src, Label* non_smi_case);
+ // Test if the register contains a smi (Z == 0 (eq) if true).
+ inline void SmiTst(Register value) {
+ tst(value, Operand(kSmiTagMask));
+ }
+ inline void NonNegativeSmiTst(Register value) {
+ tst(value, Operand(kSmiTagMask | kSmiSignMask));
+ }
// Jump if the register contains a smi.
inline void JumpIfSmi(Register value, Label* smi_label) {
tst(value, Operand(kSmiTagMask));
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index e91f079..c9db167 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -1628,10 +1628,13 @@
// This signature supports direct call in to API function native callback
// (refer to InvocationCallback in v8.h).
typedef v8::Handle<v8::Value> (*SimulatorRuntimeDirectApiCall)(int32_t arg0);
+typedef void (*SimulatorRuntimeDirectApiCallNew)(int32_t arg0);
// This signature supports direct call to accessor getter callback.
typedef v8::Handle<v8::Value> (*SimulatorRuntimeDirectGetterCall)(int32_t arg0,
int32_t arg1);
+typedef void (*SimulatorRuntimeDirectGetterCallNew)(int32_t arg0,
+ int32_t arg1);
// Software interrupt instructions are used by the simulator to call into the
// C-based V8 runtime.
@@ -1770,40 +1773,56 @@
break;
}
}
- } else if (redirection->type() == ExternalReference::DIRECT_API_CALL) {
- SimulatorRuntimeDirectApiCall target =
- reinterpret_cast<SimulatorRuntimeDirectApiCall>(external);
+ } else if (
+ redirection->type() == ExternalReference::DIRECT_API_CALL ||
+ redirection->type() == ExternalReference::DIRECT_API_CALL_NEW) {
if (::v8::internal::FLAG_trace_sim || !stack_aligned) {
PrintF("Call to host function at %p args %08x",
- FUNCTION_ADDR(target), arg0);
+ reinterpret_cast<void*>(external), arg0);
if (!stack_aligned) {
PrintF(" with unaligned stack %08x\n", get_register(sp));
}
PrintF("\n");
}
CHECK(stack_aligned);
- v8::Handle<v8::Value> result = target(arg0);
- if (::v8::internal::FLAG_trace_sim) {
- PrintF("Returned %p\n", reinterpret_cast<void *>(*result));
+ if (redirection->type() == ExternalReference::DIRECT_API_CALL) {
+ SimulatorRuntimeDirectApiCall target =
+ reinterpret_cast<SimulatorRuntimeDirectApiCall>(external);
+ v8::Handle<v8::Value> result = target(arg0);
+ if (::v8::internal::FLAG_trace_sim) {
+ PrintF("Returned %p\n", reinterpret_cast<void *>(*result));
+ }
+ set_register(r0, reinterpret_cast<int32_t>(*result));
+ } else {
+ SimulatorRuntimeDirectApiCallNew target =
+ reinterpret_cast<SimulatorRuntimeDirectApiCallNew>(external);
+ target(arg0);
}
- set_register(r0, reinterpret_cast<int32_t>(*result));
- } else if (redirection->type() == ExternalReference::DIRECT_GETTER_CALL) {
- SimulatorRuntimeDirectGetterCall target =
- reinterpret_cast<SimulatorRuntimeDirectGetterCall>(external);
+ } else if (
+ redirection->type() == ExternalReference::DIRECT_GETTER_CALL ||
+ redirection->type() == ExternalReference::DIRECT_GETTER_CALL_NEW) {
if (::v8::internal::FLAG_trace_sim || !stack_aligned) {
PrintF("Call to host function at %p args %08x %08x",
- FUNCTION_ADDR(target), arg0, arg1);
+ reinterpret_cast<void*>(external), arg0, arg1);
if (!stack_aligned) {
PrintF(" with unaligned stack %08x\n", get_register(sp));
}
PrintF("\n");
}
CHECK(stack_aligned);
- v8::Handle<v8::Value> result = target(arg0, arg1);
- if (::v8::internal::FLAG_trace_sim) {
- PrintF("Returned %p\n", reinterpret_cast<void *>(*result));
+ if (redirection->type() == ExternalReference::DIRECT_GETTER_CALL) {
+ SimulatorRuntimeDirectGetterCall target =
+ reinterpret_cast<SimulatorRuntimeDirectGetterCall>(external);
+ v8::Handle<v8::Value> result = target(arg0, arg1);
+ if (::v8::internal::FLAG_trace_sim) {
+ PrintF("Returned %p\n", reinterpret_cast<void *>(*result));
+ }
+ set_register(r0, reinterpret_cast<int32_t>(*result));
+ } else {
+ SimulatorRuntimeDirectGetterCallNew target =
+ reinterpret_cast<SimulatorRuntimeDirectGetterCallNew>(external);
+ target(arg0, arg1);
}
- set_register(r0, reinterpret_cast<int32_t>(*result));
} else {
// builtin call.
ASSERT(redirection->type() == ExternalReference::BUILTIN_CALL);
@@ -2698,6 +2717,7 @@
// vmov :Rt = Sn
// vcvt: Dd = Sm
// vcvt: Sd = Dm
+// vcvt.f64.s32 Dd, Dd, #<fbits>
// Dd = vabs(Dm)
// Dd = vneg(Dm)
// Dd = vadd(Dn, Dm)
@@ -2746,6 +2766,13 @@
DecodeVCVTBetweenDoubleAndSingle(instr);
} else if ((instr->Opc2Value() == 0x8) && (instr->Opc3Value() & 0x1)) {
DecodeVCVTBetweenFloatingPointAndInteger(instr);
+ } else if ((instr->Opc2Value() == 0xA) && (instr->Opc3Value() == 0x3) &&
+ (instr->Bit(8) == 1)) {
+ // vcvt.f64.s32 Dd, Dd, #<fbits>
+ int fraction_bits = 32 - ((instr->Bit(5) << 4) | instr->Bits(3, 0));
+ int fixed_value = get_sinteger_from_s_register(vd * 2);
+ double divide = 1 << fraction_bits;
+ set_d_register_from_double(vd, fixed_value / divide);
} else if (((instr->Opc2Value() >> 1) == 0x6) &&
(instr->Opc3Value() & 0x1)) {
DecodeVCVTBetweenFloatingPointAndInteger(instr);
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index 5d10a7e..b0de014 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -852,7 +852,7 @@
}
-static const int kFastApiCallArguments = 4;
+static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;
// Reserves space for the extra arguments to API function in the
// caller's frame.
@@ -881,10 +881,11 @@
// -- sp[4] : callee JS function
// -- sp[8] : call data
// -- sp[12] : isolate
- // -- sp[16] : last JS argument
+ // -- sp[16] : ReturnValue
+ // -- sp[20] : last JS argument
// -- ...
- // -- sp[(argc + 3) * 4] : first JS argument
- // -- sp[(argc + 4) * 4] : receiver
+ // -- sp[(argc + 4) * 4] : first JS argument
+ // -- sp[(argc + 5) * 4] : receiver
// -----------------------------------
// Get the function and setup the context.
Handle<JSFunction> function = optimization.constant_function();
@@ -901,11 +902,13 @@
__ Move(r6, call_data);
}
__ mov(r7, Operand(ExternalReference::isolate_address(masm->isolate())));
- // Store JS function, call data and isolate.
+ // Store JS function, call data, isolate and ReturnValue.
__ stm(ib, sp, r5.bit() | r6.bit() | r7.bit());
+ __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
+ __ str(r5, MemOperand(sp, 4 * kPointerSize));
// Prepare arguments.
- __ add(r2, sp, Operand(3 * kPointerSize));
+ __ add(r2, sp, Operand(4 * kPointerSize));
// Allocate the v8::Arguments structure in the arguments' space since
// it's not controlled by GC.
@@ -931,13 +934,21 @@
const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
Address function_address = v8::ToCData<Address>(api_call_info->callback());
+ bool returns_handle =
+ !CallbackTable::ReturnsVoid(masm->isolate(), function_address);
ApiFunction fun(function_address);
+ ExternalReference::Type type =
+ returns_handle ?
+ ExternalReference::DIRECT_API_CALL :
+ ExternalReference::DIRECT_API_CALL_NEW;
ExternalReference ref = ExternalReference(&fun,
- ExternalReference::DIRECT_API_CALL,
+ type,
masm->isolate());
AllowExternalCallThatCantCauseGC scope(masm);
-
- __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
+ __ CallApiFunctionAndReturn(ref,
+ kStackUnwindSpace,
+ returns_handle,
+ kFastApiCallArguments + 1);
}
@@ -1413,7 +1424,8 @@
__ Push(reg, scratch3());
__ mov(scratch3(),
Operand(ExternalReference::isolate_address(isolate())));
- __ Push(scratch3(), name());
+ __ LoadRoot(scratch4(), Heap::kUndefinedValueRootIndex);
+ __ Push(scratch3(), scratch4(), name());
__ mov(r0, sp); // r0 = Handle<Name>
const int kApiStackSpace = 1;
@@ -1425,12 +1437,21 @@
__ str(scratch2(), MemOperand(sp, 1 * kPointerSize));
__ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo&
- const int kStackUnwindSpace = 5;
+ const int kStackUnwindSpace = kFastApiCallArguments + 1;
Address getter_address = v8::ToCData<Address>(callback->getter());
+ bool returns_handle =
+ !CallbackTable::ReturnsVoid(isolate(), getter_address);
ApiFunction fun(getter_address);
- ExternalReference ref = ExternalReference(
- &fun, ExternalReference::DIRECT_GETTER_CALL, isolate());
- __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
+ ExternalReference::Type type =
+ returns_handle ?
+ ExternalReference::DIRECT_GETTER_CALL :
+ ExternalReference::DIRECT_GETTER_CALL_NEW;
+
+ ExternalReference ref = ExternalReference(&fun, type, isolate());
+ __ CallApiFunctionAndReturn(ref,
+ kStackUnwindSpace,
+ returns_handle,
+ 3);
}
@@ -1680,8 +1701,6 @@
// Get the array's length into r0 and calculate new length.
__ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- STATIC_ASSERT(kSmiTagSize == 1);
- STATIC_ASSERT(kSmiTag == 0);
__ add(r0, r0, Operand(Smi::FromInt(argc)));
// Get the elements' length.
@@ -1701,8 +1720,7 @@
// Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
- __ add(end_elements, elements,
- Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(r0));
const int kEndElementsOffset =
FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
__ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
@@ -1722,8 +1740,6 @@
// Get the array's length into r0 and calculate new length.
__ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- STATIC_ASSERT(kSmiTagSize == 1);
- STATIC_ASSERT(kSmiTag == 0);
__ add(r0, r0, Operand(Smi::FromInt(argc)));
// Get the elements' length.
@@ -1797,8 +1813,7 @@
// Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
- __ add(end_elements, elements,
- Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(r0));
__ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
__ RecordWrite(elements,
@@ -1835,8 +1850,7 @@
const int kAllocationDelta = 4;
// Load top and check if it is the end of elements.
- __ add(end_elements, elements,
- Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(end_elements, elements, Operand::PointerOffsetFromSmiKey(r0));
__ add(end_elements, end_elements, Operand(kEndElementsOffset));
__ mov(r7, Operand(new_space_allocation_top));
__ ldr(r3, MemOperand(r7));
@@ -1932,11 +1946,9 @@
// Get the last element.
__ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
- STATIC_ASSERT(kSmiTagSize == 1);
- STATIC_ASSERT(kSmiTag == 0);
// We can't address the last element in one operation. Compute the more
// expensive shift first, and use an offset later on.
- __ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(elements, elements, Operand::PointerOffsetFromSmiKey(r4));
__ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ cmp(r0, r6);
__ b(eq, &call_builtin);
@@ -2158,7 +2170,6 @@
if (cell.is_null()) {
__ ldr(r1, MemOperand(sp, 1 * kPointerSize));
- STATIC_ASSERT(kSmiTag == 0);
__ JumpIfSmi(r1, &miss);
CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
@@ -2176,7 +2187,6 @@
// Check the code is a smi.
Label slow;
- STATIC_ASSERT(kSmiTag == 0);
__ JumpIfNotSmi(code, &slow);
// Convert the smi code to uint16.
@@ -2230,7 +2240,6 @@
if (cell.is_null()) {
__ ldr(r1, MemOperand(sp, 1 * kPointerSize));
- STATIC_ASSERT(kSmiTag == 0);
__ JumpIfSmi(r1, &miss);
CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
name, &miss);
@@ -2245,8 +2254,7 @@
__ ldr(r0, MemOperand(sp, 0 * kPointerSize));
// If the argument is a smi, just return.
- STATIC_ASSERT(kSmiTag == 0);
- __ tst(r0, Operand(kSmiTagMask));
+ __ SmiTst(r0);
__ Drop(argc + 1, eq);
__ Ret(eq);
@@ -2292,11 +2300,9 @@
__ bind(&smi_check);
// Check if the result can fit into an smi. If we had an overflow,
// the result is either 0x80000000 or 0x7FFFFFFF and won't fit into an smi.
- __ add(r1, r0, Operand(0x40000000), SetCC);
// If result doesn't fit into an smi, branch to slow.
- __ b(&slow, mi);
- // Tag the result.
- __ mov(r0, Operand(r0, LSL, kSmiTagSize));
+ __ SmiTag(r0, SetCC);
+ __ b(vs, &slow);
__ bind(&just_return);
__ Drop(argc + 1);
@@ -2341,7 +2347,6 @@
GenerateNameCheck(name, &miss);
if (cell.is_null()) {
__ ldr(r1, MemOperand(sp, 1 * kPointerSize));
- STATIC_ASSERT(kSmiTag == 0);
__ JumpIfSmi(r1, &miss);
CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
name, &miss);
@@ -2357,7 +2362,6 @@
// Check if the argument is a smi.
Label not_smi;
- STATIC_ASSERT(kSmiTag == 0);
__ JumpIfNotSmi(r0, ¬_smi);
// Do bitwise not or do nothing depending on the sign of the
@@ -3237,8 +3241,7 @@
Register key = r0;
Register receiver = r1;
- __ JumpIfNotSmi(key, &miss_force_generic);
- __ mov(r2, Operand(key, ASR, kSmiTagSize));
+ __ UntagAndJumpIfNotSmi(r2, key, &miss_force_generic);
__ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
__ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);
__ Ret();
@@ -3270,7 +3273,6 @@
static void GenerateSmiKeyCheck(MacroAssembler* masm,
Register key,
Register scratch0,
- Register scratch1,
DwVfpRegister double_scratch0,
DwVfpRegister double_scratch1,
Label* fail) {
@@ -3288,8 +3290,7 @@
__ vldr(double_scratch0, ip, HeapNumber::kValueOffset);
__ TryDoubleToInt32Exact(scratch0, double_scratch0, double_scratch1);
__ b(ne, fail);
- __ TrySmiTag(scratch0, fail, scratch1);
- __ mov(key, scratch0);
+ __ TrySmiTag(key, scratch0, fail);
__ bind(&key_ok);
}
@@ -3315,7 +3316,7 @@
// have been verified by the caller to not be a smi.
// Check that the key is a smi or a heap number convertible to a smi.
- GenerateSmiKeyCheck(masm, key, r4, r5, d1, d2, &miss_force_generic);
+ GenerateSmiKeyCheck(masm, key, r4, d1, d2, &miss_force_generic);
__ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
@@ -3330,11 +3331,10 @@
// r3: external array.
if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
// Double to pixel conversion is only implemented in the runtime for now.
- __ JumpIfNotSmi(value, &slow);
+ __ UntagAndJumpIfNotSmi(r5, value, &slow);
} else {
- __ JumpIfNotSmi(value, &check_heap_number);
+ __ UntagAndJumpIfNotSmi(r5, value, &check_heap_number);
}
- __ SmiUntag(r5, value);
__ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
// r3: base pointer of external storage.
@@ -3505,7 +3505,7 @@
// have been verified by the caller to not be a smi.
// Check that the key is a smi or a heap number convertible to a smi.
- GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, d2, &miss_force_generic);
+ GenerateSmiKeyCheck(masm, key_reg, r4, d1, d2, &miss_force_generic);
if (IsFastSmiElementsKind(elements_kind)) {
__ JumpIfNotSmi(value_reg, &transition_elements_kind);
@@ -3539,20 +3539,14 @@
__ add(scratch,
elements_reg,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
- __ add(scratch,
- scratch,
- Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(scratch, scratch, Operand::PointerOffsetFromSmiKey(key_reg));
__ str(value_reg, MemOperand(scratch));
} else {
ASSERT(IsFastObjectElementsKind(elements_kind));
__ add(scratch,
elements_reg,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
- __ add(scratch,
- scratch,
- Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(scratch, scratch, Operand::PointerOffsetFromSmiKey(key_reg));
__ str(value_reg, MemOperand(scratch));
__ mov(receiver_reg, value_reg);
__ RecordWrite(elements_reg, // Object.
@@ -3666,7 +3660,7 @@
// have been verified by the caller to not be a smi.
// Check that the key is a smi or a heap number convertible to a smi.
- GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, d2, &miss_force_generic);
+ GenerateSmiKeyCheck(masm, key_reg, r4, d1, d2, &miss_force_generic);
__ ldr(elements_reg,
FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
diff --git a/src/assembler.h b/src/assembler.h
index 6abd5c5..2d9e727 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -647,9 +647,17 @@
// Handle<Value> f(v8::Arguments&)
DIRECT_API_CALL,
+ // Direct call to API function callback.
+ // void f(v8::Arguments&)
+ DIRECT_API_CALL_NEW,
+
// Direct call to accessor getter callback.
// Handle<value> f(Local<String> property, AccessorInfo& info)
- DIRECT_GETTER_CALL
+ DIRECT_GETTER_CALL,
+
+ // Direct call to accessor getter callback.
+ // void f(Local<String> property, AccessorInfo& info)
+ DIRECT_GETTER_CALL_NEW
};
static void SetUp();
diff --git a/src/ast.h b/src/ast.h
index d697da7..ad7b119 100644
--- a/src/ast.h
+++ b/src/ast.h
@@ -278,7 +278,9 @@
int length() const { return list_.length(); }
void AddMapIfMissing(Handle<Map> map, Zone* zone) {
- map = Map::CurrentMapForDeprecated(map);
+ Map* updated = map->CurrentMapForDeprecated();
+ if (updated == NULL) return;
+ map = Handle<Map>(updated);
for (int i = 0; i < length(); ++i) {
if (at(i).is_identical_to(map)) return;
}
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 85812ca..7c9e436 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -95,6 +95,10 @@
void Bootstrapper::Initialize(bool create_heap_objects) {
extensions_cache_.Initialize(create_heap_objects);
+}
+
+
+void Bootstrapper::InitializeOncePerProcess() {
GCExtension::Register();
ExternalizeStringExtension::Register();
StatisticsExtension::Register();
diff --git a/src/bootstrapper.h b/src/bootstrapper.h
index e33415e..476ac12 100644
--- a/src/bootstrapper.h
+++ b/src/bootstrapper.h
@@ -88,6 +88,8 @@
// context.
class Bootstrapper {
public:
+ static void InitializeOncePerProcess();
+
// Requires: Heap::SetUp has been called.
void Initialize(bool create_heap_objects);
void TearDown();
diff --git a/src/builtins.cc b/src/builtins.cc
index add59eb..81b6005 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -1317,15 +1317,13 @@
LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
ASSERT(raw_holder->IsJSObject());
- CustomArguments custom(isolate);
- v8::ImplementationUtilities::PrepareArgumentsData(custom.end(),
- isolate, data_obj, *function, raw_holder);
-
- v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
- custom.end(),
- &args[0] - 1,
- args.length() - 1,
- is_construct);
+ FunctionCallbackArguments custom(isolate,
+ data_obj,
+ *function,
+ raw_holder,
+ &args[0] - 1,
+ args.length() - 1,
+ is_construct);
v8::Handle<v8::Value> value;
{
@@ -1333,7 +1331,7 @@
VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate,
v8::ToCData<Address>(callback_obj));
- value = callback(new_args);
+ value = custom.Call(callback);
}
if (value.IsEmpty()) {
result = heap->undefined_value();
@@ -1396,21 +1394,20 @@
HandleScope scope(isolate);
LOG(isolate, ApiObjectAccess("call non-function", obj));
- CustomArguments custom(isolate);
- v8::ImplementationUtilities::PrepareArgumentsData(custom.end(),
- isolate, call_data->data(), constructor, obj);
- v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
- custom.end(),
- &args[0] - 1,
- args.length() - 1,
- is_construct_call);
+ FunctionCallbackArguments custom(isolate,
+ call_data->data(),
+ constructor,
+ obj,
+ &args[0] - 1,
+ args.length() - 1,
+ is_construct_call);
v8::Handle<v8::Value> value;
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate,
v8::ToCData<Address>(callback_obj));
- value = callback(new_args);
+ value = custom.Call(callback);
}
if (value.IsEmpty()) {
result = heap->undefined_value();
diff --git a/src/checks.cc b/src/checks.cc
index a6405ec..8bcde1c 100644
--- a/src/checks.cc
+++ b/src/checks.cc
@@ -53,7 +53,7 @@
if (fatal_error_handler_nesting_depth < 3) {
if (i::FLAG_stack_trace_on_abort) {
// Call this one twice on double fault
- i::Isolate::Current()->PrintStack();
+ i::Isolate::Current()->PrintStack(stderr);
}
}
i::OS::Abort();
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index e9ae38e..58f29b4 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -194,7 +194,7 @@
"Optimize object size, Array shift, DOM strings and string +")
DEFINE_bool(pretenure_literals, true, "allocate literals in old space")
DEFINE_bool(track_fields, true, "track fields with only smi values")
-DEFINE_bool(track_double_fields, false, "track fields with double values")
+DEFINE_bool(track_double_fields, true, "track fields with double values")
DEFINE_bool(track_heap_object_fields, true, "track fields with heap values")
DEFINE_implication(track_double_fields, track_fields)
DEFINE_implication(track_heap_object_fields, track_fields)
diff --git a/src/handles.cc b/src/handles.cc
index 5a5773e..7a8d5c9 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -545,19 +545,14 @@
}
-void CustomArguments::IterateInstance(ObjectVisitor* v) {
- v->VisitPointers(values_, values_ + ARRAY_SIZE(values_));
-}
-
-
// Compute the property keys from the interceptor.
// TODO(rossberg): support symbols in API, and filter here if needed.
v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSReceiver> receiver,
Handle<JSObject> object) {
Isolate* isolate = receiver->GetIsolate();
Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
- CustomArguments args(isolate, interceptor->data(), *receiver, *object);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments
+ args(isolate, interceptor->data(), *receiver, *object);
v8::Handle<v8::Array> result;
if (!interceptor->enumerator()->IsUndefined()) {
v8::NamedPropertyEnumerator enum_fun =
@@ -566,7 +561,7 @@
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = enum_fun(info);
+ result = args.Call(enum_fun);
}
}
#if ENABLE_EXTRA_CHECKS
@@ -581,8 +576,8 @@
Handle<JSObject> object) {
Isolate* isolate = receiver->GetIsolate();
Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
- CustomArguments args(isolate, interceptor->data(), *receiver, *object);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments
+ args(isolate, interceptor->data(), *receiver, *object);
v8::Handle<v8::Array> result;
if (!interceptor->enumerator()->IsUndefined()) {
v8::IndexedPropertyEnumerator enum_fun =
@@ -591,7 +586,7 @@
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = enum_fun(info);
+ result = args.Call(enum_fun);
#if ENABLE_EXTRA_CHECKS
CHECK(result.IsEmpty() || v8::Utils::OpenHandle(*result)->IsJSObject());
#endif
diff --git a/src/heap.cc b/src/heap.cc
index a69c539..98844f0 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -4048,13 +4048,11 @@
// Make sure to use globals from the function's context, since the function
// can be from a different context.
Context* native_context = function->context()->native_context();
- bool needs_constructor_property;
Map* new_map;
if (function->shared()->is_generator()) {
// Generator prototypes can share maps since they don't have "constructor"
// properties.
new_map = native_context->generator_object_prototype_map();
- needs_constructor_property = false;
} else {
// Each function prototype gets a fresh map to avoid unwanted sharing of
// maps between prototypes of different constructors.
@@ -4062,14 +4060,13 @@
ASSERT(object_function->has_initial_map());
MaybeObject* maybe_map = object_function->initial_map()->Copy();
if (!maybe_map->To(&new_map)) return maybe_map;
- needs_constructor_property = true;
}
Object* prototype;
MaybeObject* maybe_prototype = AllocateJSObjectFromMap(new_map);
if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
- if (needs_constructor_property) {
+ if (!function->shared()->is_generator()) {
MaybeObject* maybe_failure =
JSObject::cast(prototype)->SetLocalPropertyIgnoreAttributes(
constructor_string(), function, DONT_ENUM);
@@ -4209,7 +4206,7 @@
// the inline_new flag so we only change the map if we generate a
// specialized construct stub.
ASSERT(in_object_properties <= Map::kMaxPreAllocatedPropertyFields);
- if (instance_type == JS_OBJECT_TYPE &&
+ if (!fun->shared()->is_generator() &&
fun->shared()->CanGenerateInlineConstructor(prototype)) {
int count = fun->shared()->this_property_assignments_count();
if (count > in_object_properties) {
@@ -4245,7 +4242,7 @@
}
}
- if (instance_type == JS_OBJECT_TYPE) {
+ if (!fun->shared()->is_generator()) {
fun->shared()->StartInobjectSlackTracking(map);
}
@@ -5936,7 +5933,7 @@
void Heap::Print() {
if (!HasBeenSetUp()) return;
- isolate()->PrintStack();
+ isolate()->PrintStack(stdout);
AllSpaces spaces(this);
for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
space->Print();
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index a97e083..097216e 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -1832,7 +1832,7 @@
} else {
if_nil.Then();
if_nil.Else();
- if (types.Contains(CompareNilICStub::MONOMORPHIC_MAP)) {
+ if (!map.is_null() && types.Contains(CompareNilICStub::MONOMORPHIC_MAP)) {
BuildCheckNonSmi(value);
// For ICs, the map checked below is a sentinel map that gets replaced by
// the monomorphic map when the code is used as a template to generate a
@@ -5363,7 +5363,9 @@
} else if (sub->right()->IsConstant()) {
subexpression = sub->left();
constant = HConstant::cast(sub->right());
- } return;
+ } else {
+ return;
+ }
} else {
return;
}
@@ -10497,7 +10499,7 @@
if ((is_logical_and && left_constant->BooleanValue()) ||
(!is_logical_and && !left_constant->BooleanValue())) {
Drop(1); // left_value.
- CHECK_BAILOUT(VisitForValue(expr->right()));
+ CHECK_ALIVE(VisitForValue(expr->right()));
}
return ast_context()->ReturnValue(Pop());
}
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 175b1ca..5c18cae 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -1954,14 +1954,14 @@
#endif
-Operand ApiParameterOperand(int index) {
- return Operand(
- esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
+Operand ApiParameterOperand(int index, bool returns_handle) {
+ int offset = (index +(kReturnHandlesDirectly || !returns_handle ? 0 : 1));
+ return Operand(esp, offset * kPointerSize);
}
-void MacroAssembler::PrepareCallApiFunction(int argc) {
- if (kReturnHandlesDirectly) {
+void MacroAssembler::PrepareCallApiFunction(int argc, bool returns_handle) {
+ if (kReturnHandlesDirectly || !returns_handle) {
EnterApiExitFrame(argc);
// When handles are returned directly we don't have to allocate extra
// space for and pass an out parameter.
@@ -1990,7 +1990,9 @@
void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
- int stack_space) {
+ int stack_space,
+ bool returns_handle,
+ int return_value_offset) {
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate());
ExternalReference limit_address =
@@ -2026,23 +2028,29 @@
PopSafepointRegisters();
}
- if (!kReturnHandlesDirectly) {
- // PrepareCallApiFunction saved pointer to the output slot into
- // callee-save register esi.
- mov(eax, Operand(esi, 0));
- }
-
- Label empty_handle;
Label prologue;
+ if (returns_handle) {
+ if (!kReturnHandlesDirectly) {
+ // PrepareCallApiFunction saved pointer to the output slot into
+ // callee-save register esi.
+ mov(eax, Operand(esi, 0));
+ }
+ Label empty_handle;
+ // Check if the result handle holds 0.
+ test(eax, eax);
+ j(zero, &empty_handle);
+ // It was non-zero. Dereference to get the result value.
+ mov(eax, Operand(eax, 0));
+ jmp(&prologue);
+ bind(&empty_handle);
+ }
+ // Load the value from ReturnValue
+ mov(eax, Operand(ebp, return_value_offset * kPointerSize));
+
Label promote_scheduled_exception;
Label delete_allocated_handles;
Label leave_exit_frame;
- // Check if the result handle holds 0.
- test(eax, eax);
- j(zero, &empty_handle);
- // It was non-zero. Dereference to get the result value.
- mov(eax, Operand(eax, 0));
bind(&prologue);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
@@ -2098,11 +2106,6 @@
LeaveApiExitFrame();
ret(stack_space * kPointerSize);
- bind(&empty_handle);
- // It was zero; the result is undefined.
- mov(eax, isolate()->factory()->undefined_value());
- jmp(&prologue);
-
bind(&promote_scheduled_exception);
TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h
index 519652a..e7a075d 100644
--- a/src/ia32/macro-assembler-ia32.h
+++ b/src/ia32/macro-assembler-ia32.h
@@ -769,13 +769,16 @@
// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
// etc. Saves context (esi). If space was reserved for return value then
// stores the pointer to the reserved slot into esi.
- void PrepareCallApiFunction(int argc);
+ void PrepareCallApiFunction(int argc, bool returns_handle);
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Clobbers ebx, edi and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
- void CallApiFunctionAndReturn(Address function_address, int stack_space);
+ void CallApiFunctionAndReturn(Address function_address,
+ int stack_space,
+ bool returns_handle,
+ int return_value_offset_from_ebp);
// Jump to a runtime routine.
void JumpToExternalReference(const ExternalReference& ext);
@@ -1010,7 +1013,7 @@
// Generates an Operand for saving parameters after PrepareCallApiFunction.
-Operand ApiParameterOperand(int index);
+Operand ApiParameterOperand(int index, bool returns_handle);
#ifdef GENERATED_CODE_COVERAGE
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 4805d52..88ea4b2 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -420,7 +420,7 @@
// Number of pointers to be reserved on stack for fast API call.
-static const int kFastApiCallArguments = 4;
+static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;
// Reserves space for the extra arguments to API function in the
@@ -469,10 +469,11 @@
// (first fast api call extra argument)
// -- esp[12] : api call data
// -- esp[16] : isolate
- // -- esp[20] : last argument
+ // -- esp[20] : ReturnValue
+ // -- esp[24] : last argument
// -- ...
- // -- esp[(argc + 4) * 4] : first argument
- // -- esp[(argc + 5) * 4] : receiver
+ // -- esp[(argc + 5) * 4] : first argument
+ // -- esp[(argc + 6) * 4] : receiver
// -----------------------------------
// Get the function and setup the context.
Handle<JSFunction> function = optimization.constant_function();
@@ -492,9 +493,12 @@
}
__ mov(Operand(esp, 4 * kPointerSize),
Immediate(reinterpret_cast<int>(masm->isolate())));
+ __ mov(Operand(esp, 5 * kPointerSize),
+ masm->isolate()->factory()->undefined_value());
// Prepare arguments.
- __ lea(eax, Operand(esp, 4 * kPointerSize));
+ STATIC_ASSERT(kFastApiCallArguments == 5);
+ __ lea(eax, Operand(esp, kFastApiCallArguments * kPointerSize));
const int kApiArgc = 1; // API function gets reference to the v8::Arguments.
@@ -502,23 +506,31 @@
// it's not controlled by GC.
const int kApiStackSpace = 4;
- __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);
-
- __ mov(ApiParameterOperand(1), eax); // v8::Arguments::implicit_args_.
- __ add(eax, Immediate(argc * kPointerSize));
- __ mov(ApiParameterOperand(2), eax); // v8::Arguments::values_.
- __ Set(ApiParameterOperand(3), Immediate(argc)); // v8::Arguments::length_.
- // v8::Arguments::is_construct_call_.
- __ Set(ApiParameterOperand(4), Immediate(0));
-
- // v8::InvocationCallback's argument.
- __ lea(eax, ApiParameterOperand(1));
- __ mov(ApiParameterOperand(0), eax);
-
// Function address is a foreign pointer outside V8's heap.
Address function_address = v8::ToCData<Address>(api_call_info->callback());
+ bool returns_handle =
+ !CallbackTable::ReturnsVoid(masm->isolate(),
+ reinterpret_cast<void*>(function_address));
+ __ PrepareCallApiFunction(kApiArgc + kApiStackSpace, returns_handle);
+
+ // v8::Arguments::implicit_args_.
+ __ mov(ApiParameterOperand(1, returns_handle), eax);
+ __ add(eax, Immediate(argc * kPointerSize));
+ // v8::Arguments::values_.
+ __ mov(ApiParameterOperand(2, returns_handle), eax);
+ // v8::Arguments::length_.
+ __ Set(ApiParameterOperand(3, returns_handle), Immediate(argc));
+ // v8::Arguments::is_construct_call_.
+ __ Set(ApiParameterOperand(4, returns_handle), Immediate(0));
+
+ // v8::InvocationCallback's argument.
+ __ lea(eax, ApiParameterOperand(1, returns_handle));
+ __ mov(ApiParameterOperand(0, returns_handle), eax);
+
__ CallApiFunctionAndReturn(function_address,
- argc + kFastApiCallArguments + 1);
+ argc + kFastApiCallArguments + 1,
+ returns_handle,
+ kFastApiCallArguments + 1);
}
@@ -1365,6 +1377,7 @@
__ push(Immediate(Handle<Object>(callback->data(), isolate())));
}
__ push(Immediate(reinterpret_cast<int>(isolate())));
+ __ push(Immediate(isolate()->factory()->undefined_value())); // ReturnValue
// Save a pointer to where we pushed the arguments pointer. This will be
// passed as the const ExecutableAccessorInfo& to the C++ callback.
@@ -1375,22 +1388,29 @@
__ push(scratch3()); // Restore return address.
- // 4 elements array for v8::Arguments::values_, handler for name and pointer
+ // array for v8::Arguments::values_, handler for name and pointer
// to the values (it considered as smi in GC).
- const int kStackSpace = 6;
+ const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;
const int kApiArgc = 2;
- __ PrepareCallApiFunction(kApiArgc);
- __ mov(ApiParameterOperand(0), ebx); // name.
+ Address getter_address = v8::ToCData<Address>(callback->getter());
+ bool returns_handle =
+ !CallbackTable::ReturnsVoid(isolate(),
+ reinterpret_cast<void*>(getter_address));
+ __ PrepareCallApiFunction(kApiArgc, returns_handle);
+ __ mov(ApiParameterOperand(0, returns_handle), ebx); // name.
__ add(ebx, Immediate(kPointerSize));
- __ mov(ApiParameterOperand(1), ebx); // arguments pointer.
+ __ mov(ApiParameterOperand(1, returns_handle), ebx); // arguments pointer.
// Emitting a stub call may try to allocate (if the code is not
// already generated). Do not allow the assembler to perform a
// garbage collection but instead return the allocation failure
// object.
- Address getter_address = v8::ToCData<Address>(callback->getter());
- __ CallApiFunctionAndReturn(getter_address, kStackSpace);
+
+ __ CallApiFunctionAndReturn(getter_address,
+ kStackSpace,
+ returns_handle,
+ 4);
}
@@ -2493,7 +2513,7 @@
name, depth, &miss);
// Move the return address on top of the stack.
- __ mov(eax, Operand(esp, 4 * kPointerSize));
+ __ mov(eax, Operand(esp, kFastApiCallArguments * kPointerSize));
__ mov(Operand(esp, 0 * kPointerSize), eax);
// esp[2 * kPointerSize] is uninitialized, esp[3 * kPointerSize] contains
diff --git a/src/isolate.cc b/src/isolate.cc
index 2f36063..8ae0c74 100644
--- a/src/isolate.cc
+++ b/src/isolate.cc
@@ -835,7 +835,7 @@
}
-void Isolate::PrintStack() {
+void Isolate::PrintStack(FILE* out) {
if (stack_trace_nesting_level_ == 0) {
stack_trace_nesting_level_++;
@@ -850,7 +850,7 @@
StringStream accumulator(allocator);
incomplete_message_ = &accumulator;
PrintStack(&accumulator);
- accumulator.OutputToStdOut();
+ accumulator.OutputToFile(out);
InitializeLoggingAndCounters();
accumulator.Log();
incomplete_message_ = NULL;
@@ -865,7 +865,7 @@
"\n\nAttempt to print stack while printing stack (double fault)\n");
OS::PrintError(
"If you are lucky you may find a partial stack dump on stdout.\n\n");
- incomplete_message_->OutputToStdOut();
+ incomplete_message_->OutputToFile(out);
}
}
@@ -1752,7 +1752,8 @@
deferred_handles_head_(NULL),
optimizing_compiler_thread_(this),
marking_thread_(NULL),
- sweeper_thread_(NULL) {
+ sweeper_thread_(NULL),
+ callback_table_(NULL) {
id_ = NoBarrier_AtomicIncrement(&isolate_counter_, 1);
TRACE_ISOLATE(constructor);
diff --git a/src/isolate.h b/src/isolate.h
index ab32987..de7e35e 100644
--- a/src/isolate.h
+++ b/src/isolate.h
@@ -51,6 +51,7 @@
namespace internal {
class Bootstrapper;
+class CallbackTable;
class CodeGenerator;
class CodeRange;
struct CodeStubInterfaceDescriptor;
@@ -724,7 +725,7 @@
void PrintCurrentStackTrace(FILE* out);
void PrintStackTrace(FILE* out, char* thread_data);
void PrintStack(StringStream* accumulator);
- void PrintStack();
+ void PrintStack(FILE* out);
Handle<String> StackTraceString();
NO_INLINE(void PushStackTraceAndDie(unsigned int magic,
Object* object,
@@ -1102,6 +1103,13 @@
return sweeper_thread_;
}
+ CallbackTable* callback_table() {
+ return callback_table_;
+ }
+ void set_callback_table(CallbackTable* callback_table) {
+ callback_table_ = callback_table;
+ }
+
HStatistics* GetHStatistics();
HTracer* GetHTracer();
@@ -1339,6 +1347,7 @@
OptimizingCompilerThread optimizing_compiler_thread_;
MarkingThread** marking_thread_;
SweeperThread** sweeper_thread_;
+ CallbackTable* callback_table_;
friend class ExecutionAccess;
friend class HandleScopeImplementer;
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index 81e9ec9..cea4bc4 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -3929,7 +3929,9 @@
void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
- int stack_space) {
+ int stack_space,
+ bool returns_handle,
+ int return_value_offset_from_fp) {
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate());
const int kNextOffset = 0;
@@ -3978,22 +3980,27 @@
PopSafepointRegisters();
}
- // As mentioned above, on MIPS a pointer is returned - we need to dereference
- // it to get the actual return value (which is also a pointer).
- lw(v0, MemOperand(v0));
-
Label promote_scheduled_exception;
Label delete_allocated_handles;
Label leave_exit_frame;
+ Label return_value_loaded;
- // If result is non-zero, dereference to get the result value
- // otherwise set it to undefined.
- Label skip;
- LoadRoot(a0, Heap::kUndefinedValueRootIndex);
- Branch(&skip, eq, v0, Operand(zero_reg));
- lw(a0, MemOperand(v0));
- bind(&skip);
- mov(v0, a0);
+ if (returns_handle) {
+ Label load_return_value;
+
+ // As mentioned above, on MIPS a pointer is returned - we need to
+ // dereference it to get the actual return value (which is also a pointer).
+ lw(v0, MemOperand(v0));
+
+ Branch(&load_return_value, eq, v0, Operand(zero_reg));
+ // Dereference returned value.
+ lw(v0, MemOperand(v0));
+ Branch(&return_value_loaded);
+ bind(&load_return_value);
+ }
+ // Load value from ReturnValue.
+ lw(v0, MemOperand(fp, return_value_offset_from_fp*kPointerSize));
+ bind(&return_value_loaded);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h
index 248e5b4..6511223 100644
--- a/src/mips/macro-assembler-mips.h
+++ b/src/mips/macro-assembler-mips.h
@@ -1237,7 +1237,10 @@
// from handle and propagates exceptions. Restores context. stack_space
// - space to be unwound on exit (includes the call JS arguments space and
// the additional space allocated for the fast call).
- void CallApiFunctionAndReturn(ExternalReference function, int stack_space);
+ void CallApiFunctionAndReturn(ExternalReference function,
+ int stack_space,
+ bool returns_handle,
+ int return_value_offset_from_fp);
// Jump to the builtin routine.
void JumpToExternalReference(const ExternalReference& builtin,
diff --git a/src/mips/simulator-mips.cc b/src/mips/simulator-mips.cc
index ffc8679..18e78a5 100644
--- a/src/mips/simulator-mips.cc
+++ b/src/mips/simulator-mips.cc
@@ -1388,10 +1388,13 @@
// This signature supports direct call in to API function native callback
// (refer to InvocationCallback in v8.h).
typedef v8::Handle<v8::Value> (*SimulatorRuntimeDirectApiCall)(int32_t arg0);
+typedef void (*SimulatorRuntimeDirectApiCallNew)(int32_t arg0);
// This signature supports direct call to accessor getter callback.
typedef v8::Handle<v8::Value> (*SimulatorRuntimeDirectGetterCall)(int32_t arg0,
int32_t arg1);
+typedef void (*SimulatorRuntimeDirectGetterCallNew)(int32_t arg0,
+ int32_t arg1);
// Software interrupt instructions are used by the simulator to call into the
// C-based V8 runtime. They are also used for debugging with simulator.
@@ -1536,28 +1539,44 @@
break;
}
}
- } else if (redirection->type() == ExternalReference::DIRECT_API_CALL) {
+ } else if (
+ redirection->type() == ExternalReference::DIRECT_API_CALL ||
+ redirection->type() == ExternalReference::DIRECT_API_CALL_NEW) {
// See DirectCEntryStub::GenerateCall for explanation of register usage.
- SimulatorRuntimeDirectApiCall target =
- reinterpret_cast<SimulatorRuntimeDirectApiCall>(external);
if (::v8::internal::FLAG_trace_sim) {
PrintF("Call to host function at %p args %08x\n",
- FUNCTION_ADDR(target), arg1);
+ reinterpret_cast<void*>(external), arg1);
}
- v8::Handle<v8::Value> result = target(arg1);
- *(reinterpret_cast<int*>(arg0)) = reinterpret_cast<int32_t>(*result);
- set_register(v0, arg0);
- } else if (redirection->type() == ExternalReference::DIRECT_GETTER_CALL) {
+ if (redirection->type() == ExternalReference::DIRECT_API_CALL) {
+ SimulatorRuntimeDirectApiCall target =
+ reinterpret_cast<SimulatorRuntimeDirectApiCall>(external);
+ v8::Handle<v8::Value> result = target(arg1);
+ *(reinterpret_cast<int*>(arg0)) = reinterpret_cast<int32_t>(*result);
+ set_register(v0, arg0);
+ } else {
+ SimulatorRuntimeDirectApiCallNew target =
+ reinterpret_cast<SimulatorRuntimeDirectApiCallNew>(external);
+ target(arg1);
+ }
+ } else if (
+ redirection->type() == ExternalReference::DIRECT_GETTER_CALL ||
+ redirection->type() == ExternalReference::DIRECT_GETTER_CALL_NEW) {
// See DirectCEntryStub::GenerateCall for explanation of register usage.
- SimulatorRuntimeDirectGetterCall target =
- reinterpret_cast<SimulatorRuntimeDirectGetterCall>(external);
if (::v8::internal::FLAG_trace_sim) {
PrintF("Call to host function at %p args %08x %08x\n",
- FUNCTION_ADDR(target), arg1, arg2);
+ reinterpret_cast<void*>(external), arg1, arg2);
}
- v8::Handle<v8::Value> result = target(arg1, arg2);
- *(reinterpret_cast<int*>(arg0)) = reinterpret_cast<int32_t>(*result);
- set_register(v0, arg0);
+ if (redirection->type() == ExternalReference::DIRECT_GETTER_CALL) {
+ SimulatorRuntimeDirectGetterCall target =
+ reinterpret_cast<SimulatorRuntimeDirectGetterCall>(external);
+ v8::Handle<v8::Value> result = target(arg1, arg2);
+ *(reinterpret_cast<int*>(arg0)) = reinterpret_cast<int32_t>(*result);
+ set_register(v0, arg0);
+ } else {
+ SimulatorRuntimeDirectGetterCallNew target =
+ reinterpret_cast<SimulatorRuntimeDirectGetterCallNew>(external);
+ target(arg1, arg2);
+ }
} else {
SimulatorRuntimeCall target =
reinterpret_cast<SimulatorRuntimeCall>(external);
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index 90ae404..4a73be2 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips/stub-cache-mips.cc
@@ -842,8 +842,7 @@
}
-static const int kFastApiCallArguments = 4;
-
+static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;
// Reserves space for the extra arguments to API function in the
// caller's frame.
@@ -872,10 +871,11 @@
// -- sp[4] : callee JS function
// -- sp[8] : call data
// -- sp[12] : isolate
- // -- sp[16] : last JS argument
+ // -- sp[16] : ReturnValue
+ // -- sp[20] : last JS argument
// -- ...
- // -- sp[(argc + 3) * 4] : first JS argument
- // -- sp[(argc + 4) * 4] : receiver
+ // -- sp[(argc + 4) * 4] : first JS argument
+ // -- sp[(argc + 5) * 4] : receiver
// -----------------------------------
// Get the function and setup the context.
Handle<JSFunction> function = optimization.constant_function();
@@ -893,13 +893,15 @@
}
__ li(t3, Operand(ExternalReference::isolate_address(masm->isolate())));
- // Store JS function, call data and isolate.
+ // Store JS function, call data, isolate and ReturnValue.
__ sw(t1, MemOperand(sp, 1 * kPointerSize));
__ sw(t2, MemOperand(sp, 2 * kPointerSize));
__ sw(t3, MemOperand(sp, 3 * kPointerSize));
+ __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
+ __ sw(t1, MemOperand(sp, 4 * kPointerSize));
// Prepare arguments.
- __ Addu(a2, sp, Operand(3 * kPointerSize));
+ __ Addu(a2, sp, Operand(4 * kPointerSize));
// Allocate the v8::Arguments structure in the arguments' space since
// it's not controlled by GC.
@@ -930,13 +932,22 @@
const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
Address function_address = v8::ToCData<Address>(api_call_info->callback());
+ bool returns_handle =
+ !CallbackTable::ReturnsVoid(masm->isolate(), function_address);
ApiFunction fun(function_address);
+ ExternalReference::Type type =
+ returns_handle ?
+ ExternalReference::DIRECT_API_CALL :
+ ExternalReference::DIRECT_API_CALL_NEW;
ExternalReference ref =
ExternalReference(&fun,
- ExternalReference::DIRECT_API_CALL,
+ type,
masm->isolate());
AllowExternalCallThatCantCauseGC scope(masm);
- __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
+ __ CallApiFunctionAndReturn(ref,
+ kStackUnwindSpace,
+ returns_handle,
+ kFastApiCallArguments + 1);
}
class CallInterceptorCompiler BASE_EMBEDDED {
@@ -1410,12 +1421,14 @@
} else {
__ li(scratch3(), Handle<Object>(callback->data(), isolate()));
}
- __ Subu(sp, sp, 4 * kPointerSize);
- __ sw(reg, MemOperand(sp, 3 * kPointerSize));
- __ sw(scratch3(), MemOperand(sp, 2 * kPointerSize));
+ __ Subu(sp, sp, 5 * kPointerSize);
+ __ sw(reg, MemOperand(sp, 4 * kPointerSize));
+ __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize));
__ li(scratch3(),
Operand(ExternalReference::isolate_address(isolate())));
- __ sw(scratch3(), MemOperand(sp, 1 * kPointerSize));
+ __ LoadRoot(scratch4(), Heap::kUndefinedValueRootIndex);
+ __ sw(scratch3(), MemOperand(sp, 2 * kPointerSize));
+ __ sw(scratch4(), MemOperand(sp, 1 * kPointerSize));
__ sw(name(), MemOperand(sp, 0 * kPointerSize));
__ mov(a2, scratch2()); // Saved in case scratch2 == a1.
@@ -1436,12 +1449,21 @@
// a2 (second argument - see note above) = AccessorInfo&
__ Addu(a2, sp, kPointerSize);
- const int kStackUnwindSpace = 5;
+ const int kStackUnwindSpace = kFastApiCallArguments + 1;
Address getter_address = v8::ToCData<Address>(callback->getter());
+ bool returns_handle =
+ !CallbackTable::ReturnsVoid(isolate(), getter_address);
ApiFunction fun(getter_address);
- ExternalReference ref = ExternalReference(
- &fun, ExternalReference::DIRECT_GETTER_CALL, isolate());
- __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
+ ExternalReference::Type type =
+ returns_handle ?
+ ExternalReference::DIRECT_GETTER_CALL :
+ ExternalReference::DIRECT_GETTER_CALL_NEW;
+
+ ExternalReference ref = ExternalReference(&fun, type, isolate());
+ __ CallApiFunctionAndReturn(ref,
+ kStackUnwindSpace,
+ returns_handle,
+ 3);
}
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index ecbf9d6..891f0d2 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -306,6 +306,17 @@
CHECK_EQ(map()->unused_property_fields(),
(map()->inobject_properties() + properties()->length() -
map()->NextFreePropertyIndex()));
+ DescriptorArray* descriptors = map()->instance_descriptors();
+ for (int i = 0; i < map()->NumberOfOwnDescriptors(); i++) {
+ if (descriptors->GetDetails(i).type() == FIELD) {
+ Representation r = descriptors->GetDetails(i).representation();
+ int field = descriptors->GetFieldIndex(i);
+ Object* value = RawFastPropertyAt(field);
+ if (r.IsSmi()) ASSERT(value->IsSmi());
+ if (r.IsDouble()) ASSERT(value->IsHeapNumber());
+ if (r.IsHeapObject()) ASSERT(value->IsHeapObject());
+ }
+ }
}
CHECK_EQ((map()->has_fast_smi_or_object_elements() ||
(elements() == GetHeap()->empty_fixed_array())),
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 8c6e925..95a0eca 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -1030,10 +1030,7 @@
Smi* Smi::FromInt(int value) {
ASSERT(Smi::IsValid(value));
- int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
- intptr_t tagged_value =
- (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
- return reinterpret_cast<Smi*>(tagged_value);
+ return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}
@@ -1111,28 +1108,8 @@
bool Smi::IsValid(intptr_t value) {
-#ifdef DEBUG
- bool in_range = (value >= kMinValue) && (value <= kMaxValue);
-#endif
-
-#ifdef V8_TARGET_ARCH_X64
- // To be representable as a long smi, the value must be a 32-bit integer.
- bool result = (value == static_cast<int32_t>(value));
-#else
- // To be representable as an tagged small integer, the two
- // most-significant bits of 'value' must be either 00 or 11 due to
- // sign-extension. To check this we add 01 to the two
- // most-significant bits, and check if the most-significant bit is 0
- //
- // CAUTION: The original code below:
- // bool result = ((value + 0x40000000) & 0x80000000) == 0;
- // may lead to incorrect results according to the C language spec, and
- // in fact doesn't work correctly with gcc4.1.1 in some cases: The
- // compiler may produce undefined results in case of signed integer
- // overflow. The computation must be done w/ unsigned ints.
- bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
-#endif
- ASSERT(result == in_range);
+ bool result = Internals::IsValidSmi(value);
+ ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue);
return result;
}
@@ -1528,9 +1505,19 @@
MaybeObject* JSObject::AllocateStorageForMap(Map* map) {
ASSERT(this->map()->inobject_properties() == map->inobject_properties());
- ElementsKind expected_kind = this->map()->elements_kind();
- if (map->elements_kind() != expected_kind) {
- MaybeObject* maybe_map = map->AsElementsKind(expected_kind);
+ ElementsKind obj_kind = this->map()->elements_kind();
+ ElementsKind map_kind = map->elements_kind();
+ if (map_kind != obj_kind) {
+ ElementsKind to_kind = map_kind;
+ if (IsMoreGeneralElementsKindTransition(map_kind, obj_kind) ||
+ IsDictionaryElementsKind(obj_kind)) {
+ to_kind = obj_kind;
+ }
+ MaybeObject* maybe_obj =
+ IsDictionaryElementsKind(to_kind) ? NormalizeElements()
+ : TransitionElementsKind(to_kind);
+ if (maybe_obj->IsFailure()) return maybe_obj;
+ MaybeObject* maybe_map = map->AsElementsKind(to_kind);
if (!maybe_map->To(&map)) return maybe_map;
}
int total_size =
@@ -3624,12 +3611,6 @@
}
-Handle<Map> Map::CurrentMapForDeprecated(Handle<Map> map) {
- if (!map->is_deprecated()) return map;
- return GeneralizeRepresentation(map, 0, Representation::Smi());
-}
-
-
void Map::NotifyLeafMapLayoutChange() {
dependent_code()->DeoptimizeDependentCodeGroup(
GetIsolate(),
diff --git a/src/objects.cc b/src/objects.cc
index ee0ff51..31bbbdb 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -339,13 +339,12 @@
JSObject* self = JSObject::cast(receiver);
Handle<String> key(String::cast(name));
LOG(isolate, ApiNamedPropertyAccess("load", self, name));
- CustomArguments args(isolate, data->data(), self, this);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments args(isolate, data->data(), self, this);
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = call_fun(v8::Utils::ToLocal(key), info);
+ result = args.Call(call_fun, v8::Utils::ToLocal(key));
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (result.IsEmpty()) {
@@ -1802,7 +1801,9 @@
int index = map()->NextFreePropertyIndex();
// Allocate new instance descriptors with (name, index) added
- Representation representation = value->OptimalRepresentation();
+ Representation representation = IsJSContextExtensionObject()
+ ? Representation::Tagged() : value->OptimalRepresentation();
+
FieldDescriptor new_field(name, index, attributes, representation);
ASSERT(index < map()->inobject_properties() ||
@@ -2055,8 +2056,8 @@
Map* old_target = old_map->GetTransition(transition_index);
Object* result;
- MaybeObject* maybe_result =
- ConvertDescriptorToField(name, new_value, attributes);
+ MaybeObject* maybe_result = ConvertDescriptorToField(
+ name, new_value, attributes, OMIT_TRANSITION_KEEP_REPRESENTATIONS);
if (!maybe_result->To(&result)) return maybe_result;
if (!HasFastProperties()) return result;
@@ -2097,7 +2098,8 @@
MaybeObject* JSObject::ConvertDescriptorToField(Name* name,
Object* new_value,
- PropertyAttributes attributes) {
+ PropertyAttributes attributes,
+ TransitionFlag flag) {
if (map()->unused_property_fields() == 0 &&
TooManyFastProperties(properties()->length(), MAY_BE_STORE_FROM_KEYED)) {
Object* obj;
@@ -2106,14 +2108,14 @@
return ReplaceSlowProperty(name, new_value, attributes);
}
- Representation representation = new_value->OptimalRepresentation();
+ Representation representation = IsJSContextExtensionObject()
+ ? Representation::Tagged() : new_value->OptimalRepresentation();
int index = map()->NextFreePropertyIndex();
FieldDescriptor new_field(name, index, attributes, representation);
// Make a new map for the object.
Map* new_map;
- MaybeObject* maybe_new_map = map()->CopyInsertDescriptor(&new_field,
- OMIT_TRANSITION);
+ MaybeObject* maybe_new_map = map()->CopyInsertDescriptor(&new_field, flag);
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
// Make new properties array if necessary.
@@ -2538,6 +2540,7 @@
int descriptors = old_map->NumberOfOwnDescriptors();
Map* root_map = old_map->FindRootMap();
+ // Check the state of the root map.
if (!old_map->EquivalentToForTransition(root_map)) {
return CopyGeneralizeAllRepresentations();
}
@@ -2548,7 +2551,6 @@
verbatim, descriptors, old_descriptors);
if (updated == NULL) return CopyGeneralizeAllRepresentations();
- // Check the state of the root map.
DescriptorArray* updated_descriptors = updated->instance_descriptors();
int valid = updated->NumberOfOwnDescriptors();
@@ -2625,6 +2627,34 @@
}
+Map* Map::CurrentMapForDeprecated() {
+ AssertNoAllocation no_allocation;
+ if (!is_deprecated()) return this;
+
+ DescriptorArray* old_descriptors = instance_descriptors();
+
+ int descriptors = NumberOfOwnDescriptors();
+ Map* root_map = FindRootMap();
+
+ // Check the state of the root map.
+ if (!EquivalentToForTransition(root_map)) return NULL;
+ int verbatim = root_map->NumberOfOwnDescriptors();
+
+ Map* updated = root_map->FindUpdatedMap(
+ verbatim, descriptors, old_descriptors);
+ if (updated == NULL) return NULL;
+
+ DescriptorArray* updated_descriptors = updated->instance_descriptors();
+ int valid = updated->NumberOfOwnDescriptors();
+ if (!updated_descriptors->IsMoreGeneralThan(
+ verbatim, valid, descriptors, old_descriptors)) {
+ return NULL;
+ }
+
+ return updated;
+}
+
+
MaybeObject* JSObject::SetPropertyWithInterceptor(
Name* name,
Object* value,
@@ -2640,8 +2670,7 @@
Handle<InterceptorInfo> interceptor(GetNamedInterceptor());
if (!interceptor->setter()->IsUndefined()) {
LOG(isolate, ApiNamedPropertyAccess("interceptor-named-set", this, name));
- CustomArguments args(isolate, interceptor->data(), this, this);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments args(isolate, interceptor->data(), this, this);
v8::NamedPropertySetter setter =
v8::ToCData<v8::NamedPropertySetter>(interceptor->setter());
v8::Handle<v8::Value> result;
@@ -2652,9 +2681,9 @@
isolate->heap()->undefined_value() :
value,
isolate);
- result = setter(v8::Utils::ToLocal(name_handle),
- v8::Utils::ToLocal(value_unhole),
- info);
+ result = args.Call(setter,
+ v8::Utils::ToLocal(name_handle),
+ v8::Utils::ToLocal(value_unhole));
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!result.IsEmpty()) return *value_handle;
@@ -2754,14 +2783,14 @@
if (call_fun == NULL) return value;
Handle<String> key(String::cast(name));
LOG(isolate, ApiNamedPropertyAccess("store", this, name));
- CustomArguments args(isolate, data->data(), this, JSObject::cast(holder));
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments
+ args(isolate, data->data(), this, JSObject::cast(holder));
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- call_fun(v8::Utils::ToLocal(key),
- v8::Utils::ToLocal(value_handle),
- info);
+ args.Call(call_fun,
+ v8::Utils::ToLocal(key),
+ v8::Utils::ToLocal(value_handle));
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
return *value_handle;
@@ -4063,8 +4092,7 @@
Handle<JSObject> receiver_handle(receiver);
Handle<JSObject> holder_handle(this);
Handle<String> name_handle(String::cast(name));
- CustomArguments args(isolate, interceptor->data(), receiver, this);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments args(isolate, interceptor->data(), receiver, this);
if (!interceptor->query()->IsUndefined()) {
v8::NamedPropertyQuery query =
v8::ToCData<v8::NamedPropertyQuery>(interceptor->query());
@@ -4074,7 +4102,7 @@
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = query(v8::Utils::ToLocal(name_handle), info);
+ result = args.Call(query, v8::Utils::ToLocal(name_handle));
}
if (!result.IsEmpty()) {
ASSERT(result->IsInt32());
@@ -4089,7 +4117,7 @@
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = getter(v8::Utils::ToLocal(name_handle), info);
+ result = args.Call(getter, v8::Utils::ToLocal(name_handle));
}
if (!result.IsEmpty()) return DONT_ENUM;
}
@@ -4204,8 +4232,7 @@
Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
Handle<JSReceiver> hreceiver(receiver);
Handle<JSObject> holder(this);
- CustomArguments args(isolate, interceptor->data(), receiver, this);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments args(isolate, interceptor->data(), receiver, this);
if (!interceptor->query()->IsUndefined()) {
v8::IndexedPropertyQuery query =
v8::ToCData<v8::IndexedPropertyQuery>(interceptor->query());
@@ -4215,7 +4242,7 @@
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = query(index, info);
+ result = args.Call(query, index);
}
if (!result.IsEmpty())
return static_cast<PropertyAttributes>(result->Int32Value());
@@ -4228,7 +4255,7 @@
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = getter(index, info);
+ result = args.Call(getter, index);
}
if (!result.IsEmpty()) return NONE;
}
@@ -4892,13 +4919,12 @@
v8::ToCData<v8::NamedPropertyDeleter>(interceptor->deleter());
LOG(isolate,
ApiNamedPropertyAccess("interceptor-named-delete", *this_handle, name));
- CustomArguments args(isolate, interceptor->data(), this, this);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments args(isolate, interceptor->data(), this, this);
v8::Handle<v8::Boolean> result;
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = deleter(v8::Utils::ToLocal(name_handle), info);
+ result = args.Call(deleter, v8::Utils::ToLocal(name_handle));
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!result.IsEmpty()) {
@@ -4929,13 +4955,12 @@
Handle<JSObject> this_handle(this);
LOG(isolate,
ApiIndexedPropertyAccess("interceptor-indexed-delete", this, index));
- CustomArguments args(isolate, interceptor->data(), this, this);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments args(isolate, interceptor->data(), this, this);
v8::Handle<v8::Boolean> result;
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = deleter(index, info);
+ result = args.Call(deleter, index);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!result.IsEmpty()) {
@@ -6364,7 +6389,7 @@
set_transitions(transitions);
result->SetBackPointer(this);
- } else {
+ } else if (flag != OMIT_TRANSITION_KEEP_REPRESENTATIONS) {
descriptors->InitializeRepresentations(Representation::Tagged());
}
@@ -6372,6 +6397,8 @@
}
+// Since this method is used to rewrite an existing transition tree, it can
+// always insert transitions without checking.
MaybeObject* Map::CopyInstallDescriptors(int new_descriptor,
DescriptorArray* descriptors) {
ASSERT(descriptors->IsSortedNoDuplicates());
@@ -6394,18 +6421,14 @@
result->set_unused_property_fields(unused_property_fields);
result->set_owns_descriptors(false);
- if (CanHaveMoreTransitions()) {
- Name* name = descriptors->GetKey(new_descriptor);
- TransitionArray* transitions;
- MaybeObject* maybe_transitions =
- AddTransition(name, result, SIMPLE_TRANSITION);
- if (!maybe_transitions->To(&transitions)) return maybe_transitions;
+ Name* name = descriptors->GetKey(new_descriptor);
+ TransitionArray* transitions;
+ MaybeObject* maybe_transitions =
+ AddTransition(name, result, SIMPLE_TRANSITION);
+ if (!maybe_transitions->To(&transitions)) return maybe_transitions;
- set_transitions(transitions);
- result->SetBackPointer(this);
- } else {
- descriptors->InitializeRepresentations(Representation::Tagged());
- }
+ set_transitions(transitions);
+ result->SetBackPointer(this);
return result;
}
@@ -11132,13 +11155,12 @@
v8::ToCData<v8::IndexedPropertySetter>(interceptor->setter());
LOG(isolate,
ApiIndexedPropertyAccess("interceptor-indexed-set", this, index));
- CustomArguments args(isolate, interceptor->data(), this, this);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments args(isolate, interceptor->data(), this, this);
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = setter(index, v8::Utils::ToLocal(value_handle), info);
+ result = args.Call(setter, index, v8::Utils::ToLocal(value_handle));
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!result.IsEmpty()) return *value_handle;
@@ -11175,13 +11197,13 @@
Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
Handle<String> key = isolate->factory()->NumberToString(number);
LOG(isolate, ApiNamedPropertyAccess("load", *self, *key));
- CustomArguments args(isolate, data->data(), *self, *holder_handle);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments
+ args(isolate, data->data(), *self, *holder_handle);
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = call_fun(v8::Utils::ToLocal(key), info);
+ result = args.Call(call_fun, v8::Utils::ToLocal(key));
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (result.IsEmpty()) return isolate->heap()->undefined_value();
@@ -11242,14 +11264,14 @@
Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
Handle<String> key(isolate->factory()->NumberToString(number));
LOG(isolate, ApiNamedPropertyAccess("store", *self, *key));
- CustomArguments args(isolate, data->data(), *self, *holder_handle);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments
+ args(isolate, data->data(), *self, *holder_handle);
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- call_fun(v8::Utils::ToLocal(key),
- v8::Utils::ToLocal(value_handle),
- info);
+ args.Call(call_fun,
+ v8::Utils::ToLocal(key),
+ v8::Utils::ToLocal(value_handle));
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
return *value_handle;
@@ -12133,13 +12155,13 @@
v8::ToCData<v8::IndexedPropertyGetter>(interceptor->getter());
LOG(isolate,
ApiIndexedPropertyAccess("interceptor-indexed-get", this, index));
- CustomArguments args(isolate, interceptor->data(), receiver, this);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments
+ args(isolate, interceptor->data(), receiver, this);
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = getter(index, info);
+ result = args.Call(getter, index);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!result.IsEmpty()) {
@@ -12443,13 +12465,13 @@
v8::ToCData<v8::NamedPropertyGetter>(interceptor->getter());
LOG(isolate,
ApiNamedPropertyAccess("interceptor-named-get", *holder_handle, name));
- CustomArguments args(isolate, interceptor->data(), receiver, this);
- v8::AccessorInfo info(args.end());
+ PropertyCallbackArguments
+ args(isolate, interceptor->data(), receiver, this);
v8::Handle<v8::Value> result;
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- result = getter(v8::Utils::ToLocal(name_handle), info);
+ result = args.Call(getter, v8::Utils::ToLocal(name_handle));
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!result.IsEmpty()) {
diff --git a/src/objects.h b/src/objects.h
index d466f65..ac74162 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -254,6 +254,7 @@
// Indicates whether transitions can be added to a source map or not.
enum TransitionFlag {
INSERT_TRANSITION,
+ OMIT_TRANSITION_KEEP_REPRESENTATIONS,
OMIT_TRANSITION
};
@@ -2200,7 +2201,8 @@
MUST_USE_RESULT MaybeObject* ConvertDescriptorToField(
Name* name,
Object* new_value,
- PropertyAttributes attributes);
+ PropertyAttributes attributes,
+ TransitionFlag flag = OMIT_TRANSITION);
MUST_USE_RESULT MaybeObject* MigrateToMap(Map* new_map);
MUST_USE_RESULT MaybeObject* GeneralizeFieldRepresentation(
@@ -5377,9 +5379,8 @@
// Returns a non-deprecated version of the input. If the input was not
// deprecated, it is directly returned. Otherwise, the non-deprecated version
// is found by re-transitioning from the root of the transition tree using the
- // descriptor array of the map. New maps (and transitions) may be created if
- // no new (more general) version exists.
- static inline Handle<Map> CurrentMapForDeprecated(Handle<Map> map);
+ // descriptor array of the map. Returns NULL if no updated map is found.
+ Map* CurrentMapForDeprecated();
MUST_USE_RESULT MaybeObject* RawCopy(int instance_size);
MUST_USE_RESULT MaybeObject* CopyWithPreallocatedFieldDescriptors();
diff --git a/src/parser.cc b/src/parser.cc
index 86a486f..5eec342 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -794,7 +794,7 @@
}
-Handle<String> Parser::GetSymbol(bool* ok) {
+Handle<String> Parser::GetSymbol() {
int symbol_id = -1;
if (pre_parse_data() != NULL) {
symbol_id = pre_parse_data()->GetSymbolIdentifier();
@@ -1341,7 +1341,7 @@
// String
Expect(Token::STRING, CHECK_OK);
- Handle<String> symbol = GetSymbol(CHECK_OK);
+ Handle<String> symbol = GetSymbol();
// TODO(ES6): Request JS resource from environment...
@@ -3692,7 +3692,7 @@
case Token::STRING: {
Consume(Token::STRING);
- Handle<String> symbol = GetSymbol(CHECK_OK);
+ Handle<String> symbol = GetSymbol();
result = factory()->NewLiteral(symbol);
if (fni_ != NULL) fni_->PushLiteralName(symbol);
break;
@@ -4047,7 +4047,7 @@
if (is_keyword) {
name = isolate_->factory()->InternalizeUtf8String(Token::String(next));
} else {
- name = GetSymbol(CHECK_OK);
+ name = GetSymbol();
}
FunctionLiteral* value =
ParseFunctionLiteral(name,
@@ -4128,7 +4128,7 @@
}
case Token::STRING: {
Consume(Token::STRING);
- Handle<String> string = GetSymbol(CHECK_OK);
+ Handle<String> string = GetSymbol();
if (fni_ != NULL) fni_->PushLiteralName(string);
uint32_t index;
if (!string.is_null() && string->AsArrayIndex(&index)) {
@@ -4150,7 +4150,7 @@
default:
if (Token::IsKeyword(next)) {
Consume(next);
- Handle<String> string = GetSymbol(CHECK_OK);
+ Handle<String> string = GetSymbol();
key = factory()->NewLiteral(string);
} else {
// Unexpected token.
@@ -4823,7 +4823,7 @@
void Parser::ExpectContextualKeyword(const char* keyword, bool* ok) {
Expect(Token::IDENTIFIER, ok);
if (!*ok) return;
- Handle<String> symbol = GetSymbol(ok);
+ Handle<String> symbol = GetSymbol();
if (!*ok) return;
if (!symbol->IsUtf8EqualTo(CStrVector(keyword))) {
*ok = false;
@@ -4850,7 +4850,7 @@
(top_scope_->is_classic_mode() &&
(next == Token::FUTURE_STRICT_RESERVED_WORD ||
(next == Token::YIELD && !is_generator())))) {
- return GetSymbol(ok);
+ return GetSymbol();
} else {
ReportUnexpectedToken(next);
*ok = false;
@@ -4874,7 +4874,7 @@
*ok = false;
return Handle<String>();
}
- return GetSymbol(ok);
+ return GetSymbol();
}
@@ -4888,7 +4888,7 @@
*ok = false;
return Handle<String>();
}
- return GetSymbol(ok);
+ return GetSymbol();
}
diff --git a/src/parser.h b/src/parser.h
index 1defbf2..8a3ae92 100644
--- a/src/parser.h
+++ b/src/parser.h
@@ -767,7 +767,7 @@
}
}
- Handle<String> GetSymbol(bool* ok);
+ Handle<String> GetSymbol();
// Get odd-ball literals.
Literal* GetLiteralUndefined();
diff --git a/src/runtime.cc b/src/runtime.cc
index 849ddbf..61b3549 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -8991,7 +8991,7 @@
RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugTrace) {
NoHandleAllocation ha(isolate);
ASSERT(args.length() == 0);
- isolate->PrintStack();
+ isolate->PrintStack(stdout);
return isolate->heap()->undefined_value();
}
@@ -13099,7 +13099,7 @@
ASSERT(args.length() == 2);
OS::PrintError("abort: %s\n",
reinterpret_cast<char*>(args[0]) + args.smi_at(1));
- isolate->PrintStack();
+ isolate->PrintStack(stderr);
OS::Abort();
UNREACHABLE();
return NULL;
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 89dee10..4e3a906 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -1034,10 +1034,12 @@
Code* empty = isolate_->builtins()->builtin(Builtins::kIllegal);
for (int i = 0; i < kPrimaryTableSize; i++) {
primary_[i].key = heap()->empty_string();
+ primary_[i].map = NULL;
primary_[i].value = empty;
}
for (int j = 0; j < kSecondaryTableSize; j++) {
secondary_[j].key = heap()->empty_string();
+ secondary_[j].map = NULL;
secondary_[j].value = empty;
}
}
@@ -1104,13 +1106,13 @@
Handle<String> str = Handle<String>::cast(name);
LOG(isolate, ApiNamedPropertyAccess("store", recv, *name));
- CustomArguments custom_args(isolate, callback->data(), recv, recv);
- v8::AccessorInfo info(custom_args.end());
+ PropertyCallbackArguments
+ custom_args(isolate, callback->data(), recv, recv);
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate, setter_address);
- fun(v8::Utils::ToLocal(str), v8::Utils::ToLocal(value), info);
+ custom_args.Call(fun, v8::Utils::ToLocal(str), v8::Utils::ToLocal(value));
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
return *value;
@@ -1128,13 +1130,13 @@
* provide any value for the given name.
*/
RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorOnly) {
+ typedef PropertyCallbackArguments PCA;
+ static const int kArgsOffset = kAccessorInfoOffsetInInterceptorArgs;
Handle<Name> name_handle = args.at<Name>(0);
Handle<InterceptorInfo> interceptor_info = args.at<InterceptorInfo>(1);
- ASSERT(kAccessorInfoOffsetInInterceptorArgs == 2);
- ASSERT(args[2]->IsJSObject()); // Receiver.
- ASSERT(args[3]->IsJSObject()); // Holder.
- ASSERT(args[5]->IsSmi()); // Isolate.
- ASSERT(args.length() == 6);
+ ASSERT(kArgsOffset == 2);
+ // No ReturnValue in interceptors.
+ ASSERT(args.length() == kArgsOffset + PCA::kArgsLength - 1);
// TODO(rossberg): Support symbols in the API.
if (name_handle->IsSymbol())
@@ -1146,16 +1148,22 @@
FUNCTION_CAST<v8::NamedPropertyGetter>(getter_address);
ASSERT(getter != NULL);
+ Handle<JSObject> receiver =
+ args.at<JSObject>(kArgsOffset - PCA::kThisIndex);
+ Handle<JSObject> holder =
+ args.at<JSObject>(kArgsOffset - PCA::kHolderIndex);
+ PropertyCallbackArguments callback_args(isolate,
+ interceptor_info->data(),
+ *receiver,
+ *holder);
{
// Use the interceptor getter.
- v8::AccessorInfo info(args.arguments() -
- kAccessorInfoOffsetInInterceptorArgs);
HandleScope scope(isolate);
v8::Handle<v8::Value> r;
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- r = getter(v8::Utils::ToLocal(name), info);
+ r = callback_args.Call(getter, v8::Utils::ToLocal(name));
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!r.IsEmpty()) {
@@ -1189,12 +1197,17 @@
static MaybeObject* LoadWithInterceptor(Arguments* args,
PropertyAttributes* attrs) {
+ typedef PropertyCallbackArguments PCA;
+ static const int kArgsOffset = kAccessorInfoOffsetInInterceptorArgs;
Handle<Name> name_handle = args->at<Name>(0);
Handle<InterceptorInfo> interceptor_info = args->at<InterceptorInfo>(1);
- ASSERT(kAccessorInfoOffsetInInterceptorArgs == 2);
- Handle<JSObject> receiver_handle = args->at<JSObject>(2);
- Handle<JSObject> holder_handle = args->at<JSObject>(3);
- ASSERT(args->length() == 6);
+ ASSERT(kArgsOffset == 2);
+ // No ReturnValue in interceptors.
+ ASSERT(args->length() == kArgsOffset + PCA::kArgsLength - 1);
+ Handle<JSObject> receiver_handle =
+ args->at<JSObject>(kArgsOffset - PCA::kThisIndex);
+ Handle<JSObject> holder_handle =
+ args->at<JSObject>(kArgsOffset - PCA::kHolderIndex);
Isolate* isolate = receiver_handle->GetIsolate();
@@ -1209,16 +1222,18 @@
FUNCTION_CAST<v8::NamedPropertyGetter>(getter_address);
ASSERT(getter != NULL);
+ PropertyCallbackArguments callback_args(isolate,
+ interceptor_info->data(),
+ *receiver_handle,
+ *holder_handle);
{
// Use the interceptor getter.
- v8::AccessorInfo info(args->arguments() -
- kAccessorInfoOffsetInInterceptorArgs);
HandleScope scope(isolate);
v8::Handle<v8::Value> r;
{
// Leaving JavaScript.
VMState<EXTERNAL> state(isolate);
- r = getter(v8::Utils::ToLocal(name), info);
+ r = callback_args.Call(getter, v8::Utils::ToLocal(name));
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!r.IsEmpty()) {
diff --git a/src/type-info.cc b/src/type-info.cc
index 21dcf74..53866c1 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -105,6 +105,8 @@
Code::ExtractTypeFromFlags(code->flags()) == Code::NORMAL;
if (!preliminary_checks) return false;
Map* map = code->FindFirstMap();
+ if (map == NULL) return false;
+ map = map->CurrentMapForDeprecated();
return map != NULL && !CanRetainOtherContext(map, *native_context_);
}
return false;
@@ -136,6 +138,8 @@
Code::ExtractTypeFromFlags(code->flags()) == Code::NORMAL;
if (!preliminary_checks) return false;
Map* map = code->FindFirstMap();
+ if (map == NULL) return false;
+ map = map->CurrentMapForDeprecated();
return map != NULL && !CanRetainOtherContext(map, *native_context_);
}
return false;
@@ -192,14 +196,12 @@
Handle<Object> map_or_code = GetInfo(expr->PropertyFeedbackId());
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
- Handle<Map> first_map(code->FindFirstMap());
- ASSERT(!first_map.is_null());
- first_map = Map::CurrentMapForDeprecated(first_map);
- return CanRetainOtherContext(*first_map, *native_context_)
+ Map* map = code->FindFirstMap()->CurrentMapForDeprecated();
+ return map == NULL || CanRetainOtherContext(map, *native_context_)
? Handle<Map>::null()
- : first_map;
+ : Handle<Map>(map);
}
- return Map::CurrentMapForDeprecated(Handle<Map>::cast(map_or_code));
+ return Handle<Map>::cast(map_or_code);
}
@@ -209,14 +211,12 @@
Handle<Object> map_or_code = GetInfo(ast_id);
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
- Handle<Map> first_map(code->FindFirstMap());
- ASSERT(!first_map.is_null());
- first_map = Map::CurrentMapForDeprecated(first_map);
- return CanRetainOtherContext(*first_map, *native_context_)
+ Map* map = code->FindFirstMap()->CurrentMapForDeprecated();
+ return map == NULL || CanRetainOtherContext(map, *native_context_)
? Handle<Map>::null()
- : first_map;
+ : Handle<Map>(map);
}
- return Map::CurrentMapForDeprecated(Handle<Map>::cast(map_or_code));
+ return Handle<Map>::cast(map_or_code);
}
@@ -224,10 +224,15 @@
TypeFeedbackId id) {
Handle<Object> maybe_code = GetInfo(id);
if (maybe_code->IsCode()) {
- Map* first_map = Handle<Code>::cast(maybe_code)->FindFirstMap();
- if (first_map != NULL) {
- return Map::CurrentMapForDeprecated(Handle<Map>(first_map));
- }
+ Map* map = Handle<Code>::cast(maybe_code)->FindFirstMap();
+ if (map == NULL) return Handle<Map>();
+ map = map->CurrentMapForDeprecated();
+ return map == NULL || CanRetainOtherContext(map, *native_context_)
+ ? Handle<Map>()
+ : Handle<Map>(map);
+ } else if (maybe_code->IsMap()) {
+ ASSERT(!Handle<Map>::cast(maybe_code)->is_deprecated());
+ return Handle<Map>::cast(maybe_code);
}
return Handle<Map>();
}
@@ -351,8 +356,7 @@
Handle<Map> TypeFeedbackOracle::GetObjectLiteralStoreMap(
ObjectLiteral::Property* prop) {
ASSERT(ObjectLiteralStoreIsMonomorphic(prop));
- return Map::CurrentMapForDeprecated(
- Handle<Map>::cast(GetInfo(prop->key()->LiteralFeedbackId())));
+ return Handle<Map>::cast(GetInfo(prop->key()->LiteralFeedbackId()));
}
@@ -431,12 +435,10 @@
if (state != CompareIC::KNOWN_OBJECT) {
return Handle<Map>::null();
}
- Handle<Map> first_map(code->FindFirstMap());
- ASSERT(!first_map.is_null());
- first_map = Map::CurrentMapForDeprecated(first_map);
- return CanRetainOtherContext(*first_map, *native_context_)
+ Map* map = code->FindFirstMap()->CurrentMapForDeprecated();
+ return map == NULL || CanRetainOtherContext(map, *native_context_)
? Handle<Map>::null()
- : first_map;
+ : Handle<Map>(map);
}
@@ -723,7 +725,8 @@
SetInfo(ast_id, static_cast<Object*>(target));
} else if (!CanRetainOtherContext(Map::cast(map),
*native_context_)) {
- SetInfo(ast_id, map);
+ Map* feedback = Map::cast(map)->CurrentMapForDeprecated();
+ if (feedback != NULL) SetInfo(ast_id, feedback);
}
}
} else {
diff --git a/src/v8.cc b/src/v8.cc
index 274128e..e21c815 100644
--- a/src/v8.cc
+++ b/src/v8.cc
@@ -281,6 +281,7 @@
LOperand::SetUpCaches();
SetUpJSCallerSavedCodeData();
ExternalReference::SetUp();
+ Bootstrapper::InitializeOncePerProcess();
}
void V8::InitializeOncePerProcess() {
diff --git a/src/version.cc b/src/version.cc
index 073bd26..cec040a 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,8 +34,8 @@
// system so their names cannot be changed without changing the scripts.
#define MAJOR_VERSION 3
#define MINOR_VERSION 19
-#define BUILD_NUMBER 2
-#define PATCH_LEVEL 1
+#define BUILD_NUMBER 3
+#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 691894c..31796b1 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -677,8 +677,13 @@
}
-void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
+void MacroAssembler::PrepareCallApiFunction(int arg_stack_space,
+ bool returns_handle) {
#if defined(_WIN64) && !defined(__MINGW64__)
+ if (!returns_handle) {
+ EnterApiExitFrame(arg_stack_space);
+ return;
+ }
// We need to prepare a slot for result handle on stack and put
// a pointer to it into 1st arg register.
EnterApiExitFrame(arg_stack_space + 1);
@@ -692,8 +697,9 @@
void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
- int stack_space) {
- Label empty_result;
+ int stack_space,
+ bool returns_handle,
+ int return_value_offset) {
Label prologue;
Label promote_scheduled_exception;
Label delete_allocated_handles;
@@ -745,15 +751,25 @@
PopSafepointRegisters();
}
+ // Can skip the result check for new-style callbacks
+ // TODO(dcarney): may need to pass this information down
+ // as some function_addresses might not have been registered
+ if (returns_handle) {
+ Label empty_result;
#if defined(_WIN64) && !defined(__MINGW64__)
- // rax keeps a pointer to v8::Handle, unpack it.
- movq(rax, Operand(rax, 0));
+ // rax keeps a pointer to v8::Handle, unpack it.
+ movq(rax, Operand(rax, 0));
#endif
- // Check if the result handle holds 0.
- testq(rax, rax);
- j(zero, &empty_result);
- // It was non-zero. Dereference to get the result value.
- movq(rax, Operand(rax, 0));
+ // Check if the result handle holds 0.
+ testq(rax, rax);
+ j(zero, &empty_result);
+ // It was non-zero. Dereference to get the result value.
+ movq(rax, Operand(rax, 0));
+ jmp(&prologue);
+ bind(&empty_result);
+ }
+ // Load the value from ReturnValue
+ movq(rax, Operand(rbp, return_value_offset * kPointerSize));
bind(&prologue);
// No more valid handles (the result handle was the last one). Restore
@@ -807,11 +823,6 @@
LeaveApiExitFrame();
ret(stack_space * kPointerSize);
- bind(&empty_result);
- // It was zero; the result is undefined.
- LoadRoot(rax, Heap::kUndefinedValueRootIndex);
- jmp(&prologue);
-
bind(&promote_scheduled_exception);
TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 76941ff..6c8d5ff 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -1224,13 +1224,16 @@
// rcx (rcx must be preserverd until CallApiFunctionAndReturn). Saves
// context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
// inside the exit frame (not GCed) accessible via StackSpaceOperand.
- void PrepareCallApiFunction(int arg_stack_space);
+ void PrepareCallApiFunction(int arg_stack_space, bool returns_handle);
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Clobbers r14, r15, rbx and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
- void CallApiFunctionAndReturn(Address function_address, int stack_space);
+ void CallApiFunctionAndReturn(Address function_address,
+ int stack_space,
+ bool returns_handle,
+ int return_value_offset_from_rbp);
// Before calling a C-function from generated code, align arguments on stack.
// After aligning the frame, arguments must be stored in esp[0], esp[4],
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index f0b362b..148f65e 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -398,7 +398,7 @@
// Number of pointers to be reserved on stack for fast API call.
-static const int kFastApiCallArguments = 4;
+static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;
// Reserves space for the extra arguments to API function in the
@@ -449,10 +449,12 @@
// (first fast api call extra argument)
// -- rsp[24] : api call data
// -- rsp[32] : isolate
- // -- rsp[40] : last argument
+ // -- rsp[40] : ReturnValue
+ //
+ // -- rsp[48] : last argument
// -- ...
- // -- rsp[(argc + 4) * 8] : first argument
- // -- rsp[(argc + 5) * 8] : receiver
+ // -- rsp[(argc + 5) * 8] : first argument
+ // -- rsp[(argc + 6) * 8] : receiver
// -----------------------------------
// Get the function and setup the context.
Handle<JSFunction> function = optimization.constant_function();
@@ -473,15 +475,23 @@
__ movq(kScratchRegister,
ExternalReference::isolate_address(masm->isolate()));
__ movq(Operand(rsp, 4 * kPointerSize), kScratchRegister);
+ __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
+ __ movq(Operand(rsp, 5 * kPointerSize), kScratchRegister);
// Prepare arguments.
- __ lea(rbx, Operand(rsp, 4 * kPointerSize));
+ STATIC_ASSERT(kFastApiCallArguments == 5);
+ __ lea(rbx, Operand(rsp, kFastApiCallArguments * kPointerSize));
+
+ // Function address is a foreign pointer outside V8's heap.
+ Address function_address = v8::ToCData<Address>(api_call_info->callback());
+ bool returns_handle =
+ !CallbackTable::ReturnsVoid(masm->isolate(), function_address);
#if defined(__MINGW64__)
Register arguments_arg = rcx;
#elif defined(_WIN64)
// Win64 uses first register--rcx--for returned value.
- Register arguments_arg = rdx;
+ Register arguments_arg = returns_handle ? rdx : rcx;
#else
Register arguments_arg = rdi;
#endif
@@ -490,7 +500,7 @@
// it's not controlled by GC.
const int kApiStackSpace = 4;
- __ PrepareCallApiFunction(kApiStackSpace);
+ __ PrepareCallApiFunction(kApiStackSpace, returns_handle);
__ movq(StackSpaceOperand(0), rbx); // v8::Arguments::implicit_args_.
__ addq(rbx, Immediate(argc * kPointerSize));
@@ -502,10 +512,10 @@
// v8::InvocationCallback's argument.
__ lea(arguments_arg, StackSpaceOperand(0));
- // Function address is a foreign pointer outside V8's heap.
- Address function_address = v8::ToCData<Address>(api_call_info->callback());
__ CallApiFunctionAndReturn(function_address,
- argc + kFastApiCallArguments + 1);
+ argc + kFastApiCallArguments + 1,
+ returns_handle,
+ kFastApiCallArguments + 1);
}
@@ -1288,18 +1298,24 @@
} else {
__ Push(Handle<Object>(callback->data(), isolate()));
}
- __ PushAddress(ExternalReference::isolate_address(isolate())); // isolate
+ __ PushAddress(ExternalReference::isolate_address(isolate()));
+ __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
+ __ push(kScratchRegister); // return value
__ push(name()); // name
// Save a pointer to where we pushed the arguments pointer. This will be
// passed as the const ExecutableAccessorInfo& to the C++ callback.
+ Address getter_address = v8::ToCData<Address>(callback->getter());
+ bool returns_handle =
+ !CallbackTable::ReturnsVoid(isolate(), getter_address);
+
#if defined(__MINGW64__)
Register accessor_info_arg = rdx;
Register name_arg = rcx;
#elif defined(_WIN64)
// Win64 uses first register--rcx--for returned value.
- Register accessor_info_arg = r8;
- Register name_arg = rdx;
+ Register accessor_info_arg = returns_handle ? r8 : rdx;
+ Register name_arg = returns_handle ? rdx : rcx;
#else
Register accessor_info_arg = rsi;
Register name_arg = rdi;
@@ -1309,14 +1325,15 @@
__ movq(name_arg, rsp);
__ push(scratch2()); // Restore return address.
- // 4 elements array for v8::Arguments::values_ and handler for name.
- const int kStackSpace = 5;
+ // v8::Arguments::values_ and handler for name.
+ const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;
// Allocate v8::AccessorInfo in non-GCed stack space.
const int kArgStackSpace = 1;
- __ PrepareCallApiFunction(kArgStackSpace);
- __ lea(rax, Operand(name_arg, 4 * kPointerSize));
+ __ PrepareCallApiFunction(kArgStackSpace, returns_handle);
+ STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 5);
+ __ lea(rax, Operand(name_arg, 5 * kPointerSize));
// v8::AccessorInfo::args_.
__ movq(StackSpaceOperand(0), rax);
@@ -1325,8 +1342,10 @@
// could be used to pass arguments.
__ lea(accessor_info_arg, StackSpaceOperand(0));
- Address getter_address = v8::ToCData<Address>(callback->getter());
- __ CallApiFunctionAndReturn(getter_address, kStackSpace);
+ __ CallApiFunctionAndReturn(getter_address,
+ kStackSpace,
+ returns_handle,
+ 3);
}
@@ -2272,7 +2291,7 @@
name, depth, &miss);
// Move the return address on top of the stack.
- __ movq(rax, Operand(rsp, 4 * kPointerSize));
+ __ movq(rax, Operand(rsp, kFastApiCallArguments * kPointerSize));
__ movq(Operand(rsp, 0 * kPointerSize), rax);
GenerateFastApiCall(masm(), optimization, argc);