Version 3.23.13

Increase precision for base conversion for large integers (issue 3025).

Flatten cons string for single character substrings (Chromium issue 323041).

Performance and stability improvements on all platforms.

git-svn-id: http://v8.googlecode.com/svn/trunk@18108 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/ChangeLog b/ChangeLog
index aee6b50..7bbca0c 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,13 @@
+2013-11-27: Version 3.23.13
+
+        Increase precision for base conversion for large integers (issue 3025).
+
+        Flatten cons string for single character substrings (Chromium issue
+        323041).
+
+        Performance and stability improvements on all platforms.
+
+
 2013-11-26: Version 3.23.12
 
         Performance and stability improvements on all platforms.
diff --git a/Makefile b/Makefile
index 64bf7bc..295ef9a 100644
--- a/Makefile
+++ b/Makefile
@@ -94,7 +94,7 @@
 endif
 # optdebug=on
 ifeq ($(optdebug), on)
-  GYPFLAGS += -Dv8_optimized_debug=1
+  GYPFLAGS += -Dv8_optimized_debug=2
 endif
 # debuggersupport=off
 ifeq ($(debuggersupport), off)
diff --git a/include/v8.h b/include/v8.h
index a15edf2..2d02562 100644
--- a/include/v8.h
+++ b/include/v8.h
@@ -123,8 +123,10 @@
 template <class T> class Local;
 template <class T> class Eternal;
 template<class T> class NonCopyablePersistentTraits;
+template<class T> class PersistentBase;
 template<class T,
          class M = NonCopyablePersistentTraits<T> > class Persistent;
+template<class T> class UniquePersistent;
 template<class T, class P> class WeakCallbackObject;
 class FunctionTemplate;
 class ObjectTemplate;
@@ -257,17 +259,17 @@
    * The handles' references are not checked.
    */
   template <class S> V8_INLINE bool operator==(const Handle<S>& that) const {
-    internal::Object** a = reinterpret_cast<internal::Object**>(**this);
-    internal::Object** b = reinterpret_cast<internal::Object**>(*that);
+    internal::Object** a = reinterpret_cast<internal::Object**>(this->val_);
+    internal::Object** b = reinterpret_cast<internal::Object**>(that.val_);
     if (a == 0) return b == 0;
     if (b == 0) return false;
     return *a == *b;
   }
 
   template <class S> V8_INLINE bool operator==(
-      const Persistent<S>& that) const {
-    internal::Object** a = reinterpret_cast<internal::Object**>(**this);
-    internal::Object** b = reinterpret_cast<internal::Object**>(*that);
+      const PersistentBase<S>& that) const {
+    internal::Object** a = reinterpret_cast<internal::Object**>(this->val_);
+    internal::Object** b = reinterpret_cast<internal::Object**>(that.val_);
     if (a == 0) return b == 0;
     if (b == 0) return false;
     return *a == *b;
@@ -304,7 +306,8 @@
   V8_INLINE static Handle<T> New(Isolate* isolate, Handle<T> that) {
     return New(isolate, that.val_);
   }
-  V8_INLINE static Handle<T> New(Isolate* isolate, const Persistent<T>& that) {
+  V8_INLINE static Handle<T> New(Isolate* isolate,
+                                 const PersistentBase<T>& that) {
     return New(isolate, that.val_);
   }
 
@@ -320,6 +323,8 @@
  private:
   friend class Utils;
   template<class F, class M> friend class Persistent;
+  template<class F> friend class PersistentBase;
+  template<class F> friend class Handle;
   template<class F> friend class Local;
   template<class F> friend class FunctionCallbackInfo;
   template<class F> friend class PropertyCallbackInfo;
@@ -383,9 +388,8 @@
    * the original handle is destroyed/disposed.
    */
   V8_INLINE static Local<T> New(Isolate* isolate, Handle<T> that);
-  template<class M>
   V8_INLINE static Local<T> New(Isolate* isolate,
-                                const Persistent<T, M>& that);
+                                const PersistentBase<T>& that);
 
 #ifndef V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
 
@@ -396,8 +400,10 @@
  private:
   friend class Utils;
   template<class F> friend class Eternal;
+  template<class F> friend class PersistentBase;
   template<class F, class M> friend class Persistent;
   template<class F> friend class Handle;
+  template<class F> friend class Local;
   template<class F> friend class FunctionCallbackInfo;
   template<class F> friend class PropertyCallbackInfo;
   friend class String;
@@ -462,116 +468,21 @@
 
 
 /**
- * Default traits for Persistent. This class does not allow
- * use of the copy constructor or assignment operator.
- * At present kResetInDestructor is not set, but that will change in a future
- * version.
- */
-template<class T>
-class NonCopyablePersistentTraits {
- public:
-  typedef Persistent<T, NonCopyablePersistentTraits<T> > NonCopyablePersistent;
-  static const bool kResetInDestructor = false;
-  template<class S, class M>
-  V8_INLINE static void Copy(const Persistent<S, M>& source,
-                             NonCopyablePersistent* dest) {
-    Uncompilable<Object>();
-  }
-  // TODO(dcarney): come up with a good compile error here.
-  template<class O> V8_INLINE static void Uncompilable() {
-    TYPE_CHECK(O, Primitive);
-  }
-};
-
-
-/**
- * Helper class traits to allow copying and assignment of Persistent.
- * This will clone the contents of storage cell, but not any of the flags, etc.
- */
-template<class T>
-struct CopyablePersistentTraits {
-  typedef Persistent<T, CopyablePersistentTraits<T> > CopyablePersistent;
-  static const bool kResetInDestructor = true;
-  template<class S, class M>
-  static V8_INLINE void Copy(const Persistent<S, M>& source,
-                             CopyablePersistent* dest) {
-    // do nothing, just allow copy
-  }
-};
-
-
-/**
  * An object reference that is independent of any handle scope.  Where
  * a Local handle only lives as long as the HandleScope in which it was
- * allocated, a Persistent handle remains valid until it is explicitly
+ * allocated, a PersistentBase handle remains valid until it is explicitly
  * disposed.
  *
  * A persistent handle contains a reference to a storage cell within
  * the v8 engine which holds an object value and which is updated by
  * the garbage collector whenever the object is moved.  A new storage
- * cell can be created using the constructor or Persistent::Reset and
- * existing handles can be disposed using Persistent::Reset.
+ * cell can be created using the constructor or PersistentBase::Reset and
+ * existing handles can be disposed using PersistentBase::Reset.
  *
- * Copy, assignment and destructor bevavior is controlled by the traits
- * class M.
  */
-template <class T, class M> class Persistent {
+template <class T> class PersistentBase {
  public:
   /**
-   * A Persistent with no storage cell.
-   */
-  V8_INLINE Persistent() : val_(0) { }
-  /**
-   * Construct a Persistent from a Handle.
-   * When the Handle is non-empty, a new storage cell is created
-   * pointing to the same object, and no flags are set.
-   */
-  template <class S> V8_INLINE Persistent(Isolate* isolate, Handle<S> that)
-      : val_(New(isolate, *that)) {
-    TYPE_CHECK(T, S);
-  }
-  /**
-   * Construct a Persistent from a Persistent.
-   * When the Persistent is non-empty, a new storage cell is created
-   * pointing to the same object, and no flags are set.
-   */
-  template <class S, class M2>
-  V8_INLINE Persistent(Isolate* isolate, const Persistent<S, M2>& that)
-    : val_(New(isolate, *that)) {
-    TYPE_CHECK(T, S);
-  }
-  /**
-   * The copy constructors and assignment operator create a Persistent
-   * exactly as the Persistent constructor, but the Copy function from the
-   * traits class is called, allowing the setting of flags based on the
-   * copied Persistent.
-   */
-  V8_INLINE Persistent(const Persistent& that) : val_(0) {
-    Copy(that);
-  }
-  template <class S, class M2>
-  V8_INLINE Persistent(const Persistent<S, M2>& that) : val_(0) {
-    Copy(that);
-  }
-  V8_INLINE Persistent& operator=(const Persistent& that) { // NOLINT
-    Copy(that);
-    return *this;
-  }
-  template <class S, class M2>
-  V8_INLINE Persistent& operator=(const Persistent<S, M2>& that) { // NOLINT
-    Copy(that);
-    return *this;
-  }
-  /**
-   * The destructor will dispose the Persistent based on the
-   * kResetInDestructor flags in the traits class.  Since not calling dispose
-   * can result in a memory leak, it is recommended to always set this flag.
-   */
-  V8_INLINE ~Persistent() {
-    if (M::kResetInDestructor) Reset();
-  }
-
-  /**
    * If non-empty, destroy the underlying storage cell
    * IsEmpty() will return true after this call.
    */
@@ -582,53 +493,35 @@
    */
   template <class S>
   V8_INLINE void Reset(Isolate* isolate, const Handle<S>& other);
+
   /**
    * If non-empty, destroy the underlying storage cell
    * and create a new one with the contents of other if other is non empty
    */
-  template <class S, class M2>
-  V8_INLINE void Reset(Isolate* isolate, const Persistent<S, M2>& other);
-
-  V8_DEPRECATED("Use Reset instead",
-                V8_INLINE void Dispose()) { Reset(); }
+  template <class S>
+  V8_INLINE void Reset(Isolate* isolate, const PersistentBase<S>& other);
 
   V8_INLINE bool IsEmpty() const { return val_ == 0; }
 
-  // TODO(dcarney): this is pretty useless, fix or remove
   template <class S>
-  V8_INLINE static Persistent<T>& Cast(Persistent<S>& that) { // NOLINT
-#ifdef V8_ENABLE_CHECKS
-    // If we're going to perform the type check then we have to check
-    // that the handle isn't empty before doing the checked cast.
-    if (!that.IsEmpty()) T::Cast(*that);
-#endif
-    return reinterpret_cast<Persistent<T>&>(that);
-  }
-
-  // TODO(dcarney): this is pretty useless, fix or remove
-  template <class S> V8_INLINE Persistent<S>& As() { // NOLINT
-    return Persistent<S>::Cast(*this);
-  }
-
-  template <class S, class M2>
-  V8_INLINE bool operator==(const Persistent<S, M2>& that) const {
-    internal::Object** a = reinterpret_cast<internal::Object**>(**this);
-    internal::Object** b = reinterpret_cast<internal::Object**>(*that);
+  V8_INLINE bool operator==(const PersistentBase<S>& that) const {
+    internal::Object** a = reinterpret_cast<internal::Object**>(this->val_);
+    internal::Object** b = reinterpret_cast<internal::Object**>(that.val_);
     if (a == 0) return b == 0;
     if (b == 0) return false;
     return *a == *b;
   }
 
   template <class S> V8_INLINE bool operator==(const Handle<S>& that) const {
-    internal::Object** a = reinterpret_cast<internal::Object**>(**this);
-    internal::Object** b = reinterpret_cast<internal::Object**>(*that);
+    internal::Object** a = reinterpret_cast<internal::Object**>(this->val_);
+    internal::Object** b = reinterpret_cast<internal::Object**>(that.val_);
     if (a == 0) return b == 0;
     if (b == 0) return false;
     return *a == *b;
   }
 
-  template <class S, class M2>
-  V8_INLINE bool operator!=(const Persistent<S, M2>& that) const {
+  template <class S>
+  V8_INLINE bool operator!=(const PersistentBase<S>& that) const {
     return !operator==(that);
   }
 
@@ -646,20 +539,6 @@
       P* parameter,
       typename WeakCallbackData<S, P>::Callback callback);
 
-  template<typename S, typename P>
-  V8_DEPRECATED(
-      "Use SetWeak instead",
-      V8_INLINE void MakeWeak(
-          P* parameter,
-          typename WeakReferenceCallbacks<S, P>::Revivable callback));
-
-  template<typename P>
-  V8_DEPRECATED(
-      "Use SetWeak instead",
-      V8_INLINE void MakeWeak(
-          P* parameter,
-          typename WeakReferenceCallbacks<T, P>::Revivable callback));
-
   V8_INLINE void ClearWeak();
 
   /**
@@ -700,20 +579,175 @@
    */
   V8_INLINE uint16_t WrapperClassId() const;
 
+ private:
+  friend class Isolate;
+  friend class Utils;
+  template<class F> friend class Handle;
+  template<class F> friend class Local;
+  template<class F1, class F2> friend class Persistent;
+  template<class F> friend class UniquePersistent;
+  template<class F> friend class PersistentBase;
+  template<class F> friend class ReturnValue;
+
+  explicit V8_INLINE PersistentBase(T* val) : val_(val) {}
+  PersistentBase(PersistentBase& other); // NOLINT
+  void operator=(PersistentBase&);
+  V8_INLINE static T* New(Isolate* isolate, T* that);
+
+  T* val_;
+};
+
+
+/**
+ * Default traits for Persistent. This class does not allow
+ * use of the copy constructor or assignment operator.
+ * At present kResetInDestructor is not set, but that will change in a future
+ * version.
+ */
+template<class T>
+class NonCopyablePersistentTraits {
+ public:
+  typedef Persistent<T, NonCopyablePersistentTraits<T> > NonCopyablePersistent;
+  static const bool kResetInDestructor = false;
+  template<class S, class M>
+  V8_INLINE static void Copy(const Persistent<S, M>& source,
+                             NonCopyablePersistent* dest) {
+    Uncompilable<Object>();
+  }
+  // TODO(dcarney): come up with a good compile error here.
+  template<class O> V8_INLINE static void Uncompilable() {
+    TYPE_CHECK(O, Primitive);
+  }
+};
+
+
+/**
+ * Helper class traits to allow copying and assignment of Persistent.
+ * This will clone the contents of storage cell, but not any of the flags, etc.
+ */
+template<class T>
+struct CopyablePersistentTraits {
+  typedef Persistent<T, CopyablePersistentTraits<T> > CopyablePersistent;
+  static const bool kResetInDestructor = true;
+  template<class S, class M>
+  static V8_INLINE void Copy(const Persistent<S, M>& source,
+                             CopyablePersistent* dest) {
+    // do nothing, just allow copy
+  }
+};
+
+
+/**
+ * A PersistentBase which allows copy and assignment.
+ *
+ * Copy, assignment and destructor behavior is controlled by the traits
+ * class M.
+ *
+ * Note: Persistent class hierarchy is subject to future changes.
+ */
+template <class T, class M> class Persistent : public PersistentBase<T> {
+ public:
+  /**
+   * A Persistent with no storage cell.
+   */
+  V8_INLINE Persistent() : PersistentBase<T>(0) { }
+  /**
+   * Construct a Persistent from a Handle.
+   * When the Handle is non-empty, a new storage cell is created
+   * pointing to the same object, and no flags are set.
+   */
+  template <class S> V8_INLINE Persistent(Isolate* isolate, Handle<S> that)
+      : PersistentBase<T>(PersistentBase<T>::New(isolate, *that)) {
+    TYPE_CHECK(T, S);
+  }
+  /**
+   * Construct a Persistent from a Persistent.
+   * When the Persistent is non-empty, a new storage cell is created
+   * pointing to the same object, and no flags are set.
+   */
+  template <class S, class M2>
+  V8_INLINE Persistent(Isolate* isolate, const Persistent<S, M2>& that)
+    : PersistentBase<T>(PersistentBase<T>::New(isolate, *that)) {
+    TYPE_CHECK(T, S);
+  }
+  /**
+   * The copy constructors and assignment operator create a Persistent
+   * exactly as the Persistent constructor, but the Copy function from the
+   * traits class is called, allowing the setting of flags based on the
+   * copied Persistent.
+   */
+  V8_INLINE Persistent(const Persistent& that) : PersistentBase<T>(0) {
+    Copy(that);
+  }
+  template <class S, class M2>
+  V8_INLINE Persistent(const Persistent<S, M2>& that) : PersistentBase<T>(0) {
+    Copy(that);
+  }
+  V8_INLINE Persistent& operator=(const Persistent& that) { // NOLINT
+    Copy(that);
+    return *this;
+  }
+  template <class S, class M2>
+  V8_INLINE Persistent& operator=(const Persistent<S, M2>& that) { // NOLINT
+    Copy(that);
+    return *this;
+  }
+  /**
+   * The destructor will dispose the Persistent based on the
+   * kResetInDestructor flags in the traits class.  Since not calling dispose
+   * can result in a memory leak, it is recommended to always set this flag.
+   */
+  V8_INLINE ~Persistent() {
+    if (M::kResetInDestructor) this->Reset();
+  }
+
+  V8_DEPRECATED("Use Reset instead",
+                V8_INLINE void Dispose()) { this->Reset(); }
+
+  // TODO(dcarney): this is pretty useless, fix or remove
+  template <class S>
+  V8_INLINE static Persistent<T>& Cast(Persistent<S>& that) { // NOLINT
+#ifdef V8_ENABLE_CHECKS
+    // If we're going to perform the type check then we have to check
+    // that the handle isn't empty before doing the checked cast.
+    if (!that.IsEmpty()) T::Cast(*that);
+#endif
+    return reinterpret_cast<Persistent<T>&>(that);
+  }
+
+  // TODO(dcarney): this is pretty useless, fix or remove
+  template <class S> V8_INLINE Persistent<S>& As() { // NOLINT
+    return Persistent<S>::Cast(*this);
+  }
+
+  template<typename S, typename P>
+  V8_DEPRECATED(
+      "Use SetWeak instead",
+      V8_INLINE void MakeWeak(
+          P* parameter,
+          typename WeakReferenceCallbacks<S, P>::Revivable callback));
+
+  template<typename P>
+  V8_DEPRECATED(
+      "Use SetWeak instead",
+      V8_INLINE void MakeWeak(
+          P* parameter,
+          typename WeakReferenceCallbacks<T, P>::Revivable callback));
+
   V8_DEPRECATED("This will be removed",
                 V8_INLINE T* ClearAndLeak());
 
   V8_DEPRECATED("This will be removed",
-                V8_INLINE void Clear()) { val_ = 0; }
+                V8_INLINE void Clear()) { this->val_ = 0; }
 
   // TODO(dcarney): remove
 #ifndef V8_ALLOW_ACCESS_TO_RAW_HANDLE_CONSTRUCTOR
 
  private:
 #endif
-  template <class S> V8_INLINE Persistent(S* that) : val_(that) { }
+  template <class S> V8_INLINE Persistent(S* that) : PersistentBase<T>(that) { }
 
-  V8_INLINE T* operator*() const { return val_; }
+  V8_INLINE T* operator*() const { return this->val_; }
 
  private:
   friend class Isolate;
@@ -723,13 +757,81 @@
   template<class F1, class F2> friend class Persistent;
   template<class F> friend class ReturnValue;
 
-  V8_INLINE static T* New(Isolate* isolate, T* that);
   template<class S, class M2>
   V8_INLINE void Copy(const Persistent<S, M2>& that);
-
-  T* val_;
 };
 
+
+/**
+ * A PersistentBase which has move semantics.
+ *
+ * Note: Persistent class hierarchy is subject to future changes.
+ */
+template<class T>
+class UniquePersistent : public PersistentBase<T> {
+  struct RValue {
+    V8_INLINE explicit RValue(UniquePersistent* object) : object(object) {}
+    UniquePersistent* object;
+  };
+
+ public:
+  /**
+   * A UniquePersistent with no storage cell.
+   */
+  V8_INLINE UniquePersistent() : PersistentBase<T>(0) { }
+  /**
+   * Construct a UniquePersistent from a Handle.
+   * When the Handle is non-empty, a new storage cell is created
+   * pointing to the same object, and no flags are set.
+   */
+  template <class S>
+  V8_INLINE UniquePersistent(Isolate* isolate, Handle<S> that)
+      : PersistentBase<T>(PersistentBase<T>::New(isolate, *that)) {
+    TYPE_CHECK(T, S);
+  }
+  /**
+   * Construct a UniquePersistent from a PersistentBase.
+   * When the Persistent is non-empty, a new storage cell is created
+   * pointing to the same object, and no flags are set.
+   */
+  template <class S>
+  V8_INLINE UniquePersistent(Isolate* isolate, const PersistentBase<S>& that)
+    : PersistentBase<T>(PersistentBase<T>::New(isolate, that.val_)) {
+    TYPE_CHECK(T, S);
+  }
+  /**
+   * Move constructor.
+   */
+  V8_INLINE UniquePersistent(RValue rvalue)
+    : PersistentBase<T>(rvalue.object->val_) {
+    rvalue.object->val_ = 0;
+  }
+  V8_INLINE ~UniquePersistent() { this->Reset(); }
+  /**
+   * Move via assignment.
+   */
+  template<class S>
+  V8_INLINE UniquePersistent& operator=(UniquePersistent<S> rhs) {
+    TYPE_CHECK(T, S);
+    this->val_ = rhs.val_;
+    rhs.val_ = 0;
+    return *this;
+  }
+  /**
+   * Cast operator for moves.
+   */
+  V8_INLINE operator RValue() { return RValue(this); }
+  /**
+   * Pass allows returning uniques from functions, etc.
+   */
+  V8_INLINE UniquePersistent Pass() { return UniquePersistent(RValue(this)); }
+
+ private:
+  UniquePersistent(UniquePersistent&);
+  void operator=(UniquePersistent&);
+};
+
+
  /**
  * A stack-allocated class that governs a number of local handles.
  * After a handle scope has been created, all local handles will be
@@ -4886,6 +4988,7 @@
   template <class T> friend class Handle;
   template <class T> friend class Local;
   template <class T> friend class Eternal;
+  template <class T> friend class PersistentBase;
   template <class T, class M> friend class Persistent;
   friend class Context;
 };
@@ -5678,8 +5781,7 @@
 }
 
 template <class T>
-template <class M>
-Local<T> Local<T>::New(Isolate* isolate, const Persistent<T, M>& that) {
+Local<T> Local<T>::New(Isolate* isolate, const PersistentBase<T>& that) {
   return New(isolate, that.val_);
 }
 
@@ -5717,8 +5819,8 @@
 }
 
 
-template <class T, class M>
-T* Persistent<T, M>::New(Isolate* isolate, T* that) {
+template <class T>
+T* PersistentBase<T>::New(Isolate* isolate, T* that) {
   if (that == NULL) return NULL;
   internal::Object** p = reinterpret_cast<internal::Object**>(that);
   return reinterpret_cast<T*>(
@@ -5731,7 +5833,7 @@
 template <class S, class M2>
 void Persistent<T, M>::Copy(const Persistent<S, M2>& that) {
   TYPE_CHECK(T, S);
-  Reset();
+  this->Reset();
   if (that.IsEmpty()) return;
   internal::Object** p = reinterpret_cast<internal::Object**>(that.val_);
   this->val_ = reinterpret_cast<T*>(V8::CopyPersistent(p));
@@ -5739,8 +5841,8 @@
 }
 
 
-template <class T, class M>
-bool Persistent<T, M>::IsIndependent() const {
+template <class T>
+bool PersistentBase<T>::IsIndependent() const {
   typedef internal::Internals I;
   if (this->IsEmpty()) return false;
   return I::GetNodeFlag(reinterpret_cast<internal::Object**>(this->val_),
@@ -5748,8 +5850,8 @@
 }
 
 
-template <class T, class M>
-bool Persistent<T, M>::IsNearDeath() const {
+template <class T>
+bool PersistentBase<T>::IsNearDeath() const {
   typedef internal::Internals I;
   if (this->IsEmpty()) return false;
   uint8_t node_state =
@@ -5759,8 +5861,8 @@
 }
 
 
-template <class T, class M>
-bool Persistent<T, M>::IsWeak() const {
+template <class T>
+bool PersistentBase<T>::IsWeak() const {
   typedef internal::Internals I;
   if (this->IsEmpty()) return false;
   return I::GetNodeState(reinterpret_cast<internal::Object**>(this->val_)) ==
@@ -5768,17 +5870,17 @@
 }
 
 
-template <class T, class M>
-void Persistent<T, M>::Reset() {
+template <class T>
+void PersistentBase<T>::Reset() {
   if (this->IsEmpty()) return;
   V8::DisposeGlobal(reinterpret_cast<internal::Object**>(this->val_));
   val_ = 0;
 }
 
 
-template <class T, class M>
+template <class T>
 template <class S>
-void Persistent<T, M>::Reset(Isolate* isolate, const Handle<S>& other) {
+void PersistentBase<T>::Reset(Isolate* isolate, const Handle<S>& other) {
   TYPE_CHECK(T, S);
   Reset();
   if (other.IsEmpty()) return;
@@ -5786,10 +5888,10 @@
 }
 
 
-template <class T,  class M>
-template <class S, class M2>
-void Persistent<T, M>::Reset(Isolate* isolate,
-                             const Persistent<S, M2>& other) {
+template <class T>
+template <class S>
+void PersistentBase<T>::Reset(Isolate* isolate,
+                              const PersistentBase<S>& other) {
   TYPE_CHECK(T, S);
   Reset();
   if (other.IsEmpty()) return;
@@ -5797,9 +5899,9 @@
 }
 
 
-template <class T, class M>
+template <class T>
 template <typename S, typename P>
-void Persistent<T, M>::SetWeak(
+void PersistentBase<T>::SetWeak(
     P* parameter,
     typename WeakCallbackData<S, P>::Callback callback) {
   TYPE_CHECK(S, T);
@@ -5811,9 +5913,9 @@
 }
 
 
-template <class T, class M>
+template <class T>
 template <typename P>
-void Persistent<T, M>::SetWeak(
+void PersistentBase<T>::SetWeak(
     P* parameter,
     typename WeakCallbackData<T, P>::Callback callback) {
   SetWeak<T, P>(parameter, callback);
@@ -5843,14 +5945,14 @@
 }
 
 
-template <class T, class M>
-void Persistent<T, M>::ClearWeak() {
+template <class T>
+void PersistentBase<T>::ClearWeak() {
   V8::ClearWeak(reinterpret_cast<internal::Object**>(this->val_));
 }
 
 
-template <class T, class M>
-void Persistent<T, M>::MarkIndependent() {
+template <class T>
+void PersistentBase<T>::MarkIndependent() {
   typedef internal::Internals I;
   if (this->IsEmpty()) return;
   I::UpdateNodeFlag(reinterpret_cast<internal::Object**>(this->val_),
@@ -5859,8 +5961,8 @@
 }
 
 
-template <class T, class M>
-void Persistent<T, M>::MarkPartiallyDependent() {
+template <class T>
+void PersistentBase<T>::MarkPartiallyDependent() {
   typedef internal::Internals I;
   if (this->IsEmpty()) return;
   I::UpdateNodeFlag(reinterpret_cast<internal::Object**>(this->val_),
@@ -5872,14 +5974,14 @@
 template <class T, class M>
 T* Persistent<T, M>::ClearAndLeak() {
   T* old;
-  old = val_;
-  val_ = NULL;
+  old = this->val_;
+  this->val_ = NULL;
   return old;
 }
 
 
-template <class T, class M>
-void Persistent<T, M>::SetWrapperClassId(uint16_t class_id) {
+template <class T>
+void PersistentBase<T>::SetWrapperClassId(uint16_t class_id) {
   typedef internal::Internals I;
   if (this->IsEmpty()) return;
   internal::Object** obj = reinterpret_cast<internal::Object**>(this->val_);
@@ -5888,8 +5990,8 @@
 }
 
 
-template <class T, class M>
-uint16_t Persistent<T, M>::WrapperClassId() const {
+template <class T>
+uint16_t PersistentBase<T>::WrapperClassId() const {
   typedef internal::Internals I;
   if (this->IsEmpty()) return 0;
   internal::Object** obj = reinterpret_cast<internal::Object**>(this->val_);
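
Note on the handle refactoring above: UniquePersistent emulates move semantics without C++11 rvalue references, using the same RValue-proxy trick as std::auto_ptr. Ordinary copy construction and assignment are private, and ownership transfers through an implicit conversion to a proxy plus an explicit Pass(). A minimal standalone sketch of the pattern, with illustrative names (MoveOnly is not part of the V8 API):

    // Pre-C++11 move emulation, the same shape as UniquePersistent above.
    // MoveOnly and its int payload are illustrative, not V8 API.
    #include <cstdio>

    class MoveOnly {
      struct RValue {  // proxy a non-const object can decay into
        explicit RValue(MoveOnly* object) : object(object) {}
        MoveOnly* object;
      };

     public:
      MoveOnly() : val_(0) {}
      explicit MoveOnly(int val) : val_(val) {}
      // "Move constructor": steal the payload and empty the source.
      MoveOnly(RValue rvalue) : val_(rvalue.object->val_) {
        rvalue.object->val_ = 0;
      }
      // Lets temporaries be moved from without rvalue references.
      operator RValue() { return RValue(this); }
      // Explicit move, e.g. for returning from a function.
      MoveOnly Pass() { return MoveOnly(RValue(this)); }
      int value() const { return val_; }

     private:
      MoveOnly(MoveOnly&);        // ordinary copies are forbidden
      void operator=(MoveOnly&);
      int val_;
    };

    MoveOnly Make() {
      MoveOnly m(42);
      return m.Pass();  // moved out, never copied
    }

    int main() {
      MoveOnly a = Make();
      MoveOnly b = a.Pass();  // a is emptied, b owns the payload
      std::printf("%d %d\n", a.value(), b.value());  // prints: 0 42
      return 0;
    }

In C++11 terms this is a deleted copy constructor plus a move constructor; the proxy formulation keeps the header usable from pre-C++11 embedders.
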
diff --git a/src/allocation-site-scopes.cc b/src/allocation-site-scopes.cc
index 31120b9..bbfb39b 100644
--- a/src/allocation-site-scopes.cc
+++ b/src/allocation-site-scopes.cc
@@ -82,4 +82,21 @@
   }
 }
 
+
+bool AllocationSiteUsageContext::ShouldCreateMemento(Handle<JSObject> object) {
+  if (activated_ && AllocationSite::CanTrack(object->map()->instance_type())) {
+    if (FLAG_allocation_site_pretenuring ||
+        AllocationSite::GetMode(object->GetElementsKind()) ==
+        TRACK_ALLOCATION_SITE) {
+      if (FLAG_trace_creation_allocation_sites) {
+        PrintF("*** Creating Memento for %s %p\n",
+               object->IsJSArray() ? "JSArray" : "JSObject",
+               static_cast<void*>(*object));
+      }
+      return true;
+    }
+  }
+  return false;
+}
+
 } }  // namespace v8::internal
diff --git a/src/allocation-site-scopes.h b/src/allocation-site-scopes.h
index f106c5e..a195b27 100644
--- a/src/allocation-site-scopes.h
+++ b/src/allocation-site-scopes.h
@@ -41,16 +41,14 @@
 // boilerplate with AllocationSite and AllocationMemento support.
 class AllocationSiteContext {
  public:
-  AllocationSiteContext(Isolate* isolate, bool activated) {
+  explicit AllocationSiteContext(Isolate* isolate) {
     isolate_ = isolate;
-    activated_ = activated;
   };
 
   Handle<AllocationSite> top() { return top_; }
   Handle<AllocationSite> current() { return current_; }
 
-  // If activated, then recursively create mementos
-  bool activated() const { return activated_; }
+  bool ShouldCreateMemento(Handle<JSObject> object) { return false; }
 
   Isolate* isolate() { return isolate_; }
 
@@ -68,7 +66,6 @@
   Isolate* isolate_;
   Handle<AllocationSite> top_;
   Handle<AllocationSite> current_;
-  bool activated_;
 };
 
 
@@ -77,7 +74,7 @@
 class AllocationSiteCreationContext : public AllocationSiteContext {
  public:
   explicit AllocationSiteCreationContext(Isolate* isolate)
-      : AllocationSiteContext(isolate, true) { }
+      : AllocationSiteContext(isolate) { }
 
   Handle<AllocationSite> EnterNewScope();
   void ExitScope(Handle<AllocationSite> site, Handle<JSObject> object);
@@ -90,8 +87,9 @@
  public:
   AllocationSiteUsageContext(Isolate* isolate, Handle<AllocationSite> site,
                              bool activated)
-      : AllocationSiteContext(isolate, activated),
-        top_site_(site) { }
+      : AllocationSiteContext(isolate),
+        top_site_(site),
+        activated_(activated) { }
 
   inline Handle<AllocationSite> EnterNewScope() {
     if (top().is_null()) {
@@ -113,8 +111,11 @@
     ASSERT(object.is_null() || *object == scope_site->transition_info());
   }
 
+  bool ShouldCreateMemento(Handle<JSObject> object);
+
  private:
   Handle<AllocationSite> top_site_;
+  bool activated_;
 };
 
 
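A note on ShouldCreateMemento: it is deliberately non-virtual. AllocationSiteContext::ShouldCreateMemento statically returns false and AllocationSiteUsageContext shadows it with the real check, so a caller that knows the concrete context type (for example through a template parameter) binds the call at compile time and the compiler can drop the memento path entirely. A small illustrative sketch of that dispatch, with hypothetical names (the Handle<JSObject> argument is omitted):

    // Hypothetical sketch of non-virtual shadowing with static dispatch.
    #include <cstdio>

    struct SiteContext {
      bool ShouldCreateMemento() { return false; }  // shadowed, not virtual
    };

    struct UsageContext : SiteContext {
      explicit UsageContext(bool activated) : activated_(activated) {}
      bool ShouldCreateMemento() { return activated_; }  // the real decision
      bool activated_;
    };

    // Parameterized on the concrete context type, so the call below is
    // resolved at compile time; with SiteContext the branch is dead code.
    template <class Context>
    void CopyObject(Context* context) {
      if (context->ShouldCreateMemento()) {
        std::printf("memento created\n");
      } else {
        std::printf("no memento\n");
      }
    }

    int main() {
      SiteContext creation;
      UsageContext usage(true);
      CopyObject(&creation);  // prints: no memento
      CopyObject(&usage);     // prints: memento created
      return 0;
    }
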
diff --git a/src/arm/constants-arm.h b/src/arm/constants-arm.h
index 7036139..78bb66c 100644
--- a/src/arm/constants-arm.h
+++ b/src/arm/constants-arm.h
@@ -50,6 +50,9 @@
   return ((instr >> 4) & 0xfff0) | (instr & 0xf);
 }
 
+// Used in code age prologue - ldr(pc, MemOperand(pc, -4))
+const int kCodeAgeJumpInstruction = 0xe51ff004;
+
 // Number of registers in normal ARM mode.
 const int kNumRegisters = 16;
 
diff --git a/src/arm/disasm-arm.cc b/src/arm/disasm-arm.cc
index acffaa3..49e4126 100644
--- a/src/arm/disasm-arm.cc
+++ b/src/arm/disasm-arm.cc
@@ -1679,6 +1679,14 @@
                                     "constant pool begin (length %d)",
                                     DecodeConstantPoolLength(instruction_bits));
     return Instruction::kInstrSize;
+  } else if (instruction_bits == kCodeAgeJumpInstruction) {
+    // The code age prologue has a constant immediately following the jump
+    // instruction.
+    Instruction* target = Instruction::At(instr_ptr + Instruction::kInstrSize);
+    DecodeType2(instr);
+    OS::SNPrintF(out_buffer_ + out_buffer_pos_,
+                 " (0x%08x)", target->InstructionBits());
+    return 2 * Instruction::kInstrSize;
   }
   switch (instr->TypeValue()) {
     case 0:
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 15bfba3..88421af 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -1784,6 +1784,14 @@
   Handle<FixedArrayBase> constant_elements_values(
       FixedArrayBase::cast(constant_elements->get(1)));
 
+  AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
+      ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
+  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
+    // If the only customer of allocation sites is transitioning, then
+    // we can turn it off if we don't have anywhere else to transition to.
+    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
+  }
+
   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
@@ -1792,7 +1800,7 @@
       isolate()->heap()->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        DONT_TRACK_ALLOCATION_SITE,
+        allocation_site_mode,
         length);
     __ CallStub(&stub);
     __ IncrementCounter(
@@ -1807,12 +1815,9 @@
            FLAG_smi_only_arrays);
     FastCloneShallowArrayStub::Mode mode =
         FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-    AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
-        ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
 
     if (has_fast_elements) {
       mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
-      allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
     }
 
     FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index f93b04c..11a6145 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -1641,12 +1641,10 @@
     return;
   }
 
-#ifdef DEBUG
   if (FLAG_trace_ic) {
     PrintF("[  patching ic at %p, cmp=%p, delta=%d\n",
            address, cmp_instruction_address, delta);
   }
-#endif
 
   Address patch_address =
       cmp_instruction_address - delta * Instruction::kInstrSize;
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index 71c9dda..2f34d69 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -2031,8 +2031,8 @@
       HValue* val = instr->value();
       LOperand* value = UseRegister(val);
       LInstruction* result = val->CheckFlag(HInstruction::kUint32)
-          ? DefineSameAsFirst(new(zone()) LUint32ToSmi(value))
-          : DefineSameAsFirst(new(zone()) LInteger32ToSmi(value));
+          ? DefineAsRegister(new(zone()) LUint32ToSmi(value))
+          : DefineAsRegister(new(zone()) LInteger32ToSmi(value));
       if (val->HasRange() && val->range()->IsInSmiRange()) {
         return result;
       }
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 6dadef0..164ee55 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -4695,10 +4695,13 @@
 void LCodeGen::DoInteger32ToSmi(LInteger32ToSmi* instr) {
   LOperand* input = instr->value();
   LOperand* output = instr->result();
-  __ SmiTag(ToRegister(output), ToRegister(input), SetCC);
+  ASSERT(output->IsRegister());
   if (!instr->hydrogen()->value()->HasRange() ||
       !instr->hydrogen()->value()->range()->IsInSmiRange()) {
+    __ SmiTag(ToRegister(output), ToRegister(input), SetCC);
     DeoptimizeIf(vs, instr->environment());
+  } else {
+    __ SmiTag(ToRegister(output), ToRegister(input));
   }
 }
 
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 67b5798..660a76e 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -41,6 +41,7 @@
 #include "platform.h"
 #include "snapshot.h"
 #include "trig-table.h"
+#include "extensions/free-buffer-extension.h"
 #include "extensions/externalize-string-extension.h"
 #include "extensions/gc-extension.h"
 #include "extensions/statistics-extension.h"
@@ -100,6 +101,9 @@
 
 
 void Bootstrapper::InitializeOncePerProcess() {
+#ifdef ADDRESS_SANITIZER
+  FreeBufferExtension::Register();
+#endif
   GCExtension::Register();
   ExternalizeStringExtension::Register();
   StatisticsExtension::Register();
@@ -2278,6 +2282,11 @@
     current = current->next();
   }
 
+#ifdef ADDRESS_SANITIZER
+  if (FLAG_expose_free_buffer) {
+    InstallExtension(isolate, "v8/free-buffer", &extension_states);
+  }
+#endif
   if (FLAG_expose_gc) InstallExtension(isolate, "v8/gc", &extension_states);
   if (FLAG_expose_externalize_string) {
     InstallExtension(isolate, "v8/externalize", &extension_states);
diff --git a/src/builtins.cc b/src/builtins.cc
index f950c59..999969d 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -1385,11 +1385,6 @@
 }
 
 
-static void Generate_StoreIC_Slow_Strict(MacroAssembler* masm) {
-  StoreIC::GenerateSlow(masm);
-}
-
-
 static void Generate_StoreIC_Initialize(MacroAssembler* masm) {
   StoreIC::GenerateInitialize(masm);
 }
@@ -1420,11 +1415,6 @@
 }
 
 
-static void Generate_StoreIC_Normal_Strict(MacroAssembler* masm) {
-  StoreIC::GenerateNormal(masm);
-}
-
-
 static void Generate_StoreIC_Megamorphic(MacroAssembler* masm) {
   StoreIC::GenerateMegamorphic(masm, kNonStrictMode);
 }
@@ -1480,11 +1470,6 @@
 }
 
 
-static void Generate_KeyedStoreIC_Slow_Strict(MacroAssembler* masm) {
-  KeyedStoreIC::GenerateSlow(masm);
-}
-
-
 static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
   KeyedStoreIC::GenerateInitialize(masm);
 }
@@ -1667,13 +1652,14 @@
     functions->extra_args = NO_EXTRA_ARGUMENTS;                             \
     ++functions;
 
-#define DEF_FUNCTION_PTR_H(aname, kind, extra)                              \
+#define DEF_FUNCTION_PTR_H(aname, kind)                                     \
     functions->generator = FUNCTION_ADDR(Generate_##aname);                 \
     functions->c_code = NULL;                                               \
     functions->s_name = #aname;                                             \
     functions->name = k##aname;                                             \
     functions->flags = Code::ComputeFlags(                                  \
-        Code::HANDLER, MONOMORPHIC, extra, Code::NORMAL, Code::kind);       \
+        Code::HANDLER, MONOMORPHIC, Code::kNoExtraICState,                  \
+        Code::NORMAL, Code::kind);                                          \
     functions->extra_args = NO_EXTRA_ARGUMENTS;                             \
     ++functions;
 
@@ -1805,7 +1791,7 @@
       reinterpret_cast<Code**>(builtin_address(k##name));   \
   return Handle<Code>(code_address);                        \
 }
-#define DEFINE_BUILTIN_ACCESSOR_H(name, kind, extra)        \
+#define DEFINE_BUILTIN_ACCESSOR_H(name, kind)               \
 Handle<Code> Builtins::name() {                             \
   Code** code_address =                                     \
       reinterpret_cast<Code**>(builtin_address(k##name));   \
diff --git a/src/builtins.h b/src/builtins.h
index 1a04ad8..e569e8f 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -216,16 +216,13 @@
   CODE_AGE_LIST_WITH_ARG(DECLARE_CODE_AGE_BUILTIN, V)
 
 // Define list of builtin handlers implemented in assembly.
-#define BUILTIN_LIST_H(V)                                                 \
-  V(LoadIC_Slow,                    LOAD_IC, Code::kNoExtraICState)       \
-  V(KeyedLoadIC_Slow,               KEYED_LOAD_IC, Code::kNoExtraICState) \
-  V(StoreIC_Slow,                   STORE_IC, Code::kNoExtraICState)      \
-  V(StoreIC_Slow_Strict,            STORE_IC, kStrictMode)                \
-  V(KeyedStoreIC_Slow,              KEYED_STORE_IC, Code::kNoExtraICState)\
-  V(KeyedStoreIC_Slow_Strict,       KEYED_STORE_IC, kStrictMode)          \
-  V(LoadIC_Normal,                  LOAD_IC, Code::kNoExtraICState)       \
-  V(StoreIC_Normal,                 STORE_IC, Code::kNoExtraICState)      \
-  V(StoreIC_Normal_Strict,          STORE_IC, kStrictMode)
+#define BUILTIN_LIST_H(V)                                               \
+  V(LoadIC_Slow,                    LOAD_IC)                            \
+  V(KeyedLoadIC_Slow,               KEYED_LOAD_IC)                      \
+  V(StoreIC_Slow,                   STORE_IC)                           \
+  V(KeyedStoreIC_Slow,              KEYED_STORE_IC)                     \
+  V(LoadIC_Normal,                  LOAD_IC)                            \
+  V(StoreIC_Normal,                 STORE_IC)
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
 // Define list of builtins used by the debugger implemented in assembly.
@@ -314,7 +311,7 @@
   enum Name {
 #define DEF_ENUM_C(name, ignore) k##name,
 #define DEF_ENUM_A(name, kind, state, extra) k##name,
-#define DEF_ENUM_H(name, kind, extra) k##name,
+#define DEF_ENUM_H(name, kind) k##name,
     BUILTIN_LIST_C(DEF_ENUM_C)
     BUILTIN_LIST_A(DEF_ENUM_A)
     BUILTIN_LIST_H(DEF_ENUM_H)
@@ -341,7 +338,7 @@
 #define DECLARE_BUILTIN_ACCESSOR_C(name, ignore) Handle<Code> name();
 #define DECLARE_BUILTIN_ACCESSOR_A(name, kind, state, extra) \
   Handle<Code> name();
-#define DECLARE_BUILTIN_ACCESSOR_H(name, kind, extra) Handle<Code> name();
+#define DECLARE_BUILTIN_ACCESSOR_H(name, kind) Handle<Code> name();
   BUILTIN_LIST_C(DECLARE_BUILTIN_ACCESSOR_C)
   BUILTIN_LIST_A(DECLARE_BUILTIN_ACCESSOR_A)
   BUILTIN_LIST_H(DECLARE_BUILTIN_ACCESSOR_H)
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 051dd45..87fca37 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -864,7 +864,6 @@
   // Note: Although a no-op transition is semantically OK, it is hinting at a
   // bug somewhere in our state transition machinery.
   ASSERT(from != to);
-  #ifdef DEBUG
   if (!FLAG_trace_ic) return;
   char buffer[100];
   NoAllocationStringAllocator allocator(buffer,
@@ -878,7 +877,6 @@
   to.Print(&stream);
   stream.Add("]\n");
   stream.OutputToStdOut();
-  #endif
 }
 
 
diff --git a/src/code-stubs.h b/src/code-stubs.h
index 789983a..52b5942 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -958,9 +958,8 @@
 
 class StoreGlobalStub : public HandlerStub {
  public:
-  StoreGlobalStub(StrictModeFlag strict_mode, bool is_constant) {
-    bit_field_ = StrictModeBits::encode(strict_mode) |
-        IsConstantBits::encode(is_constant);
+  explicit StoreGlobalStub(bool is_constant) {
+    bit_field_ = IsConstantBits::encode(is_constant);
   }
 
   Handle<Code> GetCodeCopyFromTemplate(Isolate* isolate,
@@ -1003,9 +1002,8 @@
   virtual int NotMissMinorKey() { return GetExtraICState(); }
   Major MajorKey() { return StoreGlobal; }
 
-  class StrictModeBits: public BitField<StrictModeFlag, 0, 1> {};
-  class IsConstantBits: public BitField<bool, 1, 1> {};
-  class RepresentationBits: public BitField<Representation::Kind, 2, 8> {};
+  class IsConstantBits: public BitField<bool, 0, 1> {};
+  class RepresentationBits: public BitField<Representation::Kind, 1, 8> {};
 
   int bit_field_;
 
diff --git a/src/conversions.cc b/src/conversions.cc
index 5f1219e..397f3c5 100644
--- a/src/conversions.cc
+++ b/src/conversions.cc
@@ -401,8 +401,9 @@
   // at least one digit.
   int integer_pos = kBufferSize - 2;
   do {
-    integer_buffer[integer_pos--] =
-        chars[static_cast<int>(fmod(integer_part, radix))];
+    double remainder = fmod(integer_part, radix);
+    integer_buffer[integer_pos--] = chars[static_cast<int>(remainder)];
+    integer_part -= remainder;
     integer_part /= radix;
   } while (integer_part >= 1.0);
   // Sanity check.
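
This hunk is the fix for issue 3025 from the ChangeLog. The old loop divided integer_part by the radix while the just-extracted digit was still in it, so the quotient carried a fractional tail and each further division compounded rounding error into later digits. Subtracting the remainder first keeps integer_part an exact integer double at every step, and an exactly divisible integer below 2^53 divides exactly, so digits of such values now come out right in any base. A standalone sketch of the corrected loop (illustrative, not the V8 function itself):

    // Illustrative digit-extraction loop with the fix applied.
    #include <cmath>
    #include <cstdio>
    #include <string>

    static std::string ToRadix(double value, int radix) {
      static const char chars[] = "0123456789abcdefghijklmnopqrstuvwxyz";
      std::string digits;
      double integer_part = std::floor(value);
      do {
        double remainder = std::fmod(integer_part, radix);
        digits.insert(digits.begin(), chars[static_cast<int>(remainder)]);
        integer_part -= remainder;  // now exactly divisible by radix...
        integer_part /= radix;      // ...so the division is exact too
      } while (integer_part >= 1.0);
      return digits;
    }

    int main() {
      // 2^53 - 1 is the largest odd integer a double holds exactly; every
      // intermediate in the loop stays an exact integer, so the digits
      // must read back as the same number: 9007199254740991.
      std::printf("%s\n", ToRadix(9007199254740991.0, 10).c_str());
      return 0;
    }
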
diff --git a/src/date.js b/src/date.js
index 1b128c3..2363841 100644
--- a/src/date.js
+++ b/src/date.js
@@ -302,8 +302,7 @@
 }
 
 
-// Mozilla-specific extension. Returns the number of milliseconds
-// elapsed since 1 January 1970 00:00:00 UTC.
+// ECMA 262 - 15.9.4.4
 function DateNow() {
   return %DateCurrentTime();
 }
diff --git a/src/extensions/free-buffer-extension.cc b/src/extensions/free-buffer-extension.cc
new file mode 100644
index 0000000..4040c90
--- /dev/null
+++ b/src/extensions/free-buffer-extension.cc
@@ -0,0 +1,59 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "free-buffer-extension.h"
+#include "platform.h"
+#include "v8.h"
+
+namespace v8 {
+namespace internal {
+
+
+v8::Handle<v8::FunctionTemplate> FreeBufferExtension::GetNativeFunction(
+    v8::Handle<v8::String> str) {
+  return v8::FunctionTemplate::New(FreeBufferExtension::FreeBuffer);
+}
+
+
+void FreeBufferExtension::FreeBuffer(
+    const v8::FunctionCallbackInfo<v8::Value>& args) {
+  v8::Handle<v8::ArrayBuffer> arrayBuffer = args[0].As<v8::ArrayBuffer>();
+  v8::ArrayBuffer::Contents contents = arrayBuffer->Externalize();
+  V8::ArrayBufferAllocator()->Free(contents.Data(), contents.ByteLength());
+}
+
+
+void FreeBufferExtension::Register() {
+  static char buffer[100];
+  Vector<char> temp_vector(buffer, sizeof(buffer));
+  OS::SNPrintF(temp_vector, "native function freeBuffer();");
+
+  static FreeBufferExtension buffer_free_extension(buffer);
+  static v8::DeclareExtension declaration(&buffer_free_extension);
+}
+
+} }  // namespace v8::internal
diff --git a/src/extensions/free-buffer-extension.h b/src/extensions/free-buffer-extension.h
new file mode 100644
index 0000000..29ffbc0
--- /dev/null
+++ b/src/extensions/free-buffer-extension.h
@@ -0,0 +1,48 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_EXTENSIONS_FREE_BUFFER_EXTENSION_H_
+#define V8_EXTENSIONS_FREE_BUFFER_EXTENSION_H_
+
+#include "v8.h"
+
+namespace v8 {
+namespace internal {
+
+class FreeBufferExtension : public v8::Extension {
+ public:
+  explicit FreeBufferExtension(const char* source)
+      : v8::Extension("v8/free-buffer", source) {}
+  virtual v8::Handle<v8::FunctionTemplate> GetNativeFunction(
+      v8::Handle<v8::String> name);
+  static void FreeBuffer(const v8::FunctionCallbackInfo<v8::Value>& args);
+  static void Register();
+};
+
+} }  // namespace v8::internal
+
+#endif  // V8_EXTENSIONS_FREE_BUFFER_EXTENSION_H_
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 61e545f..ffec058 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -407,6 +407,9 @@
 // bootstrapper.cc
 DEFINE_string(expose_natives_as, NULL, "expose natives in global object")
 DEFINE_string(expose_debug_as, NULL, "expose debug in global object")
+#ifdef ADDRESS_SANITIZER
+DEFINE_bool(expose_free_buffer, false, "expose freeBuffer extension")
+#endif
 DEFINE_bool(expose_gc, false, "expose gc extension")
 DEFINE_string(expose_gc_as, NULL,
               "expose gc extension under the specified name")
diff --git a/src/heap-inl.h b/src/heap-inl.h
index 9d57c99..2d4b10e 100644
--- a/src/heap-inl.h
+++ b/src/heap-inl.h
@@ -483,6 +483,18 @@
 }
 
 
+void Heap::UpdateAllocationSiteFeedback(HeapObject* object) {
+  if (FLAG_allocation_site_pretenuring && object->IsJSObject()) {
+    AllocationMemento* memento = AllocationMemento::FindForJSObject(
+        JSObject::cast(object), true);
+    if (memento != NULL) {
+      ASSERT(memento->IsValid());
+      memento->GetAllocationSite()->IncrementMementoFoundCount();
+    }
+  }
+}
+
+
 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
   ASSERT(object->GetIsolate()->heap()->InFromSpace(object));
 
@@ -501,12 +513,7 @@
     return;
   }
 
-  if (FLAG_trace_track_allocation_sites && object->IsJSObject()) {
-    if (AllocationMemento::FindForJSObject(JSObject::cast(object), true) !=
-        NULL) {
-      object->GetIsolate()->heap()->allocation_mementos_found_++;
-    }
-  }
+  UpdateAllocationSiteFeedback(object);
 
   // AllocationMementos are unrooted and shouldn't survive a scavenge
   ASSERT(object->map() != object->GetHeap()->allocation_memento_map());
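
UpdateAllocationSiteFeedback is the measurement half of allocation-site pretenuring: objects allocated from a tracked site carry a small AllocationMemento directly after them in memory, pointing back at the site, and each time the scavenger finds such an object still alive it increments the site's found counter (GarbageCollectionEpilogue in heap.cc below digests these counts). A toy model of the layout and bookkeeping, with plain structs and illustrative names:

    // Plain-struct model; all names illustrative.
    #include <cstdio>

    struct Site {
      int memento_found_count;
      int memento_create_count;
    };

    struct Memento {
      Site* site;
    };

    // In the real heap the memento sits at object_address + object_size;
    // here the pair is one struct.
    struct ObjectWithMemento {
      double payload;    // stands in for the JSObject body
      Memento memento;   // trailing memento pointing back at the site
    };

    void UpdateFeedback(ObjectWithMemento* object) {
      // Called when the scavenger finds the object still alive.
      object->memento.site->memento_found_count++;
    }

    int main() {
      Site site = {0, 0};
      ObjectWithMemento obj = {1.0, {&site}};
      site.memento_create_count++;  // bumped when obj was allocated
      UpdateFeedback(&obj);         // bumped when obj survives a scavenge
      // A high found/created ratio is the hint that objects from this
      // site live long and future ones should be tenured.
      std::printf("found %d / created %d\n",
                  site.memento_found_count, site.memento_create_count);
      return 0;
    }
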
diff --git a/src/heap.cc b/src/heap.cc
index f28c926..86efe4b 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -87,7 +87,6 @@
       contexts_disposed_(0),
       global_ic_age_(0),
       flush_monomorphic_ics_(false),
-      allocation_mementos_found_(0),
       scan_on_scavenge_pages_(0),
       new_space_(this),
       old_pointer_space_(NULL),
@@ -506,6 +505,40 @@
 
 
 void Heap::GarbageCollectionEpilogue() {
+  if (FLAG_allocation_site_pretenuring) {
+    int tenure_decisions = 0;
+    int dont_tenure_decisions = 0;
+    int allocation_mementos_found = 0;
+
+    Object* cur = allocation_sites_list();
+    while (cur->IsAllocationSite()) {
+      AllocationSite* casted = AllocationSite::cast(cur);
+      allocation_mementos_found += casted->memento_found_count()->value();
+      if (casted->DigestPretenuringFeedback()) {
+        if (casted->GetPretenureMode() == TENURED) {
+          tenure_decisions++;
+        } else {
+          dont_tenure_decisions++;
+        }
+      }
+      cur = casted->weak_next();
+    }
+
+    // TODO(mvstanton): Pretenure decisions are only made once for an allocation
+    // site. Find a sane way to decide about revisiting the decision later.
+
+    if (FLAG_trace_track_allocation_sites &&
+        (allocation_mementos_found > 0 ||
+         tenure_decisions > 0 ||
+         dont_tenure_decisions > 0)) {
+      PrintF("GC: (#mementos, #tenure decisions, #donttenure decisions) "
+             "(%d, %d, %d)\n",
+             allocation_mementos_found,
+             tenure_decisions,
+             dont_tenure_decisions);
+    }
+  }
+
   store_buffer()->GCEpilogue();
 
   // In release mode, we only zap the from space under heap verification.
@@ -1393,8 +1426,6 @@
 void Heap::Scavenge() {
   RelocationLock relocation_lock(this);
 
-  allocation_mementos_found_ = 0;
-
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this);
 #endif
@@ -1542,11 +1573,6 @@
   gc_state_ = NOT_IN_GC;
 
   scavenges_since_last_idle_round_++;
-
-  if (FLAG_trace_track_allocation_sites && allocation_mementos_found_ > 0) {
-    PrintF("AllocationMementos found during scavenge = %d\n",
-           allocation_mementos_found_);
-  }
 }
 
 
@@ -3968,7 +3994,12 @@
   int length = end - start;
   if (length <= 0) {
     return empty_string();
-  } else if (length == 1) {
+  }
+
+  // Make an attempt to flatten the buffer to reduce access time.
+  buffer = buffer->TryFlattenGetString();
+
+  if (length == 1) {
     return LookupSingleCharacterStringFromCode(buffer->Get(start));
   } else if (length == 2) {
     // Optimization for 2-byte strings often used as keys in a decompression
@@ -3979,9 +4010,6 @@
     return MakeOrFindTwoCharacterString(this, c1, c2);
   }
 
-  // Make an attempt to flatten the buffer to reduce access time.
-  buffer = buffer->TryFlattenGetString();
-
   if (!FLAG_string_slices ||
       !buffer->IsFlat() ||
       length < SlicedString::kMinLength ||
@@ -4357,6 +4385,17 @@
 }
 
 
+void Heap::InitializeAllocationMemento(AllocationMemento* memento,
+                                       AllocationSite* allocation_site) {
+  memento->set_map_no_write_barrier(allocation_memento_map());
+  ASSERT(allocation_site->map() == allocation_site_map());
+  memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
+  if (FLAG_allocation_site_pretenuring) {
+    allocation_site->IncrementMementoCreateCount();
+  }
+}
+
+
 MaybeObject* Heap::AllocateWithAllocationSite(Map* map, AllocationSpace space,
     Handle<AllocationSite> allocation_site) {
   ASSERT(gc_state_ == NOT_IN_GC);
@@ -4373,9 +4412,7 @@
   HeapObject::cast(result)->set_map_no_write_barrier(map);
   AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
       reinterpret_cast<Address>(result) + map->instance_size());
-  alloc_memento->set_map_no_write_barrier(allocation_memento_map());
-  ASSERT(allocation_site->map() == allocation_site_map());
-  alloc_memento->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER);
+  InitializeAllocationMemento(alloc_memento, *allocation_site);
   return result;
 }
 
@@ -4808,8 +4845,7 @@
   int object_size = map->instance_size();
   Object* clone;
 
-  ASSERT(site == NULL || (AllocationSite::CanTrack(map->instance_type()) &&
-                          map->instance_type() == JS_ARRAY_TYPE));
+  ASSERT(site == NULL || AllocationSite::CanTrack(map->instance_type()));
 
   WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
 
@@ -4848,9 +4884,7 @@
     if (site != NULL) {
       AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
           reinterpret_cast<Address>(clone) + object_size);
-      alloc_memento->set_map_no_write_barrier(allocation_memento_map());
-      ASSERT(site->map() == allocation_site_map());
-      alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
+      InitializeAllocationMemento(alloc_memento, site);
       HeapProfiler* profiler = isolate()->heap_profiler();
       if (profiler->is_tracking_allocations()) {
         profiler->UpdateObjectSizeEvent(HeapObject::cast(clone)->address(),
@@ -4981,7 +5015,7 @@
 
 
 MaybeObject* Heap::AllocateStringFromOneByte(Vector<const uint8_t> string,
-                                           PretenureFlag pretenure) {
+                                             PretenureFlag pretenure) {
   int length = string.length();
   if (length == 1) {
     return Heap::LookupSingleCharacterStringFromCode(string[0]);
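
The SubString reordering in this file is the cons-string fix from the ChangeLog (Chromium issue 323041). Previously a length-1 substring called buffer->Get(start) before any flattening, and Get on a cons string walks the rope on every call, so repeated single-character substrings of a deep concatenation paid the tree walk each time. Flattening first turns each later read into a direct index. A toy rope showing the two costs (illustrative only):

    // Toy cons string; Str is illustrative, not V8's String hierarchy.
    #include <cstddef>
    #include <cstdio>
    #include <string>

    struct Str {
      std::string leaf;  // the text, once this node is a leaf
      Str* first;        // non-null for a cons node
      Str* second;

      explicit Str(const char* s) : leaf(s), first(0), second(0) {}
      Str(Str* a, Str* b) : first(a), second(b) {}

      size_t length() const {
        return first ? first->length() + second->length() : leaf.size();
      }
      // Get() on a cons node descends the tree on every single call.
      char Get(size_t index) const {
        if (!first) return leaf[index];
        size_t left = first->length();
        return index < left ? first->Get(index) : second->Get(index - left);
      }
      // Flatten() copies the rope into one leaf, once.
      void Flatten() {
        if (!first) return;
        std::string flat;
        flat.reserve(length());
        Append(&flat);
        leaf.swap(flat);
        first = second = 0;
      }
      void Append(std::string* out) const {
        if (!first) { out->append(leaf); return; }
        first->Append(out);
        second->Append(out);
      }
    };

    int main() {
      Str a("hello, "), b("world");
      Str cons(&a, &b);
      cons.Flatten();                    // what the patched SubString does
      std::printf("%c\n", cons.Get(7));  // now a direct index: prints w
      return 0;
    }
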
diff --git a/src/heap.h b/src/heap.h
index ee01c22..1c8e0e1 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -1456,6 +1456,11 @@
   static inline void ScavengePointer(HeapObject** p);
   static inline void ScavengeObject(HeapObject** p, HeapObject* object);
 
+  // An object may have an AllocationSite associated with it through a trailing
+  // AllocationMemento. Its feedback should be updated when objects are found
+  // in the heap.
+  static inline void UpdateAllocationSiteFeedback(HeapObject* object);
+
   // Support for partial snapshots.  After calling this we have a linear
   // space to write objects in each space.
   void ReserveSpace(int *sizes, Address* addresses);
@@ -1892,9 +1897,6 @@
 
   bool flush_monomorphic_ics_;
 
-  // AllocationMementos found in new space.
-  int allocation_mementos_found_;
-
   int scan_on_scavenge_pages_;
 
   NewSpace new_space_;
@@ -2110,6 +2112,8 @@
   void InitializeJSObjectFromMap(JSObject* obj,
                                  FixedArray* properties,
                                  Map* map);
+  void InitializeAllocationMemento(AllocationMemento* memento,
+                                   AllocationSite* allocation_site);
 
   bool CreateInitialMaps();
   bool CreateInitialObjects();
diff --git a/src/hydrogen-flow-engine.h b/src/hydrogen-flow-engine.h
index dfe43ec..4e12755 100644
--- a/src/hydrogen-flow-engine.h
+++ b/src/hydrogen-flow-engine.h
@@ -138,12 +138,19 @@
       }
 
       // Propagate the block state forward to all successor blocks.
-      for (int i = 0; i < block->end()->SuccessorCount(); i++) {
+      int max = block->end()->SuccessorCount();
+      for (int i = 0; i < max; i++) {
         HBasicBlock* succ = block->end()->SuccessorAt(i);
         IncrementPredecessorCount(succ);
         if (StateAt(succ) == NULL) {
           // This is the first state to reach the successor.
-          SetStateAt(succ, state->Copy(succ, zone_));
+          if (max == 1 && succ->predecessors()->length() == 1) {
+            // Optimization: successor can inherit this state.
+            SetStateAt(succ, state);
+          } else {
+            // Successor needs a copy of the state.
+            SetStateAt(succ, state->Copy(succ, zone_));
+          }
         } else {
           // Merge the current state with the state already at the successor.
           SetStateAt(succ, state->Merge(succ, StateAt(succ), zone_));
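
The flow-engine change avoids cloning analysis state across trivial edges: when a block ends with a single successor and that successor has a single predecessor, no other path can reach it, so the successor can inherit the predecessor's state object instead of copying it. A toy forward-propagation driver showing the same test (illustrative, not the Hydrogen engine):

    // Toy driver; State and Block are illustrative stand-ins.
    #include <cstdio>
    #include <vector>

    struct State {
      int facts;
      State* Copy() const {
        std::printf("copying state\n");  // the work the fast path avoids
        return new State(*this);
      }
    };

    struct Block {
      std::vector<Block*> successors;
      std::vector<Block*> predecessors;
      State* state;
      Block() : state(NULL) {}
    };

    void Propagate(Block* block) {
      State* state = block->state;
      size_t max = block->successors.size();
      for (size_t i = 0; i < max; i++) {
        Block* succ = block->successors[i];
        if (succ->state == NULL) {
          if (max == 1 && succ->predecessors.size() == 1) {
            succ->state = state;          // sole heir: inherit, don't copy
          } else {
            succ->state = state->Copy();  // reachable elsewhere: must copy
          }
        }
        // (merging into an existing successor state omitted)
      }
    }

    int main() {
      Block a, b;
      a.successors.push_back(&b);
      b.predecessors.push_back(&a);
      State initial = {7};
      a.state = &initial;
      Propagate(&a);  // no "copying state" line: b inherited a's state
      std::printf("b.facts = %d\n", b.state->facts);
      return 0;
    }
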
diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc
index 5675c55..e93d340 100644
--- a/src/hydrogen-instructions.cc
+++ b/src/hydrogen-instructions.cc
@@ -947,6 +947,25 @@
 }
 
 
+Range* HBoundsCheck::InferRange(Zone* zone) {
+  Representation r = representation();
+  if (r.IsSmiOrInteger32() && length()->HasRange()) {
+    int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
+    int lower = 0;
+
+    Range* result = new(zone) Range(lower, upper);
+    if (index()->HasRange()) {
+      result->Intersect(index()->range());
+    }
+
+    // In case of Smi representation, clamp result to Smi::kMaxValue.
+    if (r.IsSmi()) result->ClampToSmi();
+    return result;
+  }
+  return HValue::InferRange(zone);
+}
+
+
 void HBoundsCheckBaseIndexInformation::PrintDataTo(StringStream* stream) {
   stream->Add("base: ");
   base_index()->PrintNameTo(stream);
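
HBoundsCheck::InferRange records what a passing bounds check proves about the index: at least 0, and strictly below the length's upper bound (or equal to it when allow_equality is set), intersected with whatever range the index already had; the Smi clamp in the real code is omitted below. A small sketch of the arithmetic, with illustrative types:

    // Illustrative Range; the real one also clamps to Smi range.
    #include <algorithm>
    #include <cstdio>

    struct Range {
      int lower, upper;
      void Intersect(const Range& other) {
        lower = std::max(lower, other.lower);
        upper = std::min(upper, other.upper);
      }
    };

    Range AfterBoundsCheck(const Range& length, const Range* index,
                           bool allow_equality) {
      // index < length (or <= length when equality is allowed), index >= 0.
      Range result = {0, length.upper - (allow_equality ? 0 : 1)};
      if (index != NULL) result.Intersect(*index);
      return result;
    }

    int main() {
      Range length = {0, 100};  // the array is known to hold <= 100 elements
      Range index = {-5, 500};  // the index was only loosely bounded before
      Range after = AfterBoundsCheck(length, &index, false);
      std::printf("[%d, %d]\n", after.lower, after.upper);  // [0, 99]
      return 0;
    }
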
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index eac3c51..272c6bd 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -4029,6 +4029,8 @@
  protected:
   friend class HBoundsCheckBaseIndexInformation;
 
+  virtual Range* InferRange(Zone* zone) V8_OVERRIDE;
+
   virtual bool DataEquals(HValue* other) V8_OVERRIDE { return true; }
   bool skip_check_;
   HValue* base_;
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 634acd7..686cf90 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -2240,6 +2240,23 @@
     BuildCreateAllocationMemento(array,
                                  JSArray::kSize,
                                  allocation_site_payload);
+    if (FLAG_allocation_site_pretenuring) {
+      // TODO(mvstanton): move this code into BuildCreateAllocationMemento when
+      // constructed arrays also pay attention to pretenuring.
+      HObjectAccess access =
+          HObjectAccess::ForAllocationSiteOffset(
+              AllocationSite::kMementoCreateCountOffset);
+      HValue* create_info = Add<HLoadNamedField>(allocation_site_payload,
+                                                 access);
+      HInstruction* new_create_info = HAdd::New(zone(), context(),
+                                                create_info,
+                                                graph()->GetConstant1());
+      new_create_info->ClearFlag(HValue::kCanOverflow);
+      HStoreNamedField* store = Add<HStoreNamedField>(allocation_site_payload,
+                                                      access, new_create_info);
+      // No write barrier needed to store a smi.
+      store->SkipWriteBarrier();
+    }
   }
 
   int elements_location = JSArray::kSize;
@@ -9332,8 +9349,26 @@
       ? HType::JSArray() : HType::JSObject();
   HValue* object_size_constant = Add<HConstant>(
       boilerplate_object->map()->instance_size());
+
+  // Pull the pretenure mode from the allocation site and report when it says
+  // we should pretenure: that means the rudimentary memento counting in the
+  // garbage collector is having an effect.
+  PretenureFlag pretenure_flag = isolate()->heap()->GetPretenureMode();
+  if (FLAG_allocation_site_pretenuring) {
+    pretenure_flag = site_context->current()->GetPretenureMode()
+        ? TENURED
+        : NOT_TENURED;
+    if (FLAG_trace_track_allocation_sites) {
+      PrintF("Hydrogen: AllocationSite %p boilerplate %p %s\n",
+             static_cast<void*>(*(site_context->current())),
+             static_cast<void*>(*boilerplate_object),
+             pretenure_flag == TENURED ? "tenured" : "not tenured");
+    }
+  }
+
   HInstruction* object = Add<HAllocate>(object_size_constant, type,
-      isolate()->heap()->GetPretenureMode(), instance_type);
+      pretenure_flag, instance_type);
 
   BuildEmitObjectHeader(boilerplate_object, object);
 
@@ -9347,10 +9382,10 @@
     HValue* object_elements_size = Add<HConstant>(elements_size);
     if (boilerplate_object->HasFastDoubleElements()) {
       object_elements = Add<HAllocate>(object_elements_size, HType::JSObject(),
-          isolate()->heap()->GetPretenureMode(), FIXED_DOUBLE_ARRAY_TYPE);
+          pretenure_flag, FIXED_DOUBLE_ARRAY_TYPE);
     } else {
       object_elements = Add<HAllocate>(object_elements_size, HType::JSObject(),
-          isolate()->heap()->GetPretenureMode(), FIXED_ARRAY_TYPE);
+          pretenure_flag, FIXED_ARRAY_TYPE);
     }
   }
   BuildInitElementsInObjectHeader(boilerplate_object, object, object_elements);
@@ -9363,7 +9398,8 @@
 
   // Copy in-object properties.
   if (boilerplate_object->map()->NumberOfFields() != 0) {
-    BuildEmitInObjectProperties(boilerplate_object, object, site_context);
+    BuildEmitInObjectProperties(boilerplate_object, object, site_context,
+                                pretenure_flag);
   }
   return object;
 }
@@ -9416,7 +9452,8 @@
 void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
     Handle<JSObject> boilerplate_object,
     HInstruction* object,
-    AllocationSiteUsageContext* site_context) {
+    AllocationSiteUsageContext* site_context,
+    PretenureFlag pretenure_flag) {
   Handle<DescriptorArray> descriptors(
       boilerplate_object->map()->instance_descriptors());
   int limit = boilerplate_object->map()->NumberOfOwnDescriptors();
@@ -9452,15 +9489,13 @@
       if (representation.IsDouble()) {
         // Allocate a HeapNumber box and store the value into it.
         HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
-        // TODO(mvstanton): This heap number alloc does not have a corresponding
+        // This heap number allocation does not have a corresponding
         // AllocationSite. That is okay because
         // 1) it's a child object of another object with a valid allocation site
         // 2) we can just use the mode of the parent object for pretenuring
-        // The todo is replace GetPretenureMode() with
-        // site_context->top()->GetPretenureMode().
         HInstruction* double_box =
             Add<HAllocate>(heap_number_constant, HType::HeapNumber(),
-                isolate()->heap()->GetPretenureMode(), HEAP_NUMBER_TYPE);
+                pretenure_flag, HEAP_NUMBER_TYPE);
         AddStoreMapConstant(double_box,
             isolate()->factory()->heap_number_map());
         Add<HStoreNamedField>(double_box, HObjectAccess::ForHeapNumberValue(),
diff --git a/src/hydrogen.h b/src/hydrogen.h
index bd42896..6a6aef0 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -2422,7 +2422,8 @@
 
   void BuildEmitInObjectProperties(Handle<JSObject> boilerplate_object,
                                    HInstruction* object,
-                                   AllocationSiteUsageContext* site_context);
+                                   AllocationSiteUsageContext* site_context,
+                                   PretenureFlag pretenure_flag);
 
   void BuildEmitElements(Handle<JSObject> boilerplate_object,
                          Handle<FixedArrayBase> elements,
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 909294e..649a58b 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -1720,6 +1720,14 @@
   Handle<FixedArrayBase> constant_elements_values(
       FixedArrayBase::cast(constant_elements->get(1)));
 
+  AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
+      ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
+  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
+    // If elements-kind transitions are the only customer of allocation sites,
+    // tracking can be turned off once there is nothing left to transition to.
+    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
+  }
+
   Heap* heap = isolate()->heap();
   if (has_constant_fast_elements &&
       constant_elements_values->map() == heap->fixed_cow_array_map()) {
@@ -1732,7 +1740,7 @@
     __ mov(ecx, Immediate(constant_elements));
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        DONT_TRACK_ALLOCATION_SITE,
+        allocation_site_mode,
         length);
     __ CallStub(&stub);
   } else if (expr->depth() > 1 || Serializer::enabled() ||
@@ -1748,14 +1756,11 @@
            FLAG_smi_only_arrays);
     FastCloneShallowArrayStub::Mode mode =
         FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-    AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
-        ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
 
     // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
     // change, so it's possible to specialize the stub in advance.
     if (has_constant_fast_elements) {
       mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
-      allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
     }
 
     __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
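
The same mode-selection block is added to the ia32, mips and x64 full code
generators, hoisted out of the CLONE_ANY_ELEMENTS branch so the copy-on-write
path sees it too. A compilable distillation of the decision, with names
standing in for V8's flags and enums:

    #include <cstdio>

    enum AllocationSiteMode {
      DONT_TRACK_ALLOCATION_SITE,
      TRACK_ALLOCATION_SITE
    };

    static AllocationSiteMode SelectMode(bool track_allocation_sites,
                                         bool has_constant_fast_elements,
                                         bool allocation_site_pretenuring) {
      AllocationSiteMode mode = track_allocation_sites
          ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
      if (has_constant_fast_elements && !allocation_site_pretenuring) {
        // Transitioning was the only customer, and constant fast elements
        // cannot transition further, so tracking buys nothing.
        mode = DONT_TRACK_ALLOCATION_SITE;
      }
      return mode;
    }

    int main() {
      std::printf("%d\n", SelectMode(true, true, false));  // 0: don't track
      std::printf("%d\n", SelectMode(true, true, true));   // 1: track
    }
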
diff --git a/src/ic.cc b/src/ic.cc
index c22f3c1..557a483 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -879,9 +879,7 @@
       }
       if (!stub.is_null()) {
         set_target(*stub);
-#ifdef DEBUG
         if (FLAG_trace_ic) PrintF("[LoadIC : +#length /stringwrapper]\n");
-#endif
       }
       // Get the string if we have a string wrapper object.
       String* string = String::cast(JSValue::cast(*object)->value());
@@ -904,9 +902,7 @@
       }
       if (!stub.is_null()) {
         set_target(*stub);
-#ifdef DEBUG
         if (FLAG_trace_ic) PrintF("[LoadIC : +#prototype /function]\n");
-#endif
       }
       return *Accessors::FunctionGetPrototype(Handle<JSFunction>::cast(object));
     }
@@ -1637,7 +1633,8 @@
   Handle<JSObject> receiver = Handle<JSObject>::cast(object);
 
   Handle<JSObject> holder(lookup->holder());
-  StoreStubCompiler compiler(isolate(), strict_mode(), kind());
+  // Handlers do not use strict mode.
+  StoreStubCompiler compiler(isolate(), kNonStrictMode, kind());
   switch (lookup->type()) {
     case FIELD:
       return compiler.CompileStoreField(receiver, lookup, name);
@@ -1665,7 +1662,7 @@
         Handle<GlobalObject> global = Handle<GlobalObject>::cast(receiver);
         Handle<PropertyCell> cell(global->GetPropertyCell(lookup), isolate());
         Handle<Type> union_type = PropertyCell::UpdatedType(cell, value);
-        StoreGlobalStub stub(strict_mode(), union_type->IsConstant());
+        StoreGlobalStub stub(union_type->IsConstant());
 
         Handle<Code> code = stub.GetCodeCopyFromTemplate(
             isolate(), receiver->map(), *cell);
@@ -1674,9 +1671,7 @@
         return code;
       }
       ASSERT(holder.is_identical_to(receiver));
-      return strict_mode() == kStrictMode
-          ? isolate()->builtins()->StoreIC_Normal_Strict()
-          : isolate()->builtins()->StoreIC_Normal();
+      return isolate()->builtins()->StoreIC_Normal();
     case CALLBACKS: {
       if (kind() == Code::KEYED_STORE_IC) break;
       Handle<Object> callback(lookup->GetCallbackObject(), isolate());
@@ -2360,7 +2355,6 @@
   Maybe<Handle<Object> > result = stub.Result(left, right, isolate());
   if (!result.has_value) return Failure::Exception();
 
-#ifdef DEBUG
   if (FLAG_trace_ic) {
     char buffer[100];
     NoAllocationStringAllocator allocator(buffer,
@@ -2381,9 +2375,6 @@
   } else {
     stub.UpdateStatus(left, right, result);
   }
-#else
-  stub.UpdateStatus(left, right, result);
-#endif
 
   Handle<Code> code = stub.GetCode(isolate());
   set_target(*code);
@@ -2612,7 +2603,6 @@
   Handle<Code> new_target = stub.GetCode(isolate());
   set_target(*new_target);
 
-#ifdef DEBUG
   if (FLAG_trace_ic) {
     PrintF("[CompareIC in ");
     JavaScriptFrame::PrintTop(isolate(), stdout, false, true);
@@ -2626,7 +2616,6 @@
            Token::Name(op_),
            static_cast<void*>(*stub.GetCode(isolate())));
   }
-#endif
 
   // Activate inlined smi code.
   if (previous_state == UNINITIALIZED) {
diff --git a/src/ic.h b/src/ic.h
index 4a3e716..bfb73ac 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -606,11 +606,7 @@
   }
 
   virtual Handle<Code> slow_stub() const {
-    if (strict_mode() == kStrictMode) {
-      return isolate()->builtins()->StoreIC_Slow_Strict();
-    } else {
-      return isolate()->builtins()->StoreIC_Slow();
-    }
+    return isolate()->builtins()->StoreIC_Slow();
   }
 
   virtual Handle<Code> pre_monomorphic_stub() {
@@ -723,11 +719,7 @@
     }
   }
   virtual Handle<Code> slow_stub() const {
-    if (strict_mode() == kStrictMode) {
-      return isolate()->builtins()->KeyedStoreIC_Slow_Strict();
-    } else {
-      return isolate()->builtins()->KeyedStoreIC_Slow();
-    }
+    return isolate()->builtins()->KeyedStoreIC_Slow();
   }
   virtual Handle<Code> megamorphic_stub() {
     if (strict_mode() == kStrictMode) {
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index bfb9016..b60768b 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -406,8 +406,6 @@
   ASSERT(state_ == PREPARE_GC);
   ASSERT(encountered_weak_collections_ == Smi::FromInt(0));
 
-  heap()->allocation_mementos_found_ = 0;
-
   MarkLiveObjects();
   ASSERT(heap_->incremental_marking()->IsStopped());
 
@@ -449,11 +447,6 @@
     marking_parity_ = EVEN_MARKING_PARITY;
   }
 
-  if (FLAG_trace_track_allocation_sites &&
-      heap()->allocation_mementos_found_ > 0) {
-    PrintF("AllocationMementos found during mark-sweep = %d\n",
-           heap()->allocation_mementos_found_);
-  }
   tracer_ = NULL;
 }
 
@@ -1889,6 +1882,14 @@
   virtual Object* RetainAs(Object* object) {
     if (Marking::MarkBitFrom(HeapObject::cast(object)).Get()) {
       return object;
+    } else if (object->IsAllocationSite() &&
+               !(AllocationSite::cast(object)->IsZombie())) {
+      // "dead" AllocationSites need to live long enough for a traversal of new
+      // space. These sites get a one-time reprieve.
+      AllocationSite* site = AllocationSite::cast(object);
+      site->MarkZombie();
+      site->GetHeap()->mark_compact_collector()->MarkAllocationSite(site);
+      return object;
     } else {
       return NULL;
     }
@@ -2000,12 +2001,7 @@
       int size = object->Size();
       survivors_size += size;
 
-      if (FLAG_trace_track_allocation_sites && object->IsJSObject()) {
-        if (AllocationMemento::FindForJSObject(JSObject::cast(object), true)
-            != NULL) {
-          heap()->allocation_mementos_found_++;
-        }
-      }
+      Heap::UpdateAllocationSiteFeedback(object);
 
       offset++;
       current_cell >>= 1;
@@ -2098,6 +2094,12 @@
 }
 
 
+void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) {
+  MarkBit mark_bit = Marking::MarkBitFrom(site);
+  SetMark(site, mark_bit);
+}
+
+
 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
   // Mark the heap roots including global variables, stack variables,
   // etc., and all objects reachable from them.
diff --git a/src/mark-compact.h b/src/mark-compact.h
index aea5e1c..2a1d97d 100644
--- a/src/mark-compact.h
+++ b/src/mark-compact.h
@@ -739,6 +739,10 @@
   // marking its contents.
   void MarkWeakObjectToCodeTable();
 
+  // Special case for processing weak references in a full collection. We need
+  // to artificially keep AllocationSites alive for a time.
+  void MarkAllocationSite(AllocationSite* site);
+
  private:
   MarkCompactCollector();
   ~MarkCompactCollector();
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index 8b16e6c..0407f01 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -1796,6 +1796,14 @@
   Handle<FixedArrayBase> constant_elements_values(
       FixedArrayBase::cast(constant_elements->get(1)));
 
+  AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
+      ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
+  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
+    // If elements-kind transitions are the only customer of allocation sites,
+    // tracking can be turned off once there is nothing left to transition to.
+    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
+  }
+
   __ mov(a0, result_register());
   __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
@@ -1805,7 +1813,7 @@
       isolate()->heap()->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        DONT_TRACK_ALLOCATION_SITE,
+        allocation_site_mode,
         length);
     __ CallStub(&stub);
     __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
@@ -1820,12 +1828,9 @@
            FLAG_smi_only_arrays);
     FastCloneShallowArrayStub::Mode mode =
         FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-    AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
-        ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
 
     if (has_fast_elements) {
       mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
-      allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
     }
 
     FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
diff --git a/src/mips/ic-mips.cc b/src/mips/ic-mips.cc
index 98fb2f7..5062080 100644
--- a/src/mips/ic-mips.cc
+++ b/src/mips/ic-mips.cc
@@ -1650,12 +1650,10 @@
     return;
   }
 
-#ifdef DEBUG
   if (FLAG_trace_ic) {
     PrintF("[  patching ic at %p, andi=%p, delta=%d\n",
            address, andi_instruction_address, delta);
   }
-#endif
 
   Address patch_address =
       andi_instruction_address - delta * Instruction::kInstrSize;
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index af56262..71bacba 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -4633,10 +4633,13 @@
   LOperand* output = instr->result();
   Register scratch = scratch0();
 
-  __ SmiTagCheckOverflow(ToRegister(output), ToRegister(input), scratch);
+  ASSERT(output->IsRegister());
   if (!instr->hydrogen()->value()->HasRange() ||
       !instr->hydrogen()->value()->range()->IsInSmiRange()) {
+    __ SmiTagCheckOverflow(ToRegister(output), ToRegister(input), scratch);
     DeoptimizeIf(lt, instr->environment(), scratch, Operand(zero_reg));
+  } else {
+    __ SmiTag(ToRegister(output), ToRegister(input));
   }
 }
 
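
The MIPS change above stops emitting the overflow check when range analysis
has already proven the value fits in a Smi. A rough standalone sketch of the
two paths for 32-bit Smi tagging (one tag bit); `Deopt()` stands in for the
deoptimization bailout:

    #include <cstdint>
    #include <cstdio>

    static bool Deopt() { std::puts("deopt: Smi overflow"); return false; }

    static int32_t Shifted(int32_t value) {
      return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
    }

    // Returns false when the value cannot be represented as a Smi.
    static bool SmiTag(int32_t value, bool known_in_smi_range, int32_t* out) {
      if (known_in_smi_range) {
        *out = Shifted(value);  // range analysis proved this safe; no check
        return true;
      }
      int32_t tagged = Shifted(value);
      if ((tagged >> 1) != value) return Deopt();  // sign bit shifted out
      *out = tagged;
      return true;
    }

    int main() {
      int32_t t = 0;
      if (SmiTag(1000, true, &t)) std::printf("tagged: %d\n", t);
      SmiTag(0x40000000, false, &t);  // exceeds the 31-bit Smi payload
    }
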
diff --git a/src/mips/lithium-mips.cc b/src/mips/lithium-mips.cc
index d26da4a..1a99bb9 100644
--- a/src/mips/lithium-mips.cc
+++ b/src/mips/lithium-mips.cc
@@ -1951,8 +1951,8 @@
       HValue* val = instr->value();
       LOperand* value = UseRegister(val);
       LInstruction* result = val->CheckFlag(HInstruction::kUint32)
-          ? DefineSameAsFirst(new(zone()) LUint32ToSmi(value))
-          : DefineSameAsFirst(new(zone()) LInteger32ToSmi(value));
+          ? DefineAsRegister(new(zone()) LUint32ToSmi(value))
+          : DefineAsRegister(new(zone()) LInteger32ToSmi(value));
       if (val->HasRange() && val->range()->IsInSmiRange()) {
         return result;
       }
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 6e2ad06..832a682 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -1322,6 +1322,16 @@
 }
 
 
+void AllocationSite::MarkZombie() {
+  ASSERT(!IsZombie());
+  set_pretenure_decision(Smi::FromInt(kZombie));
+  // Clear all non-smi fields
+  set_transition_info(Smi::FromInt(0));
+  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
+                     SKIP_WRITE_BARRIER);
+}
+
+
 // Heuristic: We only need to create allocation site info if the boilerplate
 // elements kind is the initial elements kind.
 AllocationSiteMode AllocationSite::GetMode(
@@ -1348,6 +1358,9 @@
 
 
 inline bool AllocationSite::CanTrack(InstanceType type) {
+  if (FLAG_allocation_site_pretenuring) {
+    return type == JS_ARRAY_TYPE || type == JS_OBJECT_TYPE;
+  }
   return type == JS_ARRAY_TYPE;
 }
 
@@ -1367,6 +1380,45 @@
 }
 
 
+inline void AllocationSite::IncrementMementoFoundCount() {
+  int value = memento_found_count()->value();
+  set_memento_found_count(Smi::FromInt(value + 1));
+}
+
+
+inline void AllocationSite::IncrementMementoCreateCount() {
+  ASSERT(FLAG_allocation_site_pretenuring);
+  int value = memento_create_count()->value();
+  set_memento_create_count(Smi::FromInt(value + 1));
+}
+
+
+inline bool AllocationSite::DigestPretenuringFeedback() {
+  bool decision_made = false;
+  if (!PretenuringDecisionMade()) {
+    int create_count = memento_create_count()->value();
+    if (create_count >= kPretenureMinimumCreated) {
+      int found_count = memento_found_count()->value();
+      double ratio = static_cast<double>(found_count) / create_count;
+      if (FLAG_trace_track_allocation_sites) {
+        PrintF("AllocationSite: %p (created, found, ratio) (%d, %d, %f)\n",
+               static_cast<void*>(this), create_count, found_count, ratio);
+      }
+      int result = ratio >= kPretenureRatio ? kTenure : kDontTenure;
+      set_pretenure_decision(Smi::FromInt(result));
+      decision_made = true;
+      // TODO(mvstanton): if the decision represents a change, any dependent
+      // code registered for pretenuring changes should be deopted.
+    }
+  }
+
+  // Clear feedback calculation fields until the next GC.
+  set_memento_found_count(Smi::FromInt(0));
+  set_memento_create_count(Smi::FromInt(0));
+  return decision_made;
+}
+
+
 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
   object->ValidateElements();
   ElementsKind elements_kind = object->map()->elements_kind();
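
DigestPretenuringFeedback above decides tenuring from the ratio of mementos
found by the GC to mementos created since the last digest. A compilable
distillation using the constants this patch introduces
(kPretenureMinimumCreated = 100, kPretenureRatio = 0.60); everything else is
illustrative:

    #include <cstdio>

    enum Decision { kUndecided, kDontTenure, kTenure };

    static Decision Digest(int created, int found) {
      if (created < 100) return kUndecided;        // kPretenureMinimumCreated
      double ratio = static_cast<double>(found) / created;
      return ratio >= 0.60 ? kTenure : kDontTenure;  // kPretenureRatio
    }

    int main() {
      std::printf("%d\n", Digest(150, 120));  // 2: tenure (ratio 0.80)
      std::printf("%d\n", Digest(150, 30));   // 1: don't tenure (0.20)
      std::printf("%d\n", Digest(50, 50));    // 0: undecided, too few made
    }
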
diff --git a/src/objects.cc b/src/objects.cc
index 6e25e62..8fcfd1f 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -5718,10 +5718,7 @@
   Handle<JSObject> copy;
   if (copying) {
     Handle<AllocationSite> site_to_pass;
-    if (site_context()->activated() &&
-        AllocationSite::CanTrack(object->map()->instance_type()) &&
-        AllocationSite::GetMode(object->GetElementsKind()) ==
-        TRACK_ALLOCATION_SITE) {
+    if (site_context()->ShouldCreateMemento(object)) {
       site_to_pass = site_context()->current();
     }
     CALL_AND_RETRY_OR_DIE(isolate,
@@ -9181,9 +9178,10 @@
 AllocationMemento* AllocationMemento::FindForJSObject(JSObject* object,
                                                       bool in_GC) {
   // Currently, AllocationMemento objects are only allocated immediately
-  // after JSArrays in NewSpace, and detecting whether a JSArray has one
-  // involves carefully checking the object immediately after the JSArray
-  // (if there is one) to see if it's an AllocationMemento.
+  // after JSArrays and some JSObjects in NewSpace. Detecting whether a
+  // memento is present involves carefully checking the object immediately
+  // after the current object (if there is one) to see if it's an
+  // AllocationMemento.
   if (FLAG_track_allocation_sites && object->GetHeap()->InNewSpace(object)) {
     Address ptr_end = (reinterpret_cast<Address>(object) - kHeapObjectTag) +
         object->Size();
@@ -9201,7 +9199,9 @@
           object->GetHeap()->allocation_memento_map()) {
         AllocationMemento* memento = AllocationMemento::cast(
             reinterpret_cast<Object*>(ptr_end + kHeapObjectTag));
-        return memento;
+        if (memento->IsValid()) {
+          return memento;
+        }
       }
     }
   }
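
FindForJSObject peeks at the word immediately past object->Size() and treats
what follows as a memento only when that word is the allocation-memento map;
the patch additionally rejects mementos whose AllocationSite has been
zombified (the new IsValid() check). A rough sketch of the peek against a
flat byte buffer; the map constant and struct layout are invented for
illustration:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    static const uint32_t kMementoMap = 0xA110CA7E;  // stand-in map word

    struct Memento { uint32_t map; bool zombie_site; };

    // Peek just past the object for a memento map word.
    static const Memento* FindMemento(const uint8_t* object,
                                      size_t object_size,
                                      const uint8_t* space_end) {
      const uint8_t* ptr_end = object + object_size;
      if (ptr_end + sizeof(Memento) > space_end) return nullptr;  // off page
      Memento m;
      std::memcpy(&m, ptr_end, sizeof(m));
      if (m.map != kMementoMap) return nullptr;  // next object is other kind
      if (m.zombie_site) return nullptr;         // IsValid() in this patch
      return reinterpret_cast<const Memento*>(ptr_end);
    }

    int main() {
      uint8_t space[64] = {};
      size_t obj_size = 16;
      Memento m{kMementoMap, false};
      std::memcpy(space + obj_size, &m, sizeof(m));
      std::printf("found: %d\n",
                  FindMemento(space, obj_size, space + sizeof(space)) != 0);
    }
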
@@ -12789,6 +12789,9 @@
 }
 
 
+const double AllocationSite::kPretenureRatio = 0.60;
+
+
 bool AllocationSite::IsNestedSite() {
   ASSERT(FLAG_trace_track_allocation_sites);
   Object* current = GetHeap()->allocation_sites_list();
diff --git a/src/objects.h b/src/objects.h
index 2ae1429..8813f94 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -8120,6 +8120,16 @@
 class AllocationSite: public Struct {
  public:
   static const uint32_t kMaximumArrayBytesToPretransition = 8 * 1024;
+  static const double kPretenureRatio;
+  static const int kPretenureMinimumCreated = 100;
+
+  // Values for pretenure decision field.
+  enum {
+    kUndecided = 0,
+    kDontTenure = 1,
+    kTenure = 2,
+    kZombie = 3
+  };
 
   DECL_ACCESSORS(transition_info, Object)
   // nested_site threads a list of sites that represent nested literals
@@ -8128,16 +8138,14 @@
   DECL_ACCESSORS(nested_site, Object)
   DECL_ACCESSORS(memento_found_count, Smi)
   DECL_ACCESSORS(memento_create_count, Smi)
+  // TODO(mvstanton): we don't need a whole integer to record pretenure
+  // decision. Consider sharing space with memento_found_count.
   DECL_ACCESSORS(pretenure_decision, Smi)
   DECL_ACCESSORS(dependent_code, DependentCode)
   DECL_ACCESSORS(weak_next, Object)
 
   inline void Initialize();
 
-  bool HasNestedSites() {
-    return nested_site()->IsAllocationSite();
-  }
-
   // This method is expensive, it should only be called for reporting.
   bool IsNestedSite();
 
@@ -8145,6 +8153,28 @@
   class UnusedBits:             public BitField<int,          15, 14> {};
   class DoNotInlineBit:         public BitField<bool,         29,  1> {};
 
+  inline void IncrementMementoFoundCount();
+
+  inline void IncrementMementoCreateCount();
+
+  PretenureFlag GetPretenureMode() {
+    int mode = pretenure_decision()->value();
+    // Zombie objects "decide" to be untenured.
+    return (mode == kTenure) ? TENURED : NOT_TENURED;
+  }
+
+  // The pretenuring decision is made during gc, and the zombie state allows
+  // us to recognize when an allocation site is just being kept alive because
+  // a later traversal of new space may discover AllocationMementos that point
+  // to this AllocationSite.
+  bool IsZombie() {
+    return pretenure_decision()->value() == kZombie;
+  }
+
+  inline void MarkZombie();
+
+  inline bool DigestPretenuringFeedback();
+
   ElementsKind GetElementsKind() {
     ASSERT(!SitePointsToLiteral());
     int value = Smi::cast(transition_info())->value();
@@ -8218,6 +8248,10 @@
 
  private:
   inline DependentCode::DependencyGroup ToDependencyGroup(Reason reason);
+  bool PretenuringDecisionMade() {
+    return pretenure_decision()->value() != kUndecided;
+  }
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationSite);
 };
 
@@ -8229,7 +8263,10 @@
 
   DECL_ACCESSORS(allocation_site, Object)
 
-  bool IsValid() { return allocation_site()->IsAllocationSite(); }
+  bool IsValid() {
+    return allocation_site()->IsAllocationSite() &&
+        !AllocationSite::cast(allocation_site())->IsZombie();
+  }
   AllocationSite* GetAllocationSite() {
     ASSERT(IsValid());
     return AllocationSite::cast(allocation_site());
diff --git a/src/platform-posix.cc b/src/platform-posix.cc
index 923cd87..879dcc8 100644
--- a/src/platform-posix.cc
+++ b/src/platform-posix.cc
@@ -288,12 +288,6 @@
 // ----------------------------------------------------------------------------
 // Math functions
 
-double ceiling(double x) {
-  // Correct buggy 'ceil' on some systems (i.e. FreeBSD, OS X 10.5)
-  return (-1.0 < x && x < 0.0) ? -0.0 : ceil(x);
-}
-
-
 double modulo(double x, double y) {
   return fmod(x, y);
 }
diff --git a/src/platform-win32.cc b/src/platform-win32.cc
index 35411bf..ea11806 100644
--- a/src/platform-win32.cc
+++ b/src/platform-win32.cc
@@ -133,11 +133,6 @@
 }
 
 
-double ceiling(double x) {
-  return ceil(x);
-}
-
-
 #if V8_TARGET_ARCH_IA32
 static void MemMoveWrapper(void* dest, const void* src, size_t size) {
   memmove(dest, src, size);
diff --git a/src/platform.h b/src/platform.h
index 8e524ae..3bd87a9 100644
--- a/src/platform.h
+++ b/src/platform.h
@@ -93,7 +93,6 @@
 namespace v8 {
 namespace internal {
 
-double ceiling(double x);
 double modulo(double x, double y);
 
 // Custom implementation of math functions.
diff --git a/src/runtime.cc b/src/runtime.cc
index fbe4426..a8b7024 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -4463,10 +4463,6 @@
   RUNTIME_ASSERT(start >= 0);
   RUNTIME_ASSERT(end <= value->length());
   isolate->counters()->sub_string_runtime()->Increment();
-  if (end - start == 1) {
-     return isolate->heap()->LookupSingleCharacterStringFromCode(
-         value->Get(start));
-  }
   return value->SubString(start, end);
 }
 
diff --git a/src/version.cc b/src/version.cc
index e03ed8d..d016dda 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,7 +34,7 @@
 // system so their names cannot be changed without changing the scripts.
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     23
-#define BUILD_NUMBER      12
+#define BUILD_NUMBER      13
 #define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index ed4c3ad..8a3f965 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -1741,6 +1741,14 @@
   Handle<FixedArrayBase> constant_elements_values(
       FixedArrayBase::cast(constant_elements->get(1)));
 
+  AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
+      ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
+  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
+    // If elements-kind transitions are the only customer of allocation sites,
+    // tracking can be turned off once there is nothing left to transition to.
+    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
+  }
+
   Heap* heap = isolate()->heap();
   if (has_constant_fast_elements &&
       constant_elements_values->map() == heap->fixed_cow_array_map()) {
@@ -1753,7 +1761,7 @@
     __ Move(rcx, constant_elements);
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        DONT_TRACK_ALLOCATION_SITE,
+        allocation_site_mode,
         length);
     __ CallStub(&stub);
   } else if (expr->depth() > 1 || Serializer::enabled() ||
@@ -1769,14 +1777,11 @@
            FLAG_smi_only_arrays);
     FastCloneShallowArrayStub::Mode mode =
         FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-    AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
-        ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
 
     // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
     // change, so it's possible to specialize the stub in advance.
     if (has_constant_fast_elements) {
       mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
-      allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
     }
 
     __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
diff --git a/test/cctest/cctest.status b/test/cctest/cctest.status
index a596add..e4fe0d8 100644
--- a/test/cctest/cctest.status
+++ b/test/cctest/cctest.status
@@ -60,6 +60,13 @@
   # running several variants. Note that this still takes ages, because there
   # are actually 13 * 38 * 5 * 128 = 316160 individual tests hidden here.
   'test-parsing/ParserSync': [PASS, NO_VARIANTS],
+
+  ############################################################################
+  # Slow tests.
+  'test-api/Threading1': [PASS, ['mode == debug', SLOW]],
+  'test-api/Threading2': [PASS, ['mode == debug', SLOW]],
+  'test-api/Threading3': [PASS, ['mode == debug', SLOW]],
+  'test-api/Threading4': [PASS, ['mode == debug', SLOW]],
 }],  # ALWAYS
 
 ##############################################################################
@@ -89,6 +96,13 @@
   'test-serialize/DeserializeFromSecondSerializationAndRunScript2': [SKIP],
   'test-serialize/DeserializeAndRunScript2': [SKIP],
   'test-serialize/DeserializeFromSecondSerialization': [SKIP],
+
+  ############################################################################
+  # Slow tests.
+  'test-api/Threading1': [PASS, SLOW],
+  'test-api/Threading2': [PASS, SLOW],
+  'test-api/Threading3': [PASS, SLOW],
+  'test-api/Threading4': [PASS, SLOW],
 }],  # 'arch == arm'
 
 ##############################################################################
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index 23756db..94b397f 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -3299,6 +3299,73 @@
 }
 
 
+template<class T>
+static v8::UniquePersistent<T> PassUnique(v8::UniquePersistent<T> unique) {
+  return unique.Pass();
+}
+
+
+template<class T>
+static v8::UniquePersistent<T> ReturnUnique(v8::Isolate* isolate,
+                                            const v8::Persistent<T> & global) {
+  v8::UniquePersistent<String> unique(isolate, global);
+  return unique.Pass();
+}
+
+
+THREADED_TEST(UniquePersistent) {
+  v8::Isolate* isolate = CcTest::isolate();
+  v8::Persistent<String> global;
+  {
+    v8::HandleScope scope(isolate);
+    global.Reset(isolate, v8_str("str"));
+  }
+  v8::internal::GlobalHandles* global_handles =
+      reinterpret_cast<v8::internal::Isolate*>(isolate)->global_handles();
+  int initial_handle_count = global_handles->global_handles_count();
+  {
+    v8::UniquePersistent<String> unique(isolate, global);
+    CHECK_EQ(initial_handle_count + 1, global_handles->global_handles_count());
+    // Test assignment via Pass
+    {
+      v8::UniquePersistent<String> copy = unique.Pass();
+      CHECK(unique.IsEmpty());
+      CHECK(copy == global);
+      CHECK_EQ(initial_handle_count + 1,
+               global_handles->global_handles_count());
+      unique = copy.Pass();
+    }
+    // Test ctor via Pass
+    {
+      v8::UniquePersistent<String> copy(unique.Pass());
+      CHECK(unique.IsEmpty());
+      CHECK(copy == global);
+      CHECK_EQ(initial_handle_count + 1,
+               global_handles->global_handles_count());
+      unique = copy.Pass();
+    }
+    // Test pass through function call
+    {
+      v8::UniquePersistent<String> copy = PassUnique(unique.Pass());
+      CHECK(unique.IsEmpty());
+      CHECK(copy == global);
+      CHECK_EQ(initial_handle_count + 1,
+               global_handles->global_handles_count());
+      unique = copy.Pass();
+    }
+    CHECK_EQ(initial_handle_count + 1, global_handles->global_handles_count());
+  }
+  // Test pass from function call
+  {
+    v8::UniquePersistent<String> unique = ReturnUnique(isolate, global);
+    CHECK(unique == global);
+    CHECK_EQ(initial_handle_count + 1, global_handles->global_handles_count());
+  }
+  CHECK_EQ(initial_handle_count, global_handles->global_handles_count());
+  global.Reset();
+}
+
+
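
UniquePersistent (declared in the include/v8.h changes) is a move-only
persistent handle whose ownership moves through Pass(), as the test above
exercises. A fragment showing typical use; it assumes an initialized isolate
and an open handle scope, so it is a sketch rather than a complete program:

    #include <v8.h>

    // Takes ownership of `str` and hands the one global handle to the caller.
    v8::UniquePersistent<v8::String> MakeOwned(v8::Isolate* isolate,
                                               v8::Local<v8::String> str) {
      v8::UniquePersistent<v8::String> owned(isolate, str);
      return owned.Pass();  // `owned` is empty afterwards
    }

    // Caller side:
    //   v8::UniquePersistent<v8::String> mine = MakeOwned(isolate, s);
    //   ... use mine ...
    //   mine.Reset();  // releases the global handle
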
 THREADED_TEST(GlobalHandleUpcast) {
   v8::Isolate* isolate = CcTest::isolate();
   v8::HandleScope scope(isolate);
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index d8678c5..d6405b1 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -2184,6 +2184,7 @@
 
 TEST(OptimizedPretenuringAllocationFolding) {
   i::FLAG_allow_natives_syntax = true;
+  i::FLAG_allocation_site_pretenuring = false;
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@@ -2220,6 +2221,7 @@
 
 TEST(OptimizedPretenuringAllocationFoldingBlocks) {
   i::FLAG_allow_natives_syntax = true;
+  i::FLAG_allocation_site_pretenuring = false;
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@@ -2256,6 +2258,7 @@
 
 TEST(OptimizedPretenuringObjectArrayLiterals) {
   i::FLAG_allow_natives_syntax = true;
+  i::FLAG_allocation_site_pretenuring = false;
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@@ -2281,6 +2284,7 @@
 
 TEST(OptimizedPretenuringMixedInObjectProperties) {
   i::FLAG_allow_natives_syntax = true;
+  i::FLAG_allocation_site_pretenuring = false;
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@@ -2312,6 +2316,7 @@
 
 TEST(OptimizedPretenuringDoubleArrayProperties) {
   i::FLAG_allow_natives_syntax = true;
+  i::FLAG_allocation_site_pretenuring = false;
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@@ -2337,6 +2342,7 @@
 
 TEST(OptimizedPretenuringdoubleArrayLiterals) {
   i::FLAG_allow_natives_syntax = true;
+  i::FLAG_allocation_site_pretenuring = false;
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@@ -2362,6 +2368,7 @@
 
 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
   i::FLAG_allow_natives_syntax = true;
+  i::FLAG_allocation_site_pretenuring = false;
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@@ -2396,6 +2403,7 @@
 
 TEST(OptimizedPretenuringNestedObjectLiterals) {
   i::FLAG_allow_natives_syntax = true;
+  i::FLAG_allocation_site_pretenuring = false;
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@@ -2430,6 +2438,7 @@
 
 TEST(OptimizedPretenuringNestedDoubleLiterals) {
   i::FLAG_allow_natives_syntax = true;
+  i::FLAG_allocation_site_pretenuring = false;
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@@ -2493,6 +2502,7 @@
 
 TEST(OptimizedPretenuringCallNew) {
   i::FLAG_allow_natives_syntax = true;
+  i::FLAG_allocation_site_pretenuring = false;
   i::FLAG_pretenuring_call_new = true;
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
diff --git a/test/mjsunit/mjsunit.status b/test/mjsunit/mjsunit.status
index be3ec52..a0b45a0 100644
--- a/test/mjsunit/mjsunit.status
+++ b/test/mjsunit/mjsunit.status
@@ -33,7 +33,7 @@
   ##############################################################################
   # Flaky tests.
   # BUG(v8:2921).
-  'debug-step-4-in-frame': [PASS, FAIL],
+  'debug-step-4-in-frame': [PASS, FAIL, SLOW],
 
   ##############################################################################
   # Fails.
@@ -160,6 +160,18 @@
 
   # Currently always deopt on minus zero
   'math-floor-of-div-minus-zero': [SKIP],
+
+  ############################################################################
+  # Slow tests.
+  'regress/regress-2185-2': [PASS, SLOW],
+  'mirror-object': [PASS, SLOW],
+  'compiler/osr-with-args': [PASS, SLOW],
+  'array-sort': [PASS, SLOW],
+  'packed-elements': [PASS, SLOW],
+  'regress/regress-91008': [PASS, SLOW],
+  'regress/regress-2790': [PASS, SLOW],
+  'regress/regress-json-stringify-gc': [PASS, SLOW],
+  'regress/regress-1122': [PASS, SLOW],
 }],  # 'arch == arm or arch == android_arm'
 
 ##############################################################################
diff --git a/test/mjsunit/regress/regress-3025.js b/test/mjsunit/regress/regress-3025.js
new file mode 100644
index 0000000..ccb3830
--- /dev/null
+++ b/test/mjsunit/regress/regress-3025.js
@@ -0,0 +1,32 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var n = 0x8000000000000800;
+assertEquals(n, 9223372036854778000);
+var s = n.toString(5);
+var v = parseInt(s, 5);
+assertEquals(n, v);
diff --git a/tools/gyp/v8.gyp b/tools/gyp/v8.gyp
index 37fd407..a7bc992 100644
--- a/tools/gyp/v8.gyp
+++ b/tools/gyp/v8.gyp
@@ -334,6 +334,8 @@
         '../../src/execution.h',
         '../../src/extensions/externalize-string-extension.cc',
         '../../src/extensions/externalize-string-extension.h',
+        '../../src/extensions/free-buffer-extension.cc',
+        '../../src/extensions/free-buffer-extension.h',
         '../../src/extensions/gc-extension.cc',
         '../../src/extensions/gc-extension.h',
         '../../src/extensions/statistics-extension.cc',
diff --git a/tools/run-tests.py b/tools/run-tests.py
index 32b3f49..79095f6 100755
--- a/tools/run-tests.py
+++ b/tools/run-tests.py
@@ -293,14 +293,14 @@
   suite_paths = utils.GetSuitePaths(join(workspace, "test"))
 
   if len(args) == 0:
-    suite_paths = [ s for s in suite_paths if s in DEFAULT_TESTS ]
+    suite_paths = [ s for s in DEFAULT_TESTS if s in suite_paths ]
   else:
     args_suites = set()
     for arg in args:
       suite = arg.split(os.path.sep)[0]
       if not suite in args_suites:
         args_suites.add(suite)
-    suite_paths = [ s for s in suite_paths if s in args_suites ]
+    suite_paths = [ s for s in args_suites if s in suite_paths ]
 
   suites = []
   for root in suite_paths: