Merge V8 at branches/3.2 r8200: Initial merge by Git

Change-Id: I5c434306e98132997e9c5f6024b6ce200b255edf
diff --git a/src/ic.h b/src/ic.h
index 7b7ab43..911cbd8 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -296,6 +296,14 @@
                                    bool support_wrappers);
   static void GenerateFunctionPrototype(MacroAssembler* masm);
 
+  // Clear the use of the inlined version.
+  static void ClearInlinedVersion(Address address);
+
+  // The offset from the inlined patch site to the start of the
+  // inlined load instruction.  It is architecture-dependent, and not
+  // used on ARM.
+  static const int kOffsetToLoadInstruction;
+
  private:
   // Update the inline cache and the global stub cache based on the
   // lookup result.
@@ -320,6 +328,13 @@
 
   static void Clear(Address address, Code* target);
 
+  static bool PatchInlinedLoad(Address address, Object* map, int index);
+
+  static bool PatchInlinedContextualLoad(Address address,
+                                         Object* map,
+                                         Object* cell,
+                                         bool is_dont_delete);
+
   friend class IC;
 };
 
@@ -346,6 +361,9 @@
 
   static void GenerateIndexedInterceptor(MacroAssembler* masm);
 
+  // Clear the use of the inlined version.
+  static void ClearInlinedVersion(Address address);
+
   // Bit mask to be tested against bit field for the cases when
   // generic stub should go into slow case.
   // Access check is necessary explicitly since generic stub does not perform
@@ -389,6 +407,10 @@
 
   static void Clear(Address address, Code* target);
 
+  // Support for patching the map that is checked in an inlined
+  // version of keyed load.
+  static bool PatchInlinedLoad(Address address, Object* map);
+
   friend class IC;
 };
 
@@ -415,6 +437,13 @@
   static void GenerateGlobalProxy(MacroAssembler* masm,
                                   StrictModeFlag strict_mode);
 
+  // Clear the use of the inlined version.
+  static void ClearInlinedVersion(Address address);
+
+  // The offset from the inlined patch site to the start of the
+  // inlined store instruction.
+  static const int kOffsetToStoreInstruction;
+
  private:
   // Update the inline cache and the global stub cache based on the
   // lookup result.
@@ -460,6 +489,10 @@
 
   static void Clear(Address address, Code* target);
 
+  // Support for patching the index and the map that are checked in an
+  // inlined version of the named store.
+  static bool PatchInlinedStore(Address address, Object* map, int index);
+
   friend class IC;
 };
 
@@ -481,6 +514,12 @@
                                          StrictModeFlag strict_mode);
   static void GenerateGeneric(MacroAssembler* masm, StrictModeFlag strict_mode);
 
+  // Clear the inlined version so the IC is always hit.
+  static void ClearInlinedVersion(Address address);
+
+  // Restore the inlined version so the fast case can be hit again.
+  static void RestoreInlinedVersion(Address address);
+
  private:
   // Update the inline cache.
   void UpdateCaches(LookupResult* lookup,
@@ -525,6 +564,14 @@
 
   static void Clear(Address address, Code* target);
 
+  // Support for patching the map that is checked in an inlined
+  // version of keyed store.
+  // The address is the patch point for the IC call
+  // (Assembler::kCallTargetAddressOffset bytes before the
+  // return address of the call).
+  // The map is the new map that the inlined code should check against.
+  static bool PatchInlinedStore(Address address, Object* map);
+
   friend class IC;
 };
 
@@ -539,7 +586,6 @@
     INT32,
     HEAP_NUMBER,
     ODDBALL,
-    BOTH_STRING,  // Only used for addition operation.
     STRING,  // Only used for addition operation.  At least one string operand.
     GENERIC
   };