Entry point cleanup.

Create a set of entry points needed for image methods to avoid fix-up at load time (generation sketched after this list):
 - interpreter - bridge to interpreter, bridge to compiled code
 - jni - dlsym lookup
 - quick - resolution and bridge to interpreter
 - portable - resolution and bridge to interpreter
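
A minimal sketch of how these are now produced (drawn from the compiler_driver.cc
hunk below): every entry point goes through one generic helper in
trampolines/trampoline_compiler.cc, parameterized by instruction set, calling
convention and thread offset. For example, the jni and quick cases:

  const std::vector<uint8_t>* CompilerDriver::CreateJniDlsymLookup() const {
    // Generic helper: instruction set + ABI + thread offset of the target entry point.
    return CreateTrampoline(instruction_set_, kJniAbi, JNI_ENTRYPOINT_OFFSET(pDlsymLookup));
  }

  const std::vector<uint8_t>* CompilerDriver::CreateQuickToInterpreterBridge() const {
    return CreateTrampoline(instruction_set_, kQuickAbi,
                            QUICK_ENTRYPOINT_OFFSET(pQuickToInterpreterBridge));
  }

The interpreter and portable bridges follow the same pattern, using kInterpreterAbi
and kPortableAbi with their respective *_ENTRYPOINT_OFFSET macros.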

Fix the JNI workaround to use its argument-rewriting code, which had been
accidentally disabled.
Remove the abstract method error stub; use the interpreter bridge instead.
Consolidate trampoline (previously stub) generation in a generic helper.
Simplify trampolines to jump directly into assembly code, keeping the stack crawlable.
Dex: replace use of int with ThreadOffset for values that are thread offsets
(conversion pattern sketched below).
Tidy entry point routines across interpreter, jni, quick and portable.
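
For example, the ThreadOffset conversion in the ARM backend (taken from the
call_arm.cc hunk below) replaces a raw int offset with a typed value that is only
narrowed where the displacement is actually encoded:

  // Before: thread offsets travelled through the codegen helpers as plain ints.
  LoadWordDisp(rARM_SELF, QUICK_ENTRYPOINT_OFFSET(pLockObjectFromCode), rARM_LR);

  // After: the macro yields a ThreadOffset; helpers such as LoadHelper and
  // OpThreadMem now take ThreadOffset, and Int32Value() is called at the use site.
  LoadWordDisp(rARM_SELF, QUICK_ENTRYPOINT_OFFSET(pLockObject).Int32Value(), rARM_LR);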

Change-Id: I52a7c2bbb1b7e0ff8a3c3100b774212309d0828e
(cherry picked from commit 848871b4d8481229c32e0d048a9856e5a9a17ef9)
diff --git a/compiler/Android.mk b/compiler/Android.mk
index fec1e11..f81b460 100644
--- a/compiler/Android.mk
+++ b/compiler/Android.mk
@@ -75,8 +75,7 @@
 	llvm/runtime_support_builder_arm.cc \
 	llvm/runtime_support_builder_thumb2.cc \
 	llvm/runtime_support_builder_x86.cc \
-	stubs/portable/stubs.cc \
-	stubs/quick/stubs.cc \
+	trampolines/trampoline_compiler.cc \
 	utils/arm/assembler_arm.cc \
 	utils/arm/managed_register_arm.cc \
 	utils/assembler.cc \
diff --git a/compiler/dex/quick/arm/call_arm.cc b/compiler/dex/quick/arm/call_arm.cc
index 745e43d..2d8e24f 100644
--- a/compiler/dex/quick/arm/call_arm.cc
+++ b/compiler/dex/quick/arm/call_arm.cc
@@ -432,7 +432,7 @@
   // Making a call - use explicit registers
   FlushAllRegs();   /* Everything to home location */
   LoadValueDirectFixed(rl_src, r0);
-  LoadWordDisp(rARM_SELF, QUICK_ENTRYPOINT_OFFSET(pHandleFillArrayDataFromCode),
+  LoadWordDisp(rARM_SELF, QUICK_ENTRYPOINT_OFFSET(pHandleFillArrayData).Int32Value(),
                rARM_LR);
   // Materialize a pointer to the fill data image
   NewLIR3(kThumb2Adr, r1, 0, reinterpret_cast<uintptr_t>(tab_rec));
@@ -488,7 +488,7 @@
   OpRegImm(kOpCmp, r1, 0);
   OpIT(kCondNe, "T");
   // Go expensive route - artLockObjectFromCode(self, obj);
-  LoadWordDisp(rARM_SELF, QUICK_ENTRYPOINT_OFFSET(pLockObjectFromCode), rARM_LR);
+  LoadWordDisp(rARM_SELF, QUICK_ENTRYPOINT_OFFSET(pLockObject).Int32Value(), rARM_LR);
   ClobberCalleeSave();
   LIR* call_inst = OpReg(kOpBlx, rARM_LR);
   MarkSafepointPC(call_inst);
@@ -519,7 +519,7 @@
   OpIT(kCondEq, "EE");
   StoreWordDisp(r0, mirror::Object::MonitorOffset().Int32Value(), r3);
   // Go expensive route - UnlockObjectFromCode(obj);
-  LoadWordDisp(rARM_SELF, QUICK_ENTRYPOINT_OFFSET(pUnlockObjectFromCode), rARM_LR);
+  LoadWordDisp(rARM_SELF, QUICK_ENTRYPOINT_OFFSET(pUnlockObject).Int32Value(), rARM_LR);
   ClobberCalleeSave();
   LIR* call_inst = OpReg(kOpBlx, rARM_LR);
   MarkSafepointPC(call_inst);
diff --git a/compiler/dex/quick/arm/codegen_arm.h b/compiler/dex/quick/arm/codegen_arm.h
index 1599941..f1ccfa0 100644
--- a/compiler/dex/quick/arm/codegen_arm.h
+++ b/compiler/dex/quick/arm/codegen_arm.h
@@ -28,7 +28,7 @@
     // Required for target - codegen helpers.
     bool SmallLiteralDivide(Instruction::Code dalvik_opcode, RegLocation rl_src,
                                     RegLocation rl_dest, int lit);
-    int LoadHelper(int offset);
+    int LoadHelper(ThreadOffset offset);
     LIR* LoadBaseDisp(int rBase, int displacement, int r_dest, OpSize size, int s_reg);
     LIR* LoadBaseDispWide(int rBase, int displacement, int r_dest_lo, int r_dest_hi,
                           int s_reg);
@@ -153,12 +153,12 @@
     LIR* OpRegRegImm(OpKind op, int r_dest, int r_src1, int value);
     LIR* OpRegRegReg(OpKind op, int r_dest, int r_src1, int r_src2);
     LIR* OpTestSuspend(LIR* target);
-    LIR* OpThreadMem(OpKind op, int thread_offset);
+    LIR* OpThreadMem(OpKind op, ThreadOffset thread_offset);
     LIR* OpVldm(int rBase, int count);
     LIR* OpVstm(int rBase, int count);
     void OpLea(int rBase, int reg1, int reg2, int scale, int offset);
     void OpRegCopyWide(int dest_lo, int dest_hi, int src_lo, int src_hi);
-    void OpTlsCmp(int offset, int val);
+    void OpTlsCmp(ThreadOffset offset, int val);
 
     RegLocation ArgLoc(RegLocation loc);
     LIR* LoadBaseDispBody(int rBase, int displacement, int r_dest, int r_dest_hi, OpSize size,
diff --git a/compiler/dex/quick/arm/int_arm.cc b/compiler/dex/quick/arm/int_arm.cc
index 9db1016..c258019 100644
--- a/compiler/dex/quick/arm/int_arm.cc
+++ b/compiler/dex/quick/arm/int_arm.cc
@@ -498,7 +498,7 @@
   LOG(FATAL) << "Unexpected use of OpLea for Arm";
 }
 
-void ArmMir2Lir::OpTlsCmp(int offset, int val) {
+void ArmMir2Lir::OpTlsCmp(ThreadOffset offset, int val) {
   LOG(FATAL) << "Unexpected use of OpTlsCmp for Arm";
 }
 
@@ -665,7 +665,7 @@
      */
     RegLocation rl_result;
     if (BadOverlap(rl_src1, rl_dest) || (BadOverlap(rl_src2, rl_dest))) {
-      int func_offset = QUICK_ENTRYPOINT_OFFSET(pLmul);
+      ThreadOffset func_offset = QUICK_ENTRYPOINT_OFFSET(pLmul);
       FlushAllRegs();
       CallRuntimeHelperRegLocationRegLocation(func_offset, rl_src1, rl_src2, false);
       rl_result = GetReturnWide(false);
@@ -956,7 +956,7 @@
 
   // Get the array's class.
   LoadWordDisp(r_array, mirror::Object::ClassOffset().Int32Value(), r_array_class);
-  CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pCanPutArrayElementFromCode), r_value,
+  CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pCanPutArrayElement), r_value,
                           r_array_class, true);
   // Redo LoadValues in case they didn't survive the call.
   LoadValueDirectFixed(rl_array, r_array);  // Reload array
diff --git a/compiler/dex/quick/arm/target_arm.cc b/compiler/dex/quick/arm/target_arm.cc
index 6f37798..47d3d97 100644
--- a/compiler/dex/quick/arm/target_arm.cc
+++ b/compiler/dex/quick/arm/target_arm.cc
@@ -714,8 +714,8 @@
   FreeTemp(r3);
 }
 
-int ArmMir2Lir::LoadHelper(int offset) {
-  LoadWordDisp(rARM_SELF, offset, rARM_LR);
+int ArmMir2Lir::LoadHelper(ThreadOffset offset) {
+  LoadWordDisp(rARM_SELF, offset.Int32Value(), rARM_LR);
   return rARM_LR;
 }
 
diff --git a/compiler/dex/quick/arm/utility_arm.cc b/compiler/dex/quick/arm/utility_arm.cc
index afc8a66..c63de69 100644
--- a/compiler/dex/quick/arm/utility_arm.cc
+++ b/compiler/dex/quick/arm/utility_arm.cc
@@ -1029,7 +1029,7 @@
   return res;
 }
 
-LIR* ArmMir2Lir::OpThreadMem(OpKind op, int thread_offset) {
+LIR* ArmMir2Lir::OpThreadMem(OpKind op, ThreadOffset thread_offset) {
   LOG(FATAL) << "Unexpected use of OpThreadMem for Arm";
   return NULL;
 }
diff --git a/compiler/dex/quick/gen_common.cc b/compiler/dex/quick/gen_common.cc
index ebe10bb..298d389 100644
--- a/compiler/dex/quick/gen_common.cc
+++ b/compiler/dex/quick/gen_common.cc
@@ -208,12 +208,12 @@
 void Mir2Lir::GenNewArray(uint32_t type_idx, RegLocation rl_dest,
                           RegLocation rl_src) {
   FlushAllRegs();  /* Everything to home location */
-  int func_offset;
+  ThreadOffset func_offset(-1);
   if (cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx, *cu_->dex_file,
                                                        type_idx)) {
-    func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocArrayFromCode);
+    func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocArray);
   } else {
-    func_offset= QUICK_ENTRYPOINT_OFFSET(pAllocArrayFromCodeWithAccessCheck);
+    func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocArrayWithAccessCheck);
   }
   CallRuntimeHelperImmMethodRegLocation(func_offset, type_idx, rl_src, true);
   RegLocation rl_result = GetReturn(false);
@@ -230,12 +230,12 @@
   int elems = info->num_arg_words;
   int type_idx = info->index;
   FlushAllRegs();  /* Everything to home location */
-  int func_offset;
+  ThreadOffset func_offset(-1);
   if (cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx, *cu_->dex_file,
                                                        type_idx)) {
-    func_offset = QUICK_ENTRYPOINT_OFFSET(pCheckAndAllocArrayFromCode);
+    func_offset = QUICK_ENTRYPOINT_OFFSET(pCheckAndAllocArray);
   } else {
-    func_offset = QUICK_ENTRYPOINT_OFFSET(pCheckAndAllocArrayFromCodeWithAccessCheck);
+    func_offset = QUICK_ENTRYPOINT_OFFSET(pCheckAndAllocArrayWithAccessCheck);
   }
   CallRuntimeHelperImmMethodImm(func_offset, type_idx, elems, true);
   FreeTemp(TargetReg(kArg2));
@@ -408,9 +408,10 @@
     FreeTemp(rBase);
   } else {
     FlushAllRegs();  // Everything to home locations
-    int setter_offset = is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pSet64Static) :
-        (is_object ? QUICK_ENTRYPOINT_OFFSET(pSetObjStatic)
-        : QUICK_ENTRYPOINT_OFFSET(pSet32Static));
+    ThreadOffset setter_offset =
+        is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pSet64Static)
+                          : (is_object ? QUICK_ENTRYPOINT_OFFSET(pSetObjStatic)
+                                       : QUICK_ENTRYPOINT_OFFSET(pSet32Static));
     CallRuntimeHelperImmRegLocation(setter_offset, field_idx, rl_src, true);
   }
 }
@@ -483,9 +484,10 @@
     }
   } else {
     FlushAllRegs();  // Everything to home locations
-    int getterOffset = is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pGet64Static) :
-        (is_object ? QUICK_ENTRYPOINT_OFFSET(pGetObjStatic)
-        : QUICK_ENTRYPOINT_OFFSET(pGet32Static));
+    ThreadOffset getterOffset =
+        is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pGet64Static)
+                          : (is_object ? QUICK_ENTRYPOINT_OFFSET(pGetObjStatic)
+                                       : QUICK_ENTRYPOINT_OFFSET(pGet32Static));
     CallRuntimeHelperImm(getterOffset, field_idx, true);
     if (is_long_or_double) {
       RegLocation rl_result = GetReturnWide(rl_dest.fp);
@@ -499,7 +501,7 @@
 
 void Mir2Lir::HandleSuspendLaunchPads() {
   int num_elems = suspend_launchpads_.Size();
-  int helper_offset = QUICK_ENTRYPOINT_OFFSET(pTestSuspendFromCode);
+  ThreadOffset helper_offset = QUICK_ENTRYPOINT_OFFSET(pTestSuspend);
   for (int i = 0; i < num_elems; i++) {
     ResetRegPool();
     ResetDefTracking();
@@ -539,13 +541,13 @@
     LIR* lab = throw_launchpads_.Get(i);
     current_dalvik_offset_ = lab->operands[1];
     AppendLIR(lab);
-    int func_offset = 0;
+    ThreadOffset func_offset(-1);
     int v1 = lab->operands[2];
     int v2 = lab->operands[3];
     bool target_x86 = (cu_->instruction_set == kX86);
     switch (lab->operands[0]) {
       case kThrowNullPointer:
-        func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowNullPointerFromCode);
+        func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowNullPointer);
         break;
       case kThrowConstantArrayBounds:  // v1 is length reg (for Arm/Mips), v2 constant index
         // v1 holds the constant array index.  Mips/Arm uses v2 for length, x86 reloads.
@@ -557,7 +559,7 @@
         // Make sure the following LoadConstant doesn't mess with kArg1.
         LockTemp(TargetReg(kArg1));
         LoadConstant(TargetReg(kArg0), v2);
-        func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowArrayBoundsFromCode);
+        func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowArrayBounds);
         break;
       case kThrowArrayBounds:
         // Move v1 (array index) to kArg0 and v2 (array length) to kArg1
@@ -590,18 +592,18 @@
             OpRegCopy(TargetReg(kArg0), v1);
           }
         }
-        func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowArrayBoundsFromCode);
+        func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowArrayBounds);
         break;
       case kThrowDivZero:
-        func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowDivZeroFromCode);
+        func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowDivZero);
         break;
       case kThrowNoSuchMethod:
         OpRegCopy(TargetReg(kArg0), v1);
         func_offset =
-          QUICK_ENTRYPOINT_OFFSET(pThrowNoSuchMethodFromCode);
+          QUICK_ENTRYPOINT_OFFSET(pThrowNoSuchMethod);
         break;
       case kThrowStackOverflow:
-        func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowStackOverflowFromCode);
+        func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowStackOverflow);
         // Restore stack alignment
         if (target_x86) {
           OpRegImm(kOpAdd, TargetReg(kSp), frame_size_);
@@ -664,9 +666,10 @@
       StoreValue(rl_dest, rl_result);
     }
   } else {
-    int getterOffset = is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pGet64Instance) :
-        (is_object ? QUICK_ENTRYPOINT_OFFSET(pGetObjInstance)
-        : QUICK_ENTRYPOINT_OFFSET(pGet32Instance));
+    ThreadOffset getterOffset =
+        is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pGet64Instance)
+                          : (is_object ? QUICK_ENTRYPOINT_OFFSET(pGetObjInstance)
+                                       : QUICK_ENTRYPOINT_OFFSET(pGet32Instance));
     CallRuntimeHelperImmRegLocation(getterOffset, field_idx, rl_obj, true);
     if (is_long_or_double) {
       RegLocation rl_result = GetReturnWide(rl_dest.fp);
@@ -719,9 +722,10 @@
       }
     }
   } else {
-    int setter_offset = is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pSet64Instance) :
-        (is_object ? QUICK_ENTRYPOINT_OFFSET(pSetObjInstance)
-        : QUICK_ENTRYPOINT_OFFSET(pSet32Instance));
+    ThreadOffset setter_offset =
+        is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pSet64Instance)
+                          : (is_object ? QUICK_ENTRYPOINT_OFFSET(pSetObjInstance)
+                                       : QUICK_ENTRYPOINT_OFFSET(pSet32Instance));
     CallRuntimeHelperImmRegLocationRegLocation(setter_offset, field_idx, rl_obj, rl_src, true);
   }
 }
@@ -735,7 +739,7 @@
                                                    type_idx)) {
     // Call out to helper which resolves type and verifies access.
     // Resolved type returned in kRet0.
-    CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(pInitializeTypeAndVerifyAccessFromCode),
+    CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(pInitializeTypeAndVerifyAccess),
                             type_idx, rl_method.low_reg, true);
     RegLocation rl_result = GetReturn(false);
     StoreValue(rl_dest, rl_result);
@@ -764,7 +768,7 @@
       // TUNING: move slow path to end & remove unconditional branch
       LIR* target1 = NewLIR0(kPseudoTargetLabel);
       // Call out to helper, which will return resolved type in kArg0
-      CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(pInitializeTypeFromCode), type_idx,
+      CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(pInitializeType), type_idx,
                               rl_method.low_reg, true);
       RegLocation rl_result = GetReturn(false);
       StoreValue(rl_dest, rl_result);
@@ -797,7 +801,7 @@
     LoadWordDisp(TargetReg(kArg2),
                  mirror::AbstractMethod::DexCacheStringsOffset().Int32Value(), TargetReg(kArg0));
     // Might call out to helper, which will return resolved string in kRet0
-    int r_tgt = CallHelperSetup(QUICK_ENTRYPOINT_OFFSET(pResolveStringFromCode));
+    int r_tgt = CallHelperSetup(QUICK_ENTRYPOINT_OFFSET(pResolveString));
     LoadWordDisp(TargetReg(kArg0), offset_of_string, TargetReg(kRet0));
     LoadConstant(TargetReg(kArg1), string_idx);
     if (cu_->instruction_set == kThumb2) {
@@ -821,7 +825,7 @@
       branch->target = target;
     } else {
       DCHECK_EQ(cu_->instruction_set, kX86);
-      CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pResolveStringFromCode), TargetReg(kArg2),
+      CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pResolveString), TargetReg(kArg2),
                               TargetReg(kArg1), true);
     }
     GenBarrier();
@@ -845,12 +849,12 @@
   FlushAllRegs();  /* Everything to home location */
   // alloc will always check for resolution, do we also need to verify
   // access because the verifier was unable to?
-  int func_offset;
+  ThreadOffset func_offset(-1);
   if (cu_->compiler_driver->CanAccessInstantiableTypeWithoutChecks(
       cu_->method_idx, *cu_->dex_file, type_idx)) {
-    func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocObjectFromCode);
+    func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocObject);
   } else {
-    func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocObjectFromCodeWithAccessCheck);
+    func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocObjectWithAccessCheck);
   }
   CallRuntimeHelperImmMethod(func_offset, type_idx, true);
   RegLocation rl_result = GetReturn(false);
@@ -929,7 +933,7 @@
   if (needs_access_check) {
     // Check we have access to type_idx and if not throw IllegalAccessError,
     // returns Class* in kArg0
-    CallRuntimeHelperImm(QUICK_ENTRYPOINT_OFFSET(pInitializeTypeAndVerifyAccessFromCode),
+    CallRuntimeHelperImm(QUICK_ENTRYPOINT_OFFSET(pInitializeTypeAndVerifyAccess),
                          type_idx, true);
     OpRegCopy(class_reg, TargetReg(kRet0));  // Align usage with fast path
     LoadValueDirectFixed(rl_src, TargetReg(kArg0));  // kArg0 <= ref
@@ -951,7 +955,7 @@
       LIR* hop_branch = OpCmpImmBranch(kCondNe, class_reg, 0, NULL);
       // Not resolved
       // Call out to helper, which will return resolved type in kRet0
-      CallRuntimeHelperImm(QUICK_ENTRYPOINT_OFFSET(pInitializeTypeFromCode), type_idx, true);
+      CallRuntimeHelperImm(QUICK_ENTRYPOINT_OFFSET(pInitializeType), type_idx, true);
       OpRegCopy(TargetReg(kArg2), TargetReg(kRet0));  // Align usage with fast path
       LoadValueDirectFixed(rl_src, TargetReg(kArg0));  /* reload Ref */
       // Rejoin code paths
@@ -986,7 +990,7 @@
     }
   } else {
     if (cu_->instruction_set == kThumb2) {
-      int r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(pInstanceofNonTrivialFromCode));
+      int r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(pInstanceofNonTrivial));
       if (!type_known_abstract) {
       /* Uses conditional nullification */
         OpRegReg(kOpCmp, TargetReg(kArg1), TargetReg(kArg2));  // Same?
@@ -1003,13 +1007,13 @@
         branchover = OpCmpBranch(kCondEq, TargetReg(kArg1), TargetReg(kArg2), NULL);
       }
       if (cu_->instruction_set != kX86) {
-        int r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(pInstanceofNonTrivialFromCode));
+        int r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(pInstanceofNonTrivial));
         OpRegCopy(TargetReg(kArg0), TargetReg(kArg2));    // .ne case - arg0 <= class
         OpReg(kOpBlx, r_tgt);    // .ne case: helper(class, ref->class)
         FreeTemp(r_tgt);
       } else {
         OpRegCopy(TargetReg(kArg0), TargetReg(kArg2));
-        OpThreadMem(kOpBlx, QUICK_ENTRYPOINT_OFFSET(pInstanceofNonTrivialFromCode));
+        OpThreadMem(kOpBlx, QUICK_ENTRYPOINT_OFFSET(pInstanceofNonTrivial));
       }
     }
   }
@@ -1069,7 +1073,7 @@
     // Check we have access to type_idx and if not throw IllegalAccessError,
     // returns Class* in kRet0
     // InitializeTypeAndVerifyAccess(idx, method)
-    CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(pInitializeTypeAndVerifyAccessFromCode),
+    CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(pInitializeTypeAndVerifyAccess),
                             type_idx, TargetReg(kArg1), true);
     OpRegCopy(class_reg, TargetReg(kRet0));  // Align usage with fast path
   } else if (use_declaring_class) {
@@ -1089,7 +1093,7 @@
       // Not resolved
       // Call out to helper, which will return resolved type in kArg0
       // InitializeTypeFromCode(idx, method)
-      CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(pInitializeTypeFromCode), type_idx,
+      CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(pInitializeType), type_idx,
                               TargetReg(kArg1), true);
       OpRegCopy(class_reg, TargetReg(kRet0));  // Align usage with fast path
       // Rejoin code paths
@@ -1109,7 +1113,7 @@
   if (!type_known_abstract) {
     branch2 = OpCmpBranch(kCondEq, TargetReg(kArg1), class_reg, NULL);
   }
-  CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pCheckCastFromCode), TargetReg(kArg1),
+  CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pCheckCast), TargetReg(kArg1),
                           TargetReg(kArg2), true);
   /* branch target here */
   LIR* target = NewLIR0(kPseudoTargetLabel);
@@ -1168,7 +1172,7 @@
 
 void Mir2Lir::GenShiftOpLong(Instruction::Code opcode, RegLocation rl_dest,
                              RegLocation rl_src1, RegLocation rl_shift) {
-  int func_offset = -1;  // Make gcc happy
+  ThreadOffset func_offset(-1);
 
   switch (opcode) {
     case Instruction::SHL_LONG:
@@ -1303,7 +1307,7 @@
       }
       rl_result = GenDivRem(rl_dest, rl_src1.low_reg, rl_src2.low_reg, op == kOpDiv);
     } else {
-      int func_offset = QUICK_ENTRYPOINT_OFFSET(pIdivmod);
+      ThreadOffset func_offset = QUICK_ENTRYPOINT_OFFSET(pIdivmod);
       FlushAllRegs();   /* Send everything to home location */
       LoadValueDirectFixed(rl_src2, TargetReg(kArg1));
       int r_tgt = CallHelperSetup(func_offset);
@@ -1558,7 +1562,7 @@
         FlushAllRegs();   /* Everything to home location */
         LoadValueDirectFixed(rl_src, TargetReg(kArg0));
         Clobber(TargetReg(kArg0));
-        int func_offset = QUICK_ENTRYPOINT_OFFSET(pIdivmod);
+        ThreadOffset func_offset = QUICK_ENTRYPOINT_OFFSET(pIdivmod);
         CallRuntimeHelperRegImm(func_offset, TargetReg(kArg0), lit, false);
         if (is_div)
           rl_result = GetReturn(false);
@@ -1589,7 +1593,7 @@
   OpKind second_op = kOpBkpt;
   bool call_out = false;
   bool check_zero = false;
-  int func_offset;
+  ThreadOffset func_offset(-1);
   int ret_reg = TargetReg(kRet0);
 
   switch (opcode) {
@@ -1709,7 +1713,7 @@
   }
 }
 
-void Mir2Lir::GenConversionCall(int func_offset,
+void Mir2Lir::GenConversionCall(ThreadOffset func_offset,
                                 RegLocation rl_dest, RegLocation rl_src) {
   /*
    * Don't optimize the register usage since it calls out to support
diff --git a/compiler/dex/quick/gen_invoke.cc b/compiler/dex/quick/gen_invoke.cc
index 1b34e99..20d683a 100644
--- a/compiler/dex/quick/gen_invoke.cc
+++ b/compiler/dex/quick/gen_invoke.cc
@@ -37,12 +37,12 @@
  * has a memory call operation, part 1 is a NOP for x86.  For other targets,
  * load arguments between the two parts.
  */
-int Mir2Lir::CallHelperSetup(int helper_offset) {
+int Mir2Lir::CallHelperSetup(ThreadOffset helper_offset) {
   return (cu_->instruction_set == kX86) ? 0 : LoadHelper(helper_offset);
 }
 
 /* NOTE: if r_tgt is a temp, it will be freed following use */
-LIR* Mir2Lir::CallHelper(int r_tgt, int helper_offset, bool safepoint_pc) {
+LIR* Mir2Lir::CallHelper(int r_tgt, ThreadOffset helper_offset, bool safepoint_pc) {
   LIR* call_inst;
   if (cu_->instruction_set == kX86) {
     call_inst = OpThreadMem(kOpBlx, helper_offset);
@@ -56,21 +56,22 @@
   return call_inst;
 }
 
-void Mir2Lir::CallRuntimeHelperImm(int helper_offset, int arg0, bool safepoint_pc) {
+void Mir2Lir::CallRuntimeHelperImm(ThreadOffset helper_offset, int arg0, bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   LoadConstant(TargetReg(kArg0), arg0);
   ClobberCalleeSave();
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperReg(int helper_offset, int arg0, bool safepoint_pc) {
+void Mir2Lir::CallRuntimeHelperReg(ThreadOffset helper_offset, int arg0, bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   OpRegCopy(TargetReg(kArg0), arg0);
   ClobberCalleeSave();
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperRegLocation(int helper_offset, RegLocation arg0, bool safepoint_pc) {
+void Mir2Lir::CallRuntimeHelperRegLocation(ThreadOffset helper_offset, RegLocation arg0,
+                                           bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   if (arg0.wide == 0) {
     LoadValueDirectFixed(arg0, TargetReg(kArg0));
@@ -81,7 +82,7 @@
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperImmImm(int helper_offset, int arg0, int arg1,
+void Mir2Lir::CallRuntimeHelperImmImm(ThreadOffset helper_offset, int arg0, int arg1,
                                       bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   LoadConstant(TargetReg(kArg0), arg0);
@@ -90,7 +91,7 @@
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperImmRegLocation(int helper_offset, int arg0,
+void Mir2Lir::CallRuntimeHelperImmRegLocation(ThreadOffset helper_offset, int arg0,
                                               RegLocation arg1, bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   if (arg1.wide == 0) {
@@ -103,7 +104,7 @@
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperRegLocationImm(int helper_offset, RegLocation arg0, int arg1,
+void Mir2Lir::CallRuntimeHelperRegLocationImm(ThreadOffset helper_offset, RegLocation arg0, int arg1,
                                               bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   LoadValueDirectFixed(arg0, TargetReg(kArg0));
@@ -112,7 +113,7 @@
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperImmReg(int helper_offset, int arg0, int arg1,
+void Mir2Lir::CallRuntimeHelperImmReg(ThreadOffset helper_offset, int arg0, int arg1,
                                       bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   OpRegCopy(TargetReg(kArg1), arg1);
@@ -121,8 +122,8 @@
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperRegImm(int helper_offset, int arg0, int arg1,
-                             bool safepoint_pc) {
+void Mir2Lir::CallRuntimeHelperRegImm(ThreadOffset helper_offset, int arg0, int arg1,
+                                      bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   OpRegCopy(TargetReg(kArg0), arg0);
   LoadConstant(TargetReg(kArg1), arg1);
@@ -130,7 +131,7 @@
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperImmMethod(int helper_offset, int arg0, bool safepoint_pc) {
+void Mir2Lir::CallRuntimeHelperImmMethod(ThreadOffset helper_offset, int arg0, bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   LoadCurrMethodDirect(TargetReg(kArg1));
   LoadConstant(TargetReg(kArg0), arg0);
@@ -138,7 +139,7 @@
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperRegLocationRegLocation(int helper_offset, RegLocation arg0,
+void Mir2Lir::CallRuntimeHelperRegLocationRegLocation(ThreadOffset helper_offset, RegLocation arg0,
                                                       RegLocation arg1, bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   if (arg0.wide == 0) {
@@ -168,7 +169,8 @@
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperRegReg(int helper_offset, int arg0, int arg1, bool safepoint_pc) {
+void Mir2Lir::CallRuntimeHelperRegReg(ThreadOffset helper_offset, int arg0, int arg1,
+                                      bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   DCHECK_NE(TargetReg(kArg0), arg1);  // check copy into arg0 won't clobber arg1
   OpRegCopy(TargetReg(kArg0), arg0);
@@ -177,7 +179,7 @@
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperRegRegImm(int helper_offset, int arg0, int arg1,
+void Mir2Lir::CallRuntimeHelperRegRegImm(ThreadOffset helper_offset, int arg0, int arg1,
                                          int arg2, bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   DCHECK_NE(TargetReg(kArg0), arg1);  // check copy into arg0 won't clobber arg1
@@ -188,7 +190,7 @@
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperImmMethodRegLocation(int helper_offset,
+void Mir2Lir::CallRuntimeHelperImmMethodRegLocation(ThreadOffset helper_offset,
                                                     int arg0, RegLocation arg2, bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   LoadValueDirectFixed(arg2, TargetReg(kArg2));
@@ -198,7 +200,7 @@
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperImmMethodImm(int helper_offset, int arg0,
+void Mir2Lir::CallRuntimeHelperImmMethodImm(ThreadOffset helper_offset, int arg0,
                                             int arg2, bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
   LoadCurrMethodDirect(TargetReg(kArg1));
@@ -208,7 +210,7 @@
   CallHelper(r_tgt, helper_offset, safepoint_pc);
 }
 
-void Mir2Lir::CallRuntimeHelperImmRegLocationRegLocation(int helper_offset,
+void Mir2Lir::CallRuntimeHelperImmRegLocationRegLocation(ThreadOffset helper_offset,
                                                          int arg0, RegLocation arg1,
                                                          RegLocation arg2, bool safepoint_pc) {
   int r_tgt = CallHelperSetup(helper_offset);
@@ -470,14 +472,14 @@
     // Disable sharpening
     direct_method = 0;
   }
-  int trampoline = (cu->instruction_set == kX86) ? 0
-      : QUICK_ENTRYPOINT_OFFSET(pInvokeInterfaceTrampoline);
+  ThreadOffset trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeInterfaceTrampoline);
 
   if (direct_method != 0) {
     switch (state) {
       case 0:  // Load the trampoline target [sets kInvokeTgt].
         if (cu->instruction_set != kX86) {
-          cg->LoadWordDisp(cg->TargetReg(kSelf), trampoline, cg->TargetReg(kInvokeTgt));
+          cg->LoadWordDisp(cg->TargetReg(kSelf), trampoline.Int32Value(),
+                           cg->TargetReg(kInvokeTgt));
         }
         // Get the interface Method* [sets kArg0]
         if (direct_method != static_cast<unsigned int>(-1)) {
@@ -506,7 +508,8 @@
         cg->LoadCurrMethodDirect(cg->TargetReg(kArg0));
         // Load the trampoline target [sets kInvokeTgt].
         if (cu->instruction_set != kX86) {
-          cg->LoadWordDisp(cg->TargetReg(kSelf), trampoline, cg->TargetReg(kInvokeTgt));
+          cg->LoadWordDisp(cg->TargetReg(kSelf), trampoline.Int32Value(),
+                           cg->TargetReg(kInvokeTgt));
         }
         break;
     case 1:  // Get method->dex_cache_resolved_methods_ [set/use kArg0]
@@ -528,7 +531,7 @@
   return state + 1;
 }
 
-static int NextInvokeInsnSP(CompilationUnit* cu, CallInfo* info, int trampoline,
+static int NextInvokeInsnSP(CompilationUnit* cu, CallInfo* info, ThreadOffset trampoline,
                             int state, const MethodReference& target_method,
                             uint32_t method_idx) {
   Mir2Lir* cg = static_cast<Mir2Lir*>(cu->cg.get());
@@ -539,7 +542,7 @@
   if (state == 0) {
     if (cu->instruction_set != kX86) {
       // Load trampoline target
-      cg->LoadWordDisp(cg->TargetReg(kSelf), trampoline, cg->TargetReg(kInvokeTgt));
+      cg->LoadWordDisp(cg->TargetReg(kSelf), trampoline.Int32Value(), cg->TargetReg(kInvokeTgt));
     }
     // Load kArg0 with method index
     CHECK_EQ(cu->dex_file, target_method.dex_file);
@@ -555,7 +558,7 @@
                                 uint32_t method_idx,
                                 uintptr_t unused, uintptr_t unused2,
                                 InvokeType unused3) {
-  int trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeStaticTrampolineWithAccessCheck);
+  ThreadOffset trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeStaticTrampolineWithAccessCheck);
   return NextInvokeInsnSP(cu, info, trampoline, state, target_method, 0);
 }
 
@@ -563,7 +566,7 @@
                                 const MethodReference& target_method,
                                 uint32_t method_idx, uintptr_t unused,
                                 uintptr_t unused2, InvokeType unused3) {
-  int trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeDirectTrampolineWithAccessCheck);
+  ThreadOffset trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeDirectTrampolineWithAccessCheck);
   return NextInvokeInsnSP(cu, info, trampoline, state, target_method, 0);
 }
 
@@ -571,7 +574,7 @@
                                const MethodReference& target_method,
                                uint32_t method_idx, uintptr_t unused,
                                uintptr_t unused2, InvokeType unused3) {
-  int trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeSuperTrampolineWithAccessCheck);
+  ThreadOffset trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeSuperTrampolineWithAccessCheck);
   return NextInvokeInsnSP(cu, info, trampoline, state, target_method, 0);
 }
 
@@ -579,7 +582,7 @@
                            const MethodReference& target_method,
                            uint32_t method_idx, uintptr_t unused,
                            uintptr_t unused2, InvokeType unused3) {
-  int trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeVirtualTrampolineWithAccessCheck);
+  ThreadOffset trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeVirtualTrampolineWithAccessCheck);
   return NextInvokeInsnSP(cu, info, trampoline, state, target_method, 0);
 }
 
@@ -589,7 +592,7 @@
                                                 uint32_t unused,
                                                 uintptr_t unused2, uintptr_t unused3,
                                                 InvokeType unused4) {
-  int trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeInterfaceTrampolineWithAccessCheck);
+  ThreadOffset trampoline = QUICK_ENTRYPOINT_OFFSET(pInvokeInterfaceTrampolineWithAccessCheck);
   return NextInvokeInsnSP(cu, info, trampoline, state, target_method, 0);
 }
 
@@ -1108,9 +1111,9 @@
 bool Mir2Lir::GenInlinedCurrentThread(CallInfo* info) {
   RegLocation rl_dest = InlineTarget(info);
   RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
-  int offset = Thread::PeerOffset().Int32Value();
+  ThreadOffset offset = Thread::PeerOffset();
   if (cu_->instruction_set == kThumb2 || cu_->instruction_set == kMips) {
-    LoadWordDisp(TargetReg(kSelf), offset, rl_result.low_reg);
+    LoadWordDisp(TargetReg(kSelf), offset.Int32Value(), rl_result.low_reg);
   } else {
     CHECK(cu_->instruction_set == kX86);
     reinterpret_cast<X86Mir2Lir*>(this)->OpRegThreadMem(kOpMov, rl_result.low_reg, offset);
@@ -1406,7 +1409,7 @@
       call_inst = OpMem(kOpBlx, TargetReg(kArg0),
                         mirror::AbstractMethod::GetEntryPointFromCompiledCodeOffset().Int32Value());
     } else {
-      int trampoline = 0;
+      ThreadOffset trampoline(-1);
       switch (info->type) {
       case kInterface:
         trampoline = fast_path ? QUICK_ENTRYPOINT_OFFSET(pInvokeInterfaceTrampoline)
diff --git a/compiler/dex/quick/mips/call_mips.cc b/compiler/dex/quick/mips/call_mips.cc
index 846c055..eaae0e1 100644
--- a/compiler/dex/quick/mips/call_mips.cc
+++ b/compiler/dex/quick/mips/call_mips.cc
@@ -247,7 +247,7 @@
   GenBarrier();
   NewLIR0(kMipsCurrPC);  // Really a jal to .+8
   // Now, fill the branch delay slot with the helper load
-  int r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(pHandleFillArrayDataFromCode));
+  int r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(pHandleFillArrayData));
   GenBarrier();  // Scheduling barrier
 
   // Construct BaseLabel and set up table base register
@@ -272,7 +272,7 @@
   LockCallTemps();  // Prepare for explicit register usage
   GenNullCheck(rl_src.s_reg_low, rMIPS_ARG0, opt_flags);
   // Go expensive route - artLockObjectFromCode(self, obj);
-  int r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(pLockObjectFromCode));
+  int r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(pLockObject));
   ClobberCalleeSave();
   LIR* call_inst = OpReg(kOpBlx, r_tgt);
   MarkSafepointPC(call_inst);
@@ -287,7 +287,7 @@
   LockCallTemps();  // Prepare for explicit register usage
   GenNullCheck(rl_src.s_reg_low, rMIPS_ARG0, opt_flags);
   // Go expensive route - UnlockObjectFromCode(obj);
-  int r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(pUnlockObjectFromCode));
+  int r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(pUnlockObject));
   ClobberCalleeSave();
   LIR* call_inst = OpReg(kOpBlx, r_tgt);
   MarkSafepointPC(call_inst);
diff --git a/compiler/dex/quick/mips/codegen_mips.h b/compiler/dex/quick/mips/codegen_mips.h
index 802ff62..6100396 100644
--- a/compiler/dex/quick/mips/codegen_mips.h
+++ b/compiler/dex/quick/mips/codegen_mips.h
@@ -29,7 +29,7 @@
     // Required for target - codegen utilities.
     bool SmallLiteralDivide(Instruction::Code dalvik_opcode, RegLocation rl_src,
                                     RegLocation rl_dest, int lit);
-    int LoadHelper(int offset);
+    int LoadHelper(ThreadOffset offset);
     LIR* LoadBaseDisp(int rBase, int displacement, int r_dest, OpSize size, int s_reg);
     LIR* LoadBaseDispWide(int rBase, int displacement, int r_dest_lo, int r_dest_hi,
                                   int s_reg);
@@ -154,12 +154,12 @@
     LIR* OpRegRegImm(OpKind op, int r_dest, int r_src1, int value);
     LIR* OpRegRegReg(OpKind op, int r_dest, int r_src1, int r_src2);
     LIR* OpTestSuspend(LIR* target);
-    LIR* OpThreadMem(OpKind op, int thread_offset);
+    LIR* OpThreadMem(OpKind op, ThreadOffset thread_offset);
     LIR* OpVldm(int rBase, int count);
     LIR* OpVstm(int rBase, int count);
     void OpLea(int rBase, int reg1, int reg2, int scale, int offset);
     void OpRegCopyWide(int dest_lo, int dest_hi, int src_lo, int src_hi);
-    void OpTlsCmp(int offset, int val);
+    void OpTlsCmp(ThreadOffset offset, int val);
 
     LIR* LoadBaseDispBody(int rBase, int displacement, int r_dest, int r_dest_hi, OpSize size,
                           int s_reg);
diff --git a/compiler/dex/quick/mips/fp_mips.cc b/compiler/dex/quick/mips/fp_mips.cc
index 3203017..9e2fea9 100644
--- a/compiler/dex/quick/mips/fp_mips.cc
+++ b/compiler/dex/quick/mips/fp_mips.cc
@@ -176,7 +176,7 @@
 void MipsMir2Lir::GenCmpFP(Instruction::Code opcode, RegLocation rl_dest,
                            RegLocation rl_src1, RegLocation rl_src2) {
   bool wide = true;
-  int offset = -1;  // Make gcc happy.
+  ThreadOffset offset(-1);
 
   switch (opcode) {
     case Instruction::CMPL_FLOAT:
diff --git a/compiler/dex/quick/mips/int_mips.cc b/compiler/dex/quick/mips/int_mips.cc
index bd044c6..4a48c87 100644
--- a/compiler/dex/quick/mips/int_mips.cc
+++ b/compiler/dex/quick/mips/int_mips.cc
@@ -254,7 +254,7 @@
   LOG(FATAL) << "Unexpected use of OpLea for Arm";
 }
 
-void MipsMir2Lir::OpTlsCmp(int offset, int val) {
+void MipsMir2Lir::OpTlsCmp(ThreadOffset offset, int val) {
   LOG(FATAL) << "Unexpected use of OpTlsCmp for Arm";
 }
 
@@ -579,7 +579,7 @@
 
   // Get the array's class.
   LoadWordDisp(r_array, mirror::Object::ClassOffset().Int32Value(), r_array_class);
-  CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pCanPutArrayElementFromCode), r_value,
+  CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pCanPutArrayElement), r_value,
                           r_array_class, true);
   // Redo LoadValues in case they didn't survive the call.
   LoadValueDirectFixed(rl_array, r_array);  // Reload array
diff --git a/compiler/dex/quick/mips/target_mips.cc b/compiler/dex/quick/mips/target_mips.cc
index 0a17fb1..7a9e91a 100644
--- a/compiler/dex/quick/mips/target_mips.cc
+++ b/compiler/dex/quick/mips/target_mips.cc
@@ -505,8 +505,8 @@
  * ensure that all branch instructions can be restarted if
  * there is a trap in the shadow.  Allocate a temp register.
  */
-int MipsMir2Lir::LoadHelper(int offset) {
-  LoadWordDisp(rMIPS_SELF, offset, r_T9);
+int MipsMir2Lir::LoadHelper(ThreadOffset offset) {
+  LoadWordDisp(rMIPS_SELF, offset.Int32Value(), r_T9);
   return r_T9;
 }
 
diff --git a/compiler/dex/quick/mips/utility_mips.cc b/compiler/dex/quick/mips/utility_mips.cc
index 68b26f1..5d9ae33 100644
--- a/compiler/dex/quick/mips/utility_mips.cc
+++ b/compiler/dex/quick/mips/utility_mips.cc
@@ -632,7 +632,7 @@
   return StoreBaseDispBody(rBase, displacement, r_src_lo, r_src_hi, kLong);
 }
 
-LIR* MipsMir2Lir::OpThreadMem(OpKind op, int thread_offset) {
+LIR* MipsMir2Lir::OpThreadMem(OpKind op, ThreadOffset thread_offset) {
   LOG(FATAL) << "Unexpected use of OpThreadMem for MIPS";
   return NULL;
 }
diff --git a/compiler/dex/quick/mir_to_lir.h b/compiler/dex/quick/mir_to_lir.h
index a34e929..2794bf5 100644
--- a/compiler/dex/quick/mir_to_lir.h
+++ b/compiler/dex/quick/mir_to_lir.h
@@ -424,42 +424,42 @@
                           RegLocation rl_src, int lit);
     void GenArithOpLong(Instruction::Code opcode, RegLocation rl_dest,
                         RegLocation rl_src1, RegLocation rl_src2);
-    void GenConversionCall(int func_offset, RegLocation rl_dest,
+    void GenConversionCall(ThreadOffset func_offset, RegLocation rl_dest,
                            RegLocation rl_src);
     void GenSuspendTest(int opt_flags);
     void GenSuspendTestAndBranch(int opt_flags, LIR* target);
 
     // Shared by all targets - implemented in gen_invoke.cc.
-    int CallHelperSetup(int helper_offset);
-    LIR* CallHelper(int r_tgt, int helper_offset, bool safepoint_pc);
-    void CallRuntimeHelperImm(int helper_offset, int arg0, bool safepoint_pc);
-    void CallRuntimeHelperReg(int helper_offset, int arg0, bool safepoint_pc);
-    void CallRuntimeHelperRegLocation(int helper_offset, RegLocation arg0,
-                                       bool safepoint_pc);
-    void CallRuntimeHelperImmImm(int helper_offset, int arg0, int arg1,
+    int CallHelperSetup(ThreadOffset helper_offset);
+    LIR* CallHelper(int r_tgt, ThreadOffset helper_offset, bool safepoint_pc);
+    void CallRuntimeHelperImm(ThreadOffset helper_offset, int arg0, bool safepoint_pc);
+    void CallRuntimeHelperReg(ThreadOffset helper_offset, int arg0, bool safepoint_pc);
+    void CallRuntimeHelperRegLocation(ThreadOffset helper_offset, RegLocation arg0,
+                                      bool safepoint_pc);
+    void CallRuntimeHelperImmImm(ThreadOffset helper_offset, int arg0, int arg1,
                                  bool safepoint_pc);
-    void CallRuntimeHelperImmRegLocation(int helper_offset, int arg0,
+    void CallRuntimeHelperImmRegLocation(ThreadOffset helper_offset, int arg0,
                                          RegLocation arg1, bool safepoint_pc);
-    void CallRuntimeHelperRegLocationImm(int helper_offset, RegLocation arg0,
+    void CallRuntimeHelperRegLocationImm(ThreadOffset helper_offset, RegLocation arg0,
                                          int arg1, bool safepoint_pc);
-    void CallRuntimeHelperImmReg(int helper_offset, int arg0, int arg1,
+    void CallRuntimeHelperImmReg(ThreadOffset helper_offset, int arg0, int arg1,
                                  bool safepoint_pc);
-    void CallRuntimeHelperRegImm(int helper_offset, int arg0, int arg1,
+    void CallRuntimeHelperRegImm(ThreadOffset helper_offset, int arg0, int arg1,
                                  bool safepoint_pc);
-    void CallRuntimeHelperImmMethod(int helper_offset, int arg0,
+    void CallRuntimeHelperImmMethod(ThreadOffset helper_offset, int arg0,
                                     bool safepoint_pc);
-    void CallRuntimeHelperRegLocationRegLocation(int helper_offset,
+    void CallRuntimeHelperRegLocationRegLocation(ThreadOffset helper_offset,
                                                  RegLocation arg0, RegLocation arg1,
                                                  bool safepoint_pc);
-    void CallRuntimeHelperRegReg(int helper_offset, int arg0, int arg1,
+    void CallRuntimeHelperRegReg(ThreadOffset helper_offset, int arg0, int arg1,
                                  bool safepoint_pc);
-    void CallRuntimeHelperRegRegImm(int helper_offset, int arg0, int arg1,
+    void CallRuntimeHelperRegRegImm(ThreadOffset helper_offset, int arg0, int arg1,
                                     int arg2, bool safepoint_pc);
-    void CallRuntimeHelperImmMethodRegLocation(int helper_offset, int arg0,
+    void CallRuntimeHelperImmMethodRegLocation(ThreadOffset helper_offset, int arg0,
                                                RegLocation arg2, bool safepoint_pc);
-    void CallRuntimeHelperImmMethodImm(int helper_offset, int arg0, int arg2,
+    void CallRuntimeHelperImmMethodImm(ThreadOffset helper_offset, int arg0, int arg2,
                                        bool safepoint_pc);
-    void CallRuntimeHelperImmRegLocationRegLocation(int helper_offset,
+    void CallRuntimeHelperImmRegLocationRegLocation(ThreadOffset helper_offset,
                                                     int arg0, RegLocation arg1, RegLocation arg2,
                                                     bool safepoint_pc);
     void GenInvoke(CallInfo* info);
@@ -526,7 +526,7 @@
     // Required for target - codegen helpers.
     virtual bool SmallLiteralDivide(Instruction::Code dalvik_opcode,
                                     RegLocation rl_src, RegLocation rl_dest, int lit) = 0;
-    virtual int LoadHelper(int offset) = 0;
+    virtual int LoadHelper(ThreadOffset offset) = 0;
     virtual LIR* LoadBaseDisp(int rBase, int displacement, int r_dest, OpSize size, int s_reg) = 0;
     virtual LIR* LoadBaseDispWide(int rBase, int displacement, int r_dest_lo, int r_dest_hi,
                                   int s_reg) = 0;
@@ -674,14 +674,14 @@
     virtual LIR* OpRegRegReg(OpKind op, int r_dest, int r_src1,
                              int r_src2) = 0;
     virtual LIR* OpTestSuspend(LIR* target) = 0;
-    virtual LIR* OpThreadMem(OpKind op, int thread_offset) = 0;
+    virtual LIR* OpThreadMem(OpKind op, ThreadOffset thread_offset) = 0;
     virtual LIR* OpVldm(int rBase, int count) = 0;
     virtual LIR* OpVstm(int rBase, int count) = 0;
     virtual void OpLea(int rBase, int reg1, int reg2, int scale,
                        int offset) = 0;
     virtual void OpRegCopyWide(int dest_lo, int dest_hi, int src_lo,
                                int src_hi) = 0;
-    virtual void OpTlsCmp(int offset, int val) = 0;
+    virtual void OpTlsCmp(ThreadOffset offset, int val) = 0;
     virtual bool InexpensiveConstantInt(int32_t value) = 0;
     virtual bool InexpensiveConstantFloat(int32_t value) = 0;
     virtual bool InexpensiveConstantLong(int64_t value) = 0;
diff --git a/compiler/dex/quick/x86/call_x86.cc b/compiler/dex/quick/x86/call_x86.cc
index 1c395de..6e3e55f 100644
--- a/compiler/dex/quick/x86/call_x86.cc
+++ b/compiler/dex/quick/x86/call_x86.cc
@@ -148,7 +148,7 @@
   NewLIR1(kX86StartOfMethod, rX86_ARG2);
   NewLIR2(kX86PcRelAdr, rX86_ARG1, reinterpret_cast<uintptr_t>(tab_rec));
   NewLIR2(kX86Add32RR, rX86_ARG1, rX86_ARG2);
-  CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pHandleFillArrayDataFromCode), rX86_ARG0,
+  CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pHandleFillArrayData), rX86_ARG0,
                           rX86_ARG1, true);
 }
 
@@ -165,7 +165,7 @@
   NewLIR3(kX86LockCmpxchgMR, rCX, mirror::Object::MonitorOffset().Int32Value(), rDX);
   LIR* branch = NewLIR2(kX86Jcc8, 0, kX86CondEq);
   // If lock is held, go the expensive route - artLockObjectFromCode(self, obj);
-  CallRuntimeHelperReg(QUICK_ENTRYPOINT_OFFSET(pLockObjectFromCode), rCX, true);
+  CallRuntimeHelperReg(QUICK_ENTRYPOINT_OFFSET(pLockObject), rCX, true);
   branch->target = NewLIR0(kPseudoTargetLabel);
 }
 
@@ -185,7 +185,7 @@
   LIR* branch2 = NewLIR1(kX86Jmp8, 0);
   branch->target = NewLIR0(kPseudoTargetLabel);
   // Otherwise, go the expensive route - UnlockObjectFromCode(obj);
-  CallRuntimeHelperReg(QUICK_ENTRYPOINT_OFFSET(pUnlockObjectFromCode), rAX, true);
+  CallRuntimeHelperReg(QUICK_ENTRYPOINT_OFFSET(pUnlockObject), rAX, true);
   branch2->target = NewLIR0(kPseudoTargetLabel);
 }
 
@@ -243,7 +243,7 @@
   if (!skip_overflow_check) {
     // cmp rX86_SP, fs:[stack_end_]; jcc throw_launchpad
     LIR* tgt = RawLIR(0, kPseudoThrowTarget, kThrowStackOverflow, 0, 0, 0, 0);
-    OpRegThreadMem(kOpCmp, rX86_SP, Thread::StackEndOffset().Int32Value());
+    OpRegThreadMem(kOpCmp, rX86_SP, Thread::StackEndOffset());
     OpCondBranch(kCondUlt, tgt);
     // Remember branch target - will process later
     throw_launchpads_.Insert(tgt);
diff --git a/compiler/dex/quick/x86/codegen_x86.h b/compiler/dex/quick/x86/codegen_x86.h
index edb5ae5..21328d5 100644
--- a/compiler/dex/quick/x86/codegen_x86.h
+++ b/compiler/dex/quick/x86/codegen_x86.h
@@ -29,7 +29,7 @@
     // Required for target - codegen helpers.
     bool SmallLiteralDivide(Instruction::Code dalvik_opcode, RegLocation rl_src,
                                     RegLocation rl_dest, int lit);
-    int LoadHelper(int offset);
+    int LoadHelper(ThreadOffset offset);
     LIR* LoadBaseDisp(int rBase, int displacement, int r_dest, OpSize size, int s_reg);
     LIR* LoadBaseDispWide(int rBase, int displacement, int r_dest_lo, int r_dest_hi,
                                   int s_reg);
@@ -154,14 +154,14 @@
     LIR* OpRegRegImm(OpKind op, int r_dest, int r_src1, int value);
     LIR* OpRegRegReg(OpKind op, int r_dest, int r_src1, int r_src2);
     LIR* OpTestSuspend(LIR* target);
-    LIR* OpThreadMem(OpKind op, int thread_offset);
+    LIR* OpThreadMem(OpKind op, ThreadOffset thread_offset);
     LIR* OpVldm(int rBase, int count);
     LIR* OpVstm(int rBase, int count);
     void OpLea(int rBase, int reg1, int reg2, int scale, int offset);
     void OpRegCopyWide(int dest_lo, int dest_hi, int src_lo, int src_hi);
-    void OpTlsCmp(int offset, int val);
+    void OpTlsCmp(ThreadOffset offset, int val);
 
-    void OpRegThreadMem(OpKind op, int r_dest, int thread_offset);
+    void OpRegThreadMem(OpKind op, int r_dest, ThreadOffset thread_offset);
     void SpillCoreRegs();
     void UnSpillCoreRegs();
     static const X86EncodingMap EncodingMap[kX86Last];
diff --git a/compiler/dex/quick/x86/int_x86.cc b/compiler/dex/quick/x86/int_x86.cc
index 0b4b4be..377d134 100644
--- a/compiler/dex/quick/x86/int_x86.cc
+++ b/compiler/dex/quick/x86/int_x86.cc
@@ -240,8 +240,8 @@
   NewLIR5(kX86Lea32RA, rBase, reg1, reg2, scale, offset);
 }
 
-void X86Mir2Lir::OpTlsCmp(int offset, int val) {
-  NewLIR2(kX86Cmp16TI8, offset, val);
+void X86Mir2Lir::OpTlsCmp(ThreadOffset offset, int val) {
+  NewLIR2(kX86Cmp16TI8, offset.Int32Value(), val);
 }
 
 bool X86Mir2Lir::GenInlinedCas32(CallInfo* info, bool need_write_barrier) {
@@ -285,7 +285,7 @@
 
 // Test suspend flag, return target of taken suspend branch
 LIR* X86Mir2Lir::OpTestSuspend(LIR* target) {
-  OpTlsCmp(Thread::ThreadFlagsOffset().Int32Value(), 0);
+  OpTlsCmp(Thread::ThreadFlagsOffset(), 0);
   return OpCondBranch((target == NULL) ? kCondNe : kCondEq, target);
 }
 
@@ -403,7 +403,7 @@
   StoreValueWide(rl_dest, rl_result);
 }
 
-void X86Mir2Lir::OpRegThreadMem(OpKind op, int r_dest, int thread_offset) {
+void X86Mir2Lir::OpRegThreadMem(OpKind op, int r_dest, ThreadOffset thread_offset) {
   X86OpCode opcode = kX86Bkpt;
   switch (op) {
   case kOpCmp: opcode = kX86Cmp32RT;  break;
@@ -412,7 +412,7 @@
     LOG(FATAL) << "Bad opcode: " << op;
     break;
   }
-  NewLIR2(opcode, r_dest, thread_offset);
+  NewLIR2(opcode, r_dest, thread_offset.Int32Value());
 }
 
 /*
@@ -532,7 +532,7 @@
 
   // Get the array's class.
   LoadWordDisp(r_array, mirror::Object::ClassOffset().Int32Value(), r_array_class);
-  CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pCanPutArrayElementFromCode), r_value,
+  CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pCanPutArrayElement), r_value,
                           r_array_class, true);
   // Redo LoadValues in case they didn't survive the call.
   LoadValueDirectFixed(rl_array, r_array);  // Reload array
diff --git a/compiler/dex/quick/x86/target_x86.cc b/compiler/dex/quick/x86/target_x86.cc
index 2c9b3c8..699f3ae 100644
--- a/compiler/dex/quick/x86/target_x86.cc
+++ b/compiler/dex/quick/x86/target_x86.cc
@@ -524,7 +524,7 @@
 }
 
 // Not used in x86
-int X86Mir2Lir::LoadHelper(int offset) {
+int X86Mir2Lir::LoadHelper(ThreadOffset offset) {
   LOG(FATAL) << "Unexpected use of LoadHelper in x86";
   return INVALID_REG;
 }
diff --git a/compiler/dex/quick/x86/utility_x86.cc b/compiler/dex/quick/x86/utility_x86.cc
index e15995f..c519bfe 100644
--- a/compiler/dex/quick/x86/utility_x86.cc
+++ b/compiler/dex/quick/x86/utility_x86.cc
@@ -292,7 +292,7 @@
   return OpRegImm(op, r_dest, value);
 }
 
-LIR* X86Mir2Lir::OpThreadMem(OpKind op, int thread_offset) {
+LIR* X86Mir2Lir::OpThreadMem(OpKind op, ThreadOffset thread_offset) {
   X86OpCode opcode = kX86Bkpt;
   switch (op) {
     case kOpBlx: opcode = kX86CallT;  break;
@@ -300,7 +300,7 @@
       LOG(FATAL) << "Bad opcode: " << op;
       break;
   }
-  return NewLIR1(opcode, thread_offset);
+  return NewLIR1(opcode, thread_offset.Int32Value());
 }
 
 LIR* X86Mir2Lir::OpMem(OpKind op, int rBase, int disp) {
diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc
index e7ba402..56b629c 100644
--- a/compiler/driver/compiler_driver.cc
+++ b/compiler/driver/compiler_driver.cc
@@ -41,9 +41,9 @@
 #include "mirror/throwable.h"
 #include "scoped_thread_state_change.h"
 #include "ScopedLocalRef.h"
-#include "stubs/stubs.h"
 #include "thread.h"
 #include "thread_pool.h"
+#include "trampolines/trampoline_compiler.h"
 #include "verifier/method_verifier.h"
 
 #if defined(ART_USE_PORTABLE_COMPILER)
@@ -433,64 +433,38 @@
   return res;
 }
 
+const std::vector<uint8_t>* CompilerDriver::CreateInterpreterToInterpreterBridge() const {
+  return CreateTrampoline(instruction_set_, kInterpreterAbi,
+                          INTERPRETER_ENTRYPOINT_OFFSET(pInterpreterToInterpreterBridge));
+}
+
+const std::vector<uint8_t>* CompilerDriver::CreateInterpreterToCompiledCodeBridge() const {
+  return CreateTrampoline(instruction_set_, kInterpreterAbi,
+                          INTERPRETER_ENTRYPOINT_OFFSET(pInterpreterToCompiledCodeBridge));
+}
+
+const std::vector<uint8_t>* CompilerDriver::CreateJniDlsymLookup() const {
+  return CreateTrampoline(instruction_set_, kJniAbi, JNI_ENTRYPOINT_OFFSET(pDlsymLookup));
+}
+
 const std::vector<uint8_t>* CompilerDriver::CreatePortableResolutionTrampoline() const {
-  switch (instruction_set_) {
-    case kArm:
-    case kThumb2:
-      return arm::CreatePortableResolutionTrampoline();
-    case kMips:
-      return mips::CreatePortableResolutionTrampoline();
-    case kX86:
-      return x86::CreatePortableResolutionTrampoline();
-    default:
-      LOG(FATAL) << "Unknown InstructionSet: " << instruction_set_;
-      return NULL;
-  }
+  return CreateTrampoline(instruction_set_, kPortableAbi,
+                          PORTABLE_ENTRYPOINT_OFFSET(pPortableResolutionTrampoline));
+}
+
+const std::vector<uint8_t>* CompilerDriver::CreatePortableToInterpreterBridge() const {
+  return CreateTrampoline(instruction_set_, kPortableAbi,
+                          PORTABLE_ENTRYPOINT_OFFSET(pPortableToInterpreterBridge));
 }
 
 const std::vector<uint8_t>* CompilerDriver::CreateQuickResolutionTrampoline() const {
-  switch (instruction_set_) {
-    case kArm:
-    case kThumb2:
-      return arm::CreateQuickResolutionTrampoline();
-    case kMips:
-      return mips::CreateQuickResolutionTrampoline();
-    case kX86:
-      return x86::CreateQuickResolutionTrampoline();
-    default:
-      LOG(FATAL) << "Unknown InstructionSet: " << instruction_set_;
-      return NULL;
-  }
+  return CreateTrampoline(instruction_set_, kQuickAbi,
+                          QUICK_ENTRYPOINT_OFFSET(pQuickResolutionTrampoline));
 }
 
-const std::vector<uint8_t>* CompilerDriver::CreateInterpreterToInterpreterEntry() const {
-  switch (instruction_set_) {
-    case kArm:
-    case kThumb2:
-      return arm::CreateInterpreterToInterpreterEntry();
-    case kMips:
-      return mips::CreateInterpreterToInterpreterEntry();
-    case kX86:
-      return x86::CreateInterpreterToInterpreterEntry();
-    default:
-      LOG(FATAL) << "Unknown InstructionSet: " << instruction_set_;
-      return NULL;
-  }
-}
-
-const std::vector<uint8_t>* CompilerDriver::CreateInterpreterToQuickEntry() const {
-  switch (instruction_set_) {
-    case kArm:
-    case kThumb2:
-      return arm::CreateInterpreterToQuickEntry();
-    case kMips:
-      return mips::CreateInterpreterToQuickEntry();
-    case kX86:
-      return x86::CreateInterpreterToQuickEntry();
-    default:
-      LOG(FATAL) << "Unknown InstructionSet: " << instruction_set_;
-      return NULL;
-  }
+const std::vector<uint8_t>* CompilerDriver::CreateQuickToInterpreterBridge() const {
+  return CreateTrampoline(instruction_set_, kQuickAbi,
+                          QUICK_ENTRYPOINT_OFFSET(pQuickToInterpreterBridge));
 }
 
 void CompilerDriver::CompileAll(jobject class_loader,
diff --git a/compiler/driver/compiler_driver.h b/compiler/driver/compiler_driver.h
index 18f852d..b5222c9 100644
--- a/compiler/driver/compiler_driver.h
+++ b/compiler/driver/compiler_driver.h
@@ -48,6 +48,17 @@
   kNoBackend
 };
 
+enum EntryPointCallingConvention {
+  // ABI of invocations to a method's interpreter entry point.
+  kInterpreterAbi,
+  // ABI of calls to a method's native code, only used for native methods.
+  kJniAbi,
+  // ABI of calls to a method's portable code entry point.
+  kPortableAbi,
+  // ABI of calls to a method's quick code entry point.
+  kQuickAbi
+};
+
 enum DexToDexCompilationLevel {
   kDontDexToDexCompile,   // Only meaning wrt image time interpretation.
   kRequired,              // Dex-to-dex compilation required for correctness.
@@ -110,13 +121,19 @@
   CompilerTls* GetTls();
 
   // Generate the trampolines that are invoked by unresolved direct methods.
+  const std::vector<uint8_t>* CreateInterpreterToInterpreterBridge() const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+  const std::vector<uint8_t>* CreateInterpreterToCompiledCodeBridge() const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+  const std::vector<uint8_t>* CreateJniDlsymLookup() const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
   const std::vector<uint8_t>* CreatePortableResolutionTrampoline() const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+  const std::vector<uint8_t>* CreatePortableToInterpreterBridge() const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
   const std::vector<uint8_t>* CreateQuickResolutionTrampoline() const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-  const std::vector<uint8_t>* CreateInterpreterToInterpreterEntry() const
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-  const std::vector<uint8_t>* CreateInterpreterToQuickEntry() const
+  const std::vector<uint8_t>* CreateQuickToInterpreterBridge() const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   CompiledClass* GetCompiledClass(ClassReference ref) const
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index e73d021..550d642 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -90,11 +90,23 @@
     return false;
   }
   class_linker->RegisterOatFile(*oat_file_);
-  interpreter_to_interpreter_entry_offset_ = oat_file_->GetOatHeader().GetInterpreterToInterpreterEntryOffset();
-  interpreter_to_quick_entry_offset_ = oat_file_->GetOatHeader().GetInterpreterToQuickEntryOffset();
-  portable_resolution_trampoline_offset_ = oat_file_->GetOatHeader().GetPortableResolutionTrampolineOffset();
-  quick_resolution_trampoline_offset_ = oat_file_->GetOatHeader().GetQuickResolutionTrampolineOffset();
 
+  interpreter_to_interpreter_bridge_offset_ =
+      oat_file_->GetOatHeader().GetInterpreterToInterpreterBridgeOffset();
+  interpreter_to_compiled_code_bridge_offset_ =
+      oat_file_->GetOatHeader().GetInterpreterToCompiledCodeBridgeOffset();
+
+  jni_dlsym_lookup_offset_ = oat_file_->GetOatHeader().GetJniDlsymLookupOffset();
+
+  portable_resolution_trampoline_offset_ =
+      oat_file_->GetOatHeader().GetPortableResolutionTrampolineOffset();
+  portable_to_interpreter_bridge_offset_ =
+      oat_file_->GetOatHeader().GetPortableToInterpreterBridgeOffset();
+
+  quick_resolution_trampoline_offset_ =
+      oat_file_->GetOatHeader().GetQuickResolutionTrampolineOffset();
+  quick_to_interpreter_bridge_offset_ =
+      oat_file_->GetOatHeader().GetQuickToInterpreterBridgeOffset();
   {
     Thread::Current()->TransitionFromSuspendedToRunnable();
     PruneNonImageClasses();  // Remove junk
@@ -490,57 +502,62 @@
 void ImageWriter::FixupMethod(const AbstractMethod* orig, AbstractMethod* copy) {
   FixupInstanceFields(orig, copy);
 
-  // OatWriter replaces the code_ with an offset value.
-  // Here we readjust to a pointer relative to oat_begin_
-  if (orig->IsAbstract()) {
-    // Code for abstract methods is set to the abstract method error stub when we load the image.
-    copy->SetEntryPointFromCompiledCode(NULL);
-    copy->SetEntryPointFromInterpreter(reinterpret_cast<EntryPointFromInterpreter*>
-                                       (GetOatAddress(interpreter_to_interpreter_entry_offset_)));
-    return;
-  } else {
-    copy->SetEntryPointFromInterpreter(reinterpret_cast<EntryPointFromInterpreter*>
-                                       (GetOatAddress(interpreter_to_quick_entry_offset_)));
-  }
+  // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
+  // oat_begin_.
 
-  if (orig == Runtime::Current()->GetResolutionMethod()) {
+  // The resolution method has a special trampoline to call.
+  if (UNLIKELY(orig == Runtime::Current()->GetResolutionMethod())) {
 #if defined(ART_USE_PORTABLE_COMPILER)
     copy->SetEntryPointFromCompiledCode(GetOatAddress(portable_resolution_trampoline_offset_));
 #else
     copy->SetEntryPointFromCompiledCode(GetOatAddress(quick_resolution_trampoline_offset_));
 #endif
-    return;
-  }
-
-  // Use original code if it exists. Otherwise, set the code pointer to the resolution trampoline.
-  const byte* code = GetOatAddress(orig->GetOatCodeOffset());
-  if (code != NULL) {
-    copy->SetEntryPointFromCompiledCode(code);
   } else {
+    // We assume all methods have code. If they currently don't, then we set them to use the
+    // resolution trampoline. Abstract methods never have code and so we need to make sure their
+    // use results in an AbstractMethodError. We use the interpreter to achieve this.
+    if (UNLIKELY(orig->IsAbstract())) {
 #if defined(ART_USE_PORTABLE_COMPILER)
-    copy->SetEntryPointFromCompiledCode(GetOatAddress(portable_resolution_trampoline_offset_));
+      copy->SetEntryPointFromCompiledCode(GetOatAddress(portable_to_interpreter_bridge_offset_));
 #else
-    copy->SetEntryPointFromCompiledCode(GetOatAddress(quick_resolution_trampoline_offset_));
+      copy->SetEntryPointFromCompiledCode(GetOatAddress(quick_to_interpreter_bridge_offset_));
 #endif
-  }
+      copy->SetEntryPointFromInterpreter(reinterpret_cast<EntryPointFromInterpreter*>(
+          GetOatAddress(interpreter_to_interpreter_bridge_offset_)));
+    } else {
+      copy->SetEntryPointFromInterpreter(reinterpret_cast<EntryPointFromInterpreter*>(
+          GetOatAddress(interpreter_to_compiled_code_bridge_offset_)));
+      // Use original code if it exists. Otherwise, set the code pointer to the resolution
+      // trampoline.
+      const byte* code = GetOatAddress(orig->GetOatCodeOffset());
+      if (code != NULL) {
+        copy->SetEntryPointFromCompiledCode(code);
+      } else {
+#if defined(ART_USE_PORTABLE_COMPILER)
+        copy->SetEntryPointFromCompiledCode(GetOatAddress(portable_resolution_trampoline_offset_));
+#else
+        copy->SetEntryPointFromCompiledCode(GetOatAddress(quick_resolution_trampoline_offset_));
+#endif
+      }
+      if (orig->IsNative()) {
+        // The native method's pointer is set to a stub that does the lookup via dlsym.
+        // Note this is not the code_ pointer, that is handled above.
+        copy->SetNativeMethod(GetOatAddress(jni_dlsym_lookup_offset_));
+      } else {
+        // Normal (non-abstract non-native) methods have various tables to relocate.
+        uint32_t mapping_table_off = orig->GetOatMappingTableOffset();
+        const byte* mapping_table = GetOatAddress(mapping_table_off);
+        copy->SetMappingTable(reinterpret_cast<const uint32_t*>(mapping_table));
 
-  if (orig->IsNative()) {
-    // The native method's pointer is set to a stub to lookup via dlsym when we load the image.
-    // Note this is not the code_ pointer, that is handled above.
-    copy->SetNativeMethod(NULL);
-  } else {
-    // normal (non-abstract non-native) methods have mapping tables to relocate
-    uint32_t mapping_table_off = orig->GetOatMappingTableOffset();
-    const byte* mapping_table = GetOatAddress(mapping_table_off);
-    copy->SetMappingTable(reinterpret_cast<const uint32_t*>(mapping_table));
+        uint32_t vmap_table_offset = orig->GetOatVmapTableOffset();
+        const byte* vmap_table = GetOatAddress(vmap_table_offset);
+        copy->SetVmapTable(reinterpret_cast<const uint16_t*>(vmap_table));
 
-    uint32_t vmap_table_offset = orig->GetOatVmapTableOffset();
-    const byte* vmap_table = GetOatAddress(vmap_table_offset);
-    copy->SetVmapTable(reinterpret_cast<const uint16_t*>(vmap_table));
-
-    uint32_t native_gc_map_offset = orig->GetOatNativeGcMapOffset();
-    const byte* native_gc_map = GetOatAddress(native_gc_map_offset);
-    copy->SetNativeGcMap(reinterpret_cast<const uint8_t*>(native_gc_map));
+        uint32_t native_gc_map_offset = orig->GetOatNativeGcMapOffset();
+        const byte* native_gc_map = GetOatAddress(native_gc_map_offset);
+        copy->SetNativeGcMap(reinterpret_cast<const uint8_t*>(native_gc_map));
+      }
+    }
   }
 }
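
A condensed restatement of the entry points the rewritten FixupMethod installs (no new behaviour, just a summary of the code above):

  // entry_point_from_compiled_code_:
  //   resolution method        -> portable/quick resolution trampoline
  //   abstract method          -> portable/quick to-interpreter bridge (yields AbstractMethodError
  //                               via the interpreter)
  //   method with oat code     -> that code
  //   anything else            -> portable/quick resolution trampoline
  // entry_point_from_interpreter_:
  //   abstract method          -> interpreter-to-interpreter bridge
  //   everything else          -> interpreter-to-compiled-code bridge
  // native method pointer (native methods only) -> JNI dlsym lookup stub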
 
diff --git a/compiler/image_writer.h b/compiler/image_writer.h
index e43ec63..545534f 100644
--- a/compiler/image_writer.h
+++ b/compiler/image_writer.h
@@ -39,8 +39,9 @@
  public:
   explicit ImageWriter(const CompilerDriver& compiler_driver)
       : compiler_driver_(compiler_driver), oat_file_(NULL), image_end_(0), image_begin_(NULL),
-        oat_data_begin_(NULL), interpreter_to_interpreter_entry_offset_(0),
-        interpreter_to_quick_entry_offset_(0), portable_resolution_trampoline_offset_(0),
-        quick_resolution_trampoline_offset_(0) {}
+        oat_data_begin_(NULL), interpreter_to_interpreter_bridge_offset_(0),
+        interpreter_to_compiled_code_bridge_offset_(0), jni_dlsym_lookup_offset_(0),
+        portable_resolution_trampoline_offset_(0), portable_to_interpreter_bridge_offset_(0),
+        quick_resolution_trampoline_offset_(0), quick_to_interpreter_bridge_offset_(0) {}
 
   ~ImageWriter() {}
@@ -195,10 +195,13 @@
   const byte* oat_data_begin_;
 
   // Offset from oat_data_begin_ to the stubs.
-  uint32_t interpreter_to_interpreter_entry_offset_;
-  uint32_t interpreter_to_quick_entry_offset_;
+  uint32_t interpreter_to_interpreter_bridge_offset_;
+  uint32_t interpreter_to_compiled_code_bridge_offset_;
+  uint32_t jni_dlsym_lookup_offset_;
   uint32_t portable_resolution_trampoline_offset_;
+  uint32_t portable_to_interpreter_bridge_offset_;
   uint32_t quick_resolution_trampoline_offset_;
+  uint32_t quick_to_interpreter_bridge_offset_;
 
   // DexCaches seen while scanning for fixing up CodeAndDirectMethods
   typedef std::set<mirror::DexCache*> Set;
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index b069fbd..9713fe9 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -172,8 +172,8 @@
   //    can occur. The result is the saved JNI local state that is restored by the exit call. We
   //    abuse the JNI calling convention here, that is guaranteed to support passing 2 pointer
   //    arguments.
-  uintptr_t jni_start = is_synchronized ? QUICK_ENTRYPOINT_OFFSET(pJniMethodStartSynchronized)
-                                        : QUICK_ENTRYPOINT_OFFSET(pJniMethodStart);
+  ThreadOffset jni_start = is_synchronized ? QUICK_ENTRYPOINT_OFFSET(pJniMethodStartSynchronized)
+                                           : QUICK_ENTRYPOINT_OFFSET(pJniMethodStart);
   main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
   FrameOffset locked_object_sirt_offset(0);
   if (is_synchronized) {
@@ -301,7 +301,7 @@
   // 12. Call into JNI method end possibly passing a returned reference, the method and the current
   //     thread.
   end_jni_conv->ResetIterator(FrameOffset(end_out_arg_size));
-  uintptr_t jni_end;
+  ThreadOffset jni_end(-1);
   if (reference_return) {
     // Pass result.
     jni_end = is_synchronized ? QUICK_ENTRYPOINT_OFFSET(pJniMethodEndWithReferenceSynchronized)
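
Note: jni_end is declared as ThreadOffset(-1) above presumably because ThreadOffset, unlike the uintptr_t it replaces, has no default constructor; both branches that follow assign it a real entry point offset before use. The typed-offset pattern used throughout this patch is to keep the QUICK_ENTRYPOINT_OFFSET value as a ThreadOffset for as long as possible and convert explicitly only where a raw displacement is required, e.g. (ARM assembler style, illustrative only):

  ThreadOffset jni_start = QUICK_ENTRYPOINT_OFFSET(pJniMethodStart);  // typed thread-relative offset
  __ LoadFromOffset(kLoadWord, R12, TR, jni_start.Int32Value());      // raw int32 displacement where needed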
diff --git a/compiler/oat_writer.cc b/compiler/oat_writer.cc
index 5eb837b..21c5317 100644
--- a/compiler/oat_writer.cc
+++ b/compiler/oat_writer.cc
@@ -51,11 +51,14 @@
     size_oat_header_(0),
     size_oat_header_image_file_location_(0),
     size_dex_file_(0),
-    size_interpreter_to_interpreter_entry_(0),
-    size_interpreter_to_quick_entry_(0),
+    size_interpreter_to_interpreter_bridge_(0),
+    size_interpreter_to_compiled_code_bridge_(0),
+    size_jni_dlsym_lookup_(0),
     size_portable_resolution_trampoline_(0),
+    size_portable_to_interpreter_bridge_(0),
     size_quick_resolution_trampoline_(0),
-    size_stubs_alignment_(0),
+    size_quick_to_interpreter_bridge_(0),
+    size_trampoline_alignment_(0),
     size_code_size_(0),
     size_code_(0),
     size_code_alignment_(0),
@@ -176,30 +179,30 @@
   size_executable_offset_alignment_ = offset - old_offset;
   if (compiler_driver_->IsImage()) {
     InstructionSet instruction_set = compiler_driver_->GetInstructionSet();
-    oat_header_->SetInterpreterToInterpreterEntryOffset(offset);
-    interpreter_to_interpreter_entry_.reset(
-        compiler_driver_->CreateInterpreterToInterpreterEntry());
-    offset += interpreter_to_interpreter_entry_->size();
 
-    offset = CompiledCode::AlignCode(offset, instruction_set);
-    oat_header_->SetInterpreterToQuickEntryOffset(offset);
-    interpreter_to_quick_entry_.reset(compiler_driver_->CreateInterpreterToQuickEntry());
-    offset += interpreter_to_quick_entry_->size();
+    #define DO_TRAMPOLINE(field, fn_name) \
+      offset = CompiledCode::AlignCode(offset, instruction_set); \
+      oat_header_->Set ## fn_name ## Offset(offset); \
+      field.reset(compiler_driver_->Create ## fn_name()); \
+      offset += field->size();
 
-    offset = CompiledCode::AlignCode(offset, instruction_set);
-    oat_header_->SetPortableResolutionTrampolineOffset(offset);
-    portable_resolution_trampoline_.reset(compiler_driver_->CreatePortableResolutionTrampoline());
-    offset += portable_resolution_trampoline_->size();
+    DO_TRAMPOLINE(interpreter_to_interpreter_bridge_, InterpreterToInterpreterBridge);
+    DO_TRAMPOLINE(interpreter_to_compiled_code_bridge_, InterpreterToCompiledCodeBridge);
+    DO_TRAMPOLINE(jni_dlsym_lookup_, JniDlsymLookup);
+    DO_TRAMPOLINE(portable_resolution_trampoline_, PortableResolutionTrampoline);
+    DO_TRAMPOLINE(portable_to_interpreter_bridge_, PortableToInterpreterBridge);
+    DO_TRAMPOLINE(quick_resolution_trampoline_, QuickResolutionTrampoline);
+    DO_TRAMPOLINE(quick_to_interpreter_bridge_, QuickToInterpreterBridge);
 
-    offset = CompiledCode::AlignCode(offset, instruction_set);
-    oat_header_->SetQuickResolutionTrampolineOffset(offset);
-    quick_resolution_trampoline_.reset(compiler_driver_->CreateQuickResolutionTrampoline());
-    offset += quick_resolution_trampoline_->size();
+    #undef DO_TRAMPOLINE
   } else {
-    oat_header_->SetInterpreterToInterpreterEntryOffset(0);
-    oat_header_->SetInterpreterToQuickEntryOffset(0);
+    oat_header_->SetInterpreterToInterpreterBridgeOffset(0);
+    oat_header_->SetInterpreterToCompiledCodeBridgeOffset(0);
+    oat_header_->SetJniDlsymLookupOffset(0);
     oat_header_->SetPortableResolutionTrampolineOffset(0);
+    oat_header_->SetPortableToInterpreterBridgeOffset(0);
     oat_header_->SetQuickResolutionTrampolineOffset(0);
+    oat_header_->SetQuickToInterpreterBridgeOffset(0);
   }
   return offset;
 }
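
For reference, each DO_TRAMPOLINE(field, fn_name) invocation above expands to the same four statements the previous per-stub code repeated by hand; for example, DO_TRAMPOLINE(jni_dlsym_lookup_, JniDlsymLookup) becomes:

  offset = CompiledCode::AlignCode(offset, instruction_set);
  oat_header_->SetJniDlsymLookupOffset(offset);
  jni_dlsym_lookup_.reset(compiler_driver_->CreateJniDlsymLookup());
  offset += jni_dlsym_lookup_->size();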
@@ -469,11 +472,14 @@
     DO_STAT(size_oat_header_);
     DO_STAT(size_oat_header_image_file_location_);
     DO_STAT(size_dex_file_);
-    DO_STAT(size_interpreter_to_interpreter_entry_);
-    DO_STAT(size_interpreter_to_quick_entry_);
+    DO_STAT(size_interpreter_to_interpreter_bridge_);
+    DO_STAT(size_interpreter_to_compiled_code_bridge_);
+    DO_STAT(size_jni_dlsym_lookup_);
     DO_STAT(size_portable_resolution_trampoline_);
+    DO_STAT(size_portable_to_interpreter_bridge_);
     DO_STAT(size_quick_resolution_trampoline_);
-    DO_STAT(size_stubs_alignment_);
+    DO_STAT(size_quick_to_interpreter_bridge_);
+    DO_STAT(size_trampoline_alignment_);
     DO_STAT(size_code_size_);
     DO_STAT(size_code_);
     DO_STAT(size_code_alignment_);
@@ -545,52 +551,30 @@
   DCHECK_OFFSET();
   if (compiler_driver_->IsImage()) {
     InstructionSet instruction_set = compiler_driver_->GetInstructionSet();
-    if (!out.WriteFully(&(*interpreter_to_interpreter_entry_)[0],
-                        interpreter_to_interpreter_entry_->size())) {
-      PLOG(ERROR) << "Failed to write interpreter to interpreter entry to " << out.GetLocation();
-      return false;
-    }
-    size_interpreter_to_interpreter_entry_ += interpreter_to_interpreter_entry_->size();
-    relative_offset += interpreter_to_interpreter_entry_->size();
-    DCHECK_OFFSET();
 
-    uint32_t aligned_offset = CompiledCode::AlignCode(relative_offset, instruction_set);
-    uint32_t alignment_padding = aligned_offset - relative_offset;
-    out.Seek(alignment_padding, kSeekCurrent);
-    size_stubs_alignment_ += alignment_padding;
-    if (!out.WriteFully(&(*interpreter_to_quick_entry_)[0], interpreter_to_quick_entry_->size())) {
-      PLOG(ERROR) << "Failed to write interpreter to quick entry to " << out.GetLocation();
-      return false;
-    }
-    size_interpreter_to_quick_entry_ += interpreter_to_quick_entry_->size();
-    relative_offset += alignment_padding + interpreter_to_quick_entry_->size();
-    DCHECK_OFFSET();
+    #define DO_TRAMPOLINE(field) \
+      do { \
+        uint32_t aligned_offset = CompiledCode::AlignCode(relative_offset, instruction_set); \
+        uint32_t alignment_padding = aligned_offset - relative_offset; \
+        out.Seek(alignment_padding, kSeekCurrent); \
+        size_trampoline_alignment_ += alignment_padding; \
+        if (!out.WriteFully(&(*field)[0], field->size())) { \
+          PLOG(ERROR) << "Failed to write " # field " to " << out.GetLocation(); \
+          return false; \
+        } \
+        size_ ## field += field->size(); \
+        relative_offset += alignment_padding + field->size(); \
+        DCHECK_OFFSET(); \
+      } while (false)
 
-    aligned_offset = CompiledCode::AlignCode(relative_offset, instruction_set);
-    alignment_padding = aligned_offset - relative_offset;
-    out.Seek(alignment_padding, kSeekCurrent);
-    size_stubs_alignment_ += alignment_padding;
-    if (!out.WriteFully(&(*portable_resolution_trampoline_)[0],
-                        portable_resolution_trampoline_->size())) {
-      PLOG(ERROR) << "Failed to write portable resolution trampoline to " << out.GetLocation();
-      return false;
-    }
-    size_portable_resolution_trampoline_ += portable_resolution_trampoline_->size();
-    relative_offset += alignment_padding + portable_resolution_trampoline_->size();
-    DCHECK_OFFSET();
-
-    aligned_offset = CompiledCode::AlignCode(relative_offset, instruction_set);
-    alignment_padding = aligned_offset - relative_offset;
-    out.Seek(alignment_padding, kSeekCurrent);
-    size_stubs_alignment_ += alignment_padding;
-    if (!out.WriteFully(&(*quick_resolution_trampoline_)[0],
-                        quick_resolution_trampoline_->size())) {
-      PLOG(ERROR) << "Failed to write quick resolution trampoline to " << out.GetLocation();
-      return false;
-    }
-    size_quick_resolution_trampoline_ += quick_resolution_trampoline_->size();
-    relative_offset += alignment_padding + quick_resolution_trampoline_->size();
-    DCHECK_OFFSET();
+    DO_TRAMPOLINE(interpreter_to_interpreter_bridge_);
+    DO_TRAMPOLINE(interpreter_to_compiled_code_bridge_);
+    DO_TRAMPOLINE(jni_dlsym_lookup_);
+    DO_TRAMPOLINE(portable_resolution_trampoline_);
+    DO_TRAMPOLINE(portable_to_interpreter_bridge_);
+    DO_TRAMPOLINE(quick_resolution_trampoline_);
+    DO_TRAMPOLINE(quick_to_interpreter_bridge_);
+    #undef DO_TRAMPOLINE
   }
   return relative_offset;
 }
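
Likewise, the write-side DO_TRAMPOLINE(field) expands to the align/pad/write/account sequence that was previously duplicated per stub; DO_TRAMPOLINE(quick_to_interpreter_bridge_), for instance, becomes:

  do {
    uint32_t aligned_offset = CompiledCode::AlignCode(relative_offset, instruction_set);
    uint32_t alignment_padding = aligned_offset - relative_offset;
    out.Seek(alignment_padding, kSeekCurrent);
    size_trampoline_alignment_ += alignment_padding;
    if (!out.WriteFully(&(*quick_to_interpreter_bridge_)[0], quick_to_interpreter_bridge_->size())) {
      PLOG(ERROR) << "Failed to write quick_to_interpreter_bridge_ to " << out.GetLocation();
      return false;
    }
    size_quick_to_interpreter_bridge_ += quick_to_interpreter_bridge_->size();
    relative_offset += alignment_padding + quick_to_interpreter_bridge_->size();
    DCHECK_OFFSET();
  } while (false);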
diff --git a/compiler/oat_writer.h b/compiler/oat_writer.h
index f2c5626..e6cc0bc 100644
--- a/compiler/oat_writer.h
+++ b/compiler/oat_writer.h
@@ -181,10 +181,13 @@
   OatHeader* oat_header_;
   std::vector<OatDexFile*> oat_dex_files_;
   std::vector<OatClass*> oat_classes_;
-  UniquePtr<const std::vector<uint8_t> > interpreter_to_interpreter_entry_;
-  UniquePtr<const std::vector<uint8_t> > interpreter_to_quick_entry_;
+  UniquePtr<const std::vector<uint8_t> > interpreter_to_interpreter_bridge_;
+  UniquePtr<const std::vector<uint8_t> > interpreter_to_compiled_code_bridge_;
+  UniquePtr<const std::vector<uint8_t> > jni_dlsym_lookup_;
   UniquePtr<const std::vector<uint8_t> > portable_resolution_trampoline_;
+  UniquePtr<const std::vector<uint8_t> > portable_to_interpreter_bridge_;
   UniquePtr<const std::vector<uint8_t> > quick_resolution_trampoline_;
+  UniquePtr<const std::vector<uint8_t> > quick_to_interpreter_bridge_;
 
   // output stats
   uint32_t size_dex_file_alignment_;
@@ -192,11 +195,14 @@
   uint32_t size_oat_header_;
   uint32_t size_oat_header_image_file_location_;
   uint32_t size_dex_file_;
-  uint32_t size_interpreter_to_interpreter_entry_;
-  uint32_t size_interpreter_to_quick_entry_;
+  uint32_t size_interpreter_to_interpreter_bridge_;
+  uint32_t size_interpreter_to_compiled_code_bridge_;
+  uint32_t size_jni_dlsym_lookup_;
   uint32_t size_portable_resolution_trampoline_;
+  uint32_t size_portable_to_interpreter_bridge_;
   uint32_t size_quick_resolution_trampoline_;
-  uint32_t size_stubs_alignment_;
+  uint32_t size_quick_to_interpreter_bridge_;
+  uint32_t size_trampoline_alignment_;
   uint32_t size_code_size_;
   uint32_t size_code_;
   uint32_t size_code_alignment_;
diff --git a/compiler/stubs/portable/stubs.cc b/compiler/stubs/portable/stubs.cc
deleted file mode 100644
index def43e2..0000000
--- a/compiler/stubs/portable/stubs.cc
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "stubs/stubs.h"
-
-#include "entrypoints/quick/quick_entrypoints.h"
-#include "jni_internal.h"
-#include "utils/arm/assembler_arm.h"
-#include "utils/mips/assembler_mips.h"
-#include "utils/x86/assembler_x86.h"
-#include "stack_indirect_reference_table.h"
-#include "sirt_ref.h"
-
-#define __ assembler->
-
-namespace art {
-
-namespace arm {
-const std::vector<uint8_t>* CreatePortableResolutionTrampoline() {
-  UniquePtr<ArmAssembler> assembler(static_cast<ArmAssembler*>(Assembler::Create(kArm)));
-  RegList save = (1 << R0) | (1 << R1) | (1 << R2) | (1 << R3) | (1 << LR);
-
-  __ PushList(save);
-  __ LoadFromOffset(kLoadWord, R12, TR,
-                    PORTABLE_ENTRYPOINT_OFFSET(pPortableResolutionTrampolineFromCode));
-  __ mov(R3, ShifterOperand(TR));  // Pass Thread::Current() in R3
-  __ mov(R2, ShifterOperand(SP));  // Pass sp for Method** callee_addr
-  __ IncreaseFrameSize(12);         // 3 words of space for alignment
-  // Call to resolution trampoline (callee, receiver, callee_addr, Thread*)
-  __ blx(R12);
-  __ mov(R12, ShifterOperand(R0));  // Save code address returned into R12
-  __ DecreaseFrameSize(12);
-  __ PopList(save);
-  __ cmp(R12, ShifterOperand(0));
-  __ bx(R12, NE);                   // If R12 != 0 tail call method's code
-  __ bx(LR);                        // Return to caller to handle exception
-
-  assembler->EmitSlowPaths();
-  size_t cs = assembler->CodeSize();
-  UniquePtr<std::vector<uint8_t> > resolution_trampoline(new std::vector<uint8_t>(cs));
-  MemoryRegion code(&(*resolution_trampoline)[0], resolution_trampoline->size());
-  assembler->FinalizeInstructions(code);
-
-  return resolution_trampoline.release();
-}
-}  // namespace arm
-
-namespace mips {
-const std::vector<uint8_t>* CreatePortableResolutionTrampoline() {
-  UniquePtr<MipsAssembler> assembler(static_cast<MipsAssembler*>(Assembler::Create(kMips)));
-  // Build frame and save argument registers and RA.
-  __ AddConstant(SP, SP, -32);
-  __ StoreToOffset(kStoreWord, RA, SP, 28);
-  __ StoreToOffset(kStoreWord, A3, SP, 12);
-  __ StoreToOffset(kStoreWord, A2, SP, 8);
-  __ StoreToOffset(kStoreWord, A1, SP, 4);
-  __ StoreToOffset(kStoreWord, A0, SP, 0);
-
-  __ LoadFromOffset(kLoadWord, T9, S1,
-                    PORTABLE_ENTRYPOINT_OFFSET(pPortableResolutionTrampolineFromCode));
-  __ Move(A3, S1);  // Pass Thread::Current() in A3
-  __ Move(A2, SP);  // Pass SP for Method** callee_addr
-  __ Jalr(T9);  // Call to resolution trampoline (callee, receiver, callee_addr, Thread*)
-
-  // Restore frame, argument registers, and RA.
-  __ LoadFromOffset(kLoadWord, A0, SP, 0);
-  __ LoadFromOffset(kLoadWord, A1, SP, 4);
-  __ LoadFromOffset(kLoadWord, A2, SP, 8);
-  __ LoadFromOffset(kLoadWord, A3, SP, 12);
-  __ LoadFromOffset(kLoadWord, RA, SP, 28);
-  __ AddConstant(SP, SP, 32);
-
-  Label resolve_fail;
-  __ EmitBranch(V0, ZERO, &resolve_fail, true);
-  __ Jr(V0);  // If V0 != 0 tail call method's code
-  __ Bind(&resolve_fail, false);
-  __ Jr(RA);  // Return to caller to handle exception
-
-  assembler->EmitSlowPaths();
-  size_t cs = assembler->CodeSize();
-  UniquePtr<std::vector<uint8_t> > resolution_trampoline(new std::vector<uint8_t>(cs));
-  MemoryRegion code(&(*resolution_trampoline)[0], resolution_trampoline->size());
-  assembler->FinalizeInstructions(code);
-
-  return resolution_trampoline.release();
-}
-}  // namespace mips
-
-namespace x86 {
-const std::vector<uint8_t>* CreatePortableResolutionTrampoline() {
-  UniquePtr<X86Assembler> assembler(static_cast<X86Assembler*>(Assembler::Create(kX86)));
-
-  __ pushl(EBP);
-  __ movl(EBP, ESP);          // save ESP
-  __ subl(ESP, Immediate(8));  // Align stack
-  __ movl(EAX, Address(EBP, 8));  // Method* called
-  __ leal(EDX, Address(EBP, 8));  // Method** called_addr
-  __ fs()->pushl(Address::Absolute(Thread::SelfOffset()));  // pass thread
-  __ pushl(EDX);  // pass called_addr
-  __ pushl(ECX);  // pass receiver
-  __ pushl(EAX);  // pass called
-  // Call to resolve method.
-  __ Call(ThreadOffset(PORTABLE_ENTRYPOINT_OFFSET(pPortableResolutionTrampolineFromCode)),
-          X86ManagedRegister::FromCpuRegister(ECX));
-  __ leave();
-
-  Label resolve_fail;  // forward declaration
-  __ cmpl(EAX, Immediate(0));
-  __ j(kEqual, &resolve_fail);
-  __ jmp(EAX);
-  // Tail call to intended method.
-  __ Bind(&resolve_fail);
-  __ ret();
-
-  assembler->EmitSlowPaths();
-  size_t cs = assembler->CodeSize();
-  UniquePtr<std::vector<uint8_t> > resolution_trampoline(new std::vector<uint8_t>(cs));
-  MemoryRegion code(&(*resolution_trampoline)[0], resolution_trampoline->size());
-  assembler->FinalizeInstructions(code);
-
-  return resolution_trampoline.release();
-}
-}  // namespace x86
-
-}  // namespace art
diff --git a/compiler/stubs/quick/stubs.cc b/compiler/stubs/quick/stubs.cc
deleted file mode 100644
index 912f1c0..0000000
--- a/compiler/stubs/quick/stubs.cc
+++ /dev/null
@@ -1,263 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "stubs/stubs.h"
-
-#include "entrypoints/quick/quick_entrypoints.h"
-#include "jni_internal.h"
-#include "utils/arm/assembler_arm.h"
-#include "utils/mips/assembler_mips.h"
-#include "utils/x86/assembler_x86.h"
-#include "sirt_ref.h"
-#include "stack_indirect_reference_table.h"
-
-#define __ assembler->
-
-namespace art {
-
-namespace arm {
-const std::vector<uint8_t>* CreateQuickResolutionTrampoline() {
-  UniquePtr<ArmAssembler> assembler(static_cast<ArmAssembler*>(Assembler::Create(kArm)));
-  // | Out args |
-  // | Method*  | <- SP on entry
-  // | LR       |    return address into caller
-  // | ...      |    callee saves
-  // | R3       |    possible argument
-  // | R2       |    possible argument
-  // | R1       |    possible argument
-  // | R0       |    junk on call to QuickResolutionTrampolineFromCode, holds result Method*
-  // | Method*  |    Callee save Method* set up by QuickResoltuionTrampolineFromCode
-  // Save callee saves and ready frame for exception delivery
-  RegList save = (1 << R1) | (1 << R2) | (1 << R3) | (1 << R5) | (1 << R6) | (1 << R7) | (1 << R8) |
-                 (1 << R10) | (1 << R11) | (1 << LR);
-  // TODO: enable when GetCalleeSaveMethod is available at stub generation time
-  // DCHECK_EQ(save, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetCoreSpillMask());
-  __ PushList(save);
-  __ LoadFromOffset(kLoadWord, R12, TR, QUICK_ENTRYPOINT_OFFSET(pQuickResolutionTrampolineFromCode));
-  __ mov(R3, ShifterOperand(TR));  // Pass Thread::Current() in R3
-  __ IncreaseFrameSize(8);         // 2 words of space for alignment
-  __ mov(R2, ShifterOperand(SP));  // Pass SP
-  // Call to resolution trampoline (method_idx, receiver, sp, Thread*)
-  __ blx(R12);
-  __ mov(R12, ShifterOperand(R0));  // Save code address returned into R12
-  // Restore registers which may have been modified by GC, "R0" will hold the Method*
-  __ DecreaseFrameSize(4);
-  __ PopList((1 << R0) | save);
-  __ bx(R12);  // Leaf call to method's code
-  __ bkpt(0);
-
-  assembler->EmitSlowPaths();
-  size_t cs = assembler->CodeSize();
-  UniquePtr<std::vector<uint8_t> > resolution_trampoline(new std::vector<uint8_t>(cs));
-  MemoryRegion code(&(*resolution_trampoline)[0], resolution_trampoline->size());
-  assembler->FinalizeInstructions(code);
-
-  return resolution_trampoline.release();
-}
-
-const std::vector<uint8_t>* CreateInterpreterToInterpreterEntry() {
-  UniquePtr<ArmAssembler> assembler(static_cast<ArmAssembler*>(Assembler::Create(kArm)));
-
-  __ LoadFromOffset(kLoadWord, PC, R0, QUICK_ENTRYPOINT_OFFSET(pInterpreterToInterpreterEntry));
-  __ bkpt(0);
-
-  size_t cs = assembler->CodeSize();
-  UniquePtr<std::vector<uint8_t> > entry_stub(new std::vector<uint8_t>(cs));
-  MemoryRegion code(&(*entry_stub)[0], entry_stub->size());
-  assembler->FinalizeInstructions(code);
-
-  return entry_stub.release();
-}
-
-const std::vector<uint8_t>* CreateInterpreterToQuickEntry() {
-  UniquePtr<ArmAssembler> assembler(static_cast<ArmAssembler*>(Assembler::Create(kArm)));
-
-  __ LoadFromOffset(kLoadWord, PC, R0, QUICK_ENTRYPOINT_OFFSET(pInterpreterToQuickEntry));
-  __ bkpt(0);
-
-  size_t cs = assembler->CodeSize();
-  UniquePtr<std::vector<uint8_t> > entry_stub(new std::vector<uint8_t>(cs));
-  MemoryRegion code(&(*entry_stub)[0], entry_stub->size());
-  assembler->FinalizeInstructions(code);
-
-  return entry_stub.release();
-}
-}  // namespace arm
-
-namespace mips {
-const std::vector<uint8_t>* CreateQuickResolutionTrampoline() {
-  UniquePtr<MipsAssembler> assembler(static_cast<MipsAssembler*>(Assembler::Create(kMips)));
-  // | Out args   |
-  // | Method*    | <- SP on entry
-  // | RA         |    return address into caller
-  // | ...        |    callee saves
-  // | A3         |    possible argument
-  // | A2         |    possible argument
-  // | A1         |    possible argument
-  // | A0/Method* |    Callee save Method* set up by UnresolvedDirectMethodTrampolineFromCode
-  // Save callee saves and ready frame for exception delivery
-  __ AddConstant(SP, SP, -64);
-  __ StoreToOffset(kStoreWord, RA, SP, 60);
-  __ StoreToOffset(kStoreWord, FP, SP, 56);
-  __ StoreToOffset(kStoreWord, GP, SP, 52);
-  __ StoreToOffset(kStoreWord, S7, SP, 48);
-  __ StoreToOffset(kStoreWord, S6, SP, 44);
-  __ StoreToOffset(kStoreWord, S5, SP, 40);
-  __ StoreToOffset(kStoreWord, S4, SP, 36);
-  __ StoreToOffset(kStoreWord, S3, SP, 32);
-  __ StoreToOffset(kStoreWord, S2, SP, 28);
-  __ StoreToOffset(kStoreWord, A3, SP, 12);
-  __ StoreToOffset(kStoreWord, A2, SP, 8);
-  __ StoreToOffset(kStoreWord, A1, SP, 4);
-
-  __ LoadFromOffset(kLoadWord, T9, S1, QUICK_ENTRYPOINT_OFFSET(pQuickResolutionTrampolineFromCode));
-  __ Move(A3, S1);  // Pass Thread::Current() in A3
-  __ Move(A2, SP);  // Pass SP for Method** callee_addr
-  __ Jalr(T9);  // Call to resolution trampoline (method_idx, receiver, sp, Thread*)
-
-  // Restore registers which may have been modified by GC
-  __ LoadFromOffset(kLoadWord, A0, SP, 0);
-  __ LoadFromOffset(kLoadWord, A1, SP, 4);
-  __ LoadFromOffset(kLoadWord, A2, SP, 8);
-  __ LoadFromOffset(kLoadWord, A3, SP, 12);
-  __ LoadFromOffset(kLoadWord, S2, SP, 28);
-  __ LoadFromOffset(kLoadWord, S3, SP, 32);
-  __ LoadFromOffset(kLoadWord, S4, SP, 36);
-  __ LoadFromOffset(kLoadWord, S5, SP, 40);
-  __ LoadFromOffset(kLoadWord, S6, SP, 44);
-  __ LoadFromOffset(kLoadWord, S7, SP, 48);
-  __ LoadFromOffset(kLoadWord, GP, SP, 52);
-  __ LoadFromOffset(kLoadWord, FP, SP, 56);
-  __ LoadFromOffset(kLoadWord, RA, SP, 60);
-  __ AddConstant(SP, SP, 64);
-
-  __ Move(T9, V0);  // Put method's code in T9
-  __ Jr(T9);  // Leaf call to method's code
-
-  __ Break();
-
-  assembler->EmitSlowPaths();
-  size_t cs = assembler->CodeSize();
-  UniquePtr<std::vector<uint8_t> > resolution_trampoline(new std::vector<uint8_t>(cs));
-  MemoryRegion code(&(*resolution_trampoline)[0], resolution_trampoline->size());
-  assembler->FinalizeInstructions(code);
-
-  return resolution_trampoline.release();
-}
-
-const std::vector<uint8_t>* CreateInterpreterToInterpreterEntry() {
-  UniquePtr<MipsAssembler> assembler(static_cast<MipsAssembler*>(Assembler::Create(kMips)));
-
-  __ LoadFromOffset(kLoadWord, T9, A0, QUICK_ENTRYPOINT_OFFSET(pInterpreterToInterpreterEntry));
-  __ Jr(T9);
-  __ Break();
-
-  size_t cs = assembler->CodeSize();
-  UniquePtr<std::vector<uint8_t> > entry_stub(new std::vector<uint8_t>(cs));
-  MemoryRegion code(&(*entry_stub)[0], entry_stub->size());
-  assembler->FinalizeInstructions(code);
-
-  return entry_stub.release();
-}
-
-const std::vector<uint8_t>* CreateInterpreterToQuickEntry() {
-  UniquePtr<MipsAssembler> assembler(static_cast<MipsAssembler*>(Assembler::Create(kMips)));
-
-  __ LoadFromOffset(kLoadWord, T9, A0, QUICK_ENTRYPOINT_OFFSET(pInterpreterToInterpreterEntry));
-  __ Jr(T9);
-  __ Break();
-
-  size_t cs = assembler->CodeSize();
-  UniquePtr<std::vector<uint8_t> > entry_stub(new std::vector<uint8_t>(cs));
-  MemoryRegion code(&(*entry_stub)[0], entry_stub->size());
-  assembler->FinalizeInstructions(code);
-
-  return entry_stub.release();
-}
-}  // namespace mips
-
-namespace x86 {
-const std::vector<uint8_t>* CreateQuickResolutionTrampoline() {
-  UniquePtr<X86Assembler> assembler(static_cast<X86Assembler*>(Assembler::Create(kX86)));
-  // Set up the callee save frame to conform with Runtime::CreateCalleeSaveMethod(kRefsAndArgs)
-  // return address
-  __ pushl(EDI);
-  __ pushl(ESI);
-  __ pushl(EBP);
-  __ pushl(EBX);
-  __ pushl(EDX);
-  __ pushl(ECX);
-  __ pushl(EAX);  // <-- callee save Method* to go here
-  __ movl(EDX, ESP);          // save ESP
-  __ fs()->pushl(Address::Absolute(Thread::SelfOffset()));  // pass Thread*
-  __ pushl(EDX);              // pass ESP for Method*
-  __ pushl(ECX);              // pass receiver
-  __ pushl(EAX);              // pass Method*
-
-  // Call to resolve method.
-  __ Call(ThreadOffset(QUICK_ENTRYPOINT_OFFSET(pQuickResolutionTrampolineFromCode)),
-          X86ManagedRegister::FromCpuRegister(ECX));
-
-  __ movl(EDI, EAX);  // save code pointer in EDI
-  __ addl(ESP, Immediate(16));  // Pop arguments
-  __ popl(EAX);  // Restore args.
-  __ popl(ECX);
-  __ popl(EDX);
-  __ popl(EBX);
-  __ popl(EBP);  // Restore callee saves.
-  __ popl(ESI);
-  // Swap EDI callee save with code pointer
-  __ xchgl(EDI, Address(ESP, 0));
-  // Tail call to intended method.
-  __ ret();
-
-  assembler->EmitSlowPaths();
-  size_t cs = assembler->CodeSize();
-  UniquePtr<std::vector<uint8_t> > resolution_trampoline(new std::vector<uint8_t>(cs));
-  MemoryRegion code(&(*resolution_trampoline)[0], resolution_trampoline->size());
-  assembler->FinalizeInstructions(code);
-
-  return resolution_trampoline.release();
-}
-
-const std::vector<uint8_t>* CreateInterpreterToInterpreterEntry() {
-  UniquePtr<X86Assembler> assembler(static_cast<X86Assembler*>(Assembler::Create(kX86)));
-
-  __ fs()->jmp(Address::Absolute(ThreadOffset(QUICK_ENTRYPOINT_OFFSET(pInterpreterToInterpreterEntry))));
-
-  size_t cs = assembler->CodeSize();
-  UniquePtr<std::vector<uint8_t> > entry_stub(new std::vector<uint8_t>(cs));
-  MemoryRegion code(&(*entry_stub)[0], entry_stub->size());
-  assembler->FinalizeInstructions(code);
-
-  return entry_stub.release();
-}
-
-const std::vector<uint8_t>* CreateInterpreterToQuickEntry() {
-  UniquePtr<X86Assembler> assembler(static_cast<X86Assembler*>(Assembler::Create(kX86)));
-
-  __ fs()->jmp(Address::Absolute(ThreadOffset(QUICK_ENTRYPOINT_OFFSET(pInterpreterToQuickEntry))));
-
-  size_t cs = assembler->CodeSize();
-  UniquePtr<std::vector<uint8_t> > entry_stub(new std::vector<uint8_t>(cs));
-  MemoryRegion code(&(*entry_stub)[0], entry_stub->size());
-  assembler->FinalizeInstructions(code);
-
-  return entry_stub.release();
-}
-}  // namespace x86
-
-}  // namespace art
diff --git a/compiler/stubs/stubs.h b/compiler/stubs/stubs.h
deleted file mode 100644
index d85eae8..0000000
--- a/compiler/stubs/stubs.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ART_COMPILER_STUBS_STUBS_H_
-#define ART_COMPILER_STUBS_STUBS_H_
-
-#include "runtime.h"
-
-namespace art {
-
-namespace arm {
-const std::vector<uint8_t>* CreatePortableResolutionTrampoline()
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-const std::vector<uint8_t>* CreateQuickResolutionTrampoline()
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-const std::vector<uint8_t>* CreateInterpreterToInterpreterEntry()
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-const std::vector<uint8_t>* CreateInterpreterToQuickEntry()
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-}
-
-namespace mips {
-const std::vector<uint8_t>* CreatePortableResolutionTrampoline()
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-const std::vector<uint8_t>* CreateQuickResolutionTrampoline()
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-const std::vector<uint8_t>* CreateInterpreterToInterpreterEntry()
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-const std::vector<uint8_t>* CreateInterpreterToQuickEntry()
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-}
-
-namespace x86 {
-const std::vector<uint8_t>* CreatePortableResolutionTrampoline()
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-const std::vector<uint8_t>* CreateQuickResolutionTrampoline()
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-const std::vector<uint8_t>* CreateInterpreterToInterpreterEntry()
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-const std::vector<uint8_t>* CreateInterpreterToQuickEntry()
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-}
-
-}  // namespace art
-
-#endif  // ART_COMPILER_STUBS_STUBS_H_
diff --git a/compiler/utils/arm/assembler_arm.cc b/compiler/utils/arm/assembler_arm.cc
index fa202c3..f0d11d8 100644
--- a/compiler/utils/arm/assembler_arm.cc
+++ b/compiler/utils/arm/assembler_arm.cc
@@ -1246,10 +1246,10 @@
 // Implementation note: this method must emit at most one instruction when
 // Address::CanHoldLoadOffset.
 void ArmAssembler::LoadFromOffset(LoadOperandType type,
-                               Register reg,
-                               Register base,
-                               int32_t offset,
-                               Condition cond) {
+                                  Register reg,
+                                  Register base,
+                                  int32_t offset,
+                                  Condition cond) {
   if (!Address::CanHoldLoadOffset(type, offset)) {
     CHECK(base != IP);
     LoadImmediate(IP, offset, cond);
@@ -1884,7 +1884,7 @@
   // Don't care about preserving R0 as this call won't return
   __ mov(R0, ShifterOperand(scratch_.AsCoreRegister()));
   // Set up call to Thread::Current()->pDeliverException
-  __ LoadFromOffset(kLoadWord, R12, TR, QUICK_ENTRYPOINT_OFFSET(pDeliverException));
+  __ LoadFromOffset(kLoadWord, R12, TR, QUICK_ENTRYPOINT_OFFSET(pDeliverException).Int32Value());
   __ blx(R12);
   // Call never returns
   __ bkpt(0);
diff --git a/compiler/utils/mips/assembler_mips.cc b/compiler/utils/mips/assembler_mips.cc
index 931d7ab..2be3d56 100644
--- a/compiler/utils/mips/assembler_mips.cc
+++ b/compiler/utils/mips/assembler_mips.cc
@@ -813,14 +813,7 @@
 
 void MipsAssembler::Copy(FrameOffset /*dest*/, FrameOffset /*src_base*/, Offset /*src_offset*/,
                          ManagedRegister /*mscratch*/, size_t /*size*/) {
-  UNIMPLEMENTED(FATAL) << "no arm implementation";
-#if 0
-  Register scratch = mscratch.AsMips().AsCoreRegister();
-  CHECK_EQ(size, 4u);
-  movl(scratch, Address(ESP, src_base));
-  movl(scratch, Address(scratch, src_offset));
-  movl(Address(ESP, dest), scratch);
-#endif
+  UNIMPLEMENTED(FATAL) << "no mips implementation";
 }
 
 void MipsAssembler::Copy(ManagedRegister dest, Offset dest_offset,
@@ -834,24 +827,11 @@
 
 void MipsAssembler::Copy(FrameOffset /*dest*/, Offset /*dest_offset*/, FrameOffset /*src*/, Offset /*src_offset*/,
                          ManagedRegister /*mscratch*/, size_t /*size*/) {
-  UNIMPLEMENTED(FATAL) << "no arm implementation";
-#if 0
-  Register scratch = mscratch.AsMips().AsCoreRegister();
-  CHECK_EQ(size, 4u);
-  CHECK_EQ(dest.Int32Value(), src.Int32Value());
-  movl(scratch, Address(ESP, src));
-  pushl(Address(scratch, src_offset));
-  popl(Address(scratch, dest_offset));
-#endif
+  UNIMPLEMENTED(FATAL) << "no mips implementation";
 }
 
 void MipsAssembler::MemoryBarrier(ManagedRegister) {
-  UNIMPLEMENTED(FATAL) << "NEEDS TO BE IMPLEMENTED";
-#if 0
-#if ANDROID_SMP != 0
-  mfence();
-#endif
-#endif
+  UNIMPLEMENTED(FATAL) << "no mips implementation";
 }
 
 void MipsAssembler::CreateSirtEntry(ManagedRegister mout_reg,
@@ -953,10 +933,7 @@
 }
 
 void MipsAssembler::Call(ThreadOffset /*offset*/, ManagedRegister /*mscratch*/) {
-  UNIMPLEMENTED(FATAL) << "no arm implementation";
-#if 0
-  fs()->call(Address::Absolute(offset));
-#endif
+  UNIMPLEMENTED(FATAL) << "no mips implementation";
 }
 
 void MipsAssembler::GetCurrentThread(ManagedRegister tr) {
@@ -988,7 +965,7 @@
   // Don't care about preserving A0 as this call won't return
   __ Move(A0, scratch_.AsCoreRegister());
   // Set up call to Thread::Current()->pDeliverException
-  __ LoadFromOffset(kLoadWord, T9, S1, QUICK_ENTRYPOINT_OFFSET(pDeliverException));
+  __ LoadFromOffset(kLoadWord, T9, S1, QUICK_ENTRYPOINT_OFFSET(pDeliverException).Int32Value());
   __ Jr(T9);
   // Call never returns
   __ Break();
diff --git a/runtime/Android.mk b/runtime/Android.mk
index 51bb3eb..4f25c00 100644
--- a/runtime/Android.mk
+++ b/runtime/Android.mk
@@ -142,6 +142,7 @@
 	arch/x86/registers_x86.cc \
 	arch/mips/registers_mips.cc \
 	entrypoints/entrypoint_utils.cc \
+	entrypoints/interpreter/interpreter_entrypoints.cc \
 	entrypoints/jni/jni_entrypoints.cc \
 	entrypoints/math_entrypoints.cc \
 	entrypoints/portable/portable_alloc_entrypoints.cc \
@@ -163,15 +164,13 @@
 	entrypoints/quick/quick_field_entrypoints.cc \
 	entrypoints/quick/quick_fillarray_entrypoints.cc \
 	entrypoints/quick/quick_instrumentation_entrypoints.cc \
-	entrypoints/quick/quick_interpreter_entrypoints.cc \
 	entrypoints/quick/quick_invoke_entrypoints.cc \
 	entrypoints/quick/quick_jni_entrypoints.cc \
 	entrypoints/quick/quick_lock_entrypoints.cc \
 	entrypoints/quick/quick_math_entrypoints.cc \
-	entrypoints/quick/quick_proxy_entrypoints.cc \
-	entrypoints/quick/quick_stub_entrypoints.cc \
 	entrypoints/quick/quick_thread_entrypoints.cc \
-	entrypoints/quick/quick_throw_entrypoints.cc
+	entrypoints/quick/quick_throw_entrypoints.cc \
+	entrypoints/quick/quick_trampoline_entrypoints.cc
 
 LIBART_TARGET_SRC_FILES := \
 	$(LIBART_COMMON_SRC_FILES) \
diff --git a/runtime/arch/arm/asm_support_arm.S b/runtime/arch/arm/asm_support_arm.S
index ed655e9..559788f 100644
--- a/runtime/arch/arm/asm_support_arm.S
+++ b/runtime/arch/arm/asm_support_arm.S
@@ -35,4 +35,11 @@
     .size \name, .-\name
 .endm
 
+.macro UNIMPLEMENTED name
+    ENTRY \name
+    bkpt
+    bkpt
+    END \name
+.endm
+
 #endif  // ART_RUNTIME_ARCH_X86_ASM_SUPPORT_X86_S_
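
The new UNIMPLEMENTED macro gives not-yet-ported assembly entry points a well-defined, crawlable body instead of an undefined symbol; a hypothetical use such as UNIMPLEMENTED art_quick_example (name made up for illustration) expands to:

    ENTRY art_quick_example
    bkpt
    bkpt
    END art_quick_example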
diff --git a/runtime/arch/arm/entrypoints_init_arm.cc b/runtime/arch/arm/entrypoints_init_arm.cc
index b71a158..848bacc 100644
--- a/runtime/arch/arm/entrypoints_init_arm.cc
+++ b/runtime/arch/arm/entrypoints_init_arm.cc
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include "entrypoints/interpreter/interpreter_entrypoints.h"
 #include "entrypoints/portable/portable_entrypoints.h"
 #include "entrypoints/quick/quick_entrypoints.h"
 #include "entrypoints/entrypoint_utils.h"
@@ -21,49 +22,61 @@
 
 namespace art {
 
+// Interpreter entrypoints.
+extern "C" void artInterpreterToInterpreterBridge(Thread* self, MethodHelper& mh,
+                                                 const DexFile::CodeItem* code_item,
+                                                 ShadowFrame* shadow_frame, JValue* result);
+extern "C" void artInterperterToCompiledCodeBridge(Thread* self, MethodHelper& mh,
+                                           const DexFile::CodeItem* code_item,
+                                           ShadowFrame* shadow_frame, JValue* result);
+
+// Portable entrypoints.
+extern "C" void art_portable_resolution_trampoline(mirror::AbstractMethod*);
+extern "C" void art_portable_to_interpreter_bridge(mirror::AbstractMethod*);
+
 // Alloc entrypoints.
-extern "C" void* art_quick_alloc_array_from_code(uint32_t, void*, int32_t);
-extern "C" void* art_quick_alloc_array_from_code_with_access_check(uint32_t, void*, int32_t);
-extern "C" void* art_quick_alloc_object_from_code(uint32_t type_idx, void* method);
-extern "C" void* art_quick_alloc_object_from_code_with_access_check(uint32_t type_idx, void* method);
-extern "C" void* art_quick_check_and_alloc_array_from_code(uint32_t, void*, int32_t);
-extern "C" void* art_quick_check_and_alloc_array_from_code_with_access_check(uint32_t, void*, int32_t);
+extern "C" void* art_quick_alloc_array(uint32_t, void*, int32_t);
+extern "C" void* art_quick_alloc_array_with_access_check(uint32_t, void*, int32_t);
+extern "C" void* art_quick_alloc_object(uint32_t type_idx, void* method);
+extern "C" void* art_quick_alloc_object_with_access_check(uint32_t type_idx, void* method);
+extern "C" void* art_quick_check_and_alloc_array(uint32_t, void*, int32_t);
+extern "C" void* art_quick_check_and_alloc_array_with_access_check(uint32_t, void*, int32_t);
 
 // Cast entrypoints.
 extern "C" uint32_t artIsAssignableFromCode(const mirror::Class* klass,
                                             const mirror::Class* ref_class);
-extern "C" void art_quick_can_put_array_element_from_code(void*, void*);
-extern "C" void art_quick_check_cast_from_code(void*, void*);
+extern "C" void art_quick_can_put_array_element(void*, void*);
+extern "C" void art_quick_check_cast(void*, void*);
 
 // DexCache entrypoints.
-extern "C" void* art_quick_initialize_static_storage_from_code(uint32_t, void*);
-extern "C" void* art_quick_initialize_type_from_code(uint32_t, void*);
-extern "C" void* art_quick_initialize_type_and_verify_access_from_code(uint32_t, void*);
-extern "C" void* art_quick_resolve_string_from_code(void*, uint32_t);
+extern "C" void* art_quick_initialize_static_storage(uint32_t, void*);
+extern "C" void* art_quick_initialize_type(uint32_t, void*);
+extern "C" void* art_quick_initialize_type_and_verify_access(uint32_t, void*);
+extern "C" void* art_quick_resolve_string(void*, uint32_t);
 
 // Exception entrypoints.
 extern "C" void* GetAndClearException(Thread*);
 
 // Field entrypoints.
-extern "C" int art_quick_set32_instance_from_code(uint32_t, void*, int32_t);
-extern "C" int art_quick_set32_static_from_code(uint32_t, int32_t);
-extern "C" int art_quick_set64_instance_from_code(uint32_t, void*, int64_t);
-extern "C" int art_quick_set64_static_from_code(uint32_t, int64_t);
-extern "C" int art_quick_set_obj_instance_from_code(uint32_t, void*, void*);
-extern "C" int art_quick_set_obj_static_from_code(uint32_t, void*);
-extern "C" int32_t art_quick_get32_instance_from_code(uint32_t, void*);
-extern "C" int32_t art_quick_get32_static_from_code(uint32_t);
-extern "C" int64_t art_quick_get64_instance_from_code(uint32_t, void*);
-extern "C" int64_t art_quick_get64_static_from_code(uint32_t);
-extern "C" void* art_quick_get_obj_instance_from_code(uint32_t, void*);
-extern "C" void* art_quick_get_obj_static_from_code(uint32_t);
+extern "C" int art_quick_set32_instance(uint32_t, void*, int32_t);
+extern "C" int art_quick_set32_static(uint32_t, int32_t);
+extern "C" int art_quick_set64_instance(uint32_t, void*, int64_t);
+extern "C" int art_quick_set64_static(uint32_t, int64_t);
+extern "C" int art_quick_set_obj_instance(uint32_t, void*, void*);
+extern "C" int art_quick_set_obj_static(uint32_t, void*);
+extern "C" int32_t art_quick_get32_instance(uint32_t, void*);
+extern "C" int32_t art_quick_get32_static(uint32_t);
+extern "C" int64_t art_quick_get64_instance(uint32_t, void*);
+extern "C" int64_t art_quick_get64_static(uint32_t);
+extern "C" void* art_quick_get_obj_instance(uint32_t, void*);
+extern "C" void* art_quick_get_obj_static(uint32_t);
 
 // FillArray entrypoint.
-extern "C" void art_quick_handle_fill_data_from_code(void*, void*);
+extern "C" void art_quick_handle_fill_data(void*, void*);
 
 // Lock entrypoints.
-extern "C" void art_quick_lock_object_from_code(void*);
-extern "C" void art_quick_unlock_object_from_code(void*);
+extern "C" void art_quick_lock_object(void*);
+extern "C" void art_quick_unlock_object(void*);
 
 // Math entrypoints.
 extern int32_t CmpgDouble(double a, double b);
@@ -93,26 +106,14 @@
 extern "C" uint64_t art_quick_shr_long(uint64_t, uint32_t);
 extern "C" uint64_t art_quick_ushr_long(uint64_t, uint32_t);
 
-// Interpreter entrypoints.
-extern "C" void artInterpreterToInterpreterEntry(Thread* self, MethodHelper& mh,
-                                                 const DexFile::CodeItem* code_item,
-                                                 ShadowFrame* shadow_frame, JValue* result);
-extern "C" void artInterpreterToQuickEntry(Thread* self, MethodHelper& mh,
-                                           const DexFile::CodeItem* code_item,
-                                           ShadowFrame* shadow_frame, JValue* result);
-
 // Intrinsic entrypoints.
 extern "C" int32_t __memcmp16(void*, void*, int32_t);
 extern "C" int32_t art_quick_indexof(void*, uint32_t, uint32_t, uint32_t);
 extern "C" int32_t art_quick_string_compareto(void*, void*);
 
 // Invoke entrypoints.
-extern "C" const void* artPortableResolutionTrampoline(mirror::AbstractMethod* called,
-                                                       mirror::Object* receiver,
-                                                       mirror::AbstractMethod** sp, Thread* thread);
-extern "C" const void* artQuickResolutionTrampoline(mirror::AbstractMethod* called,
-                                                    mirror::Object* receiver,
-                                                    mirror::AbstractMethod** sp, Thread* thread);
+extern "C" void art_quick_resolution_trampoline(mirror::AbstractMethod*);
+extern "C" void art_quick_to_interpreter_bridge(mirror::AbstractMethod*);
 extern "C" void art_quick_invoke_direct_trampoline_with_access_check(uint32_t, void*);
 extern "C" void art_quick_invoke_interface_trampoline(uint32_t, void*);
 extern "C" void art_quick_invoke_interface_trampoline_with_access_check(uint32_t, void*);
@@ -125,49 +126,61 @@
 extern "C" void art_quick_test_suspend();
 
 // Throw entrypoints.
-extern "C" void art_quick_deliver_exception_from_code(void*);
-extern "C" void art_quick_throw_array_bounds_from_code(int32_t index, int32_t limit);
-extern "C" void art_quick_throw_div_zero_from_code();
-extern "C" void art_quick_throw_no_such_method_from_code(int32_t method_idx);
-extern "C" void art_quick_throw_null_pointer_exception_from_code();
-extern "C" void art_quick_throw_stack_overflow_from_code(void*);
+extern "C" void art_quick_deliver_exception(void*);
+extern "C" void art_quick_throw_array_bounds(int32_t index, int32_t limit);
+extern "C" void art_quick_throw_div_zero();
+extern "C" void art_quick_throw_no_such_method(int32_t method_idx);
+extern "C" void art_quick_throw_null_pointer_exception();
+extern "C" void art_quick_throw_stack_overflow(void*);
 
-void InitEntryPoints(QuickEntryPoints* qpoints, PortableEntryPoints* ppoints) {
+void InitEntryPoints(InterpreterEntryPoints* ipoints, JniEntryPoints* jpoints,
+                     PortableEntryPoints* ppoints, QuickEntryPoints* qpoints) {
+  // Interpreter
+  ipoints->pInterpreterToInterpreterBridge = artInterpreterToInterpreterBridge;
+  ipoints->pInterpreterToCompiledCodeBridge = artInterperterToCompiledCodeBridge;
+
+  // JNI
+  jpoints->pDlsymLookup = art_jni_dlsym_lookup_stub;
+
+  // Portable
+  ppoints->pPortableResolutionTrampoline = art_portable_resolution_trampoline;
+  ppoints->pPortableToInterpreterBridge = art_portable_to_interpreter_bridge;
+
   // Alloc
-  qpoints->pAllocArrayFromCode = art_quick_alloc_array_from_code;
-  qpoints->pAllocArrayFromCodeWithAccessCheck = art_quick_alloc_array_from_code_with_access_check;
-  qpoints->pAllocObjectFromCode = art_quick_alloc_object_from_code;
-  qpoints->pAllocObjectFromCodeWithAccessCheck = art_quick_alloc_object_from_code_with_access_check;
-  qpoints->pCheckAndAllocArrayFromCode = art_quick_check_and_alloc_array_from_code;
-  qpoints->pCheckAndAllocArrayFromCodeWithAccessCheck = art_quick_check_and_alloc_array_from_code_with_access_check;
+  qpoints->pAllocArray = art_quick_alloc_array;
+  qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check;
+  qpoints->pAllocObject = art_quick_alloc_object;
+  qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check;
+  qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array;
+  qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check;
 
   // Cast
-  qpoints->pInstanceofNonTrivialFromCode = artIsAssignableFromCode;
-  qpoints->pCanPutArrayElementFromCode = art_quick_can_put_array_element_from_code;
-  qpoints->pCheckCastFromCode = art_quick_check_cast_from_code;
+  qpoints->pInstanceofNonTrivial = artIsAssignableFromCode;
+  qpoints->pCanPutArrayElement = art_quick_can_put_array_element;
+  qpoints->pCheckCast = art_quick_check_cast;
 
   // DexCache
-  qpoints->pInitializeStaticStorage = art_quick_initialize_static_storage_from_code;
-  qpoints->pInitializeTypeAndVerifyAccessFromCode = art_quick_initialize_type_and_verify_access_from_code;
-  qpoints->pInitializeTypeFromCode = art_quick_initialize_type_from_code;
-  qpoints->pResolveStringFromCode = art_quick_resolve_string_from_code;
+  qpoints->pInitializeStaticStorage = art_quick_initialize_static_storage;
+  qpoints->pInitializeTypeAndVerifyAccess = art_quick_initialize_type_and_verify_access;
+  qpoints->pInitializeType = art_quick_initialize_type;
+  qpoints->pResolveString = art_quick_resolve_string;
 
   // Field
-  qpoints->pSet32Instance = art_quick_set32_instance_from_code;
-  qpoints->pSet32Static = art_quick_set32_static_from_code;
-  qpoints->pSet64Instance = art_quick_set64_instance_from_code;
-  qpoints->pSet64Static = art_quick_set64_static_from_code;
-  qpoints->pSetObjInstance = art_quick_set_obj_instance_from_code;
-  qpoints->pSetObjStatic = art_quick_set_obj_static_from_code;
-  qpoints->pGet32Instance = art_quick_get32_instance_from_code;
-  qpoints->pGet64Instance = art_quick_get64_instance_from_code;
-  qpoints->pGetObjInstance = art_quick_get_obj_instance_from_code;
-  qpoints->pGet32Static = art_quick_get32_static_from_code;
-  qpoints->pGet64Static = art_quick_get64_static_from_code;
-  qpoints->pGetObjStatic = art_quick_get_obj_static_from_code;
+  qpoints->pSet32Instance = art_quick_set32_instance;
+  qpoints->pSet32Static = art_quick_set32_static;
+  qpoints->pSet64Instance = art_quick_set64_instance;
+  qpoints->pSet64Static = art_quick_set64_static;
+  qpoints->pSetObjInstance = art_quick_set_obj_instance;
+  qpoints->pSetObjStatic = art_quick_set_obj_static;
+  qpoints->pGet32Instance = art_quick_get32_instance;
+  qpoints->pGet64Instance = art_quick_get64_instance;
+  qpoints->pGetObjInstance = art_quick_get_obj_instance;
+  qpoints->pGet32Static = art_quick_get32_static;
+  qpoints->pGet64Static = art_quick_get64_static;
+  qpoints->pGetObjStatic = art_quick_get_obj_static;
 
   // FillArray
-  qpoints->pHandleFillArrayDataFromCode = art_quick_handle_fill_data_from_code;
+  qpoints->pHandleFillArrayData = art_quick_handle_fill_data;
 
   // JNI
   qpoints->pJniMethodStart = JniMethodStart;
@@ -178,8 +191,8 @@
   qpoints->pJniMethodEndWithReferenceSynchronized = JniMethodEndWithReferenceSynchronized;
 
   // Locks
-  qpoints->pLockObjectFromCode = art_quick_lock_object_from_code;
-  qpoints->pUnlockObjectFromCode = art_quick_unlock_object_from_code;
+  qpoints->pLockObject = art_quick_lock_object;
+  qpoints->pUnlockObject = art_quick_unlock_object;
 
   // Math
   qpoints->pCmpgDouble = CmpgDouble;
@@ -203,10 +216,6 @@
   qpoints->pShrLong = art_quick_shr_long;
   qpoints->pUshrLong = art_quick_ushr_long;
 
-  // Interpreter
-  qpoints->pInterpreterToInterpreterEntry = artInterpreterToInterpreterEntry;
-  qpoints->pInterpreterToQuickEntry = artInterpreterToQuickEntry;
-
   // Intrinsics
   qpoints->pIndexOf = art_quick_indexof;
   qpoints->pMemcmp16 = __memcmp16;
@@ -214,7 +223,8 @@
   qpoints->pMemcpy = memcpy;
 
   // Invocation
-  qpoints->pQuickResolutionTrampolineFromCode = artQuickResolutionTrampoline;
+  qpoints->pQuickResolutionTrampoline = art_quick_resolution_trampoline;
+  qpoints->pQuickToInterpreterBridge = art_quick_to_interpreter_bridge;
   qpoints->pInvokeDirectTrampolineWithAccessCheck = art_quick_invoke_direct_trampoline_with_access_check;
   qpoints->pInvokeInterfaceTrampoline = art_quick_invoke_interface_trampoline;
   qpoints->pInvokeInterfaceTrampolineWithAccessCheck = art_quick_invoke_interface_trampoline_with_access_check;
@@ -223,19 +233,16 @@
   qpoints->pInvokeVirtualTrampolineWithAccessCheck = art_quick_invoke_virtual_trampoline_with_access_check;
 
   // Thread
-  qpoints->pCheckSuspendFromCode = CheckSuspendFromCode;
-  qpoints->pTestSuspendFromCode = art_quick_test_suspend;
+  qpoints->pCheckSuspend = CheckSuspendFromCode;
+  qpoints->pTestSuspend = art_quick_test_suspend;
 
   // Throws
-  qpoints->pDeliverException = art_quick_deliver_exception_from_code;
-  qpoints->pThrowArrayBoundsFromCode = art_quick_throw_array_bounds_from_code;
-  qpoints->pThrowDivZeroFromCode = art_quick_throw_div_zero_from_code;
-  qpoints->pThrowNoSuchMethodFromCode = art_quick_throw_no_such_method_from_code;
-  qpoints->pThrowNullPointerFromCode = art_quick_throw_null_pointer_exception_from_code;
-  qpoints->pThrowStackOverflowFromCode = art_quick_throw_stack_overflow_from_code;
-
-  // Portable
-  ppoints->pPortableResolutionTrampolineFromCode = artPortableResolutionTrampoline;
+  qpoints->pDeliverException = art_quick_deliver_exception;
+  qpoints->pThrowArrayBounds = art_quick_throw_array_bounds;
+  qpoints->pThrowDivZero = art_quick_throw_div_zero;
+  qpoints->pThrowNoSuchMethod = art_quick_throw_no_such_method;
+  qpoints->pThrowNullPointer = art_quick_throw_null_pointer_exception;
+  qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
 };
 
 }  // namespace art
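
For reference, the pattern the InitEntryPoints function above follows can be reduced to a small self-contained sketch: each table is a struct of function pointers, filled in once at startup so nothing needs patching afterwards. The member list and stub bodies below are trimmed-down stand-ins, not the real ART declarations.

    #include <cstdio>

    // Each table is just a block of function pointers that the runtime and
    // compiled code read; filling them once avoids fix-up later.
    struct InterpreterEntryPoints {
      void (*pInterpreterToInterpreterBridge)();
      void (*pInterpreterToCompiledCodeBridge)();
    };
    struct JniEntryPoints {
      void (*pDlsymLookup)();
    };
    struct QuickEntryPoints {
      void* (*pAllocObject)(unsigned type_idx, void* method);
      void (*pCheckCast)(void* a, void* b);
    };

    // Stand-in implementations so the sketch links and runs.
    static void InterpreterToInterpreterBridge() { std::puts("interpreter -> interpreter"); }
    static void InterpreterToCompiledCodeBridge() { std::puts("interpreter -> compiled code"); }
    static void DlsymLookup() { std::puts("JNI dlsym lookup"); }
    static void* AllocObject(unsigned, void*) { return nullptr; }
    static void CheckCast(void*, void*) {}

    // Same shape as the InitEntryPoints functions in this change: assign every
    // slot exactly once, grouped by table.
    static void InitEntryPoints(InterpreterEntryPoints* ipoints, JniEntryPoints* jpoints,
                                QuickEntryPoints* qpoints) {
      // Interpreter
      ipoints->pInterpreterToInterpreterBridge = InterpreterToInterpreterBridge;
      ipoints->pInterpreterToCompiledCodeBridge = InterpreterToCompiledCodeBridge;
      // JNI
      jpoints->pDlsymLookup = DlsymLookup;
      // Quick (two representative slots)
      qpoints->pAllocObject = AllocObject;
      qpoints->pCheckCast = CheckCast;
    }

    int main() {
      InterpreterEntryPoints ipoints;
      JniEntryPoints jpoints;
      QuickEntryPoints qpoints;
      InitEntryPoints(&ipoints, &jpoints, &qpoints);
      ipoints.pInterpreterToCompiledCodeBridge();  // dispatch through the table
      return 0;
    }
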
diff --git a/runtime/arch/arm/jni_entrypoints_arm.S b/runtime/arch/arm/jni_entrypoints_arm.S
index 0a0d06a..322c40b 100644
--- a/runtime/arch/arm/jni_entrypoints_arm.S
+++ b/runtime/arch/arm/jni_entrypoints_arm.S
@@ -28,8 +28,7 @@
     sub    sp, #12                        @ pad stack pointer to align frame
     .pad #12
     .cfi_adjust_cfa_offset 12
-    mov    r0, r9                         @ pass Thread::Current
-    blx    artFindNativeMethod            @ (Thread*)
+    blx    artFindNativeMethod
     mov    r12, r0                        @ save result in r12
     add    sp, #12                        @ restore stack pointer
     .cfi_adjust_cfa_offset -12
@@ -44,7 +43,7 @@
      * Entry point of native methods when JNI bug compatibility is enabled.
      */
     .extern artWorkAroundAppJniBugs
-ENTRY art_quick_work_around_app_jni_bugs
+ENTRY art_work_around_app_jni_bugs
     @ save registers that may contain arguments and LR that will be crushed by a call
     push {r0-r3, lr}
     .save {r0-r3, lr}
@@ -62,4 +61,4 @@
     pop {r0-r3, lr}  @ restore possibly modified argument registers
     .cfi_adjust_cfa_offset -16
     bx  r12          @ tail call into JNI routine
-END art_quick_work_around_app_jni_bugs
+END art_work_around_app_jni_bugs
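
The dlsym lookup stub in this file funnels an unregistered native method into artFindNativeMethod, which resolves the implementation by symbol name. A rough sketch of that name-based lookup using dlsym against the already-loaded images follows; the mangled name is only an illustration, the real code derives it from the method being invoked.

    #include <dlfcn.h>
    #include <cstdio>

    using NativeFn = void (*)();

    // Look a native implementation up by its (JNI-style) symbol name.
    static NativeFn FindNativeMethod(const char* mangled_name) {
      void* self = dlopen(nullptr, RTLD_LAZY);  // handle to the running image; never closed in this sketch
      void* sym = (self != nullptr) ? dlsym(self, mangled_name) : nullptr;
      return reinterpret_cast<NativeFn>(sym);
    }

    int main() {
      // Example name only; a real lookup builds this from the Java method.
      NativeFn fn = FindNativeMethod("Java_com_example_Foo_bar");
      if (fn == nullptr) {
        std::puts("not found: the runtime would raise an UnsatisfiedLinkError");
      } else {
        fn();  // the stub instead tail-calls into the resolved code
      }
      return 0;
    }
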
diff --git a/runtime/arch/arm/portable_entrypoints_arm.S b/runtime/arch/arm/portable_entrypoints_arm.S
index 4cc6654..9b1014a 100644
--- a/runtime/arch/arm/portable_entrypoints_arm.S
+++ b/runtime/arch/arm/portable_entrypoints_arm.S
@@ -94,3 +94,6 @@
     .cfi_adjust_cfa_offset -48
     bx      lr                     @ return
 END art_portable_proxy_invoke_handler
+
+UNIMPLEMENTED art_portable_resolution_trampoline
+UNIMPLEMENTED art_portable_to_interpreter_bridge
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index 9b8d238..e23f5a8 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -157,33 +157,33 @@
      * Called by managed code, saves callee saves and then calls artThrowException
      * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
      */
-ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception_from_code, artDeliverExceptionFromCode
+ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode
 
     /*
      * Called by managed code to create and deliver a NullPointerException.
      */
-NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception_from_code, artThrowNullPointerExceptionFromCode
+NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode
 
     /*
      * Called by managed code to create and deliver an ArithmeticException.
      */
-NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero_from_code, artThrowDivZeroFromCode
+NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode
 
     /*
      * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
      * index, arg2 holds limit.
      */
-TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds_from_code, artThrowArrayBoundsFromCode
+TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode
 
     /*
      * Called by managed code to create and deliver a StackOverflowError.
      */
-NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow_from_code, artThrowStackOverflowFromCode
+NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode
 
     /*
      * Called by managed code to create and deliver a NoSuchMethodError.
      */
-ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method_from_code, artThrowNoSuchMethodFromCode
+ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode
 
     /*
      * All generated callsites for interface invokes and invocation slow paths will load arguments
@@ -294,7 +294,7 @@
      * failure.
      */
     .extern artHandleFillArrayDataFromCode
-ENTRY art_quick_handle_fill_data_from_code
+ENTRY art_quick_handle_fill_data
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case exception allocation triggers GC
     mov    r2, r9                          @ pass Thread::Current
     mov    r3, sp                          @ pass SP
@@ -303,25 +303,25 @@
     cmp    r0, #0                          @ success?
     bxeq   lr                              @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_handle_fill_data_from_code
+END art_quick_handle_fill_data
 
     /*
      * Entry from managed code that calls artLockObjectFromCode, may block for GC.
      */
     .extern artLockObjectFromCode
-ENTRY art_quick_lock_object_from_code
+ENTRY art_quick_lock_object
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case we block
     mov    r1, r9                     @ pass Thread::Current
     mov    r2, sp                     @ pass SP
     bl     artLockObjectFromCode      @ (Object* obj, Thread*, SP)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
-END art_quick_lock_object_from_code
+END art_quick_lock_object
 
     /*
      * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
      */
     .extern artUnlockObjectFromCode
-ENTRY art_quick_unlock_object_from_code
+ENTRY art_quick_unlock_object
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case exception allocation triggers GC
     mov    r1, r9                   @ pass Thread::Current
     mov    r2, sp                   @ pass SP
@@ -330,13 +330,13 @@
     cmp    r0, #0                   @ success?
     bxeq   lr                       @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_unlock_object_from_code
+END art_quick_unlock_object
 
     /*
      * Entry from managed code that calls artCheckCastFromCode and delivers exception on failure.
      */
     .extern artCheckCastFromCode
-ENTRY art_quick_check_cast_from_code
+ENTRY art_quick_check_cast
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME    @ save callee saves in case exception allocation triggers GC
     mov    r2, r9                       @ pass Thread::Current
     mov    r3, sp                       @ pass SP
@@ -345,14 +345,14 @@
     cmp    r0, #0                       @ success?
     bxeq   lr                           @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_check_cast_from_code
+END art_quick_check_cast
 
     /*
      * Entry from managed code that calls artCanPutArrayElementFromCode and delivers exception on
      * failure.
      */
     .extern artCanPutArrayElementFromCode
-ENTRY art_quick_can_put_array_element_from_code
+ENTRY art_quick_can_put_array_element
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME    @ save callee saves in case exception allocation triggers GC
     mov    r2, r9                         @ pass Thread::Current
     mov    r3, sp                         @ pass SP
@@ -361,7 +361,7 @@
     cmp    r0, #0                         @ success?
     bxeq   lr                             @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_can_put_array_element_from_code
+END art_quick_can_put_array_element
 
     /*
      * Entry from managed code when uninitialized static storage, this stub will run the class
@@ -369,7 +369,7 @@
      * returned.
      */
     .extern artInitializeStaticStorageFromCode
-ENTRY art_quick_initialize_static_storage_from_code
+ENTRY art_quick_initialize_static_storage
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME           @ save callee saves in case of GC
     mov    r2, r9                              @ pass Thread::Current
     mov    r3, sp                              @ pass SP
@@ -379,13 +379,13 @@
     cmp    r0, #0                              @ success if result is non-null
     bxne   lr                                  @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_initialize_static_storage_from_code
+END art_quick_initialize_static_storage
 
     /*
      * Entry from managed code when dex cache misses for a type_idx
      */
     .extern artInitializeTypeFromCode
-ENTRY art_quick_initialize_type_from_code
+ENTRY art_quick_initialize_type
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME           @ save callee saves in case of GC
     mov    r2, r9                              @ pass Thread::Current
     mov    r3, sp                              @ pass SP
@@ -395,14 +395,14 @@
     cmp    r0, #0                              @ success if result is non-null
     bxne   lr                                  @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_initialize_type_from_code
+END art_quick_initialize_type
 
     /*
      * Entry from managed code when type_idx needs to be checked for access and dex cache may also
      * miss.
      */
     .extern artInitializeTypeAndVerifyAccessFromCode
-ENTRY art_quick_initialize_type_and_verify_access_from_code
+ENTRY art_quick_initialize_type_and_verify_access
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME           @ save callee saves in case of GC
     mov    r2, r9                              @ pass Thread::Current
     mov    r3, sp                              @ pass SP
@@ -412,13 +412,13 @@
     cmp    r0, #0                              @ success if result is non-null
     bxne   lr                                  @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_initialize_type_and_verify_access_from_code
+END art_quick_initialize_type_and_verify_access
 
     /*
      * Called by managed code to resolve a static field and load a 32-bit primitive value.
      */
     .extern artGet32StaticFromCode
-ENTRY art_quick_get32_static_from_code
+ENTRY art_quick_get32_static
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
     ldr    r1, [sp, #32]                 @ pass referrer
     mov    r2, r9                        @ pass Thread::Current
@@ -429,13 +429,13 @@
     cmp    r12, #0                       @ success if no exception is pending
     bxeq   lr                            @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_get32_static_from_code
+END art_quick_get32_static
 
     /*
      * Called by managed code to resolve a static field and load a 64-bit primitive value.
      */
     .extern artGet64StaticFromCode
-ENTRY art_quick_get64_static_from_code
+ENTRY art_quick_get64_static
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
     ldr    r1, [sp, #32]                 @ pass referrer
     mov    r2, r9                        @ pass Thread::Current
@@ -446,13 +446,13 @@
     cmp    r12, #0                       @ success if no exception is pending
     bxeq    lr                           @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_get64_static_from_code
+END art_quick_get64_static
 
     /*
      * Called by managed code to resolve a static field and load an object reference.
      */
     .extern artGetObjStaticFromCode
-ENTRY art_quick_get_obj_static_from_code
+ENTRY art_quick_get_obj_static
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
     ldr    r1, [sp, #32]                 @ pass referrer
     mov    r2, r9                        @ pass Thread::Current
@@ -463,13 +463,13 @@
     cmp    r12, #0                       @ success if no exception is pending
     bxeq   lr                            @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_get_obj_static_from_code
+END art_quick_get_obj_static
 
     /*
      * Called by managed code to resolve an instance field and load a 32-bit primitive value.
      */
     .extern artGet32InstanceFromCode
-ENTRY art_quick_get32_instance_from_code
+ENTRY art_quick_get32_instance
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
     ldr    r2, [sp, #32]                 @ pass referrer
     mov    r3, r9                        @ pass Thread::Current
@@ -482,13 +482,13 @@
     cmp    r12, #0                       @ success if no exception is pending
     bxeq   lr                            @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_get32_instance_from_code
+END art_quick_get32_instance
 
     /*
      * Called by managed code to resolve an instance field and load a 64-bit primitive value.
      */
     .extern artGet64InstanceFromCode
-ENTRY art_quick_get64_instance_from_code
+ENTRY art_quick_get64_instance
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
     ldr    r2, [sp, #32]                 @ pass referrer
     mov    r3, r9                        @ pass Thread::Current
@@ -504,13 +504,13 @@
     cmp    r12, #0                       @ success if no exception is pending
     bxeq    lr                           @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_get64_instance_from_code
+END art_quick_get64_instance
 
     /*
      * Called by managed code to resolve an instance field and load an object reference.
      */
     .extern artGetObjInstanceFromCode
-ENTRY art_quick_get_obj_instance_from_code
+ENTRY art_quick_get_obj_instance
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
     ldr    r2, [sp, #32]                 @ pass referrer
     mov    r3, r9                        @ pass Thread::Current
@@ -526,13 +526,13 @@
     cmp    r12, #0                       @ success if no exception is pending
     bxeq   lr                            @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_get_obj_instance_from_code
+END art_quick_get_obj_instance
 
     /*
      * Called by managed code to resolve a static field and store a 32-bit primitive value.
      */
     .extern artSet32StaticFromCode
-ENTRY art_quick_set32_static_from_code
+ENTRY art_quick_set32_static
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
     ldr    r2, [sp, #32]                 @ pass referrer
     mov    r3, r9                        @ pass Thread::Current
@@ -547,14 +547,14 @@
     cmp    r0, #0                        @ success if result is 0
     bxeq   lr                            @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_set32_static_from_code
+END art_quick_set32_static
 
     /*
      * Called by managed code to resolve a static field and store a 64-bit primitive value.
      * On entry r0 holds field index, r1:r2 hold new_val
      */
     .extern artSet64StaticFromCode
-ENTRY art_quick_set64_static_from_code
+ENTRY art_quick_set64_static
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
     mov    r3, r2                        @ pass one half of wide argument
     mov    r2, r1                        @ pass other half of wide argument
@@ -573,13 +573,13 @@
     cmp    r0, #0                        @ success if result is 0
     bxeq   lr                            @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_set64_static_from_code
+END art_quick_set64_static
 
     /*
      * Called by managed code to resolve a static field and store an object reference.
      */
     .extern artSetObjStaticFromCode
-ENTRY art_quick_set_obj_static_from_code
+ENTRY art_quick_set_obj_static
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
     ldr    r2, [sp, #32]                 @ pass referrer
     mov    r3, r9                        @ pass Thread::Current
@@ -594,13 +594,13 @@
     cmp    r0, #0                        @ success if result is 0
     bxeq   lr                            @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_set_obj_static_from_code
+END art_quick_set_obj_static
 
     /*
      * Called by managed code to resolve an instance field and store a 32-bit primitive value.
      */
     .extern artSet32InstanceFromCode
-ENTRY art_quick_set32_instance_from_code
+ENTRY art_quick_set32_instance
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
     ldr    r3, [sp, #32]                 @ pass referrer
     mov    r12, sp                       @ save SP
@@ -619,13 +619,13 @@
     cmp    r0, #0                        @ success if result is 0
     bxeq   lr                            @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_set32_instance_from_code
+END art_quick_set32_instance
 
     /*
      * Called by managed code to resolve an instance field and store a 64-bit primitive value.
      */
     .extern artSet32InstanceFromCode
-ENTRY art_quick_set64_instance_from_code
+ENTRY art_quick_set64_instance
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
     mov    r12, sp                       @ save SP
     sub    sp, #8                        @ grow frame for alignment with stack args
@@ -642,13 +642,13 @@
     cmp    r0, #0                        @ success if result is 0
     bxeq   lr                            @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_set64_instance_from_code
+END art_quick_set64_instance
 
     /*
      * Called by managed code to resolve an instance field and store an object reference.
      */
     .extern artSetObjInstanceFromCode
-ENTRY art_quick_set_obj_instance_from_code
+ENTRY art_quick_set_obj_instance
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     @ save callee saves in case of GC
     ldr    r3, [sp, #32]                 @ pass referrer
     mov    r12, sp                       @ save SP
@@ -666,7 +666,7 @@
     cmp    r0, #0                        @ success if result is 0
     bxeq   lr                            @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_set_obj_instance_from_code
+END art_quick_set_obj_instance
 
     /*
      * Entry from managed code to resolve a string, this stub will allocate a String and deliver an
@@ -675,7 +675,7 @@
      * performed.
      */
     .extern artResolveStringFromCode
-ENTRY art_quick_resolve_string_from_code
+ENTRY art_quick_resolve_string
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
     mov    r2, r9                     @ pass Thread::Current
     mov    r3, sp                     @ pass SP
@@ -685,13 +685,13 @@
     cmp    r0, #0                     @ success if result is non-null
     bxne   lr                         @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_resolve_string_from_code
+END art_quick_resolve_string
 
     /*
      * Called by managed code to allocate an object
      */
     .extern artAllocObjectFromCode
-ENTRY art_quick_alloc_object_from_code
+ENTRY art_quick_alloc_object
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
     mov    r2, r9                     @ pass Thread::Current
     mov    r3, sp                     @ pass SP
@@ -700,14 +700,14 @@
     cmp    r0, #0                     @ success if result is non-null
     bxne   lr                         @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_alloc_object_from_code
+END art_quick_alloc_object
 
     /*
      * Called by managed code to allocate an object when the caller doesn't know whether it has
      * access to the created type.
      */
     .extern artAllocObjectFromCodeWithAccessCheck
-ENTRY art_quick_alloc_object_from_code_with_access_check
+ENTRY art_quick_alloc_object_with_access_check
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
     mov    r2, r9                     @ pass Thread::Current
     mov    r3, sp                     @ pass SP
@@ -716,13 +716,13 @@
     cmp    r0, #0                     @ success if result is non-null
     bxne   lr                         @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_alloc_object_from_code_with_access_check
+END art_quick_alloc_object_with_access_check
 
     /*
      * Called by managed code to allocate an array.
      */
     .extern artAllocArrayFromCode
-ENTRY art_quick_alloc_array_from_code
+ENTRY art_quick_alloc_array
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
     mov    r3, r9                     @ pass Thread::Current
     mov    r12, sp
@@ -737,14 +737,14 @@
     cmp    r0, #0                     @ success if result is non-null
     bxne   lr                         @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_alloc_array_from_code
+END art_quick_alloc_array
 
     /*
      * Called by managed code to allocate an array when the caller doesn't know whether it has
      * access to the created type.
      */
     .extern artAllocArrayFromCodeWithAccessCheck
-ENTRY art_quick_alloc_array_from_code_with_access_check
+ENTRY art_quick_alloc_array_with_access_check
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
     mov    r3, r9                     @ pass Thread::Current
     mov    r12, sp
@@ -759,13 +759,13 @@
     cmp    r0, #0                     @ success if result is non-null
     bxne   lr                         @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_alloc_array_from_code_with_access_check
+END art_quick_alloc_array_with_access_check
 
     /*
      * Called by managed code to allocate an array in a special case for FILLED_NEW_ARRAY.
      */
     .extern artCheckAndAllocArrayFromCode
-ENTRY art_quick_check_and_alloc_array_from_code
+ENTRY art_quick_check_and_alloc_array
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
     mov    r3, r9                     @ pass Thread::Current
     mov    r12, sp
@@ -780,13 +780,13 @@
     cmp    r0, #0                     @ success if result is non-null
     bxne   lr                         @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_check_and_alloc_array_from_code
+END art_quick_check_and_alloc_array
 
     /*
      * Called by managed code to allocate an array in a special case for FILLED_NEW_ARRAY.
      */
     .extern artCheckAndAllocArrayFromCodeWithAccessCheck
-ENTRY art_quick_check_and_alloc_array_from_code_with_access_check
+ENTRY art_quick_check_and_alloc_array_with_access_check
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  @ save callee saves in case of GC
     mov    r3, r9                     @ pass Thread::Current
     mov    r12, sp
@@ -801,7 +801,7 @@
     cmp    r0, #0                     @ success if result is non-null
     bxne   lr                         @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_check_and_alloc_array_from_code_with_access_check
+END art_quick_check_and_alloc_array_with_access_check
 
     /*
      * Called by managed code when the value in rSUSPEND has been decremented to 0.
@@ -840,13 +840,33 @@
     DELIVER_PENDING_EXCEPTION
 END art_quick_proxy_invoke_handler
 
-    .extern artInterpreterEntry
-ENTRY art_quick_interpreter_entry
+    .extern artQuickResolutionTrampoline
+ENTRY art_quick_resolution_trampoline
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
-    str     r0, [sp, #0]           @ place proxy method at bottom of frame
+    mov     r2, r9                 @ pass Thread::Current
+    mov     r3, sp                 @ pass SP
+    blx     artQuickResolutionTrampoline  @ (Method* called, receiver, Thread*, SP)
+    cmp     r0, #0                 @ is code pointer null?
+    beq     1f                     @ goto exception
+    mov     r12, r0
+    ldr  r0, [sp, #0]              @ load resolved method in r0
+    ldr  r1, [sp, #8]              @ restore non-callee save r1
+    ldrd r2, [sp, #12]             @ restore non-callee saves r2-r3
+    ldr  lr, [sp, #44]             @ restore lr
+    add  sp, #48                   @ rewind sp
+    .cfi_adjust_cfa_offset -48
+    bx      r12                    @ tail-call into actual code
+1:
+    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
+    DELIVER_PENDING_EXCEPTION
+END art_quick_resolution_trampoline
+
+    .extern artQuickToInterpreterBridge
+ENTRY art_quick_to_interpreter_bridge
+    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
     mov     r1, r9                 @ pass Thread::Current
     mov     r2, sp                 @ pass SP
-    blx     artInterpreterEntry    @ (Method* method, Thread*, SP)
+    blx     artQuickToInterpreterBridge    @ (Method* method, Thread*, SP)
     ldr     r12, [r9, #THREAD_EXCEPTION_OFFSET]  @ load Thread::Current()->exception_
     ldr     lr,  [sp, #44]         @ restore lr
     add     sp,  #48               @ pop frame
@@ -854,14 +874,14 @@
     cmp     r12, #0                @ success if no exception is pending
     bxeq    lr                     @ return on success
     DELIVER_PENDING_EXCEPTION
-END art_quick_interpreter_entry
+END art_quick_to_interpreter_bridge
 
     /*
      * Routine that intercepts method calls and returns.
      */
     .extern artInstrumentationMethodEntryFromCode
     .extern artInstrumentationMethodExitFromCode
-ENTRY art_quick_instrumentation_entry_from_code
+ENTRY art_quick_instrumentation_entry
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
     str   r0, [sp, #4]     @ preserve r0
     mov   r12, sp          @ remember sp
@@ -877,11 +897,11 @@
     mov   r12, r0        @ r12 holds reference to code
     ldr   r0, [sp, #4]   @ restore r0
     RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
-    blx   r12            @ call method with lr set to art_quick_instrumentation_exit_from_code
-END art_quick_instrumentation_entry_from_code
-    .type art_quick_instrumentation_exit_from_code, #function
-    .global art_quick_instrumentation_exit_from_code
-art_quick_instrumentation_exit_from_code:
+    blx   r12            @ call method with lr set to art_quick_instrumentation_exit
+END art_quick_instrumentation_entry
+    .type art_quick_instrumentation_exit, #function
+    .global art_quick_instrumentation_exit
+art_quick_instrumentation_exit:
     .cfi_startproc
     .fnstart
     mov   lr, #0         @ link register is to here, so clobber with 0 for later checks
@@ -910,7 +930,7 @@
     add sp, #32          @ remove callee save frame
     .cfi_adjust_cfa_offset -32
     bx    r2             @ return
-END art_quick_instrumentation_exit_from_code
+END art_quick_instrumentation_exit
 
     /*
      * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
@@ -925,25 +945,6 @@
 END art_quick_deoptimize
 
     /*
-     * Portable abstract method error stub. r0 contains method* on entry. SP unused in portable.
-     */
-    .extern artThrowAbstractMethodErrorFromCode
-ENTRY art_portable_abstract_method_error_stub
-    mov    r1, r9         @ pass Thread::Current
-    b      artThrowAbstractMethodErrorFromCode  @ (Method*, Thread*, SP)
-END art_portable_abstract_method_error_stub
-
-    /*
-     * Quick abstract method error stub. r0 contains method* on entry.
-     */
-ENTRY art_quick_abstract_method_error_stub
-    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
-    mov    r1, r9         @ pass Thread::Current
-    mov    r2, sp         @ pass SP
-    b      artThrowAbstractMethodErrorFromCode  @ (Method*, Thread*, SP)
-END art_quick_abstract_method_error_stub
-
-    /*
      * Signed 64-bit integer multiply.
      *
      * Consider WXxYZ (r1r0 x r3r2) with a long multiply:
diff --git a/runtime/arch/mips/asm_support_mips.S b/runtime/arch/mips/asm_support_mips.S
index 8a34b9d..fe932d2 100644
--- a/runtime/arch/mips/asm_support_mips.S
+++ b/runtime/arch/mips/asm_support_mips.S
@@ -38,4 +38,12 @@
     .cpload $t9
 .endm
 
+.macro UNIMPLEMENTED name
+    ENTRY \name
+    break
+    break
+    END \name
+.endm
+
+
 #endif  // ART_RUNTIME_ARCH_MIPS_ASM_SUPPORT_MIPS_S_
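
The UNIMPLEMENTED macro added above gives a not-yet-written trampoline a body of break instructions, so a stray call traps at a recognizable spot instead of running off into unrelated code. A rough C++ analog of the same idea, assuming the goal is simply to fail loudly:

    // Rough analog of the UNIMPLEMENTED macro: a placeholder entry point whose
    // only job is to trap immediately if anything ever calls it.
    [[noreturn]] static void UnimplementedTrampoline() {
      __builtin_trap();  // plays the role of the MIPS `break` instruction
    }

    int main() {
      // Only take the address, the way an entry-point table would; calling it
      // would abort the process on purpose.
      void (*entry)() = UnimplementedTrampoline;
      (void)entry;
      return 0;
    }
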
diff --git a/runtime/arch/mips/entrypoints_init_mips.cc b/runtime/arch/mips/entrypoints_init_mips.cc
index 0a62a40..a18079b 100644
--- a/runtime/arch/mips/entrypoints_init_mips.cc
+++ b/runtime/arch/mips/entrypoints_init_mips.cc
@@ -21,49 +21,61 @@
 
 namespace art {
 
+// Interpreter entrypoints.
+extern "C" void artInterpreterToInterpreterBridge(Thread* self, MethodHelper& mh,
+                                                 const DexFile::CodeItem* code_item,
+                                                 ShadowFrame* shadow_frame, JValue* result);
+extern "C" void artInterperterToCompiledCodeBridge(Thread* self, MethodHelper& mh,
+                                           const DexFile::CodeItem* code_item,
+                                           ShadowFrame* shadow_frame, JValue* result);
+
+// Portable entrypoints.
+extern "C" void art_portable_resolution_trampoline(mirror::AbstractMethod*);
+extern "C" void art_portable_to_interpreter_bridge(mirror::AbstractMethod*);
+
 // Alloc entrypoints.
-extern "C" void* art_quick_alloc_array_from_code(uint32_t, void*, int32_t);
-extern "C" void* art_quick_alloc_array_from_code_with_access_check(uint32_t, void*, int32_t);
-extern "C" void* art_quick_alloc_object_from_code(uint32_t type_idx, void* method);
-extern "C" void* art_quick_alloc_object_from_code_with_access_check(uint32_t type_idx, void* method);
-extern "C" void* art_quick_check_and_alloc_array_from_code(uint32_t, void*, int32_t);
-extern "C" void* art_quick_check_and_alloc_array_from_code_with_access_check(uint32_t, void*, int32_t);
+extern "C" void* art_quick_alloc_array(uint32_t, void*, int32_t);
+extern "C" void* art_quick_alloc_array_with_access_check(uint32_t, void*, int32_t);
+extern "C" void* art_quick_alloc_object(uint32_t type_idx, void* method);
+extern "C" void* art_quick_alloc_object_with_access_check(uint32_t type_idx, void* method);
+extern "C" void* art_quick_check_and_alloc_array(uint32_t, void*, int32_t);
+extern "C" void* art_quick_check_and_alloc_array_with_access_check(uint32_t, void*, int32_t);
 
 // Cast entrypoints.
 extern "C" uint32_t artIsAssignableFromCode(const mirror::Class* klass,
                                             const mirror::Class* ref_class);
-extern "C" void art_quick_can_put_array_element_from_code(void*, void*);
-extern "C" void art_quick_check_cast_from_code(void*, void*);
+extern "C" void art_quick_can_put_array_element(void*, void*);
+extern "C" void art_quick_check_cast(void*, void*);
 
 // DexCache entrypoints.
-extern "C" void* art_quick_initialize_static_storage_from_code(uint32_t, void*);
-extern "C" void* art_quick_initialize_type_from_code(uint32_t, void*);
-extern "C" void* art_quick_initialize_type_and_verify_access_from_code(uint32_t, void*);
-extern "C" void* art_quick_resolve_string_from_code(void*, uint32_t);
+extern "C" void* art_quick_initialize_static_storage(uint32_t, void*);
+extern "C" void* art_quick_initialize_type(uint32_t, void*);
+extern "C" void* art_quick_initialize_type_and_verify_access(uint32_t, void*);
+extern "C" void* art_quick_resolve_string(void*, uint32_t);
 
 // Exception entrypoints.
 extern "C" void* GetAndClearException(Thread*);
 
 // Field entrypoints.
-extern "C" int art_quick_set32_instance_from_code(uint32_t, void*, int32_t);
-extern "C" int art_quick_set32_static_from_code(uint32_t, int32_t);
-extern "C" int art_quick_set64_instance_from_code(uint32_t, void*, int64_t);
-extern "C" int art_quick_set64_static_from_code(uint32_t, int64_t);
-extern "C" int art_quick_set_obj_instance_from_code(uint32_t, void*, void*);
-extern "C" int art_quick_set_obj_static_from_code(uint32_t, void*);
-extern "C" int32_t art_quick_get32_instance_from_code(uint32_t, void*);
-extern "C" int32_t art_quick_get32_static_from_code(uint32_t);
-extern "C" int64_t art_quick_get64_instance_from_code(uint32_t, void*);
-extern "C" int64_t art_quick_get64_static_from_code(uint32_t);
-extern "C" void* art_quick_get_obj_instance_from_code(uint32_t, void*);
-extern "C" void* art_quick_get_obj_static_from_code(uint32_t);
+extern "C" int art_quick_set32_instance(uint32_t, void*, int32_t);
+extern "C" int art_quick_set32_static(uint32_t, int32_t);
+extern "C" int art_quick_set64_instance(uint32_t, void*, int64_t);
+extern "C" int art_quick_set64_static(uint32_t, int64_t);
+extern "C" int art_quick_set_obj_instance(uint32_t, void*, void*);
+extern "C" int art_quick_set_obj_static(uint32_t, void*);
+extern "C" int32_t art_quick_get32_instance(uint32_t, void*);
+extern "C" int32_t art_quick_get32_static(uint32_t);
+extern "C" int64_t art_quick_get64_instance(uint32_t, void*);
+extern "C" int64_t art_quick_get64_static(uint32_t);
+extern "C" void* art_quick_get_obj_instance(uint32_t, void*);
+extern "C" void* art_quick_get_obj_static(uint32_t);
 
 // FillArray entrypoint.
-extern "C" void art_quick_handle_fill_data_from_code(void*, void*);
+extern "C" void art_quick_handle_fill_data(void*, void*);
 
 // Lock entrypoints.
-extern "C" void art_quick_lock_object_from_code(void*);
-extern "C" void art_quick_unlock_object_from_code(void*);
+extern "C" void art_quick_lock_object(void*);
+extern "C" void art_quick_unlock_object(void*);
 
 // Math entrypoints.
 extern int32_t CmpgDouble(double a, double b);
@@ -95,26 +107,14 @@
 extern "C" uint64_t art_quick_shr_long(uint64_t, uint32_t);
 extern "C" uint64_t art_quick_ushr_long(uint64_t, uint32_t);
 
-// Interpreter entrypoints.
-extern "C" void artInterpreterToInterpreterEntry(Thread* self, MethodHelper& mh,
-                                                 const DexFile::CodeItem* code_item,
-                                                 ShadowFrame* shadow_frame, JValue* result);
-extern "C" void artInterpreterToQuickEntry(Thread* self, MethodHelper& mh,
-                                           const DexFile::CodeItem* code_item,
-                                           ShadowFrame* shadow_frame, JValue* result);
-
 // Intrinsic entrypoints.
 extern "C" int32_t __memcmp16(void*, void*, int32_t);
 extern "C" int32_t art_quick_indexof(void*, uint32_t, uint32_t, uint32_t);
 extern "C" int32_t art_quick_string_compareto(void*, void*);
 
 // Invoke entrypoints.
-extern "C" const void* artPortableResolutionTrampoline(mirror::AbstractMethod* called,
-                                                       mirror::Object* receiver,
-                                                       mirror::AbstractMethod** sp, Thread* thread);
-extern "C" const void* artQuickResolutionTrampoline(mirror::AbstractMethod* called,
-                                                    mirror::Object* receiver,
-                                                    mirror::AbstractMethod** sp, Thread* thread);
+extern "C" void art_quick_resolution_trampoline(mirror::AbstractMethod*);
+extern "C" void art_quick_to_interpreter_bridge(mirror::AbstractMethod*);
 extern "C" void art_quick_invoke_direct_trampoline_with_access_check(uint32_t, void*);
 extern "C" void art_quick_invoke_interface_trampoline(uint32_t, void*);
 extern "C" void art_quick_invoke_interface_trampoline_with_access_check(uint32_t, void*);
@@ -127,49 +127,61 @@
 extern "C" void art_quick_test_suspend();
 
 // Throw entrypoints.
-extern "C" void art_quick_deliver_exception_from_code(void*);
-extern "C" void art_quick_throw_array_bounds_from_code(int32_t index, int32_t limit);
-extern "C" void art_quick_throw_div_zero_from_code();
-extern "C" void art_quick_throw_no_such_method_from_code(int32_t method_idx);
-extern "C" void art_quick_throw_null_pointer_exception_from_code();
-extern "C" void art_quick_throw_stack_overflow_from_code(void*);
+extern "C" void art_quick_deliver_exception(void*);
+extern "C" void art_quick_throw_array_bounds(int32_t index, int32_t limit);
+extern "C" void art_quick_throw_div_zero();
+extern "C" void art_quick_throw_no_such_method(int32_t method_idx);
+extern "C" void art_quick_throw_null_pointer_exception();
+extern "C" void art_quick_throw_stack_overflow(void*);
 
-void InitEntryPoints(QuickEntryPoints* qpoints, PortableEntryPoints* ppoints) {
+void InitEntryPoints(InterpreterEntryPoints* ipoints, JniEntryPoints* jpoints,
+                     PortableEntryPoints* ppoints, QuickEntryPoints* qpoints) {
+  // Interpreter
+  ipoints->pInterpreterToInterpreterBridge = artInterpreterToInterpreterBridge;
+  ipoints->pInterpreterToCompiledCodeBridge = artInterperterToCompiledCodeBridge;
+
+  // JNI
+  jpoints->pDlsymLookup = art_jni_dlsym_lookup_stub;
+
+  // Portable
+  ppoints->pPortableResolutionTrampoline = art_portable_resolution_trampoline;
+  ppoints->pPortableToInterpreterBridge = art_portable_to_interpreter_bridge;
+
   // Alloc
-  qpoints->pAllocArrayFromCode = art_quick_alloc_array_from_code;
-  qpoints->pAllocArrayFromCodeWithAccessCheck = art_quick_alloc_array_from_code_with_access_check;
-  qpoints->pAllocObjectFromCode = art_quick_alloc_object_from_code;
-  qpoints->pAllocObjectFromCodeWithAccessCheck = art_quick_alloc_object_from_code_with_access_check;
-  qpoints->pCheckAndAllocArrayFromCode = art_quick_check_and_alloc_array_from_code;
-  qpoints->pCheckAndAllocArrayFromCodeWithAccessCheck = art_quick_check_and_alloc_array_from_code_with_access_check;
+  qpoints->pAllocArray = art_quick_alloc_array;
+  qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check;
+  qpoints->pAllocObject = art_quick_alloc_object;
+  qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check;
+  qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array;
+  qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check;
 
   // Cast
-  qpoints->pInstanceofNonTrivialFromCode = artIsAssignableFromCode;
-  qpoints->pCanPutArrayElementFromCode = art_quick_can_put_array_element_from_code;
-  qpoints->pCheckCastFromCode = art_quick_check_cast_from_code;
+  qpoints->pInstanceofNonTrivial = artIsAssignableFromCode;
+  qpoints->pCanPutArrayElement = art_quick_can_put_array_element;
+  qpoints->pCheckCast = art_quick_check_cast;
 
   // DexCache
-  qpoints->pInitializeStaticStorage = art_quick_initialize_static_storage_from_code;
-  qpoints->pInitializeTypeAndVerifyAccessFromCode = art_quick_initialize_type_and_verify_access_from_code;
-  qpoints->pInitializeTypeFromCode = art_quick_initialize_type_from_code;
-  qpoints->pResolveStringFromCode = art_quick_resolve_string_from_code;
+  qpoints->pInitializeStaticStorage = art_quick_initialize_static_storage;
+  qpoints->pInitializeTypeAndVerifyAccess = art_quick_initialize_type_and_verify_access;
+  qpoints->pInitializeType = art_quick_initialize_type;
+  qpoints->pResolveString = art_quick_resolve_string;
 
   // Field
-  qpoints->pSet32Instance = art_quick_set32_instance_from_code;
-  qpoints->pSet32Static = art_quick_set32_static_from_code;
-  qpoints->pSet64Instance = art_quick_set64_instance_from_code;
-  qpoints->pSet64Static = art_quick_set64_static_from_code;
-  qpoints->pSetObjInstance = art_quick_set_obj_instance_from_code;
-  qpoints->pSetObjStatic = art_quick_set_obj_static_from_code;
-  qpoints->pGet32Instance = art_quick_get32_instance_from_code;
-  qpoints->pGet64Instance = art_quick_get64_instance_from_code;
-  qpoints->pGetObjInstance = art_quick_get_obj_instance_from_code;
-  qpoints->pGet32Static = art_quick_get32_static_from_code;
-  qpoints->pGet64Static = art_quick_get64_static_from_code;
-  qpoints->pGetObjStatic = art_quick_get_obj_static_from_code;
+  qpoints->pSet32Instance = art_quick_set32_instance;
+  qpoints->pSet32Static = art_quick_set32_static;
+  qpoints->pSet64Instance = art_quick_set64_instance;
+  qpoints->pSet64Static = art_quick_set64_static;
+  qpoints->pSetObjInstance = art_quick_set_obj_instance;
+  qpoints->pSetObjStatic = art_quick_set_obj_static;
+  qpoints->pGet32Instance = art_quick_get32_instance;
+  qpoints->pGet64Instance = art_quick_get64_instance;
+  qpoints->pGetObjInstance = art_quick_get_obj_instance;
+  qpoints->pGet32Static = art_quick_get32_static;
+  qpoints->pGet64Static = art_quick_get64_static;
+  qpoints->pGetObjStatic = art_quick_get_obj_static;
 
   // FillArray
-  qpoints->pHandleFillArrayDataFromCode = art_quick_handle_fill_data_from_code;
+  qpoints->pHandleFillArrayData = art_quick_handle_fill_data;
 
   // JNI
   qpoints->pJniMethodStart = JniMethodStart;
@@ -180,8 +192,8 @@
   qpoints->pJniMethodEndWithReferenceSynchronized = JniMethodEndWithReferenceSynchronized;
 
   // Locks
-  qpoints->pLockObjectFromCode = art_quick_lock_object_from_code;
-  qpoints->pUnlockObjectFromCode = art_quick_unlock_object_from_code;
+  qpoints->pLockObject = art_quick_lock_object;
+  qpoints->pUnlockObject = art_quick_unlock_object;
 
   // Math
   qpoints->pCmpgDouble = CmpgDouble;
@@ -204,10 +216,6 @@
   qpoints->pShrLong = art_quick_shr_long;
   qpoints->pUshrLong = art_quick_ushr_long;
 
-  // Interpreter
-  qpoints->pInterpreterToInterpreterEntry = artInterpreterToInterpreterEntry;
-  qpoints->pInterpreterToQuickEntry = artInterpreterToQuickEntry;
-
   // Intrinsics
   qpoints->pIndexOf = art_quick_indexof;
   qpoints->pMemcmp16 = __memcmp16;
@@ -215,7 +223,8 @@
   qpoints->pMemcpy = memcpy;
 
   // Invocation
-  qpoints->pQuickResolutionTrampolineFromCode = artQuickResolutionTrampoline;
+  qpoints->pQuickResolutionTrampoline = art_quick_resolution_trampoline;
+  qpoints->pQuickToInterpreterBridge = art_quick_to_interpreter_bridge;
   qpoints->pInvokeDirectTrampolineWithAccessCheck = art_quick_invoke_direct_trampoline_with_access_check;
   qpoints->pInvokeInterfaceTrampoline = art_quick_invoke_interface_trampoline;
   qpoints->pInvokeInterfaceTrampolineWithAccessCheck = art_quick_invoke_interface_trampoline_with_access_check;
@@ -224,19 +233,16 @@
   qpoints->pInvokeVirtualTrampolineWithAccessCheck = art_quick_invoke_virtual_trampoline_with_access_check;
 
   // Thread
-  qpoints->pCheckSuspendFromCode = CheckSuspendFromCode;
-  qpoints->pTestSuspendFromCode = art_quick_test_suspend;
+  qpoints->pCheckSuspend = CheckSuspendFromCode;
+  qpoints->pTestSuspend = art_quick_test_suspend;
 
   // Throws
-  qpoints->pDeliverException = art_quick_deliver_exception_from_code;
-  qpoints->pThrowArrayBoundsFromCode = art_quick_throw_array_bounds_from_code;
-  qpoints->pThrowDivZeroFromCode = art_quick_throw_div_zero_from_code;
-  qpoints->pThrowNoSuchMethodFromCode = art_quick_throw_no_such_method_from_code;
-  qpoints->pThrowNullPointerFromCode = art_quick_throw_null_pointer_exception_from_code;
-  qpoints->pThrowStackOverflowFromCode = art_quick_throw_stack_overflow_from_code;
-
-  // Portable
-  ppoints->pPortableResolutionTrampolineFromCode = artPortableResolutionTrampoline;
+  qpoints->pDeliverException = art_quick_deliver_exception;
+  qpoints->pThrowArrayBounds = art_quick_throw_array_bounds;
+  qpoints->pThrowDivZero = art_quick_throw_div_zero;
+  qpoints->pThrowNoSuchMethod = art_quick_throw_no_such_method;
+  qpoints->pThrowNullPointer = art_quick_throw_null_pointer_exception;
+  qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
 };
 
 }  // namespace art
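
The quick resolution trampoline wired up here (art_quick_resolution_trampoline calling artQuickResolutionTrampoline) follows a simple shape: ask the runtime for the code pointer, deliver the pending exception if it comes back null, otherwise transfer control to the resolved code. A simplified sketch of that control flow, with stand-in types and helpers (Method, ResolveMethod, DeliverPendingException) rather than ART's:

    #include <cstdio>

    using Code = void (*)();

    struct Method {
      const char* name;
      Code code;  // resolved code pointer, or nullptr if resolution failed
    };

    // Stand-in for the runtime call: returns the code to jump to, or nullptr
    // when resolution threw and left a pending exception.
    static Code ResolveMethod(Method* called) {
      return called->code;
    }

    static void DeliverPendingException() {
      std::puts("deliver pending exception to the caller");
    }

    static void ResolutionTrampoline(Method* called) {
      Code code = ResolveMethod(called);  // blx artQuickResolutionTrampoline
      if (code == nullptr) {              // "is code pointer null?"
        DeliverPendingException();        // DELIVER_PENDING_EXCEPTION
        return;
      }
      code();                             // tail-call into the actual code
    }

    static void Hello() { std::puts("resolved code runs"); }

    int main() {
      Method ok{"hello", Hello};
      Method bad{"missing", nullptr};
      ResolutionTrampoline(&ok);
      ResolutionTrampoline(&bad);
      return 0;
    }
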
diff --git a/runtime/arch/mips/jni_entrypoints_mips.S b/runtime/arch/mips/jni_entrypoints_mips.S
index fca6d77..ad7c021 100644
--- a/runtime/arch/mips/jni_entrypoints_mips.S
+++ b/runtime/arch/mips/jni_entrypoints_mips.S
@@ -59,7 +59,7 @@
      * Entry point of native methods when JNI bug compatibility is enabled.
      */
     .extern artWorkAroundAppJniBugs
-ENTRY art_quick_work_around_app_jni_bugs
+ENTRY art_work_around_app_jni_bugs
     GENERATE_GLOBAL_POINTER
     # save registers that may contain arguments and LR that will be crushed by a call
     addiu    $sp, $sp, -32
@@ -86,4 +86,4 @@
     jr       $t9              # tail call into JNI routine
     addiu    $sp, $sp, 32
     .cfi_adjust_cfa_offset -32
-END art_quick_work_around_app_jni_bugs
+END art_work_around_app_jni_bugs
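
The work-around stub above spills the argument registers, calls artWorkAroundAppJniBugs, restores the possibly modified arguments, and tail-calls into the JNI routine through the returned pointer. The sketch below captures only that shape; the Args struct and FixUpArgs helper are assumptions for illustration, since the hunk does not show what the helper actually rewrites.

    #include <cstdio>

    struct Args {  // stands in for the spilled r0-r3 / $a0-$a3
      void* a0;
      void* a1;
    };

    using NativeFn = void (*)(void*, void*);

    static void RealNative(void* env, void* obj) {
      std::printf("native called with env=%p obj=%p\n", env, obj);
    }

    static int kCorrectedValue = 0;  // dummy stand-in for whatever the helper installs

    // Assumed helper: returns the real target and may rewrite the saved arguments.
    static NativeFn FixUpArgs(Args* args) {
      args->a0 = &kCorrectedValue;  // e.g. replace a stale first argument
      return RealNative;
    }

    static void WorkAroundStub(Args args) {
      NativeFn target = FixUpArgs(&args);  // helper runs; args may have changed
      target(args.a0, args.a1);            // tail call into the JNI routine
    }

    int main() {
      WorkAroundStub(Args{nullptr, nullptr});
      return 0;
    }
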
diff --git a/runtime/arch/mips/portable_entrypoints_mips.S b/runtime/arch/mips/portable_entrypoints_mips.S
index e7a9b0f..9208a8a 100644
--- a/runtime/arch/mips/portable_entrypoints_mips.S
+++ b/runtime/arch/mips/portable_entrypoints_mips.S
@@ -61,13 +61,5 @@
     .cfi_adjust_cfa_offset -64
 END art_portable_proxy_invoke_handler
 
-    /*
-     * Portable abstract method error stub. $a0 contains method* on entry. SP unused in portable.
-     */
-    .extern artThrowAbstractMethodErrorFromCode
-ENTRY art_portable_abstract_method_error_stub
-    GENERATE_GLOBAL_POINTER
-    la       $t9, artThrowAbstractMethodErrorFromCode
-    jr       $t9            # (Method*, Thread*, SP)
-    move     $a1, $s1       # pass Thread::Current
-END art_portable_abstract_method_error_stub
+UNIMPLEMENTED art_portable_resolution_trampoline
+UNIMPLEMENTED art_portable_to_interpreter_bridge
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index d32a2b4..004fda6 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -143,7 +143,7 @@
     lw     $a1, 4($sp)            # restore non-callee save $a1
     lw     $a2, 8($sp)            # restore non-callee save $a2
     lw     $a3, 12($sp)           # restore non-callee save $a3
-    addiu  $sp, $sp, 64           # strip frame
+    addiu  $sp, $sp, 64           # pop frame
     .cfi_adjust_cfa_offset -64
 .endm
 
@@ -268,79 +268,79 @@
      * the bottom of the stack. artDeliverExceptionFromCode will place the callee save Method* at
      * the bottom of the thread. On entry r0 holds Throwable*
      */
-ENTRY art_quick_deliver_exception_from_code
+ENTRY art_quick_deliver_exception
     GENERATE_GLOBAL_POINTER
     SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
     move $a1, rSELF                 # pass Thread::Current
     la   $t9, artDeliverExceptionFromCode
     jr   $t9                        # artDeliverExceptionFromCode(Throwable*, Thread*, $sp)
     move $a2, $sp                   # pass $sp
-END art_quick_deliver_exception_from_code
+END art_quick_deliver_exception
 
     /*
      * Called by managed code to create and deliver a NullPointerException
      */
     .extern artThrowNullPointerExceptionFromCode
-ENTRY art_quick_throw_null_pointer_exception_from_code
+ENTRY art_quick_throw_null_pointer_exception
     GENERATE_GLOBAL_POINTER
     SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
     move $a0, rSELF                 # pass Thread::Current
     la   $t9, artThrowNullPointerExceptionFromCode
     jr   $t9                        # artThrowNullPointerExceptionFromCode(Thread*, $sp)
     move $a1, $sp                   # pass $sp
-END art_quick_throw_null_pointer_exception_from_code
+END art_quick_throw_null_pointer_exception
 
     /*
      * Called by managed code to create and deliver an ArithmeticException
      */
     .extern artThrowDivZeroFromCode
-ENTRY art_quick_throw_div_zero_from_code
+ENTRY art_quick_throw_div_zero
     GENERATE_GLOBAL_POINTER
     SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
     move $a0, rSELF                 # pass Thread::Current
     la   $t9, artThrowDivZeroFromCode
     jr   $t9                        # artThrowDivZeroFromCode(Thread*, $sp)
     move $a1, $sp                   # pass $sp
-END art_quick_throw_div_zero_from_code
+END art_quick_throw_div_zero
 
     /*
      * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException
      */
     .extern artThrowArrayBoundsFromCode
-ENTRY art_quick_throw_array_bounds_from_code
+ENTRY art_quick_throw_array_bounds
     GENERATE_GLOBAL_POINTER
     SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
     move $a2, rSELF                 # pass Thread::Current
     la   $t9, artThrowArrayBoundsFromCode
     jr   $t9                        # artThrowArrayBoundsFromCode(index, limit, Thread*, $sp)
     move $a3, $sp                   # pass $sp
-END art_quick_throw_array_bounds_from_code
+END art_quick_throw_array_bounds
 
     /*
      * Called by managed code to create and deliver a StackOverflowError.
      */
     .extern artThrowStackOverflowFromCode
-ENTRY art_quick_throw_stack_overflow_from_code
+ENTRY art_quick_throw_stack_overflow
     GENERATE_GLOBAL_POINTER
     SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
     move $a0, rSELF                 # pass Thread::Current
     la   $t9, artThrowStackOverflowFromCode
     jr   $t9                        # artThrowStackOverflowFromCode(Thread*, $sp)
     move $a1, $sp                   # pass $sp
-END art_quick_throw_stack_overflow_from_code
+END art_quick_throw_stack_overflow
 
     /*
      * Called by managed code to create and deliver a NoSuchMethodError.
      */
     .extern artThrowNoSuchMethodFromCode
-ENTRY art_quick_throw_no_such_method_from_code
+ENTRY art_quick_throw_no_such_method
     GENERATE_GLOBAL_POINTER
     SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
     move $a1, rSELF                 # pass Thread::Current
     la   $t9, artThrowNoSuchMethodFromCode
     jr   $t9                        # artThrowNoSuchMethodFromCode(method_idx, Thread*, $sp)
     move $a2, $sp                   # pass $sp
-END art_quick_throw_no_such_method_from_code
+END art_quick_throw_no_such_method
 
     /*
      * All generated callsites for interface invokes and invocation slow paths will load arguments
@@ -466,67 +466,67 @@
      * failure.
      */
     .extern artHandleFillArrayDataFromCode
-ENTRY art_quick_handle_fill_data_from_code
+ENTRY art_quick_handle_fill_data
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  # save callee saves in case exception allocation triggers GC
     move    $a2, rSELF                         # pass Thread::Current
     jal     artHandleFillArrayDataFromCode     # (Array*, const DexFile::Payload*, Thread*, $sp)
     move    $a3, $sp                           # pass $sp
     RETURN_IF_ZERO
-END art_quick_handle_fill_data_from_code
+END art_quick_handle_fill_data
 
     /*
      * Entry from managed code that calls artLockObjectFromCode, may block for GC.
      */
     .extern artLockObjectFromCode
-ENTRY art_quick_lock_object_from_code
+ENTRY art_quick_lock_object
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME      # save callee saves in case we block
     move    $a1, rSELF                    # pass Thread::Current
     jal     artLockObjectFromCode         # (Object* obj, Thread*, $sp)
     move    $a2, $sp                      # pass $sp
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
-END art_quick_lock_object_from_code
+END art_quick_lock_object
 
     /*
      * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
      */
     .extern artUnlockObjectFromCode
-ENTRY art_quick_unlock_object_from_code
+ENTRY art_quick_unlock_object
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  # save callee saves in case exception allocation triggers GC
     move    $a1, rSELF                # pass Thread::Current
     jal     artUnlockObjectFromCode   # (Object* obj, Thread*, $sp)
     move    $a2, $sp                  # pass $sp
     RETURN_IF_ZERO
-END art_quick_unlock_object_from_code
+END art_quick_unlock_object
 
     /*
      * Entry from managed code that calls artCheckCastFromCode and delivers exception on failure.
      */
     .extern artCheckCastFromCode
-ENTRY art_quick_check_cast_from_code
+ENTRY art_quick_check_cast
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  # save callee saves in case exception allocation triggers GC
     move    $a2, rSELF                # pass Thread::Current
     jal     artCheckCastFromCode      # (Class* a, Class* b, Thread*, $sp)
     move    $a3, $sp                  # pass $sp
     RETURN_IF_ZERO
-END art_quick_check_cast_from_code
+END art_quick_check_cast
 
     /*
      * Entry from managed code that calls artCanPutArrayElementFromCode and delivers exception on
      * failure.
      */
     .extern artCanPutArrayElementFromCode
-ENTRY art_quick_can_put_array_element_from_code
+ENTRY art_quick_can_put_array_element
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME    # save callee saves in case exception allocation triggers GC
     move    $a2, rSELF                     # pass Thread::Current
     jal     artCanPutArrayElementFromCode  # (Object* element, Class* array_class, Thread*, $sp)
     move    $a3, $sp                       # pass $sp
     RETURN_IF_ZERO
-END art_quick_can_put_array_element_from_code
+END art_quick_can_put_array_element
 
     /*
      * Entry from managed code when uninitialized static storage, this stub will run the class
@@ -534,7 +534,7 @@
      * returned.
      */
     .extern artInitializeStaticStorageFromCode
-ENTRY art_quick_initialize_static_storage_from_code
+ENTRY art_quick_initialize_static_storage
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME            # save callee saves in case of GC
     move    $a2, rSELF                          # pass Thread::Current
@@ -542,13 +542,13 @@
     jal     artInitializeStaticStorageFromCode
     move    $a3, $sp                            # pass $sp
     RETURN_IF_NONZERO
-END art_quick_initialize_static_storage_from_code
+END art_quick_initialize_static_storage
 
     /*
      * Entry from managed code when dex cache misses for a type_idx.
      */
     .extern artInitializeTypeFromCode
-ENTRY art_quick_initialize_type_from_code
+ENTRY art_quick_initialize_type
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME           # save callee saves in case of GC
     move    $a2, rSELF                         # pass Thread::Current
@@ -556,14 +556,14 @@
     jal     artInitializeTypeFromCode
     move    $a3, $sp                           # pass $sp
     RETURN_IF_NONZERO
-END art_quick_initialize_type_from_code
+END art_quick_initialize_type
 
     /*
      * Entry from managed code when type_idx needs to be checked for access and dex cache may also
      * miss.
      */
     .extern artInitializeTypeAndVerifyAccessFromCode
-ENTRY art_quick_initialize_type_and_verify_access_from_code
+ENTRY art_quick_initialize_type_and_verify_access
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME           # save callee saves in case of GC
     move    $a2, rSELF                         # pass Thread::Current
@@ -571,13 +571,13 @@
     jal     artInitializeTypeAndVerifyAccessFromCode
     move    $a3, $sp                           # pass $sp
     RETURN_IF_NONZERO
-END art_quick_initialize_type_and_verify_access_from_code
+END art_quick_initialize_type_and_verify_access
 
     /*
      * Called by managed code to resolve a static field and load a 32-bit primitive value.
      */
     .extern artGet32StaticFromCode
-ENTRY art_quick_get32_static_from_code
+ENTRY art_quick_get32_static
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     # save callee saves in case of GC
     lw     $a1, 64($sp)                  # pass referrer's Method*
@@ -585,13 +585,13 @@
     jal    artGet32StaticFromCode        # (uint32_t field_idx, const Method* referrer, Thread*, $sp)
     move   $a3, $sp                      # pass $sp
     RETURN_IF_NO_EXCEPTION
-END art_quick_get32_static_from_code
+END art_quick_get32_static
 
     /*
      * Called by managed code to resolve a static field and load a 64-bit primitive value.
      */
     .extern artGet64StaticFromCode
-ENTRY art_quick_get64_static_from_code
+ENTRY art_quick_get64_static
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     # save callee saves in case of GC
     lw     $a1, 64($sp)                  # pass referrer's Method*
@@ -599,13 +599,13 @@
     jal    artGet64StaticFromCode        # (uint32_t field_idx, const Method* referrer, Thread*, $sp)
     move   $a3, $sp                      # pass $sp
     RETURN_IF_NO_EXCEPTION
-END art_quick_get64_static_from_code
+END art_quick_get64_static
 
     /*
      * Called by managed code to resolve a static field and load an object reference.
      */
     .extern artGetObjStaticFromCode
-ENTRY art_quick_get_obj_static_from_code
+ENTRY art_quick_get_obj_static
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     # save callee saves in case of GC
     lw     $a1, 64($sp)                  # pass referrer's Method*
@@ -613,13 +613,13 @@
     jal    artGetObjStaticFromCode       # (uint32_t field_idx, const Method* referrer, Thread*, $sp)
     move   $a3, $sp                      # pass $sp
     RETURN_IF_NO_EXCEPTION
-END art_quick_get_obj_static_from_code
+END art_quick_get_obj_static
 
     /*
      * Called by managed code to resolve an instance field and load a 32-bit primitive value.
      */
     .extern artGet32InstanceFromCode
-ENTRY art_quick_get32_instance_from_code
+ENTRY art_quick_get32_instance
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     # save callee saves in case of GC
     lw     $a2, 64($sp)                  # pass referrer's Method*
@@ -627,13 +627,13 @@
     jal    artGet32InstanceFromCode      # (field_idx, Object*, referrer, Thread*, $sp)
     sw     $sp, 16($sp)                  # pass $sp
     RETURN_IF_NO_EXCEPTION
-END art_quick_get32_instance_from_code
+END art_quick_get32_instance
 
     /*
      * Called by managed code to resolve an instance field and load a 64-bit primitive value.
      */
     .extern artGet64InstanceFromCode
-ENTRY art_quick_get64_instance_from_code
+ENTRY art_quick_get64_instance
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     # save callee saves in case of GC
     lw     $a2, 64($sp)                  # pass referrer's Method*
@@ -641,13 +641,13 @@
     jal    artGet64InstanceFromCode      # (field_idx, Object*, referrer, Thread*, $sp)
     sw     $sp, 16($sp)                  # pass $sp
     RETURN_IF_NO_EXCEPTION
-END art_quick_get64_instance_from_code
+END art_quick_get64_instance
 
     /*
      * Called by managed code to resolve an instance field and load an object reference.
      */
     .extern artGetObjInstanceFromCode
-ENTRY art_quick_get_obj_instance_from_code
+ENTRY art_quick_get_obj_instance
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     # save callee saves in case of GC
     lw     $a2, 64($sp)                  # pass referrer's Method*
@@ -655,13 +655,13 @@
     jal    artGetObjInstanceFromCode     # (field_idx, Object*, referrer, Thread*, $sp)
     sw     $sp, 16($sp)                  # pass $sp
     RETURN_IF_NO_EXCEPTION
-END art_quick_get_obj_instance_from_code
+END art_quick_get_obj_instance
 
     /*
      * Called by managed code to resolve a static field and store a 32-bit primitive value.
      */
     .extern artSet32StaticFromCode
-ENTRY art_quick_set32_static_from_code
+ENTRY art_quick_set32_static
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     # save callee saves in case of GC
     lw     $a2, 64($sp)                  # pass referrer's Method*
@@ -669,13 +669,13 @@
     jal    artSet32StaticFromCode        # (field_idx, new_val, referrer, Thread*, $sp)
     sw     $sp, 16($sp)                  # pass $sp
     RETURN_IF_ZERO
-END art_quick_set32_static_from_code
+END art_quick_set32_static
 
     /*
      * Called by managed code to resolve a static field and store a 64-bit primitive value.
      */
     .extern artSet64StaticFromCode
-ENTRY art_quick_set64_static_from_code
+ENTRY art_quick_set64_static
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     # save callee saves in case of GC
     lw     $a1, 64($sp)                  # pass referrer's Method*
@@ -683,13 +683,13 @@
     jal    artSet64StaticFromCode        # (field_idx, referrer, new_val, Thread*, $sp)
     sw     $sp, 20($sp)                  # pass $sp
     RETURN_IF_ZERO
-END art_quick_set64_static_from_code
+END art_quick_set64_static
 
     /*
      * Called by managed code to resolve a static field and store an object reference.
      */
     .extern artSetObjStaticFromCode
-ENTRY art_quick_set_obj_static_from_code
+ENTRY art_quick_set_obj_static
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     # save callee saves in case of GC
     lw     $a2, 64($sp)                  # pass referrer's Method*
@@ -697,13 +697,13 @@
     jal    artSetObjStaticFromCode       # (field_idx, new_val, referrer, Thread*, $sp)
     sw     $sp, 16($sp)                  # pass $sp
     RETURN_IF_ZERO
-END art_quick_set_obj_static_from_code
+END art_quick_set_obj_static
 
     /*
      * Called by managed code to resolve an instance field and store a 32-bit primitive value.
      */
     .extern artSet32InstanceFromCode
-ENTRY art_quick_set32_instance_from_code
+ENTRY art_quick_set32_instance
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     # save callee saves in case of GC
     lw     $a3, 64($sp)                  # pass referrer's Method*
@@ -711,26 +711,26 @@
     jal    artSet32InstanceFromCode      # (field_idx, Object*, new_val, referrer, Thread*, $sp)
     sw     $sp, 20($sp)                  # pass $sp
     RETURN_IF_ZERO
-END art_quick_set32_instance_from_code
+END art_quick_set32_instance
 
     /*
      * Called by managed code to resolve an instance field and store a 64-bit primitive value.
      */
     .extern artSet64InstanceFromCode
-ENTRY art_quick_set64_instance_from_code
+ENTRY art_quick_set64_instance
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     # save callee saves in case of GC
     sw     rSELF, 16($sp)                # pass Thread::Current
     jal    artSet64InstanceFromCode      # (field_idx, Object*, new_val, Thread*, $sp)
     sw     $sp, 20($sp)                  # pass $sp
     RETURN_IF_ZERO
-END art_quick_set64_instance_from_code
+END art_quick_set64_instance
 
     /*
      * Called by managed code to resolve an instance field and store an object reference.
      */
     .extern artSetObjInstanceFromCode
-ENTRY art_quick_set_obj_instance_from_code
+ENTRY art_quick_set_obj_instance
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME     # save callee saves in case of GC
     lw     $a3, 64($sp)                  # pass referrer's Method*
@@ -738,7 +738,7 @@
     jal    artSetObjInstanceFromCode     # (field_idx, Object*, new_val, referrer, Thread*, $sp)
     sw     $sp, 20($sp)                  # pass $sp
     RETURN_IF_ZERO
-END art_quick_set_obj_instance_from_code
+END art_quick_set_obj_instance
 
     /*
      * Entry from managed code to resolve a string, this stub will allocate a String and deliver an
@@ -747,7 +747,7 @@
      * performed.
      */
     .extern artResolveStringFromCode
-ENTRY art_quick_resolve_string_from_code
+ENTRY art_quick_resolve_string
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  # save callee saves in case of GC
     move    $a2, rSELF                # pass Thread::Current
@@ -755,40 +755,40 @@
     jal     artResolveStringFromCode
     move    $a3, $sp                  # pass $sp
     RETURN_IF_NONZERO
-END art_quick_resolve_string_from_code
+END art_quick_resolve_string
 
     /*
      * Called by managed code to allocate an object.
      */
     .extern artAllocObjectFromCode
-ENTRY art_quick_alloc_object_from_code
+ENTRY art_quick_alloc_object
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  # save callee saves in case of GC
     move    $a2, rSELF                # pass Thread::Current
     jal     artAllocObjectFromCode    # (uint32_t type_idx, Method* method, Thread*, $sp)
     move    $a3, $sp                  # pass $sp
     RETURN_IF_NONZERO
-END art_quick_alloc_object_from_code
+END art_quick_alloc_object
 
     /*
      * Called by managed code to allocate an object when the caller doesn't know whether it has
      * access to the created type.
      */
     .extern artAllocObjectFromCodeWithAccessCheck
-ENTRY art_quick_alloc_object_from_code_with_access_check
+ENTRY art_quick_alloc_object_with_access_check
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  # save callee saves in case of GC
     move    $a2, rSELF                # pass Thread::Current
     jal     artAllocObjectFromCodeWithAccessCheck  # (uint32_t type_idx, Method* method, Thread*, $sp)
     move    $a3, $sp                  # pass $sp
     RETURN_IF_NONZERO
-END art_quick_alloc_object_from_code_with_access_check
+END art_quick_alloc_object_with_access_check
 
     /*
      * Called by managed code to allocate an array.
      */
     .extern artAllocArrayFromCode
-ENTRY art_quick_alloc_array_from_code
+ENTRY art_quick_alloc_array
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  # save callee saves in case of GC
     move    $a3, rSELF                # pass Thread::Current
@@ -796,14 +796,14 @@
     jal     artAllocArrayFromCode
     sw      $sp, 16($sp)              # pass $sp
     RETURN_IF_NONZERO
-END art_quick_alloc_array_from_code
+END art_quick_alloc_array
 
     /*
      * Called by managed code to allocate an array when the caller doesn't know whether it has
      * access to the created type.
      */
     .extern artAllocArrayFromCodeWithAccessCheck
-ENTRY art_quick_alloc_array_from_code_with_access_check
+ENTRY art_quick_alloc_array_with_access_check
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  # save callee saves in case of GC
     move    $a3, rSELF                # pass Thread::Current
@@ -811,13 +811,13 @@
     jal     artAllocArrayFromCodeWithAccessCheck
     sw      $sp, 16($sp)              # pass $sp
     RETURN_IF_NONZERO
-END art_quick_alloc_array_from_code_with_access_check
+END art_quick_alloc_array_with_access_check
 
     /*
      * Called by managed code to allocate an array in a special case for FILLED_NEW_ARRAY.
      */
     .extern artCheckAndAllocArrayFromCode
-ENTRY art_quick_check_and_alloc_array_from_code
+ENTRY art_quick_check_and_alloc_array
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  # save callee saves in case of GC
     move    $a3, rSELF                # pass Thread::Current
@@ -825,13 +825,13 @@
     jal     artCheckAndAllocArrayFromCode
     sw      $sp, 16($sp)              # pass $sp
     RETURN_IF_NONZERO
-END art_quick_check_and_alloc_array_from_code
+END art_quick_check_and_alloc_array
 
     /*
      * Called by managed code to allocate an array in a special case for FILLED_NEW_ARRAY.
      */
     .extern artCheckAndAllocArrayFromCodeWithAccessCheck
-ENTRY art_quick_check_and_alloc_array_from_code_with_access_check
+ENTRY art_quick_check_and_alloc_array_with_access_check
     GENERATE_GLOBAL_POINTER
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  # save callee saves in case of GC
     move    $a3, rSELF                # pass Thread::Current
@@ -839,7 +839,7 @@
     jal     artCheckAndAllocArrayFromCodeWithAccessCheck
     sw      $sp, 16($sp)              # pass $sp
     RETURN_IF_NONZERO
-END art_quick_check_and_alloc_array_from_code_with_access_check
+END art_quick_check_and_alloc_array_with_access_check
 
     /*
      * Called by managed code when the value in rSUSPEND has been decremented to 0.
@@ -884,13 +884,33 @@
     DELIVER_PENDING_EXCEPTION
 END art_quick_proxy_invoke_handler
 
-    .extern artInterpreterEntry
-ENTRY art_quick_interpreter_entry
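+    /*
+     * Called on the slow path of invokes when the called method's code needs to be resolved. On
+     * success the resolved code is tail called with the argument registers restored; otherwise
+     * the pending exception is delivered.
+     */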
+    .extern artQuickResolutionTrampoline
+ENTRY art_quick_resolution_trampoline
     GENERATE_GLOBAL_POINTER
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
-    sw      $a0, 0($sp)            # place proxy method at bottom of frame
+    move    $a2, rSELF             # pass Thread::Current
+    jal     artQuickResolutionTrampoline  # (Method* called, receiver, Thread*, SP)
+    move    $a3, $sp               # pass $sp
+    lw      $gp, 52($sp)           # restore $gp
+    lw      $ra, 60($sp)           # restore $ra
+    beqz    $v0, 1f
+    lw      $a0, 0($sp)            # load resolved method to $a0
+    lw      $a1, 4($sp)            # restore non-callee save $a1
+    lw      $a2, 8($sp)            # restore non-callee save $a2
+    lw      $a3, 12($sp)           # restore non-callee save $a3
+    jr      $v0                    # tail call to method
+1:
+    addiu   $sp, $sp, 64           # pop frame
+    .cfi_adjust_cfa_offset -64
+    DELIVER_PENDING_EXCEPTION
+END art_quick_resolution_trampoline
+
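+    /*
+     * Bridge from compiled code into the interpreter: passes the Method*, Thread::Current() and SP
+     * to artQuickToInterpreterBridge and returns its result, or delivers any pending exception.
+     */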
+    .extern artQuickToInterpreterBridge
+ENTRY art_quick_to_interpreter_bridge
+    GENERATE_GLOBAL_POINTER
+    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
     move    $a1, rSELF             # pass Thread::Current
-    jal     artInterpreterEntry    # (Method* method, Thread*, SP)
+    jal     artQuickToInterpreterBridge    # (Method* method, Thread*, SP)
     move    $a2, $sp               # pass $sp
     lw      $t0, THREAD_EXCEPTION_OFFSET(rSELF) # load Thread::Current()->exception_
     lw      $gp, 52($sp)           # restore $gp
@@ -902,14 +922,14 @@
     nop
 1:
     DELIVER_PENDING_EXCEPTION
-END art_quick_interpreter_entry
+END art_quick_to_interpreter_bridge
 
     /*
      * Routine that intercepts method calls and returns.
      */
     .extern artInstrumentationMethodEntryFromCode
     .extern artInstrumentationMethodExitFromCode
-ENTRY art_quick_instrumentation_entry_from_code
+ENTRY art_quick_instrumentation_entry
     GENERATE_GLOBAL_POINTER
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
     move     $t0, $sp       # remember bottom of caller's frame
@@ -927,10 +947,10 @@
     RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
     jalr     $t9            # call method
     nop
-END art_quick_instrumentation_entry_from_code
+END art_quick_instrumentation_entry
     /* intentional fallthrough */
-    .global art_quick_instrumentation_exit_from_code
-art_quick_instrumentation_exit_from_code:
+    .global art_quick_instrumentation_exit
+art_quick_instrumentation_exit:
     .cfi_startproc
     addiu    $t9, $ra, 4    # put current address into $t9 to rebuild $gp
     GENERATE_GLOBAL_POINTER
@@ -960,7 +980,7 @@
     jr       $t0            # return
     addiu    $sp, $sp, 112  # 48 bytes of args + 64 bytes of callee save frame
     .cfi_adjust_cfa_offset -112
-END art_quick_instrumentation_exit_from_code
+END art_quick_instrumentation_exit
 
     /*
      * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
@@ -978,18 +998,6 @@
 END art_quick_deoptimize
 
     /*
-     * Quick abstract method error stub. $a0 contains method* on entry.
-     */
-ENTRY art_quick_abstract_method_error_stub
-    GENERATE_GLOBAL_POINTER
-    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
-    move     $a1, $s1       # pass Thread::Current
-    la       $t9, artThrowAbstractMethodErrorFromCode
-    jr       $t9            # (Method*, Thread*, SP)
-    move     $a2, $sp       # pass SP
-END art_quick_abstract_method_error_stub
-
-    /*
      * Long integer shift.  This is different from the generic 32/64-bit
      * binary operations because vAA/vBB are 64-bit but vCC (the shift
      * distance) is 32-bit.  Also, Dalvik requires us to ignore all but the low
diff --git a/runtime/arch/x86/asm_support_x86.S b/runtime/arch/x86/asm_support_x86.S
index 7e6dce9..7a3fdfa 100644
--- a/runtime/arch/x86/asm_support_x86.S
+++ b/runtime/arch/x86/asm_support_x86.S
@@ -88,4 +88,16 @@
   .cfi_restore REG_VAR(reg,0)
 END_MACRO
 
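+// Generates a placeholder for an entry point that has no x86 implementation yet; reaching it
+// traps on int3 so the missing code is caught immediately rather than silently misbehaving.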
+MACRO1(UNIMPLEMENTED,name)
+    .type VAR(name, 0), @function
+    .globl VAR(name, 0)
+    ALIGN_FUNCTION_ENTRY
+VAR(name, 0):
+    .cfi_startproc
+    int3
+    int3
+    .cfi_endproc
+    .size \name, .-\name
+END_MACRO
+
 #endif  // ART_RUNTIME_ARCH_X86_ASM_SUPPORT_X86_S_
diff --git a/runtime/arch/x86/entrypoints_init_x86.cc b/runtime/arch/x86/entrypoints_init_x86.cc
index d47dfef..9152674 100644
--- a/runtime/arch/x86/entrypoints_init_x86.cc
+++ b/runtime/arch/x86/entrypoints_init_x86.cc
@@ -20,70 +20,74 @@
 
 namespace art {
 
-// Alloc entrypoints.
-extern "C" void* art_quick_alloc_array_from_code(uint32_t, void*, int32_t);
-extern "C" void* art_quick_alloc_array_from_code_with_access_check(uint32_t, void*, int32_t);
-extern "C" void* art_quick_alloc_object_from_code(uint32_t type_idx, void* method);
-extern "C" void* art_quick_alloc_object_from_code_with_access_check(uint32_t type_idx, void* method);
-extern "C" void* art_quick_check_and_alloc_array_from_code(uint32_t, void*, int32_t);
-extern "C" void* art_quick_check_and_alloc_array_from_code_with_access_check(uint32_t, void*, int32_t);
-
-// Cast entrypoints.
-extern "C" uint32_t art_quick_is_assignable_from_code(const mirror::Class* klass,
-                                                const mirror::Class* ref_class);
-extern "C" void art_quick_can_put_array_element_from_code(void*, void*);
-extern "C" void art_quick_check_cast_from_code(void*, void*);
-
-// DexCache entrypoints.
-extern "C" void* art_quick_initialize_static_storage_from_code(uint32_t, void*);
-extern "C" void* art_quick_initialize_type_from_code(uint32_t, void*);
-extern "C" void* art_quick_initialize_type_and_verify_access_from_code(uint32_t, void*);
-extern "C" void* art_quick_resolve_string_from_code(void*, uint32_t);
-
-// Field entrypoints.
-extern "C" int art_quick_set32_instance_from_code(uint32_t, void*, int32_t);
-extern "C" int art_quick_set32_static_from_code(uint32_t, int32_t);
-extern "C" int art_quick_set64_instance_from_code(uint32_t, void*, int64_t);
-extern "C" int art_quick_set64_static_from_code(uint32_t, int64_t);
-extern "C" int art_quick_set_obj_instance_from_code(uint32_t, void*, void*);
-extern "C" int art_quick_set_obj_static_from_code(uint32_t, void*);
-extern "C" int32_t art_quick_get32_instance_from_code(uint32_t, void*);
-extern "C" int32_t art_quick_get32_static_from_code(uint32_t);
-extern "C" int64_t art_quick_get64_instance_from_code(uint32_t, void*);
-extern "C" int64_t art_quick_get64_static_from_code(uint32_t);
-extern "C" void* art_quick_get_obj_instance_from_code(uint32_t, void*);
-extern "C" void* art_quick_get_obj_static_from_code(uint32_t);
-
-// FillArray entrypoint.
-extern "C" void art_quick_handle_fill_data_from_code(void*, void*);
-
-// Lock entrypoints.
-extern "C" void art_quick_lock_object_from_code(void*);
-extern "C" void art_quick_unlock_object_from_code(void*);
-
-// Math entrypoints.
-extern "C" double art_quick_fmod_from_code(double, double);
-extern "C" float art_quick_fmodf_from_code(float, float);
-extern "C" double art_quick_l2d_from_code(int64_t);
-extern "C" float art_quick_l2f_from_code(int64_t);
-extern "C" int64_t art_quick_d2l_from_code(double);
-extern "C" int64_t art_quick_f2l_from_code(float);
-extern "C" int32_t art_quick_idivmod_from_code(int32_t, int32_t);
-extern "C" int64_t art_quick_ldiv_from_code(int64_t, int64_t);
-extern "C" int64_t art_quick_ldivmod_from_code(int64_t, int64_t);
-extern "C" int64_t art_quick_lmul_from_code(int64_t, int64_t);
-extern "C" uint64_t art_quick_lshl_from_code(uint64_t, uint32_t);
-extern "C" uint64_t art_quick_lshr_from_code(uint64_t, uint32_t);
-extern "C" uint64_t art_quick_lushr_from_code(uint64_t, uint32_t);
-
 // Interpreter entrypoints.
-extern "C" void artInterpreterToInterpreterEntry(Thread* self, MethodHelper& mh,
-                                                 const DexFile::CodeItem* code_item,
-                                                 ShadowFrame* shadow_frame, JValue* result);
-extern "C" void artInterpreterToQuickEntry(Thread* self, MethodHelper& mh,
+extern "C" void artInterpreterToInterpreterBridge(Thread* self, MethodHelper& mh,
+                                                  const DexFile::CodeItem* code_item,
+                                                  ShadowFrame* shadow_frame, JValue* result);
+extern "C" void artInterperterToCompiledCodeBridge(Thread* self, MethodHelper& mh,
                                            const DexFile::CodeItem* code_item,
                                            ShadowFrame* shadow_frame, JValue* result);
 
+// Portable entrypoints.
+extern "C" void art_portable_resolution_trampoline(mirror::AbstractMethod*);
+extern "C" void art_portable_to_interpreter_bridge(mirror::AbstractMethod*);
+
+// Alloc entrypoints.
+extern "C" void* art_quick_alloc_array(uint32_t, void*, int32_t);
+extern "C" void* art_quick_alloc_array_with_access_check(uint32_t, void*, int32_t);
+extern "C" void* art_quick_alloc_object(uint32_t type_idx, void* method);
+extern "C" void* art_quick_alloc_object_with_access_check(uint32_t type_idx, void* method);
+extern "C" void* art_quick_check_and_alloc_array(uint32_t, void*, int32_t);
+extern "C" void* art_quick_check_and_alloc_array_with_access_check(uint32_t, void*, int32_t);
+
+// Cast entrypoints.
+extern "C" uint32_t art_quick_is_assignable(const mirror::Class* klass,
+                                                const mirror::Class* ref_class);
+extern "C" void art_quick_can_put_array_element(void*, void*);
+extern "C" void art_quick_check_cast(void*, void*);
+
+// DexCache entrypoints.
+extern "C" void* art_quick_initialize_static_storage(uint32_t, void*);
+extern "C" void* art_quick_initialize_type(uint32_t, void*);
+extern "C" void* art_quick_initialize_type_and_verify_access(uint32_t, void*);
+extern "C" void* art_quick_resolve_string(void*, uint32_t);
+
+// Field entrypoints.
+extern "C" int art_quick_set32_instance(uint32_t, void*, int32_t);
+extern "C" int art_quick_set32_static(uint32_t, int32_t);
+extern "C" int art_quick_set64_instance(uint32_t, void*, int64_t);
+extern "C" int art_quick_set64_static(uint32_t, int64_t);
+extern "C" int art_quick_set_obj_instance(uint32_t, void*, void*);
+extern "C" int art_quick_set_obj_static(uint32_t, void*);
+extern "C" int32_t art_quick_get32_instance(uint32_t, void*);
+extern "C" int32_t art_quick_get32_static(uint32_t);
+extern "C" int64_t art_quick_get64_instance(uint32_t, void*);
+extern "C" int64_t art_quick_get64_static(uint32_t);
+extern "C" void* art_quick_get_obj_instance(uint32_t, void*);
+extern "C" void* art_quick_get_obj_static(uint32_t);
+
+// FillArray entrypoint.
+extern "C" void art_quick_handle_fill_data(void*, void*);
+
+// Lock entrypoints.
+extern "C" void art_quick_lock_object(void*);
+extern "C" void art_quick_unlock_object(void*);
+
+// Math entrypoints.
+extern "C" double art_quick_fmod(double, double);
+extern "C" float art_quick_fmodf(float, float);
+extern "C" double art_quick_l2d(int64_t);
+extern "C" float art_quick_l2f(int64_t);
+extern "C" int64_t art_quick_d2l(double);
+extern "C" int64_t art_quick_f2l(float);
+extern "C" int32_t art_quick_idivmod(int32_t, int32_t);
+extern "C" int64_t art_quick_ldiv(int64_t, int64_t);
+extern "C" int64_t art_quick_ldivmod(int64_t, int64_t);
+extern "C" int64_t art_quick_lmul(int64_t, int64_t);
+extern "C" uint64_t art_quick_lshl(uint64_t, uint32_t);
+extern "C" uint64_t art_quick_lshr(uint64_t, uint32_t);
+extern "C" uint64_t art_quick_lushr(uint64_t, uint32_t);
+
 // Intrinsic entrypoints.
 extern "C" int32_t art_quick_memcmp16(void*, void*, int32_t);
 extern "C" int32_t art_quick_indexof(void*, uint32_t, uint32_t, uint32_t);
@@ -91,12 +95,8 @@
 extern "C" void* art_quick_memcpy(void*, const void*, size_t);
 
 // Invoke entrypoints.
-extern "C" const void* artPortableResolutionTrampoline(mirror::AbstractMethod* called,
-                                                       mirror::Object* receiver,
-                                                       mirror::AbstractMethod** sp, Thread* thread);
-extern "C" const void* artQuickResolutionTrampoline(mirror::AbstractMethod* called,
-                                                    mirror::Object* receiver,
-                                                    mirror::AbstractMethod** sp, Thread* thread);
+extern "C" void art_quick_resolution_trampoline(mirror::AbstractMethod*);
+extern "C" void art_quick_to_interpreter_bridge(mirror::AbstractMethod*);
 extern "C" void art_quick_invoke_direct_trampoline_with_access_check(uint32_t, void*);
 extern "C" void art_quick_invoke_interface_trampoline(uint32_t, void*);
 extern "C" void art_quick_invoke_interface_trampoline_with_access_check(uint32_t, void*);
@@ -109,49 +109,61 @@
 extern "C" void art_quick_test_suspend();
 
 // Throw entrypoints.
-extern "C" void art_quick_deliver_exception_from_code(void*);
-extern "C" void art_quick_throw_array_bounds_from_code(int32_t index, int32_t limit);
-extern "C" void art_quick_throw_div_zero_from_code();
-extern "C" void art_quick_throw_no_such_method_from_code(int32_t method_idx);
-extern "C" void art_quick_throw_null_pointer_exception_from_code();
-extern "C" void art_quick_throw_stack_overflow_from_code(void*);
+extern "C" void art_quick_deliver_exception(void*);
+extern "C" void art_quick_throw_array_bounds(int32_t index, int32_t limit);
+extern "C" void art_quick_throw_div_zero();
+extern "C" void art_quick_throw_no_such_method(int32_t method_idx);
+extern "C" void art_quick_throw_null_pointer_exception();
+extern "C" void art_quick_throw_stack_overflow(void*);
 
-void InitEntryPoints(QuickEntryPoints* qpoints, PortableEntryPoints* ppoints) {
+void InitEntryPoints(InterpreterEntryPoints* ipoints, JniEntryPoints* jpoints,
+                     PortableEntryPoints* ppoints, QuickEntryPoints* qpoints) {
+  // Interpreter
+  ipoints->pInterpreterToInterpreterBridge = artInterpreterToInterpreterBridge;
+  ipoints->pInterpreterToCompiledCodeBridge = artInterperterToCompiledCodeBridge;
+
+  // JNI
+  jpoints->pDlsymLookup = art_jni_dlsym_lookup_stub;
+
+  // Portable
+  ppoints->pPortableResolutionTrampoline = art_portable_resolution_trampoline;
+  ppoints->pPortableToInterpreterBridge = art_portable_to_interpreter_bridge;
+
   // Alloc
-  qpoints->pAllocArrayFromCode = art_quick_alloc_array_from_code;
-  qpoints->pAllocArrayFromCodeWithAccessCheck = art_quick_alloc_array_from_code_with_access_check;
-  qpoints->pAllocObjectFromCode = art_quick_alloc_object_from_code;
-  qpoints->pAllocObjectFromCodeWithAccessCheck = art_quick_alloc_object_from_code_with_access_check;
-  qpoints->pCheckAndAllocArrayFromCode = art_quick_check_and_alloc_array_from_code;
-  qpoints->pCheckAndAllocArrayFromCodeWithAccessCheck = art_quick_check_and_alloc_array_from_code_with_access_check;
+  qpoints->pAllocArray = art_quick_alloc_array;
+  qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check;
+  qpoints->pAllocObject = art_quick_alloc_object;
+  qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check;
+  qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array;
+  qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check;
 
   // Cast
-  qpoints->pInstanceofNonTrivialFromCode = art_quick_is_assignable_from_code;
-  qpoints->pCanPutArrayElementFromCode = art_quick_can_put_array_element_from_code;
-  qpoints->pCheckCastFromCode = art_quick_check_cast_from_code;
+  qpoints->pInstanceofNonTrivial = art_quick_is_assignable;
+  qpoints->pCanPutArrayElement = art_quick_can_put_array_element;
+  qpoints->pCheckCast = art_quick_check_cast;
 
   // DexCache
-  qpoints->pInitializeStaticStorage = art_quick_initialize_static_storage_from_code;
-  qpoints->pInitializeTypeAndVerifyAccessFromCode = art_quick_initialize_type_and_verify_access_from_code;
-  qpoints->pInitializeTypeFromCode = art_quick_initialize_type_from_code;
-  qpoints->pResolveStringFromCode = art_quick_resolve_string_from_code;
+  qpoints->pInitializeStaticStorage = art_quick_initialize_static_storage;
+  qpoints->pInitializeTypeAndVerifyAccess = art_quick_initialize_type_and_verify_access;
+  qpoints->pInitializeType = art_quick_initialize_type;
+  qpoints->pResolveString = art_quick_resolve_string;
 
   // Field
-  qpoints->pSet32Instance = art_quick_set32_instance_from_code;
-  qpoints->pSet32Static = art_quick_set32_static_from_code;
-  qpoints->pSet64Instance = art_quick_set64_instance_from_code;
-  qpoints->pSet64Static = art_quick_set64_static_from_code;
-  qpoints->pSetObjInstance = art_quick_set_obj_instance_from_code;
-  qpoints->pSetObjStatic = art_quick_set_obj_static_from_code;
-  qpoints->pGet32Instance = art_quick_get32_instance_from_code;
-  qpoints->pGet64Instance = art_quick_get64_instance_from_code;
-  qpoints->pGetObjInstance = art_quick_get_obj_instance_from_code;
-  qpoints->pGet32Static = art_quick_get32_static_from_code;
-  qpoints->pGet64Static = art_quick_get64_static_from_code;
-  qpoints->pGetObjStatic = art_quick_get_obj_static_from_code;
+  qpoints->pSet32Instance = art_quick_set32_instance;
+  qpoints->pSet32Static = art_quick_set32_static;
+  qpoints->pSet64Instance = art_quick_set64_instance;
+  qpoints->pSet64Static = art_quick_set64_static;
+  qpoints->pSetObjInstance = art_quick_set_obj_instance;
+  qpoints->pSetObjStatic = art_quick_set_obj_static;
+  qpoints->pGet32Instance = art_quick_get32_instance;
+  qpoints->pGet64Instance = art_quick_get64_instance;
+  qpoints->pGetObjInstance = art_quick_get_obj_instance;
+  qpoints->pGet32Static = art_quick_get32_static;
+  qpoints->pGet64Static = art_quick_get64_static;
+  qpoints->pGetObjStatic = art_quick_get_obj_static;
 
   // FillArray
-  qpoints->pHandleFillArrayDataFromCode = art_quick_handle_fill_data_from_code;
+  qpoints->pHandleFillArrayData = art_quick_handle_fill_data;
 
   // JNI
   qpoints->pJniMethodStart = JniMethodStart;
@@ -162,33 +174,29 @@
   qpoints->pJniMethodEndWithReferenceSynchronized = JniMethodEndWithReferenceSynchronized;
 
   // Locks
-  qpoints->pLockObjectFromCode = art_quick_lock_object_from_code;
-  qpoints->pUnlockObjectFromCode = art_quick_unlock_object_from_code;
+  qpoints->pLockObject = art_quick_lock_object;
+  qpoints->pUnlockObject = art_quick_unlock_object;
 
   // Math
   // points->pCmpgDouble = NULL;  // Not needed on x86.
   // points->pCmpgFloat = NULL;  // Not needed on x86.
   // points->pCmplDouble = NULL;  // Not needed on x86.
   // points->pCmplFloat = NULL;  // Not needed on x86.
-  qpoints->pFmod = art_quick_fmod_from_code;
-  qpoints->pL2d = art_quick_l2d_from_code;
-  qpoints->pFmodf = art_quick_fmodf_from_code;
-  qpoints->pL2f = art_quick_l2f_from_code;
+  qpoints->pFmod = art_quick_fmod;
+  qpoints->pL2d = art_quick_l2d;
+  qpoints->pFmodf = art_quick_fmodf;
+  qpoints->pL2f = art_quick_l2f;
   // points->pD2iz = NULL;  // Not needed on x86.
   // points->pF2iz = NULL;  // Not needed on x86.
-  qpoints->pIdivmod = art_quick_idivmod_from_code;
-  qpoints->pD2l = art_quick_d2l_from_code;
-  qpoints->pF2l = art_quick_f2l_from_code;
-  qpoints->pLdiv = art_quick_ldiv_from_code;
-  qpoints->pLdivmod = art_quick_ldivmod_from_code;
-  qpoints->pLmul = art_quick_lmul_from_code;
-  qpoints->pShlLong = art_quick_lshl_from_code;
-  qpoints->pShrLong = art_quick_lshr_from_code;
-  qpoints->pUshrLong = art_quick_lushr_from_code;
-
-  // Interpreter
-  qpoints->pInterpreterToInterpreterEntry = artInterpreterToInterpreterEntry;
-  qpoints->pInterpreterToQuickEntry = artInterpreterToQuickEntry;
+  qpoints->pIdivmod = art_quick_idivmod;
+  qpoints->pD2l = art_quick_d2l;
+  qpoints->pF2l = art_quick_f2l;
+  qpoints->pLdiv = art_quick_ldiv;
+  qpoints->pLdivmod = art_quick_ldivmod;
+  qpoints->pLmul = art_quick_lmul;
+  qpoints->pShlLong = art_quick_lshl;
+  qpoints->pShrLong = art_quick_lshr;
+  qpoints->pUshrLong = art_quick_lushr;
 
   // Intrinsics
   qpoints->pIndexOf = art_quick_indexof;
@@ -197,7 +205,8 @@
   qpoints->pMemcpy = art_quick_memcpy;
 
   // Invocation
-  qpoints->pQuickResolutionTrampolineFromCode = artQuickResolutionTrampoline;
+  qpoints->pQuickResolutionTrampoline = art_quick_resolution_trampoline;
+  qpoints->pQuickToInterpreterBridge = art_quick_to_interpreter_bridge;
   qpoints->pInvokeDirectTrampolineWithAccessCheck = art_quick_invoke_direct_trampoline_with_access_check;
   qpoints->pInvokeInterfaceTrampoline = art_quick_invoke_interface_trampoline;
   qpoints->pInvokeInterfaceTrampolineWithAccessCheck = art_quick_invoke_interface_trampoline_with_access_check;
@@ -206,19 +215,16 @@
   qpoints->pInvokeVirtualTrampolineWithAccessCheck = art_quick_invoke_virtual_trampoline_with_access_check;
 
   // Thread
-  qpoints->pCheckSuspendFromCode = CheckSuspendFromCode;
-  qpoints->pTestSuspendFromCode = art_quick_test_suspend;
+  qpoints->pCheckSuspend = CheckSuspendFromCode;
+  qpoints->pTestSuspend = art_quick_test_suspend;
 
   // Throws
-  qpoints->pDeliverException = art_quick_deliver_exception_from_code;
-  qpoints->pThrowArrayBoundsFromCode = art_quick_throw_array_bounds_from_code;
-  qpoints->pThrowDivZeroFromCode = art_quick_throw_div_zero_from_code;
-  qpoints->pThrowNoSuchMethodFromCode = art_quick_throw_no_such_method_from_code;
-  qpoints->pThrowNullPointerFromCode = art_quick_throw_null_pointer_exception_from_code;
-  qpoints->pThrowStackOverflowFromCode = art_quick_throw_stack_overflow_from_code;
-
-  // Portable
-  ppoints->pPortableResolutionTrampolineFromCode = artPortableResolutionTrampoline;
+  qpoints->pDeliverException = art_quick_deliver_exception;
+  qpoints->pThrowArrayBounds = art_quick_throw_array_bounds;
+  qpoints->pThrowDivZero = art_quick_throw_div_zero;
+  qpoints->pThrowNoSuchMethod = art_quick_throw_no_such_method;
+  qpoints->pThrowNullPointer = art_quick_throw_null_pointer_exception;
+  qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
 };
 
 }  // namespace art
diff --git a/runtime/arch/x86/portable_entrypoints_x86.S b/runtime/arch/x86/portable_entrypoints_x86.S
index a0fca6c..0313d4b 100644
--- a/runtime/arch/x86/portable_entrypoints_x86.S
+++ b/runtime/arch/x86/portable_entrypoints_x86.S
@@ -90,20 +90,5 @@
     ret
 END_FUNCTION art_portable_proxy_invoke_handler
 
-    /*
-     * Portable abstract method error stub. method* is at %esp + 4 on entry.
-     */
-DEFINE_FUNCTION art_portable_abstract_method_error_stub
-    PUSH ebp
-    movl %esp, %ebp               // Remember SP.
-    .cfi_def_cfa_register ebp
-    subl LITERAL(12), %esp        // Align stack.
-    PUSH esp                      // Pass sp (not used).
-    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
-    pushl 8(%ebp)                 // Pass Method*.
-    call SYMBOL(artThrowAbstractMethodErrorFromCode)  // (Method*, Thread*, SP)
-    leave                         // Restore the stack and %ebp.
-    .cfi_def_cfa esp, 4
-    .cfi_restore ebp
-    ret                           // Return to caller to handle pending exception.
-END_FUNCTION art_portable_abstract_method_error_stub
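+    /*
+     * The portable resolution trampoline and to-interpreter bridge have no x86 implementation yet;
+     * they use the trapping UNIMPLEMENTED stubs defined in asm_support_x86.S.
+     */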
+UNIMPLEMENTED art_portable_resolution_trampoline
+UNIMPLEMENTED art_portable_to_interpreter_bridge
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 89ea71a..dbf552f 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -135,34 +135,34 @@
     /*
      * Called by managed code to create and deliver a NullPointerException.
      */
-NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception_from_code, artThrowNullPointerExceptionFromCode
+NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode
 
     /*
      * Called by managed code to create and deliver an ArithmeticException.
      */
-NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero_from_code, artThrowDivZeroFromCode
+NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode
 
     /*
      * Called by managed code to create and deliver a StackOverflowError.
      */
-NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow_from_code, artThrowStackOverflowFromCode
+NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode
 
     /*
      * Called by managed code, saves callee saves and then calls artThrowException
      * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
      */
-ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception_from_code, artDeliverExceptionFromCode
+ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode
 
     /*
      * Called by managed code to create and deliver a NoSuchMethodError.
      */
-ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method_from_code, artThrowNoSuchMethodFromCode
+ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode
 
     /*
      * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
      * index, arg2 holds limit.
      */
-TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds_from_code, artThrowArrayBoundsFromCode
+TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode
 
     /*
      * All generated callsites for interface invokes and invocation slow paths will load arguments
@@ -382,24 +382,24 @@
     DELIVER_PENDING_EXCEPTION
 END_MACRO
 
-TWO_ARG_DOWNCALL art_quick_alloc_object_from_code, artAllocObjectFromCode, RETURN_IF_EAX_NOT_ZERO
-TWO_ARG_DOWNCALL art_quick_alloc_object_from_code_with_access_check, artAllocObjectFromCodeWithAccessCheck, RETURN_IF_EAX_NOT_ZERO
-THREE_ARG_DOWNCALL art_quick_alloc_array_from_code, artAllocArrayFromCode, RETURN_IF_EAX_NOT_ZERO
-THREE_ARG_DOWNCALL art_quick_alloc_array_from_code_with_access_check, artAllocArrayFromCodeWithAccessCheck, RETURN_IF_EAX_NOT_ZERO
-THREE_ARG_DOWNCALL art_quick_check_and_alloc_array_from_code, artCheckAndAllocArrayFromCode, RETURN_IF_EAX_NOT_ZERO
-THREE_ARG_DOWNCALL art_quick_check_and_alloc_array_from_code_with_access_check, artCheckAndAllocArrayFromCodeWithAccessCheck, RETURN_IF_EAX_NOT_ZERO
+TWO_ARG_DOWNCALL art_quick_alloc_object, artAllocObjectFromCode, RETURN_IF_EAX_NOT_ZERO
+TWO_ARG_DOWNCALL art_quick_alloc_object_with_access_check, artAllocObjectFromCodeWithAccessCheck, RETURN_IF_EAX_NOT_ZERO
+THREE_ARG_DOWNCALL art_quick_alloc_array, artAllocArrayFromCode, RETURN_IF_EAX_NOT_ZERO
+THREE_ARG_DOWNCALL art_quick_alloc_array_with_access_check, artAllocArrayFromCodeWithAccessCheck, RETURN_IF_EAX_NOT_ZERO
+THREE_ARG_DOWNCALL art_quick_check_and_alloc_array, artCheckAndAllocArrayFromCode, RETURN_IF_EAX_NOT_ZERO
+THREE_ARG_DOWNCALL art_quick_check_and_alloc_array_with_access_check, artCheckAndAllocArrayFromCodeWithAccessCheck, RETURN_IF_EAX_NOT_ZERO
 
-TWO_ARG_DOWNCALL art_quick_resolve_string_from_code, artResolveStringFromCode, RETURN_IF_EAX_NOT_ZERO
-TWO_ARG_DOWNCALL art_quick_initialize_static_storage_from_code, artInitializeStaticStorageFromCode, RETURN_IF_EAX_NOT_ZERO
-TWO_ARG_DOWNCALL art_quick_initialize_type_from_code, artInitializeTypeFromCode, RETURN_IF_EAX_NOT_ZERO
-TWO_ARG_DOWNCALL art_quick_initialize_type_and_verify_access_from_code, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_EAX_NOT_ZERO
+TWO_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_EAX_NOT_ZERO
+TWO_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_EAX_NOT_ZERO
+TWO_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_EAX_NOT_ZERO
+TWO_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_EAX_NOT_ZERO
 
-ONE_ARG_DOWNCALL art_quick_lock_object_from_code, artLockObjectFromCode, ret
-ONE_ARG_DOWNCALL art_quick_unlock_object_from_code, artUnlockObjectFromCode, RETURN_IF_EAX_ZERO
+ONE_ARG_DOWNCALL art_quick_lock_object, artLockObjectFromCode, ret
+ONE_ARG_DOWNCALL art_quick_unlock_object, artUnlockObjectFromCode, RETURN_IF_EAX_ZERO
 
-TWO_ARG_DOWNCALL art_quick_handle_fill_data_from_code, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO
+TWO_ARG_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO
 
-DEFINE_FUNCTION art_quick_is_assignable_from_code
+DEFINE_FUNCTION art_quick_is_assignable
     PUSH eax                     // alignment padding
     PUSH ecx                    // pass arg2
     PUSH eax                     // pass arg1
@@ -407,7 +407,7 @@
     addl LITERAL(12), %esp        // pop arguments
     .cfi_adjust_cfa_offset -12
     ret
-END_FUNCTION art_quick_is_assignable_from_code
+END_FUNCTION art_quick_is_assignable
 
 DEFINE_FUNCTION art_quick_memcpy
     PUSH edx                      // pass arg3
@@ -419,12 +419,12 @@
     ret
 END_FUNCTION art_quick_memcpy
 
-TWO_ARG_DOWNCALL art_quick_check_cast_from_code, artCheckCastFromCode, RETURN_IF_EAX_ZERO
-TWO_ARG_DOWNCALL art_quick_can_put_array_element_from_code, artCanPutArrayElementFromCode, RETURN_IF_EAX_ZERO
+TWO_ARG_DOWNCALL art_quick_check_cast, artCheckCastFromCode, RETURN_IF_EAX_ZERO
+TWO_ARG_DOWNCALL art_quick_can_put_array_element, artCanPutArrayElementFromCode, RETURN_IF_EAX_ZERO
 
 NO_ARG_DOWNCALL art_quick_test_suspend, artTestSuspendFromCode, ret
 
-DEFINE_FUNCTION art_quick_fmod_from_code
+DEFINE_FUNCTION art_quick_fmod
     subl LITERAL(12), %esp        // alignment padding
     .cfi_adjust_cfa_offset 12
     PUSH ebx                      // pass arg4 b.hi
@@ -437,9 +437,9 @@
     addl LITERAL(28), %esp        // pop arguments
     .cfi_adjust_cfa_offset -28
     ret
-END_FUNCTION art_quick_fmod_from_code
+END_FUNCTION art_quick_fmod
 
-DEFINE_FUNCTION art_quick_fmodf_from_code
+DEFINE_FUNCTION art_quick_fmodf
     PUSH eax                      // alignment padding
     PUSH ecx                      // pass arg2 b
     PUSH eax                      // pass arg1 a
@@ -449,9 +449,9 @@
     addl LITERAL(12), %esp        // pop arguments
     .cfi_adjust_cfa_offset -12
     ret
-END_FUNCTION art_quick_fmodf_from_code
+END_FUNCTION art_quick_fmodf
 
-DEFINE_FUNCTION art_quick_l2d_from_code
+DEFINE_FUNCTION art_quick_l2d
     PUSH ecx                      // push arg2 a.hi
     PUSH eax                      // push arg1 a.lo
     fildll (%esp)                 // load as integer and push into st0
@@ -460,9 +460,9 @@
     addl LITERAL(8), %esp         // pop arguments
     .cfi_adjust_cfa_offset -8
     ret
-END_FUNCTION art_quick_l2d_from_code
+END_FUNCTION art_quick_l2d
 
-DEFINE_FUNCTION art_quick_l2f_from_code
+DEFINE_FUNCTION art_quick_l2f
     PUSH ecx                      // push arg2 a.hi
     PUSH eax                      // push arg1 a.lo
     fildll (%esp)                 // load as integer and push into st0
@@ -471,9 +471,9 @@
     addl LITERAL(8), %esp         // pop argument
     .cfi_adjust_cfa_offset -8
     ret
-END_FUNCTION art_quick_l2f_from_code
+END_FUNCTION art_quick_l2f
 
-DEFINE_FUNCTION art_quick_d2l_from_code
+DEFINE_FUNCTION art_quick_d2l
     PUSH eax                      // alignment padding
     PUSH ecx                      // pass arg2 a.hi
     PUSH eax                      // pass arg1 a.lo
@@ -481,9 +481,9 @@
     addl LITERAL(12), %esp        // pop arguments
     .cfi_adjust_cfa_offset -12
     ret
-END_FUNCTION art_quick_d2l_from_code
+END_FUNCTION art_quick_d2l
 
-DEFINE_FUNCTION art_quick_f2l_from_code
+DEFINE_FUNCTION art_quick_f2l
     subl LITERAL(8), %esp         // alignment padding
     .cfi_adjust_cfa_offset 8
     PUSH eax                      // pass arg1 a
@@ -491,9 +491,9 @@
     addl LITERAL(12), %esp        // pop arguments
     .cfi_adjust_cfa_offset -12
     ret
-END_FUNCTION art_quick_f2l_from_code
+END_FUNCTION art_quick_f2l
 
-DEFINE_FUNCTION art_quick_idivmod_from_code
+DEFINE_FUNCTION art_quick_idivmod
     cmpl LITERAL(0x80000000), %eax
     je check_arg2  // special case
 args_ok:
@@ -505,9 +505,9 @@
     jne args_ok
     xorl %edx, %edx
     ret         // eax already holds min int
-END_FUNCTION art_quick_idivmod_from_code
+END_FUNCTION art_quick_idivmod
 
-DEFINE_FUNCTION art_quick_ldiv_from_code
+DEFINE_FUNCTION art_quick_ldiv
     subl LITERAL(12), %esp        // alignment padding
     .cfi_adjust_cfa_offset 12
     PUSH ebx                     // pass arg4 b.hi
@@ -518,9 +518,9 @@
     addl LITERAL(28), %esp        // pop arguments
     .cfi_adjust_cfa_offset -28
     ret
-END_FUNCTION art_quick_ldiv_from_code
+END_FUNCTION art_quick_ldiv
 
-DEFINE_FUNCTION art_quick_ldivmod_from_code
+DEFINE_FUNCTION art_quick_ldivmod
     subl LITERAL(12), %esp        // alignment padding
     .cfi_adjust_cfa_offset 12
     PUSH ebx                     // pass arg4 b.hi
@@ -531,18 +531,18 @@
     addl LITERAL(28), %esp        // pop arguments
     .cfi_adjust_cfa_offset -28
     ret
-END_FUNCTION art_quick_ldivmod_from_code
+END_FUNCTION art_quick_ldivmod
 
-DEFINE_FUNCTION art_quick_lmul_from_code
+DEFINE_FUNCTION art_quick_lmul
     imul %eax, %ebx              // ebx = a.lo(eax) * b.hi(ebx)
     imul %edx, %ecx              // ecx = b.lo(edx) * a.hi(ecx)
     mul  %edx                    // edx:eax = a.lo(eax) * b.lo(edx)
     add  %ebx, %ecx
     add  %ecx, %edx              // edx += (a.lo * b.hi) + (b.lo * a.hi)
     ret
-END_FUNCTION art_quick_lmul_from_code
+END_FUNCTION art_quick_lmul
 
-DEFINE_FUNCTION art_quick_lshl_from_code
+DEFINE_FUNCTION art_quick_lshl
     // ecx:eax << edx
     xchg %edx, %ecx
     shld %cl,%eax,%edx
@@ -553,9 +553,9 @@
     xor %eax, %eax
 1:
     ret
-END_FUNCTION art_quick_lshl_from_code
+END_FUNCTION art_quick_lshl
 
-DEFINE_FUNCTION art_quick_lshr_from_code
+DEFINE_FUNCTION art_quick_lshr
     // ecx:eax >> edx
     xchg %edx, %ecx
     shrd %cl,%edx,%eax
@@ -566,9 +566,9 @@
     sar LITERAL(31), %edx
 1:
     ret
-END_FUNCTION art_quick_lshr_from_code
+END_FUNCTION art_quick_lshr
 
-DEFINE_FUNCTION art_quick_lushr_from_code
+DEFINE_FUNCTION art_quick_lushr
     // ecx:eax >>> edx
     xchg %edx, %ecx
     shrd %cl,%edx,%eax
@@ -579,9 +579,9 @@
     xor %edx, %edx
 1:
     ret
-END_FUNCTION art_quick_lushr_from_code
+END_FUNCTION art_quick_lushr
 
-DEFINE_FUNCTION art_quick_set32_instance_from_code
+DEFINE_FUNCTION art_quick_set32_instance
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME       // save ref containing registers for GC
     mov %esp, %ebx                // remember SP
     subl LITERAL(8), %esp         // alignment padding
@@ -599,9 +599,9 @@
     .cfi_adjust_cfa_offset -32
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
     RETURN_IF_EAX_ZERO            // return or deliver exception
-END_FUNCTION art_quick_set32_instance_from_code
+END_FUNCTION art_quick_set32_instance
 
-DEFINE_FUNCTION art_quick_set64_instance_from_code
+DEFINE_FUNCTION art_quick_set64_instance
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save ref containing registers for GC
     subl LITERAL(8), %esp         // alignment padding
     .cfi_adjust_cfa_offset 8
@@ -618,9 +618,9 @@
     .cfi_adjust_cfa_offset -32
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     RETURN_IF_EAX_ZERO            // return or deliver exception
-END_FUNCTION art_quick_set64_instance_from_code
+END_FUNCTION art_quick_set64_instance
 
-DEFINE_FUNCTION art_quick_set_obj_instance_from_code
+DEFINE_FUNCTION art_quick_set_obj_instance
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save ref containing registers for GC
     mov %esp, %ebx                // remember SP
     subl LITERAL(8), %esp         // alignment padding
@@ -638,9 +638,9 @@
     .cfi_adjust_cfa_offset -32
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
     RETURN_IF_EAX_ZERO            // return or deliver exception
-END_FUNCTION art_quick_set_obj_instance_from_code
+END_FUNCTION art_quick_set_obj_instance
 
-DEFINE_FUNCTION art_quick_get32_instance_from_code
+DEFINE_FUNCTION art_quick_get32_instance
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save ref containing registers for GC
     mov %esp, %ebx                // remember SP
     mov 32(%esp), %edx            // get referrer
@@ -657,9 +657,9 @@
     .cfi_adjust_cfa_offset -32
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
     RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
-END_FUNCTION art_quick_get32_instance_from_code
+END_FUNCTION art_quick_get32_instance
 
-DEFINE_FUNCTION art_quick_get64_instance_from_code
+DEFINE_FUNCTION art_quick_get64_instance
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME       // save ref containing registers for GC
     mov %esp, %ebx                // remember SP
     mov 32(%esp), %edx            // get referrer
@@ -676,9 +676,9 @@
     .cfi_adjust_cfa_offset -32
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
     RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
-END_FUNCTION art_quick_get64_instance_from_code
+END_FUNCTION art_quick_get64_instance
 
-DEFINE_FUNCTION art_quick_get_obj_instance_from_code
+DEFINE_FUNCTION art_quick_get_obj_instance
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME       // save ref containing registers for GC
     mov %esp, %ebx                // remember SP
     mov 32(%esp), %edx            // get referrer
@@ -695,9 +695,9 @@
     .cfi_adjust_cfa_offset -32
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
     RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
-END_FUNCTION art_quick_get_obj_instance_from_code
+END_FUNCTION art_quick_get_obj_instance
 
-DEFINE_FUNCTION art_quick_set32_static_from_code
+DEFINE_FUNCTION art_quick_set32_static
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME       // save ref containing registers for GC
     mov %esp, %ebx                // remember SP
     mov 32(%esp), %edx            // get referrer
@@ -714,9 +714,9 @@
     .cfi_adjust_cfa_offset -32
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
     RETURN_IF_EAX_ZERO            // return or deliver exception
-END_FUNCTION art_quick_set32_static_from_code
+END_FUNCTION art_quick_set32_static
 
-DEFINE_FUNCTION art_quick_set64_static_from_code
+DEFINE_FUNCTION art_quick_set64_static
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save ref containing registers for GC
     mov %esp, %ebx                // remember SP
     subl LITERAL(8), %esp         // alignment padding
@@ -734,9 +734,9 @@
     .cfi_adjust_cfa_offset -32
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     RETURN_IF_EAX_ZERO            // return or deliver exception
-END_FUNCTION art_quick_set64_static_from_code
+END_FUNCTION art_quick_set64_static
 
-DEFINE_FUNCTION art_quick_set_obj_static_from_code
+DEFINE_FUNCTION art_quick_set_obj_static
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save ref containing registers for GC
     mov %esp, %ebx                // remember SP
     mov 32(%esp), %edx            // get referrer
@@ -752,9 +752,9 @@
     addl LITERAL(32), %esp        // pop arguments
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     RETURN_IF_EAX_ZERO            // return or deliver exception
-END_FUNCTION art_quick_set_obj_static_from_code
+END_FUNCTION art_quick_set_obj_static
 
-DEFINE_FUNCTION art_quick_get32_static_from_code
+DEFINE_FUNCTION art_quick_get32_static
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME       // save ref containing registers for GC
     mov %esp, %edx                // remember SP
     mov 32(%esp), %ecx            // get referrer
@@ -768,9 +768,9 @@
     .cfi_adjust_cfa_offset -16
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
     RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
-END_FUNCTION art_quick_get32_static_from_code
+END_FUNCTION art_quick_get32_static
 
-DEFINE_FUNCTION art_quick_get64_static_from_code
+DEFINE_FUNCTION art_quick_get64_static
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save ref containing registers for GC
     mov %esp, %edx                // remember SP
     mov 32(%esp), %ecx            // get referrer
@@ -784,9 +784,9 @@
     .cfi_adjust_cfa_offset -16
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
     RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
-END_FUNCTION art_quick_get64_static_from_code
+END_FUNCTION art_quick_get64_static
 
-DEFINE_FUNCTION art_quick_get_obj_static_from_code
+DEFINE_FUNCTION art_quick_get_obj_static
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME       // save ref containing registers for GC
     mov %esp, %edx                // remember SP
     mov 32(%esp), %ecx            // get referrer
@@ -800,7 +800,7 @@
     .cfi_adjust_cfa_offset -16
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME     // restore frame up to return address
     RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
-END_FUNCTION art_quick_get_obj_static_from_code
+END_FUNCTION art_quick_get_obj_static
 
 DEFINE_FUNCTION art_quick_proxy_invoke_handler
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME   // save frame and Method*
@@ -818,7 +818,32 @@
     RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
 END_FUNCTION art_quick_proxy_invoke_handler
 
-DEFINE_FUNCTION art_quick_interpreter_entry
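+    /*
+     * Called on the slow path of invokes when the called method's code needs to be resolved; on
+     * success the resolved code pointer is tail called, otherwise the pending exception is
+     * delivered.
+     */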
+DEFINE_FUNCTION art_quick_resolution_trampoline
+    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
+    PUSH esp                      // pass SP
+    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
+    .cfi_adjust_cfa_offset 4
+    PUSH ecx                      // pass receiver
+    PUSH eax                      // pass method
+    call SYMBOL(artQuickResolutionTrampoline) // (Method* called, receiver, Thread*, SP)
+    movl %eax, %edi               // remember code pointer in EDI
+    addl LITERAL(16), %esp        // pop arguments
+    test %eax, %eax               // if code pointer is NULL goto deliver pending exception
+    jz 1f
+    POP eax                       // called method
+    POP ecx                       // restore args
+    POP edx
+    POP ebx
+    POP ebp                       // restore callee saves except EDI
+    POP esi
+    xchgl 0(%esp),%edi            // restore EDI and place code pointer as only value on stack
+    ret                           // tail call into method
+1:
+    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
+    DELIVER_PENDING_EXCEPTION
+END_FUNCTION art_quick_resolution_trampoline
+
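+    /*
+     * Bridge from compiled code into the interpreter: passes the Method*, Thread::Current() and SP
+     * to artQuickToInterpreterBridge and forwards its return value in EAX:EDX and XMM0.
+     */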
+DEFINE_FUNCTION art_quick_to_interpreter_bridge
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME   // save frame
     mov %esp, %edx                // remember SP
     PUSH eax                      // alignment padding
@@ -826,19 +851,19 @@
     pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
     .cfi_adjust_cfa_offset 4
     PUSH eax                      // pass  method
-    call SYMBOL(artInterpreterEntry)  // (method, Thread*, SP)
+    call SYMBOL(artQuickToInterpreterBridge)  // (method, Thread*, SP)
     movd %eax, %xmm0              // place return value also into floating point return value
     movd %edx, %xmm1
     punpckldq %xmm1, %xmm0
     addl LITERAL(44), %esp        // pop arguments
     .cfi_adjust_cfa_offset -44
     RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
-END_FUNCTION art_quick_interpreter_entry
+END_FUNCTION art_quick_to_interpreter_bridge
 
     /*
      * Routine that intercepts method calls and returns.
      */
-DEFINE_FUNCTION art_quick_instrumentation_entry_from_code
+DEFINE_FUNCTION art_quick_instrumentation_entry
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
     movl  %esp, %edx              // Save SP.
     PUSH eax                      // Save eax which will be clobbered by the callee-save method.
@@ -855,7 +880,7 @@
     addl  LITERAL(28), %esp       // Pop arguments up to saved Method*.
     movl 28(%esp), %edi           // Restore edi.
     movl %eax, 28(%esp)           // Place code* over edi, just under return pc.
-    movl LITERAL(SYMBOL(art_quick_instrumentation_exit_from_code)), 32(%esp)
+    movl LITERAL(SYMBOL(art_quick_instrumentation_exit)), 32(%esp)
                                   // Place instrumentation exit as return pc.
     movl (%esp), %eax             // Restore eax.
     movl 8(%esp), %ecx            // Restore ecx.
@@ -865,9 +890,9 @@
     movl 24(%esp), %esi           // Restore esi.
     addl LITERAL(28), %esp        // Wind stack back up to code*.
     ret                           // Call method (and pop).
-END_FUNCTION art_quick_instrumentation_entry_from_code
+END_FUNCTION art_quick_instrumentation_entry
 
-DEFINE_FUNCTION art_quick_instrumentation_exit_from_code
+DEFINE_FUNCTION art_quick_instrumentation_exit
     pushl LITERAL(0)              // Push a fake return PC as there will be none on the stack.
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME
     mov  %esp, %ecx               // Remember SP
@@ -900,7 +925,7 @@
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
     addl LITERAL(4), %esp         // Remove fake return pc.
     jmp   *%ecx                   // Return.
-END_FUNCTION art_quick_instrumentation_exit_from_code
+END_FUNCTION art_quick_instrumentation_exit
 
     /*
      * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
@@ -920,21 +945,6 @@
 END_FUNCTION art_quick_deoptimize
 
     /*
-     * Quick abstract method error stub. %eax contains method* on entry.
-     */
-DEFINE_FUNCTION art_quick_abstract_method_error_stub
-    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
-    movl %esp, %ecx               // Remember SP.
-    PUSH eax                      // Align frame.
-    PUSH ecx                      // Pass SP for Method*.
-    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
-    .cfi_adjust_cfa_offset 4
-    PUSH eax                      // Pass Method*.
-    call SYMBOL(artThrowAbstractMethodErrorFromCode)  // (Method*, Thread*, SP)
-    int3                          // Unreachable.
-END_FUNCTION art_quick_abstract_method_error_stub
-
-    /*
      * String's indexOf.
      *
      * On entry:
@@ -1030,12 +1040,5 @@
     ret
 END_FUNCTION art_quick_string_compareto
 
-MACRO1(UNIMPLEMENTED,name)
-    .globl VAR(name, 0)
-    ALIGN_FUNCTION_ENTRY
-VAR(name, 0):
-    int3
-END_MACRO
-
     // TODO: implement these!
 UNIMPLEMENTED art_quick_memcmp16
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 6052993..71959c6 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -71,7 +71,7 @@
 
 namespace art {
 
-extern "C" void artInterpreterToQuickEntry(Thread* self, MethodHelper& mh,
+extern "C" void artInterperterToCompiledCodeBridge(Thread* self, MethodHelper& mh,
                                            const DexFile::CodeItem* code_item,
                                            ShadowFrame* shadow_frame, JValue* result);
 
@@ -944,6 +944,43 @@
   return oat_file;
 }
 
+static void InitFromImageCallbackCommon(mirror::Object* obj, ClassLinker* class_linker,
+                                        bool interpret_only_mode)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  DCHECK(obj != NULL);
+  DCHECK(class_linker != NULL);
+
+  if (obj->GetClass()->IsStringClass()) {
+    class_linker->GetInternTable()->RegisterStrong(obj->AsString());
+  } else if (obj->IsClass()) {
+    // Restore class to ClassLinker::classes_ table.
+    mirror::Class* klass = obj->AsClass();
+    ClassHelper kh(klass, class_linker);
+    mirror::Class* existing = class_linker->InsertClass(kh.GetDescriptor(), klass, true);
+    DCHECK(existing == NULL) << kh.GetDescriptor();
+  } else if (interpret_only_mode && obj->IsMethod()) {
+    mirror::AbstractMethod* method = obj->AsMethod();
+    if (!method->IsNative()) {
+      method->SetEntryPointFromInterpreter(interpreter::artInterpreterToInterpreterBridge);
+      if (method != Runtime::Current()->GetResolutionMethod()) {
+        method->SetEntryPointFromCompiledCode(GetCompiledCodeToInterpreterBridge());
+      }
+    }
+  }
+}
+
+static void InitFromImageCallback(mirror::Object* obj, void* arg)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  ClassLinker* class_linker = reinterpret_cast<ClassLinker*>(arg);
+  InitFromImageCallbackCommon(obj, class_linker, false);
+}
+
+static void InitFromImageInterpretOnlyCallback(mirror::Object* obj, void* arg)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  ClassLinker* class_linker = reinterpret_cast<ClassLinker*>(arg);
+  InitFromImageCallbackCommon(obj, class_linker, true);
+}
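The split into one shared helper plus two thin callbacks follows from the live-bitmap walk below, which takes a plain void (*)(mirror::Object*, void*) callback whose void* argument already carries the ClassLinker; the interpret-only flag therefore gets baked into two separate thunks rather than passed at runtime. A minimal stand-alone illustration of that shape, using stand-in types rather than the ART ones:

    #include <stdio.h>

    // Illustrative stand-ins only. The walker accepts exactly this callback shape,
    // so the boolean is fixed per thunk, mirroring InitFromImageCallbackCommon above.
    struct Object {};
    struct ClassLinker {};

    static void VisitCommon(Object* obj, ClassLinker* linker, bool interpret_only) {
      printf("visit %p linker=%p interpret_only=%d\n",
             static_cast<void*>(obj), static_cast<void*>(linker), interpret_only ? 1 : 0);
    }

    static void VisitCallback(Object* obj, void* arg) {
      VisitCommon(obj, static_cast<ClassLinker*>(arg), false);
    }

    static void VisitInterpretOnlyCallback(Object* obj, void* arg) {
      VisitCommon(obj, static_cast<ClassLinker*>(arg), true);
    }

    static void Walk(void (*visitor)(Object*, void*), void* arg) {  // stands in for the bitmap Walk
      Object obj;
      visitor(&obj, arg);
    }

    int main() {
      ClassLinker linker;
      Walk(VisitCallback, &linker);               // normal start-up
      Walk(VisitInterpretOnlyCallback, &linker);  // interpret-only start-up
      return 0;
    }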
+
 void ClassLinker::InitFromImage() {
   VLOG(startup) << "ClassLinker::InitFromImage entering";
   CHECK(!init_done_);
@@ -997,7 +1034,11 @@
   {
     ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
     heap->FlushAllocStack();
-    heap->GetLiveBitmap()->Walk(InitFromImageCallback, this);
+    if (Runtime::Current()->GetInstrumentation()->InterpretOnly()) {
+      heap->GetLiveBitmap()->Walk(InitFromImageInterpretOnlyCallback, this);
+    } else {
+      heap->GetLiveBitmap()->Walk(InitFromImageCallback, this);
+    }
   }
 
   // reinit class_roots_
@@ -1025,40 +1066,6 @@
   VLOG(startup) << "ClassLinker::InitFromImage exiting";
 }
 
-void ClassLinker::InitFromImageCallback(mirror::Object* obj, void* arg) {
-  DCHECK(obj != NULL);
-  DCHECK(arg != NULL);
-  ClassLinker* class_linker = reinterpret_cast<ClassLinker*>(arg);
-
-  if (obj->GetClass()->IsStringClass()) {
-    class_linker->intern_table_->RegisterStrong(obj->AsString());
-    return;
-  }
-  if (obj->IsClass()) {
-    // restore class to ClassLinker::classes_ table
-    mirror::Class* klass = obj->AsClass();
-    ClassHelper kh(klass, class_linker);
-    mirror::Class* existing = class_linker->InsertClass(kh.GetDescriptor(), klass, true);
-    DCHECK(existing == NULL) << kh.GetDescriptor();
-    return;
-  }
-
-  if (obj->IsMethod()) {
-    mirror::AbstractMethod* method = obj->AsMethod();
-    // Set entry points to interpreter for methods in interpreter only mode.
-    if (Runtime::Current()->GetInstrumentation()->InterpretOnly() && !method->IsNative()) {
-      method->SetEntryPointFromInterpreter(interpreter::artInterpreterToInterpreterEntry);
-      if (method != Runtime::Current()->GetResolutionMethod()) {
-        method->SetEntryPointFromCompiledCode(GetInterpreterEntryPoint());
-      }
-    }
-    // Populate native method pointer with jni lookup stub.
-    if (method->IsNative()) {
-      method->UnregisterNative(Thread::Current());
-    }
-  }
-}
-
 // Keep in sync with InitCallback. Anything we visit, we need to
 // reinit references to when reinitializing a ClassLinker from a
 // mapped image.
@@ -1558,7 +1565,7 @@
   const void* result = GetOatMethodFor(method).GetCode();
   if (result == NULL) {
     // No code? You must mean to go into the interpreter.
-    result = GetInterpreterEntryPoint();
+    result = GetCompiledCodeToInterpreterBridge();
   }
   return result;
 }
@@ -1619,7 +1626,7 @@
     const bool enter_interpreter = NeedsInterpreter(method, code);
     if (enter_interpreter) {
       // Use interpreter entry point.
-      code = GetInterpreterEntryPoint();
+      code = GetCompiledCodeToInterpreterBridge();
     }
     runtime->GetInstrumentation()->UpdateMethodsCode(method, code);
   }
@@ -1640,13 +1647,13 @@
   Runtime* runtime = Runtime::Current();
   bool enter_interpreter = NeedsInterpreter(method.get(), method->GetEntryPointFromCompiledCode());
   if (enter_interpreter) {
-    method->SetEntryPointFromInterpreter(interpreter::artInterpreterToInterpreterEntry);
+    method->SetEntryPointFromInterpreter(interpreter::artInterpreterToInterpreterBridge);
   } else {
-    method->SetEntryPointFromInterpreter(artInterpreterToQuickEntry);
+    method->SetEntryPointFromInterpreter(artInterperterToCompiledCodeBridge);
   }
 
   if (method->IsAbstract()) {
-    method->SetEntryPointFromCompiledCode(GetAbstractMethodErrorStub());
+    method->SetEntryPointFromCompiledCode(GetCompiledCodeToInterpreterBridge());
     return;
   }
 
@@ -1657,7 +1664,7 @@
     method->SetEntryPointFromCompiledCode(GetResolutionTrampoline(runtime->GetClassLinker()));
   } else if (enter_interpreter) {
     // Set entry point from compiled code if there's no code or in interpreter only mode.
-    method->SetEntryPointFromCompiledCode(GetInterpreterEntryPoint());
+    method->SetEntryPointFromCompiledCode(GetCompiledCodeToInterpreterBridge());
   }
 
   if (method->IsNative()) {
@@ -2625,12 +2632,8 @@
   method->SetCoreSpillMask(refs_and_args->GetCoreSpillMask());
   method->SetFpSpillMask(refs_and_args->GetFpSpillMask());
   method->SetFrameSizeInBytes(refs_and_args->GetFrameSizeInBytes());
-#if !defined(ART_USE_PORTABLE_COMPILER)
-  method->SetEntryPointFromCompiledCode(reinterpret_cast<void*>(art_quick_proxy_invoke_handler));
-#else
-  method->SetEntryPointFromCompiledCode(reinterpret_cast<void*>(art_portable_proxy_invoke_handler));
-#endif
-  method->SetEntryPointFromInterpreter(artInterpreterToQuickEntry);
+  method->SetEntryPointFromCompiledCode(GetProxyInvokeHandler());
+  method->SetEntryPointFromInterpreter(artInterperterToCompiledCodeBridge);
 
   return method;
 }
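For orientation, the LinkCode hunks above boil down to roughly the following choice of the two per-method entry points. This is a simplified sketch of only the branches visible in this diff; the condition guarding the resolution-trampoline branch and the native-method handling sit outside the visible hunks.

    // Sketch only, not the complete LinkCode(); 'needs_resolution_trampoline' stands in
    // for the condition elided from the hunk above.
    method->SetEntryPointFromInterpreter(enter_interpreter
        ? interpreter::artInterpreterToInterpreterBridge
        : artInterperterToCompiledCodeBridge);
    if (method->IsAbstract()) {
      // Abstract methods now share the compiled-code-to-interpreter bridge instead of the
      // removed art_quick_abstract_method_error_stub (its x86 stub is deleted earlier in this diff).
      method->SetEntryPointFromCompiledCode(GetCompiledCodeToInterpreterBridge());
      return;
    }
    if (needs_resolution_trampoline) {
      method->SetEntryPointFromCompiledCode(GetResolutionTrampoline(runtime->GetClassLinker()));
    } else if (enter_interpreter) {
      method->SetEntryPointFromCompiledCode(GetCompiledCodeToInterpreterBridge());
    }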
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index fdf75c2..060c26c 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -347,6 +347,17 @@
     return quick_resolution_trampoline_;
   }
 
+  InternTable* GetInternTable() const {
+    return intern_table_;
+  }
+
+  // Attempts to insert a class into a class table.  Returns NULL if
+  // the class was inserted, otherwise returns an existing class with
+  // the same descriptor and ClassLoader.
+  mirror::Class* InsertClass(const StringPiece& descriptor, mirror::Class* klass, bool image_class)
+      LOCKS_EXCLUDED(Locks::classlinker_classes_lock_)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
  private:
   explicit ClassLinker(InternTable*);
 
@@ -362,8 +373,6 @@
   OatFile& GetImageOatFile(gc::space::ImageSpace* space)
       LOCKS_EXCLUDED(dex_lock_)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-  static void InitFromImageCallback(mirror::Object* obj, void* arg)
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   void FinishInit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
@@ -423,13 +432,6 @@
   const OatFile::OatClass* GetOatClass(const DexFile& dex_file, const char* descriptor)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-  // Attempts to insert a class into a class table.  Returns NULL if
-  // the class was inserted, otherwise returns an existing class with
-  // the same descriptor and ClassLoader.
-  mirror::Class* InsertClass(const StringPiece& descriptor, mirror::Class* klass, bool image_class)
-      LOCKS_EXCLUDED(Locks::classlinker_classes_lock_)
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
   void RegisterDexFileLocked(const DexFile& dex_file, SirtRef<mirror::DexCache>& dex_cache)
       EXCLUSIVE_LOCKS_REQUIRED(dex_lock_)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/common_test.h b/runtime/common_test.h
index 7ee6fe2..a543617 100644
--- a/runtime/common_test.h
+++ b/runtime/common_test.h
@@ -192,10 +192,7 @@
       compiled_method =
           compiler_driver_->GetCompiledMethod(MethodReference(&dex_file,
                                                               method->GetDexMethodIndex()));
-
-#ifndef ART_LIGHT_MODE
       CHECK(compiled_method != NULL) << PrettyMethod(method);
-#endif
     }
     if (compiled_method != NULL) {
       const std::vector<uint8_t>& code = compiled_method->GetCode();
@@ -213,12 +210,8 @@
       oat_method.LinkMethod(method);
     } else {
       const void* method_code;
-      if (method->IsAbstract()) {
-        method_code = GetAbstractMethodErrorStub();
-      } else {
-        // No code? You must mean to go into the interpreter.
-        method_code = GetInterpreterEntryPoint();
-      }
+      // No code? You must mean to go into the interpreter.
+      method_code = GetCompiledCodeToInterpreterBridge();
       LOG(INFO) << "MakeExecutable " << PrettyMethod(method) << " code=" << method_code;
       OatFile::OatMethod oat_method = CreateOatMethod(method_code,
                                                       kStackAlignment,
diff --git a/runtime/entrypoints/entrypoint_utils.h b/runtime/entrypoints/entrypoint_utils.h
index 3f28b5e..b6781c0 100644
--- a/runtime/entrypoints/entrypoint_utils.h
+++ b/runtime/entrypoints/entrypoint_utils.h
@@ -30,24 +30,13 @@
 #include "object_utils.h"
 #include "thread.h"
 
-extern "C" void art_interpreter_invoke_handler();
-extern "C" void art_jni_dlsym_lookup_stub();
-extern "C" void art_portable_abstract_method_error_stub();
-extern "C" void art_portable_proxy_invoke_handler();
-extern "C" void art_quick_abstract_method_error_stub();
-extern "C" void art_quick_deoptimize();
-extern "C" void art_quick_instrumentation_entry_from_code(void*);
-extern "C" void art_quick_instrumentation_exit_from_code();
-extern "C" void art_quick_interpreter_entry(void*);
-extern "C" void art_quick_proxy_invoke_handler();
-extern "C" void art_work_around_app_jni_bugs();
-
 namespace art {
+
 namespace mirror {
-class Class;
-class Field;
-class Object;
-}
+  class Class;
+  class Field;
+  class Object;
+}  // namespace mirror
 
 // Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
 // cannot be resolved, throw an error. If it can, use it to create an instance.
@@ -350,25 +339,43 @@
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
 // Entry point for deoptimization.
-static inline uintptr_t GetDeoptimizationEntryPoint() {
+extern "C" void art_quick_deoptimize();
+static inline uintptr_t GetQuickDeoptimizationEntryPoint() {
   return reinterpret_cast<uintptr_t>(art_quick_deoptimize);
 }
 
 // Return address of instrumentation stub.
-static inline void* GetInstrumentationEntryPoint() {
-  return reinterpret_cast<void*>(art_quick_instrumentation_entry_from_code);
+extern "C" void art_quick_instrumentation_entry(void*);
+static inline void* GetQuickInstrumentationEntryPoint() {
+  return reinterpret_cast<void*>(art_quick_instrumentation_entry);
 }
 
 // The return_pc of instrumentation exit stub.
-static inline uintptr_t GetInstrumentationExitPc() {
-  return reinterpret_cast<uintptr_t>(art_quick_instrumentation_exit_from_code);
+extern "C" void art_quick_instrumentation_exit();
+static inline uintptr_t GetQuickInstrumentationExitPc() {
+  return reinterpret_cast<uintptr_t>(art_quick_instrumentation_exit);
+}
+
+extern "C" void art_portable_to_interpreter_bridge(mirror::AbstractMethod*);
+static inline const void* GetPortableToInterpreterBridge() {
+  return reinterpret_cast<void*>(art_portable_to_interpreter_bridge);
+}
+
+extern "C" void art_quick_to_interpreter_bridge(mirror::AbstractMethod*);
+static inline const void* GetQuickToInterpreterBridge() {
+  return reinterpret_cast<void*>(art_quick_to_interpreter_bridge);
 }
 
 // Return address of interpreter stub.
-static inline void* GetInterpreterEntryPoint() {
-  return reinterpret_cast<void*>(art_quick_interpreter_entry);
+static inline const void* GetCompiledCodeToInterpreterBridge() {
+#if defined(ART_USE_PORTABLE_COMPILER)
+  return GetPortableToInterpreterBridge();
+#else
+  return GetQuickToInterpreterBridge();
+#endif
 }
 
+
 static inline const void* GetPortableResolutionTrampoline(ClassLinker* class_linker) {
   return class_linker->GetPortableResolutionTrampoline();
 }
@@ -386,23 +393,25 @@
 #endif
 }
 
-static inline void* GetPortableAbstractMethodErrorStub() {
-  return reinterpret_cast<void*>(art_portable_abstract_method_error_stub);
+extern "C" void art_portable_proxy_invoke_handler();
+static inline const void* GetPortableProxyInvokeHandler() {
+  return reinterpret_cast<void*>(art_portable_proxy_invoke_handler);
 }
 
-static inline void* GetQuickAbstractMethodErrorStub() {
-  return reinterpret_cast<void*>(art_quick_abstract_method_error_stub);
+extern "C" void art_quick_proxy_invoke_handler();
+static inline const void* GetQuickProxyInvokeHandler() {
+  return reinterpret_cast<void*>(art_quick_proxy_invoke_handler);
 }
 
-// Return address of abstract method error stub for defined compiler.
-static inline void* GetAbstractMethodErrorStub() {
+static inline const void* GetProxyInvokeHandler() {
 #if defined(ART_USE_PORTABLE_COMPILER)
-  return GetPortableAbstractMethodErrorStub();
+  return GetPortableProxyInvokeHandler();
 #else
-  return GetQuickAbstractMethodErrorStub();
+  return GetQuickProxyInvokeHandler();
 #endif
 }
 
+extern "C" void* art_jni_dlsym_lookup_stub(JNIEnv*, jobject);
 static inline void* GetJniDlsymLookupStub() {
   return reinterpret_cast<void*>(art_jni_dlsym_lookup_stub);
 }
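The pattern through this header is one accessor per ABI plus a single compile-time selector, so call sites no longer need their own #if ART_USE_PORTABLE_COMPILER blocks; the proxy-method hunk in class_linker.cc above is one such call site shrinking accordingly. A minimal sketch of a call site after the change (ART types assumed from context):

    // Before: each call site chose between art_quick_proxy_invoke_handler and
    // art_portable_proxy_invoke_handler under its own #if.
    // After: the #if lives once, inside GetProxyInvokeHandler().
    method->SetEntryPointFromCompiledCode(GetProxyInvokeHandler());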
diff --git a/runtime/entrypoints/interpreter/interpreter_entrypoints.cc b/runtime/entrypoints/interpreter/interpreter_entrypoints.cc
new file mode 100644
index 0000000..d99c43e
--- /dev/null
+++ b/runtime/entrypoints/interpreter/interpreter_entrypoints.cc
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "class_linker.h"
+#include "interpreter/interpreter.h"
+#include "invoke_arg_array_builder.h"
+#include "mirror/abstract_method-inl.h"
+#include "mirror/object-inl.h"
+#include "object_utils.h"
+#include "runtime.h"
+#include "stack.h"
+
+namespace art {
+
+extern "C" void artInterperterToCompiledCodeBridge(Thread* self, MethodHelper& mh,
+                                                   const DexFile::CodeItem* code_item,
+                                                   ShadowFrame* shadow_frame, JValue* result)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  mirror::AbstractMethod* method = shadow_frame->GetMethod();
+  // Ensure static methods are initialized.
+  if (method->IsStatic()) {
+    Runtime::Current()->GetClassLinker()->EnsureInitialized(method->GetDeclaringClass(), true, true);
+  }
+  uint16_t arg_offset = (code_item == NULL) ? 0 : code_item->registers_size_ - code_item->ins_size_;
+  ArgArray arg_array(mh.GetShorty(), mh.GetShortyLength());
+  arg_array.BuildArgArray(shadow_frame, arg_offset);
+  method->Invoke(self, arg_array.GetArray(), arg_array.GetNumBytes(), result, mh.GetShorty()[0]);
+}
+
+}  // namespace art
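The body of artInterperterToCompiledCodeBridge matches that of artInterpreterToQuickEntry, which is deleted from quick_interpreter_entrypoints.cc further down in this diff; the function has moved into the new interpreter entry-point file under its new name. A rough sketch of how it is reached (the getter name is an assumption; the setter is visible in the class_linker.cc hunk above):

    // class_linker.cc (above): methods with compiled code get this bridge as their
    // interpreter-side entry point.
    method->SetEntryPointFromInterpreter(artInterperterToCompiledCodeBridge);
    // Hypothetical interpreter call site: invoking a callee goes through that pointer,
    // so an interpreted caller can transparently transfer into compiled code.
    (method->GetEntryPointFromInterpreter())(self, mh, code_item, shadow_frame, &result);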
diff --git a/runtime/entrypoints/interpreter/interpreter_entrypoints.h b/runtime/entrypoints/interpreter/interpreter_entrypoints.h
new file mode 100644
index 0000000..c7df4e6
--- /dev/null
+++ b/runtime/entrypoints/interpreter/interpreter_entrypoints.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_ENTRYPOINTS_INTERPRETER_INTERPRETER_ENTRYPOINTS_H_
+#define ART_RUNTIME_ENTRYPOINTS_INTERPRETER_INTERPRETER_ENTRYPOINTS_H_
+
+#include "base/macros.h"
+#include "dex_file.h"
+#include "offsets.h"
+
+#define INTERPRETER_ENTRYPOINT_OFFSET(x) \
+    ThreadOffset(static_cast<uintptr_t>(OFFSETOF_MEMBER(Thread, interpreter_entrypoints_)) + \
+                 static_cast<uintptr_t>(OFFSETOF_MEMBER(InterpreterEntryPoints, x)))
+
+namespace art {
+
+union JValue;
+class MethodHelper;
+class ShadowFrame;
+class Thread;
+
+// Pointers to functions that are called by interpreter trampolines via thread-local storage.
+struct PACKED(4) InterpreterEntryPoints {
+  void (*pInterpreterToInterpreterBridge)(Thread* self, MethodHelper& mh,
+                                          const DexFile::CodeItem* code_item,
+                                          ShadowFrame* shadow_frame, JValue* result);
+  void (*pInterpreterToCompiledCodeBridge)(Thread* self, MethodHelper& mh,
+                                           const DexFile::CodeItem* code_item,
+                                           ShadowFrame* shadow_frame, JValue* result);
+};
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_ENTRYPOINTS_INTERPRETER_INTERPRETER_ENTRYPOINTS_H_
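This macro, like the jni/portable/quick variants elsewhere in the change, wraps the computed byte offset in a ThreadOffset rather than leaving it as a bare integer. A self-contained illustration of what the computation produces, using simplified stand-ins for the ART types (the real OFFSETOF_MEMBER is defined differently):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    // Simplified stand-ins, not the real ART declarations.
    struct InterpreterEntryPoints {
      void (*pInterpreterToInterpreterBridge)();
      void (*pInterpreterToCompiledCodeBridge)();
    };
    struct Thread {
      uint32_t suspend_count_;                        // placeholder field before the table
      InterpreterEntryPoints interpreter_entrypoints_;
    };

    class ThreadOffset {                              // typed wrapper instead of a bare int
     public:
      explicit ThreadOffset(uintptr_t value) : value_(value) {}
      int32_t Int32Value() const { return static_cast<int32_t>(value_); }
     private:
      uintptr_t value_;
    };

    #define OFFSETOF_MEMBER(t, f) offsetof(t, f)
    #define INTERPRETER_ENTRYPOINT_OFFSET(x) \
        ThreadOffset(static_cast<uintptr_t>(OFFSETOF_MEMBER(Thread, interpreter_entrypoints_)) + \
                     static_cast<uintptr_t>(OFFSETOF_MEMBER(InterpreterEntryPoints, x)))

    int main() {
      ThreadOffset off = INTERPRETER_ENTRYPOINT_OFFSET(pInterpreterToCompiledCodeBridge);
      printf("%d\n", off.Int32Value());               // byte offset of the slot within Thread
      return 0;
    }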
diff --git a/runtime/entrypoints/jni/jni_entrypoints.cc b/runtime/entrypoints/jni/jni_entrypoints.cc
index 98f7b12..88b4936 100644
--- a/runtime/entrypoints/jni/jni_entrypoints.cc
+++ b/runtime/entrypoints/jni/jni_entrypoints.cc
@@ -15,23 +15,26 @@
  */
 
 #include "base/logging.h"
-#include "mirror/abstract_method.h"
+#include "entrypoints/entrypoint_utils.h"
+#include "mirror/abstract_method-inl.h"
+#include "mirror/object-inl.h"
+#include "object_utils.h"
 #include "scoped_thread_state_change.h"
 #include "thread.h"
 
 namespace art {
 
 // Used by the JNI dlsym stub to find the native method to invoke if none is registered.
-extern "C" void* artFindNativeMethod(Thread* self) {
+extern "C" void* artFindNativeMethod() {
+  Thread* self = Thread::Current();
   Locks::mutator_lock_->AssertNotHeld(self);  // We come here as Native.
-  DCHECK(Thread::Current() == self);
   ScopedObjectAccess soa(self);
 
   mirror::AbstractMethod* method = self->GetCurrentMethod(NULL);
   DCHECK(method != NULL);
 
-  // Lookup symbol address for method, on failure we'll return NULL with an
-  // exception set, otherwise we return the address of the method we found.
+  // Lookup symbol address for method, on failure we'll return NULL with an exception set,
+  // otherwise we return the address of the method we found.
   void* native_code = soa.Vm()->FindCodeForNativeMethod(method);
   if (native_code == NULL) {
     DCHECK(self->IsExceptionPending());
@@ -43,4 +46,78 @@
   }
 }
 
+static void WorkAroundJniBugsForJobject(intptr_t* arg_ptr) {
+  intptr_t value = *arg_ptr;
+  mirror::Object** value_as_jni_rep = reinterpret_cast<mirror::Object**>(value);
+  mirror::Object* value_as_work_around_rep = value_as_jni_rep != NULL ? *value_as_jni_rep : NULL;
+  CHECK(Runtime::Current()->GetHeap()->IsHeapAddress(value_as_work_around_rep))
+      << value_as_work_around_rep;
+  *arg_ptr = reinterpret_cast<intptr_t>(value_as_work_around_rep);
+}
+
+extern "C" const void* artWorkAroundAppJniBugs(Thread* self, intptr_t* sp)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  DCHECK(Thread::Current() == self);
+  // TODO: this code is specific to ARM
+  // On entry the stack pointed by sp is:
+  // | arg3   | <- Calling JNI method's frame (and extra bit for out args)
+  // | LR     |
+  // | R3     |    arg2
+  // | R2     |    arg1
+  // | R1     |    jclass/jobject
+  // | R0     |    JNIEnv
+  // | unused |
+  // | unused |
+  // | unused | <- sp
+  mirror::AbstractMethod* jni_method = self->GetCurrentMethod(NULL);
+  DCHECK(jni_method->IsNative()) << PrettyMethod(jni_method);
+  intptr_t* arg_ptr = sp + 4;  // pointer to r1 on stack
+  // Fix up this/jclass argument
+  WorkAroundJniBugsForJobject(arg_ptr);
+  arg_ptr++;
+  // Fix up jobject arguments
+  MethodHelper mh(jni_method);
+  int reg_num = 2;  // Current register being processed, -1 for stack arguments.
+  for (uint32_t i = 1; i < mh.GetShortyLength(); i++) {
+    char shorty_char = mh.GetShorty()[i];
+    if (shorty_char == 'L') {
+      WorkAroundJniBugsForJobject(arg_ptr);
+    }
+    if (shorty_char == 'J' || shorty_char == 'D') {
+      if (reg_num == 2) {
+        arg_ptr = sp + 8;  // skip to out arguments
+        reg_num = -1;
+      } else if (reg_num == 3) {
+        arg_ptr = sp + 10;  // skip to out arguments plus 2 slots as long must be aligned
+        reg_num = -1;
+      } else {
+        DCHECK_EQ(reg_num, -1);
+        if ((reinterpret_cast<intptr_t>(arg_ptr) & 7) == 4) {
+          arg_ptr += 3;  // unaligned, pad and move through stack arguments
+        } else {
+          arg_ptr += 2;  // aligned, move through stack arguments
+        }
+      }
+    } else {
+      if (reg_num == 2) {
+        arg_ptr++;  // move through register arguments
+        reg_num++;
+      } else if (reg_num == 3) {
+        arg_ptr = sp + 8;  // skip to outgoing stack arguments
+        reg_num = -1;
+      } else {
+        DCHECK_EQ(reg_num, -1);
+        arg_ptr++;  // move through stack arguments
+      }
+    }
+  }
+  // Load expected destination, see Method::RegisterNative
+  const void* code = reinterpret_cast<const void*>(jni_method->GetNativeGcMap());
+  if (UNLIKELY(code == NULL)) {
+    code = GetJniDlsymLookupStub();
+    jni_method->RegisterNative(self, code);
+  }
+  return code;
+}
+
 }  // namespace art
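A worked pass through the rewriting loop above, for a native instance method whose shorty is "VLL" (two reference parameters), using the register-to-slot mapping from the stack-layout comment. Each rewritten word has its jobject handle (a mirror::Object**) replaced by the mirror::Object* it refers to, presumably the representation the affected app code expects:

    entry : the this/jclass word at sp+4 (saved R1) is rewritten, then the cursor moves to sp+5.
    i = 1 : 'L' rewrites the word at sp+5 (saved R2); the cursor advances to sp+6 (saved R3).
    i = 2 : 'L' rewrites the word at sp+6 (saved R3); the argument registers are exhausted,
            so the cursor jumps to sp+8, the caller's first outgoing stack argument.

Only 'L' entries are rewritten; 'J' and 'D' entries merely advance the cursor, with the extra alignment handling shown above once arguments have spilled to the stack.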
diff --git a/runtime/entrypoints/jni/jni_entrypoints.h b/runtime/entrypoints/jni/jni_entrypoints.h
new file mode 100644
index 0000000..0a53447
--- /dev/null
+++ b/runtime/entrypoints/jni/jni_entrypoints.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_ENTRYPOINTS_JNI_JNI_ENTRYPOINTS_H_
+#define ART_RUNTIME_ENTRYPOINTS_JNI_JNI_ENTRYPOINTS_H_
+
+#include "base/macros.h"
+#include "offsets.h"
+
+#define JNI_ENTRYPOINT_OFFSET(x) \
+    ThreadOffset(static_cast<uintptr_t>(OFFSETOF_MEMBER(Thread, jni_entrypoints_)) + \
+                 static_cast<uintptr_t>(OFFSETOF_MEMBER(JniEntryPoints, x)))
+
+namespace art {
+
+// Pointers to functions that are called by JNI trampolines via thread-local storage.
+struct PACKED(4) JniEntryPoints {
+  // Called when the JNI method isn't registered.
+  void* (*pDlsymLookup)(JNIEnv* env, jobject);
+};
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_ENTRYPOINTS_JNI_JNI_ENTRYPOINTS_H_
diff --git a/runtime/entrypoints/portable/portable_entrypoints.h b/runtime/entrypoints/portable/portable_entrypoints.h
index a229c76..ec9e4f8 100644
--- a/runtime/entrypoints/portable/portable_entrypoints.h
+++ b/runtime/entrypoints/portable/portable_entrypoints.h
@@ -28,15 +28,15 @@
 class Thread;
 
 #define PORTABLE_ENTRYPOINT_OFFSET(x) \
-    (static_cast<uintptr_t>(OFFSETOF_MEMBER(Thread, portable_entrypoints_)) + \
-        static_cast<uintptr_t>(OFFSETOF_MEMBER(PortableEntryPoints, x)))
+    ThreadOffset(static_cast<uintptr_t>(OFFSETOF_MEMBER(Thread, portable_entrypoints_)) + \
+                 static_cast<uintptr_t>(OFFSETOF_MEMBER(PortableEntryPoints, x)))
 
  // Pointers to functions that are called by code generated by compilers adhering to the portable
 // compiler ABI.
 struct PACKED(4) PortableEntryPoints {
   // Invocation
-  const void* (*pPortableResolutionTrampolineFromCode)(mirror::AbstractMethod*, mirror::Object*,
-                                                       mirror::AbstractMethod**, Thread*);
+  void (*pPortableResolutionTrampoline)(mirror::AbstractMethod*);
+  void (*pPortableToInterpreterBridge)(mirror::AbstractMethod*);
 };
 
 }  // namespace art
diff --git a/runtime/entrypoints/quick/quick_argument_visitor.h b/runtime/entrypoints/quick/quick_argument_visitor.h
deleted file mode 100644
index 35fa972..0000000
--- a/runtime/entrypoints/quick/quick_argument_visitor.h
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ART_RUNTIME_ENTRYPOINTS_QUICK_QUICK_ARGUMENT_VISITOR_H_
-#define ART_RUNTIME_ENTRYPOINTS_QUICK_QUICK_ARGUMENT_VISITOR_H_
-
-#include "object_utils.h"
-
-namespace art {
-
-// Visits the arguments as saved to the stack by a Runtime::kRefAndArgs callee save frame.
-class QuickArgumentVisitor {
- public:
-// Offset to first (not the Method*) argument in a Runtime::kRefAndArgs callee save frame.
-// Size of Runtime::kRefAndArgs callee save frame.
-// Size of Method* and register parameters in out stack arguments.
-#if defined(__arm__)
-#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 8
-#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 48
-#define QUICK_STACK_ARG_SKIP 16
-#elif defined(__mips__)
-#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 4
-#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 64
-#define QUICK_STACK_ARG_SKIP 16
-#elif defined(__i386__)
-#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 4
-#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 32
-#define QUICK_STACK_ARG_SKIP 16
-#else
-#error "Unsupported architecture"
-#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 0
-#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 0
-#define QUICK_STACK_ARG_SKIP 0
-#endif
-
-  QuickArgumentVisitor(MethodHelper& caller_mh, mirror::AbstractMethod** sp)
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
-    caller_mh_(caller_mh),
-    args_in_regs_(ComputeArgsInRegs(caller_mh)),
-    num_params_(caller_mh.NumArgs()),
-    reg_args_(reinterpret_cast<byte*>(sp) + QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET),
-    stack_args_(reinterpret_cast<byte*>(sp) + QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE
-                + QUICK_STACK_ARG_SKIP),
-    cur_args_(reg_args_),
-    cur_arg_index_(0),
-    param_index_(0),
-    is_split_long_or_double_(false) {
-  }
-
-  virtual ~QuickArgumentVisitor() {}
-
-  virtual void Visit() = 0;
-
-  bool IsParamAReference() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    return caller_mh_.IsParamAReference(param_index_);
-  }
-
-  bool IsParamALongOrDouble() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    return caller_mh_.IsParamALongOrDouble(param_index_);
-  }
-
-  Primitive::Type GetParamPrimitiveType() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    return caller_mh_.GetParamPrimitiveType(param_index_);
-  }
-
-  byte* GetParamAddress() const {
-    return cur_args_ + (cur_arg_index_ * kPointerSize);
-  }
-
-  bool IsSplitLongOrDouble() const {
-    return is_split_long_or_double_;
-  }
-
-  uint64_t ReadSplitLongParam() const {
-    DCHECK(IsSplitLongOrDouble());
-    uint64_t low_half = *reinterpret_cast<uint32_t*>(GetParamAddress());
-    uint64_t high_half = *reinterpret_cast<uint32_t*>(stack_args_);
-    return (low_half & 0xffffffffULL) | (high_half << 32);
-  }
-
-  void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    for (cur_arg_index_ = 0;  cur_arg_index_ < args_in_regs_ && param_index_ < num_params_; ) {
-      is_split_long_or_double_ = (cur_arg_index_ == 2) && IsParamALongOrDouble();
-      Visit();
-      cur_arg_index_ += (IsParamALongOrDouble() ? 2 : 1);
-      param_index_++;
-    }
-    cur_args_ = stack_args_;
-    cur_arg_index_ = is_split_long_or_double_ ? 1 : 0;
-    is_split_long_or_double_ = false;
-    while (param_index_ < num_params_) {
-      Visit();
-      cur_arg_index_ += (IsParamALongOrDouble() ? 2 : 1);
-      param_index_++;
-    }
-  }
-
- private:
-  static size_t ComputeArgsInRegs(MethodHelper& mh) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    size_t args_in_regs = 0;
-    size_t num_params = mh.NumArgs();
-    for (size_t i = 0; i < num_params; i++) {
-      args_in_regs = args_in_regs + (mh.IsParamALongOrDouble(i) ? 2 : 1);
-      if (args_in_regs > 3) {
-        args_in_regs = 3;
-        break;
-      }
-    }
-    return args_in_regs;
-  }
-  MethodHelper& caller_mh_;
-  const size_t args_in_regs_;
-  const size_t num_params_;
-  byte* const reg_args_;
-  byte* const stack_args_;
-  byte* cur_args_;
-  size_t cur_arg_index_;
-  size_t param_index_;
-  // Does a 64bit parameter straddle the register and stack arguments?
-  bool is_split_long_or_double_;
-};
-
-}  // namespace art
-
-#endif  // ART_RUNTIME_ENTRYPOINTS_QUICK_QUICK_ARGUMENT_VISITOR_H_
diff --git a/runtime/entrypoints/quick/quick_entrypoints.h b/runtime/entrypoints/quick/quick_entrypoints.h
index 74b8cfd..e76679b 100644
--- a/runtime/entrypoints/quick/quick_entrypoints.h
+++ b/runtime/entrypoints/quick/quick_entrypoints.h
@@ -17,44 +17,45 @@
 #ifndef ART_RUNTIME_ENTRYPOINTS_QUICK_QUICK_ENTRYPOINTS_H_
 #define ART_RUNTIME_ENTRYPOINTS_QUICK_QUICK_ENTRYPOINTS_H_
 
-#include "dex_file-inl.h"
-#include "runtime.h"
+#include <jni.h>
+
+#include "base/macros.h"
+#include "offsets.h"
 
 #define QUICK_ENTRYPOINT_OFFSET(x) \
-    (static_cast<uintptr_t>(OFFSETOF_MEMBER(Thread, quick_entrypoints_)) + \
-        static_cast<uintptr_t>(OFFSETOF_MEMBER(QuickEntryPoints, x)))
+    ThreadOffset(static_cast<uintptr_t>(OFFSETOF_MEMBER(Thread, quick_entrypoints_)) + \
+                 static_cast<uintptr_t>(OFFSETOF_MEMBER(QuickEntryPoints, x)))
 
 namespace art {
+
 namespace mirror {
   class AbstractMethod;
   class Class;
   class Object;
 }  // namespace mirror
-class DvmDex;
-class MethodHelper;
-class ShadowFrame;
+
 class Thread;
 
 // Pointers to functions that are called by quick compiler generated code via thread-local storage.
 struct PACKED(4) QuickEntryPoints {
   // Alloc
-  void* (*pAllocArrayFromCode)(uint32_t, void*, int32_t);
-  void* (*pAllocArrayFromCodeWithAccessCheck)(uint32_t, void*, int32_t);
-  void* (*pAllocObjectFromCode)(uint32_t, void*);
-  void* (*pAllocObjectFromCodeWithAccessCheck)(uint32_t, void*);
-  void* (*pCheckAndAllocArrayFromCode)(uint32_t, void*, int32_t);
-  void* (*pCheckAndAllocArrayFromCodeWithAccessCheck)(uint32_t, void*, int32_t);
+  void* (*pAllocArray)(uint32_t, void*, int32_t);
+  void* (*pAllocArrayWithAccessCheck)(uint32_t, void*, int32_t);
+  void* (*pAllocObject)(uint32_t, void*);
+  void* (*pAllocObjectWithAccessCheck)(uint32_t, void*);
+  void* (*pCheckAndAllocArray)(uint32_t, void*, int32_t);
+  void* (*pCheckAndAllocArrayWithAccessCheck)(uint32_t, void*, int32_t);
 
   // Cast
-  uint32_t (*pInstanceofNonTrivialFromCode)(const mirror::Class*, const mirror::Class*);
-  void (*pCanPutArrayElementFromCode)(void*, void*);
-  void (*pCheckCastFromCode)(void*, void*);
+  uint32_t (*pInstanceofNonTrivial)(const mirror::Class*, const mirror::Class*);
+  void (*pCanPutArrayElement)(void*, void*);
+  void (*pCheckCast)(void*, void*);
 
   // DexCache
   void* (*pInitializeStaticStorage)(uint32_t, void*);
-  void* (*pInitializeTypeAndVerifyAccessFromCode)(uint32_t, void*);
-  void* (*pInitializeTypeFromCode)(uint32_t, void*);
-  void* (*pResolveStringFromCode)(void*, uint32_t);
+  void* (*pInitializeTypeAndVerifyAccess)(uint32_t, void*);
+  void* (*pInitializeType)(uint32_t, void*);
+  void* (*pResolveString)(void*, uint32_t);
 
   // Field
   int (*pSet32Instance)(uint32_t, void*, int32_t);  // field_idx, obj, src
@@ -71,7 +72,7 @@
   void* (*pGetObjStatic)(uint32_t);
 
   // FillArray
-  void (*pHandleFillArrayDataFromCode)(void*, void*);
+  void (*pHandleFillArrayData)(void*, void*);
 
   // JNI
   uint32_t (*pJniMethodStart)(Thread*);
@@ -83,8 +84,8 @@
                                                     jobject locked, Thread* self);
 
   // Locks
-  void (*pLockObjectFromCode)(void*);
-  void (*pUnlockObjectFromCode)(void*);
+  void (*pLockObject)(void*);
+  void (*pUnlockObject)(void*);
 
   // Math
   int32_t (*pCmpgDouble)(double, double);
@@ -108,14 +109,6 @@
   uint64_t (*pShrLong)(uint64_t, uint32_t);
   uint64_t (*pUshrLong)(uint64_t, uint32_t);
 
-  // Interpreter
-  void (*pInterpreterToInterpreterEntry)(Thread* self, MethodHelper& mh,
-                                         const DexFile::CodeItem* code_item,
-                                         ShadowFrame* shadow_frame, JValue* result);
-  void (*pInterpreterToQuickEntry)(Thread* self, MethodHelper& mh,
-                                   const DexFile::CodeItem* code_item,
-                                   ShadowFrame* shadow_frame, JValue* result);
-
   // Intrinsics
   int32_t (*pIndexOf)(void*, uint32_t, uint32_t, uint32_t);
   int32_t (*pMemcmp16)(void*, void*, int32_t);
@@ -123,8 +116,8 @@
   void* (*pMemcpy)(void*, const void*, size_t);
 
   // Invocation
-  const void* (*pQuickResolutionTrampolineFromCode)(mirror::AbstractMethod*, mirror::Object*,
-                                                    mirror::AbstractMethod**, Thread*);
+  void (*pQuickResolutionTrampoline)(mirror::AbstractMethod*);
+  void (*pQuickToInterpreterBridge)(mirror::AbstractMethod*);
   void (*pInvokeDirectTrampolineWithAccessCheck)(uint32_t, void*);
   void (*pInvokeInterfaceTrampoline)(uint32_t, void*);
   void (*pInvokeInterfaceTrampolineWithAccessCheck)(uint32_t, void*);
@@ -133,22 +126,21 @@
   void (*pInvokeVirtualTrampolineWithAccessCheck)(uint32_t, void*);
 
   // Thread
-  void (*pCheckSuspendFromCode)(Thread*);  // Stub that is called when the suspend count is non-zero
-  void (*pTestSuspendFromCode)();  // Stub that is periodically called to test the suspend count
+  void (*pCheckSuspend)(Thread*);  // Stub that is called when the suspend count is non-zero
+  void (*pTestSuspend)();  // Stub that is periodically called to test the suspend count
 
   // Throws
   void (*pDeliverException)(void*);
-  void (*pThrowArrayBoundsFromCode)(int32_t, int32_t);
-  void (*pThrowDivZeroFromCode)();
-  void (*pThrowNoSuchMethodFromCode)(int32_t);
-  void (*pThrowNullPointerFromCode)();
-  void (*pThrowStackOverflowFromCode)(void*);
+  void (*pThrowArrayBounds)(int32_t, int32_t);
+  void (*pThrowDivZero)();
+  void (*pThrowNoSuchMethod)(int32_t);
+  void (*pThrowNullPointer)();
+  void (*pThrowStackOverflow)(void*);
 };
 
 
 // JNI entrypoints.
-extern uint32_t JniMethodStart(Thread* self)
-    UNLOCK_FUNCTION(Locks::mutator_lock_) HOT_ATTR;
+extern uint32_t JniMethodStart(Thread* self) UNLOCK_FUNCTION(Locks::mutator_lock_) HOT_ATTR;
 extern uint32_t JniMethodStartSynchronized(jobject to_lock, Thread* self)
     UNLOCK_FUNCTION(Locks::mutator_lock_) HOT_ATTR;
 extern void JniMethodEnd(uint32_t saved_local_ref_cookie, Thread* self)
diff --git a/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc b/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc
index 7ecd296..0e61942 100644
--- a/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_instrumentation_entrypoints.cc
@@ -32,7 +32,7 @@
   FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
   instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
   const void* result = instrumentation->GetQuickCodeFor(method);
-  bool interpreter_entry = (result == GetInterpreterEntryPoint());
+  bool interpreter_entry = (result == GetQuickToInterpreterBridge());
   instrumentation->PushInstrumentationStackFrame(self, method->IsStatic() ? NULL : this_object,
                                                  method, lr, interpreter_entry);
   CHECK(result != NULL) << PrettyMethod(method);
diff --git a/runtime/entrypoints/quick/quick_interpreter_entrypoints.cc b/runtime/entrypoints/quick/quick_interpreter_entrypoints.cc
deleted file mode 100644
index 656df8d..0000000
--- a/runtime/entrypoints/quick/quick_interpreter_entrypoints.cc
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "quick_argument_visitor.h"
-#include "callee_save_frame.h"
-#include "dex_file-inl.h"
-#include "interpreter/interpreter.h"
-#include "invoke_arg_array_builder.h"
-#include "mirror/abstract_method-inl.h"
-#include "mirror/class-inl.h"
-#include "mirror/object-inl.h"
-#include "mirror/object_array-inl.h"
-#include "object_utils.h"
-
-namespace art {
-
-// Visits arguments on the stack placing them into the shadow frame.
-class BuildShadowFrameVisitor : public QuickArgumentVisitor {
- public:
-  BuildShadowFrameVisitor(MethodHelper& caller_mh, mirror::AbstractMethod** sp,
-                          ShadowFrame& sf, size_t first_arg_reg) :
-    QuickArgumentVisitor(caller_mh, sp), sf_(sf), cur_reg_(first_arg_reg) {}
-
-  virtual void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    Primitive::Type type = GetParamPrimitiveType();
-    switch (type) {
-      case Primitive::kPrimLong:  // Fall-through.
-      case Primitive::kPrimDouble:
-        if (IsSplitLongOrDouble()) {
-          sf_.SetVRegLong(cur_reg_, ReadSplitLongParam());
-        } else {
-          sf_.SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
-        }
-        ++cur_reg_;
-        break;
-      case Primitive::kPrimNot:
-        sf_.SetVRegReference(cur_reg_, *reinterpret_cast<mirror::Object**>(GetParamAddress()));
-        break;
-      case Primitive::kPrimBoolean:  // Fall-through.
-      case Primitive::kPrimByte:     // Fall-through.
-      case Primitive::kPrimChar:     // Fall-through.
-      case Primitive::kPrimShort:    // Fall-through.
-      case Primitive::kPrimInt:      // Fall-through.
-      case Primitive::kPrimFloat:
-        sf_.SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
-        break;
-      case Primitive::kPrimVoid:
-        LOG(FATAL) << "UNREACHABLE";
-        break;
-    }
-    ++cur_reg_;
-  }
-
- private:
-  ShadowFrame& sf_;
-  size_t cur_reg_;
-
-  DISALLOW_COPY_AND_ASSIGN(BuildShadowFrameVisitor);
-};
-
-extern "C" uint64_t artInterpreterEntry(mirror::AbstractMethod* method, Thread* self,
-                                        mirror::AbstractMethod** sp)
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
-  // frame.
-  const char* old_cause = self->StartAssertNoThreadSuspension("Building interpreter shadow frame");
-  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
-
-  MethodHelper mh(method);
-  const DexFile::CodeItem* code_item = mh.GetCodeItem();
-  uint16_t num_regs = code_item->registers_size_;
-  void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
-  ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, NULL,  // No last shadow coming from quick.
-                                                method, 0, memory));
-  size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
-  BuildShadowFrameVisitor shadow_frame_builder(mh, sp, *shadow_frame, first_arg_reg);
-  shadow_frame_builder.VisitArguments();
-  // Push a transition back into managed code onto the linked list in thread.
-  ManagedStack fragment;
-  self->PushManagedStackFragment(&fragment);
-  self->PushShadowFrame(shadow_frame);
-  self->EndAssertNoThreadSuspension(old_cause);
-
-  if (method->IsStatic() && !method->GetDeclaringClass()->IsInitializing()) {
-    // Ensure static method's class is initialized.
-    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(method->GetDeclaringClass(),
-                                                                 true, true)) {
-      DCHECK(Thread::Current()->IsExceptionPending());
-      self->PopManagedStackFragment(fragment);
-      return 0;
-    }
-  }
-
-  JValue result = interpreter::EnterInterpreterFromStub(self, mh, code_item, *shadow_frame);
-  // Pop transition.
-  self->PopManagedStackFragment(fragment);
-  return result.GetJ();
-}
-
-extern "C" void artInterpreterToQuickEntry(Thread* self, MethodHelper& mh,
-                                           const DexFile::CodeItem* code_item,
-                                           ShadowFrame* shadow_frame, JValue* result)
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-  mirror::AbstractMethod* method = shadow_frame->GetMethod();
-  // Ensure static methods are initialized.
-  if (method->IsStatic()) {
-    Runtime::Current()->GetClassLinker()->EnsureInitialized(method->GetDeclaringClass(), true, true);
-  }
-  uint16_t arg_offset = (code_item == NULL) ? 0 : code_item->registers_size_ - code_item->ins_size_;
-  ArgArray arg_array(mh.GetShorty(), mh.GetShortyLength());
-  arg_array.BuildArgArray(shadow_frame, arg_offset);
-  method->Invoke(self, arg_array.GetArray(), arg_array.GetNumBytes(), result, mh.GetShorty()[0]);
-}
-
-}  // namespace art
diff --git a/runtime/entrypoints/quick/quick_jni_entrypoints.cc b/runtime/entrypoints/quick/quick_jni_entrypoints.cc
index 23a28f9..9907c04 100644
--- a/runtime/entrypoints/quick/quick_jni_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_jni_entrypoints.cc
@@ -94,78 +94,4 @@
   return o;
 }
 
-static void WorkAroundJniBugsForJobject(intptr_t* arg_ptr) {
-  intptr_t value = *arg_ptr;
-  mirror::Object** value_as_jni_rep = reinterpret_cast<mirror::Object**>(value);
-  mirror::Object* value_as_work_around_rep = value_as_jni_rep != NULL ? *value_as_jni_rep : NULL;
-  CHECK(Runtime::Current()->GetHeap()->IsHeapAddress(value_as_work_around_rep))
-      << value_as_work_around_rep;
-  *arg_ptr = reinterpret_cast<intptr_t>(value_as_work_around_rep);
-}
-
-extern "C" const void* artWorkAroundAppJniBugs(Thread* self, intptr_t* sp)
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-  DCHECK(Thread::Current() == self);
-  // TODO: this code is specific to ARM
-  // On entry the stack pointed by sp is:
-  // | arg3   | <- Calling JNI method's frame (and extra bit for out args)
-  // | LR     |
-  // | R3     |    arg2
-  // | R2     |    arg1
-  // | R1     |    jclass/jobject
-  // | R0     |    JNIEnv
-  // | unused |
-  // | unused |
-  // | unused | <- sp
-  mirror::AbstractMethod* jni_method = self->GetCurrentMethod(NULL);
-  DCHECK(jni_method->IsNative()) << PrettyMethod(jni_method);
-  intptr_t* arg_ptr = sp + 4;  // pointer to r1 on stack
-  // Fix up this/jclass argument
-  WorkAroundJniBugsForJobject(arg_ptr);
-  arg_ptr++;
-  // Fix up jobject arguments
-  MethodHelper mh(jni_method);
-  int reg_num = 2;  // Current register being processed, -1 for stack arguments.
-  for (uint32_t i = 1; i < mh.GetShortyLength(); i++) {
-    char shorty_char = mh.GetShorty()[i];
-    if (shorty_char == 'L') {
-      WorkAroundJniBugsForJobject(arg_ptr);
-    }
-    if (shorty_char == 'J' || shorty_char == 'D') {
-      if (reg_num == 2) {
-        arg_ptr = sp + 8;  // skip to out arguments
-        reg_num = -1;
-      } else if (reg_num == 3) {
-        arg_ptr = sp + 10;  // skip to out arguments plus 2 slots as long must be aligned
-        reg_num = -1;
-      } else {
-        DCHECK_EQ(reg_num, -1);
-        if ((reinterpret_cast<intptr_t>(arg_ptr) & 7) == 4) {
-          arg_ptr += 3;  // unaligned, pad and move through stack arguments
-        } else {
-          arg_ptr += 2;  // aligned, move through stack arguments
-        }
-      }
-    } else {
-      if (reg_num == 2) {
-        arg_ptr++;  // move through register arguments
-        reg_num++;
-      } else if (reg_num == 3) {
-        arg_ptr = sp + 8;  // skip to outgoing stack arguments
-        reg_num = -1;
-      } else {
-        DCHECK_EQ(reg_num, -1);
-        arg_ptr++;  // move through stack arguments
-      }
-    }
-  }
-  // Load expected destination, see Method::RegisterNative
-  const void* code = reinterpret_cast<const void*>(jni_method->GetNativeGcMap());
-  if (UNLIKELY(code == NULL)) {
-    code = GetJniDlsymLookupStub();
-    jni_method->RegisterNative(self, code);
-  }
-  return code;
-}
-
 }  // namespace art
diff --git a/runtime/entrypoints/quick/quick_proxy_entrypoints.cc b/runtime/entrypoints/quick/quick_proxy_entrypoints.cc
deleted file mode 100644
index 4e3d749..0000000
--- a/runtime/entrypoints/quick/quick_proxy_entrypoints.cc
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "quick_argument_visitor.h"
-#include "dex_file-inl.h"
-#include "entrypoints/entrypoint_utils.h"
-#include "mirror/abstract_method-inl.h"
-#include "mirror/object_array-inl.h"
-#include "mirror/object-inl.h"
-#include "object_utils.h"
-#include "reflection.h"
-#include "scoped_thread_state_change.h"
-#include "thread.h"
-#include "well_known_classes.h"
-
-#include "ScopedLocalRef.h"
-
-namespace art {
-
-// Visits arguments on the stack placing them into the args vector, Object* arguments are converted
-// to jobjects.
-class BuildQuickArgumentVisitor : public QuickArgumentVisitor {
- public:
-  BuildQuickArgumentVisitor(MethodHelper& caller_mh, mirror::AbstractMethod** sp,
-                            ScopedObjectAccessUnchecked& soa, std::vector<jvalue>& args) :
-    QuickArgumentVisitor(caller_mh, sp), soa_(soa), args_(args) {}
-
-  virtual void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    jvalue val;
-    Primitive::Type type = GetParamPrimitiveType();
-    switch (type) {
-      case Primitive::kPrimNot: {
-        mirror::Object* obj = *reinterpret_cast<mirror::Object**>(GetParamAddress());
-        val.l = soa_.AddLocalReference<jobject>(obj);
-        break;
-      }
-      case Primitive::kPrimLong:  // Fall-through.
-      case Primitive::kPrimDouble:
-        if (IsSplitLongOrDouble()) {
-          val.j = ReadSplitLongParam();
-        } else {
-          val.j = *reinterpret_cast<jlong*>(GetParamAddress());
-        }
-        break;
-      case Primitive::kPrimBoolean:  // Fall-through.
-      case Primitive::kPrimByte:     // Fall-through.
-      case Primitive::kPrimChar:     // Fall-through.
-      case Primitive::kPrimShort:    // Fall-through.
-      case Primitive::kPrimInt:      // Fall-through.
-      case Primitive::kPrimFloat:
-        val.i =  *reinterpret_cast<jint*>(GetParamAddress());
-        break;
-      case Primitive::kPrimVoid:
-        LOG(FATAL) << "UNREACHABLE";
-        val.j = 0;
-        break;
-    }
-    args_.push_back(val);
-  }
-
- private:
-  ScopedObjectAccessUnchecked& soa_;
-  std::vector<jvalue>& args_;
-
-  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
-};
-
-// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
-// which is responsible for recording callee save registers. We explicitly place into jobjects the
-// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
-// field within the proxy object, which will box the primitive arguments and deal with error cases.
-extern "C" uint64_t artQuickProxyInvokeHandler(mirror::AbstractMethod* proxy_method,
-                                          mirror::Object* receiver,
-                                          Thread* self, mirror::AbstractMethod** sp)
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
-  const char* old_cause =
-      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
-  // Register the top of the managed stack, making stack crawlable.
-  DCHECK_EQ(*sp, proxy_method);
-  self->SetTopOfStack(sp, 0);
-  DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
-            Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
-  self->VerifyStack();
-  // Start new JNI local reference state.
-  JNIEnvExt* env = self->GetJniEnv();
-  ScopedObjectAccessUnchecked soa(env);
-  ScopedJniEnvLocalRefState env_state(env);
-  // Create local ref. copies of proxy method and the receiver.
-  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);
-
-  // Placing arguments into args vector and remove the receiver.
-  MethodHelper proxy_mh(proxy_method);
-  std::vector<jvalue> args;
-  BuildQuickArgumentVisitor local_ref_visitor(proxy_mh, sp, soa, args);
-  local_ref_visitor.VisitArguments();
-  args.erase(args.begin());
-
-  // Convert proxy method into expected interface method.
-  mirror::AbstractMethod* interface_method = proxy_method->FindOverriddenMethod();
-  DCHECK(interface_method != NULL);
-  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
-  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);
-
-  // All naked Object*s should now be in jobjects, so its safe to go into the main invoke code
-  // that performs allocations.
-  self->EndAssertNoThreadSuspension(old_cause);
-  JValue result = InvokeProxyInvocationHandler(soa, proxy_mh.GetShorty(),
-                                               rcvr_jobj, interface_method_jobj, args);
-  return result.GetJ();
-}
-
-}  // namespace art
diff --git a/runtime/entrypoints/quick/quick_stub_entrypoints.cc b/runtime/entrypoints/quick/quick_stub_entrypoints.cc
deleted file mode 100644
index d78bbf3..0000000
--- a/runtime/entrypoints/quick/quick_stub_entrypoints.cc
+++ /dev/null
@@ -1,295 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "callee_save_frame.h"
-#include "class_linker-inl.h"
-#include "dex_file-inl.h"
-#include "dex_instruction-inl.h"
-#include "mirror/class-inl.h"
-#include "mirror/abstract_method-inl.h"
-#include "mirror/object_array-inl.h"
-#include "mirror/object-inl.h"
-#include "object_utils.h"
-#include "scoped_thread_state_change.h"
-
-// Architecture specific assembler helper to deliver exception.
-extern "C" void art_quick_deliver_exception_from_code(void*);
-
-namespace art {
-
-// Lazily resolve a method for quick. Called by stub code.
-extern "C" const void* artQuickResolutionTrampoline(mirror::AbstractMethod* called,
-                                                    mirror::Object* receiver,
-                                                    mirror::AbstractMethod** sp, Thread* thread)
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-#if defined(__arm__)
-  // On entry the stack pointed by sp is:
-  // | argN       |  |
-  // | ...        |  |
-  // | arg4       |  |
-  // | arg3 spill |  |  Caller's frame
-  // | arg2 spill |  |
-  // | arg1 spill |  |
-  // | Method*    | ---
-  // | LR         |
-  // | ...        |    callee saves
-  // | R3         |    arg3
-  // | R2         |    arg2
-  // | R1         |    arg1
-  // | R0         |
-  // | Method*    |  <- sp
-  DCHECK_EQ(48U, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
-  mirror::AbstractMethod** caller_sp = reinterpret_cast<mirror::AbstractMethod**>(reinterpret_cast<byte*>(sp) + 48);
-  uintptr_t* regs = reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp) + kPointerSize);
-  uint32_t pc_offset = 10;
-  uintptr_t caller_pc = regs[pc_offset];
-#elif defined(__i386__)
-  // On entry the stack pointed by sp is:
-  // | argN        |  |
-  // | ...         |  |
-  // | arg4        |  |
-  // | arg3 spill  |  |  Caller's frame
-  // | arg2 spill  |  |
-  // | arg1 spill  |  |
-  // | Method*     | ---
-  // | Return      |
-  // | EBP,ESI,EDI |    callee saves
-  // | EBX         |    arg3
-  // | EDX         |    arg2
-  // | ECX         |    arg1
-  // | EAX/Method* |  <- sp
-  DCHECK_EQ(32U, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
-  mirror::AbstractMethod** caller_sp = reinterpret_cast<mirror::AbstractMethod**>(reinterpret_cast<byte*>(sp) + 32);
-  uintptr_t* regs = reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp));
-  uintptr_t caller_pc = regs[7];
-#elif defined(__mips__)
-  // On entry the stack pointed by sp is:
-  // | argN       |  |
-  // | ...        |  |
-  // | arg4       |  |
-  // | arg3 spill |  |  Caller's frame
-  // | arg2 spill |  |
-  // | arg1 spill |  |
-  // | Method*    | ---
-  // | RA         |
-  // | ...        |    callee saves
-  // | A3         |    arg3
-  // | A2         |    arg2
-  // | A1         |    arg1
-  // | A0/Method* |  <- sp
-  DCHECK_EQ(64U, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
-  mirror::AbstractMethod** caller_sp = reinterpret_cast<mirror::AbstractMethod**>(reinterpret_cast<byte*>(sp) + 64);
-  uintptr_t* regs = reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp));
-  uint32_t pc_offset = 15;
-  uintptr_t caller_pc = regs[pc_offset];
-#else
-  UNIMPLEMENTED(FATAL);
-  mirror::AbstractMethod** caller_sp = NULL;
-  uintptr_t* regs = NULL;
-  uintptr_t caller_pc = 0;
-#endif
-  FinishCalleeSaveFrameSetup(thread, sp, Runtime::kRefsAndArgs);
-  // Start new JNI local reference state
-  JNIEnvExt* env = thread->GetJniEnv();
-  ScopedObjectAccessUnchecked soa(env);
-  ScopedJniEnvLocalRefState env_state(env);
-
-  // Compute details about the called method (avoid GCs)
-  ClassLinker* linker = Runtime::Current()->GetClassLinker();
-  mirror::AbstractMethod* caller = *caller_sp;
-  InvokeType invoke_type;
-  uint32_t dex_method_idx;
-#if !defined(__i386__)
-  const char* shorty;
-  uint32_t shorty_len;
-#endif
-  if (called->IsRuntimeMethod()) {
-    uint32_t dex_pc = caller->ToDexPc(caller_pc);
-    const DexFile::CodeItem* code = MethodHelper(caller).GetCodeItem();
-    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
-    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
-    Instruction::Code instr_code = instr->Opcode();
-    bool is_range;
-    switch (instr_code) {
-      case Instruction::INVOKE_DIRECT:
-        invoke_type = kDirect;
-        is_range = false;
-        break;
-      case Instruction::INVOKE_DIRECT_RANGE:
-        invoke_type = kDirect;
-        is_range = true;
-        break;
-      case Instruction::INVOKE_STATIC:
-        invoke_type = kStatic;
-        is_range = false;
-        break;
-      case Instruction::INVOKE_STATIC_RANGE:
-        invoke_type = kStatic;
-        is_range = true;
-        break;
-      case Instruction::INVOKE_SUPER:
-        invoke_type = kSuper;
-        is_range = false;
-        break;
-      case Instruction::INVOKE_SUPER_RANGE:
-        invoke_type = kSuper;
-        is_range = true;
-        break;
-      case Instruction::INVOKE_VIRTUAL:
-        invoke_type = kVirtual;
-        is_range = false;
-        break;
-      case Instruction::INVOKE_VIRTUAL_RANGE:
-        invoke_type = kVirtual;
-        is_range = true;
-        break;
-      case Instruction::INVOKE_INTERFACE:
-        invoke_type = kInterface;
-        is_range = false;
-        break;
-      case Instruction::INVOKE_INTERFACE_RANGE:
-        invoke_type = kInterface;
-        is_range = true;
-        break;
-      default:
-        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(NULL);
-        // Avoid used uninitialized warnings.
-        invoke_type = kDirect;
-        is_range = false;
-    }
-    dex_method_idx = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
-#if !defined(__i386__)
-    shorty = linker->MethodShorty(dex_method_idx, caller, &shorty_len);
-#endif
-  } else {
-    invoke_type = kStatic;
-    dex_method_idx = called->GetDexMethodIndex();
-#if !defined(__i386__)
-    MethodHelper mh(called);
-    shorty = mh.GetShorty();
-    shorty_len = mh.GetShortyLength();
-#endif
-  }
-#if !defined(__i386__)
-  // Discover shorty (avoid GCs)
-  size_t args_in_regs = 0;
-  for (size_t i = 1; i < shorty_len; i++) {
-    char c = shorty[i];
-    args_in_regs = args_in_regs + (c == 'J' || c == 'D' ? 2 : 1);
-    if (args_in_regs > 3) {
-      args_in_regs = 3;
-      break;
-    }
-  }
-  // Place into local references incoming arguments from the caller's register arguments
-  size_t cur_arg = 1;   // skip method_idx in R0, first arg is in R1
-  if (invoke_type != kStatic) {
-    mirror::Object* obj = reinterpret_cast<mirror::Object*>(regs[cur_arg]);
-    cur_arg++;
-    if (args_in_regs < 3) {
-      // If we thought we had fewer than 3 arguments in registers, account for the receiver
-      args_in_regs++;
-    }
-    soa.AddLocalReference<jobject>(obj);
-  }
-  size_t shorty_index = 1;  // skip return value
-  // Iterate while arguments and arguments in registers (less 1 from cur_arg which is offset to skip
-  // R0)
-  while ((cur_arg - 1) < args_in_regs && shorty_index < shorty_len) {
-    char c = shorty[shorty_index];
-    shorty_index++;
-    if (c == 'L') {
-      mirror::Object* obj = reinterpret_cast<mirror::Object*>(regs[cur_arg]);
-      soa.AddLocalReference<jobject>(obj);
-    }
-    cur_arg = cur_arg + (c == 'J' || c == 'D' ? 2 : 1);
-  }
-  // Place into local references incoming arguments from the caller's stack arguments
-  cur_arg += pc_offset + 1;  // skip LR/RA, Method* and spills for R1-R3/A1-A3 and callee saves
-  while (shorty_index < shorty_len) {
-    char c = shorty[shorty_index];
-    shorty_index++;
-    if (c == 'L') {
-      mirror::Object* obj = reinterpret_cast<mirror::Object*>(regs[cur_arg]);
-      soa.AddLocalReference<jobject>(obj);
-    }
-    cur_arg = cur_arg + (c == 'J' || c == 'D' ? 2 : 1);
-  }
-#endif
-  // Resolve method filling in dex cache
-  if (called->IsRuntimeMethod()) {
-    called = linker->ResolveMethod(dex_method_idx, caller, invoke_type);
-  }
-  const void* code = NULL;
-  if (LIKELY(!thread->IsExceptionPending())) {
-    // Incompatible class change should have been handled in resolve method.
-    CHECK(!called->CheckIncompatibleClassChange(invoke_type));
-    // Refine called method based on receiver.
-    if (invoke_type == kVirtual) {
-      called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
-    } else if (invoke_type == kInterface) {
-      called = receiver->GetClass()->FindVirtualMethodForInterface(called);
-    }
-    // Ensure that the called method's class is initialized.
-    mirror::Class* called_class = called->GetDeclaringClass();
-    linker->EnsureInitialized(called_class, true, true);
-    if (LIKELY(called_class->IsInitialized())) {
-      code = called->GetEntryPointFromCompiledCode();
-    } else if (called_class->IsInitializing()) {
-      if (invoke_type == kStatic) {
-        // Class is still initializing, go to oat and grab code (trampoline must be left in place
-        // until class is initialized to stop races between threads).
-        code = linker->GetOatCodeFor(called);
-      } else {
-        // No trampoline for non-static methods.
-        code = called->GetEntryPointFromCompiledCode();
-      }
-    } else {
-      DCHECK(called_class->IsErroneous());
-    }
-  }
-  if (UNLIKELY(code == NULL)) {
-    // Something went wrong in ResolveMethod or EnsureInitialized,
-    // go into deliver exception with the pending exception in r0
-    CHECK(thread->IsExceptionPending());
-    code = reinterpret_cast<void*>(art_quick_deliver_exception_from_code);
-    regs[0] = reinterpret_cast<uintptr_t>(thread->GetException(NULL));
-    thread->ClearException();
-  } else {
-    // Expect class to at least be initializing.
-    DCHECK(called->GetDeclaringClass()->IsInitializing());
-    // Don't want infinite recursion.
-    DCHECK(code != GetResolutionTrampoline(linker));
-    // Set up entry into main method
-    regs[0] = reinterpret_cast<uintptr_t>(called);
-  }
-  return code;
-}
-
-// Called by the abstract method error stub.
-extern "C" void artThrowAbstractMethodErrorFromCode(mirror::AbstractMethod* method, Thread* self,
-                                                    mirror::AbstractMethod** sp)
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-#if !defined(ART_USE_PORTABLE_COMPILER)
-  FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll);
-#else
-  UNUSED(sp);
-#endif
-  ThrowAbstractMethodError(method);
-  self->QuickDeliverException();
-}
-
-}  // namespace art
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
new file mode 100644
index 0000000..9bf02e8
--- /dev/null
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -0,0 +1,558 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "callee_save_frame.h"
+#include "dex_file-inl.h"
+#include "dex_instruction-inl.h"
+#include "interpreter/interpreter.h"
+#include "invoke_arg_array_builder.h"
+#include "mirror/abstract_method-inl.h"
+#include "mirror/class-inl.h"
+#include "mirror/object-inl.h"
+#include "mirror/object_array-inl.h"
+#include "object_utils.h"
+#include "runtime.h"
+
+namespace art {
+
+// Visits the arguments as saved to the stack by a Runtime::kRefsAndArgs callee save frame.
+class QuickArgumentVisitor {
+ public:
+// Offset to the first (i.e. not the Method*) argument in a Runtime::kRefsAndArgs callee save frame.
+// Size of a Runtime::kRefsAndArgs callee save frame.
+// Size of Method* and register parameters in out stack arguments.
+#if defined(__arm__)
+  // The callee save frame is pointed to by SP.
+  // | argN       |  |
+  // | ...        |  |
+  // | arg4       |  |
+  // | arg3 spill |  |  Caller's frame
+  // | arg2 spill |  |
+  // | arg1 spill |  |
+  // | Method*    | ---
+  // | LR         |
+  // | ...        |    callee saves
+  // | R3         |    arg3
+  // | R2         |    arg2
+  // | R1         |    arg1
+  // | R0         |
+  // | Method*    |  <- sp
+#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 8
+#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__LR_OFFSET 44
+#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 48
+#define QUICK_STACK_ARG_SKIP 16
+#elif defined(__mips__)
+  // The callee save frame is pointed to by SP.
+  // | argN       |  |
+  // | ...        |  |
+  // | arg4       |  |
+  // | arg3 spill |  |  Caller's frame
+  // | arg2 spill |  |
+  // | arg1 spill |  |
+  // | Method*    | ---
+  // | RA         |
+  // | ...        |    callee saves
+  // | A3         |    arg3
+  // | A2         |    arg2
+  // | A1         |    arg1
+  // | A0/Method* |  <- sp
+#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 4
+#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__LR_OFFSET 60
+#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 64
+#define QUICK_STACK_ARG_SKIP 16
+#elif defined(__i386__)
+  // The callee save frame is pointed to by SP.
+  // | argN        |  |
+  // | ...         |  |
+  // | arg4        |  |
+  // | arg3 spill  |  |  Caller's frame
+  // | arg2 spill  |  |
+  // | arg1 spill  |  |
+  // | Method*     | ---
+  // | Return      |
+  // | EBP,ESI,EDI |    callee saves
+  // | EBX         |    arg3
+  // | EDX         |    arg2
+  // | ECX         |    arg1
+  // | EAX/Method* |  <- sp
+#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 4
+#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__LR_OFFSET 28
+#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 32
+#define QUICK_STACK_ARG_SKIP 16
+#else
+#error "Unsupported architecture"
+#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET 0
+#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__LR_OFFSET 0
+#define QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE 0
+#define QUICK_STACK_ARG_SKIP 0
+#endif
+
+  static mirror::AbstractMethod* GetCallingMethod(mirror::AbstractMethod** sp) {
+    byte* previous_sp = reinterpret_cast<byte*>(sp) +
+        QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE;
+    return *reinterpret_cast<mirror::AbstractMethod**>(previous_sp);
+  }
+
+  static uintptr_t GetCallingPc(mirror::AbstractMethod** sp) {
+    byte* lr = reinterpret_cast<byte*>(sp) + QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__LR_OFFSET;
+    return *reinterpret_cast<uintptr_t*>(lr);
+  }
+
+  QuickArgumentVisitor(mirror::AbstractMethod** sp, bool is_static,
+                       const char* shorty, uint32_t shorty_len)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
+    is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
+    args_in_regs_(ComputeArgsInRegs(is_static, shorty, shorty_len)),
+    num_params_((is_static ? 0 : 1) + shorty_len - 1),  // +1 for this, -1 for return type
+    reg_args_(reinterpret_cast<byte*>(sp) + QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__R1_OFFSET),
+    stack_args_(reinterpret_cast<byte*>(sp) + QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE
+                + QUICK_STACK_ARG_SKIP),
+    cur_args_(reg_args_),
+    cur_arg_index_(0),
+    param_index_(0),
+    is_split_long_or_double_(false) {
+    DCHECK_EQ(static_cast<size_t>(QUICK_CALLEE_SAVE_FRAME__REF_AND_ARGS__FRAME_SIZE),
+              Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
+  }
+
+  virtual ~QuickArgumentVisitor() {}
+
+  virtual void Visit() = 0;
+
+  Primitive::Type GetParamPrimitiveType() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    size_t index = param_index_;
+    if (is_static_) {
+      index++;  // 0th argument must skip return value at start of the shorty
+    } else if (index == 0) {
+      return Primitive::kPrimNot;
+    }
+    CHECK_LT(index, shorty_len_);
+    return Primitive::GetType(shorty_[index]);
+  }
+
+  byte* GetParamAddress() const {
+    return cur_args_ + (cur_arg_index_ * kPointerSize);
+  }
+
+  bool IsSplitLongOrDouble() const {
+    return is_split_long_or_double_;
+  }
+
+  bool IsParamAReference() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    return GetParamPrimitiveType() == Primitive::kPrimNot;
+  }
+
+  bool IsParamALongOrDouble() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    Primitive::Type type = GetParamPrimitiveType();
+    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
+  }
+
+  uint64_t ReadSplitLongParam() const {
+    DCHECK(IsSplitLongOrDouble());
+    uint64_t low_half = *reinterpret_cast<uint32_t*>(GetParamAddress());
+    uint64_t high_half = *reinterpret_cast<uint32_t*>(stack_args_);
+    return (low_half & 0xffffffffULL) | (high_half << 32);
+  }
+
+  void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    for (cur_arg_index_ = 0;  cur_arg_index_ < args_in_regs_ && param_index_ < num_params_; ) {
+      is_split_long_or_double_ = (cur_arg_index_ == 2) && IsParamALongOrDouble();
+      Visit();
+      cur_arg_index_ += (IsParamALongOrDouble() ? 2 : 1);
+      param_index_++;
+    }
+    cur_args_ = stack_args_;
+    cur_arg_index_ = is_split_long_or_double_ ? 1 : 0;
+    is_split_long_or_double_ = false;
+    while (param_index_ < num_params_) {
+      Visit();
+      cur_arg_index_ += (IsParamALongOrDouble() ? 2 : 1);
+      param_index_++;
+    }
+  }
+
+ private:
+  static size_t ComputeArgsInRegs(bool is_static, const char* shorty, uint32_t shorty_len)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    size_t args_in_regs = (is_static ? 0 : 1);
+    for (size_t i = 1; i < shorty_len; i++) {  // Skip the return type at shorty[0].
+      char s = shorty[i];
+      if (s == 'J' || s == 'D') {
+        args_in_regs += 2;
+      } else {
+        args_in_regs++;
+      }
+      if (args_in_regs > 3) {
+        args_in_regs = 3;
+        break;
+      }
+    }
+    return args_in_regs;
+  }
+
+  const bool is_static_;
+  const char* const shorty_;
+  const uint32_t shorty_len_;
+  const size_t args_in_regs_;
+  const size_t num_params_;
+  byte* const reg_args_;
+  byte* const stack_args_;
+  byte* cur_args_;
+  size_t cur_arg_index_;
+  size_t param_index_;
+  // Does a 64bit parameter straddle the register and stack arguments?
+  bool is_split_long_or_double_;
+};
+
+// Visits arguments on the stack, placing them into the shadow frame.
+class BuildShadowFrameVisitor : public QuickArgumentVisitor {
+ public:
+  BuildShadowFrameVisitor(mirror::AbstractMethod** sp, bool is_static, const char* shorty,
+                          uint32_t shorty_len, ShadowFrame& sf, size_t first_arg_reg) :
+    QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}
+
+  virtual void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    Primitive::Type type = GetParamPrimitiveType();
+    switch (type) {
+      case Primitive::kPrimLong:  // Fall-through.
+      case Primitive::kPrimDouble:
+        if (IsSplitLongOrDouble()) {
+          sf_.SetVRegLong(cur_reg_, ReadSplitLongParam());
+        } else {
+          sf_.SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
+        }
+        ++cur_reg_;
+        break;
+      case Primitive::kPrimNot:
+        sf_.SetVRegReference(cur_reg_, *reinterpret_cast<mirror::Object**>(GetParamAddress()));
+        break;
+      case Primitive::kPrimBoolean:  // Fall-through.
+      case Primitive::kPrimByte:     // Fall-through.
+      case Primitive::kPrimChar:     // Fall-through.
+      case Primitive::kPrimShort:    // Fall-through.
+      case Primitive::kPrimInt:      // Fall-through.
+      case Primitive::kPrimFloat:
+        sf_.SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
+        break;
+      case Primitive::kPrimVoid:
+        LOG(FATAL) << "UNREACHABLE";
+        break;
+    }
+    ++cur_reg_;
+  }
+
+ private:
+  ShadowFrame& sf_;
+  size_t cur_reg_;
+
+  DISALLOW_COPY_AND_ASSIGN(BuildShadowFrameVisitor);
+};
+
+extern "C" uint64_t artQuickToInterpreterBridge(mirror::AbstractMethod* method, Thread* self,
+                                                mirror::AbstractMethod** sp)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
+  // frame.
+  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
+
+  if (method->IsAbstract()) {
+    ThrowAbstractMethodError(method);
+    return 0;
+  } else {
+    const char* old_cause = self->StartAssertNoThreadSuspension("Building interpreter shadow frame");
+    MethodHelper mh(method);
+    const DexFile::CodeItem* code_item = mh.GetCodeItem();
+    uint16_t num_regs = code_item->registers_size_;
+    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
+    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, NULL,  // No last shadow coming from quick.
+                                                  method, 0, memory));
+    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
+    BuildShadowFrameVisitor shadow_frame_builder(sp, mh.IsStatic(), mh.GetShorty(),
+                                                 mh.GetShortyLength(),
+                                                 *shadow_frame, first_arg_reg);
+    shadow_frame_builder.VisitArguments();
+    // Push a transition back into managed code onto the linked list in thread.
+    ManagedStack fragment;
+    self->PushManagedStackFragment(&fragment);
+    self->PushShadowFrame(shadow_frame);
+    self->EndAssertNoThreadSuspension(old_cause);
+
+    if (method->IsStatic() && !method->GetDeclaringClass()->IsInitializing()) {
+      // Ensure static method's class is initialized.
+      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(method->GetDeclaringClass(),
+                                                                   true, true)) {
+        DCHECK(Thread::Current()->IsExceptionPending());
+        self->PopManagedStackFragment(fragment);
+        return 0;
+      }
+    }
+
+    JValue result = interpreter::EnterInterpreterFromStub(self, mh, code_item, *shadow_frame);
+    // Pop transition.
+    self->PopManagedStackFragment(fragment);
+    return result.GetJ();
+  }
+}
+
+// Visits arguments on the stack, placing them into the args vector; Object* arguments are converted
+// to jobjects.
+class BuildQuickArgumentVisitor : public QuickArgumentVisitor {
+ public:
+  BuildQuickArgumentVisitor(mirror::AbstractMethod** sp, bool is_static, const char* shorty,
+                            uint32_t shorty_len, ScopedObjectAccessUnchecked* soa,
+                            std::vector<jvalue>* args) :
+    QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}
+
+  virtual void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    jvalue val;
+    Primitive::Type type = GetParamPrimitiveType();
+    switch (type) {
+      case Primitive::kPrimNot: {
+        mirror::Object* obj = *reinterpret_cast<mirror::Object**>(GetParamAddress());
+        val.l = soa_->AddLocalReference<jobject>(obj);
+        break;
+      }
+      case Primitive::kPrimLong:  // Fall-through.
+      case Primitive::kPrimDouble:
+        if (IsSplitLongOrDouble()) {
+          val.j = ReadSplitLongParam();
+        } else {
+          val.j = *reinterpret_cast<jlong*>(GetParamAddress());
+        }
+        break;
+      case Primitive::kPrimBoolean:  // Fall-through.
+      case Primitive::kPrimByte:     // Fall-through.
+      case Primitive::kPrimChar:     // Fall-through.
+      case Primitive::kPrimShort:    // Fall-through.
+      case Primitive::kPrimInt:      // Fall-through.
+      case Primitive::kPrimFloat:
+        val.i = *reinterpret_cast<jint*>(GetParamAddress());
+        break;
+      case Primitive::kPrimVoid:
+        LOG(FATAL) << "UNREACHABLE";
+        val.j = 0;
+        break;
+    }
+    args_->push_back(val);
+  }
+
+ private:
+  ScopedObjectAccessUnchecked* soa_;
+  std::vector<jvalue>* args_;
+
+  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
+};
+
+// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
+// which is responsible for recording callee save registers. We explicitly place into jobjects the
+// incoming reference arguments (so they survive GC). We then invoke the invocation handler, a
+// field within the proxy object, which boxes the primitive arguments and deals with error cases.
+extern "C" uint64_t artQuickProxyInvokeHandler(mirror::AbstractMethod* proxy_method,
+                                               mirror::Object* receiver,
+                                               Thread* self, mirror::AbstractMethod** sp)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
+  const char* old_cause =
+      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
+  // Register the top of the managed stack, making stack crawlable.
+  DCHECK_EQ(*sp, proxy_method);
+  self->SetTopOfStack(sp, 0);
+  DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
+            Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
+  self->VerifyStack();
+  // Start new JNI local reference state.
+  JNIEnvExt* env = self->GetJniEnv();
+  ScopedObjectAccessUnchecked soa(env);
+  ScopedJniEnvLocalRefState env_state(env);
+  // Create local ref. copies of proxy method and the receiver.
+  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);
+
+  // Place the arguments into the args vector and remove the receiver.
+  MethodHelper proxy_mh(proxy_method);
+  std::vector<jvalue> args;
+  BuildQuickArgumentVisitor local_ref_visitor(sp, proxy_mh.IsStatic(), proxy_mh.GetShorty(),
+                                              proxy_mh.GetShortyLength(), &soa, &args);
+  local_ref_visitor.VisitArguments();
+  args.erase(args.begin());
+
+  // Convert proxy method into expected interface method.
+  mirror::AbstractMethod* interface_method = proxy_method->FindOverriddenMethod();
+  DCHECK(interface_method != NULL);
+  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
+  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);
+
+  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
+  // that performs allocations.
+  self->EndAssertNoThreadSuspension(old_cause);
+  JValue result = InvokeProxyInvocationHandler(soa, proxy_mh.GetShorty(),
+                                               rcvr_jobj, interface_method_jobj, args);
+  return result.GetJ();
+}
+
+// Read object references held in arguments from quick frames and place them in JNI local
+// references so they don't get garbage collected.
+class RememberFoGcArgumentVisitor : public QuickArgumentVisitor {
+ public:
+  RememberFoGcArgumentVisitor(mirror::AbstractMethod** sp, bool is_static, const char* shorty,
+                              uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) :
+    QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}
+
+  virtual void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    if (IsParamAReference()) {
+      soa_->AddLocalReference<jobject>(*reinterpret_cast<mirror::Object**>(GetParamAddress()));
+    }
+  }
+
+ private:
+  ScopedObjectAccessUnchecked* soa_;
+
+  DISALLOW_COPY_AND_ASSIGN(RememberFoGcArgumentVisitor);
+};
+
+// Lazily resolve a method for quick. Called by stub code.
+extern "C" const void* artQuickResolutionTrampoline(mirror::AbstractMethod* called,
+                                                    mirror::Object* receiver,
+                                                    Thread* thread, mirror::AbstractMethod** sp)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  FinishCalleeSaveFrameSetup(thread, sp, Runtime::kRefsAndArgs);
+  // Start new JNI local reference state
+  JNIEnvExt* env = thread->GetJniEnv();
+  ScopedObjectAccessUnchecked soa(env);
+  ScopedJniEnvLocalRefState env_state(env);
+  const char* old_cause = thread->StartAssertNoThreadSuspension("Quick method resolution set up");
+
+  // Compute details about the called method (avoid GCs)
+  ClassLinker* linker = Runtime::Current()->GetClassLinker();
+  mirror::AbstractMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
+  InvokeType invoke_type;
+  const DexFile* dex_file;
+  uint32_t dex_method_idx;
+  if (called->IsRuntimeMethod()) {
+    uint32_t dex_pc = caller->ToDexPc(QuickArgumentVisitor::GetCallingPc(sp));
+    const DexFile::CodeItem* code;
+    {
+      MethodHelper mh(caller);
+      dex_file = &mh.GetDexFile();
+      code = mh.GetCodeItem();
+    }
+    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
+    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
+    Instruction::Code instr_code = instr->Opcode();
+    bool is_range;
+    switch (instr_code) {
+      case Instruction::INVOKE_DIRECT:
+        invoke_type = kDirect;
+        is_range = false;
+        break;
+      case Instruction::INVOKE_DIRECT_RANGE:
+        invoke_type = kDirect;
+        is_range = true;
+        break;
+      case Instruction::INVOKE_STATIC:
+        invoke_type = kStatic;
+        is_range = false;
+        break;
+      case Instruction::INVOKE_STATIC_RANGE:
+        invoke_type = kStatic;
+        is_range = true;
+        break;
+      case Instruction::INVOKE_SUPER:
+        invoke_type = kSuper;
+        is_range = false;
+        break;
+      case Instruction::INVOKE_SUPER_RANGE:
+        invoke_type = kSuper;
+        is_range = true;
+        break;
+      case Instruction::INVOKE_VIRTUAL:
+        invoke_type = kVirtual;
+        is_range = false;
+        break;
+      case Instruction::INVOKE_VIRTUAL_RANGE:
+        invoke_type = kVirtual;
+        is_range = true;
+        break;
+      case Instruction::INVOKE_INTERFACE:
+        invoke_type = kInterface;
+        is_range = false;
+        break;
+      case Instruction::INVOKE_INTERFACE_RANGE:
+        invoke_type = kInterface;
+        is_range = true;
+        break;
+      default:
+        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(NULL);
+        // Avoid used uninitialized warnings.
+        invoke_type = kDirect;
+        is_range = false;
+    }
+    dex_method_idx = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
+
+  } else {
+    invoke_type = kStatic;
+    dex_file = &MethodHelper(called).GetDexFile();
+    dex_method_idx = called->GetDexMethodIndex();
+  }
+  uint32_t shorty_len;
+  const char* shorty =
+      dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), &shorty_len);
+  RememberFoGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
+  visitor.VisitArguments();
+  thread->EndAssertNoThreadSuspension(old_cause);
+  // Resolve method filling in dex cache.
+  if (called->IsRuntimeMethod()) {
+    called = linker->ResolveMethod(dex_method_idx, caller, invoke_type);
+  }
+  const void* code = NULL;
+  if (LIKELY(!thread->IsExceptionPending())) {
+    // Incompatible class change should have been handled in resolve method.
+    CHECK(!called->CheckIncompatibleClassChange(invoke_type));
+    // Refine called method based on receiver.
+    if (invoke_type == kVirtual) {
+      called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
+    } else if (invoke_type == kInterface) {
+      called = receiver->GetClass()->FindVirtualMethodForInterface(called);
+    }
+    // Ensure that the called method's class is initialized.
+    mirror::Class* called_class = called->GetDeclaringClass();
+    linker->EnsureInitialized(called_class, true, true);
+    if (LIKELY(called_class->IsInitialized())) {
+      code = called->GetEntryPointFromCompiledCode();
+    } else if (called_class->IsInitializing()) {
+      if (invoke_type == kStatic) {
+        // Class is still initializing, go to oat and grab code (trampoline must be left in place
+        // until class is initialized to stop races between threads).
+        code = linker->GetOatCodeFor(called);
+      } else {
+        // No trampoline for non-static methods.
+        code = called->GetEntryPointFromCompiledCode();
+      }
+    } else {
+      DCHECK(called_class->IsErroneous());
+    }
+  }
+  CHECK_EQ(code == NULL, thread->IsExceptionPending());
+#ifdef MOVING_GARBAGE_COLLECTOR
+  // TODO: locally saved objects may have moved during a GC during resolution. Need to update the
+  //       registers so that the stale objects aren't passed to the method we've resolved.
+    UNIMPLEMENTED(WARNING);
+#endif
+  // Place the called method in the callee-save frame as the quick method's first argument.
+  *sp = called;
+  return code;
+}
+
+}  // namespace art
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index c0b85f4..c3b66b3 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -60,7 +60,7 @@
       const void* new_code;
       if (uninstall) {
         if (forced_interpret_only_ && !method->IsNative() && !method->IsProxyMethod()) {
-          new_code = GetInterpreterEntryPoint();
+          new_code = GetCompiledCodeToInterpreterBridge();
         } else if (is_initialized || !method->IsStatic() || method->IsConstructor()) {
           new_code = class_linker->GetOatCodeFor(method);
         } else {
@@ -68,9 +68,9 @@
         }
       } else {  // !uninstall
         if (!interpreter_stubs_installed_ || method->IsNative()) {
-          new_code = GetInstrumentationEntryPoint();
+          new_code = GetQuickInstrumentationEntryPoint();
         } else {
-          new_code = GetInterpreterEntryPoint();
+          new_code = GetCompiledCodeToInterpreterBridge();
         }
       }
       method->SetEntryPointFromCompiledCode(new_code);
@@ -82,15 +82,15 @@
       const void* new_code;
       if (uninstall) {
         if (forced_interpret_only_ && !method->IsNative() && !method->IsProxyMethod()) {
-          new_code = GetInterpreterEntryPoint();
+          new_code = GetCompiledCodeToInterpreterBridge();
         } else {
           new_code = class_linker->GetOatCodeFor(method);
         }
       } else {  // !uninstall
         if (!interpreter_stubs_installed_ || method->IsNative()) {
-          new_code = GetInstrumentationEntryPoint();
+          new_code = GetQuickInstrumentationEntryPoint();
         } else {
-          new_code = GetInterpreterEntryPoint();
+          new_code = GetCompiledCodeToInterpreterBridge();
         }
       }
       method->SetEntryPointFromCompiledCode(new_code);
@@ -159,7 +159,7 @@
     LOG(INFO) << "Installing exit stubs in " << thread_name;
   }
   UniquePtr<Context> context(Context::Create());
-  uintptr_t instrumentation_exit_pc = GetInstrumentationExitPc();
+  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
   InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
   visitor.WalkStack(true);
 
@@ -251,7 +251,7 @@
   std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
   if (stack->size() > 0) {
     Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
-    uintptr_t instrumentation_exit_pc = GetInstrumentationExitPc();
+    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
     RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
     visitor.WalkStack(true);
     CHECK_EQ(visitor.frames_removed_, stack->size());
@@ -384,9 +384,9 @@
     method->SetEntryPointFromCompiledCode(code);
   } else {
     if (!interpreter_stubs_installed_ || method->IsNative()) {
-      method->SetEntryPointFromCompiledCode(GetInstrumentationEntryPoint());
+      method->SetEntryPointFromCompiledCode(GetQuickInstrumentationEntryPoint());
     } else {
-      method->SetEntryPointFromCompiledCode(GetInterpreterEntryPoint());
+      method->SetEntryPointFromCompiledCode(GetCompiledCodeToInterpreterBridge());
     }
   }
 }
@@ -396,8 +396,8 @@
   if (LIKELY(!instrumentation_stubs_installed_)) {
     const void* code = method->GetEntryPointFromCompiledCode();
     DCHECK(code != NULL);
-    if (LIKELY(code != GetResolutionTrampoline(runtime->GetClassLinker()) &&
-               code != GetInterpreterEntryPoint())) {
+    if (LIKELY(code != GetQuickResolutionTrampoline(runtime->GetClassLinker()) &&
+               code != GetQuickToInterpreterBridge())) {
       return code;
     }
   }
@@ -548,7 +548,7 @@
           << " result is " << std::hex << return_value.GetJ();
     }
     self->SetDeoptimizationReturnValue(return_value);
-    return static_cast<uint64_t>(GetDeoptimizationEntryPoint()) |
+    return static_cast<uint64_t>(GetQuickDeoptimizationEntryPoint()) |
         (static_cast<uint64_t>(*return_pc) << 32);
   } else {
     if (kVerboseInstrumentation) {
diff --git a/runtime/interpreter/interpreter.cc b/runtime/interpreter/interpreter.cc
index ef4b95c..6e35d93 100644
--- a/runtime/interpreter/interpreter.cc
+++ b/runtime/interpreter/interpreter.cc
@@ -148,7 +148,7 @@
     }
   } else {
     // Not special, continue with regular interpreter execution.
-    artInterpreterToInterpreterEntry(self, mh, code_item, shadow_frame, result);
+    artInterpreterToInterpreterBridge(self, mh, code_item, shadow_frame, result);
   }
 }
 
@@ -3039,6 +3039,10 @@
 
 static inline JValue Execute(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item,
                              ShadowFrame& shadow_frame, JValue result_register) {
+  DCHECK(shadow_frame.GetMethod() == mh.GetMethod() ||
+         shadow_frame.GetMethod()->GetDeclaringClass()->IsProxyClass());
+  DCHECK(!shadow_frame.GetMethod()->IsAbstract());
+  DCHECK(!shadow_frame.GetMethod()->IsNative());
   if (shadow_frame.GetMethod()->IsPreverified()) {
     // Enter the "without access check" interpreter.
     return ExecuteImpl<false>(self, mh, code_item, shadow_frame, result_register);
@@ -3150,8 +3154,7 @@
 }
 
 JValue EnterInterpreterFromStub(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item,
-                                ShadowFrame& shadow_frame)
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+                                ShadowFrame& shadow_frame) {
   DCHECK_EQ(self, Thread::Current());
   if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEnd())) {
     ThrowStackOverflowError(self);
@@ -3161,10 +3164,9 @@
   return Execute(self, mh, code_item, shadow_frame, JValue());
 }
 
-void artInterpreterToInterpreterEntry(Thread* self, MethodHelper& mh,
-                                      const DexFile::CodeItem* code_item,
-                                      ShadowFrame* shadow_frame, JValue* result)
-    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+extern "C" void artInterpreterToInterpreterBridge(Thread* self, MethodHelper& mh,
+                                                  const DexFile::CodeItem* code_item,
+                                                  ShadowFrame* shadow_frame, JValue* result) {
   if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEnd())) {
     ThrowStackOverflowError(self);
     return;
diff --git a/runtime/interpreter/interpreter.h b/runtime/interpreter/interpreter.h
index 17884b9..af4a147 100644
--- a/runtime/interpreter/interpreter.h
+++ b/runtime/interpreter/interpreter.h
@@ -47,9 +47,9 @@
                                        ShadowFrame& shadow_frame)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-extern "C" void artInterpreterToInterpreterEntry(Thread* self, MethodHelper& mh,
-                                                const DexFile::CodeItem* code_item,
-                                                ShadowFrame* shadow_frame, JValue* result)
+extern "C" void artInterpreterToInterpreterBridge(Thread* self, MethodHelper& mh,
+                                                  const DexFile::CodeItem* code_item,
+                                                  ShadowFrame* shadow_frame, JValue* result)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
 }  // namespace interpreter
diff --git a/runtime/jni_internal.h b/runtime/jni_internal.h
index ad66ada..fcac481 100644
--- a/runtime/jni_internal.h
+++ b/runtime/jni_internal.h
@@ -144,6 +144,10 @@
     return Offset(OFFSETOF_MEMBER(JNIEnvExt, local_ref_cookie));
   }
 
+  static Offset SelfOffset() {
+    return Offset(OFFSETOF_MEMBER(JNIEnvExt, self));
+  }
+
   Thread* const self;
   JavaVMExt* vm;
 
diff --git a/runtime/mirror/abstract_method-inl.h b/runtime/mirror/abstract_method-inl.h
index d235e3e..8fde99b 100644
--- a/runtime/mirror/abstract_method-inl.h
+++ b/runtime/mirror/abstract_method-inl.h
@@ -114,11 +114,11 @@
   if (IsNative() || IsRuntimeMethod() || IsProxyMethod()) {
     return;
   }
-  if (pc == GetInstrumentationExitPc()) {
+  if (pc == GetQuickInstrumentationExitPc()) {
     return;
   }
   const void* code = GetEntryPointFromCompiledCode();
-  if (code == GetInterpreterEntryPoint() || code == GetInstrumentationEntryPoint()) {
+  if (code == GetCompiledCodeToInterpreterBridge() || code == GetQuickInstrumentationEntryPoint()) {
     return;
   }
   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
diff --git a/runtime/mirror/abstract_method.cc b/runtime/mirror/abstract_method.cc
index 4d7f99e..93065e7 100644
--- a/runtime/mirror/abstract_method.cc
+++ b/runtime/mirror/abstract_method.cc
@@ -321,6 +321,7 @@
   return native_method != jni_stub;
 }
 
+extern "C" void art_work_around_app_jni_bugs(JNIEnv*, jobject);
 void AbstractMethod::RegisterNative(Thread* self, const void* native_method) {
   DCHECK(Thread::Current() == self);
   CHECK(IsNative()) << PrettyMethod(this);
@@ -332,10 +333,10 @@
     // around JNI bugs, that include not giving Object** SIRT references to native methods. Direct
     // the native method to runtime support and store the target somewhere runtime support will
     // find it.
-#if defined(__arm__) && !defined(ART_USE_PORTABLE_COMPILER)
-    SetNativeMethod(native_method);
-#else
+#if defined(__i386__)
     UNIMPLEMENTED(FATAL);
+#else
+    SetNativeMethod(reinterpret_cast<void*>(art_work_around_app_jni_bugs));
 #endif
     SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, gc_map_),
         reinterpret_cast<const uint8_t*>(native_method), false);
diff --git a/runtime/native/dalvik_system_VMRuntime.cc b/runtime/native/dalvik_system_VMRuntime.cc
index 4ef7607..47fe025 100644
--- a/runtime/native/dalvik_system_VMRuntime.cc
+++ b/runtime/native/dalvik_system_VMRuntime.cc
@@ -144,8 +144,6 @@
   if (targetSdkVersion > 0 && targetSdkVersion <= 13 /* honeycomb-mr2 */) {
     Runtime* runtime = Runtime::Current();
     JavaVMExt* vm = runtime->GetJavaVM();
-
-#if !defined(ART_USE_PORTABLE_COMPILER)
     if (vm->check_jni) {
       LOG(WARNING) << "Turning off CheckJNI so we can turn on JNI app bug workarounds...";
       Thread* self = static_cast<JNIEnvExt*>(env)->self;
@@ -158,11 +156,6 @@
               << targetSdkVersion << "...";
 
     vm->work_around_app_jni_bugs = true;
-#else
-    UNUSED(env);
-    LOG(WARNING) << "LLVM does not work-around app jni bugs.";
-    vm->work_around_app_jni_bugs = false;
-#endif
   }
 }
 
diff --git a/runtime/oat.cc b/runtime/oat.cc
index e606953..c01f77c 100644
--- a/runtime/oat.cc
+++ b/runtime/oat.cc
@@ -22,7 +22,7 @@
 namespace art {
 
 const uint8_t OatHeader::kOatMagic[] = { 'o', 'a', 't', '\n' };
-const uint8_t OatHeader::kOatVersion[] = { '0', '0', '6', '\0' };
+const uint8_t OatHeader::kOatVersion[] = { '0', '0', '7', '\0' };
 
 OatHeader::OatHeader() {
   memset(this, 0, sizeof(*this));
@@ -57,10 +57,13 @@
   UpdateChecksum(image_file_location.data(), image_file_location_size_);
 
   executable_offset_ = 0;
-  interpreter_to_interpreter_entry_offset_ = 0;
-  interpreter_to_quick_entry_offset_ = 0;
+  interpreter_to_interpreter_bridge_offset_ = 0;
+  interpreter_to_compiled_code_bridge_offset_ = 0;
+  jni_dlsym_lookup_offset_ = 0;
   portable_resolution_trampoline_offset_ = 0;
+  portable_to_interpreter_bridge_offset_ = 0;
   quick_resolution_trampoline_offset_ = 0;
+  quick_to_interpreter_bridge_offset_ = 0;
 }
 
 bool OatHeader::IsValid() const {
@@ -111,42 +114,61 @@
   UpdateChecksum(&executable_offset_, sizeof(executable_offset));
 }
 
-const void* OatHeader::GetInterpreterToInterpreterEntry() const {
-  return reinterpret_cast<const uint8_t*>(this) + GetInterpreterToInterpreterEntryOffset();
+const void* OatHeader::GetInterpreterToInterpreterBridge() const {
+  return reinterpret_cast<const uint8_t*>(this) + GetInterpreterToInterpreterBridgeOffset();
 }
 
-uint32_t OatHeader::GetInterpreterToInterpreterEntryOffset() const {
+uint32_t OatHeader::GetInterpreterToInterpreterBridgeOffset() const {
   DCHECK(IsValid());
-  CHECK_GE(interpreter_to_interpreter_entry_offset_, executable_offset_);
-  return interpreter_to_interpreter_entry_offset_;
+  CHECK_GE(interpreter_to_interpreter_bridge_offset_, executable_offset_);
+  return interpreter_to_interpreter_bridge_offset_;
 }
 
-void OatHeader::SetInterpreterToInterpreterEntryOffset(uint32_t offset) {
+void OatHeader::SetInterpreterToInterpreterBridgeOffset(uint32_t offset) {
   CHECK(offset == 0 || offset >= executable_offset_);
   DCHECK(IsValid());
-  DCHECK_EQ(interpreter_to_interpreter_entry_offset_, 0U) << offset;
+  DCHECK_EQ(interpreter_to_interpreter_bridge_offset_, 0U) << offset;
 
-  interpreter_to_interpreter_entry_offset_ = offset;
-  UpdateChecksum(&interpreter_to_interpreter_entry_offset_, sizeof(offset));
+  interpreter_to_interpreter_bridge_offset_ = offset;
+  UpdateChecksum(&interpreter_to_interpreter_bridge_offset_, sizeof(offset));
 }
 
-const void* OatHeader::GetInterpreterToQuickEntry() const {
-  return reinterpret_cast<const uint8_t*>(this) + GetInterpreterToQuickEntryOffset();
+const void* OatHeader::GetInterpreterToCompiledCodeBridge() const {
+  return reinterpret_cast<const uint8_t*>(this) + GetInterpreterToCompiledCodeBridgeOffset();
 }
 
-uint32_t OatHeader::GetInterpreterToQuickEntryOffset() const {
+uint32_t OatHeader::GetInterpreterToCompiledCodeBridgeOffset() const {
   DCHECK(IsValid());
-  CHECK_GE(interpreter_to_quick_entry_offset_, interpreter_to_interpreter_entry_offset_);
-  return interpreter_to_quick_entry_offset_;
+  CHECK_GE(interpreter_to_compiled_code_bridge_offset_, interpreter_to_interpreter_bridge_offset_);
+  return interpreter_to_compiled_code_bridge_offset_;
 }
 
-void OatHeader::SetInterpreterToQuickEntryOffset(uint32_t offset) {
-  CHECK(offset == 0 || offset >= interpreter_to_interpreter_entry_offset_);
+void OatHeader::SetInterpreterToCompiledCodeBridgeOffset(uint32_t offset) {
+  CHECK(offset == 0 || offset >= interpreter_to_interpreter_bridge_offset_);
   DCHECK(IsValid());
-  DCHECK_EQ(interpreter_to_quick_entry_offset_, 0U) << offset;
+  DCHECK_EQ(interpreter_to_compiled_code_bridge_offset_, 0U) << offset;
 
-  interpreter_to_quick_entry_offset_ = offset;
-  UpdateChecksum(&interpreter_to_quick_entry_offset_, sizeof(offset));
+  interpreter_to_compiled_code_bridge_offset_ = offset;
+  UpdateChecksum(&interpreter_to_compiled_code_bridge_offset_, sizeof(offset));
+}
+
+const void* OatHeader::GetJniDlsymLookup() const {
+  return reinterpret_cast<const uint8_t*>(this) + GetJniDlsymLookupOffset();
+}
+
+uint32_t OatHeader::GetJniDlsymLookupOffset() const {
+  DCHECK(IsValid());
+  CHECK_GE(jni_dlsym_lookup_offset_, interpreter_to_compiled_code_bridge_offset_);
+  return jni_dlsym_lookup_offset_;
+}
+
+void OatHeader::SetJniDlsymLookupOffset(uint32_t offset) {
+  CHECK(offset == 0 || offset >= interpreter_to_compiled_code_bridge_offset_);
+  DCHECK(IsValid());
+  DCHECK_EQ(jni_dlsym_lookup_offset_, 0U) << offset;
+
+  jni_dlsym_lookup_offset_ = offset;
+  UpdateChecksum(&jni_dlsym_lookup_offset_, sizeof(offset));
 }
 
 const void* OatHeader::GetPortableResolutionTrampoline() const {
@@ -155,12 +177,12 @@
 
 uint32_t OatHeader::GetPortableResolutionTrampolineOffset() const {
   DCHECK(IsValid());
-  CHECK_GE(portable_resolution_trampoline_offset_, interpreter_to_quick_entry_offset_);
+  CHECK_GE(portable_resolution_trampoline_offset_, jni_dlsym_lookup_offset_);
   return portable_resolution_trampoline_offset_;
 }
 
 void OatHeader::SetPortableResolutionTrampolineOffset(uint32_t offset) {
-  CHECK(offset == 0 || offset >= interpreter_to_quick_entry_offset_);
+  CHECK(offset == 0 || offset >= jni_dlsym_lookup_offset_);
   DCHECK(IsValid());
   DCHECK_EQ(portable_resolution_trampoline_offset_, 0U) << offset;
 
@@ -168,18 +190,37 @@
   UpdateChecksum(&portable_resolution_trampoline_offset_, sizeof(offset));
 }
 
+const void* OatHeader::GetPortableToInterpreterBridge() const {
+  return reinterpret_cast<const uint8_t*>(this) + GetPortableToInterpreterBridgeOffset();
+}
+
+uint32_t OatHeader::GetPortableToInterpreterBridgeOffset() const {
+  DCHECK(IsValid());
+  CHECK_GE(portable_to_interpreter_bridge_offset_, portable_resolution_trampoline_offset_);
+  return portable_to_interpreter_bridge_offset_;
+}
+
+void OatHeader::SetPortableToInterpreterBridgeOffset(uint32_t offset) {
+  CHECK(offset == 0 || offset >= portable_resolution_trampoline_offset_);
+  DCHECK(IsValid());
+  DCHECK_EQ(portable_to_interpreter_bridge_offset_, 0U) << offset;
+
+  portable_to_interpreter_bridge_offset_ = offset;
+  UpdateChecksum(&portable_to_interpreter_bridge_offset_, sizeof(offset));
+}
+
 const void* OatHeader::GetQuickResolutionTrampoline() const {
   return reinterpret_cast<const uint8_t*>(this) + GetQuickResolutionTrampolineOffset();
 }
 
 uint32_t OatHeader::GetQuickResolutionTrampolineOffset() const {
   DCHECK(IsValid());
-  CHECK_GE(quick_resolution_trampoline_offset_, portable_resolution_trampoline_offset_);
+  CHECK_GE(quick_resolution_trampoline_offset_, portable_to_interpreter_bridge_offset_);
   return quick_resolution_trampoline_offset_;
 }
 
 void OatHeader::SetQuickResolutionTrampolineOffset(uint32_t offset) {
-  CHECK(offset == 0 || offset >= portable_resolution_trampoline_offset_);
+  CHECK(offset == 0 || offset >= portable_to_interpreter_bridge_offset_);
   DCHECK(IsValid());
   DCHECK_EQ(quick_resolution_trampoline_offset_, 0U) << offset;
 
@@ -187,6 +228,25 @@
   UpdateChecksum(&quick_resolution_trampoline_offset_, sizeof(offset));
 }
 
+const void* OatHeader::GetQuickToInterpreterBridge() const {
+  return reinterpret_cast<const uint8_t*>(this) + GetQuickToInterpreterBridgeOffset();
+}
+
+uint32_t OatHeader::GetQuickToInterpreterBridgeOffset() const {
+  DCHECK(IsValid());
+  CHECK_GE(quick_to_interpreter_bridge_offset_, quick_resolution_trampoline_offset_);
+  return quick_to_interpreter_bridge_offset_;
+}
+
+void OatHeader::SetQuickToInterpreterBridgeOffset(uint32_t offset) {
+  CHECK(offset == 0 || offset >= quick_resolution_trampoline_offset_);
+  DCHECK(IsValid());
+  DCHECK_EQ(quick_to_interpreter_bridge_offset_, 0U) << offset;
+
+  quick_to_interpreter_bridge_offset_ = offset;
+  UpdateChecksum(&quick_to_interpreter_bridge_offset_, sizeof(offset));
+}
+
 uint32_t OatHeader::GetImageFileLocationOatChecksum() const {
   CHECK(IsValid());
   return image_file_location_oat_checksum_;
diff --git a/runtime/oat.h b/runtime/oat.h
index 4bd1871..a5c6bed 100644
--- a/runtime/oat.h
+++ b/runtime/oat.h
@@ -44,18 +44,32 @@
   }
   uint32_t GetExecutableOffset() const;
   void SetExecutableOffset(uint32_t executable_offset);
-  const void* GetInterpreterToInterpreterEntry() const;
-  uint32_t GetInterpreterToInterpreterEntryOffset() const;
-  void SetInterpreterToInterpreterEntryOffset(uint32_t offset);
-  const void* GetInterpreterToQuickEntry() const;
-  uint32_t GetInterpreterToQuickEntryOffset() const;
-  void SetInterpreterToQuickEntryOffset(uint32_t offset);
+
+  const void* GetInterpreterToInterpreterBridge() const;
+  uint32_t GetInterpreterToInterpreterBridgeOffset() const;
+  void SetInterpreterToInterpreterBridgeOffset(uint32_t offset);
+  const void* GetInterpreterToCompiledCodeBridge() const;
+  uint32_t GetInterpreterToCompiledCodeBridgeOffset() const;
+  void SetInterpreterToCompiledCodeBridgeOffset(uint32_t offset);
+
+  const void* GetJniDlsymLookup() const;
+  uint32_t GetJniDlsymLookupOffset() const;
+  void SetJniDlsymLookupOffset(uint32_t offset);
+
   const void* GetPortableResolutionTrampoline() const;
   uint32_t GetPortableResolutionTrampolineOffset() const;
   void SetPortableResolutionTrampolineOffset(uint32_t offset);
+  const void* GetPortableToInterpreterBridge() const;
+  uint32_t GetPortableToInterpreterBridgeOffset() const;
+  void SetPortableToInterpreterBridgeOffset(uint32_t offset);
+
   const void* GetQuickResolutionTrampoline() const;
   uint32_t GetQuickResolutionTrampolineOffset() const;
   void SetQuickResolutionTrampolineOffset(uint32_t offset);
+  const void* GetQuickToInterpreterBridge() const;
+  uint32_t GetQuickToInterpreterBridgeOffset() const;
+  void SetQuickToInterpreterBridgeOffset(uint32_t offset);
+
   InstructionSet GetInstructionSet() const;
   uint32_t GetImageFileLocationOatChecksum() const;
   uint32_t GetImageFileLocationOatDataBegin() const;
@@ -74,10 +88,13 @@
   InstructionSet instruction_set_;
   uint32_t dex_file_count_;
   uint32_t executable_offset_;
-  uint32_t interpreter_to_interpreter_entry_offset_;
-  uint32_t interpreter_to_quick_entry_offset_;
+  uint32_t interpreter_to_interpreter_bridge_offset_;
+  uint32_t interpreter_to_compiled_code_bridge_offset_;
+  uint32_t jni_dlsym_lookup_offset_;
   uint32_t portable_resolution_trampoline_offset_;
+  uint32_t portable_to_interpreter_bridge_offset_;
   uint32_t quick_resolution_trampoline_offset_;
+  uint32_t quick_to_interpreter_bridge_offset_;
 
   uint32_t image_file_location_oat_checksum_;
   uint32_t image_file_location_oat_data_begin_;
diff --git a/runtime/oat_test.cc b/runtime/oat_test.cc
index 5d0dca9..68595c8 100644
--- a/runtime/oat_test.cc
+++ b/runtime/oat_test.cc
@@ -141,7 +141,7 @@
 TEST_F(OatTest, OatHeaderSizeCheck) {
   // If this test is failing and you have to update these constants,
   // it is time to update OatHeader::kOatVersion
-  EXPECT_EQ(52U, sizeof(OatHeader));
+  EXPECT_EQ(64U, sizeof(OatHeader));
   EXPECT_EQ(28U, sizeof(OatMethodOffsets));
 }
 
diff --git a/runtime/object_utils.h b/runtime/object_utils.h
index fa7763e..3639a80 100644
--- a/runtime/object_utils.h
+++ b/runtime/object_utils.h
@@ -411,6 +411,10 @@
     shorty_ = NULL;
   }
 
+  const mirror::AbstractMethod* GetMethod() const {
+    return method_;
+  }
+
   const char* GetName() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     const DexFile& dex_file = GetDexFile();
     uint32_t dex_method_idx = method_->GetDexMethodIndex();
diff --git a/runtime/stack.cc b/runtime/stack.cc
index aeb15f0..d49d6ba 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -304,7 +304,7 @@
         if (UNLIKELY(exit_stubs_installed)) {
           // While profiling, the return pc is restored from the side stack, except when walking
           // the stack for an exception where the side stack will be unwound in VisitFrame.
-          if (GetInstrumentationExitPc() == return_pc) {
+          if (GetQuickInstrumentationExitPc() == return_pc) {
             instrumentation::InstrumentationStackFrame instrumentation_frame =
                 GetInstrumentationStackFrame(instrumentation_stack_depth);
             instrumentation_stack_depth++;
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 97a1410..c79caa2 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -86,23 +86,25 @@
 }
 #endif
 
-void InitEntryPoints(QuickEntryPoints* qpoints, PortableEntryPoints* ppoints);
+void InitEntryPoints(InterpreterEntryPoints* ipoints, JniEntryPoints* jpoints,
+                     PortableEntryPoints* ppoints, QuickEntryPoints* qpoints);
 
-void Thread::InitFunctionPointers() {
+void Thread::InitTlsEntryPoints() {
 #if !defined(__APPLE__)  // The Mac GCC is too old to accept this code.
   // Insert a placeholder so we can easily tell if we call an unimplemented entry point.
-  uintptr_t* begin = reinterpret_cast<uintptr_t*>(&quick_entrypoints_);
+  uintptr_t* begin = reinterpret_cast<uintptr_t*>(&interpreter_entrypoints_);
   uintptr_t* end = reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(begin) + sizeof(quick_entrypoints_));
   for (uintptr_t* it = begin; it != end; ++it) {
     *it = reinterpret_cast<uintptr_t>(UnimplementedEntryPoint);
   }
-  begin = reinterpret_cast<uintptr_t*>(&portable_entrypoints_);
+  begin = reinterpret_cast<uintptr_t*>(&interpreter_entrypoints_);
   end = reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(begin) + sizeof(portable_entrypoints_));
   for (uintptr_t* it = begin; it != end; ++it) {
     *it = reinterpret_cast<uintptr_t>(UnimplementedEntryPoint);
   }
 #endif
-  InitEntryPoints(&quick_entrypoints_, &portable_entrypoints_);
+  InitEntryPoints(&interpreter_entrypoints_, &jni_entrypoints_, &portable_entrypoints_,
+                  &quick_entrypoints_);
 }
 
 void Thread::SetDeoptimizationShadowFrame(ShadowFrame* sf) {
@@ -292,7 +294,7 @@
   CHECK(Thread::Current() == NULL);
   SetUpAlternateSignalStack();
   InitCpu();
-  InitFunctionPointers();
+  InitTlsEntryPoints();
   InitCardTable();
   InitTid();
   // Set pthread_self_ ahead of pthread_setspecific, that makes Thread::Current function, this
@@ -1589,22 +1591,29 @@
   uint32_t offset;
   const char* name;
 };
-#define QUICK_ENTRY_POINT_INFO(x) { QUICK_ENTRYPOINT_OFFSET(x), #x }
-#define PORTABLE_ENTRY_POINT_INFO(x) { PORTABLE_ENTRYPOINT_OFFSET(x), #x }
+#define INTERPRETER_ENTRY_POINT_INFO(x) { INTERPRETER_ENTRYPOINT_OFFSET(x).Uint32Value(), #x }
+#define JNI_ENTRY_POINT_INFO(x)         { JNI_ENTRYPOINT_OFFSET(x).Uint32Value(), #x }
+#define PORTABLE_ENTRY_POINT_INFO(x)    { PORTABLE_ENTRYPOINT_OFFSET(x).Uint32Value(), #x }
+#define QUICK_ENTRY_POINT_INFO(x)       { QUICK_ENTRYPOINT_OFFSET(x).Uint32Value(), #x }
 static const EntryPointInfo gThreadEntryPointInfo[] = {
-  QUICK_ENTRY_POINT_INFO(pAllocArrayFromCode),
-  QUICK_ENTRY_POINT_INFO(pAllocArrayFromCodeWithAccessCheck),
-  QUICK_ENTRY_POINT_INFO(pAllocObjectFromCode),
-  QUICK_ENTRY_POINT_INFO(pAllocObjectFromCodeWithAccessCheck),
-  QUICK_ENTRY_POINT_INFO(pCheckAndAllocArrayFromCode),
-  QUICK_ENTRY_POINT_INFO(pCheckAndAllocArrayFromCodeWithAccessCheck),
-  QUICK_ENTRY_POINT_INFO(pInstanceofNonTrivialFromCode),
-  QUICK_ENTRY_POINT_INFO(pCanPutArrayElementFromCode),
-  QUICK_ENTRY_POINT_INFO(pCheckCastFromCode),
+  INTERPRETER_ENTRY_POINT_INFO(pInterpreterToInterpreterBridge),
+  INTERPRETER_ENTRY_POINT_INFO(pInterpreterToCompiledCodeBridge),
+  JNI_ENTRY_POINT_INFO(pDlsymLookup),
+  PORTABLE_ENTRY_POINT_INFO(pPortableResolutionTrampoline),
+  PORTABLE_ENTRY_POINT_INFO(pPortableToInterpreterBridge),
+  QUICK_ENTRY_POINT_INFO(pAllocArray),
+  QUICK_ENTRY_POINT_INFO(pAllocArrayWithAccessCheck),
+  QUICK_ENTRY_POINT_INFO(pAllocObject),
+  QUICK_ENTRY_POINT_INFO(pAllocObjectWithAccessCheck),
+  QUICK_ENTRY_POINT_INFO(pCheckAndAllocArray),
+  QUICK_ENTRY_POINT_INFO(pCheckAndAllocArrayWithAccessCheck),
+  QUICK_ENTRY_POINT_INFO(pInstanceofNonTrivial),
+  QUICK_ENTRY_POINT_INFO(pCanPutArrayElement),
+  QUICK_ENTRY_POINT_INFO(pCheckCast),
   QUICK_ENTRY_POINT_INFO(pInitializeStaticStorage),
-  QUICK_ENTRY_POINT_INFO(pInitializeTypeAndVerifyAccessFromCode),
-  QUICK_ENTRY_POINT_INFO(pInitializeTypeFromCode),
-  QUICK_ENTRY_POINT_INFO(pResolveStringFromCode),
+  QUICK_ENTRY_POINT_INFO(pInitializeTypeAndVerifyAccess),
+  QUICK_ENTRY_POINT_INFO(pInitializeType),
+  QUICK_ENTRY_POINT_INFO(pResolveString),
   QUICK_ENTRY_POINT_INFO(pSet32Instance),
   QUICK_ENTRY_POINT_INFO(pSet32Static),
   QUICK_ENTRY_POINT_INFO(pSet64Instance),
@@ -1617,15 +1626,15 @@
   QUICK_ENTRY_POINT_INFO(pGet64Static),
   QUICK_ENTRY_POINT_INFO(pGetObjInstance),
   QUICK_ENTRY_POINT_INFO(pGetObjStatic),
-  QUICK_ENTRY_POINT_INFO(pHandleFillArrayDataFromCode),
+  QUICK_ENTRY_POINT_INFO(pHandleFillArrayData),
   QUICK_ENTRY_POINT_INFO(pJniMethodStart),
   QUICK_ENTRY_POINT_INFO(pJniMethodStartSynchronized),
   QUICK_ENTRY_POINT_INFO(pJniMethodEnd),
   QUICK_ENTRY_POINT_INFO(pJniMethodEndSynchronized),
   QUICK_ENTRY_POINT_INFO(pJniMethodEndWithReference),
   QUICK_ENTRY_POINT_INFO(pJniMethodEndWithReferenceSynchronized),
-  QUICK_ENTRY_POINT_INFO(pLockObjectFromCode),
-  QUICK_ENTRY_POINT_INFO(pUnlockObjectFromCode),
+  QUICK_ENTRY_POINT_INFO(pLockObject),
+  QUICK_ENTRY_POINT_INFO(pUnlockObject),
   QUICK_ENTRY_POINT_INFO(pCmpgDouble),
   QUICK_ENTRY_POINT_INFO(pCmpgFloat),
   QUICK_ENTRY_POINT_INFO(pCmplDouble),
@@ -1646,28 +1655,26 @@
   QUICK_ENTRY_POINT_INFO(pShlLong),
   QUICK_ENTRY_POINT_INFO(pShrLong),
   QUICK_ENTRY_POINT_INFO(pUshrLong),
-  QUICK_ENTRY_POINT_INFO(pInterpreterToInterpreterEntry),
-  QUICK_ENTRY_POINT_INFO(pInterpreterToQuickEntry),
   QUICK_ENTRY_POINT_INFO(pIndexOf),
   QUICK_ENTRY_POINT_INFO(pMemcmp16),
   QUICK_ENTRY_POINT_INFO(pStringCompareTo),
   QUICK_ENTRY_POINT_INFO(pMemcpy),
-  QUICK_ENTRY_POINT_INFO(pQuickResolutionTrampolineFromCode),
+  QUICK_ENTRY_POINT_INFO(pQuickResolutionTrampoline),
+  QUICK_ENTRY_POINT_INFO(pQuickToInterpreterBridge),
   QUICK_ENTRY_POINT_INFO(pInvokeDirectTrampolineWithAccessCheck),
   QUICK_ENTRY_POINT_INFO(pInvokeInterfaceTrampoline),
   QUICK_ENTRY_POINT_INFO(pInvokeInterfaceTrampolineWithAccessCheck),
   QUICK_ENTRY_POINT_INFO(pInvokeStaticTrampolineWithAccessCheck),
   QUICK_ENTRY_POINT_INFO(pInvokeSuperTrampolineWithAccessCheck),
   QUICK_ENTRY_POINT_INFO(pInvokeVirtualTrampolineWithAccessCheck),
-  QUICK_ENTRY_POINT_INFO(pCheckSuspendFromCode),
-  QUICK_ENTRY_POINT_INFO(pTestSuspendFromCode),
+  QUICK_ENTRY_POINT_INFO(pCheckSuspend),
+  QUICK_ENTRY_POINT_INFO(pTestSuspend),
   QUICK_ENTRY_POINT_INFO(pDeliverException),
-  QUICK_ENTRY_POINT_INFO(pThrowArrayBoundsFromCode),
-  QUICK_ENTRY_POINT_INFO(pThrowDivZeroFromCode),
-  QUICK_ENTRY_POINT_INFO(pThrowNoSuchMethodFromCode),
-  QUICK_ENTRY_POINT_INFO(pThrowNullPointerFromCode),
-  QUICK_ENTRY_POINT_INFO(pThrowStackOverflowFromCode),
-  PORTABLE_ENTRY_POINT_INFO(pPortableResolutionTrampolineFromCode),
+  QUICK_ENTRY_POINT_INFO(pThrowArrayBounds),
+  QUICK_ENTRY_POINT_INFO(pThrowDivZero),
+  QUICK_ENTRY_POINT_INFO(pThrowNoSuchMethod),
+  QUICK_ENTRY_POINT_INFO(pThrowNullPointer),
+  QUICK_ENTRY_POINT_INFO(pThrowStackOverflow),
 };
 #undef QUICK_ENTRY_POINT_INFO
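The table pairs each Thread-relative offset with the symbol name of its slot, ordered to match the struct layout. A hypothetical lookup over it (the name and signature are illustrative only, not from the patch):

    // Illustrative only: map a Thread-relative offset back to an entry point name.
    static const char* EntryPointNameForOffset(uint32_t offset) {
      for (size_t i = 0; i < arraysize(gThreadEntryPointInfo); ++i) {
        if (gThreadEntryPointInfo[i].offset == offset) {
          return gThreadEntryPointInfo[i].name;
        }
      }
      return NULL;  // Not an entry point slot.
    }
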
 
@@ -1695,8 +1702,9 @@
 
   size_t entry_point_count = arraysize(gThreadEntryPointInfo);
   CHECK_EQ(entry_point_count * size_of_pointers,
-           sizeof(QuickEntryPoints) + sizeof(PortableEntryPoints));
-  uint32_t expected_offset = OFFSETOF_MEMBER(Thread, quick_entrypoints_);
+           sizeof(InterpreterEntryPoints) + sizeof(JniEntryPoints) + sizeof(PortableEntryPoints) +
+           sizeof(QuickEntryPoints));
+  uint32_t expected_offset = OFFSETOF_MEMBER(Thread, interpreter_entrypoints_);
   for (size_t i = 0; i < entry_point_count; ++i) {
     CHECK_EQ(gThreadEntryPointInfo[i].offset, expected_offset) << gThreadEntryPointInfo[i].name;
     expected_offset += size_of_pointers;
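The check assumes the four entry point structs sit contiguously in Thread, beginning at interpreter_entrypoints_, so stepping through the table one pointer at a time reproduces each slot's offset. The same adjacency can be stated directly; a sketch using the OFFSETOF_MEMBER macro already used above (these exact checks are not in the patch and assume no padding between the members):

    // Sketch: each entry point block must start where the previous one ends.
    CHECK_EQ(OFFSETOF_MEMBER(Thread, jni_entrypoints_),
             OFFSETOF_MEMBER(Thread, interpreter_entrypoints_) + sizeof(InterpreterEntryPoints));
    CHECK_EQ(OFFSETOF_MEMBER(Thread, portable_entrypoints_),
             OFFSETOF_MEMBER(Thread, jni_entrypoints_) + sizeof(JniEntryPoints));
    CHECK_EQ(OFFSETOF_MEMBER(Thread, quick_entrypoints_),
             OFFSETOF_MEMBER(Thread, portable_entrypoints_) + sizeof(PortableEntryPoints));
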
@@ -1739,7 +1747,7 @@
       return false;  // End stack walk.
     } else {
       if (UNLIKELY(method_tracing_active_ &&
-                   GetInstrumentationExitPc() == GetReturnPc())) {
+                   GetQuickInstrumentationExitPc() == GetReturnPc())) {
         // Keep count of the number of unwinds during instrumentation.
         instrumentation_frames_to_pop_++;
       }
diff --git a/runtime/thread.h b/runtime/thread.h
index ff0fe22..8b6771e 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -26,6 +26,8 @@
 #include <string>
 
 #include "base/macros.h"
+#include "entrypoints/interpreter/interpreter_entrypoints.h"
+#include "entrypoints/jni/jni_entrypoints.h"
 #include "entrypoints/portable/portable_entrypoints.h"
 #include "entrypoints/quick/quick_entrypoints.h"
 #include "globals.h"
@@ -43,17 +45,17 @@
 namespace art {
 
 namespace mirror {
-class AbstractMethod;
-class Array;
-class Class;
-class ClassLoader;
-class Object;
-template<class T> class ObjectArray;
-template<class T> class PrimitiveArray;
-typedef PrimitiveArray<int32_t> IntArray;
-class StackTraceElement;
-class StaticStorageBase;
-class Throwable;
+  class AbstractMethod;
+  class Array;
+  class Class;
+  class ClassLoader;
+  class Object;
+  template<class T> class ObjectArray;
+  template<class T> class PrimitiveArray;
+  typedef PrimitiveArray<int32_t> IntArray;
+  class StackTraceElement;
+  class StaticStorageBase;
+  class Throwable;
 }  // namespace mirror
 class BaseMutex;
 class ClassLinker;
@@ -614,7 +616,7 @@
   void Init(ThreadList*, JavaVMExt*) EXCLUSIVE_LOCKS_REQUIRED(Locks::runtime_shutdown_lock_);
   void InitCardTable();
   void InitCpu();
-  void InitFunctionPointers();
+  void InitTlsEntryPoints();
   void InitTid();
   void InitPthreadKeySelf();
   void InitStackHwm();
@@ -776,8 +778,10 @@
  public:
   // Entrypoint function pointers
   // TODO: move this near the top, since changing its offset requires all oats to be recompiled!
-  QuickEntryPoints quick_entrypoints_;
+  InterpreterEntryPoints interpreter_entrypoints_;
+  JniEntryPoints jni_entrypoints_;
   PortableEntryPoints portable_entrypoints_;
+  QuickEntryPoints quick_entrypoints_;
 
  private:
   // How many times has our pthread key's destructor been called?
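The ordering of these members (interpreter, jni, portable, quick) is what the *_ENTRYPOINT_OFFSET macros and the offset table in thread.cc depend on, which is why the TODO warns that moving them invalidates already compiled oat files. A sketch of how a quick entry point offset is likely derived from this layout (the real macro lives in entrypoints/quick/quick_entrypoints.h; treat this as an approximation):

    // Sketch: Thread-relative offset = struct offset inside Thread
    //         + slot offset inside the entry point struct.
    #define QUICK_ENTRYPOINT_OFFSET(x) \
        ThreadOffset(OFFSETOF_MEMBER(Thread, quick_entrypoints_) + \
                     OFFSETOF_MEMBER(QuickEntryPoints, x))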