Merge "Avoid conditional loads if WORKAROUND_CORTEX_A9_745320 is defined."
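
This works around a Cortex-A9 hardware defect (erratum 745320) that
affects conditionally executed loads. Every ARM hunk below applies the
same mechanical rewrite when WORKAROUND_CORTEX_A9_745320 is defined: a
conditional load (ldrne/ldreq/ldrneb) becomes a branch around an
unconditional load, so a load instruction never executes with a failing
condition code. The pattern, as it appears in TEMPLATE_INTERPRET.S:

    @ before: load executes only when lr != 0
    cmp     lr, #0
    ldrne   r1, [lr, #3]

    @ after: skip an unconditional load when lr == 0
    cmp     lr, #0
    beq     101f            @ condition failed: jump past the load
    ldr     r1, [lr, #3]
101:

In TEMPLATE_RETURN.S the failing (break-frame) case is already branched
around before the load, so the clazz load simply drops its redundant
condition and becomes a plain ldr.

The regenerated out/ files also pick up a few unrelated fixes: the
armv7-a SMP_DMB_ST macro now emits the store-only "dmb st" barrier its
TODO comment asked for, the x86-atom backward-branch check uses jle
instead of jc so the periodic check fires on non-positive (backward)
branch offsets, and the x86-atom stack-overflow paths pass the extra
arguments now taken by dvmHandleStackOverflow (Thread* self,
Method* method) and dvmCleanupStackOverflow (Thread* self,
Object* exception).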
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 1bc6e95..1149aff 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -49,6 +49,7 @@
$(call add-clean-step, rm -rf $(OUT)/obj/SHARED_LIBRARIES/libdvm*)
$(call add-clean-step, rm -rf $(OUT)/obj/SHARED_LIBRARIES/libdvm*)
$(call add-clean-step, rm -rf $(OUT)/obj/SHARED_LIBRARIES/libdvm*)
+$(call add-clean-step, rm -rf $(OUT)/obj/SHARED_LIBRARIES/libdvm*)
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
# ************************************************
diff --git a/vm/compiler/template/armv5te/TEMPLATE_INTERPRET.S b/vm/compiler/template/armv5te/TEMPLATE_INTERPRET.S
index 2b0c730..0163ce0 100644
--- a/vm/compiler/template/armv5te/TEMPLATE_INTERPRET.S
+++ b/vm/compiler/template/armv5te/TEMPLATE_INTERPRET.S
@@ -13,7 +13,14 @@
* rFP - Dalvik frame pointer
*/
cmp lr, #0
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldr r1,[lr, #3]
+101:
+#else
ldrne r1,[lr, #3]
+#endif
ldr r2, .LinterpPunt
mov r0, r1 @ set Dalvik PC
bx r2
diff --git a/vm/compiler/template/armv5te/TEMPLATE_INVOKE_METHOD_PREDICTED_CHAIN.S b/vm/compiler/template/armv5te/TEMPLATE_INVOKE_METHOD_PREDICTED_CHAIN.S
index c3085b9..c22e3a0 100644
--- a/vm/compiler/template/armv5te/TEMPLATE_INVOKE_METHOD_PREDICTED_CHAIN.S
+++ b/vm/compiler/template/armv5te/TEMPLATE_INVOKE_METHOD_PREDICTED_CHAIN.S
@@ -29,7 +29,14 @@
cmp r3, r8 @ predicted class == actual class?
#if defined(WITH_JIT_TUNING)
ldr r7, .LdvmICHitCount
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ bne 101f
+ ldr r10, [r7, #0]
+101:
+#else
ldreq r10, [r7, #0]
+#endif
add r10, r10, #1
streq r10, [r7, #0]
#endif
diff --git a/vm/compiler/template/armv5te/TEMPLATE_RETURN.S b/vm/compiler/template/armv5te/TEMPLATE_RETURN.S
index b2e71ee..de99668 100644
--- a/vm/compiler/template/armv5te/TEMPLATE_RETURN.S
+++ b/vm/compiler/template/armv5te/TEMPLATE_RETURN.S
@@ -32,7 +32,7 @@
#endif
ldr r1, .LdvmJitToInterpNoChainNoProfile @ defined in footer.S
mov rFP, r10 @ publish new FP
- ldrne r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+ ldr r10, [r2, #offMethod_clazz] @ r10<- method->clazz
ldr r8, [r8] @ r8<- suspendCount
str r2, [rGLUE, #offGlue_method]@ glue->method = newSave->method
diff --git a/vm/compiler/template/out/CompilerTemplateAsm-armv5te-vfp.S b/vm/compiler/template/out/CompilerTemplateAsm-armv5te-vfp.S
index 655bc54..df189d6 100644
--- a/vm/compiler/template/out/CompilerTemplateAsm-armv5te-vfp.S
+++ b/vm/compiler/template/out/CompilerTemplateAsm-armv5te-vfp.S
@@ -204,7 +204,7 @@
#endif
ldr r1, .LdvmJitToInterpNoChainNoProfile @ defined in footer.S
mov rFP, r10 @ publish new FP
- ldrne r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+ ldr r10, [r2, #offMethod_clazz] @ r10<- method->clazz
ldr r8, [r8] @ r8<- suspendCount
str r2, [rGLUE, #offGlue_method]@ glue->method = newSave->method
@@ -389,7 +389,14 @@
cmp r3, r8 @ predicted class == actual class?
#if defined(WITH_JIT_TUNING)
ldr r7, .LdvmICHitCount
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ bne 101f
+ ldr r10, [r7, #0]
+101:
+#else
ldreq r10, [r7, #0]
+#endif
add r10, r10, #1
streq r10, [r7, #0]
#endif
@@ -1390,7 +1397,14 @@
* rFP - Dalvik frame pointer
*/
cmp lr, #0
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldr r1,[lr, #3]
+101:
+#else
ldrne r1,[lr, #3]
+#endif
ldr r2, .LinterpPunt
mov r0, r1 @ set Dalvik PC
bx r2
diff --git a/vm/compiler/template/out/CompilerTemplateAsm-armv5te.S b/vm/compiler/template/out/CompilerTemplateAsm-armv5te.S
index ff552bb..c469eac 100644
--- a/vm/compiler/template/out/CompilerTemplateAsm-armv5te.S
+++ b/vm/compiler/template/out/CompilerTemplateAsm-armv5te.S
@@ -204,7 +204,7 @@
#endif
ldr r1, .LdvmJitToInterpNoChainNoProfile @ defined in footer.S
mov rFP, r10 @ publish new FP
- ldrne r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+ ldr r10, [r2, #offMethod_clazz] @ r10<- method->clazz
ldr r8, [r8] @ r8<- suspendCount
str r2, [rGLUE, #offGlue_method]@ glue->method = newSave->method
@@ -389,7 +389,14 @@
cmp r3, r8 @ predicted class == actual class?
#if defined(WITH_JIT_TUNING)
ldr r7, .LdvmICHitCount
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ bne 101f
+ ldr r10, [r7, #0]
+101:
+#else
ldreq r10, [r7, #0]
+#endif
add r10, r10, #1
streq r10, [r7, #0]
#endif
@@ -1113,7 +1120,14 @@
* rFP - Dalvik frame pointer
*/
cmp lr, #0
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldr r1,[lr, #3]
+101:
+#else
ldrne r1,[lr, #3]
+#endif
ldr r2, .LinterpPunt
mov r0, r1 @ set Dalvik PC
bx r2
diff --git a/vm/compiler/template/out/CompilerTemplateAsm-armv7-a-neon.S b/vm/compiler/template/out/CompilerTemplateAsm-armv7-a-neon.S
index 34931f8..6382269 100644
--- a/vm/compiler/template/out/CompilerTemplateAsm-armv7-a-neon.S
+++ b/vm/compiler/template/out/CompilerTemplateAsm-armv7-a-neon.S
@@ -204,7 +204,7 @@
#endif
ldr r1, .LdvmJitToInterpNoChainNoProfile @ defined in footer.S
mov rFP, r10 @ publish new FP
- ldrne r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+ ldr r10, [r2, #offMethod_clazz] @ r10<- method->clazz
ldr r8, [r8] @ r8<- suspendCount
str r2, [rGLUE, #offGlue_method]@ glue->method = newSave->method
@@ -389,7 +389,14 @@
cmp r3, r8 @ predicted class == actual class?
#if defined(WITH_JIT_TUNING)
ldr r7, .LdvmICHitCount
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ bne 101f
+ ldr r10, [r7, #0]
+101:
+#else
ldreq r10, [r7, #0]
+#endif
add r10, r10, #1
streq r10, [r7, #0]
#endif
@@ -1390,7 +1397,14 @@
* rFP - Dalvik frame pointer
*/
cmp lr, #0
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldr r1,[lr, #3]
+101:
+#else
ldrne r1,[lr, #3]
+#endif
ldr r2, .LinterpPunt
mov r0, r1 @ set Dalvik PC
bx r2
diff --git a/vm/compiler/template/out/CompilerTemplateAsm-armv7-a.S b/vm/compiler/template/out/CompilerTemplateAsm-armv7-a.S
index b10beef..5753cc2 100644
--- a/vm/compiler/template/out/CompilerTemplateAsm-armv7-a.S
+++ b/vm/compiler/template/out/CompilerTemplateAsm-armv7-a.S
@@ -204,7 +204,7 @@
#endif
ldr r1, .LdvmJitToInterpNoChainNoProfile @ defined in footer.S
mov rFP, r10 @ publish new FP
- ldrne r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+ ldr r10, [r2, #offMethod_clazz] @ r10<- method->clazz
ldr r8, [r8] @ r8<- suspendCount
str r2, [rGLUE, #offGlue_method]@ glue->method = newSave->method
@@ -389,7 +389,14 @@
cmp r3, r8 @ predicted class == actual class?
#if defined(WITH_JIT_TUNING)
ldr r7, .LdvmICHitCount
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ bne 101f
+ ldr r10, [r7, #0]
+101:
+#else
ldreq r10, [r7, #0]
+#endif
add r10, r10, #1
streq r10, [r7, #0]
#endif
@@ -1390,7 +1397,14 @@
* rFP - Dalvik frame pointer
*/
cmp lr, #0
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldr r1,[lr, #3]
+101:
+#else
ldrne r1,[lr, #3]
+#endif
ldr r2, .LinterpPunt
mov r0, r1 @ set Dalvik PC
bx r2
diff --git a/vm/compiler/template/out/CompilerTemplateAsm-ia32.S b/vm/compiler/template/out/CompilerTemplateAsm-ia32.S
index 6ccb067..7726e97 100644
--- a/vm/compiler/template/out/CompilerTemplateAsm-ia32.S
+++ b/vm/compiler/template/out/CompilerTemplateAsm-ia32.S
@@ -6,7 +6,7 @@
/* File: ia32/header.S */
/*
- * Copyright (C) 2008 The Android Open Source Project
+ * Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/vm/mterp/armv5te/footer.S b/vm/mterp/armv5te/footer.S
index ac348a4..fe61117 100644
--- a/vm/mterp/armv5te/footer.S
+++ b/vm/mterp/armv5te/footer.S
@@ -448,7 +448,14 @@
ldr ip, [r3] @ ip<- suspendCount (int)
cmp r1, #0 @ debugger enabled?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+101:
+#else
ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#endif
ldr r2, [r2] @ r2<- activeProfilers (int)
orrnes ip, ip, r1 @ ip<- suspendCount | debuggerActive
/*
@@ -503,7 +510,14 @@
*/
ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
cmp r1, #0 @ debugger enabled?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+101:
+#else
ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#endif
#if !defined(WITH_INLINE_PROFILING)
ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
@@ -816,7 +830,14 @@
@ r2<- method we're returning to
ldr r3, [rGLUE, #offGlue_self] @ r3<- glue->self
cmp r2, #0 @ is this a break frame?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldr r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+101:
+#else
ldrne r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+#endif
mov r1, #0 @ "want switch" = false
beq common_gotoBail @ break frame, bail out completely
diff --git a/vm/mterp/out/InterpAsm-armv4t.S b/vm/mterp/out/InterpAsm-armv4t.S
index 7a572f7..08b9907 100644
--- a/vm/mterp/out/InterpAsm-armv4t.S
+++ b/vm/mterp/out/InterpAsm-armv4t.S
@@ -10300,7 +10300,14 @@
ldr ip, [r3] @ ip<- suspendCount (int)
cmp r1, #0 @ debugger enabled?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+101:
+#else
ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#endif
ldr r2, [r2] @ r2<- activeProfilers (int)
orrnes ip, ip, r1 @ ip<- suspendCount | debuggerActive
/*
@@ -10355,7 +10362,14 @@
*/
ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
cmp r1, #0 @ debugger enabled?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+101:
+#else
ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#endif
#if !defined(WITH_INLINE_PROFILING)
ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
@@ -10668,7 +10682,14 @@
@ r2<- method we're returning to
ldr r3, [rGLUE, #offGlue_self] @ r3<- glue->self
cmp r2, #0 @ is this a break frame?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldr r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+101:
+#else
ldrne r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+#endif
mov r1, #0 @ "want switch" = false
beq common_gotoBail @ break frame, bail out completely
diff --git a/vm/mterp/out/InterpAsm-armv5te-vfp.S b/vm/mterp/out/InterpAsm-armv5te-vfp.S
index 8b6eeca..fd1ff07 100644
--- a/vm/mterp/out/InterpAsm-armv5te-vfp.S
+++ b/vm/mterp/out/InterpAsm-armv5te-vfp.S
@@ -9838,7 +9838,14 @@
ldr ip, [r3] @ ip<- suspendCount (int)
cmp r1, #0 @ debugger enabled?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+101:
+#else
ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#endif
ldr r2, [r2] @ r2<- activeProfilers (int)
orrnes ip, ip, r1 @ ip<- suspendCount | debuggerActive
/*
@@ -9893,7 +9900,14 @@
*/
ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
cmp r1, #0 @ debugger enabled?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+101:
+#else
ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#endif
#if !defined(WITH_INLINE_PROFILING)
ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
@@ -10206,7 +10220,14 @@
@ r2<- method we're returning to
ldr r3, [rGLUE, #offGlue_self] @ r3<- glue->self
cmp r2, #0 @ is this a break frame?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldr r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+101:
+#else
ldrne r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+#endif
mov r1, #0 @ "want switch" = false
beq common_gotoBail @ break frame, bail out completely
diff --git a/vm/mterp/out/InterpAsm-armv5te.S b/vm/mterp/out/InterpAsm-armv5te.S
index 3b1b786..c285c02 100644
--- a/vm/mterp/out/InterpAsm-armv5te.S
+++ b/vm/mterp/out/InterpAsm-armv5te.S
@@ -10296,7 +10296,14 @@
ldr ip, [r3] @ ip<- suspendCount (int)
cmp r1, #0 @ debugger enabled?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+101:
+#else
ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#endif
ldr r2, [r2] @ r2<- activeProfilers (int)
orrnes ip, ip, r1 @ ip<- suspendCount | debuggerActive
/*
@@ -10351,7 +10358,14 @@
*/
ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
cmp r1, #0 @ debugger enabled?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+101:
+#else
ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#endif
#if !defined(WITH_INLINE_PROFILING)
ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
@@ -10664,7 +10678,14 @@
@ r2<- method we're returning to
ldr r3, [rGLUE, #offGlue_self] @ r3<- glue->self
cmp r2, #0 @ is this a break frame?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldr r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+101:
+#else
ldrne r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+#endif
mov r1, #0 @ "want switch" = false
beq common_gotoBail @ break frame, bail out completely
diff --git a/vm/mterp/out/InterpAsm-armv7-a-neon.S b/vm/mterp/out/InterpAsm-armv7-a-neon.S
index 6340fe5..e51d093 100644
--- a/vm/mterp/out/InterpAsm-armv7-a-neon.S
+++ b/vm/mterp/out/InterpAsm-armv7-a-neon.S
@@ -263,7 +263,7 @@
*/
.macro SMP_DMB_ST
#if ANDROID_SMP != 0
- dmb @ TODO: want "dmb st" here
+ dmb st
#else
/* not SMP */
#endif
@@ -9776,7 +9776,14 @@
ldr ip, [r3] @ ip<- suspendCount (int)
cmp r1, #0 @ debugger enabled?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+101:
+#else
ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#endif
ldr r2, [r2] @ r2<- activeProfilers (int)
orrnes ip, ip, r1 @ ip<- suspendCount | debuggerActive
/*
@@ -9831,7 +9838,14 @@
*/
ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
cmp r1, #0 @ debugger enabled?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+101:
+#else
ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#endif
#if !defined(WITH_INLINE_PROFILING)
ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
@@ -10144,7 +10158,14 @@
@ r2<- method we're returning to
ldr r3, [rGLUE, #offGlue_self] @ r3<- glue->self
cmp r2, #0 @ is this a break frame?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldr r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+101:
+#else
ldrne r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+#endif
mov r1, #0 @ "want switch" = false
beq common_gotoBail @ break frame, bail out completely
diff --git a/vm/mterp/out/InterpAsm-armv7-a.S b/vm/mterp/out/InterpAsm-armv7-a.S
index 246da07..e7a8e91 100644
--- a/vm/mterp/out/InterpAsm-armv7-a.S
+++ b/vm/mterp/out/InterpAsm-armv7-a.S
@@ -263,7 +263,7 @@
*/
.macro SMP_DMB_ST
#if ANDROID_SMP != 0
- dmb @ TODO: want "dmb st" here
+ dmb st
#else
/* not SMP */
#endif
@@ -9776,7 +9776,14 @@
ldr ip, [r3] @ ip<- suspendCount (int)
cmp r1, #0 @ debugger enabled?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+101:
+#else
ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#endif
ldr r2, [r2] @ r2<- activeProfilers (int)
orrnes ip, ip, r1 @ ip<- suspendCount | debuggerActive
/*
@@ -9831,7 +9838,14 @@
*/
ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
cmp r1, #0 @ debugger enabled?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+101:
+#else
ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+#endif
#if !defined(WITH_INLINE_PROFILING)
ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
@@ -10144,7 +10158,14 @@
@ r2<- method we're returning to
ldr r3, [rGLUE, #offGlue_self] @ r3<- glue->self
cmp r2, #0 @ is this a break frame?
+#if defined(WORKAROUND_CORTEX_A9_745320)
+ /* Don't use conditional loads if the HW defect exists */
+ beq 101f
+ ldr r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+101:
+#else
ldrne r10, [r2, #offMethod_clazz] @ r10<- method->clazz
+#endif
mov r1, #0 @ "want switch" = false
beq common_gotoBail @ break frame, bail out completely
diff --git a/vm/mterp/out/InterpAsm-x86-atom.S b/vm/mterp/out/InterpAsm-x86-atom.S
index e3ea070..dedd946 100644
--- a/vm/mterp/out/InterpAsm-x86-atom.S
+++ b/vm/mterp/out/InterpAsm-x86-atom.S
@@ -2529,7 +2529,7 @@
shl $16, %ecx # prepare to create +AAAAAAAA
or %ecx, %edx # %edx<- +AAAAAAAA
shl $1, %edx # %edx is doubled to get the byte offset
- jc common_periodicChecks2 # do check on backwards branch
+ jle common_periodicChecks2 # do check on backwards branch
FINISH_RB %edx, %ecx # jump to next instruction
/* ------------------------------ */
@@ -18209,14 +18209,15 @@
jne common_exceptionThrown # handle exception
FGETOP_JMP 3, %edx # jump to next instruction; getop, jmp
-.LstackOverflow:
+.LstackOverflow: # %ecx=methodToCall
+ movl %ecx, -4(%esp) # push parameter method to call
movl rGLUE, %ecx # %ecx<- pMterpGlue
movl offGlue_self(%ecx), %ecx # %ecx<- glue->self
- movl %ecx, -4(%esp) # push parameter self
- lea -4(%esp), %esp
- call dvmHandleStackOverflow # call: (Thread* self)
+ movl %ecx, -8(%esp) # push parameter self
+ lea -8(%esp), %esp
+ call dvmHandleStackOverflow # call: (Thread* self, Method* method)
# return: void
- lea 4(%esp), %esp
+ lea 8(%esp), %esp
jmp common_exceptionThrown # handle exception
#ifdef ASSIST_DEBUGGER
#endif
@@ -18353,7 +18354,7 @@
movl %eax, -4(%esp) # save %eax for later
movl %ecx, -12(%esp) # push parameter 2 glue->self
lea -12(%esp), %esp
- call dvmCleanupStackOverflow # call: (Thread* self)
+ call dvmCleanupStackOverflow # call: (Thread* self, Object* exception)
# return: void
lea 12(%esp), %esp
movl -4(%esp), %eax # %eax<- restore %eax
@@ -18403,7 +18404,7 @@
je 1f #
movl %edx, -12(%esp) # push parameter 1 glue->self
lea -12(%esp), %esp
- call dvmCleanupStackOverflow # call: (Thread* self)
+ call dvmCleanupStackOverflow # call: (Thread* self, Object* exception)
# return: void
lea 12(%esp), %esp