Consolidate mterp's debug/profile/suspend control
This is a step towards full debug & profiling support in JIT'd code.
Previously, the interpreter made several distinct checks at each safe
point: pending suspend requests, debugger activity, and profiler
activity. This CL collapses those individual controls into a single
control word, significantly speeding up the safe-point check code path
in the common fast case.
In short, any time a VM component wants control to break at a safe
point, it sets a bit in gDvm.interpBreak, which is examined by the
safe-point check in footer.S. In the old code, the safe-point check
consisted of 11 instructions (including 6 loads). The new sequence is
6 instructions (4 loads: two are needed immediately, and two are
speculative, issued to fill otherwise-stalled slots).
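
In rough C terms, the new check looks like the sketch below. This is a
conceptual rendering of the assembly in footer.S, not the literal code;
"self" stands for the current Thread, and the comments name the actions
taken on the slow path.

    /* Fast path: a single word says whether anything needs attention. */
    if (gDvm.interpBreak == 0)
        return;                           /* common case: nothing pending */
    /* Slow path: disambiguate. */
    if (self->suspendCount != 0) {
        /* some thread was asked to suspend, and it's us: park here */
    }
    if (gDvm.interpBreak & (kSubModeDebuggerActive | kSubModeEmulatorTrace |
                            kSubModeMethodTrace | kSubModeInstCounting)) {
        /* bail out to the debug/profiling interpreter */
    }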
This code path is hot enough in the interpreter that we see a
measurable speedup in benchmarks: the old sieve benchmark improves
from 252 to 256 (~1.5%).
As part of the change, the global debuggerActive and activeProfilers
variables have been eliminated as redundant. Note also a subtle change
in thread suspension: suspend request counts are still kept on a
per-thread basis, and previously each thread examined only its own
suspend count. With this change, a bit in interpBreak signals that at
least one suspend request is active across all threads. Each thread
treats the bit as "some thread is supposed to suspend; check whether
it's me".
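
For example, the suspend-count helper in Thread.c now keeps that shared
bit in sync with the global sum (taken from the dvmAddToThreadSuspendCount
change below; the caller holds threadSuspendCountLock):

    *pSuspendCount += delta;
    gDvm.sumThreadSuspendCount += delta;
    dvmUpdateInterpBreak(kSubModeSuspendRequest,
                         (gDvm.sumThreadSuspendCount != 0));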
Change-Id: I527dc918f58d1486ef3324136080ef541a775ba8
diff --git a/vm/Debugger.c b/vm/Debugger.c
index 98bd86d..87ea008 100644
--- a/vm/Debugger.c
+++ b/vm/Debugger.c
@@ -392,15 +392,12 @@
*/
void dvmDbgActive(void)
{
- if (gDvm.debuggerActive)
+ if (DEBUGGER_ACTIVE)
return;
LOGI("Debugger is active\n");
dvmInitBreakpoints();
- gDvm.debuggerActive = true;
-#if defined(WITH_JIT)
- dvmCompilerStateRefresh();
-#endif
+ dvmUpdateInterpBreak(kSubModeDebuggerActive, true);
}
/*
@@ -415,7 +412,7 @@
{
assert(gDvm.debuggerConnected);
- gDvm.debuggerActive = false;
+ dvmUpdateInterpBreak(kSubModeDebuggerActive, false);
dvmHashTableLock(gDvm.dbgRegistry);
gDvm.debuggerConnected = false;
@@ -428,9 +425,6 @@
dvmHashTableClear(gDvm.dbgRegistry);
dvmHashTableUnlock(gDvm.dbgRegistry);
-#if defined(WITH_JIT)
- dvmCompilerStateRefresh();
-#endif
}
/*
@@ -440,7 +434,7 @@
*/
bool dvmDbgIsDebuggerConnected(void)
{
- return gDvm.debuggerActive;
+ return DEBUGGER_ACTIVE;
}
/*
@@ -2576,7 +2570,7 @@
*/
void dvmDbgPostThreadStart(Thread* thread)
{
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmJdwpPostThreadChange(gDvm.jdwpState,
objectToObjectId(thread->threadObj), true);
}
@@ -2589,7 +2583,7 @@
*/
void dvmDbgPostThreadDeath(Thread* thread)
{
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmJdwpPostThreadChange(gDvm.jdwpState,
objectToObjectId(thread->threadObj), false);
}
diff --git a/vm/Debugger.h b/vm/Debugger.h
index d722160..a842442 100644
--- a/vm/Debugger.h
+++ b/vm/Debugger.h
@@ -32,6 +32,8 @@
struct Method;
struct Thread;
+#define DEBUGGER_ACTIVE (gDvm.interpBreak & kSubModeDebuggerActive)
+
/*
* Used by StepControl to track a set of addresses associated with
* a single line.
diff --git a/vm/Globals.h b/vm/Globals.h
index e761081..1237510 100644
--- a/vm/Globals.h
+++ b/vm/Globals.h
@@ -62,6 +62,21 @@
} ExecutionMode;
/*
+ * Execution sub modes, e.g. debugging, profiling, etc.
+ * Treated as bit flags for fast access. These values are used directly
+ * by assembly code in the mterp interpreter and may also be used by
+ * code generated by the JIT. Take care when changing.
+ */
+typedef enum ExecutionSubModes {
+ kSubModeNormal = 0x00,
+ kSubModeMethodTrace = 0x01,
+ kSubModeEmulatorTrace = 0x02,
+ kSubModeInstCounting = 0x04,
+ kSubModeDebuggerActive = 0x08,
+ kSubModeSuspendRequest = 0x10, /* Set if any suspend request active */
+} ExecutionSubModes;
+
+/*
* Register map generation mode. Only applicable when generateRegisterMaps
* is enabled. (The "disabled" state is not folded into this because
* there are callers like dexopt that want to enable/disable without
@@ -408,8 +423,8 @@
pthread_cond_t threadSuspendCountCond;
/*
- * Sum of all threads' suspendCount fields. The JIT needs to know if any
- * thread is suspended. Guarded by threadSuspendCountLock.
+ * Sum of all threads' suspendCount fields. Guarded by
+ * threadSuspendCountLock.
*/
int sumThreadSuspendCount;
@@ -550,12 +565,8 @@
/*
* JDWP debugger support.
- *
- * Note "debuggerActive" is accessed from mterp, so its storage size and
- * meaning must not be changed without updating the assembly sources.
*/
bool debuggerConnected; /* debugger or DDMS is connected */
- u1 debuggerActive; /* debugger is making requests */
JdwpState* jdwpState;
/*
@@ -603,14 +614,12 @@
#endif
/*
- * When a profiler is enabled, this is incremented. Distinct profilers
- * include "dmtrace" method tracing, emulator method tracing, and
- * possibly instruction counting.
- *
- * The purpose of this is to have a single value that the interpreter
- * can check to see if any profiling activity is enabled.
+ * When normal control flow needs to be interrupted because
+ * of an attached debugger, profiler, thread stop request, etc.,
+ * a bit is set here. We collapse all stop reasons into
+ * a single location for performance reasons.
*/
- volatile int activeProfilers;
+ volatile int interpBreak;
/*
* State for method-trace profiling.
diff --git a/vm/Profile.c b/vm/Profile.c
index 9b356f6..a2cfac1 100644
--- a/vm/Profile.c
+++ b/vm/Profile.c
@@ -212,28 +212,12 @@
}
/*
- * Update the "active profilers" count.
- *
- * "count" should be +1 or -1.
+ * Update the set of active profilers
*/
-static void updateActiveProfilers(int count)
+static void updateActiveProfilers(ExecutionSubModes newMode, bool enable)
{
- int oldValue, newValue;
-
- do {
- oldValue = gDvm.activeProfilers;
- newValue = oldValue + count;
- if (newValue < 0) {
- LOGE("Can't have %d active profilers\n", newValue);
- dvmAbort();
- }
- } while (android_atomic_release_cas(oldValue, newValue,
- &gDvm.activeProfilers) != 0);
-
- LOGD("+++ active profiler count now %d\n", newValue);
-#if defined(WITH_JIT)
- dvmCompilerStateRefresh();
-#endif
+ dvmUpdateInterpBreak(newMode, enable);
+ LOGD("+++ active profiler set now %d\n", gDvm.interpBreak);
}
@@ -348,7 +332,9 @@
dvmMethodTraceStop();
dvmLockMutex(&state->startStopLock);
}
- updateActiveProfilers(1);
+ /* Should only have a single trace going at once */
+ assert((gDvm.interpBreak & kSubModeMethodTrace) == 0);
+ updateActiveProfilers(kSubModeMethodTrace, true);
LOGI("TRACE STARTED: '%s' %dKB\n", traceFileName, bufferSize / 1024);
/*
@@ -416,7 +402,7 @@
return;
fail:
- updateActiveProfilers(-1);
+ updateActiveProfilers(kSubModeMethodTrace, false);
if (state->traceFile != NULL) {
fclose(state->traceFile);
state->traceFile = NULL;
@@ -508,7 +494,7 @@
dvmUnlockMutex(&state->startStopLock);
return;
} else {
- updateActiveProfilers(-1);
+ updateActiveProfilers(kSubModeMethodTrace, false);
}
/* compute elapsed time */
@@ -574,7 +560,7 @@
LOGI("TRACE STOPPED%s: writing %d records\n",
state->overflow ? " (NOTE: overflowed buffer)" : "",
(finalCurOffset - TRACE_HEADER_LEN) / TRACE_REC_SIZE);
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
LOGW("WARNING: a debugger is active; method-tracing results "
"will be skewed\n");
}
@@ -736,7 +722,7 @@
void dvmFastMethodTraceEnter(const Method* method,
const struct InterpState* interpState)
{
- if (gDvm.activeProfilers) {
+ if (gDvm.interpBreak & kSubModeMethodTrace) {
dvmMethodTraceAdd(interpState->self, method, METHOD_TRACE_ENTER);
}
}
@@ -748,7 +734,7 @@
*/
void dvmFastJavaMethodTraceExit(const struct InterpState* interpState)
{
- if (gDvm.activeProfilers) {
+ if (gDvm.interpBreak & kSubModeMethodTrace) {
dvmMethodTraceAdd(interpState->self, interpState->method,
METHOD_TRACE_EXIT);
}
@@ -762,7 +748,7 @@
void dvmFastNativeMethodTraceExit(const Method* method,
const struct InterpState* interpState)
{
- if (gDvm.activeProfilers) {
+ if (gDvm.interpBreak & kSubModeMethodTrace) {
dvmMethodTraceAdd(interpState->self, method, METHOD_TRACE_EXIT);
}
}
@@ -869,12 +855,11 @@
if (gDvm.emulatorTracePage == NULL)
return;
- updateActiveProfilers(1);
-
/* in theory we should make this an atomic inc; in practice not important */
gDvm.emulatorTraceEnableCount++;
if (gDvm.emulatorTraceEnableCount == 1)
LOGD("--- emulator method traces enabled\n");
+ updateActiveProfilers(kSubModeEmulatorTrace, true);
}
/*
@@ -886,11 +871,12 @@
LOGE("ERROR: emulator tracing not enabled\n");
return;
}
- updateActiveProfilers(-1);
/* in theory we should make this an atomic inc; in practice not important */
gDvm.emulatorTraceEnableCount--;
if (gDvm.emulatorTraceEnableCount == 0)
LOGD("--- emulator method traces disabled\n");
+ updateActiveProfilers(kSubModeEmulatorTrace,
+ (gDvm.emulatorTraceEnableCount != 0));
}
@@ -902,9 +888,9 @@
#if defined(WITH_INLINE_PROFILING)
LOGW("Instruction counting not supported with inline profiling");
#endif
- updateActiveProfilers(1);
/* in theory we should make this an atomic inc; in practice not important */
gDvm.instructionCountEnableCount++;
+ updateActiveProfilers(kSubModeInstCounting, true);
}
/*
@@ -916,8 +902,9 @@
LOGE("ERROR: instruction counting not enabled\n");
dvmAbort();
}
- updateActiveProfilers(-1);
gDvm.instructionCountEnableCount--;
+ updateActiveProfilers(kSubModeInstCounting,
+ (gDvm.instructionCountEnableCount != 0));
}
diff --git a/vm/Profile.h b/vm/Profile.h
index dd38252..167eafe 100644
--- a/vm/Profile.h
+++ b/vm/Profile.h
@@ -113,30 +113,24 @@
*/
#define TRACE_METHOD_ENTER(_self, _method) \
do { \
- if (gDvm.activeProfilers != 0) { \
- if (gDvm.methodTrace.traceEnabled) \
- dvmMethodTraceAdd(_self, _method, METHOD_TRACE_ENTER); \
- if (gDvm.emulatorTraceEnableCount != 0) \
- dvmEmitEmulatorTrace(_method, METHOD_TRACE_ENTER); \
- } \
+ if (gDvm.interpBreak & kSubModeMethodTrace) \
+ dvmMethodTraceAdd(_self, _method, METHOD_TRACE_ENTER); \
+ if (gDvm.interpBreak & kSubModeEmulatorTrace) \
+ dvmEmitEmulatorTrace(_method, METHOD_TRACE_ENTER); \
} while(0);
#define TRACE_METHOD_EXIT(_self, _method) \
do { \
- if (gDvm.activeProfilers != 0) { \
- if (gDvm.methodTrace.traceEnabled) \
- dvmMethodTraceAdd(_self, _method, METHOD_TRACE_EXIT); \
- if (gDvm.emulatorTraceEnableCount != 0) \
- dvmEmitEmulatorTrace(_method, METHOD_TRACE_EXIT); \
- } \
+ if (gDvm.interpBreak & kSubModeMethodTrace) \
+ dvmMethodTraceAdd(_self, _method, METHOD_TRACE_EXIT); \
+ if (gDvm.interpBreak & kSubModeEmulatorTrace) \
+ dvmEmitEmulatorTrace(_method, METHOD_TRACE_EXIT); \
} while(0);
#define TRACE_METHOD_UNROLL(_self, _method) \
do { \
- if (gDvm.activeProfilers != 0) { \
- if (gDvm.methodTrace.traceEnabled) \
- dvmMethodTraceAdd(_self, _method, METHOD_TRACE_UNROLL); \
- if (gDvm.emulatorTraceEnableCount != 0) \
- dvmEmitEmulatorTrace(_method, METHOD_TRACE_UNROLL); \
- } \
+ if (gDvm.interpBreak & kSubModeMethodTrace) \
+ dvmMethodTraceAdd(_self, _method, METHOD_TRACE_UNROLL); \
+ if (gDvm.interpBreak & kSubModeEmulatorTrace) \
+ dvmEmitEmulatorTrace(_method, METHOD_TRACE_UNROLL); \
} while(0);
void dvmMethodTraceAdd(struct Thread* self, const Method* method, int action);
diff --git a/vm/Thread.c b/vm/Thread.c
index cd9b9d3..36c34f4 100644
--- a/vm/Thread.c
+++ b/vm/Thread.c
@@ -243,14 +243,18 @@
static int getThreadPriorityFromSystem(void);
/*
- * The JIT needs to know if any thread is suspended. We do this by
- * maintaining a global sum of all threads' suspend counts. All suspendCount
- * updates should go through this after aquiring threadSuspendCountLock.
+ * If there is any thread suspend request outstanding,
+ * we need to mark it in interpState to signal the interpreter that
+ * something is pending. We do this by maintaining a global sum of
+ * all threads' suspend counts. All suspendCount updates should go
+ * through this after acquiring threadSuspendCountLock.
*/
-static inline void dvmAddToThreadSuspendCount(int *pSuspendCount, int delta)
+static void dvmAddToThreadSuspendCount(int *pSuspendCount, int delta)
{
*pSuspendCount += delta;
gDvm.sumThreadSuspendCount += delta;
+ dvmUpdateInterpBreak(kSubModeSuspendRequest,
+ (gDvm.sumThreadSuspendCount != 0));
}
/*
@@ -1797,12 +1801,10 @@
}
bail:
-#if defined(WITH_JIT)
/* Remove this thread's suspendCount from global suspendCount sum */
lockThreadSuspendCount();
dvmAddToThreadSuspendCount(&self->suspendCount, -self->suspendCount);
unlockThreadSuspendCount();
-#endif
dvmReleaseTrackedAlloc(exception, self);
}
diff --git a/vm/alloc/HeapWorker.c b/vm/alloc/HeapWorker.c
index d67f49e..127d03c 100644
--- a/vm/alloc/HeapWorker.c
+++ b/vm/alloc/HeapWorker.c
@@ -128,7 +128,7 @@
u8 delta = now - heapWorkerInterpStartTime;
if (delta > HEAP_WORKER_WATCHDOG_TIMEOUT &&
- (gDvm.debuggerActive || gDvm.nativeDebuggerActive))
+ (DEBUGGER_ACTIVE || gDvm.nativeDebuggerActive))
{
/*
* Debugger suspension can block the thread indefinitely. For
diff --git a/vm/compiler/Compiler.c b/vm/compiler/Compiler.c
index adb58dd..886b1f1 100644
--- a/vm/compiler/Compiler.c
+++ b/vm/compiler/Compiler.c
@@ -751,11 +751,7 @@
dvmLockMutex(&gDvmJit.tableLock);
jitActive = gDvmJit.pProfTable != NULL;
- bool disableJit = gDvm.debuggerActive;
-#if !defined(WITH_INLINE_PROFILING)
- disableJit = disableJit || (gDvm.activeProfilers > 0);
-#endif
- jitActivate = !disableJit;
+ jitActivate = !dvmDebuggerOrProfilerActive();
if (jitActivate && !jitActive) {
gDvmJit.pProfTable = gDvmJit.pProfTableCopy;
diff --git a/vm/interp/Interp.c b/vm/interp/Interp.c
index 2e241b1..ef11547 100644
--- a/vm/interp/Interp.c
+++ b/vm/interp/Interp.c
@@ -1215,6 +1215,23 @@
}
/*
+ * Update interpBreak
+ */
+void dvmUpdateInterpBreak(int newMode, bool enable)
+{
+ ExecutionSubModes oldValue, newValue;
+
+ do {
+ oldValue = gDvm.interpBreak;
+ newValue = enable ? oldValue | newMode : oldValue & ~newMode;
+ } while (android_atomic_release_cas(oldValue, newValue,
+ &gDvm.interpBreak) != 0);
+#if defined(WITH_JIT)
+ dvmCompilerStateRefresh();
+#endif
+}
+
+/*
* Main interpreter loop entry point. Select "standard" or "debug"
* interpreter and switch between them as required.
*
diff --git a/vm/interp/Interp.h b/vm/interp/Interp.h
index 784fc7a..e5a4fdc 100644
--- a/vm/interp/Interp.h
+++ b/vm/interp/Interp.h
@@ -59,4 +59,9 @@
*/
void dvmFlushBreakpoints(ClassObject* clazz);
+/*
+ * Update interpBreak
+ */
+void dvmUpdateInterpBreak(int newMode, bool enable);
+
#endif /*_DALVIK_INTERP_INTERP*/
diff --git a/vm/interp/InterpDefs.h b/vm/interp/InterpDefs.h
index 8158868..3781dba 100644
--- a/vm/interp/InterpDefs.h
+++ b/vm/interp/InterpDefs.h
@@ -123,10 +123,8 @@
volatile int* pSelfSuspendCount;
/* Biased base of GC's card table */
u1* cardTable;
- /* points at gDvm.debuggerActive, or NULL if debugger not enabled */
- volatile u1* pDebuggerActive;
- /* points at gDvm.activeProfilers */
- volatile int* pActiveProfilers;
+ /* points at gDvm.interpBreak */
+ volatile int* pInterpBreak;
/* ----------------------------------------------------------------------
*/
@@ -229,11 +227,16 @@
*/
static inline bool dvmDebuggerOrProfilerActive(void)
{
- bool result = gDvm.debuggerActive;
-#if !defined(WITH_INLINE_PROFILING)
- result = result || (gDvm.activeProfilers != 0);
+#if defined(WITH_INLINE_PROFILING)
+ return gDvm.interpBreak & (kSubModeDebuggerActive |
+ kSubModeEmulatorTrace |
+ kSubModeInstCounting);
+#else
+ return gDvm.interpBreak & (kSubModeDebuggerActive |
+ kSubModeEmulatorTrace |
+ kSubModeMethodTrace |
+ kSubModeInstCounting);
#endif
- return result;
}
#if defined(WITH_JIT)
@@ -243,11 +246,7 @@
*/
static inline bool dvmJitDebuggerOrProfilerActive()
{
- bool result = (gDvmJit.pProfTable != NULL) || gDvm.debuggerActive;
-#if !defined(WITH_INLINE_PROFILING)
- result = result || (gDvm.activeProfilers != 0);
-#endif
- return result;
+ return (gDvmJit.pProfTable != NULL) || dvmDebuggerOrProfilerActive();
}
/*
diff --git a/vm/jdwp/JdwpMain.c b/vm/jdwp/JdwpMain.c
index 24e5c6c..b4471da 100644
--- a/vm/jdwp/JdwpMain.c
+++ b/vm/jdwp/JdwpMain.c
@@ -393,7 +393,7 @@
*/
s8 dvmJdwpLastDebuggerActivity(JdwpState* state)
{
- if (!gDvm.debuggerActive) {
+ if (!DEBUGGER_ACTIVE) {
LOGD("dvmJdwpLastDebuggerActivity: no active debugger\n");
return -1;
}
diff --git a/vm/mterp/Mterp.c b/vm/mterp/Mterp.c
index 68218a3..4722779 100644
--- a/vm/mterp/Mterp.c
+++ b/vm/mterp/Mterp.c
@@ -96,12 +96,7 @@
TRACE_METHOD_ENTER(self, glue->method);
}
#endif
- if (gDvm.jdwpConfigured) {
- glue->pDebuggerActive = &gDvm.debuggerActive;
- } else {
- glue->pDebuggerActive = NULL;
- }
- glue->pActiveProfilers = &gDvm.activeProfilers;
+ glue->pInterpBreak = &gDvm.interpBreak;
IF_LOGVV() {
char* desc = dexProtoCopyMethodDescriptor(&glue->method->prototype);
diff --git a/vm/mterp/armv5te/footer.S b/vm/mterp/armv5te/footer.S
index be71941..7081c52 100644
--- a/vm/mterp/armv5te/footer.S
+++ b/vm/mterp/armv5te/footer.S
@@ -440,48 +440,21 @@
* r9 is trampoline PC adjustment *in bytes*
*/
common_periodicChecks:
+ ldr r1, [rGLUE, #offGlue_pInterpBreak] @ r1<- &interpBreak
+ /* speculatively load address of thread-specific suspend count */
ldr r3, [rGLUE, #offGlue_pSelfSuspendCount] @ r3<- &suspendCount
-
- ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
- ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
-
+ ldr r1, [r1] @ r1<- interpBreak
+ /* speculatively load thread-specific suspend count */
ldr ip, [r3] @ ip<- suspendCount (int)
-
- cmp r1, #0 @ debugger enabled?
-#if defined(WORKAROUND_CORTEX_A9_745320)
- /* Don't use conditional loads if the HW defect exists */
- beq 101f
- ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-101:
-#else
- ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-#endif
- ldr r2, [r2] @ r2<- activeProfilers (int)
- orrnes ip, ip, r1 @ ip<- suspendCount | debuggerActive
- /*
- * Don't switch the interpreter in the libdvm_traceview build even if the
- * profiler is active.
- * The code here is opted for less intrusion instead of performance.
- * That is, *pActiveProfilers is still loaded into r2 even though it is not
- * used when WITH_INLINE_PROFILING is defined.
- */
-#if !defined(WITH_INLINE_PROFILING)
- orrs ip, ip, r2 @ ip<- suspend|debugger|profiler; set Z
-#endif
-
-
- bxeq lr @ all zero, return
-
+ cmp r1, #0 @ anything unusual?
+ bxeq lr @ return if not
/*
* One or more interesting events have happened. Figure out what.
*
- * If debugging or profiling are compiled in, we need to disambiguate.
- *
* r0 still holds the reentry type.
*/
- ldr ip, [r3] @ ip<- suspendCount (int)
cmp ip, #0 @ want suspend?
- beq 1f @ no, must be debugger/profiler
+ beq 3f @ no, must be something else
stmfd sp!, {r0, lr} @ preserve r0 and lr
#if defined(WITH_JIT)
@@ -502,42 +475,33 @@
ldmfd sp!, {r0, lr} @ restore r0 and lr
/*
- * Reload the debugger/profiler enable flags. We're checking to see
- * if either of these got set while we were suspended.
- *
- * If WITH_INLINE_PROFILING is configured, don't check whether the profiler
- * is enabled or not as the profiling will be done inline.
+ * Reload the interpBreak flags - they may have changed while we
+ * were suspended.
*/
- ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
- cmp r1, #0 @ debugger enabled?
-#if defined(WORKAROUND_CORTEX_A9_745320)
- /* Don't use conditional loads if the HW defect exists */
- beq 101f
- ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-101:
+ ldr r1, [rGLUE, #offGlue_pInterpBreak] @ r1<- &interpBreak
+ ldr r1, [r1] @ r1<- interpBreak
+3:
+ /*
+ * TODO: this code is too fragile. Need a general mechanism
+ * to identify what actions to take by submode. Some profiling modes
+ * (instruction count) need to single-step, while method tracing
+ * may not. Debugging with breakpoints can run unfettered, but
+ * source-level single-stepping requires Dalvik singlestepping.
+ * GC may require a one-shot action and then full-speed resumption.
+ */
+#if defined(WITH_INLINE_PROFILING)
+ ands r1, #(kSubModeDebuggerActive | kSubModeEmulatorTrace | kSubModeInstCounting)
#else
- ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+ ands r1, #(kSubModeDebuggerActive | kSubModeEmulatorTrace | kSubModeInstCounting | kSubModeMethodTrace)
#endif
+ bxeq lr @ nothing to do, return
-#if !defined(WITH_INLINE_PROFILING)
- ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
- ldr r2, [r2] @ r2<- activeProfilers (int)
- orrs r1, r1, r2
-#else
- cmp r1, #0 @ only consult the debuggerActive flag
-#endif
-
- beq 2f
-
-1: @ debugger/profiler enabled, bail out; glue->entryPoint was set above
+ @ debugger/profiler enabled, bail out; glue->entryPoint was set above
str r0, [rGLUE, #offGlue_entryPoint] @ store r0, need for debug/prof
add rPC, rPC, r9 @ update rPC
mov r1, #1 @ "want switch" = true
b common_gotoBail @ side exit
-2:
- bx lr @ nothing to do, return
-
/*
* The equivalent of "goto bail", this calls through the "bail handler".
diff --git a/vm/mterp/c/OP_INVOKE_DIRECT_EMPTY.c b/vm/mterp/c/OP_INVOKE_DIRECT_EMPTY.c
index 7adbdab..8bdf78a 100644
--- a/vm/mterp/c/OP_INVOKE_DIRECT_EMPTY.c
+++ b/vm/mterp/c/OP_INVOKE_DIRECT_EMPTY.c
@@ -3,7 +3,7 @@
//LOGI("Ignoring empty\n");
FINISH(3);
#else
- if (!gDvm.debuggerActive) {
+ if (!DEBUGGER_ACTIVE) {
//LOGI("Skipping empty\n");
FINISH(3); // don't want it to show up in profiler output
} else {
diff --git a/vm/mterp/c/gotoTargets.c b/vm/mterp/c/gotoTargets.c
index 9a16175..df5575d 100644
--- a/vm/mterp/c/gotoTargets.c
+++ b/vm/mterp/c/gotoTargets.c
@@ -703,7 +703,7 @@
* here, and have the JNI exception code do the reporting to the
* debugger.
*/
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
void* catchFrame;
catchRelPc = dvmFindCatchBlock(self, pc - curMethod->insns,
exception, true, &catchFrame);
@@ -985,7 +985,7 @@
DUMP_REGS(methodToCall, newFp, true); // show input args
#if (INTERP_TYPE == INTERP_DBG)
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostLocationEvent(methodToCall, -1,
dvmGetThisPtr(curMethod, fp), DBG_METHOD_ENTRY);
}
@@ -1012,7 +1012,7 @@
(*methodToCall->nativeFunc)(newFp, &retval, methodToCall, self);
#if (INTERP_TYPE == INTERP_DBG)
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostLocationEvent(methodToCall, -1,
dvmGetThisPtr(curMethod, fp), DBG_METHOD_EXIT);
}
diff --git a/vm/mterp/common/asm-constants.h b/vm/mterp/common/asm-constants.h
index dcd5c1c..e4070ec 100644
--- a/vm/mterp/common/asm-constants.h
+++ b/vm/mterp/common/asm-constants.h
@@ -81,10 +81,6 @@
* values are incorrect.
*/
-/* globals (sanity check for LDR vs LDRB) */
-MTERP_SIZEOF(sizeofGlobal_debuggerActive, gDvm.debuggerActive, 1)
-MTERP_SIZEOF(sizeofGlobal_activeProfilers, gDvm.activeProfilers, 4)
-
/* MterpGlue fields */
MTERP_OFFSET(offGlue_pc, MterpGlue, pc, 0)
MTERP_OFFSET(offGlue_fp, MterpGlue, fp, 4)
@@ -96,24 +92,23 @@
MTERP_OFFSET(offGlue_interpStackEnd, MterpGlue, interpStackEnd, 32)
MTERP_OFFSET(offGlue_pSelfSuspendCount, MterpGlue, pSelfSuspendCount, 36)
MTERP_OFFSET(offGlue_cardTable, MterpGlue, cardTable, 40)
-MTERP_OFFSET(offGlue_pDebuggerActive, MterpGlue, pDebuggerActive, 44)
-MTERP_OFFSET(offGlue_pActiveProfilers, MterpGlue, pActiveProfilers, 48)
-MTERP_OFFSET(offGlue_entryPoint, MterpGlue, entryPoint, 52)
+MTERP_OFFSET(offGlue_pInterpBreak, MterpGlue, pInterpBreak, 44)
+MTERP_OFFSET(offGlue_entryPoint, MterpGlue, entryPoint, 48)
#if defined(WITH_JIT)
-MTERP_OFFSET(offGlue_pJitProfTable, MterpGlue, pJitProfTable, 60)
-MTERP_OFFSET(offGlue_jitState, MterpGlue, jitState, 64)
-MTERP_OFFSET(offGlue_jitResumeNPC, MterpGlue, jitResumeNPC, 68)
-MTERP_OFFSET(offGlue_jitResumeDPC, MterpGlue, jitResumeDPC, 72)
-MTERP_OFFSET(offGlue_jitThreshold, MterpGlue, jitThreshold, 76)
-MTERP_OFFSET(offGlue_ppJitProfTable, MterpGlue, ppJitProfTable, 80)
-MTERP_OFFSET(offGlue_icRechainCount, MterpGlue, icRechainCount, 84)
-MTERP_OFFSET(offGlue_pProfileCountdown, MterpGlue, pProfileCountdown, 88)
+MTERP_OFFSET(offGlue_pJitProfTable, MterpGlue, pJitProfTable, 56)
+MTERP_OFFSET(offGlue_jitState, MterpGlue, jitState, 60)
+MTERP_OFFSET(offGlue_jitResumeNPC, MterpGlue, jitResumeNPC, 64)
+MTERP_OFFSET(offGlue_jitResumeDPC, MterpGlue, jitResumeDPC, 68)
+MTERP_OFFSET(offGlue_jitThreshold, MterpGlue, jitThreshold, 72)
+MTERP_OFFSET(offGlue_ppJitProfTable, MterpGlue, ppJitProfTable, 76)
+MTERP_OFFSET(offGlue_icRechainCount, MterpGlue, icRechainCount, 80)
+MTERP_OFFSET(offGlue_pProfileCountdown, MterpGlue, pProfileCountdown, 84)
#if defined(WITH_SELF_VERIFICATION)
-MTERP_OFFSET(offGlue_jitCacheStart, MterpGlue, jitCacheStart, 116)
-MTERP_OFFSET(offGlue_jitCacheEnd, MterpGlue, jitCacheEnd, 120)
-#else
MTERP_OFFSET(offGlue_jitCacheStart, MterpGlue, jitCacheStart, 112)
MTERP_OFFSET(offGlue_jitCacheEnd, MterpGlue, jitCacheEnd, 116)
+#else
+MTERP_OFFSET(offGlue_jitCacheStart, MterpGlue, jitCacheStart, 108)
+MTERP_OFFSET(offGlue_jitCacheEnd, MterpGlue, jitCacheEnd, 112)
#endif
#endif
/* make sure all JValue union members are stored at the same offset */
@@ -332,3 +327,11 @@
/* opcode number */
MTERP_CONSTANT(OP_MOVE_EXCEPTION, 0x0d)
+
+/* flags for interpBreak */
+MTERP_CONSTANT(kSubModeNormal, 0x0000)
+MTERP_CONSTANT(kSubModeMethodTrace, 0x0001)
+MTERP_CONSTANT(kSubModeEmulatorTrace, 0x0002)
+MTERP_CONSTANT(kSubModeInstCounting, 0x0004)
+MTERP_CONSTANT(kSubModeDebuggerActive, 0x0008)
+MTERP_CONSTANT(kSubModeSuspendRequest, 0x0010)
diff --git a/vm/mterp/out/InterpAsm-armv5te-vfp.S b/vm/mterp/out/InterpAsm-armv5te-vfp.S
index 59cb4a6..b5e306a 100644
--- a/vm/mterp/out/InterpAsm-armv5te-vfp.S
+++ b/vm/mterp/out/InterpAsm-armv5te-vfp.S
@@ -13596,48 +13596,21 @@
* r9 is trampoline PC adjustment *in bytes*
*/
common_periodicChecks:
+ ldr r1, [rGLUE, #offGlue_pInterpBreak] @ r1<- &interpBreak
+ /* speculatively load address of thread-specific suspend count */
ldr r3, [rGLUE, #offGlue_pSelfSuspendCount] @ r3<- &suspendCount
-
- ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
- ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
-
+ ldr r1, [r1] @ r1<- interpBreak
+ /* speculatively load thread-specific suspend count */
ldr ip, [r3] @ ip<- suspendCount (int)
-
- cmp r1, #0 @ debugger enabled?
-#if defined(WORKAROUND_CORTEX_A9_745320)
- /* Don't use conditional loads if the HW defect exists */
- beq 101f
- ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-101:
-#else
- ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-#endif
- ldr r2, [r2] @ r2<- activeProfilers (int)
- orrnes ip, ip, r1 @ ip<- suspendCount | debuggerActive
- /*
- * Don't switch the interpreter in the libdvm_traceview build even if the
- * profiler is active.
- * The code here is opted for less intrusion instead of performance.
- * That is, *pActiveProfilers is still loaded into r2 even though it is not
- * used when WITH_INLINE_PROFILING is defined.
- */
-#if !defined(WITH_INLINE_PROFILING)
- orrs ip, ip, r2 @ ip<- suspend|debugger|profiler; set Z
-#endif
-
-
- bxeq lr @ all zero, return
-
+ cmp r1, #0 @ anything unusual?
+ bxeq lr @ return if not
/*
* One or more interesting events have happened. Figure out what.
*
- * If debugging or profiling are compiled in, we need to disambiguate.
- *
* r0 still holds the reentry type.
*/
- ldr ip, [r3] @ ip<- suspendCount (int)
cmp ip, #0 @ want suspend?
- beq 1f @ no, must be debugger/profiler
+ beq 3f @ no, must be something else
stmfd sp!, {r0, lr} @ preserve r0 and lr
#if defined(WITH_JIT)
@@ -13658,42 +13631,33 @@
ldmfd sp!, {r0, lr} @ restore r0 and lr
/*
- * Reload the debugger/profiler enable flags. We're checking to see
- * if either of these got set while we were suspended.
- *
- * If WITH_INLINE_PROFILING is configured, don't check whether the profiler
- * is enabled or not as the profiling will be done inline.
+ * Reload the interpBreak flags - they may have changed while we
+ * were suspended.
*/
- ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
- cmp r1, #0 @ debugger enabled?
-#if defined(WORKAROUND_CORTEX_A9_745320)
- /* Don't use conditional loads if the HW defect exists */
- beq 101f
- ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-101:
+ ldr r1, [rGLUE, #offGlue_pInterpBreak] @ r1<- &interpBreak
+ ldr r1, [r1] @ r1<- interpBreak
+3:
+ /*
+ * TODO: this code is too fragile. Need a general mechanism
+ * to identify what actions to take by submode. Some profiling modes
+ * (instruction count) need to single-step, while method tracing
+ * may not. Debugging with breakpoints can run unfettered, but
+ * source-level single-stepping requires Dalvik singlestepping.
+ * GC may require a one-shot action and then full-speed resumption.
+ */
+#if defined(WITH_INLINE_PROFILING)
+ ands r1, #(kSubModeDebuggerActive | kSubModeEmulatorTrace | kSubModeInstCounting)
#else
- ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+ ands r1, #(kSubModeDebuggerActive | kSubModeEmulatorTrace | kSubModeInstCounting | kSubModeMethodTrace)
#endif
+ bxeq lr @ nothing to do, return
-#if !defined(WITH_INLINE_PROFILING)
- ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
- ldr r2, [r2] @ r2<- activeProfilers (int)
- orrs r1, r1, r2
-#else
- cmp r1, #0 @ only consult the debuggerActive flag
-#endif
-
- beq 2f
-
-1: @ debugger/profiler enabled, bail out; glue->entryPoint was set above
+ @ debugger/profiler enabled, bail out; glue->entryPoint was set above
str r0, [rGLUE, #offGlue_entryPoint] @ store r0, need for debug/prof
add rPC, rPC, r9 @ update rPC
mov r1, #1 @ "want switch" = true
b common_gotoBail @ side exit
-2:
- bx lr @ nothing to do, return
-
/*
* The equivalent of "goto bail", this calls through the "bail handler".
diff --git a/vm/mterp/out/InterpAsm-armv5te.S b/vm/mterp/out/InterpAsm-armv5te.S
index d2df62b..ce1f7f0 100644
--- a/vm/mterp/out/InterpAsm-armv5te.S
+++ b/vm/mterp/out/InterpAsm-armv5te.S
@@ -14054,48 +14054,21 @@
* r9 is trampoline PC adjustment *in bytes*
*/
common_periodicChecks:
+ ldr r1, [rGLUE, #offGlue_pInterpBreak] @ r1<- &interpBreak
+ /* speculatively load address of thread-specific suspend count */
ldr r3, [rGLUE, #offGlue_pSelfSuspendCount] @ r3<- &suspendCount
-
- ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
- ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
-
+ ldr r1, [r1] @ r1<- interpBreak
+ /* speculatively load thread-specific suspend count */
ldr ip, [r3] @ ip<- suspendCount (int)
-
- cmp r1, #0 @ debugger enabled?
-#if defined(WORKAROUND_CORTEX_A9_745320)
- /* Don't use conditional loads if the HW defect exists */
- beq 101f
- ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-101:
-#else
- ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-#endif
- ldr r2, [r2] @ r2<- activeProfilers (int)
- orrnes ip, ip, r1 @ ip<- suspendCount | debuggerActive
- /*
- * Don't switch the interpreter in the libdvm_traceview build even if the
- * profiler is active.
- * The code here is opted for less intrusion instead of performance.
- * That is, *pActiveProfilers is still loaded into r2 even though it is not
- * used when WITH_INLINE_PROFILING is defined.
- */
-#if !defined(WITH_INLINE_PROFILING)
- orrs ip, ip, r2 @ ip<- suspend|debugger|profiler; set Z
-#endif
-
-
- bxeq lr @ all zero, return
-
+ cmp r1, #0 @ anything unusual?
+ bxeq lr @ return if not
/*
* One or more interesting events have happened. Figure out what.
*
- * If debugging or profiling are compiled in, we need to disambiguate.
- *
* r0 still holds the reentry type.
*/
- ldr ip, [r3] @ ip<- suspendCount (int)
cmp ip, #0 @ want suspend?
- beq 1f @ no, must be debugger/profiler
+ beq 3f @ no, must be something else
stmfd sp!, {r0, lr} @ preserve r0 and lr
#if defined(WITH_JIT)
@@ -14116,42 +14089,33 @@
ldmfd sp!, {r0, lr} @ restore r0 and lr
/*
- * Reload the debugger/profiler enable flags. We're checking to see
- * if either of these got set while we were suspended.
- *
- * If WITH_INLINE_PROFILING is configured, don't check whether the profiler
- * is enabled or not as the profiling will be done inline.
+ * Reload the interpBreak flags - they may have changed while we
+ * were suspended.
*/
- ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
- cmp r1, #0 @ debugger enabled?
-#if defined(WORKAROUND_CORTEX_A9_745320)
- /* Don't use conditional loads if the HW defect exists */
- beq 101f
- ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-101:
+ ldr r1, [rGLUE, #offGlue_pInterpBreak] @ r1<- &interpBreak
+ ldr r1, [r1] @ r1<- interpBreak
+3:
+ /*
+ * TODO: this code is too fragile. Need a general mechanism
+ * to identify what actions to take by submode. Some profiling modes
+ * (instruction count) need to single-step, while method tracing
+ * may not. Debugging with breakpoints can run unfettered, but
+ * source-level single-stepping requires Dalvik singlestepping.
+ * GC may require a one-shot action and then full-speed resumption.
+ */
+#if defined(WITH_INLINE_PROFILING)
+ ands r1, #(kSubModeDebuggerActive | kSubModeEmulatorTrace | kSubModeInstCounting)
#else
- ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+ ands r1, #(kSubModeDebuggerActive | kSubModeEmulatorTrace | kSubModeInstCounting | kSubModeMethodTrace)
#endif
+ bxeq lr @ nothing to do, return
-#if !defined(WITH_INLINE_PROFILING)
- ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
- ldr r2, [r2] @ r2<- activeProfilers (int)
- orrs r1, r1, r2
-#else
- cmp r1, #0 @ only consult the debuggerActive flag
-#endif
-
- beq 2f
-
-1: @ debugger/profiler enabled, bail out; glue->entryPoint was set above
+ @ debugger/profiler enabled, bail out; glue->entryPoint was set above
str r0, [rGLUE, #offGlue_entryPoint] @ store r0, need for debug/prof
add rPC, rPC, r9 @ update rPC
mov r1, #1 @ "want switch" = true
b common_gotoBail @ side exit
-2:
- bx lr @ nothing to do, return
-
/*
* The equivalent of "goto bail", this calls through the "bail handler".
diff --git a/vm/mterp/out/InterpAsm-armv7-a-neon.S b/vm/mterp/out/InterpAsm-armv7-a-neon.S
index 3c965e4..0d0136e 100644
--- a/vm/mterp/out/InterpAsm-armv7-a-neon.S
+++ b/vm/mterp/out/InterpAsm-armv7-a-neon.S
@@ -13534,48 +13534,21 @@
* r9 is trampoline PC adjustment *in bytes*
*/
common_periodicChecks:
+ ldr r1, [rGLUE, #offGlue_pInterpBreak] @ r1<- &interpBreak
+ /* speculatively load address of thread-specific suspend count */
ldr r3, [rGLUE, #offGlue_pSelfSuspendCount] @ r3<- &suspendCount
-
- ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
- ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
-
+ ldr r1, [r1] @ r1<- interpBreak
+ /* speculatively load thread-specific suspend count */
ldr ip, [r3] @ ip<- suspendCount (int)
-
- cmp r1, #0 @ debugger enabled?
-#if defined(WORKAROUND_CORTEX_A9_745320)
- /* Don't use conditional loads if the HW defect exists */
- beq 101f
- ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-101:
-#else
- ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-#endif
- ldr r2, [r2] @ r2<- activeProfilers (int)
- orrnes ip, ip, r1 @ ip<- suspendCount | debuggerActive
- /*
- * Don't switch the interpreter in the libdvm_traceview build even if the
- * profiler is active.
- * The code here is opted for less intrusion instead of performance.
- * That is, *pActiveProfilers is still loaded into r2 even though it is not
- * used when WITH_INLINE_PROFILING is defined.
- */
-#if !defined(WITH_INLINE_PROFILING)
- orrs ip, ip, r2 @ ip<- suspend|debugger|profiler; set Z
-#endif
-
-
- bxeq lr @ all zero, return
-
+ cmp r1, #0 @ anything unusual?
+ bxeq lr @ return if not
/*
* One or more interesting events have happened. Figure out what.
*
- * If debugging or profiling are compiled in, we need to disambiguate.
- *
* r0 still holds the reentry type.
*/
- ldr ip, [r3] @ ip<- suspendCount (int)
cmp ip, #0 @ want suspend?
- beq 1f @ no, must be debugger/profiler
+ beq 3f @ no, must be something else
stmfd sp!, {r0, lr} @ preserve r0 and lr
#if defined(WITH_JIT)
@@ -13596,42 +13569,33 @@
ldmfd sp!, {r0, lr} @ restore r0 and lr
/*
- * Reload the debugger/profiler enable flags. We're checking to see
- * if either of these got set while we were suspended.
- *
- * If WITH_INLINE_PROFILING is configured, don't check whether the profiler
- * is enabled or not as the profiling will be done inline.
+ * Reload the interpBreak flags - they may have changed while we
+ * were suspended.
*/
- ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
- cmp r1, #0 @ debugger enabled?
-#if defined(WORKAROUND_CORTEX_A9_745320)
- /* Don't use conditional loads if the HW defect exists */
- beq 101f
- ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-101:
+ ldr r1, [rGLUE, #offGlue_pInterpBreak] @ r1<- &interpBreak
+ ldr r1, [r1] @ r1<- interpBreak
+3:
+ /*
+ * TODO: this code is too fragile. Need a general mechanism
+ * to identify what actions to take by submode. Some profiling modes
+ * (instruction count) need to single-step, while method tracing
+ * may not. Debugging with breakpoints can run unfettered, but
+ * source-level single-stepping requires Dalvik singlestepping.
+ * GC may require a one-shot action and then full-speed resumption.
+ */
+#if defined(WITH_INLINE_PROFILING)
+ ands r1, #(kSubModeDebuggerActive | kSubModeEmulatorTrace | kSubModeInstCounting)
#else
- ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+ ands r1, #(kSubModeDebuggerActive | kSubModeEmulatorTrace | kSubModeInstCounting | kSubModeMethodTrace)
#endif
+ bxeq lr @ nothing to do, return
-#if !defined(WITH_INLINE_PROFILING)
- ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
- ldr r2, [r2] @ r2<- activeProfilers (int)
- orrs r1, r1, r2
-#else
- cmp r1, #0 @ only consult the debuggerActive flag
-#endif
-
- beq 2f
-
-1: @ debugger/profiler enabled, bail out; glue->entryPoint was set above
+ @ debugger/profiler enabled, bail out; glue->entryPoint was set above
str r0, [rGLUE, #offGlue_entryPoint] @ store r0, need for debug/prof
add rPC, rPC, r9 @ update rPC
mov r1, #1 @ "want switch" = true
b common_gotoBail @ side exit
-2:
- bx lr @ nothing to do, return
-
/*
* The equivalent of "goto bail", this calls through the "bail handler".
diff --git a/vm/mterp/out/InterpAsm-armv7-a.S b/vm/mterp/out/InterpAsm-armv7-a.S
index 664f380..f638d9f 100644
--- a/vm/mterp/out/InterpAsm-armv7-a.S
+++ b/vm/mterp/out/InterpAsm-armv7-a.S
@@ -13534,48 +13534,21 @@
* r9 is trampoline PC adjustment *in bytes*
*/
common_periodicChecks:
+ ldr r1, [rGLUE, #offGlue_pInterpBreak] @ r1<- &interpBreak
+ /* speculatively load address of thread-specific suspend count */
ldr r3, [rGLUE, #offGlue_pSelfSuspendCount] @ r3<- &suspendCount
-
- ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
- ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
-
+ ldr r1, [r1] @ r1<- interpBreak
+ /* speculatively load thread-specific suspend count */
ldr ip, [r3] @ ip<- suspendCount (int)
-
- cmp r1, #0 @ debugger enabled?
-#if defined(WORKAROUND_CORTEX_A9_745320)
- /* Don't use conditional loads if the HW defect exists */
- beq 101f
- ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-101:
-#else
- ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-#endif
- ldr r2, [r2] @ r2<- activeProfilers (int)
- orrnes ip, ip, r1 @ ip<- suspendCount | debuggerActive
- /*
- * Don't switch the interpreter in the libdvm_traceview build even if the
- * profiler is active.
- * The code here is opted for less intrusion instead of performance.
- * That is, *pActiveProfilers is still loaded into r2 even though it is not
- * used when WITH_INLINE_PROFILING is defined.
- */
-#if !defined(WITH_INLINE_PROFILING)
- orrs ip, ip, r2 @ ip<- suspend|debugger|profiler; set Z
-#endif
-
-
- bxeq lr @ all zero, return
-
+ cmp r1, #0 @ anything unusual?
+ bxeq lr @ return if not
/*
* One or more interesting events have happened. Figure out what.
*
- * If debugging or profiling are compiled in, we need to disambiguate.
- *
* r0 still holds the reentry type.
*/
- ldr ip, [r3] @ ip<- suspendCount (int)
cmp ip, #0 @ want suspend?
- beq 1f @ no, must be debugger/profiler
+ beq 3f @ no, must be something else
stmfd sp!, {r0, lr} @ preserve r0 and lr
#if defined(WITH_JIT)
@@ -13596,42 +13569,33 @@
ldmfd sp!, {r0, lr} @ restore r0 and lr
/*
- * Reload the debugger/profiler enable flags. We're checking to see
- * if either of these got set while we were suspended.
- *
- * If WITH_INLINE_PROFILING is configured, don't check whether the profiler
- * is enabled or not as the profiling will be done inline.
+ * Reload the interpBreak flags - they may have changed while we
+ * were suspended.
*/
- ldr r1, [rGLUE, #offGlue_pDebuggerActive] @ r1<- &debuggerActive
- cmp r1, #0 @ debugger enabled?
-#if defined(WORKAROUND_CORTEX_A9_745320)
- /* Don't use conditional loads if the HW defect exists */
- beq 101f
- ldrb r1, [r1] @ yes, r1<- debuggerActive (boolean)
-101:
+ ldr r1, [rGLUE, #offGlue_pInterpBreak] @ r1<- &interpBreak
+ ldr r1, [r1] @ r1<- interpBreak
+3:
+ /*
+ * TODO: this code is too fragile. Need a general mechanism
+ * to identify what actions to take by submode. Some profiling modes
+ * (instruction count) need to single-step, while method tracing
+ * may not. Debugging with breakpoints can run unfettered, but
+ * source-level single-stepping requires Dalvik singlestepping.
+ * GC may require a one-shot action and then full-speed resumption.
+ */
+#if defined(WITH_INLINE_PROFILING)
+ ands r1, #(kSubModeDebuggerActive | kSubModeEmulatorTrace | kSubModeInstCounting)
#else
- ldrneb r1, [r1] @ yes, r1<- debuggerActive (boolean)
+ ands r1, #(kSubModeDebuggerActive | kSubModeEmulatorTrace | kSubModeInstCounting | kSubModeMethodTrace)
#endif
+ bxeq lr @ nothing to do, return
-#if !defined(WITH_INLINE_PROFILING)
- ldr r2, [rGLUE, #offGlue_pActiveProfilers] @ r2<- &activeProfilers
- ldr r2, [r2] @ r2<- activeProfilers (int)
- orrs r1, r1, r2
-#else
- cmp r1, #0 @ only consult the debuggerActive flag
-#endif
-
- beq 2f
-
-1: @ debugger/profiler enabled, bail out; glue->entryPoint was set above
+ @ debugger/profiler enabled, bail out; glue->entryPoint was set above
str r0, [rGLUE, #offGlue_entryPoint] @ store r0, need for debug/prof
add rPC, rPC, r9 @ update rPC
mov r1, #1 @ "want switch" = true
b common_gotoBail @ side exit
-2:
- bx lr @ nothing to do, return
-
/*
* The equivalent of "goto bail", this calls through the "bail handler".
diff --git a/vm/mterp/out/InterpAsm-x86.S b/vm/mterp/out/InterpAsm-x86.S
index 7c27b4a..f42f592 100644
--- a/vm/mterp/out/InterpAsm-x86.S
+++ b/vm/mterp/out/InterpAsm-x86.S
@@ -13695,6 +13695,7 @@
* is a bit ugly, but will happen in the relatively uncommon path.
* TODO: Basic-block style Jit will need a hook here as well. Fold it into
* the suspendCount check so we can get both in 1 shot.
+ * TUNING: Improve scheduling here & do initial single test for all.
*/
common_periodicChecks:
movl offGlue_pSelfSuspendCount(%ecx),%eax # eax <- &suspendCount
@@ -13702,17 +13703,9 @@
jne 1f
6:
- movl offGlue_pDebuggerActive(%ecx),%eax # eax <- &DebuggerActive
- movl offGlue_pActiveProfilers(%ecx),%ecx # ecx <- &ActiveProfilers
- testl %eax,%eax # debugger enabled?
- je 2f
- movzbl (%eax),%eax # get active count
-2:
- orl (%ecx),%eax # eax <- debuggerActive | activeProfilers
- movl rGLUE,%ecx # restore rGLUE
- jne 3f # one or both active - switch interp
-
-5:
+ movl offGlue_pInterpBreak(%ecx),%eax # eax <- &interpBreak
+ cmpl $0,(%eax) # something interesting happening?
+ jne 3f # yes - switch interpreters
ret
/* Check for suspend */
diff --git a/vm/mterp/out/InterpC-allstubs.c b/vm/mterp/out/InterpC-allstubs.c
index 59beb97..861a150 100644
--- a/vm/mterp/out/InterpC-allstubs.c
+++ b/vm/mterp/out/InterpC-allstubs.c
@@ -3063,7 +3063,7 @@
//LOGI("Ignoring empty\n");
FINISH(3);
#else
- if (!gDvm.debuggerActive) {
+ if (!DEBUGGER_ACTIVE) {
//LOGI("Skipping empty\n");
FINISH(3); // don't want it to show up in profiler output
} else {
@@ -5137,7 +5137,7 @@
* here, and have the JNI exception code do the reporting to the
* debugger.
*/
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
void* catchFrame;
catchRelPc = dvmFindCatchBlock(self, pc - curMethod->insns,
exception, true, &catchFrame);
@@ -5419,7 +5419,7 @@
DUMP_REGS(methodToCall, newFp, true); // show input args
#if (INTERP_TYPE == INTERP_DBG)
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostLocationEvent(methodToCall, -1,
dvmGetThisPtr(curMethod, fp), DBG_METHOD_ENTRY);
}
@@ -5446,7 +5446,7 @@
(*methodToCall->nativeFunc)(newFp, &retval, methodToCall, self);
#if (INTERP_TYPE == INTERP_DBG)
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostLocationEvent(methodToCall, -1,
dvmGetThisPtr(curMethod, fp), DBG_METHOD_EXIT);
}
diff --git a/vm/mterp/out/InterpC-portdbg.c b/vm/mterp/out/InterpC-portdbg.c
index 54dec96..00caa6e 100644
--- a/vm/mterp/out/InterpC-portdbg.c
+++ b/vm/mterp/out/InterpC-portdbg.c
@@ -1488,7 +1488,7 @@
static const char* mn = "shiftTest2";
static const char* sg = "()V";
- if (/*gDvm.debuggerActive &&*/
+ if (/*DEBUGGER_ACTIVE &&*/
strcmp(method->clazz->descriptor, cd) == 0 &&
strcmp(method->name, mn) == 0 &&
strcmp(method->shorty, sg) == 0)
@@ -1499,7 +1499,7 @@
dumpRegs(method, fp, true);
}
- if (!gDvm.debuggerActive)
+ if (!DEBUGGER_ACTIVE)
*pIsMethodEntry = false;
}
#endif
@@ -1516,7 +1516,7 @@
*pIsMethodEntry = false;
TRACE_METHOD_ENTER(self, method);
}
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
updateDebugger(method, pc, fp, isEntry, self);
}
if (gDvm.instructionCountEnableCount != 0) {
@@ -3425,7 +3425,7 @@
//LOGI("Ignoring empty\n");
FINISH(3);
#else
- if (!gDvm.debuggerActive) {
+ if (!DEBUGGER_ACTIVE) {
//LOGI("Skipping empty\n");
FINISH(3); // don't want it to show up in profiler output
} else {
@@ -5417,7 +5417,7 @@
* here, and have the JNI exception code do the reporting to the
* debugger.
*/
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
void* catchFrame;
catchRelPc = dvmFindCatchBlock(self, pc - curMethod->insns,
exception, true, &catchFrame);
@@ -5699,7 +5699,7 @@
DUMP_REGS(methodToCall, newFp, true); // show input args
#if (INTERP_TYPE == INTERP_DBG)
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostLocationEvent(methodToCall, -1,
dvmGetThisPtr(curMethod, fp), DBG_METHOD_ENTRY);
}
@@ -5726,7 +5726,7 @@
(*methodToCall->nativeFunc)(newFp, &retval, methodToCall, self);
#if (INTERP_TYPE == INTERP_DBG)
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostLocationEvent(methodToCall, -1,
dvmGetThisPtr(curMethod, fp), DBG_METHOD_EXIT);
}
diff --git a/vm/mterp/out/InterpC-portstd.c b/vm/mterp/out/InterpC-portstd.c
index c6926e7..46b712f 100644
--- a/vm/mterp/out/InterpC-portstd.c
+++ b/vm/mterp/out/InterpC-portstd.c
@@ -3175,7 +3175,7 @@
//LOGI("Ignoring empty\n");
FINISH(3);
#else
- if (!gDvm.debuggerActive) {
+ if (!DEBUGGER_ACTIVE) {
//LOGI("Skipping empty\n");
FINISH(3); // don't want it to show up in profiler output
} else {
@@ -5167,7 +5167,7 @@
* here, and have the JNI exception code do the reporting to the
* debugger.
*/
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
void* catchFrame;
catchRelPc = dvmFindCatchBlock(self, pc - curMethod->insns,
exception, true, &catchFrame);
@@ -5449,7 +5449,7 @@
DUMP_REGS(methodToCall, newFp, true); // show input args
#if (INTERP_TYPE == INTERP_DBG)
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostLocationEvent(methodToCall, -1,
dvmGetThisPtr(curMethod, fp), DBG_METHOD_ENTRY);
}
@@ -5476,7 +5476,7 @@
(*methodToCall->nativeFunc)(newFp, &retval, methodToCall, self);
#if (INTERP_TYPE == INTERP_DBG)
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostLocationEvent(methodToCall, -1,
dvmGetThisPtr(curMethod, fp), DBG_METHOD_EXIT);
}
diff --git a/vm/mterp/out/InterpC-x86-atom.c b/vm/mterp/out/InterpC-x86-atom.c
index 9ff04cd..9b4fb5e 100644
--- a/vm/mterp/out/InterpC-x86-atom.c
+++ b/vm/mterp/out/InterpC-x86-atom.c
@@ -2064,7 +2064,7 @@
* here, and have the JNI exception code do the reporting to the
* debugger.
*/
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
void* catchFrame;
catchRelPc = dvmFindCatchBlock(self, pc - curMethod->insns,
exception, true, &catchFrame);
@@ -2346,7 +2346,7 @@
DUMP_REGS(methodToCall, newFp, true); // show input args
#if (INTERP_TYPE == INTERP_DBG)
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostLocationEvent(methodToCall, -1,
dvmGetThisPtr(curMethod, fp), DBG_METHOD_ENTRY);
}
@@ -2373,7 +2373,7 @@
(*methodToCall->nativeFunc)(newFp, &retval, methodToCall, self);
#if (INTERP_TYPE == INTERP_DBG)
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostLocationEvent(methodToCall, -1,
dvmGetThisPtr(curMethod, fp), DBG_METHOD_EXIT);
}
diff --git a/vm/mterp/out/InterpC-x86.c b/vm/mterp/out/InterpC-x86.c
index 0d51ad2..3227fe3 100644
--- a/vm/mterp/out/InterpC-x86.c
+++ b/vm/mterp/out/InterpC-x86.c
@@ -2077,7 +2077,7 @@
* here, and have the JNI exception code do the reporting to the
* debugger.
*/
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
void* catchFrame;
catchRelPc = dvmFindCatchBlock(self, pc - curMethod->insns,
exception, true, &catchFrame);
@@ -2359,7 +2359,7 @@
DUMP_REGS(methodToCall, newFp, true); // show input args
#if (INTERP_TYPE == INTERP_DBG)
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostLocationEvent(methodToCall, -1,
dvmGetThisPtr(curMethod, fp), DBG_METHOD_ENTRY);
}
@@ -2386,7 +2386,7 @@
(*methodToCall->nativeFunc)(newFp, &retval, methodToCall, self);
#if (INTERP_TYPE == INTERP_DBG)
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostLocationEvent(methodToCall, -1,
dvmGetThisPtr(curMethod, fp), DBG_METHOD_EXIT);
}
diff --git a/vm/mterp/portable/debug.c b/vm/mterp/portable/debug.c
index 1d06188..e9fe72b 100644
--- a/vm/mterp/portable/debug.c
+++ b/vm/mterp/portable/debug.c
@@ -194,7 +194,7 @@
static const char* mn = "shiftTest2";
static const char* sg = "()V";
- if (/*gDvm.debuggerActive &&*/
+ if (/*DEBUGGER_ACTIVE &&*/
strcmp(method->clazz->descriptor, cd) == 0 &&
strcmp(method->name, mn) == 0 &&
strcmp(method->shorty, sg) == 0)
@@ -205,7 +205,7 @@
dumpRegs(method, fp, true);
}
- if (!gDvm.debuggerActive)
+ if (!DEBUGGER_ACTIVE)
*pIsMethodEntry = false;
}
#endif
@@ -222,7 +222,7 @@
*pIsMethodEntry = false;
TRACE_METHOD_ENTER(self, method);
}
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
updateDebugger(method, pc, fp, isEntry, self);
}
if (gDvm.instructionCountEnableCount != 0) {
diff --git a/vm/mterp/x86/footer.S b/vm/mterp/x86/footer.S
index edbf132..0724f42 100644
--- a/vm/mterp/x86/footer.S
+++ b/vm/mterp/x86/footer.S
@@ -449,6 +449,7 @@
* is a bit ugly, but will happen in the relatively uncommon path.
* TODO: Basic-block style Jit will need a hook here as well. Fold it into
* the suspendCount check so we can get both in 1 shot.
+ * TUNING: Improve scheduling here & do initial single test for all.
*/
common_periodicChecks:
movl offGlue_pSelfSuspendCount(%ecx),%eax # eax <- &suspendCount
@@ -456,17 +457,9 @@
jne 1f
6:
- movl offGlue_pDebuggerActive(%ecx),%eax # eax <- &DebuggerActive
- movl offGlue_pActiveProfilers(%ecx),%ecx # ecx <- &ActiveProfilers
- testl %eax,%eax # debugger enabled?
- je 2f
- movzbl (%eax),%eax # get active count
-2:
- orl (%ecx),%eax # eax <- debuggerActive | activeProfilers
- movl rGLUE,%ecx # restore rGLUE
- jne 3f # one or both active - switch interp
-
-5:
+ movl offGlue_pInterpBreak(%ecx),%eax # eax <- &interpBreak
+ cmpl $$0,(%eax) # something interesting happening?
+ jne 3f # yes - switch interpreters
ret
/* Check for suspend */
diff --git a/vm/oo/Class.c b/vm/oo/Class.c
index 2f2aae6..27907b8 100644
--- a/vm/oo/Class.c
+++ b/vm/oo/Class.c
@@ -2775,7 +2775,7 @@
* The class has been prepared and resolved but possibly not yet verified
* at this point.
*/
- if (gDvm.debuggerActive) {
+ if (DEBUGGER_ACTIVE) {
dvmDbgPostClassPrepare(clazz);
}