/*
 * Copyright (C) 1999 Cort Dougan <cort@cs.nmt.edu>
 */
#ifndef _ASM_POWERPC_RUNLATCH_H
#define _ASM_POWERPC_RUNLATCH_H

#ifdef CONFIG_PPC64

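/*
 * Low-level helpers that do the actual runlatch update; the wrapper
 * macros below add the CPU feature and thread-flag checks around them.
 */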
extern void __ppc64_runlatch_on(void);
extern void __ppc64_runlatch_off(void);

/*
 * We hard enable/disable interrupts manually here because these macros
 * are called from the idle loop, and we don't want to interfere with
 * the soft-disable/enable state and interrupt replay.
 */
#define ppc64_runlatch_off()					\
	do {							\
		if (cpu_has_feature(CPU_FTR_CTRL) &&		\
		    test_thread_local_flags(_TLF_RUNLATCH)) {	\
			unsigned long msr = mfmsr();		\
			__hard_irq_disable();			\
			__ppc64_runlatch_off();			\
			if (msr & MSR_EE)			\
				__hard_irq_enable();		\
		}						\
	} while (0)

#define ppc64_runlatch_on()					\
	do {							\
		if (cpu_has_feature(CPU_FTR_CTRL) &&		\
		    !test_thread_local_flags(_TLF_RUNLATCH)) {	\
			unsigned long msr = mfmsr();		\
			__hard_irq_disable();			\
			__ppc64_runlatch_on();			\
			if (msr & MSR_EE)			\
				__hard_irq_enable();		\
		}						\
	} while (0)
#else
#define ppc64_runlatch_on()
#define ppc64_runlatch_off()
#endif /* CONFIG_PPC64 */

#endif /* _ASM_POWERPC_RUNLATCH_H */
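
/*
 * Illustrative usage sketch (not part of the original header; the function
 * below is hypothetical): callers are expected to pair the macros, dropping
 * the runlatch while the CPU idles and raising it again before returning to
 * useful work, roughly as the powerpc idle paths do.
 *
 *	static void example_idle(void)
 *	{
 *		ppc64_runlatch_off();
 *		while (!need_resched())
 *			cpu_relax();
 *		ppc64_runlatch_on();
 *	}
 */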