Heiko Carstens | 1f194a4 | 2006-07-03 00:24:46 -0700 | [diff] [blame] | 1 | /* |
Heiko Carstens | 428aecf | 2010-01-27 10:12:36 +0100 | [diff] [blame^] | 2 | * Copyright IBM Corp. 2006,2010 |
| 3 | * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com> |
Heiko Carstens | 1f194a4 | 2006-07-03 00:24:46 -0700 | [diff] [blame] | 4 | */ |
| 5 | |
| 6 | #ifndef __ASM_IRQFLAGS_H |
| 7 | #define __ASM_IRQFLAGS_H |
| 8 | |
Heiko Carstens | 428aecf | 2010-01-27 10:12:36 +0100 | [diff] [blame^] | 9 | #include <linux/types.h> |
Heiko Carstens | 1f194a4 | 2006-07-03 00:24:46 -0700 | [diff] [blame] | 10 | |
/*
 * The PSW system mask is manipulated with the STOSM (store then OR),
 * STNSM (store then AND) and SSM (set system mask) instructions.
 * gcc > 3.2 understands the "Q" machine constraint (a memory operand
 * addressable with a short displacement and no index register), which
 * lets the instruction reference the stack slot directly; older
 * compilers get a fallback that passes the operand's address in an
 * address register ("a" constraint) instead.  Both variants are
 * behaviorally identical.
 */
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

/*
 * Store then OR system mask: store the current system-mask byte into
 * __mask, then OR the immediate __or into the PSW system mask.
 * Evaluates to the old mask.  The "memory" clobber keeps the compiler
 * from moving memory accesses across the mask change.
 */
#define __raw_local_irq_stosm(__or) \
({ \
	unsigned long __mask; \
	asm volatile( \
		"	stosm	%0,%1" \
		: "=Q" (__mask) : "i" (__or) : "memory"); \
	__mask; \
})

/*
 * Store then AND system mask: store the current system-mask byte into
 * __mask, then AND the immediate __and into the PSW system mask
 * (clearing the bits that are zero in __and).  Evaluates to the old
 * mask.
 */
#define __raw_local_irq_stnsm(__and) \
({ \
	unsigned long __mask; \
	asm volatile( \
		"	stnsm	%0,%1" \
		: "=Q" (__mask) : "i" (__and) : "memory"); \
	__mask; \
})

/* Set system mask: load the PSW system mask from __mask. */
#define __raw_local_irq_ssm(__mask) \
({ \
	asm volatile("ssm   %0" : : "Q" (__mask) : "memory"); \
})

#else /* __GNUC__ */

/*
 * Pre-gcc-3.3 fallback: no "Q" constraint available, so the address of
 * the local is passed in a register and the instruction uses an
 * explicit 0(%reg) base-displacement operand.  The "=m"/"m" operands
 * tell the compiler the memory is written/read.
 */

/* store then or system mask. */
#define __raw_local_irq_stosm(__or) \
({ \
	unsigned long __mask; \
	asm volatile( \
		"	stosm	0(%1),%2" \
		: "=m" (__mask) \
		: "a" (&__mask), "i" (__or) : "memory"); \
	__mask; \
})

/* store then and system mask. */
#define __raw_local_irq_stnsm(__and) \
({ \
	unsigned long __mask; \
	asm volatile( \
		"	stnsm	0(%1),%2" \
		: "=m" (__mask) \
		: "a" (&__mask), "i" (__and) : "memory"); \
	__mask; \
})

/* set system mask. */
#define __raw_local_irq_ssm(__mask) \
({ \
	asm volatile( \
		"	ssm	0(%0)" \
		: : "a" (&__mask), "m" (__mask) : "memory"); \
})

#endif /* __GNUC__ */
| 72 | |
| 73 | /* interrupt control.. */ |
/*
 * Enable interrupts: OR 0x03 into the PSW system-mask byte (the
 * external- and I/O-interrupt enable bits — see Principles of
 * Operation for the exact bit layout) and return the previous mask so
 * the caller can restore it later.
 */
static inline unsigned long raw_local_irq_enable(void)
{
	unsigned long old_mask = __raw_local_irq_stosm(0x03);

	return old_mask;
}
| 78 | |
/*
 * Disable interrupts: AND the PSW system-mask byte with 0xfc, clearing
 * the external- and I/O-interrupt enable bits while leaving the other
 * mask bits untouched.  Returns the previous mask for later restore.
 */
static inline unsigned long raw_local_irq_disable(void)
{
	unsigned long old_mask;

	old_mask = __raw_local_irq_stnsm(0xfc);
	return old_mask;
}
| 83 | |
/*
 * Save the current PSW system mask into x without changing it:
 * implemented as "store then OR with 0".  typecheck() forces a
 * compile-time warning unless x is an unsigned long.
 */
#define raw_local_save_flags(x) \
do { \
	typecheck(unsigned long, x); \
	(x) = __raw_local_irq_stosm(0x00); \
} while (0)
| 89 | |
/*
 * Restore a system mask previously obtained from raw_local_save_flags()
 * or raw_local_irq_disable()/raw_local_irq_enable() by loading it back
 * with the SSM instruction.
 */
static inline void raw_local_irq_restore(unsigned long flags)
{
	__raw_local_irq_ssm(flags);
}
| 94 | |
Heiko Carstens | 1f194a4 | 2006-07-03 00:24:46 -0700 | [diff] [blame] | 95 | static inline int raw_irqs_disabled_flags(unsigned long flags) |
| 96 | { |
Martin Schwidefsky | 94c12cc | 2006-09-28 16:56:43 +0200 | [diff] [blame] | 97 | return !(flags & (3UL << (BITS_PER_LONG - 8))); |
Heiko Carstens | 1f194a4 | 2006-07-03 00:24:46 -0700 | [diff] [blame] | 98 | } |
| 99 | |
/*
 * For spinlocks etc.: disable interrupts and save the previous system
 * mask in x in a single step.  Unlike raw_local_save_flags() this
 * performs no typecheck() on x.
 */
#define raw_local_irq_save(x) ((x) = raw_local_irq_disable())
| 102 | |
Heiko Carstens | 1f194a4 | 2006-07-03 00:24:46 -0700 | [diff] [blame] | 103 | #endif /* __ASM_IRQFLAGS_H */ |