#ifndef _LINUX_IRQNR_H
#define _LINUX_IRQNR_H

/*
 * Generic irq_desc iterators:
 */
#ifdef __KERNEL__

#ifndef CONFIG_GENERIC_HARDIRQS
#include <asm/irq.h>

/*
 * Wrappers for non-genirq architectures:
 */
#define nr_irqs			NR_IRQS
#define irq_to_desc(irq)	(&irq_desc[irq])

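/*
 * With a static irq_desc[] array the iterators below only step the irq
 * number; the 'desc' argument is accepted for compatibility with the
 * genirq variants but is not assigned.
 */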
# define for_each_irq_desc(irq, desc)					\
	for (irq = 0; irq < nr_irqs; irq++)

# define for_each_irq_desc_reverse(irq, desc)				\
	for (irq = nr_irqs - 1; irq >= 0; irq--)

#else /* CONFIG_GENERIC_HARDIRQS */

extern int nr_irqs;
extern struct irq_desc *irq_to_desc(unsigned int irq);
unsigned int irq_get_next_irq(unsigned int offset);

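/*
 * With the genirq core, descriptors may be allocated sparsely and
 * irq_to_desc() can return NULL for an unused irq number.  The trailing
 * "if (!desc) ; else" in the iterators below skips such holes while the
 * macro still takes the caller's loop body as a single statement.
 */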
# define for_each_irq_desc(irq, desc)					\
	for (irq = 0, desc = irq_to_desc(irq); irq < nr_irqs;		\
	     irq++, desc = irq_to_desc(irq))				\
		if (!desc)						\
			;						\
		else


# define for_each_irq_desc_reverse(irq, desc)				\
	for (irq = nr_irqs - 1, desc = irq_to_desc(irq); irq >= 0;	\
	     irq--, desc = irq_to_desc(irq))				\
		if (!desc)						\
			;						\
		else
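/*
 * Minimal usage sketch (illustrative only): walk every allocated
 * descriptor and report its irq number.
 *
 *	unsigned int irq;
 *	struct irq_desc *desc;
 *
 *	for_each_irq_desc(irq, desc)
 *		printk(KERN_DEBUG "irq %u has a descriptor\n", irq);
 */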

#ifdef CONFIG_SMP
#define irq_node(irq)	(irq_to_desc(irq)->node)
#else
#define irq_node(irq)	0
#endif

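/*
 * Unlike for_each_irq_nr(), for_each_active_irq() visits only irq
 * numbers that currently have a descriptor allocated, using
 * irq_get_next_irq() to jump over unused gaps in the irq number space.
 */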
# define for_each_active_irq(irq)					\
	for (irq = irq_get_next_irq(0); irq < nr_irqs;			\
	     irq = irq_get_next_irq(irq + 1))

#endif /* CONFIG_GENERIC_HARDIRQS */

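/*
 * for_each_irq_nr() walks every possible irq number up to nr_irqs,
 * whether or not a descriptor exists for it, and is available in both
 * configurations.
 */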
#define for_each_irq_nr(irq)						\
	for (irq = 0; irq < nr_irqs; irq++)

#endif /* __KERNEL__ */

#endif