viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 1 | /* |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 2 | * SPEAr platform shared irq layer source file |
| 3 | * |
Viresh Kumar | df1590d | 2012-11-12 22:56:03 +0530 | [diff] [blame] | 4 | * Copyright (C) 2009-2012 ST Microelectronics |
Viresh Kumar | 10d8935 | 2012-06-20 12:53:02 -0700 | [diff] [blame] | 5 | * Viresh Kumar <viresh.linux@gmail.com> |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 6 | * |
Viresh Kumar | df1590d | 2012-11-12 22:56:03 +0530 | [diff] [blame] | 7 | * Copyright (C) 2012 ST Microelectronics |
Viresh Kumar | 9cc2368 | 2014-04-18 15:07:16 -0700 | [diff] [blame] | 8 | * Shiraz Hashim <shiraz.linux.kernel@gmail.com> |
Viresh Kumar | df1590d | 2012-11-12 22:56:03 +0530 | [diff] [blame] | 9 | * |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 10 | * This file is licensed under the terms of the GNU General Public |
| 11 | * License version 2. This program is licensed "as is" without any |
| 12 | * warranty of any kind, whether express or implied. |
| 13 | */ |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 14 | #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 15 | |
| 16 | #include <linux/err.h> |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 17 | #include <linux/export.h> |
| 18 | #include <linux/interrupt.h> |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 19 | #include <linux/io.h> |
| 20 | #include <linux/irq.h> |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 21 | #include <linux/irqdomain.h> |
| 22 | #include <linux/of.h> |
| 23 | #include <linux/of_address.h> |
| 24 | #include <linux/of_irq.h> |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 25 | #include <linux/spinlock.h> |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 26 | |
Rob Herring | e9c5155 | 2013-01-02 09:37:56 -0600 | [diff] [blame] | 27 | #include "irqchip.h" |
| 28 | |
/*
 * struct shirq_regs: shared irq register configuration
 *
 * enb_reg: enable register offset, -1 when the block has no enable register
 * reset_to_enb: val 1 indicates, we need to clear bit for enabling interrupt
 * status_reg: status register offset
 * clear_reg: clear register offset, -1 when the block has no clear register
 * reset_to_clear: val 1 indicates, we need to clear bit for clearing interrupt
 */
struct shirq_regs {
	u32 enb_reg;
	u32 reset_to_enb;
	u32 status_reg;
	u32 clear_reg;
	u32 reset_to_clear;
};
| 46 | |
/*
 * struct spear_shirq: shared irq structure
 *
 * base: Base register address of the shared irq register block
 * regs: Register configuration for shared irq block
 * mask: Mask to apply to the status register to select this block's bits
 * virq_base: Base virtual interrupt number (first linux irq of the block)
 * nr_irqs: Number of interrupts handled by this block
 * offset: Bit offset of the first interrupt in the shared registers
 * disabled: Group is disabled (never wired up), but its irq space is
 *           still accounted when allocating descriptors
 */
struct spear_shirq {
	void __iomem *base;
	struct shirq_regs regs;
	u32 mask;
	u32 virq_base;
	u32 nr_irqs;
	u32 offset;
	bool disabled;
};
| 67 | |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 68 | static DEFINE_SPINLOCK(lock); |
| 69 | |
/* spear300 shared irq registers offsets and masks */
#define SPEAR300_INT_ENB_MASK_REG	0x54
#define SPEAR300_INT_STS_MASK_REG	0x58

/* RAS1: 9 interrupts at status bits 0..8; no clear register */
static struct spear_shirq spear300_shirq_ras1 = {
	.offset		= 0,
	.nr_irqs	= 9,
	.mask		= ((0x1 << 9) - 1) << 0,
	.regs = {
		.enb_reg = SPEAR300_INT_ENB_MASK_REG,
		.status_reg = SPEAR300_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

static struct spear_shirq *spear300_shirq_blocks[] = {
	&spear300_shirq_ras1,
};
| 88 | |
/* spear310 shared irq registers offsets and masks */
#define SPEAR310_INT_STS_MASK_REG	0x04

/*
 * SPEAr310 blocks are status-only: no enable and no clear register,
 * hence enb_reg/clear_reg are -1 throughout.
 */

/* RAS1: 8 interrupts at status bits 0..7 */
static struct spear_shirq spear310_shirq_ras1 = {
	.offset		= 0,
	.nr_irqs	= 8,
	.mask		= ((0x1 << 8) - 1) << 0,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR310_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

/* RAS2: 5 interrupts at status bits 8..12 */
static struct spear_shirq spear310_shirq_ras2 = {
	.offset		= 8,
	.nr_irqs	= 5,
	.mask		= ((0x1 << 5) - 1) << 8,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR310_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

/* RAS3: single interrupt at status bit 13 */
static struct spear_shirq spear310_shirq_ras3 = {
	.offset		= 13,
	.nr_irqs	= 1,
	.mask		= ((0x1 << 1) - 1) << 13,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR310_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

/* INTRCOMM RAS: 3 interrupts at status bits 14..16 */
static struct spear_shirq spear310_shirq_intrcomm_ras = {
	.offset		= 14,
	.nr_irqs	= 3,
	.mask		= ((0x1 << 3) - 1) << 14,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR310_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

static struct spear_shirq *spear310_shirq_blocks[] = {
	&spear310_shirq_ras1,
	&spear310_shirq_ras2,
	&spear310_shirq_ras3,
	&spear310_shirq_intrcomm_ras,
};
| 142 | |
/* spear320 shared irq registers offsets and masks */
/* NOTE: status and clear share the same register offset (0x04) */
#define SPEAR320_INT_STS_MASK_REG	0x04
#define SPEAR320_INT_CLR_MASK_REG	0x04
#define SPEAR320_INT_ENB_MASK_REG	0x08

/*
 * RAS3: 7 interrupts at bits 0..6.  Marked disabled: the group keeps its
 * slot in the linux irq space but is never wired to a parent interrupt.
 */
static struct spear_shirq spear320_shirq_ras3 = {
	.offset		= 0,
	.nr_irqs	= 7,
	.mask		= ((0x1 << 7) - 1) << 0,
	.disabled	= 1,
	.regs = {
		.enb_reg = SPEAR320_INT_ENB_MASK_REG,
		.reset_to_enb = 1,
		.status_reg = SPEAR320_INT_STS_MASK_REG,
		.clear_reg = SPEAR320_INT_CLR_MASK_REG,
		.reset_to_clear = 1,
	},
};

/* RAS1: 3 interrupts at bits 7..9; no enable register */
static struct spear_shirq spear320_shirq_ras1 = {
	.offset		= 7,
	.nr_irqs	= 3,
	.mask		= ((0x1 << 3) - 1) << 7,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR320_INT_STS_MASK_REG,
		.clear_reg = SPEAR320_INT_CLR_MASK_REG,
		.reset_to_clear = 1,
	},
};

/* RAS2: single interrupt at bit 10; no enable register */
static struct spear_shirq spear320_shirq_ras2 = {
	.offset		= 10,
	.nr_irqs	= 1,
	.mask		= ((0x1 << 1) - 1) << 10,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR320_INT_STS_MASK_REG,
		.clear_reg = SPEAR320_INT_CLR_MASK_REG,
		.reset_to_clear = 1,
	},
};

/* INTRCOMM RAS: 11 interrupts at bits 11..21; no enable register */
static struct spear_shirq spear320_shirq_intrcomm_ras = {
	.offset		= 11,
	.nr_irqs	= 11,
	.mask		= ((0x1 << 11) - 1) << 11,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR320_INT_STS_MASK_REG,
		.clear_reg = SPEAR320_INT_CLR_MASK_REG,
		.reset_to_clear = 1,
	},
};

static struct spear_shirq *spear320_shirq_blocks[] = {
	&spear320_shirq_ras3,
	&spear320_shirq_ras1,
	&spear320_shirq_ras2,
	&spear320_shirq_intrcomm_ras,
};
| 204 | |
| 205 | static void shirq_irq_mask_unmask(struct irq_data *d, bool mask) |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 206 | { |
Lennert Buytenhek | 0e60e11 | 2010-11-29 11:22:33 +0100 | [diff] [blame] | 207 | struct spear_shirq *shirq = irq_data_get_irq_chip_data(d); |
Thomas Gleixner | c5d1d85 | 2014-06-19 21:34:39 +0000 | [diff] [blame] | 208 | u32 val, offset = d->irq - shirq->virq_base; |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 209 | unsigned long flags; |
| 210 | |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 211 | if (shirq->regs.enb_reg == -1) |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 212 | return; |
| 213 | |
| 214 | spin_lock_irqsave(&lock, flags); |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 215 | val = readl(shirq->base + shirq->regs.enb_reg); |
| 216 | |
| 217 | if (mask ^ shirq->regs.reset_to_enb) |
Thomas Gleixner | c5d1d85 | 2014-06-19 21:34:39 +0000 | [diff] [blame] | 218 | val &= ~(0x1 << shirq->offset << offset); |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 219 | else |
Thomas Gleixner | c5d1d85 | 2014-06-19 21:34:39 +0000 | [diff] [blame] | 220 | val |= 0x1 << shirq->offset << offset; |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 221 | |
| 222 | writel(val, shirq->base + shirq->regs.enb_reg); |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 223 | spin_unlock_irqrestore(&lock, flags); |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 224 | |
| 225 | } |
| 226 | |
| 227 | static void shirq_irq_mask(struct irq_data *d) |
| 228 | { |
| 229 | shirq_irq_mask_unmask(d, 1); |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 230 | } |
| 231 | |
Lennert Buytenhek | 0e60e11 | 2010-11-29 11:22:33 +0100 | [diff] [blame] | 232 | static void shirq_irq_unmask(struct irq_data *d) |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 233 | { |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 234 | shirq_irq_mask_unmask(d, 0); |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 235 | } |
| 236 | |
/*
 * irq_chip for the demultiplexed interrupts.  Ack is implemented as
 * mask; the actual hardware source is cleared in shirq_handler() via
 * the block's clear register, when one exists.
 */
static struct irq_chip shirq_chip = {
	.name		= "spear-shirq",
	.irq_ack	= shirq_irq_mask,
	.irq_mask	= shirq_irq_mask,
	.irq_unmask	= shirq_irq_unmask,
};
| 243 | |
/*
 * Chained handler installed on the parent interrupt of a shared irq
 * block: demultiplexes the shared status register and dispatches each
 * pending bit to its own linux irq.
 */
static void shirq_handler(unsigned irq, struct irq_desc *desc)
{
	struct spear_shirq *shirq = irq_get_handler_data(irq);
	struct irq_data *idata = irq_desc_get_irq_data(desc);
	struct irq_chip *chip = irq_data_get_irq_chip(idata);
	u32 i, j, val, mask, tmp;

	/* ack the parent interrupt while we demultiplex */
	chip->irq_ack(idata);

	/* only this block's bits of the (possibly shared) status register */
	mask = shirq->mask;

	/* re-read the status until no interrupt of this block is pending */
	while ((val = readl(shirq->base + shirq->regs.status_reg) &
				mask)) {

		val >>= shirq->offset;
		for (i = 0, j = 1; i < shirq->nr_irqs; i++, j <<= 1) {

			if (!(j & val))
				continue;

			generic_handle_irq(shirq->virq_base + i);

			/* clear interrupt */
			if (shirq->regs.clear_reg == -1)
				continue;

			/*
			 * reset_to_clear == 1: clearing the bit clears the
			 * interrupt; otherwise the bit must be set.
			 */
			tmp = readl(shirq->base + shirq->regs.clear_reg);
			if (shirq->regs.reset_to_clear)
				tmp &= ~(j << shirq->offset);
			else
				tmp |= (j << shirq->offset);
			writel(tmp, shirq->base + shirq->regs.clear_reg);
		}
	}
	chip->irq_unmask(idata);
}
| 279 | |
/*
 * Wire up one shared irq block: install the chained demux handler on
 * the parent interrupt and register every child irq with shirq_chip.
 *
 * NOTE(review): parent_irq is not validated here — a failed
 * irq_of_parse_and_map() in the caller would hand in 0; confirm that
 * cannot happen on supported platforms.
 */
static void __init spear_shirq_register(struct spear_shirq *shirq,
					int parent_irq)
{
	int i;

	/* disabled groups keep their irq-space accounting but stay unwired */
	if (shirq->disabled)
		return;

	irq_set_chained_handler(parent_irq, shirq_handler);
	irq_set_handler_data(parent_irq, shirq);

	for (i = 0; i < shirq->nr_irqs; i++) {
		irq_set_chip_and_handler(shirq->virq_base + i,
					 &shirq_chip, handle_simple_irq);
		set_irq_flags(shirq->virq_base + i, IRQF_VALID);
		irq_set_chip_data(shirq->virq_base + i, shirq);
	}
}
| 298 | |
| 299 | static int __init shirq_init(struct spear_shirq **shirq_blocks, int block_nr, |
| 300 | struct device_node *np) |
| 301 | { |
Thomas Gleixner | c5d1d85 | 2014-06-19 21:34:39 +0000 | [diff] [blame] | 302 | int i, parent_irq, virq_base, hwirq = 0, nr_irqs = 0; |
Thomas Gleixner | a26c06f | 2014-06-19 21:34:37 +0000 | [diff] [blame] | 303 | struct irq_domain *shirq_domain; |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 304 | void __iomem *base; |
| 305 | |
| 306 | base = of_iomap(np, 0); |
| 307 | if (!base) { |
| 308 | pr_err("%s: failed to map shirq registers\n", __func__); |
| 309 | return -ENXIO; |
| 310 | } |
| 311 | |
| 312 | for (i = 0; i < block_nr; i++) |
Thomas Gleixner | c5d1d85 | 2014-06-19 21:34:39 +0000 | [diff] [blame] | 313 | nr_irqs += shirq_blocks[i]->nr_irqs; |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 314 | |
Thomas Gleixner | c5d1d85 | 2014-06-19 21:34:39 +0000 | [diff] [blame] | 315 | virq_base = irq_alloc_descs(-1, 0, nr_irqs, 0); |
| 316 | if (IS_ERR_VALUE(virq_base)) { |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 317 | pr_err("%s: irq desc alloc failed\n", __func__); |
| 318 | goto err_unmap; |
| 319 | } |
| 320 | |
Thomas Gleixner | c5d1d85 | 2014-06-19 21:34:39 +0000 | [diff] [blame] | 321 | shirq_domain = irq_domain_add_legacy(np, nr_irqs, virq_base, 0, |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 322 | &irq_domain_simple_ops, NULL); |
| 323 | if (WARN_ON(!shirq_domain)) { |
| 324 | pr_warn("%s: irq domain init failed\n", __func__); |
| 325 | goto err_free_desc; |
| 326 | } |
| 327 | |
| 328 | for (i = 0; i < block_nr; i++) { |
| 329 | shirq_blocks[i]->base = base; |
Thomas Gleixner | c5d1d85 | 2014-06-19 21:34:39 +0000 | [diff] [blame] | 330 | shirq_blocks[i]->virq_base = irq_find_mapping(shirq_domain, |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 331 | hwirq); |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 332 | |
Thomas Gleixner | f37ecbc | 2014-06-19 21:34:39 +0000 | [diff] [blame] | 333 | parent_irq = irq_of_parse_and_map(np, i); |
| 334 | spear_shirq_register(shirq_blocks[i], parent_irq); |
Thomas Gleixner | c5d1d85 | 2014-06-19 21:34:39 +0000 | [diff] [blame] | 335 | hwirq += shirq_blocks[i]->nr_irqs; |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 336 | } |
| 337 | |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 338 | return 0; |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 339 | |
| 340 | err_free_desc: |
Thomas Gleixner | c5d1d85 | 2014-06-19 21:34:39 +0000 | [diff] [blame] | 341 | irq_free_descs(virq_base, nr_irqs); |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 342 | err_unmap: |
| 343 | iounmap(base); |
| 344 | return -ENXIO; |
| 345 | } |
| 346 | |
Thomas Gleixner | 078bc00 | 2014-06-19 21:34:38 +0000 | [diff] [blame] | 347 | static int __init spear300_shirq_of_init(struct device_node *np, |
| 348 | struct device_node *parent) |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 349 | { |
| 350 | return shirq_init(spear300_shirq_blocks, |
| 351 | ARRAY_SIZE(spear300_shirq_blocks), np); |
| 352 | } |
Rob Herring | e9c5155 | 2013-01-02 09:37:56 -0600 | [diff] [blame] | 353 | IRQCHIP_DECLARE(spear300_shirq, "st,spear300-shirq", spear300_shirq_of_init); |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 354 | |
Thomas Gleixner | 078bc00 | 2014-06-19 21:34:38 +0000 | [diff] [blame] | 355 | static int __init spear310_shirq_of_init(struct device_node *np, |
| 356 | struct device_node *parent) |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 357 | { |
| 358 | return shirq_init(spear310_shirq_blocks, |
| 359 | ARRAY_SIZE(spear310_shirq_blocks), np); |
| 360 | } |
Rob Herring | e9c5155 | 2013-01-02 09:37:56 -0600 | [diff] [blame] | 361 | IRQCHIP_DECLARE(spear310_shirq, "st,spear310-shirq", spear310_shirq_of_init); |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 362 | |
Thomas Gleixner | 078bc00 | 2014-06-19 21:34:38 +0000 | [diff] [blame] | 363 | static int __init spear320_shirq_of_init(struct device_node *np, |
| 364 | struct device_node *parent) |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame] | 365 | { |
| 366 | return shirq_init(spear320_shirq_blocks, |
| 367 | ARRAY_SIZE(spear320_shirq_blocks), np); |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 368 | } |
Rob Herring | e9c5155 | 2013-01-02 09:37:56 -0600 | [diff] [blame] | 369 | IRQCHIP_DECLARE(spear320_shirq, "st,spear320-shirq", spear320_shirq_of_init); |