viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 1 | /* |
| 2 | * arch/arm/plat-spear/shirq.c |
| 3 | * |
| 4 | * SPEAr platform shared irq layer source file |
| 5 | * |
| 6 | * Copyright (C) 2009 ST Microelectronics |
Viresh Kumar | 10d8935 | 2012-06-20 12:53:02 -0700 | [diff] [blame] | 7 | * Viresh Kumar <viresh.linux@gmail.com> |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 8 | * |
| 9 | * This file is licensed under the terms of the GNU General Public |
| 10 | * License version 2. This program is licensed "as is" without any |
| 11 | * warranty of any kind, whether express or implied. |
| 12 | */ |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 13 | #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 14 | |
| 15 | #include <linux/err.h> |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 16 | #include <linux/export.h> |
| 17 | #include <linux/interrupt.h> |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 18 | #include <linux/io.h> |
| 19 | #include <linux/irq.h> |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 20 | #include <linux/irqdomain.h> |
| 21 | #include <linux/of.h> |
| 22 | #include <linux/of_address.h> |
| 23 | #include <linux/of_irq.h> |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 24 | #include <linux/spinlock.h> |
| 25 | #include <plat/shirq.h> |
| 26 | |
/* Serializes read-modify-write cycles on the shared enable registers. */
static DEFINE_SPINLOCK(lock);
| 28 | |
/* spear300 shared irq registers offsets and masks */
#define SPEAR300_INT_ENB_MASK_REG	0x54
#define SPEAR300_INT_STS_MASK_REG	0x58

/*
 * SPEAr300 RAS block: 9 shared interrupts demuxed from bit 0 of the
 * status register.  A register offset of -1 means the block has no
 * such register (here: no explicit clear register).
 */
static struct spear_shirq spear300_shirq_ras1 = {
	.irq_nr = 9,
	.irq_bit_off = 0,
	.regs = {
		.enb_reg = SPEAR300_INT_ENB_MASK_REG,
		.status_reg = SPEAR300_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

/* All shared-irq blocks on SPEAr300, in device-tree parent-irq order. */
static struct spear_shirq *spear300_shirq_blocks[] = {
	&spear300_shirq_ras1,
};
| 46 | |
/* spear310 shared irq registers offsets and masks */
#define SPEAR310_INT_STS_MASK_REG	0x04

/*
 * SPEAr310 RAS blocks: all four share one status register and have
 * neither an enable nor a clear register (offsets -1), so masking and
 * clearing of the child interrupts are no-ops at this level.
 */

/* RAS1: 8 interrupts at status bits 0..7 */
static struct spear_shirq spear310_shirq_ras1 = {
	.irq_nr = 8,
	.irq_bit_off = 0,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR310_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

/* RAS2: 5 interrupts at status bits 8..12 */
static struct spear_shirq spear310_shirq_ras2 = {
	.irq_nr = 5,
	.irq_bit_off = 8,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR310_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

/* RAS3: a single interrupt at status bit 13 */
static struct spear_shirq spear310_shirq_ras3 = {
	.irq_nr = 1,
	.irq_bit_off = 13,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR310_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

/* intrcomm-ras: 3 interrupts at status bits 14..16 */
static struct spear_shirq spear310_shirq_intrcomm_ras = {
	.irq_nr = 3,
	.irq_bit_off = 14,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR310_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

/* All shared-irq blocks on SPEAr310, in device-tree parent-irq order. */
static struct spear_shirq *spear310_shirq_blocks[] = {
	&spear310_shirq_ras1,
	&spear310_shirq_ras2,
	&spear310_shirq_ras3,
	&spear310_shirq_intrcomm_ras,
};
| 96 | |
/* spear320 shared irq registers offsets and masks */
/* NOTE: status and clear share the same register offset (0x04). */
#define SPEAR320_INT_STS_MASK_REG	0x04
#define SPEAR320_INT_CLR_MASK_REG	0x04
#define SPEAR320_INT_ENB_MASK_REG	0x08

/*
 * SPEAr320 RAS blocks: interrupts are cleared by writing 0 to the
 * corresponding bit (reset_to_clear = 1).
 */

/* RAS1: 3 interrupts at status bits 7..9, no enable register */
static struct spear_shirq spear320_shirq_ras1 = {
	.irq_nr = 3,
	.irq_bit_off = 7,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR320_INT_STS_MASK_REG,
		.clear_reg = SPEAR320_INT_CLR_MASK_REG,
		.reset_to_clear = 1,
	},
};

/* RAS2: a single interrupt at status bit 10, no enable register */
static struct spear_shirq spear320_shirq_ras2 = {
	.irq_nr = 1,
	.irq_bit_off = 10,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR320_INT_STS_MASK_REG,
		.clear_reg = SPEAR320_INT_CLR_MASK_REG,
		.reset_to_clear = 1,
	},
};

/*
 * RAS3: 3 interrupts at status bits 0..2.  Marked invalid_irq, so
 * spear_shirq_register() skips it; interrupts are enabled by writing 0
 * (reset_to_enb = 1) and cleared by writing 0 (reset_to_clear = 1).
 */
static struct spear_shirq spear320_shirq_ras3 = {
	.irq_nr = 3,
	.irq_bit_off = 0,
	.invalid_irq = 1,
	.regs = {
		.enb_reg = SPEAR320_INT_ENB_MASK_REG,
		.reset_to_enb = 1,
		.status_reg = SPEAR320_INT_STS_MASK_REG,
		.clear_reg = SPEAR320_INT_CLR_MASK_REG,
		.reset_to_clear = 1,
	},
};

/* intrcomm-ras: 11 interrupts at status bits 11..21, no enable register */
static struct spear_shirq spear320_shirq_intrcomm_ras = {
	.irq_nr = 11,
	.irq_bit_off = 11,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR320_INT_STS_MASK_REG,
		.clear_reg = SPEAR320_INT_CLR_MASK_REG,
		.reset_to_clear = 1,
	},
};

/*
 * All shared-irq blocks on SPEAr320.  ras3 is listed first so its child
 * interrupts occupy the lowest hwirq slots in the domain — presumably to
 * match the platform's fixed interrupt numbering; confirm against the
 * device-tree binding.
 */
static struct spear_shirq *spear320_shirq_blocks[] = {
	&spear320_shirq_ras3,
	&spear320_shirq_ras1,
	&spear320_shirq_ras2,
	&spear320_shirq_intrcomm_ras,
};
| 154 | |
| 155 | static void shirq_irq_mask_unmask(struct irq_data *d, bool mask) |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 156 | { |
Lennert Buytenhek | 0e60e11 | 2010-11-29 11:22:33 +0100 | [diff] [blame] | 157 | struct spear_shirq *shirq = irq_data_get_irq_chip_data(d); |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 158 | u32 val, offset = d->irq - shirq->irq_base; |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 159 | unsigned long flags; |
| 160 | |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 161 | if (shirq->regs.enb_reg == -1) |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 162 | return; |
| 163 | |
| 164 | spin_lock_irqsave(&lock, flags); |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 165 | val = readl(shirq->base + shirq->regs.enb_reg); |
| 166 | |
| 167 | if (mask ^ shirq->regs.reset_to_enb) |
| 168 | val &= ~(0x1 << shirq->irq_bit_off << offset); |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 169 | else |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 170 | val |= 0x1 << shirq->irq_bit_off << offset; |
| 171 | |
| 172 | writel(val, shirq->base + shirq->regs.enb_reg); |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 173 | spin_unlock_irqrestore(&lock, flags); |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 174 | |
| 175 | } |
| 176 | |
| 177 | static void shirq_irq_mask(struct irq_data *d) |
| 178 | { |
| 179 | shirq_irq_mask_unmask(d, 1); |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 180 | } |
| 181 | |
Lennert Buytenhek | 0e60e11 | 2010-11-29 11:22:33 +0100 | [diff] [blame] | 182 | static void shirq_irq_unmask(struct irq_data *d) |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 183 | { |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 184 | shirq_irq_mask_unmask(d, 0); |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 185 | } |
| 186 | |
/*
 * irq_chip for the demuxed child interrupts.  There is no dedicated ack
 * register, so .irq_ack is implemented as mask (a no-op for blocks
 * whose enb_reg is -1).
 */
static struct irq_chip shirq_chip = {
	.name = "spear-shirq",
	.irq_ack = shirq_irq_mask,
	.irq_mask = shirq_irq_mask,
	.irq_unmask = shirq_irq_unmask,
};
| 193 | |
/*
 * Chained handler for a block's parent interrupt: demultiplexes the
 * shared status register and dispatches every pending child interrupt,
 * clearing each one at the hardware level where a clear register exists.
 */
static void shirq_handler(unsigned irq, struct irq_desc *desc)
{
	u32 i, j, val, mask, tmp;
	struct irq_chip *chip;
	struct spear_shirq *shirq = irq_get_handler_data(irq);

	/* Ack (== mask, see shirq_chip) the parent line while demuxing. */
	chip = irq_get_chip(irq);
	chip->irq_ack(&desc->irq_data);

	/* Status bits belonging to this block. */
	mask = ((0x1 << shirq->irq_nr) - 1) << shirq->irq_bit_off;
	/* Re-read status until none of this block's interrupts is pending. */
	while ((val = readl(shirq->base + shirq->regs.status_reg) &
				mask)) {

		/* Shift the block's bits down to position 0. */
		val >>= shirq->irq_bit_off;
		for (i = 0, j = 1; i < shirq->irq_nr; i++, j <<= 1) {

			if (!(j & val))
				continue;

			generic_handle_irq(shirq->irq_base + i);

			/* clear interrupt */
			if (shirq->regs.clear_reg == -1)
				continue;

			/* reset_to_clear: writing 0 clears, else writing 1. */
			tmp = readl(shirq->base + shirq->regs.clear_reg);
			if (shirq->regs.reset_to_clear)
				tmp &= ~(j << shirq->irq_bit_off);
			else
				tmp |= (j << shirq->irq_bit_off);
			writel(tmp, shirq->base + shirq->regs.clear_reg);
		}
	}
	/* Re-enable the parent line once all pending children are handled. */
	chip->irq_unmask(&desc->irq_data);
}
| 229 | |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 230 | static void __init spear_shirq_register(struct spear_shirq *shirq) |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 231 | { |
| 232 | int i; |
| 233 | |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 234 | if (shirq->invalid_irq) |
| 235 | return; |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 236 | |
Thomas Gleixner | 6845664a | 2011-03-24 13:25:22 +0100 | [diff] [blame] | 237 | irq_set_chained_handler(shirq->irq, shirq_handler); |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 238 | for (i = 0; i < shirq->irq_nr; i++) { |
| 239 | irq_set_chip_and_handler(shirq->irq_base + i, |
Thomas Gleixner | f38c02f | 2011-03-24 13:35:09 +0100 | [diff] [blame] | 240 | &shirq_chip, handle_simple_irq); |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 241 | set_irq_flags(shirq->irq_base + i, IRQF_VALID); |
| 242 | irq_set_chip_data(shirq->irq_base + i, shirq); |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 243 | } |
| 244 | |
Thomas Gleixner | 6845664a | 2011-03-24 13:25:22 +0100 | [diff] [blame] | 245 | irq_set_handler_data(shirq->irq, shirq); |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 246 | } |
| 247 | |
| 248 | static int __init shirq_init(struct spear_shirq **shirq_blocks, int block_nr, |
| 249 | struct device_node *np) |
| 250 | { |
| 251 | int i, irq_base, hwirq = 0, irq_nr = 0; |
| 252 | static struct irq_domain *shirq_domain; |
| 253 | void __iomem *base; |
| 254 | |
| 255 | base = of_iomap(np, 0); |
| 256 | if (!base) { |
| 257 | pr_err("%s: failed to map shirq registers\n", __func__); |
| 258 | return -ENXIO; |
| 259 | } |
| 260 | |
| 261 | for (i = 0; i < block_nr; i++) |
| 262 | irq_nr += shirq_blocks[i]->irq_nr; |
| 263 | |
| 264 | irq_base = irq_alloc_descs(-1, 0, irq_nr, 0); |
| 265 | if (IS_ERR_VALUE(irq_base)) { |
| 266 | pr_err("%s: irq desc alloc failed\n", __func__); |
| 267 | goto err_unmap; |
| 268 | } |
| 269 | |
| 270 | shirq_domain = irq_domain_add_legacy(np, irq_nr, irq_base, 0, |
| 271 | &irq_domain_simple_ops, NULL); |
| 272 | if (WARN_ON(!shirq_domain)) { |
| 273 | pr_warn("%s: irq domain init failed\n", __func__); |
| 274 | goto err_free_desc; |
| 275 | } |
| 276 | |
| 277 | for (i = 0; i < block_nr; i++) { |
| 278 | shirq_blocks[i]->base = base; |
| 279 | shirq_blocks[i]->irq_base = irq_find_mapping(shirq_domain, |
| 280 | hwirq); |
| 281 | shirq_blocks[i]->irq = irq_of_parse_and_map(np, i); |
| 282 | |
| 283 | spear_shirq_register(shirq_blocks[i]); |
| 284 | hwirq += shirq_blocks[i]->irq_nr; |
| 285 | } |
| 286 | |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 287 | return 0; |
Shiraz Hashim | 80515a5a | 2012-08-03 15:33:10 +0530 | [diff] [blame^] | 288 | |
| 289 | err_free_desc: |
| 290 | irq_free_descs(irq_base, irq_nr); |
| 291 | err_unmap: |
| 292 | iounmap(base); |
| 293 | return -ENXIO; |
| 294 | } |
| 295 | |
| 296 | int __init spear300_shirq_of_init(struct device_node *np, |
| 297 | struct device_node *parent) |
| 298 | { |
| 299 | return shirq_init(spear300_shirq_blocks, |
| 300 | ARRAY_SIZE(spear300_shirq_blocks), np); |
| 301 | } |
| 302 | |
| 303 | int __init spear310_shirq_of_init(struct device_node *np, |
| 304 | struct device_node *parent) |
| 305 | { |
| 306 | return shirq_init(spear310_shirq_blocks, |
| 307 | ARRAY_SIZE(spear310_shirq_blocks), np); |
| 308 | } |
| 309 | |
| 310 | int __init spear320_shirq_of_init(struct device_node *np, |
| 311 | struct device_node *parent) |
| 312 | { |
| 313 | return shirq_init(spear320_shirq_blocks, |
| 314 | ARRAY_SIZE(spear320_shirq_blocks), np); |
viresh kumar | 4c18e77 | 2010-05-03 09:24:30 +0100 | [diff] [blame] | 315 | } |