/*
 * SPEAr platform shared irq layer source file
 *
 * Copyright (C) 2009-2012 ST Microelectronics
 * Viresh Kumar <viresh.linux@gmail.com>
 *
 * Copyright (C) 2012 ST Microelectronics
 * Shiraz Hashim <shiraz.hashim@st.com>
 *
 * This file is licensed under the terms of the GNU General Public
 * License version 2. This program is licensed "as is" without any
 * warranty of any kind, whether express or implied.
 */
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/err.h>
#include <linux/export.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/irq.h>
#include <linux/irqdomain.h>
#include <linux/irqchip/spear-shirq.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>
#include <linux/spinlock.h>

#include "irqchip.h"

static DEFINE_SPINLOCK(lock);

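/*
 * Each struct spear_shirq below describes one block of interrupts that is
 * multiplexed onto a single parent interrupt line: how many interrupts the
 * block carries (irq_nr), at which bit position they start in the shared
 * registers (irq_bit_off), and the offsets of the enable, status and clear
 * registers. An offset of -1 means the block has no such register. The
 * reset_to_enb / reset_to_clear flags mark registers with inverted polarity,
 * i.e. a bit must be written as 0 to enable or clear the interrupt.
 */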
/* spear300 shared irq registers offsets and masks */
#define SPEAR300_INT_ENB_MASK_REG	0x54
#define SPEAR300_INT_STS_MASK_REG	0x58

static struct spear_shirq spear300_shirq_ras1 = {
	.irq_nr = 9,
	.irq_bit_off = 0,
	.regs = {
		.enb_reg = SPEAR300_INT_ENB_MASK_REG,
		.status_reg = SPEAR300_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

static struct spear_shirq *spear300_shirq_blocks[] = {
	&spear300_shirq_ras1,
};

/* spear310 shared irq registers offsets and masks */
#define SPEAR310_INT_STS_MASK_REG	0x04

static struct spear_shirq spear310_shirq_ras1 = {
	.irq_nr = 8,
	.irq_bit_off = 0,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR310_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

static struct spear_shirq spear310_shirq_ras2 = {
	.irq_nr = 5,
	.irq_bit_off = 8,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR310_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

static struct spear_shirq spear310_shirq_ras3 = {
	.irq_nr = 1,
	.irq_bit_off = 13,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR310_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

static struct spear_shirq spear310_shirq_intrcomm_ras = {
	.irq_nr = 3,
	.irq_bit_off = 14,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR310_INT_STS_MASK_REG,
		.clear_reg = -1,
	},
};

static struct spear_shirq *spear310_shirq_blocks[] = {
	&spear310_shirq_ras1,
	&spear310_shirq_ras2,
	&spear310_shirq_ras3,
	&spear310_shirq_intrcomm_ras,
};

/* spear320 shared irq registers offsets and masks */
#define SPEAR320_INT_STS_MASK_REG	0x04
#define SPEAR320_INT_CLR_MASK_REG	0x04
#define SPEAR320_INT_ENB_MASK_REG	0x08

static struct spear_shirq spear320_shirq_ras1 = {
	.irq_nr = 3,
	.irq_bit_off = 7,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR320_INT_STS_MASK_REG,
		.clear_reg = SPEAR320_INT_CLR_MASK_REG,
		.reset_to_clear = 1,
	},
};

static struct spear_shirq spear320_shirq_ras2 = {
	.irq_nr = 1,
	.irq_bit_off = 10,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR320_INT_STS_MASK_REG,
		.clear_reg = SPEAR320_INT_CLR_MASK_REG,
		.reset_to_clear = 1,
	},
};

static struct spear_shirq spear320_shirq_ras3 = {
	.irq_nr = 3,
	.irq_bit_off = 0,
	.invalid_irq = 1,
	.regs = {
		.enb_reg = SPEAR320_INT_ENB_MASK_REG,
		.reset_to_enb = 1,
		.status_reg = SPEAR320_INT_STS_MASK_REG,
		.clear_reg = SPEAR320_INT_CLR_MASK_REG,
		.reset_to_clear = 1,
	},
};

static struct spear_shirq spear320_shirq_intrcomm_ras = {
	.irq_nr = 11,
	.irq_bit_off = 11,
	.regs = {
		.enb_reg = -1,
		.status_reg = SPEAR320_INT_STS_MASK_REG,
		.clear_reg = SPEAR320_INT_CLR_MASK_REG,
		.reset_to_clear = 1,
	},
};

static struct spear_shirq *spear320_shirq_blocks[] = {
	&spear320_shirq_ras3,
	&spear320_shirq_ras1,
	&spear320_shirq_ras2,
	&spear320_shirq_intrcomm_ras,
};

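/*
 * Mask or unmask one demultiplexed interrupt by updating its bit in the
 * block's enable register. Blocks without an enable register (enb_reg == -1)
 * cannot be masked individually, so the request is silently ignored for them.
 */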
static void shirq_irq_mask_unmask(struct irq_data *d, bool mask)
{
	struct spear_shirq *shirq = irq_data_get_irq_chip_data(d);
	u32 val, offset = d->irq - shirq->irq_base;
	unsigned long flags;

	if (shirq->regs.enb_reg == -1)
		return;

	spin_lock_irqsave(&lock, flags);
	val = readl(shirq->base + shirq->regs.enb_reg);

	if (mask ^ shirq->regs.reset_to_enb)
		val &= ~(0x1 << shirq->irq_bit_off << offset);
	else
		val |= 0x1 << shirq->irq_bit_off << offset;

	writel(val, shirq->base + shirq->regs.enb_reg);
	spin_unlock_irqrestore(&lock, flags);
}

static void shirq_irq_mask(struct irq_data *d)
{
	shirq_irq_mask_unmask(d, true);
}

static void shirq_irq_unmask(struct irq_data *d)
{
	shirq_irq_mask_unmask(d, false);
}

static struct irq_chip shirq_chip = {
	.name		= "spear-shirq",
	.irq_ack	= shirq_irq_mask,
	.irq_mask	= shirq_irq_mask,
	.irq_unmask	= shirq_irq_unmask,
};

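/*
 * Chained handler for the parent (multiplexed) interrupt. It acks the parent
 * interrupt at its own controller, then reads the shared status register and
 * calls generic_handle_irq() for every pending interrupt that belongs to this
 * block, clearing it in the block's clear register when one exists. The loop
 * repeats until no interrupt of the block is pending any more, after which
 * the parent interrupt is unmasked again.
 */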
static void shirq_handler(unsigned irq, struct irq_desc *desc)
{
	u32 i, j, val, mask, tmp;
	struct irq_chip *chip;
	struct spear_shirq *shirq = irq_get_handler_data(irq);

	chip = irq_get_chip(irq);
	chip->irq_ack(&desc->irq_data);

	mask = ((0x1 << shirq->irq_nr) - 1) << shirq->irq_bit_off;
	while ((val = readl(shirq->base + shirq->regs.status_reg) &
				mask)) {

		val >>= shirq->irq_bit_off;
		for (i = 0, j = 1; i < shirq->irq_nr; i++, j <<= 1) {

			if (!(j & val))
				continue;

			generic_handle_irq(shirq->irq_base + i);

			/* clear interrupt */
			if (shirq->regs.clear_reg == -1)
				continue;

			tmp = readl(shirq->base + shirq->regs.clear_reg);
			if (shirq->regs.reset_to_clear)
				tmp &= ~(j << shirq->irq_bit_off);
			else
				tmp |= (j << shirq->irq_bit_off);
			writel(tmp, shirq->base + shirq->regs.clear_reg);
		}
	}
	chip->irq_unmask(&desc->irq_data);
}

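/*
 * Install the chained handler on the block's parent interrupt and set up the
 * shared irq_chip and a simple flow handler for every interrupt the block
 * demultiplexes. Blocks marked invalid_irq are skipped entirely.
 */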
static void __init spear_shirq_register(struct spear_shirq *shirq)
{
	int i;

	if (shirq->invalid_irq)
		return;

	irq_set_chained_handler(shirq->irq, shirq_handler);
	for (i = 0; i < shirq->irq_nr; i++) {
		irq_set_chip_and_handler(shirq->irq_base + i,
					 &shirq_chip, handle_simple_irq);
		set_irq_flags(shirq->irq_base + i, IRQF_VALID);
		irq_set_chip_data(shirq->irq_base + i, shirq);
	}

	irq_set_handler_data(shirq->irq, shirq);
}

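/*
 * Common probe path for all SoC variants: map the shared register block,
 * allocate one Linux interrupt descriptor per demultiplexed interrupt, add a
 * legacy irq domain covering all of them, and then wire up each block with
 * its base address, its slice of the allocated irq range and its parent
 * interrupt taken from the device tree node.
 */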
static int __init shirq_init(struct spear_shirq **shirq_blocks, int block_nr,
		struct device_node *np)
{
	int i, irq_base, hwirq = 0, irq_nr = 0;
	static struct irq_domain *shirq_domain;
	void __iomem *base;

	base = of_iomap(np, 0);
	if (!base) {
		pr_err("%s: failed to map shirq registers\n", __func__);
		return -ENXIO;
	}

	for (i = 0; i < block_nr; i++)
		irq_nr += shirq_blocks[i]->irq_nr;

	irq_base = irq_alloc_descs(-1, 0, irq_nr, 0);
	if (IS_ERR_VALUE(irq_base)) {
		pr_err("%s: irq desc alloc failed\n", __func__);
		goto err_unmap;
	}

	shirq_domain = irq_domain_add_legacy(np, irq_nr, irq_base, 0,
			&irq_domain_simple_ops, NULL);
	if (WARN_ON(!shirq_domain)) {
		pr_warn("%s: irq domain init failed\n", __func__);
		goto err_free_desc;
	}

	for (i = 0; i < block_nr; i++) {
		shirq_blocks[i]->base = base;
		shirq_blocks[i]->irq_base = irq_find_mapping(shirq_domain,
				hwirq);
		shirq_blocks[i]->irq = irq_of_parse_and_map(np, i);

		spear_shirq_register(shirq_blocks[i]);
		hwirq += shirq_blocks[i]->irq_nr;
	}

	return 0;

err_free_desc:
	irq_free_descs(irq_base, irq_nr);
err_unmap:
	iounmap(base);
	return -ENXIO;
}

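/*
 * Per-SoC entry points. IRQCHIP_DECLARE() places an entry in the irqchip
 * of_table linker section, so the matching init function is invoked via
 * of_irq_init() for every device tree node carrying the corresponding
 * compatible string.
 */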
int __init spear300_shirq_of_init(struct device_node *np,
		struct device_node *parent)
{
	return shirq_init(spear300_shirq_blocks,
			ARRAY_SIZE(spear300_shirq_blocks), np);
}
IRQCHIP_DECLARE(spear300_shirq, "st,spear300-shirq", spear300_shirq_of_init);

int __init spear310_shirq_of_init(struct device_node *np,
		struct device_node *parent)
{
	return shirq_init(spear310_shirq_blocks,
			ARRAY_SIZE(spear310_shirq_blocks), np);
}
IRQCHIP_DECLARE(spear310_shirq, "st,spear310-shirq", spear310_shirq_of_init);

int __init spear320_shirq_of_init(struct device_node *np,
		struct device_node *parent)
{
	return shirq_init(spear320_shirq_blocks,
			ARRAY_SIZE(spear320_shirq_blocks), np);
}
IRQCHIP_DECLARE(spear320_shirq, "st,spear320-shirq", spear320_shirq_of_init);
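
/*
 * For reference, a device tree node consumed by this driver provides the
 * shared register block in "reg" and one parent interrupt per shirq block in
 * "interrupts" (in the order of the *_shirq_blocks[] array, as each block's
 * parent is looked up with irq_of_parse_and_map(np, i)). A minimal sketch for
 * SPEAr320 -- the address, size and parent interrupt numbers below are
 * illustrative placeholders, not taken from a real board file:
 *
 *	shirq: interrupt-controller@b3000000 {
 *		compatible = "st,spear320-shirq";
 *		reg = <0xb3000000 0x1000>;
 *		interrupt-controller;
 *		#interrupt-cells = <1>;
 *		interrupts = <30 28 29 1>;	// one entry per shirq block
 *	};
 */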