/*
 * TI Common Platform Interrupt Controller (cp_intc) driver
 *
 * Author: Steve Chen <schen@mvista.com>
 * Copyright (C) 2008-2009, MontaVista Software, Inc. <source@mvista.com>
 *
 * This file is licensed under the terms of the GNU General Public License
 * version 2. This program is licensed "as is" without any warranty of any
 * kind, whether express or implied.
 */

#include <linux/export.h>
#include <linux/init.h>
#include <linux/irq.h>
#include <linux/irqdomain.h>
#include <linux/io.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>

#include <mach/common.h>
#include <mach/cp_intc.h>

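/* Register accessors, relative to the ioremapped INTC base address */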
static inline unsigned int cp_intc_read(unsigned offset)
{
	return __raw_readl(davinci_intc_base + offset);
}

static inline void cp_intc_write(unsigned long value, unsigned offset)
{
	__raw_writel(value, davinci_intc_base + offset);
}

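/* Acknowledge an interrupt by clearing its system interrupt status bit */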
static void cp_intc_ack_irq(struct irq_data *d)
{
	cp_intc_write(d->hwirq, CP_INTC_SYS_STAT_IDX_CLR);
}

/* Disable interrupt */
static void cp_intc_mask_irq(struct irq_data *d)
{
	/* XXX don't know why we need to disable nIRQ here... */
	cp_intc_write(1, CP_INTC_HOST_ENABLE_IDX_CLR);
	cp_intc_write(d->hwirq, CP_INTC_SYS_ENABLE_IDX_CLR);
	cp_intc_write(1, CP_INTC_HOST_ENABLE_IDX_SET);
}

/* Enable interrupt */
static void cp_intc_unmask_irq(struct irq_data *d)
{
	cp_intc_write(d->hwirq, CP_INTC_SYS_ENABLE_IDX_SET);
}

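/* Program the trigger polarity and edge/level type for a system interrupt */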
static int cp_intc_set_irq_type(struct irq_data *d, unsigned int flow_type)
{
	unsigned reg		= BIT_WORD(d->hwirq);
	unsigned mask		= BIT_MASK(d->hwirq);
	unsigned polarity	= cp_intc_read(CP_INTC_SYS_POLARITY(reg));
	unsigned type		= cp_intc_read(CP_INTC_SYS_TYPE(reg));

	switch (flow_type) {
	case IRQ_TYPE_EDGE_RISING:
		polarity |= mask;
		type |= mask;
		break;
	case IRQ_TYPE_EDGE_FALLING:
		polarity &= ~mask;
		type |= mask;
		break;
	case IRQ_TYPE_LEVEL_HIGH:
		polarity |= mask;
		type &= ~mask;
		break;
	case IRQ_TYPE_LEVEL_LOW:
		polarity &= ~mask;
		type &= ~mask;
		break;
	default:
		return -EINVAL;
	}

	cp_intc_write(polarity, CP_INTC_SYS_POLARITY(reg));
	cp_intc_write(type, CP_INTC_SYS_TYPE(reg));

	return 0;
}

static struct irq_chip cp_intc_irq_chip = {
	.name		= "cp_intc",
	.irq_ack	= cp_intc_ack_irq,
	.irq_mask	= cp_intc_mask_irq,
	.irq_unmask	= cp_intc_unmask_irq,
	.irq_set_type	= cp_intc_set_irq_type,
	.flags		= IRQCHIP_SKIP_SET_WAKE,
};

static struct irq_domain *cp_intc_domain;

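/* Set up the irq_chip and flow handler for each IRQ as it is mapped into the domain */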
static int cp_intc_host_map(struct irq_domain *h, unsigned int virq,
			    irq_hw_number_t hw)
{
	pr_debug("cp_intc_host_map(%d, 0x%lx)\n", virq, hw);

	irq_set_chip(virq, &cp_intc_irq_chip);
	irq_set_probe(virq);
	irq_set_handler(virq, handle_edge_irq);
	return 0;
}

static const struct irq_domain_ops cp_intc_host_ops = {
	.map = cp_intc_host_map,
	.xlate = irq_domain_xlate_onetwocell,
};

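/*
 * Probe and initialise the controller.  'node' is the interrupt
 * controller's device tree node when booting with DT, or NULL when
 * called from legacy board code via cp_intc_init().
 */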
int __init cp_intc_of_init(struct device_node *node, struct device_node *parent)
{
	u32 num_irq		= davinci_soc_info.intc_irq_num;
	u8 *irq_prio		= davinci_soc_info.intc_irq_prios;
	u32 *host_map		= davinci_soc_info.intc_host_map;
	unsigned num_reg	= BITS_TO_LONGS(num_irq);
	int i, irq_base;

	davinci_intc_type = DAVINCI_INTC_TYPE_CP_INTC;
	if (node) {
		davinci_intc_base = of_iomap(node, 0);
		if (of_property_read_u32(node, "ti,intc-size", &num_irq))
			pr_warn("unable to get intc-size, default to %d\n",
				num_irq);
	} else {
		davinci_intc_base = ioremap(davinci_soc_info.intc_base, SZ_8K);
	}
	if (WARN_ON(!davinci_intc_base))
		return -EINVAL;

	cp_intc_write(0, CP_INTC_GLOBAL_ENABLE);

	/* Disable all host interrupts */
	cp_intc_write(0, CP_INTC_HOST_ENABLE(0));

	/* Disable system interrupts */
	for (i = 0; i < num_reg; i++)
		cp_intc_write(~0, CP_INTC_SYS_ENABLE_CLR(i));

	/* Set to normal mode, no nesting, no priority hold */
	cp_intc_write(0, CP_INTC_CTRL);
	cp_intc_write(0, CP_INTC_HOST_CTRL);

	/* Clear system interrupt status */
	for (i = 0; i < num_reg; i++)
		cp_intc_write(~0, CP_INTC_SYS_STAT_CLR(i));

	/* Enable nIRQ (what about nFIQ?) */
	cp_intc_write(1, CP_INTC_HOST_ENABLE_IDX_SET);

	/*
	 * Priority is determined by host channel: a lower channel number has
	 * higher priority, i.e. channel 0 has the highest priority and
	 * channel 31 has the lowest priority.
	 */
	num_reg = (num_irq + 3) >> 2;	/* 4 channels per register */
	if (irq_prio) {
		unsigned j, k;
		u32 val;

		for (k = i = 0; i < num_reg; i++) {
			for (val = j = 0; j < 4; j++, k++) {
				val >>= 8;
				if (k < num_irq)
					val |= irq_prio[k] << 24;
			}

			cp_intc_write(val, CP_INTC_CHAN_MAP(i));
		}
	} else {
		/*
		 * Default everything to channel 15 if priority not specified.
		 * Note that channels 0-1 are mapped to nFIQ and channels 2-31
		 * are mapped to nIRQ.
		 */
		for (i = 0; i < num_reg; i++)
			cp_intc_write(0x0f0f0f0f, CP_INTC_CHAN_MAP(i));
	}

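	/*
	 * Apply the SoC-specific host interrupt map, if one is
	 * provided; the list is terminated by -1.
	 */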
	if (host_map)
		for (i = 0; host_map[i] != -1; i++)
			cp_intc_write(host_map[i], CP_INTC_HOST_MAP(i));

	irq_base = irq_alloc_descs(-1, 0, num_irq, 0);
	if (irq_base < 0) {
		pr_warn("Couldn't allocate IRQ numbers\n");
		irq_base = 0;
	}

	/* create a legacy host */
	cp_intc_domain = irq_domain_add_legacy(node, num_irq,
					irq_base, 0, &cp_intc_host_ops, NULL);

	if (!cp_intc_domain) {
		pr_err("cp_intc: failed to allocate irq host!\n");
		return -EINVAL;
	}

	/* Enable global interrupt */
	cp_intc_write(1, CP_INTC_GLOBAL_ENABLE);

	return 0;
}

void __init cp_intc_init(void)
{
	cp_intc_of_init(NULL, NULL);
}