/*
 * Broadcom specific AMBA
 * PCI Host
 *
 * Licensed under the GNU/GPL. See COPYING for details.
 */

#include "bcma_private.h"
#include <linux/slab.h>
#include <linux/bcma/bcma.h>
#include <linux/pci.h>
#include <linux/module.h>

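/* Point the sliding BAR0 window(s) at the given core so that its register
 * space and its agent/wrapper space become visible through the host's PCI
 * MMIO mapping. PCIe Gen2 hosts use a different config word for the second
 * window.
 */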
static void bcma_host_pci_switch_core(struct bcma_device *core)
{
	int win2 = core->bus->host_is_pcie2 ?
		BCMA_PCIE2_BAR0_WIN2 : BCMA_PCI_BAR0_WIN2;

	pci_write_config_dword(core->bus->host_pci, BCMA_PCI_BAR0_WIN,
			       core->addr);
	pci_write_config_dword(core->bus->host_pci, win2, core->wrap);
	core->bus->mapped_core = core;
	bcma_debug(core->bus, "Switched to core: 0x%X\n", core->id.id);
}

/* Provide access to the requested core. Returns the base offset that has to
 * be added to every MMIO access; fixed windows are used when possible. */
static u16 bcma_host_pci_provide_access_to_core(struct bcma_device *core)
{
	switch (core->id.id) {
	case BCMA_CORE_CHIPCOMMON:
		return 3 * BCMA_CORE_SIZE;
	case BCMA_CORE_PCIE:
		return 2 * BCMA_CORE_SIZE;
	}

	if (core->bus->mapped_core != core)
		bcma_host_pci_switch_core(core);
	return 0;
}

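/* Single-register accessors: translate the per-core register offset into an
 * offset inside the BAR0 mapping (switching the sliding window if needed)
 * and perform the MMIO access.
 */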
static u8 bcma_host_pci_read8(struct bcma_device *core, u16 offset)
{
	offset += bcma_host_pci_provide_access_to_core(core);
	return ioread8(core->bus->mmio + offset);
}

static u16 bcma_host_pci_read16(struct bcma_device *core, u16 offset)
{
	offset += bcma_host_pci_provide_access_to_core(core);
	return ioread16(core->bus->mmio + offset);
}

static u32 bcma_host_pci_read32(struct bcma_device *core, u16 offset)
{
	offset += bcma_host_pci_provide_access_to_core(core);
	return ioread32(core->bus->mmio + offset);
}

static void bcma_host_pci_write8(struct bcma_device *core, u16 offset,
				 u8 value)
{
	offset += bcma_host_pci_provide_access_to_core(core);
	iowrite8(value, core->bus->mmio + offset);
}

static void bcma_host_pci_write16(struct bcma_device *core, u16 offset,
				  u16 value)
{
	offset += bcma_host_pci_provide_access_to_core(core);
	iowrite16(value, core->bus->mmio + offset);
}

static void bcma_host_pci_write32(struct bcma_device *core, u16 offset,
				  u32 value)
{
	offset += bcma_host_pci_provide_access_to_core(core);
	iowrite32(value, core->bus->mmio + offset);
}

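/* Optional block transfers: copy "count" bytes between a driver buffer and
 * the currently selected core using string MMIO. The buffer length must be
 * a multiple of reg_width.
 */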
#ifdef CONFIG_BCMA_BLOCKIO
static void bcma_host_pci_block_read(struct bcma_device *core, void *buffer,
				     size_t count, u16 offset, u8 reg_width)
{
	void __iomem *addr = core->bus->mmio + offset;
	if (core->bus->mapped_core != core)
		bcma_host_pci_switch_core(core);
	switch (reg_width) {
	case sizeof(u8):
		ioread8_rep(addr, buffer, count);
		break;
	case sizeof(u16):
		WARN_ON(count & 1);
		ioread16_rep(addr, buffer, count >> 1);
		break;
	case sizeof(u32):
		WARN_ON(count & 3);
		ioread32_rep(addr, buffer, count >> 2);
		break;
	default:
		WARN_ON(1);
	}
}

static void bcma_host_pci_block_write(struct bcma_device *core,
				      const void *buffer, size_t count,
				      u16 offset, u8 reg_width)
{
	void __iomem *addr = core->bus->mmio + offset;
	if (core->bus->mapped_core != core)
		bcma_host_pci_switch_core(core);
	switch (reg_width) {
	case sizeof(u8):
		iowrite8_rep(addr, buffer, count);
		break;
	case sizeof(u16):
		WARN_ON(count & 1);
		iowrite16_rep(addr, buffer, count >> 1);
		break;
	case sizeof(u32):
		WARN_ON(count & 3);
		iowrite32_rep(addr, buffer, count >> 2);
		break;
	default:
		WARN_ON(1);
	}
}
#endif

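/* Agent/wrapper register access: the core's wrapper space is reached through
 * the second window, one BCMA_CORE_SIZE above the core's register window in
 * the BAR0 mapping.
 */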
static u32 bcma_host_pci_aread32(struct bcma_device *core, u16 offset)
{
	if (core->bus->mapped_core != core)
		bcma_host_pci_switch_core(core);
	return ioread32(core->bus->mmio + (1 * BCMA_CORE_SIZE) + offset);
}

static void bcma_host_pci_awrite32(struct bcma_device *core, u16 offset,
				   u32 value)
{
	if (core->bus->mapped_core != core)
		bcma_host_pci_switch_core(core);
	iowrite32(value, core->bus->mmio + (1 * BCMA_CORE_SIZE) + offset);
}

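/* Host operations handed to the generic bcma bus code via bus->ops; the
 * bcma_read*()/bcma_write*() helpers dispatch through this table.
 */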
static const struct bcma_host_ops bcma_host_pci_ops = {
	.read8 = bcma_host_pci_read8,
	.read16 = bcma_host_pci_read16,
	.read32 = bcma_host_pci_read32,
	.write8 = bcma_host_pci_write8,
	.write16 = bcma_host_pci_write16,
	.write32 = bcma_host_pci_write32,
#ifdef CONFIG_BCMA_BLOCKIO
	.block_read = bcma_host_pci_block_read,
	.block_write = bcma_host_pci_block_write,
#endif
	.aread32 = bcma_host_pci_aread32,
	.awrite32 = bcma_host_pci_awrite32,
};

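/* Probe: enable the PCI device, claim and map BAR0, set up the bcma_bus
 * structure, scan the bus to detect the chip and its cores (including
 * whether the host bridge is a PCIe Gen2 core), then register the bus.
 */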
static int bcma_host_pci_probe(struct pci_dev *dev,
			       const struct pci_device_id *id)
{
	struct bcma_bus *bus;
	int err = -ENOMEM;
	const char *name;
	u32 val;

	/* Alloc */
	bus = kzalloc(sizeof(*bus), GFP_KERNEL);
	if (!bus)
		goto out;

	/* Basic PCI configuration */
	err = pci_enable_device(dev);
	if (err)
		goto err_kfree_bus;

	name = dev_name(&dev->dev);
	if (dev->driver && dev->driver->name)
		name = dev->driver->name;
	err = pci_request_regions(dev, name);
	if (err)
		goto err_pci_disable;
	pci_set_master(dev);

	/* Disable the RETRY_TIMEOUT register (0x41) to keep
	 * PCI Tx retries from interfering with C3 CPU state */
	pci_read_config_dword(dev, 0x40, &val);
	if ((val & 0x0000ff00) != 0)
		pci_write_config_dword(dev, 0x40, val & 0xffff00ff);

	/* SSB needed additional powering up; do any AMBA cards use plain
	 * (non-PCIe) PCI at all? */
	if (!pci_is_pcie(dev)) {
		bcma_err(bus, "PCI card detected, they are not supported.\n");
		err = -ENXIO;
		goto err_pci_release_regions;
	}

	/* Map MMIO */
	err = -ENOMEM;
	bus->mmio = pci_iomap(dev, 0, ~0UL);
	if (!bus->mmio)
		goto err_pci_release_regions;

	/* Host specific */
	bus->host_pci = dev;
	bus->hosttype = BCMA_HOSTTYPE_PCI;
	bus->ops = &bcma_host_pci_ops;

	bus->boardinfo.vendor = bus->host_pci->subsystem_vendor;
	bus->boardinfo.type = bus->host_pci->subsystem_device;

	/* Initialize struct, detect chip */
	bcma_init_bus(bus);

	/* Scan bus to find out generation of PCIe core */
	err = bcma_bus_scan(bus);
	if (err)
		goto err_pci_unmap_mmio;

	if (bcma_find_core(bus, BCMA_CORE_PCIE2))
		bus->host_is_pcie2 = true;

	/* Register */
	err = bcma_bus_register(bus);
	if (err)
		goto err_unregister_cores;

	pci_set_drvdata(dev, bus);

out:
	return err;

err_unregister_cores:
	bcma_unregister_cores(bus);
err_pci_unmap_mmio:
	pci_iounmap(dev, bus->mmio);
err_pci_release_regions:
	pci_release_regions(dev);
err_pci_disable:
	pci_disable_device(dev);
err_kfree_bus:
	kfree(bus);
	return err;
}

static void bcma_host_pci_remove(struct pci_dev *dev)
{
	struct bcma_bus *bus = pci_get_drvdata(dev);

	bcma_bus_unregister(bus);
	pci_iounmap(dev, bus->mmio);
	pci_release_regions(dev);
	pci_disable_device(dev);
	kfree(bus);
}

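/* System sleep: the BAR0 window registers live in PCI config space and are
 * not guaranteed to survive suspend, so drop the cached core mapping before
 * handing off to the generic bus suspend/resume code.
 */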
#ifdef CONFIG_PM_SLEEP
static int bcma_host_pci_suspend(struct device *dev)
{
	struct pci_dev *pdev = to_pci_dev(dev);
	struct bcma_bus *bus = pci_get_drvdata(pdev);

	bus->mapped_core = NULL;

	return bcma_bus_suspend(bus);
}

static int bcma_host_pci_resume(struct device *dev)
{
	struct pci_dev *pdev = to_pci_dev(dev);
	struct bcma_bus *bus = pci_get_drvdata(pdev);

	return bcma_bus_resume(bus);
}

static SIMPLE_DEV_PM_OPS(bcma_pm_ops, bcma_host_pci_suspend,
			 bcma_host_pci_resume);
#define BCMA_PM_OPS (&bcma_pm_ops)

#else /* CONFIG_PM_SLEEP */

#define BCMA_PM_OPS NULL

#endif /* CONFIG_PM_SLEEP */

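/* PCIe device IDs of Broadcom chips that carry a bcma bus behind BAR0 (most
 * are 802.11 wireless parts). A few IDs are listed in decimal, with the hex
 * value given in a comment.
 */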
static const struct pci_device_id bcma_pci_bridge_tbl[] = {
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x0576) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x4313) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 43224) },	/* 0xa8d8 */
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x4331) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x4353) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x4357) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x4358) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x4359) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x4360) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x4365) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x43a0) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x43a9) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x43aa) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x43b1) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 0x4727) },
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 43227) },	/* 0xa8db, BCM43217 (sic!) */
	{ PCI_DEVICE(PCI_VENDOR_ID_BROADCOM, 43228) },	/* 0xa8dc */
	{ 0, },
};
MODULE_DEVICE_TABLE(pci, bcma_pci_bridge_tbl);

static struct pci_driver bcma_pci_bridge_driver = {
	.name = "bcma-pci-bridge",
	.id_table = bcma_pci_bridge_tbl,
	.probe = bcma_host_pci_probe,
	.remove = bcma_host_pci_remove,
	.driver.pm = BCMA_PM_OPS,
};

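/* These are called from the bcma bus core's module init/exit rather than via
 * module_pci_driver(), so the PCI bridge registers together with the rest of
 * the bcma module.
 */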
int __init bcma_host_pci_init(void)
{
	return pci_register_driver(&bcma_pci_bridge_driver);
}

void __exit bcma_host_pci_exit(void)
{
	pci_unregister_driver(&bcma_pci_bridge_driver);
}

/**************************************************
 * Runtime ops for drivers.
 **************************************************/

/* See also pcicore_up */
void bcma_host_pci_up(struct bcma_bus *bus)
{
	if (bus->hosttype != BCMA_HOSTTYPE_PCI)
		return;

	if (bus->host_is_pcie2)
		bcma_core_pcie2_up(&bus->drv_pcie2);
	else
		bcma_core_pci_up(&bus->drv_pci[0]);
}
EXPORT_SYMBOL_GPL(bcma_host_pci_up);

/* See also pcicore_down */
void bcma_host_pci_down(struct bcma_bus *bus)
{
	if (bus->hosttype != BCMA_HOSTTYPE_PCI)
		return;

	if (!bus->host_is_pcie2)
		bcma_core_pci_down(&bus->drv_pci[0]);
}
EXPORT_SYMBOL_GPL(bcma_host_pci_down);

/* See also si_pci_setup */
int bcma_host_pci_irq_ctl(struct bcma_bus *bus, struct bcma_device *core,
			  bool enable)
{
	struct pci_dev *pdev;
	u32 coremask, tmp;
	int err = 0;

	if (bus->hosttype != BCMA_HOSTTYPE_PCI) {
		/* This bcma device is not on a PCI host bus, so its IRQs are
		 * not routed through the PCI core and we must not touch the
		 * routing here. */
		goto out;
	}

	pdev = bus->host_pci;

	err = pci_read_config_dword(pdev, BCMA_PCI_IRQMASK, &tmp);
	if (err)
		goto out;

	coremask = BIT(core->core_index) << 8;
	if (enable)
		tmp |= coremask;
	else
		tmp &= ~coremask;

	err = pci_write_config_dword(pdev, BCMA_PCI_IRQMASK, tmp);

out:
	return err;
}
EXPORT_SYMBOL_GPL(bcma_host_pci_irq_ctl);
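
/*
 * Illustrative use (sketch, not taken from a real driver): a core driver
 * enables IRQ routing for its core before requesting the PCI interrupt:
 *
 *	err = bcma_host_pci_irq_ctl(core->bus, core, true);
 *	if (err)
 *		return err;
 */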