Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2008 Advanced Micro Devices, Inc. |
| 3 | * Copyright 2008 Red Hat Inc. |
| 4 | * Copyright 2009 Jerome Glisse. |
| 5 | * |
| 6 | * Permission is hereby granted, free of charge, to any person obtaining a |
| 7 | * copy of this software and associated documentation files (the "Software"), |
| 8 | * to deal in the Software without restriction, including without limitation |
| 9 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
| 10 | * and/or sell copies of the Software, and to permit persons to whom the |
| 11 | * Software is furnished to do so, subject to the following conditions: |
| 12 | * |
| 13 | * The above copyright notice and this permission notice shall be included in |
| 14 | * all copies or substantial portions of the Software. |
| 15 | * |
| 16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
| 17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
| 19 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR |
| 20 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, |
| 21 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR |
| 22 | * OTHER DEALINGS IN THE SOFTWARE. |
| 23 | * |
| 24 | * Authors: Dave Airlie |
| 25 | * Alex Deucher |
| 26 | * Jerome Glisse |
| 27 | */ |
| 28 | #include "drmP.h" |
Dave Airlie | eb1f8e4 | 2010-05-07 06:42:51 +0000 | [diff] [blame] | 29 | #include "drm_crtc_helper.h" |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 30 | #include "radeon_drm.h" |
| 31 | #include "radeon_reg.h" |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 32 | #include "radeon.h" |
| 33 | #include "atom.h" |
| 34 | |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 35 | #define RADEON_WAIT_IDLE_TIMEOUT 200 |
| 36 | |
/*
 * radeon_driver_irq_handler_kms - top-level KMS interrupt handler.
 *
 * Recovers the drm_device from the opaque handler argument (DRM_IRQ_ARGS
 * is a macro defined elsewhere; it presumably declares 'arg' -- confirm in
 * drmP.h) and delegates all actual interrupt processing to the
 * chip-specific radeon_irq_process(), whose irqreturn_t result is
 * passed straight back to the IRQ core.
 */
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 37 | irqreturn_t radeon_driver_irq_handler_kms(DRM_IRQ_ARGS) |
| 38 | { |
| 39 | struct drm_device *dev = (struct drm_device *) arg; |
| 40 | struct radeon_device *rdev = dev->dev_private; |
| 41 | |
| 42 | return radeon_irq_process(rdev); |
| 43 | } |
| 44 | |
Alex Deucher | d4877cf | 2009-12-04 16:56:37 -0500 | [diff] [blame] | 45 | /* |
| 46 | * Handle hotplug events outside the interrupt handler proper. |
| 47 | */ |
/*
 * radeon_hotplug_work_func - deferred hotplug handling (workqueue context).
 *
 * Scheduled from the interrupt path so that connector probing can sleep.
 * Walks every connector on the mode_config list, lets the driver react via
 * radeon_connector_hotplug(), then notifies userspace through the DRM
 * helper (which emits a hotplug uevent).
 */
| 48 | static void radeon_hotplug_work_func(struct work_struct *work) |
| 49 | { |
| 50 | struct radeon_device *rdev = container_of(work, struct radeon_device, |
| 51 | hotplug_work); |
| 52 | struct drm_device *dev = rdev->ddev; |
| 53 | struct drm_mode_config *mode_config = &dev->mode_config; |
| 54 | struct drm_connector *connector; |
| 55 | |
/* Skip the walk entirely when no connectors are registered yet. */
| 56 | if (mode_config->num_connector) { |
| 57 | list_for_each_entry(connector, &mode_config->connector_list, head) |
| 58 | radeon_connector_hotplug(connector); |
| 59 | } |
| 60 | /* Just fire off a uevent and let userspace tell us what to do */ |
Dave Airlie | eb1f8e4 | 2010-05-07 06:42:51 +0000 | [diff] [blame] | 61 | drm_helper_hpd_irq_event(dev); |
Alex Deucher | d4877cf | 2009-12-04 16:56:37 -0500 | [diff] [blame] | 62 | } |
| 63 | |
/*
 * radeon_driver_irq_preinstall_kms - DRM pre-install hook.
 *
 * Clears every software interrupt-enable flag (per-ring sw_int, gui_idle,
 * all HPD pins, and per-CRTC vblank/pageflip/afmt), pushes that state to
 * the hardware with radeon_irq_set() while holding rdev->irq.lock, and
 * finally runs radeon_irq_process() once to acknowledge/clear any bits
 * that were already pending before the handler is installed.
 */
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 64 | void radeon_driver_irq_preinstall_kms(struct drm_device *dev) |
| 65 | { |
| 66 | struct radeon_device *rdev = dev->dev_private; |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 67 | unsigned long irqflags; |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 68 | unsigned i; |
| 69 | |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 70 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 71 | /* Disable *all* interrupts */ |
Alex Deucher | 1b37078 | 2011-11-17 20:13:28 -0500 | [diff] [blame] | 72 | for (i = 0; i < RADEON_NUM_RINGS; i++) |
| 73 | rdev->irq.sw_int[i] = false; |
Alex Deucher | 2031f77 | 2010-04-22 12:52:11 -0400 | [diff] [blame] | 74 | rdev->irq.gui_idle = false; |
Ilija Hadzic | 54bd520 | 2011-10-26 15:43:58 -0400 | [diff] [blame] | 75 | for (i = 0; i < RADEON_MAX_HPD_PINS; i++) |
Alex Deucher | 9e7b414 | 2010-03-16 17:08:06 -0400 | [diff] [blame] | 76 | rdev->irq.hpd[i] = false; |
Ilija Hadzic | 54bd520 | 2011-10-26 15:43:58 -0400 | [diff] [blame] | 77 | for (i = 0; i < RADEON_MAX_CRTCS; i++) { |
| 78 | rdev->irq.crtc_vblank_int[i] = false; |
Alex Deucher | 6f34be5 | 2010-11-21 10:59:01 -0500 | [diff] [blame] | 79 | rdev->irq.pflip[i] = false; |
Alex Deucher | f122c61 | 2012-03-30 08:59:57 -0400 | [diff] [blame] | 80 | rdev->irq.afmt[i] = false; |
Alex Deucher | 6f34be5 | 2010-11-21 10:59:01 -0500 | [diff] [blame] | 81 | } |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 82 | radeon_irq_set(rdev); |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 83 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 84 | /* Clear bits */ |
| 85 | radeon_irq_process(rdev); |
| 86 | } |
| 87 | |
/*
 * radeon_driver_irq_postinstall_kms - DRM post-install hook.
 *
 * Sets the 21-bit vblank counter wrap value expected by the vblank core,
 * then enables the software interrupt for every ring under rdev->irq.lock
 * and programs the hardware via radeon_irq_set().  Always returns 0.
 */
| 88 | int radeon_driver_irq_postinstall_kms(struct drm_device *dev) |
| 89 | { |
| 90 | struct radeon_device *rdev = dev->dev_private; |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 91 | unsigned long irqflags; |
Alex Deucher | 1b37078 | 2011-11-17 20:13:28 -0500 | [diff] [blame] | 92 | unsigned i; |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 93 | |
/* Hardware frame counter is 21 bits wide on these parts. */
| 94 | dev->max_vblank_count = 0x001fffff; |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 95 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
Alex Deucher | 1b37078 | 2011-11-17 20:13:28 -0500 | [diff] [blame] | 96 | for (i = 0; i < RADEON_NUM_RINGS; i++) |
| 97 | rdev->irq.sw_int[i] = true; |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 98 | radeon_irq_set(rdev); |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 99 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 100 | return 0; |
| 101 | } |
| 102 | |
/*
 * radeon_driver_irq_uninstall_kms - DRM uninstall hook.
 *
 * Mirrors the preinstall path: under rdev->irq.lock, clears every software
 * interrupt-enable flag and writes the disabled state to the hardware so
 * no further interrupts fire after the handler is removed.  Tolerates a
 * NULL rdev (device never finished init).
 */
| 103 | void radeon_driver_irq_uninstall_kms(struct drm_device *dev) |
| 104 | { |
| 105 | struct radeon_device *rdev = dev->dev_private; |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 106 | unsigned long irqflags; |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 107 | unsigned i; |
| 108 | |
| 109 | if (rdev == NULL) { |
| 110 | return; |
| 111 | } |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 112 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 113 | /* Disable *all* interrupts */ |
Alex Deucher | 1b37078 | 2011-11-17 20:13:28 -0500 | [diff] [blame] | 114 | for (i = 0; i < RADEON_NUM_RINGS; i++) |
| 115 | rdev->irq.sw_int[i] = false; |
Alex Deucher | 2031f77 | 2010-04-22 12:52:11 -0400 | [diff] [blame] | 116 | rdev->irq.gui_idle = false; |
Ilija Hadzic | 54bd520 | 2011-10-26 15:43:58 -0400 | [diff] [blame] | 117 | for (i = 0; i < RADEON_MAX_HPD_PINS; i++) |
Jerome Glisse | 003e69f | 2010-01-07 15:39:14 +0100 | [diff] [blame] | 118 | rdev->irq.hpd[i] = false; |
Ilija Hadzic | 54bd520 | 2011-10-26 15:43:58 -0400 | [diff] [blame] | 119 | for (i = 0; i < RADEON_MAX_CRTCS; i++) { |
| 120 | rdev->irq.crtc_vblank_int[i] = false; |
Alex Deucher | 6f34be5 | 2010-11-21 10:59:01 -0500 | [diff] [blame] | 121 | rdev->irq.pflip[i] = false; |
Alex Deucher | f122c61 | 2012-03-30 08:59:57 -0400 | [diff] [blame] | 122 | rdev->irq.afmt[i] = false; |
Alex Deucher | 6f34be5 | 2010-11-21 10:59:01 -0500 | [diff] [blame] | 123 | } |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 124 | radeon_irq_set(rdev); |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 125 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 126 | } |
| 127 | |
/*
 * radeon_msi_ok - decide whether MSI interrupts should be enabled.
 *
 * Order matters here: hard hardware limitations (pre-RV380, AGP) are
 * checked first, then the radeon_msi module parameter (1 = force on,
 * 0 = force off) overrides everything below it, then per-board quirks,
 * then family-wide defaults.  Returns true when MSI is safe to use.
 */
Alex Deucher | 8f6c25c | 2011-10-25 14:58:49 -0400 | [diff] [blame] | 128 | static bool radeon_msi_ok(struct radeon_device *rdev) |
| 129 | { |
| 130 | /* RV370/RV380 was first asic with MSI support */ |
| 131 | if (rdev->family < CHIP_RV380) |
| 132 | return false; |
| 133 | |
| 134 | /* MSIs don't work on AGP */ |
| 135 | if (rdev->flags & RADEON_IS_AGP) |
| 136 | return false; |
| 137 | |
Alex Deucher | a18cee1 | 2011-11-01 14:20:30 -0400 | [diff] [blame] | 138 | /* force MSI on */ |
| 139 | if (radeon_msi == 1) |
| 140 | return true; |
| 141 | else if (radeon_msi == 0) |
| 142 | return false; |
| 143 | |
Alex Deucher | b362105 | 2011-10-25 15:11:08 -0400 | [diff] [blame] | 144 | /* Quirks */ |
| 145 | /* HP RS690 only seems to work with MSIs. */ |
| 146 | if ((rdev->pdev->device == 0x791f) && |
| 147 | (rdev->pdev->subsystem_vendor == 0x103c) && |
| 148 | (rdev->pdev->subsystem_device == 0x30c2)) |
| 149 | return true; |
| 150 | |
Alex Deucher | 01e718e | 2011-11-01 14:14:18 -0400 | [diff] [blame] | 151 | /* Dell RS690 only seems to work with MSIs. */ |
| 152 | if ((rdev->pdev->device == 0x791f) && |
| 153 | (rdev->pdev->subsystem_vendor == 0x1028) && |
Alex Deucher | 44517c4 | 2012-01-15 08:51:12 -0500 | [diff] [blame] | 154 | (rdev->pdev->subsystem_device == 0x01fc)) |
| 155 | return true; |
| 156 | |
/* Same Dell board family, second subsystem-device variant (0x01fd). */
| 157 | /* Dell RS690 only seems to work with MSIs. */ |
| 158 | if ((rdev->pdev->device == 0x791f) && |
| 159 | (rdev->pdev->subsystem_vendor == 0x1028) && |
Alex Deucher | 01e718e | 2011-11-01 14:14:18 -0400 | [diff] [blame] | 160 | (rdev->pdev->subsystem_device == 0x01fd)) |
| 161 | return true; |
| 162 | |
Dave Airlie | 16a5e32 | 2012-04-13 11:14:50 +0100 | [diff] [blame] | 163 | /* RV515 seems to have MSI issues where it loses |
| 164 | * MSI rearms occasionally. This leads to lockups and freezes. |
| 165 | * disable it by default. |
| 166 | */ |
| 167 | if (rdev->family == CHIP_RV515) |
| 168 | return false; |
Alex Deucher | 8f6c25c | 2011-10-25 14:58:49 -0400 | [diff] [blame] | 169 | if (rdev->flags & RADEON_IS_IGP) { |
| 170 | /* APUs work fine with MSIs */ |
| 171 | if (rdev->family >= CHIP_PALM) |
| 172 | return true; |
| 173 | /* lots of IGPs have problems with MSIs */ |
| 174 | return false; |
| 175 | } |
| 176 | |
/* Everything else (discrete PCIe, RV380+): default to MSI on. */
| 177 | return true; |
| 178 | } |
| 179 | |
/*
 * radeon_irq_kms_init - set up the KMS interrupt infrastructure.
 *
 * Initializes the hotplug and HDMI-audio work items and the irq spinlock,
 * registers the vblank counters with the DRM core, opportunistically
 * enables MSI when radeon_msi_ok() allows it (failure to enable MSI is
 * non-fatal -- the driver just stays on legacy interrupts), and installs
 * the IRQ handler.  Returns 0 on success or a negative errno from
 * drm_vblank_init()/drm_irq_install(); on install failure the
 * irq.installed flag is rolled back.
 */
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 180 | int radeon_irq_kms_init(struct radeon_device *rdev) |
| 181 | { |
| 182 | int r = 0; |
| 183 | |
Tejun Heo | 32c87fc | 2011-01-03 14:49:32 +0100 | [diff] [blame] | 184 | INIT_WORK(&rdev->hotplug_work, radeon_hotplug_work_func); |
Alex Deucher | f122c61 | 2012-03-30 08:59:57 -0400 | [diff] [blame] | 185 | INIT_WORK(&rdev->audio_work, r600_audio_update_hdmi); |
Tejun Heo | 32c87fc | 2011-01-03 14:49:32 +0100 | [diff] [blame] | 186 | |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 187 | spin_lock_init(&rdev->irq.lock); |
Alex Deucher | 9e7b414 | 2010-03-16 17:08:06 -0400 | [diff] [blame] | 188 | r = drm_vblank_init(rdev->ddev, rdev->num_crtc); |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 189 | if (r) { |
| 190 | return r; |
| 191 | } |
Alex Deucher | 3e5cb98 | 2009-10-16 12:21:24 -0400 | [diff] [blame] | 192 | /* enable msi */ |
| 193 | rdev->msi_enabled = 0; |
Alex Deucher | 8f6c25c | 2011-10-25 14:58:49 -0400 | [diff] [blame] | 194 | |
| 195 | if (radeon_msi_ok(rdev)) { |
Alex Deucher | 3e5cb98 | 2009-10-16 12:21:24 -0400 | [diff] [blame] | 196 | int ret = pci_enable_msi(rdev->pdev); |
Alex Deucher | d8f60cf | 2009-12-01 13:43:46 -0500 | [diff] [blame] | 197 | if (!ret) { |
Alex Deucher | 3e5cb98 | 2009-10-16 12:21:24 -0400 | [diff] [blame] | 198 | rdev->msi_enabled = 1; |
Alex Deucher | da7be68 | 2010-08-12 18:05:34 -0400 | [diff] [blame] | 199 | dev_info(rdev->dev, "radeon: using MSI.\n"); |
Alex Deucher | d8f60cf | 2009-12-01 13:43:46 -0500 | [diff] [blame] | 200 | } |
Alex Deucher | 3e5cb98 | 2009-10-16 12:21:24 -0400 | [diff] [blame] | 201 | } |
/* Mark installed before drm_irq_install() so the handler sees a
 * consistent flag; undone below if installation fails. */
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 202 | rdev->irq.installed = true; |
Jerome Glisse | 003e69f | 2010-01-07 15:39:14 +0100 | [diff] [blame] | 203 | r = drm_irq_install(rdev->ddev); |
| 204 | if (r) { |
| 205 | rdev->irq.installed = false; |
| 206 | return r; |
| 207 | } |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 208 | DRM_INFO("radeon: irq initialized.\n"); |
| 209 | return 0; |
| 210 | } |
| 211 | |
/*
 * radeon_irq_kms_fini - tear down the KMS interrupt infrastructure.
 *
 * Reverses radeon_irq_kms_init(): cleans up vblank state, uninstalls the
 * IRQ handler and disables MSI if it was enabled, and finally waits for
 * any in-flight hotplug work to complete so it cannot run after the
 * device is gone.
 */
Jerome Glisse | 003e69f | 2010-01-07 15:39:14 +0100 | [diff] [blame] | 214 | drm_vblank_cleanup(rdev->ddev); |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 215 | if (rdev->irq.installed) { |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 216 | drm_irq_uninstall(rdev->ddev); |
Jerome Glisse | 003e69f | 2010-01-07 15:39:14 +0100 | [diff] [blame] | 217 | rdev->irq.installed = false; |
Alex Deucher | 3e5cb98 | 2009-10-16 12:21:24 -0400 | [diff] [blame] | 218 | if (rdev->msi_enabled) |
| 219 | pci_disable_msi(rdev->pdev); |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 220 | } |
Tejun Heo | 32c87fc | 2011-01-03 14:49:32 +0100 | [diff] [blame] | 221 | flush_work_sync(&rdev->hotplug_work); |
Jerome Glisse | 771fe6b | 2009-06-05 14:42:42 +0200 | [diff] [blame] | 222 | } |
Dave Airlie | 1614f8b | 2009-12-01 16:04:56 +1000 | [diff] [blame] | 223 | |
/*
 * radeon_irq_kms_sw_irq_get - take a reference on a ring's software IRQ.
 *
 * Refcounted enable: only the 0 -> 1 transition of sw_refcount[ring]
 * actually turns the interrupt on and reprograms the hardware, and only
 * while the DRM IRQ handler is installed (irq_enabled).  Protected by
 * rdev->irq.lock.
 */
Alex Deucher | 1b37078 | 2011-11-17 20:13:28 -0500 | [diff] [blame] | 224 | void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev, int ring) |
Dave Airlie | 1614f8b | 2009-12-01 16:04:56 +1000 | [diff] [blame] | 225 | { |
| 226 | unsigned long irqflags; |
| 227 | |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 228 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
Alex Deucher | 1b37078 | 2011-11-17 20:13:28 -0500 | [diff] [blame] | 229 | if (rdev->ddev->irq_enabled && (++rdev->irq.sw_refcount[ring] == 1)) { |
| 230 | rdev->irq.sw_int[ring] = true; |
Dave Airlie | 1614f8b | 2009-12-01 16:04:56 +1000 | [diff] [blame] | 231 | radeon_irq_set(rdev); |
| 232 | } |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 233 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
Dave Airlie | 1614f8b | 2009-12-01 16:04:56 +1000 | [diff] [blame] | 234 | } |
| 235 | |
/*
 * radeon_irq_kms_sw_irq_put - drop a reference on a ring's software IRQ.
 *
 * Counterpart of radeon_irq_kms_sw_irq_get(): only the 1 -> 0 transition
 * disables the interrupt.  The BUG_ON catches an unbalanced put while
 * interrupts are enabled.  Protected by rdev->irq.lock.
 */
Alex Deucher | 1b37078 | 2011-11-17 20:13:28 -0500 | [diff] [blame] | 236 | void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev, int ring) |
Dave Airlie | 1614f8b | 2009-12-01 16:04:56 +1000 | [diff] [blame] | 237 | { |
| 238 | unsigned long irqflags; |
| 239 | |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 240 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
Alex Deucher | 1b37078 | 2011-11-17 20:13:28 -0500 | [diff] [blame] | 241 | BUG_ON(rdev->ddev->irq_enabled && rdev->irq.sw_refcount[ring] <= 0); |
| 242 | if (rdev->ddev->irq_enabled && (--rdev->irq.sw_refcount[ring] == 0)) { |
| 243 | rdev->irq.sw_int[ring] = false; |
Dave Airlie | 1614f8b | 2009-12-01 16:04:56 +1000 | [diff] [blame] | 244 | radeon_irq_set(rdev); |
| 245 | } |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 246 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
Dave Airlie | 1614f8b | 2009-12-01 16:04:56 +1000 | [diff] [blame] | 247 | } |
| 248 | |
/*
 * radeon_irq_kms_pflip_irq_get - take a reference on a CRTC's pageflip IRQ.
 *
 * Silently ignores out-of-range crtc indices.  Refcounted like the sw_irq
 * helpers: only the 0 -> 1 transition of pflip_refcount[crtc] enables the
 * interrupt and reprograms the hardware, under rdev->irq.lock.
 */
Alex Deucher | 6f34be5 | 2010-11-21 10:59:01 -0500 | [diff] [blame] | 249 | void radeon_irq_kms_pflip_irq_get(struct radeon_device *rdev, int crtc) |
| 250 | { |
| 251 | unsigned long irqflags; |
| 252 | |
| 253 | if (crtc < 0 || crtc >= rdev->num_crtc) |
| 254 | return; |
| 255 | |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 256 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
Alex Deucher | 6f34be5 | 2010-11-21 10:59:01 -0500 | [diff] [blame] | 257 | if (rdev->ddev->irq_enabled && (++rdev->irq.pflip_refcount[crtc] == 1)) { |
| 258 | rdev->irq.pflip[crtc] = true; |
| 259 | radeon_irq_set(rdev); |
| 260 | } |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 261 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
Alex Deucher | 6f34be5 | 2010-11-21 10:59:01 -0500 | [diff] [blame] | 262 | } |
| 263 | |
/*
 * radeon_irq_kms_pflip_irq_put - drop a reference on a CRTC's pageflip IRQ.
 *
 * Counterpart of radeon_irq_kms_pflip_irq_get(): out-of-range crtc indices
 * are ignored, an unbalanced put trips the BUG_ON, and only the 1 -> 0
 * refcount transition disables the interrupt, under rdev->irq.lock.
 */
| 264 | void radeon_irq_kms_pflip_irq_put(struct radeon_device *rdev, int crtc) |
| 265 | { |
| 266 | unsigned long irqflags; |
| 267 | |
| 268 | if (crtc < 0 || crtc >= rdev->num_crtc) |
| 269 | return; |
| 270 | |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 271 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
Alex Deucher | 6f34be5 | 2010-11-21 10:59:01 -0500 | [diff] [blame] | 272 | BUG_ON(rdev->ddev->irq_enabled && rdev->irq.pflip_refcount[crtc] <= 0); |
| 273 | if (rdev->ddev->irq_enabled && (--rdev->irq.pflip_refcount[crtc] == 0)) { |
| 274 | rdev->irq.pflip[crtc] = false; |
| 275 | radeon_irq_set(rdev); |
| 276 | } |
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 277 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
Alex Deucher | 6f34be5 | 2010-11-21 10:59:01 -0500 | [diff] [blame] | 278 | } |
| 279 | |
/*
 * radeon_irq_kms_enable_afmt - enable the audio-format (HDMI) interrupt
 * for one afmt block.
 *
 * Not refcounted (unlike the sw_irq/pflip helpers): sets the flag and
 * reprograms the hardware unconditionally, under rdev->irq.lock.
 * NOTE(review): 'block' is not range-checked here -- callers are
 * presumably trusted to pass a valid afmt index; confirm at call sites.
 */
Christian Koenig | fb98257 | 2012-05-17 01:33:30 +0200 | [diff] [blame^] | 280 | void radeon_irq_kms_enable_afmt(struct radeon_device *rdev, int block) |
| 281 | { |
| 282 | unsigned long irqflags; |
| 283 | |
| 284 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
| 285 | rdev->irq.afmt[block] = true; |
| 286 | radeon_irq_set(rdev); |
| 287 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
| 288 | |
| 289 | } |
| 290 | |
/*
 * radeon_irq_kms_disable_afmt - disable the audio-format (HDMI) interrupt
 * for one afmt block.
 *
 * Mirror of radeon_irq_kms_enable_afmt(): clears the flag and reprograms
 * the hardware under rdev->irq.lock; no refcounting, no range check on
 * 'block'.
 */
| 291 | void radeon_irq_kms_disable_afmt(struct radeon_device *rdev, int block) |
| 292 | { |
| 293 | unsigned long irqflags; |
| 294 | |
| 295 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
| 296 | rdev->irq.afmt[block] = false; |
| 297 | radeon_irq_set(rdev); |
| 298 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
| 299 | } |
| 300 | |
/*
 * radeon_irq_kms_enable_hpd - enable hotplug-detect interrupts by mask.
 *
 * hpd_mask has one bit per HPD pin.  For each set bit the corresponding
 * bool in rdev->irq.hpd is OR-ed to true (pins whose mask bit is clear
 * are left untouched, since x |= 0 is a no-op), then the hardware is
 * reprogrammed.  All under rdev->irq.lock.
 */
| 301 | void radeon_irq_kms_enable_hpd(struct radeon_device *rdev, unsigned hpd_mask) |
| 302 | { |
| 303 | unsigned long irqflags; |
| 304 | int i; |
| 305 | |
| 306 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
| 307 | for (i = 0; i < RADEON_MAX_HPD_PINS; ++i) |
| 308 | rdev->irq.hpd[i] |= !!(hpd_mask & (1 << i)); |
| 309 | radeon_irq_set(rdev); |
| 310 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
| 311 | } |
| 312 | |
/*
 * radeon_irq_kms_disable_hpd - disable hotplug-detect interrupts by mask.
 *
 * Mirror of radeon_irq_kms_enable_hpd().  Note the logical-NOT here is
 * intentional, not a missing bitwise '~': hpd[i] is a bool, so AND-ing
 * with !(mask bit) forces it false when the bit is set and leaves it
 * unchanged (x &= 1) when the bit is clear.
 */
| 313 | void radeon_irq_kms_disable_hpd(struct radeon_device *rdev, unsigned hpd_mask) |
| 314 | { |
| 315 | unsigned long irqflags; |
| 316 | int i; |
| 317 | |
| 318 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
| 319 | for (i = 0; i < RADEON_MAX_HPD_PINS; ++i) |
| 320 | rdev->irq.hpd[i] &= !(hpd_mask & (1 << i)); |
| 321 | radeon_irq_set(rdev); |
| 322 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
| 323 | } |
| 324 | |
/*
 * radeon_irq_kms_wait_gui_idle - wait for the GUI engine to go idle.
 *
 * Arms the gui_idle interrupt under rdev->irq.lock, sleeps on
 * rdev->irq.idle_queue until radeon_gui_idle() reports idle or
 * RADEON_WAIT_IDLE_TIMEOUT ms elapse, then disarms the interrupt again
 * (whether or not the wait succeeded).  Returns wait_event_timeout()'s
 * result: 0 on timeout, otherwise the remaining jiffies (>0) on success.
 */
| 325 | int radeon_irq_kms_wait_gui_idle(struct radeon_device *rdev) |
| 326 | { |
| 327 | unsigned long irqflags; |
| 328 | int r; |
| 329 | |
| 330 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
| 331 | rdev->irq.gui_idle = true; |
| 332 | radeon_irq_set(rdev); |
| 333 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
| 334 | |
| 335 | r = wait_event_timeout(rdev->irq.idle_queue, radeon_gui_idle(rdev), |
| 336 | msecs_to_jiffies(RADEON_WAIT_IDLE_TIMEOUT)); |
| 337 | |
| 338 | spin_lock_irqsave(&rdev->irq.lock, irqflags); |
| 339 | rdev->irq.gui_idle = false; |
| 340 | radeon_irq_set(rdev); |
| 341 | spin_unlock_irqrestore(&rdev->irq.lock, irqflags); |
| 342 | return r; |
| 343 | } |