/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Vinit Azad <vinit.azad@intel.com>
 *    Ben Widawsky <ben@bwidawsk.net>
 *    Dave Gordon <david.s.gordon@intel.com>
 *    Alex Dai <yu.dai@intel.com>
 */

#include "intel_guc_fw.h"
#include "i915_drv.h"

#define SKL_FW_MAJOR 9
#define SKL_FW_MINOR 33

#define BXT_FW_MAJOR 9
#define BXT_FW_MINOR 29

#define KBL_FW_MAJOR 9
#define KBL_FW_MINOR 39

#define GUC_FW_PATH(platform, major, minor) \
	"i915/" __stringify(platform) "_guc_ver" __stringify(major) "_" __stringify(minor) ".bin"
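/* e.g. GUC_FW_PATH(skl, 9, 33) expands to "i915/skl_guc_ver9_33.bin" */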

#define I915_SKL_GUC_UCODE GUC_FW_PATH(skl, SKL_FW_MAJOR, SKL_FW_MINOR)
MODULE_FIRMWARE(I915_SKL_GUC_UCODE);

#define I915_BXT_GUC_UCODE GUC_FW_PATH(bxt, BXT_FW_MAJOR, BXT_FW_MINOR)
MODULE_FIRMWARE(I915_BXT_GUC_UCODE);

#define I915_KBL_GUC_UCODE GUC_FW_PATH(kbl, KBL_FW_MAJOR, KBL_FW_MINOR)
MODULE_FIRMWARE(I915_KBL_GUC_UCODE);

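/*
 * Select the firmware path and the wanted major/minor version for the
 * running platform; a path supplied via the guc_firmware_path module
 * parameter takes precedence over the per-platform defaults and does not
 * request any particular firmware version.
 */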
static void guc_fw_select(struct intel_uc_fw *guc_fw)
{
	struct intel_guc *guc = container_of(guc_fw, struct intel_guc, fw);
	struct drm_i915_private *dev_priv = guc_to_i915(guc);

	GEM_BUG_ON(guc_fw->type != INTEL_UC_FW_TYPE_GUC);

	if (!HAS_GUC(dev_priv))
		return;

	if (i915_modparams.guc_firmware_path) {
		guc_fw->path = i915_modparams.guc_firmware_path;
		guc_fw->major_ver_wanted = 0;
		guc_fw->minor_ver_wanted = 0;
	} else if (IS_SKYLAKE(dev_priv)) {
		guc_fw->path = I915_SKL_GUC_UCODE;
		guc_fw->major_ver_wanted = SKL_FW_MAJOR;
		guc_fw->minor_ver_wanted = SKL_FW_MINOR;
	} else if (IS_BROXTON(dev_priv)) {
		guc_fw->path = I915_BXT_GUC_UCODE;
		guc_fw->major_ver_wanted = BXT_FW_MAJOR;
		guc_fw->minor_ver_wanted = BXT_FW_MINOR;
	} else if (IS_KABYLAKE(dev_priv) || IS_COFFEELAKE(dev_priv)) {
		guc_fw->path = I915_KBL_GUC_UCODE;
		guc_fw->major_ver_wanted = KBL_FW_MAJOR;
		guc_fw->minor_ver_wanted = KBL_FW_MINOR;
	} else {
		DRM_WARN("%s: No firmware known for this platform!\n",
			 intel_uc_fw_type_repr(guc_fw->type));
	}
}

/**
 * intel_guc_fw_init_early() - initializes GuC firmware struct
 * @guc: intel_guc struct
 *
 * On platforms with GuC, selects the firmware to be uploaded.
 */
void intel_guc_fw_init_early(struct intel_guc *guc)
{
	struct intel_uc_fw *guc_fw = &guc->fw;

	intel_uc_fw_init(guc_fw, INTEL_UC_FW_TYPE_GUC);
	guc_fw_select(guc_fw);
}

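/*
 * Program the registers that must be set up before the firmware image is
 * transferred: the GuC shim control, the doorbell enable bit and, on Gen9,
 * DOP clock gating and the RC6 entry delay.
 */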
static void guc_prepare_xfer(struct intel_guc *guc)
{
	struct drm_i915_private *dev_priv = guc_to_i915(guc);

	/* Must program this register before loading the ucode with DMA */
	I915_WRITE(GUC_SHIM_CONTROL, GUC_DISABLE_SRAM_INIT_TO_ZEROES |
				     GUC_ENABLE_READ_CACHE_LOGIC |
				     GUC_ENABLE_MIA_CACHING |
				     GUC_ENABLE_READ_CACHE_FOR_SRAM_DATA |
				     GUC_ENABLE_READ_CACHE_FOR_WOPCM_DATA |
				     GUC_ENABLE_MIA_CLOCK_GATING);

	if (IS_GEN9_LP(dev_priv))
		I915_WRITE(GEN9LP_GT_PM_CONFIG, GT_DOORBELL_ENABLE);
	else
		I915_WRITE(GEN9_GT_PM_CONFIG, GT_DOORBELL_ENABLE);

	if (IS_GEN9(dev_priv)) {
		/* DOP Clock Gating Enable for GuC clocks */
		I915_WRITE(GEN7_MISCCPCTL, (GEN8_DOP_CLOCK_GATE_GUC_ENABLE |
					    I915_READ(GEN7_MISCCPCTL)));

		/* allows for 5us (in 10ns units) before GT can go to RC6 */
		I915_WRITE(GUC_ARAT_C6DIS, 0x1FF);
	}
}

/* Copy RSA signature from the fw image to HW for verification */
static int guc_xfer_rsa(struct intel_guc *guc, struct i915_vma *vma)
{
	struct drm_i915_private *dev_priv = guc_to_i915(guc);
	struct intel_uc_fw *guc_fw = &guc->fw;
	struct sg_table *sg = vma->pages;
	u32 rsa[UOS_RSA_SCRATCH_COUNT];
	int i;

	if (sg_pcopy_to_buffer(sg->sgl, sg->nents, rsa, sizeof(rsa),
			       guc_fw->rsa_offset) != sizeof(rsa))
		return -EINVAL;

	for (i = 0; i < UOS_RSA_SCRATCH_COUNT; i++)
		I915_WRITE(UOS_RSA_SCRATCH(i), rsa[i]);

	return 0;
}

/*
 * Transfer the firmware image to RAM for execution by the microcontroller.
 *
 * Architecturally, the DMA engine is bidirectional, and can potentially even
 * transfer between GTT locations. This functionality is left out of the API
 * for now as there is no need for it.
 */
static int guc_xfer_ucode(struct intel_guc *guc, struct i915_vma *vma)
{
	struct drm_i915_private *dev_priv = guc_to_i915(guc);
	struct intel_uc_fw *guc_fw = &guc->fw;
	unsigned long offset;
	u32 status;
	int ret;

	/*
	 * The header plus uCode will be copied to WOPCM via DMA, excluding any
	 * other components
	 */
	I915_WRITE(DMA_COPY_SIZE, guc_fw->header_size + guc_fw->ucode_size);

	/* Set the source address for the new blob */
	offset = guc_ggtt_offset(vma) + guc_fw->header_offset;
	I915_WRITE(DMA_ADDR_0_LOW, lower_32_bits(offset));
	I915_WRITE(DMA_ADDR_0_HIGH, upper_32_bits(offset) & 0xFFFF);

	/*
	 * Set the DMA destination. Current uCode expects the code to be
	 * loaded at 8k; locations below this are used for the stack.
	 */
	I915_WRITE(DMA_ADDR_1_LOW, 0x2000);
	I915_WRITE(DMA_ADDR_1_HIGH, DMA_ADDRESS_SPACE_WOPCM);

	/* Finally start the DMA */
	I915_WRITE(DMA_CTRL, _MASKED_BIT_ENABLE(UOS_MOVE | START_DMA));

	/* Wait for DMA to finish */
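	/* (2 us fast poll, then up to 100 ms of sleeping waits) */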
	ret = __intel_wait_for_register_fw(dev_priv, DMA_CTRL, START_DMA, 0,
					   2, 100, &status);
	DRM_DEBUG_DRIVER("GuC DMA status %#x\n", status);

	return ret;
}

/*
 * Read the GuC status register (GUC_STATUS) and store it in the
 * specified location; then return a boolean indicating whether
 * the value matches either of two values representing completion
 * of the GuC boot process.
 *
 * This is used for polling the GuC status in a wait_for()
 * loop below.
 */
static inline bool guc_ready(struct intel_guc *guc, u32 *status)
{
	struct drm_i915_private *dev_priv = guc_to_i915(guc);
	u32 val = I915_READ(GUC_STATUS);
	u32 uk_val = val & GS_UKERNEL_MASK;

	*status = val;
	return (uk_val == GS_UKERNEL_READY) ||
		((val & GS_MIA_CORE_STATE) && (uk_val == GS_UKERNEL_LAPIC_DONE));
}

static int guc_wait_ucode(struct intel_guc *guc)
{
	u32 status;
	int ret;

	/*
	 * Wait for the GuC to start up.
	 * NB: Docs recommend not using the interrupt for completion.
	 * Measurements indicate this should take no more than 20ms, so a
	 * timeout here indicates that the GuC has failed and is unusable.
	 * (Higher levels of the driver will attempt to fall back to
	 * execlist mode if this happens.)
	 */
	ret = wait_for(guc_ready(guc, &status), 100);
	DRM_DEBUG_DRIVER("GuC status %#x\n", status);

	if ((status & GS_BOOTROM_MASK) == GS_BOOTROM_RSA_FAILED) {
		DRM_ERROR("GuC firmware signature verification failed\n");
		ret = -ENOEXEC;
	}

	return ret;
}

/*
 * Load the GuC firmware blob into the MinuteIA.
 */
static int guc_fw_xfer(struct intel_uc_fw *guc_fw, struct i915_vma *vma)
{
	struct intel_guc *guc = container_of(guc_fw, struct intel_guc, fw);
	struct drm_i915_private *dev_priv = guc_to_i915(guc);
	int ret;

	GEM_BUG_ON(guc_fw->type != INTEL_UC_FW_TYPE_GUC);

	intel_uncore_forcewake_get(dev_priv, FORCEWAKE_ALL);

	guc_prepare_xfer(guc);

	/*
	 * Note that GuC needs the CSS header plus uKernel code to be copied
	 * by the DMA engine in one operation, whereas the RSA signature is
	 * loaded via MMIO.
	 */
	ret = guc_xfer_rsa(guc, vma);
	if (ret)
		DRM_WARN("GuC firmware signature xfer error %d\n", ret);

	ret = guc_xfer_ucode(guc, vma);
	if (ret)
		DRM_WARN("GuC firmware code xfer error %d\n", ret);

	ret = guc_wait_ucode(guc);
	if (ret)
		DRM_ERROR("GuC firmware xfer error %d\n", ret);

	intel_uncore_forcewake_put(dev_priv, FORCEWAKE_ALL);

	return ret;
}

/**
 * intel_guc_fw_upload() - finish preparing the GuC for activity
 * @guc: intel_guc structure
 *
 * Called during driver loading and also after a GPU reset.
 *
 * The main action required here is to load the GuC uCode into the device.
 * The firmware image should have already been fetched into memory by the
 * earlier call to intel_guc_init(), so here we need only check that the
 * fetch succeeded, and then transfer the image to the h/w.
 *
 * Return: non-zero code on error
 */
int intel_guc_fw_upload(struct intel_guc *guc)
{
	return intel_uc_fw_upload(&guc->fw, guc_fw_xfer);
}