/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 */
#include <linux/firmware.h>
#include "i915_drv.h"
#include "i915_reg.h"

/**
 * DOC: csr support for dmc
 *
 * Display Context Save and Restore (CSR) firmware support was added from
 * gen9 onwards to drive the newly added DMC (Display Microcontroller) in
 * the display engine. The DMC saves and restores the state of the display
 * engine when it enters a low-power state and comes back to normal.
 *
 * The firmware loading status will be one of the following states:
 * FW_UNINITIALIZED, FW_LOADED or FW_FAILED.
 *
 * Once the firmware is written into the registers, the status moves from
 * FW_UNINITIALIZED to FW_LOADED; on any error the status moves to FW_FAILED.
 */

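/*
 * Rough layout of the firmware image, as walked by parse_csr_fw() below:
 * an intel_css_header, followed by an intel_package_header whose fw_info[]
 * table maps each supported stepping to an offset (relative to the end of
 * the package header); at that offset sits an intel_dmc_header followed by
 * the DMC payload of fw_size dwords.
 */
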
#define I915_CSR_SKL "i915/skl_dmc_ver1.bin"
#define I915_CSR_BXT "i915/bxt_dmc_ver1.bin"

#define FIRMWARE_URL "https://01.org/linuxgraphics/intel-linux-graphics-firmwares"

MODULE_FIRMWARE(I915_CSR_SKL);
MODULE_FIRMWARE(I915_CSR_BXT);

#define SKL_CSR_VERSION_REQUIRED	CSR_VERSION(1, 23)

#define CSR_MAX_FW_SIZE			0x2FFF
#define CSR_DEFAULT_FW_OFFSET		0xFFFFFFFF

struct intel_css_header {
	/* 0x09 for DMC */
	uint32_t module_type;

	/* Includes the DMC specific header in dwords */
	uint32_t header_len;

	/* Always 0x10000 */
	uint32_t header_ver;

	/* Not used */
	uint32_t module_id;

	/* Not used */
	uint32_t module_vendor;

	/* in YYYYMMDD format */
	uint32_t date;

	/* Size in dwords (CSS_Headerlen + PackageHeaderLen + dmc FWsLen)/4 */
	uint32_t size;

	/* Not used */
	uint32_t key_size;

	/* Not used */
	uint32_t modulus_size;

	/* Not used */
	uint32_t exponent_size;

	/* Not used */
	uint32_t reserved1[12];

	/* Major/Minor version */
	uint32_t version;

	/* Not used */
	uint32_t reserved2[8];

	/* Not used */
	uint32_t kernel_header_info;
} __packed;

struct intel_fw_info {
	uint16_t reserved1;

	/* Stepping (A, B, C, ..., *). * is a wildcard */
	char stepping;

	/* Sub-stepping (0, 1, ..., *). * is a wildcard */
	char substepping;

	uint32_t offset;
	uint32_t reserved2;
} __packed;

struct intel_package_header {
	/* DMC container header length in dwords */
	unsigned char header_len;

	/* Always 0x01 */
	unsigned char header_ver;

	unsigned char reserved[10];

	/* Number of valid entries in the FWInfo array below */
	uint32_t num_entries;

	struct intel_fw_info fw_info[20];
} __packed;

struct intel_dmc_header {
	/* Always 0x40403E3E */
	uint32_t signature;

	/* DMC binary header length */
	unsigned char header_len;

	/* 0x01 */
	unsigned char header_ver;

	/* Reserved */
	uint16_t dmcc_ver;

	/* Major, Minor */
	uint32_t project;

	/* Firmware program size (excluding header) in dwords */
	uint32_t fw_size;

	/* Major/Minor version */
	uint32_t fw_version;

	/* Number of valid MMIO cycles present. */
	uint32_t mmio_count;

	/* MMIO address */
	uint32_t mmioaddr[8];

	/* MMIO data */
	uint32_t mmiodata[8];

	/* FW filename */
	unsigned char dfile[32];

	uint32_t reserved1[2];
} __packed;

struct stepping_info {
	char stepping;
	char substepping;
};

/*
 * Kabylake is derived from Skylake H0, so SKL H0
 * is the right firmware for KBL A0 (revid 0).
 */
static const struct stepping_info kbl_stepping_info[] = {
	{'H', '0'}, {'I', '0'}
};

static const struct stepping_info skl_stepping_info[] = {
	{'A', '0'}, {'B', '0'}, {'C', '0'},
	{'D', '0'}, {'E', '0'}, {'F', '0'},
	{'G', '0'}, {'H', '0'}, {'I', '0'},
	{'J', '0'}, {'K', '0'}
};

static const struct stepping_info bxt_stepping_info[] = {
	{'A', '0'}, {'A', '1'}, {'A', '2'},
	{'B', '0'}, {'B', '1'}, {'B', '2'}
};

static const struct stepping_info no_stepping_info = { '*', '*' };

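/*
 * Return the stepping/substepping for the running platform revision, or the
 * wildcard entry when the platform or revision is not in the tables above.
 * The wildcard matches the '*' entries in the firmware's fw_info[] table.
 */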
static const struct stepping_info *
intel_get_stepping_info(struct drm_i915_private *dev_priv)
{
	const struct stepping_info *si;
	unsigned int size;

	if (IS_KABYLAKE(dev_priv)) {
		size = ARRAY_SIZE(kbl_stepping_info);
		si = kbl_stepping_info;
	} else if (IS_SKYLAKE(dev_priv)) {
		size = ARRAY_SIZE(skl_stepping_info);
		si = skl_stepping_info;
	} else if (IS_BROXTON(dev_priv)) {
		size = ARRAY_SIZE(bxt_stepping_info);
		si = bxt_stepping_info;
	} else {
		size = 0;
	}

	if (INTEL_REVID(dev_priv) < size)
		return si + INTEL_REVID(dev_priv);

	return &no_stepping_info;
}

static void gen9_set_dc_state_debugmask(struct drm_i915_private *dev_priv)
{
	uint32_t val, mask;

	mask = DC_STATE_DEBUG_MASK_MEMORY_UP;

	if (IS_BROXTON(dev_priv))
		mask |= DC_STATE_DEBUG_MASK_CORES;

	/* The below bit doesn't need to be cleared ever afterwards */
	val = I915_READ(DC_STATE_DEBUG);
	if ((val & mask) != mask) {
		val |= mask;
		I915_WRITE(DC_STATE_DEBUG, val);
		POSTING_READ(DC_STATE_DEBUG);
	}
}

/**
 * intel_csr_load_program() - write the firmware from memory to the registers.
 * @dev_priv: i915 drm device.
 *
 * CSR firmware is read from a .bin file and kept in internal memory one time.
 * Every time the display comes back from a low-power state this function is
 * called to copy the firmware from internal memory to the registers.
 */
void intel_csr_load_program(struct drm_i915_private *dev_priv)
{
	u32 *payload = dev_priv->csr.dmc_payload;
	uint32_t i, fw_size;

	if (!IS_GEN9(dev_priv)) {
		DRM_ERROR("No CSR support available for this platform\n");
		return;
	}

	if (!dev_priv->csr.dmc_payload) {
		DRM_ERROR("Tried to program CSR with empty payload\n");
		return;
	}

	fw_size = dev_priv->csr.dmc_fw_size;
	for (i = 0; i < fw_size; i++)
		I915_WRITE(CSR_PROGRAM(i), payload[i]);

	for (i = 0; i < dev_priv->csr.mmio_count; i++) {
		I915_WRITE(dev_priv->csr.mmioaddr[i],
			   dev_priv->csr.mmiodata[i]);
	}

	dev_priv->csr.dc_state = 0;

	gen9_set_dc_state_debugmask(dev_priv);
}

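/*
 * parse_csr_fw() validates the CSS, package and DMC headers, caches the
 * firmware version and the MMIO address/data pairs in dev_priv->csr, and
 * returns a freshly allocated copy of the DMC payload (NULL on any
 * validation or allocation failure). The caller owns the returned buffer;
 * it is freed in intel_csr_ucode_fini().
 */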
static uint32_t *parse_csr_fw(struct drm_i915_private *dev_priv,
			      const struct firmware *fw)
{
	struct intel_css_header *css_header;
	struct intel_package_header *package_header;
	struct intel_dmc_header *dmc_header;
	struct intel_csr *csr = &dev_priv->csr;
	const struct stepping_info *si = intel_get_stepping_info(dev_priv);
	uint32_t dmc_offset = CSR_DEFAULT_FW_OFFSET, readcount = 0, nbytes;
	uint32_t i;
	uint32_t *dmc_payload;

	if (!fw)
		return NULL;

	/* Extract CSS Header information */
	css_header = (struct intel_css_header *)fw->data;
	if (sizeof(struct intel_css_header) !=
	    (css_header->header_len * 4)) {
		DRM_ERROR("Firmware has wrong CSS header length %u bytes\n",
			  (css_header->header_len * 4));
		return NULL;
	}

	csr->version = css_header->version;

	if ((IS_SKYLAKE(dev_priv) || IS_KABYLAKE(dev_priv)) &&
	    csr->version < SKL_CSR_VERSION_REQUIRED) {
		DRM_INFO("Refusing to load old Skylake DMC firmware v%u.%u,"
			 " please upgrade to v%u.%u or later"
			 " [" FIRMWARE_URL "].\n",
			 CSR_VERSION_MAJOR(csr->version),
			 CSR_VERSION_MINOR(csr->version),
			 CSR_VERSION_MAJOR(SKL_CSR_VERSION_REQUIRED),
			 CSR_VERSION_MINOR(SKL_CSR_VERSION_REQUIRED));
		return NULL;
	}

	readcount += sizeof(struct intel_css_header);

	/* Extract Package Header information */
	package_header = (struct intel_package_header *)
		&fw->data[readcount];
	if (sizeof(struct intel_package_header) !=
	    (package_header->header_len * 4)) {
		DRM_ERROR("Firmware has wrong package header length %u bytes\n",
			  (package_header->header_len * 4));
		return NULL;
	}
	readcount += sizeof(struct intel_package_header);

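	/*
	 * Match precedence in the loop below: an entry whose stepping matches
	 * (exactly, or with a wildcard substepping) is taken as soon as it is
	 * found, while a fully wildcarded entry ('*', '*') is only remembered
	 * as a fallback and can still be overridden by a later, more specific
	 * match.
	 */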
	/* Search for dmc_offset to find the firmware binary. */
	for (i = 0; i < package_header->num_entries; i++) {
		if (package_header->fw_info[i].substepping == '*' &&
		    si->stepping == package_header->fw_info[i].stepping) {
			dmc_offset = package_header->fw_info[i].offset;
			break;
		} else if (si->stepping == package_header->fw_info[i].stepping &&
			si->substepping == package_header->fw_info[i].substepping) {
			dmc_offset = package_header->fw_info[i].offset;
			break;
		} else if (package_header->fw_info[i].stepping == '*' &&
			   package_header->fw_info[i].substepping == '*')
			dmc_offset = package_header->fw_info[i].offset;
	}
	if (dmc_offset == CSR_DEFAULT_FW_OFFSET) {
		DRM_ERROR("Firmware not supported for %c stepping\n",
			  si->stepping);
		return NULL;
	}
	readcount += dmc_offset;

	/* Extract dmc_header information. */
	dmc_header = (struct intel_dmc_header *)&fw->data[readcount];
	if (sizeof(struct intel_dmc_header) != (dmc_header->header_len)) {
		DRM_ERROR("Firmware has wrong dmc header length %u bytes\n",
			  (dmc_header->header_len));
		return NULL;
	}
	readcount += sizeof(struct intel_dmc_header);

	/* Cache the dmc header info. */
	if (dmc_header->mmio_count > ARRAY_SIZE(csr->mmioaddr)) {
		DRM_ERROR("Firmware has wrong mmio count %u\n",
			  dmc_header->mmio_count);
		return NULL;
	}
	csr->mmio_count = dmc_header->mmio_count;
	for (i = 0; i < dmc_header->mmio_count; i++) {
		if (dmc_header->mmioaddr[i] < CSR_MMIO_START_RANGE ||
		    dmc_header->mmioaddr[i] > CSR_MMIO_END_RANGE) {
			DRM_ERROR("Firmware has wrong mmio address 0x%x\n",
				  dmc_header->mmioaddr[i]);
			return NULL;
		}
		csr->mmioaddr[i] = _MMIO(dmc_header->mmioaddr[i]);
		csr->mmiodata[i] = dmc_header->mmiodata[i];
	}

	/* fw_size is in dwords, so multiply by 4 to convert to bytes. */
	nbytes = dmc_header->fw_size * 4;
	if (nbytes > CSR_MAX_FW_SIZE) {
		DRM_ERROR("CSR firmware too big (%u bytes)\n", nbytes);
		return NULL;
	}
	csr->dmc_fw_size = dmc_header->fw_size;

	dmc_payload = kmalloc(nbytes, GFP_KERNEL);
	if (!dmc_payload) {
		DRM_ERROR("Memory allocation failed for dmc payload\n");
		return NULL;
	}

	return memcpy(dmc_payload, &fw->data[readcount], nbytes);
}

static void csr_load_work_fn(struct work_struct *work)
{
	struct drm_i915_private *dev_priv;
	struct intel_csr *csr;
	const struct firmware *fw;
	int ret;

	dev_priv = container_of(work, typeof(*dev_priv), csr.work);
	csr = &dev_priv->csr;

	ret = request_firmware(&fw, dev_priv->csr.fw_path,
			       &dev_priv->dev->pdev->dev);
	if (fw)
		dev_priv->csr.dmc_payload = parse_csr_fw(dev_priv, fw);

	if (dev_priv->csr.dmc_payload) {
		intel_csr_load_program(dev_priv);

		intel_display_power_put(dev_priv, POWER_DOMAIN_INIT);

		DRM_INFO("Finished loading %s (v%u.%u)\n",
			 dev_priv->csr.fw_path,
			 CSR_VERSION_MAJOR(csr->version),
			 CSR_VERSION_MINOR(csr->version));
	} else {
		dev_notice(dev_priv->dev->dev,
			   "Failed to load DMC firmware"
			   " [" FIRMWARE_URL "],"
			   " disabling runtime power management.\n");
	}

	release_firmware(fw);
}

/**
 * intel_csr_ucode_init() - initialize the firmware loading.
 * @dev_priv: i915 drm device.
 *
 * This function is called at the time of loading the display driver to read
 * the firmware from a .bin file and copy it into internal memory.
 */
void intel_csr_ucode_init(struct drm_i915_private *dev_priv)
{
	struct intel_csr *csr = &dev_priv->csr;

	INIT_WORK(&dev_priv->csr.work, csr_load_work_fn);

	if (!HAS_CSR(dev_priv))
		return;

	if (IS_SKYLAKE(dev_priv) || IS_KABYLAKE(dev_priv))
		csr->fw_path = I915_CSR_SKL;
	else if (IS_BROXTON(dev_priv))
		csr->fw_path = I915_CSR_BXT;
	else {
		DRM_ERROR("Unexpected: no known CSR firmware for platform\n");
		return;
	}

	DRM_DEBUG_KMS("Loading %s\n", csr->fw_path);

	/*
	 * Obtain a runtime pm reference, until CSR is loaded,
	 * to avoid entering runtime-suspend.
	 */
	intel_display_power_get(dev_priv, POWER_DOMAIN_INIT);

	schedule_work(&dev_priv->csr.work);
}

/**
 * intel_csr_ucode_fini() - unload the CSR firmware.
 * @dev_priv: i915 drm device.
 *
 * Firmware unloading includes freeing the internal memory and resetting the
 * firmware loading status.
 */
void intel_csr_ucode_fini(struct drm_i915_private *dev_priv)
{
	if (!HAS_CSR(dev_priv))
		return;

	flush_work(&dev_priv->csr.work);

	kfree(dev_priv->csr.dmc_payload);
}