Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 1 | /* |
| 2 | * Copyright © 2014 Intel Corporation |
| 3 | * |
| 4 | * Permission is hereby granted, free of charge, to any person obtaining a |
| 5 | * copy of this software and associated documentation files (the "Software"), |
| 6 | * to deal in the Software without restriction, including without limitation |
| 7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
| 8 | * and/or sell copies of the Software, and to permit persons to whom the |
| 9 | * Software is furnished to do so, subject to the following conditions: |
| 10 | * |
| 11 | * The above copyright notice and this permission notice (including the next |
| 12 | * paragraph) shall be included in all copies or substantial portions of the |
| 13 | * Software. |
| 14 | * |
| 15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
| 16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
| 18 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
| 19 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
| 20 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
| 21 | * IN THE SOFTWARE. |
| 22 | * |
| 23 | */ |
| 24 | #include <linux/firmware.h> |
| 25 | #include "i915_drv.h" |
| 26 | #include "i915_reg.h" |
| 27 | |
/**
 * DOC: csr support for dmc
 *
 * Display Context Save and Restore (CSR) firmware support added from gen9
 * onwards to drive newly added DMC (Display microcontroller) in display
 * engine to save and restore the state of display engine when it enters
 * a low-power state and comes back to normal.
 *
 * Firmware loading status will be one of the below states: FW_UNINITIALIZED,
 * FW_LOADED, FW_FAILED.
 *
 * Once the firmware is written into the registers, status will be moved from
 * FW_UNINITIALIZED to FW_LOADED, and for any erroneous condition status will
 * be moved to FW_FAILED.
 */
| 43 | |
/* Firmware image names, resolved via the kernel firmware loader search path. */
#define I915_CSR_SKL "i915/skl_dmc_ver1.bin"
#define I915_CSR_BXT "i915/bxt_dmc_ver1.bin"

MODULE_FIRMWARE(I915_CSR_SKL);
MODULE_FIRMWARE(I915_CSR_BXT);

/* Oldest SKL/KBL DMC firmware version accepted; older blobs are refused. */
#define SKL_CSR_VERSION_REQUIRED CSR_VERSION(1, 23)

/*
 * Upper bound compared against the payload size in bytes in parse_csr_fw().
 * NOTE(review): 0x2FFF looks like a dword-count style value — confirm the
 * limit is really intended to be in bytes.
 */
#define CSR_MAX_FW_SIZE 0x2FFF
/* Sentinel meaning "no matching FW info entry found for this stepping". */
#define CSR_DEFAULT_FW_OFFSET 0xFFFFFFFF
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 54 | |
/*
 * CSS header: the first section of the firmware image. Lengths/sizes are in
 * dwords unless noted otherwise. Layout is fixed by the firmware format —
 * do not reorder or resize fields.
 */
struct intel_css_header {
	/* 0x09 for DMC */
	uint32_t module_type;

	/* Includes the DMC specific header in dwords */
	uint32_t header_len;

	/* always value would be 0x10000 */
	uint32_t header_ver;

	/* Not used */
	uint32_t module_id;

	/* Not used */
	uint32_t module_vendor;

	/* in YYYYMMDD format */
	uint32_t date;

	/* Size in dwords (CSS_Headerlen + PackageHeaderLen + dmc FWsLen)/4 */
	uint32_t size;

	/* Not used */
	uint32_t key_size;

	/* Not used */
	uint32_t modulus_size;

	/* Not used */
	uint32_t exponent_size;

	/* Not used */
	uint32_t reserved1[12];

	/* Major Minor */
	uint32_t version;

	/* Not used */
	uint32_t reserved2[8];

	/* Not used */
	uint32_t kernel_header_info;
} __packed;
| 98 | |
/*
 * One per-stepping entry in the package header's fw_info[] table; maps a
 * hardware stepping to the byte offset of its DMC binary within the image.
 */
struct intel_fw_info {
	uint16_t reserved1;

	/* Stepping (A, B, C, ..., *). * is a wildcard */
	char stepping;

	/* Sub-stepping (0, 1, ..., *). * is a wildcard */
	char substepping;

	/* Offset of the matching DMC header/payload, relative to package end */
	uint32_t offset;
	uint32_t reserved2;
} __packed;
| 111 | |
/*
 * Package header: follows the CSS header and carries the table of
 * per-stepping firmware entries searched by parse_csr_fw().
 */
struct intel_package_header {
	/* DMC container header length in dwords */
	unsigned char header_len;

	/* always value would be 0x01 */
	unsigned char header_ver;

	unsigned char reserved[10];

	/* Number of valid entries in the FWInfo array below */
	uint32_t num_entries;

	struct intel_fw_info fw_info[20];
} __packed;
| 126 | |
/*
 * DMC header: immediately precedes the actual firmware payload. The
 * mmioaddr/mmiodata pairs are cached and replayed by
 * intel_csr_load_program() after the payload itself is written.
 */
struct intel_dmc_header {
	/* always value would be 0x40403E3E */
	uint32_t signature;

	/* DMC binary header length */
	unsigned char header_len;

	/* 0x01 */
	unsigned char header_ver;

	/* Reserved */
	uint16_t dmcc_ver;

	/* Major, Minor */
	uint32_t project;

	/* Firmware program size (excluding header) in dwords */
	uint32_t fw_size;

	/* Major Minor version */
	uint32_t fw_version;

	/* Number of valid MMIO cycles present. */
	uint32_t mmio_count;

	/* MMIO address */
	uint32_t mmioaddr[8];

	/* MMIO data */
	uint32_t mmiodata[8];

	/* FW filename */
	unsigned char dfile[32];

	uint32_t reserved1[2];
} __packed;
| 163 | |
/* Hardware stepping/substepping pair, matched against fw_info entries. */
struct stepping_info {
	char stepping;
	char substepping;
};
| 168 | |
/*
 * Kabylake derivated from Skylake H0, so SKL H0
 * is the right firmware for KBL A0 (revid 0).
 * Table is indexed by INTEL_REVID() in intel_get_stepping_info().
 */
static const struct stepping_info kbl_stepping_info[] = {
	{'H', '0'}, {'I', '0'}
};
| 176 | |
/* Skylake steppings, indexed by INTEL_REVID(). */
static const struct stepping_info skl_stepping_info[] = {
	{'A', '0'}, {'B', '0'}, {'C', '0'},
	{'D', '0'}, {'E', '0'}, {'F', '0'},
	{'G', '0'}, {'H', '0'}, {'I', '0'}
};
| 182 | |
/* Broxton steppings, indexed by INTEL_REVID(). */
static const struct stepping_info bxt_stepping_info[] = {
	{'A', '0'}, {'A', '1'}, {'A', '2'},
	{'B', '0'}, {'B', '1'}, {'B', '2'}
};
| 187 | |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 188 | static const struct stepping_info *intel_get_stepping_info(struct drm_device *dev) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 189 | { |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 190 | const struct stepping_info *si; |
| 191 | unsigned int size; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 192 | |
Rodrigo Vivi | a25c9f0 | 2015-12-09 07:51:59 -0800 | [diff] [blame] | 193 | if (IS_KABYLAKE(dev)) { |
| 194 | size = ARRAY_SIZE(kbl_stepping_info); |
| 195 | si = kbl_stepping_info; |
| 196 | } else if (IS_SKYLAKE(dev)) { |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 197 | size = ARRAY_SIZE(skl_stepping_info); |
| 198 | si = skl_stepping_info; |
| 199 | } else if (IS_BROXTON(dev)) { |
| 200 | size = ARRAY_SIZE(bxt_stepping_info); |
| 201 | si = bxt_stepping_info; |
| 202 | } else { |
| 203 | return NULL; |
| 204 | } |
| 205 | |
| 206 | if (INTEL_REVID(dev) < size) |
| 207 | return si + INTEL_REVID(dev); |
| 208 | |
| 209 | return NULL; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 210 | } |
| 211 | |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 212 | /** |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 213 | * intel_csr_load_program() - write the firmware from memory to register. |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 214 | * @dev_priv: i915 drm device. |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 215 | * |
| 216 | * CSR firmware is read from a .bin file and kept in internal memory one time. |
| 217 | * Everytime display comes back from low power state this function is called to |
| 218 | * copy the firmware from internal memory to registers. |
| 219 | */ |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 220 | void intel_csr_load_program(struct drm_i915_private *dev_priv) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 221 | { |
Animesh Manna | a7f749f | 2015-08-03 21:55:32 +0530 | [diff] [blame] | 222 | u32 *payload = dev_priv->csr.dmc_payload; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 223 | uint32_t i, fw_size; |
| 224 | |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 225 | if (!IS_GEN9(dev_priv)) { |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 226 | DRM_ERROR("No CSR support available for this platform\n"); |
| 227 | return; |
| 228 | } |
| 229 | |
Patrik Jakobsson | fc131bf | 2015-11-09 16:48:16 +0100 | [diff] [blame] | 230 | if (!dev_priv->csr.dmc_payload) { |
| 231 | DRM_ERROR("Tried to program CSR with empty payload\n"); |
Animesh Manna | 4b7ab5f | 2015-08-26 01:36:05 +0530 | [diff] [blame] | 232 | return; |
Patrik Jakobsson | fc131bf | 2015-11-09 16:48:16 +0100 | [diff] [blame] | 233 | } |
Animesh Manna | 4b7ab5f | 2015-08-26 01:36:05 +0530 | [diff] [blame] | 234 | |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 235 | fw_size = dev_priv->csr.dmc_fw_size; |
| 236 | for (i = 0; i < fw_size; i++) |
Ville Syrjälä | d2aa5ae | 2015-09-18 20:03:23 +0300 | [diff] [blame] | 237 | I915_WRITE(CSR_PROGRAM(i), payload[i]); |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 238 | |
| 239 | for (i = 0; i < dev_priv->csr.mmio_count; i++) { |
| 240 | I915_WRITE(dev_priv->csr.mmioaddr[i], |
Daniel Vetter | f98f70d | 2015-10-28 23:58:59 +0200 | [diff] [blame] | 241 | dev_priv->csr.mmiodata[i]); |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 242 | } |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 243 | } |
| 244 | |
/*
 * parse_csr_fw() - parse and validate a raw CSR firmware image.
 * @dev_priv: i915 device private; version/MMIO info is cached in its csr.
 * @fw: firmware blob from request_firmware(), may be NULL.
 *
 * Walks the image's three sections in order (CSS header, package header with
 * its per-stepping FW info table, DMC header) and returns a kmalloc'ed copy
 * of the DMC payload — the caller owns it and must kfree() it — or NULL on
 * any validation failure.
 */
static uint32_t *parse_csr_fw(struct drm_i915_private *dev_priv,
			      const struct firmware *fw)
{
	struct drm_device *dev = dev_priv->dev;
	struct intel_css_header *css_header;
	struct intel_package_header *package_header;
	struct intel_dmc_header *dmc_header;
	struct intel_csr *csr = &dev_priv->csr;
	const struct stepping_info *stepping_info = intel_get_stepping_info(dev);
	char stepping, substepping;
	/* readcount tracks our byte position within fw->data as we parse. */
	uint32_t dmc_offset = CSR_DEFAULT_FW_OFFSET, readcount = 0, nbytes;
	uint32_t i;
	uint32_t *dmc_payload;

	if (!fw)
		return NULL;

	if (!stepping_info) {
		DRM_ERROR("Unknown stepping info, firmware loading failed\n");
		return NULL;
	}

	stepping = stepping_info->stepping;
	substepping = stepping_info->substepping;

	/* Extract CSS Header information*/
	css_header = (struct intel_css_header *)fw->data;
	if (sizeof(struct intel_css_header) !=
	    (css_header->header_len * 4)) {
		DRM_ERROR("Firmware has wrong CSS header length %u bytes\n",
			  (css_header->header_len * 4));
		return NULL;
	}

	csr->version = css_header->version;

	/* Refuse known-old SKL/KBL firmware before doing any further work. */
	if ((IS_SKYLAKE(dev) || IS_KABYLAKE(dev)) &&
	    csr->version < SKL_CSR_VERSION_REQUIRED) {
		DRM_INFO("Refusing to load old Skylake DMC firmware v%u.%u,"
			 " please upgrade to v%u.%u or later"
			 " [https://01.org/linuxgraphics/intel-linux-graphics-firmwares].\n",
			 CSR_VERSION_MAJOR(csr->version),
			 CSR_VERSION_MINOR(csr->version),
			 CSR_VERSION_MAJOR(SKL_CSR_VERSION_REQUIRED),
			 CSR_VERSION_MINOR(SKL_CSR_VERSION_REQUIRED));
		return NULL;
	}

	readcount += sizeof(struct intel_css_header);

	/* Extract Package Header information*/
	package_header = (struct intel_package_header *)
		&fw->data[readcount];
	if (sizeof(struct intel_package_header) !=
	    (package_header->header_len * 4)) {
		DRM_ERROR("Firmware has wrong package header length %u bytes\n",
			  (package_header->header_len * 4));
		return NULL;
	}
	readcount += sizeof(struct intel_package_header);

	/*
	 * Search for dmc_offset to find firware binary.
	 * Exact or substepping-wildcard matches break out immediately; a
	 * full '*'/'*' wildcard entry only records a fallback offset and
	 * keeps scanning, so a later specific match can still win.
	 */
	for (i = 0; i < package_header->num_entries; i++) {
		if (package_header->fw_info[i].substepping == '*' &&
		    stepping == package_header->fw_info[i].stepping) {
			dmc_offset = package_header->fw_info[i].offset;
			break;
		} else if (stepping == package_header->fw_info[i].stepping &&
			substepping == package_header->fw_info[i].substepping) {
			dmc_offset = package_header->fw_info[i].offset;
			break;
		} else if (package_header->fw_info[i].stepping == '*' &&
			   package_header->fw_info[i].substepping == '*')
			dmc_offset = package_header->fw_info[i].offset;
	}
	if (dmc_offset == CSR_DEFAULT_FW_OFFSET) {
		DRM_ERROR("Firmware not supported for %c stepping\n", stepping);
		return NULL;
	}
	readcount += dmc_offset;

	/* Extract dmc_header information. */
	dmc_header = (struct intel_dmc_header *)&fw->data[readcount];
	if (sizeof(struct intel_dmc_header) != (dmc_header->header_len)) {
		DRM_ERROR("Firmware has wrong dmc header length %u bytes\n",
			  (dmc_header->header_len));
		return NULL;
	}
	readcount += sizeof(struct intel_dmc_header);

	/* Cache the dmc header info. */
	if (dmc_header->mmio_count > ARRAY_SIZE(csr->mmioaddr)) {
		DRM_ERROR("Firmware has wrong mmio count %u\n",
			  dmc_header->mmio_count);
		return NULL;
	}
	csr->mmio_count = dmc_header->mmio_count;
	for (i = 0; i < dmc_header->mmio_count; i++) {
		/* Only addresses inside the CSR MMIO window are acceptable. */
		if (dmc_header->mmioaddr[i] < CSR_MMIO_START_RANGE ||
		    dmc_header->mmioaddr[i] > CSR_MMIO_END_RANGE) {
			DRM_ERROR(" Firmware has wrong mmio address 0x%x\n",
				  dmc_header->mmioaddr[i]);
			return NULL;
		}
		csr->mmioaddr[i] = _MMIO(dmc_header->mmioaddr[i]);
		csr->mmiodata[i] = dmc_header->mmiodata[i];
	}

	/* fw_size is in dwords, so multiplied by 4 to convert into bytes. */
	nbytes = dmc_header->fw_size * 4;
	if (nbytes > CSR_MAX_FW_SIZE) {
		DRM_ERROR("CSR firmware too big (%u) bytes\n", nbytes);
		return NULL;
	}
	csr->dmc_fw_size = dmc_header->fw_size;

	dmc_payload = kmalloc(nbytes, GFP_KERNEL);
	if (!dmc_payload) {
		DRM_ERROR("Memory allocation failed for dmc payload\n");
		return NULL;
	}

	memcpy(dmc_payload, &fw->data[readcount], nbytes);

	return dmc_payload;
}
| 371 | |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 372 | static void csr_load_work_fn(struct work_struct *work) |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 373 | { |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 374 | struct drm_i915_private *dev_priv; |
| 375 | struct intel_csr *csr; |
| 376 | const struct firmware *fw; |
| 377 | int ret; |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 378 | |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 379 | dev_priv = container_of(work, typeof(*dev_priv), csr.work); |
| 380 | csr = &dev_priv->csr; |
| 381 | |
| 382 | ret = request_firmware(&fw, dev_priv->csr.fw_path, |
| 383 | &dev_priv->dev->pdev->dev); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 384 | if (!fw) |
| 385 | goto out; |
| 386 | |
| 387 | dev_priv->csr.dmc_payload = parse_csr_fw(dev_priv, fw); |
| 388 | if (!dev_priv->csr.dmc_payload) |
| 389 | goto out; |
| 390 | |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 391 | /* load csr program during system boot, as needed for DC states */ |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 392 | intel_csr_load_program(dev_priv); |
Suketu Shah | dc17430 | 2015-04-17 19:46:16 +0530 | [diff] [blame] | 393 | |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 394 | out: |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 395 | if (dev_priv->csr.dmc_payload) { |
Daniel Vetter | 01a6908 | 2015-10-28 23:58:56 +0200 | [diff] [blame] | 396 | intel_display_power_put(dev_priv, POWER_DOMAIN_INIT); |
Mika Kuoppala | 9c5308e | 2015-10-30 17:52:16 +0200 | [diff] [blame] | 397 | |
| 398 | DRM_INFO("Finished loading %s (v%u.%u)\n", |
| 399 | dev_priv->csr.fw_path, |
| 400 | CSR_VERSION_MAJOR(csr->version), |
| 401 | CSR_VERSION_MINOR(csr->version)); |
| 402 | } else { |
Daniel Vetter | c729ed8 | 2015-10-28 23:59:00 +0200 | [diff] [blame] | 403 | DRM_ERROR("Failed to load DMC firmware, disabling rpm\n"); |
Mika Kuoppala | 9c5308e | 2015-10-30 17:52:16 +0200 | [diff] [blame] | 404 | } |
| 405 | |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 406 | release_firmware(fw); |
| 407 | } |
| 408 | |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 409 | /** |
| 410 | * intel_csr_ucode_init() - initialize the firmware loading. |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 411 | * @dev_priv: i915 drm device. |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 412 | * |
| 413 | * This function is called at the time of loading the display driver to read |
| 414 | * firmware from a .bin file and copied into a internal memory. |
| 415 | */ |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 416 | void intel_csr_ucode_init(struct drm_i915_private *dev_priv) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 417 | { |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 418 | struct intel_csr *csr = &dev_priv->csr; |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 419 | |
| 420 | INIT_WORK(&dev_priv->csr.work, csr_load_work_fn); |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 421 | |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 422 | if (!HAS_CSR(dev_priv)) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 423 | return; |
| 424 | |
Rodrigo Vivi | 8d7a1c4 | 2016-01-07 16:49:39 -0800 | [diff] [blame^] | 425 | if (IS_SKYLAKE(dev_priv) || IS_KABYLAKE(dev_priv)) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 426 | csr->fw_path = I915_CSR_SKL; |
Animesh Manna | 18c237c | 2015-08-04 22:02:41 +0530 | [diff] [blame] | 427 | else if (IS_BROXTON(dev_priv)) |
| 428 | csr->fw_path = I915_CSR_BXT; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 429 | else { |
| 430 | DRM_ERROR("Unexpected: no known CSR firmware for platform\n"); |
| 431 | return; |
| 432 | } |
| 433 | |
Damien Lespiau | abd41dc | 2015-06-04 16:42:16 +0100 | [diff] [blame] | 434 | DRM_DEBUG_KMS("Loading %s\n", csr->fw_path); |
| 435 | |
Suketu Shah | dc17430 | 2015-04-17 19:46:16 +0530 | [diff] [blame] | 436 | /* |
| 437 | * Obtain a runtime pm reference, until CSR is loaded, |
| 438 | * to avoid entering runtime-suspend. |
| 439 | */ |
Daniel Vetter | 01a6908 | 2015-10-28 23:58:56 +0200 | [diff] [blame] | 440 | intel_display_power_get(dev_priv, POWER_DOMAIN_INIT); |
Suketu Shah | dc17430 | 2015-04-17 19:46:16 +0530 | [diff] [blame] | 441 | |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 442 | schedule_work(&dev_priv->csr.work); |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 443 | } |
| 444 | |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 445 | /** |
| 446 | * intel_csr_ucode_fini() - unload the CSR firmware. |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 447 | * @dev_priv: i915 drm device. |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 448 | * |
| 449 | * Firmmware unloading includes freeing the internal momory and reset the |
| 450 | * firmware loading status. |
| 451 | */ |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 452 | void intel_csr_ucode_fini(struct drm_i915_private *dev_priv) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 453 | { |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 454 | if (!HAS_CSR(dev_priv)) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 455 | return; |
| 456 | |
Animesh Manna | 15e72c1 | 2015-10-28 23:59:05 +0200 | [diff] [blame] | 457 | flush_work(&dev_priv->csr.work); |
| 458 | |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 459 | kfree(dev_priv->csr.dmc_payload); |
| 460 | } |