Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 1 | /* |
| 2 | * Copyright © 2014 Intel Corporation |
| 3 | * |
| 4 | * Permission is hereby granted, free of charge, to any person obtaining a |
| 5 | * copy of this software and associated documentation files (the "Software"), |
| 6 | * to deal in the Software without restriction, including without limitation |
| 7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
| 8 | * and/or sell copies of the Software, and to permit persons to whom the |
| 9 | * Software is furnished to do so, subject to the following conditions: |
| 10 | * |
| 11 | * The above copyright notice and this permission notice (including the next |
| 12 | * paragraph) shall be included in all copies or substantial portions of the |
| 13 | * Software. |
| 14 | * |
| 15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
| 16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
| 18 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
| 19 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
| 20 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
| 21 | * IN THE SOFTWARE. |
| 22 | * |
| 23 | */ |
| 24 | #include <linux/firmware.h> |
| 25 | #include "i915_drv.h" |
| 26 | #include "i915_reg.h" |
| 27 | |
/**
 * DOC: csr support for dmc
 *
 * Display Context Save and Restore (CSR) firmware support added from gen9
 * onwards to drive newly added DMC (Display microcontroller) in display
 * engine to save and restore the state of display engine when it enters
 * a low-power state and comes back to normal.
 */
| 36 | |
/*
 * Per-platform DMC firmware blob names and the exact version each one
 * must carry.  parse_csr_fw() refuses to load any other version.
 */
#define I915_CSR_KBL "i915/kbl_dmc_ver1_01.bin"
MODULE_FIRMWARE(I915_CSR_KBL);
#define KBL_CSR_VERSION_REQUIRED CSR_VERSION(1, 1)

#define I915_CSR_SKL "i915/skl_dmc_ver1_26.bin"
MODULE_FIRMWARE(I915_CSR_SKL);
#define SKL_CSR_VERSION_REQUIRED CSR_VERSION(1, 26)

#define I915_CSR_BXT "i915/bxt_dmc_ver1_07.bin"
MODULE_FIRMWARE(I915_CSR_BXT);
#define BXT_CSR_VERSION_REQUIRED CSR_VERSION(1, 7)

/* Quoted in the "failed to load" notice so users know where to get firmware. */
#define FIRMWARE_URL "https://01.org/linuxgraphics/intel-linux-graphics-firmwares"


/* Upper bound enforced on the DMC program payload size, in bytes. */
#define CSR_MAX_FW_SIZE 0x2FFF
/* Sentinel offset: no FWInfo entry matched the running stepping. */
#define CSR_DEFAULT_FW_OFFSET 0xFFFFFFFF
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 56 | |
/*
 * CSS header at the very start of a DMC firmware image (see parse_csr_fw()).
 * The layout is fixed by the firmware packaging format, hence __packed;
 * fields must not be reordered or resized.
 */
struct intel_css_header {
	/* 0x09 for DMC */
	uint32_t module_type;

	/* Includes the DMC specific header in dwords */
	uint32_t header_len;

	/* always value would be 0x10000 */
	uint32_t header_ver;

	/* Not used */
	uint32_t module_id;

	/* Not used */
	uint32_t module_vendor;

	/* in YYYYMMDD format */
	uint32_t date;

	/* Size in dwords (CSS_Headerlen + PackageHeaderLen + dmc FWsLen)/4 */
	uint32_t size;

	/* Not used */
	uint32_t key_size;

	/* Not used */
	uint32_t modulus_size;

	/* Not used */
	uint32_t exponent_size;

	/* Not used */
	uint32_t reserved1[12];

	/* Major Minor; checked against *_CSR_VERSION_REQUIRED at load time */
	uint32_t version;

	/* Not used */
	uint32_t reserved2[8];

	/* Not used */
	uint32_t kernel_header_info;
} __packed;
| 100 | |
/*
 * One per-stepping entry of the package header's FWInfo array; matched
 * against the running hardware's stepping in parse_csr_fw().
 */
struct intel_fw_info {
	uint16_t reserved1;

	/* Stepping (A, B, C, ..., *). * is a wildcard */
	char stepping;

	/* Sub-stepping (0, 1, ..., *). * is a wildcard */
	char substepping;

	/* Offset of this stepping's DMC binary, from the end of the package header */
	uint32_t offset;
	uint32_t reserved2;
} __packed;
| 113 | |
/*
 * Package header following the CSS header; carries one FWInfo entry per
 * supported hardware stepping.  Wire format, hence __packed.
 */
struct intel_package_header {
	/* DMC container header length in dwords */
	unsigned char header_len;

	/* always value would be 0x01 */
	unsigned char header_ver;

	unsigned char reserved[10];

	/* Number of valid entries in the FWInfo array below */
	uint32_t num_entries;

	struct intel_fw_info fw_info[20];
} __packed;
| 128 | |
/*
 * Per-stepping DMC binary header, found at the FWInfo-selected offset.
 * Supplies the program size plus the MMIO (address, data) pairs that
 * intel_csr_load_program() replays.  Wire format, hence __packed.
 */
struct intel_dmc_header {
	/* always value would be 0x40403E3E */
	uint32_t signature;

	/* DMC binary header length */
	unsigned char header_len;

	/* 0x01 */
	unsigned char header_ver;

	/* Reserved */
	uint16_t dmcc_ver;

	/* Major, Minor */
	uint32_t project;

	/* Firmware program size (excluding header) in dwords */
	uint32_t fw_size;

	/* Major Minor version */
	uint32_t fw_version;

	/* Number of valid MMIO cycles present. */
	uint32_t mmio_count;

	/* MMIO address */
	uint32_t mmioaddr[8];

	/* MMIO data */
	uint32_t mmiodata[8];

	/* FW filename */
	unsigned char dfile[32];

	uint32_t reserved1[2];
} __packed;
| 165 | |
/* A (stepping, substepping) letter pair, as used in FWInfo entries. */
struct stepping_info {
	char stepping;
	char substepping;
};

/* Indexed by INTEL_REVID(); see intel_get_stepping_info(). */
static const struct stepping_info skl_stepping_info[] = {
	{'A', '0'}, {'B', '0'}, {'C', '0'},
	{'D', '0'}, {'E', '0'}, {'F', '0'},
	{'G', '0'}, {'H', '0'}, {'I', '0'},
	{'J', '0'}, {'K', '0'}
};

static const struct stepping_info bxt_stepping_info[] = {
	{'A', '0'}, {'A', '1'}, {'A', '2'},
	{'B', '0'}, {'B', '1'}, {'B', '2'}
};

/* Wildcard pair returned for unknown platforms or out-of-table revisions. */
static const struct stepping_info no_stepping_info = { '*', '*' };
| 184 | |
| 185 | static const struct stepping_info * |
| 186 | intel_get_stepping_info(struct drm_i915_private *dev_priv) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 187 | { |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 188 | const struct stepping_info *si; |
| 189 | unsigned int size; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 190 | |
Anusha Srivatsa | 1c00164 | 2016-10-24 17:28:21 -0700 | [diff] [blame] | 191 | if (IS_SKYLAKE(dev_priv)) { |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 192 | size = ARRAY_SIZE(skl_stepping_info); |
| 193 | si = skl_stepping_info; |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 194 | } else if (IS_BROXTON(dev_priv)) { |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 195 | size = ARRAY_SIZE(bxt_stepping_info); |
| 196 | si = bxt_stepping_info; |
| 197 | } else { |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 198 | size = 0; |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 199 | } |
| 200 | |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 201 | if (INTEL_REVID(dev_priv) < size) |
| 202 | return si + INTEL_REVID(dev_priv); |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 203 | |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 204 | return &no_stepping_info; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 205 | } |
| 206 | |
Imre Deak | 2abc525 | 2016-03-04 21:57:41 +0200 | [diff] [blame] | 207 | static void gen9_set_dc_state_debugmask(struct drm_i915_private *dev_priv) |
| 208 | { |
| 209 | uint32_t val, mask; |
| 210 | |
| 211 | mask = DC_STATE_DEBUG_MASK_MEMORY_UP; |
| 212 | |
| 213 | if (IS_BROXTON(dev_priv)) |
| 214 | mask |= DC_STATE_DEBUG_MASK_CORES; |
| 215 | |
| 216 | /* The below bit doesn't need to be cleared ever afterwards */ |
| 217 | val = I915_READ(DC_STATE_DEBUG); |
| 218 | if ((val & mask) != mask) { |
| 219 | val |= mask; |
| 220 | I915_WRITE(DC_STATE_DEBUG, val); |
| 221 | POSTING_READ(DC_STATE_DEBUG); |
| 222 | } |
| 223 | } |
| 224 | |
/**
 * intel_csr_load_program() - write the firmware from memory to register.
 * @dev_priv: i915 drm device.
 *
 * CSR firmware is read from a .bin file and kept in internal memory one time.
 * Every time display comes back from low power state this function is called
 * to copy the firmware from internal memory to registers.
 */
void intel_csr_load_program(struct drm_i915_private *dev_priv)
{
	u32 *payload = dev_priv->csr.dmc_payload;
	uint32_t i, fw_size;

	/* CSR/DMC only exists on gen9 display engines. */
	if (!IS_GEN9(dev_priv)) {
		DRM_ERROR("No CSR support available for this platform\n");
		return;
	}

	/* Nothing to program if parsing failed or the load never ran. */
	if (!dev_priv->csr.dmc_payload) {
		DRM_ERROR("Tried to program CSR with empty payload\n");
		return;
	}

	/* Copy the program into CSR memory, one dword per slot. */
	fw_size = dev_priv->csr.dmc_fw_size;
	for (i = 0; i < fw_size; i++)
		I915_WRITE(CSR_PROGRAM(i), payload[i]);

	/* Replay the MMIO (address, data) pairs cached from the dmc header. */
	for (i = 0; i < dev_priv->csr.mmio_count; i++) {
		I915_WRITE(dev_priv->csr.mmioaddr[i],
			   dev_priv->csr.mmiodata[i]);
	}

	/* A freshly programmed DMC has no DC state committed yet. */
	dev_priv->csr.dc_state = 0;

	gen9_set_dc_state_debugmask(dev_priv);
}
| 261 | |
/*
 * Validate and unpack a DMC firmware image: CSS header (version check),
 * package header (stepping match), then the per-stepping dmc header whose
 * MMIO pairs are cached in @csr.  Returns a kmalloc'ed copy of the DMC
 * program payload (owned by the caller) or NULL on any validation failure.
 */
static uint32_t *parse_csr_fw(struct drm_i915_private *dev_priv,
			      const struct firmware *fw)
{
	struct intel_css_header *css_header;
	struct intel_package_header *package_header;
	struct intel_dmc_header *dmc_header;
	struct intel_csr *csr = &dev_priv->csr;
	const struct stepping_info *si = intel_get_stepping_info(dev_priv);
	uint32_t dmc_offset = CSR_DEFAULT_FW_OFFSET, readcount = 0, nbytes;
	uint32_t i;
	uint32_t *dmc_payload;
	uint32_t required_version;

	if (!fw)
		return NULL;

	/* Extract CSS Header information */
	css_header = (struct intel_css_header *)fw->data;
	if (sizeof(struct intel_css_header) !=
	    (css_header->header_len * 4)) {
		DRM_ERROR("Firmware has wrong CSS header length %u bytes\n",
			  (css_header->header_len * 4));
		return NULL;
	}

	/* Cached even on rejection so debug output can report the version. */
	csr->version = css_header->version;

	if (IS_KABYLAKE(dev_priv)) {
		required_version = KBL_CSR_VERSION_REQUIRED;
	} else if (IS_SKYLAKE(dev_priv)) {
		required_version = SKL_CSR_VERSION_REQUIRED;
	} else if (IS_BROXTON(dev_priv)) {
		required_version = BXT_CSR_VERSION_REQUIRED;
	} else {
		MISSING_CASE(INTEL_REVID(dev_priv));
		required_version = 0;
	}

	/* Only the exact validated version is accepted, never newer. */
	if (csr->version != required_version) {
		DRM_INFO("Refusing to load DMC firmware v%u.%u,"
			 " please use v%u.%u [" FIRMWARE_URL "].\n",
			 CSR_VERSION_MAJOR(csr->version),
			 CSR_VERSION_MINOR(csr->version),
			 CSR_VERSION_MAJOR(required_version),
			 CSR_VERSION_MINOR(required_version));
		return NULL;
	}

	readcount += sizeof(struct intel_css_header);

	/* Extract Package Header information */
	package_header = (struct intel_package_header *)
		&fw->data[readcount];
	if (sizeof(struct intel_package_header) !=
	    (package_header->header_len * 4)) {
		DRM_ERROR("Firmware has wrong package header length %u bytes\n",
			  (package_header->header_len * 4));
		return NULL;
	}
	readcount += sizeof(struct intel_package_header);

	/*
	 * Search for dmc_offset to find firmware binary: exact stepping
	 * matches break out immediately; a full (*, *) wildcard is only
	 * remembered as a fallback in case no better entry follows.
	 */
	for (i = 0; i < package_header->num_entries; i++) {
		if (package_header->fw_info[i].substepping == '*' &&
		    si->stepping == package_header->fw_info[i].stepping) {
			dmc_offset = package_header->fw_info[i].offset;
			break;
		} else if (si->stepping == package_header->fw_info[i].stepping &&
			   si->substepping == package_header->fw_info[i].substepping) {
			dmc_offset = package_header->fw_info[i].offset;
			break;
		} else if (package_header->fw_info[i].stepping == '*' &&
			   package_header->fw_info[i].substepping == '*')
			dmc_offset = package_header->fw_info[i].offset;
	}
	if (dmc_offset == CSR_DEFAULT_FW_OFFSET) {
		DRM_ERROR("Firmware not supported for %c stepping\n",
			  si->stepping);
		return NULL;
	}
	readcount += dmc_offset;

	/* Extract dmc_header information. */
	dmc_header = (struct intel_dmc_header *)&fw->data[readcount];
	if (sizeof(struct intel_dmc_header) != (dmc_header->header_len)) {
		DRM_ERROR("Firmware has wrong dmc header length %u bytes\n",
			  (dmc_header->header_len));
		return NULL;
	}
	readcount += sizeof(struct intel_dmc_header);

	/* Cache the dmc header info. */
	if (dmc_header->mmio_count > ARRAY_SIZE(csr->mmioaddr)) {
		DRM_ERROR("Firmware has wrong mmio count %u\n",
			  dmc_header->mmio_count);
		return NULL;
	}
	csr->mmio_count = dmc_header->mmio_count;
	for (i = 0; i < dmc_header->mmio_count; i++) {
		/* Reject addresses outside the CSR MMIO window. */
		if (dmc_header->mmioaddr[i] < CSR_MMIO_START_RANGE ||
		    dmc_header->mmioaddr[i] > CSR_MMIO_END_RANGE) {
			DRM_ERROR(" Firmware has wrong mmio address 0x%x\n",
				  dmc_header->mmioaddr[i]);
			return NULL;
		}
		csr->mmioaddr[i] = _MMIO(dmc_header->mmioaddr[i]);
		csr->mmiodata[i] = dmc_header->mmiodata[i];
	}

	/* fw_size is in dwords, so multiplied by 4 to convert into bytes. */
	nbytes = dmc_header->fw_size * 4;
	if (nbytes > CSR_MAX_FW_SIZE) {
		DRM_ERROR("CSR firmware too big (%u) bytes\n", nbytes);
		return NULL;
	}
	csr->dmc_fw_size = dmc_header->fw_size;

	/* Caller (csr_load_work_fn) owns this; freed in intel_csr_ucode_fini(). */
	dmc_payload = kmalloc(nbytes, GFP_KERNEL);
	if (!dmc_payload) {
		DRM_ERROR("Memory allocation failed for dmc payload\n");
		return NULL;
	}

	return memcpy(dmc_payload, &fw->data[readcount], nbytes);
}
| 387 | |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 388 | static void csr_load_work_fn(struct work_struct *work) |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 389 | { |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 390 | struct drm_i915_private *dev_priv; |
| 391 | struct intel_csr *csr; |
Jérémy Lefaure | 3aaa8ab | 2016-11-28 18:43:19 -0500 | [diff] [blame^] | 392 | const struct firmware *fw = NULL; |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 393 | int ret; |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 394 | |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 395 | dev_priv = container_of(work, typeof(*dev_priv), csr.work); |
| 396 | csr = &dev_priv->csr; |
| 397 | |
| 398 | ret = request_firmware(&fw, dev_priv->csr.fw_path, |
Chris Wilson | 91c8a32 | 2016-07-05 10:40:23 +0100 | [diff] [blame] | 399 | &dev_priv->drm.pdev->dev); |
Imre Deak | 2abc525 | 2016-03-04 21:57:41 +0200 | [diff] [blame] | 400 | if (fw) |
| 401 | dev_priv->csr.dmc_payload = parse_csr_fw(dev_priv, fw); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 402 | |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 403 | if (dev_priv->csr.dmc_payload) { |
Imre Deak | 2abc525 | 2016-03-04 21:57:41 +0200 | [diff] [blame] | 404 | intel_csr_load_program(dev_priv); |
| 405 | |
Daniel Vetter | 01a6908 | 2015-10-28 23:58:56 +0200 | [diff] [blame] | 406 | intel_display_power_put(dev_priv, POWER_DOMAIN_INIT); |
Mika Kuoppala | 9c5308e | 2015-10-30 17:52:16 +0200 | [diff] [blame] | 407 | |
Mika Kuoppala | b2251c0 | 2016-11-16 11:33:26 +0200 | [diff] [blame] | 408 | DRM_INFO("Finished loading DMC firmware %s (v%u.%u)\n", |
Mika Kuoppala | 9c5308e | 2015-10-30 17:52:16 +0200 | [diff] [blame] | 409 | dev_priv->csr.fw_path, |
| 410 | CSR_VERSION_MAJOR(csr->version), |
| 411 | CSR_VERSION_MINOR(csr->version)); |
| 412 | } else { |
Chris Wilson | 91c8a32 | 2016-07-05 10:40:23 +0100 | [diff] [blame] | 413 | dev_notice(dev_priv->drm.dev, |
Chris Wilson | cbfc2d2 | 2016-01-13 17:38:15 +0000 | [diff] [blame] | 414 | "Failed to load DMC firmware" |
| 415 | " [" FIRMWARE_URL "]," |
| 416 | " disabling runtime power management.\n"); |
Mika Kuoppala | 9c5308e | 2015-10-30 17:52:16 +0200 | [diff] [blame] | 417 | } |
| 418 | |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 419 | release_firmware(fw); |
| 420 | } |
| 421 | |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 422 | /** |
| 423 | * intel_csr_ucode_init() - initialize the firmware loading. |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 424 | * @dev_priv: i915 drm device. |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 425 | * |
| 426 | * This function is called at the time of loading the display driver to read |
| 427 | * firmware from a .bin file and copied into a internal memory. |
| 428 | */ |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 429 | void intel_csr_ucode_init(struct drm_i915_private *dev_priv) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 430 | { |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 431 | struct intel_csr *csr = &dev_priv->csr; |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 432 | |
| 433 | INIT_WORK(&dev_priv->csr.work, csr_load_work_fn); |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 434 | |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 435 | if (!HAS_CSR(dev_priv)) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 436 | return; |
| 437 | |
Rodrigo Vivi | 4922d49 | 2016-04-26 14:59:51 -0700 | [diff] [blame] | 438 | if (IS_KABYLAKE(dev_priv)) |
| 439 | csr->fw_path = I915_CSR_KBL; |
| 440 | else if (IS_SKYLAKE(dev_priv)) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 441 | csr->fw_path = I915_CSR_SKL; |
Animesh Manna | 18c237c | 2015-08-04 22:02:41 +0530 | [diff] [blame] | 442 | else if (IS_BROXTON(dev_priv)) |
| 443 | csr->fw_path = I915_CSR_BXT; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 444 | else { |
| 445 | DRM_ERROR("Unexpected: no known CSR firmware for platform\n"); |
| 446 | return; |
| 447 | } |
| 448 | |
Damien Lespiau | abd41dc | 2015-06-04 16:42:16 +0100 | [diff] [blame] | 449 | DRM_DEBUG_KMS("Loading %s\n", csr->fw_path); |
| 450 | |
Suketu Shah | dc17430 | 2015-04-17 19:46:16 +0530 | [diff] [blame] | 451 | /* |
| 452 | * Obtain a runtime pm reference, until CSR is loaded, |
| 453 | * to avoid entering runtime-suspend. |
| 454 | */ |
Daniel Vetter | 01a6908 | 2015-10-28 23:58:56 +0200 | [diff] [blame] | 455 | intel_display_power_get(dev_priv, POWER_DOMAIN_INIT); |
Suketu Shah | dc17430 | 2015-04-17 19:46:16 +0530 | [diff] [blame] | 456 | |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 457 | schedule_work(&dev_priv->csr.work); |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 458 | } |
| 459 | |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 460 | /** |
Imre Deak | f74ed08 | 2016-04-18 14:48:21 +0300 | [diff] [blame] | 461 | * intel_csr_ucode_suspend() - prepare CSR firmware before system suspend |
| 462 | * @dev_priv: i915 drm device |
| 463 | * |
| 464 | * Prepare the DMC firmware before entering system suspend. This includes |
| 465 | * flushing pending work items and releasing any resources acquired during |
| 466 | * init. |
| 467 | */ |
| 468 | void intel_csr_ucode_suspend(struct drm_i915_private *dev_priv) |
| 469 | { |
| 470 | if (!HAS_CSR(dev_priv)) |
| 471 | return; |
| 472 | |
| 473 | flush_work(&dev_priv->csr.work); |
| 474 | |
| 475 | /* Drop the reference held in case DMC isn't loaded. */ |
| 476 | if (!dev_priv->csr.dmc_payload) |
| 477 | intel_display_power_put(dev_priv, POWER_DOMAIN_INIT); |
| 478 | } |
| 479 | |
| 480 | /** |
| 481 | * intel_csr_ucode_resume() - init CSR firmware during system resume |
| 482 | * @dev_priv: i915 drm device |
| 483 | * |
| 484 | * Reinitialize the DMC firmware during system resume, reacquiring any |
| 485 | * resources released in intel_csr_ucode_suspend(). |
| 486 | */ |
| 487 | void intel_csr_ucode_resume(struct drm_i915_private *dev_priv) |
| 488 | { |
| 489 | if (!HAS_CSR(dev_priv)) |
| 490 | return; |
| 491 | |
| 492 | /* |
| 493 | * Reacquire the reference to keep RPM disabled in case DMC isn't |
| 494 | * loaded. |
| 495 | */ |
| 496 | if (!dev_priv->csr.dmc_payload) |
| 497 | intel_display_power_get(dev_priv, POWER_DOMAIN_INIT); |
| 498 | } |
| 499 | |
| 500 | /** |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 501 | * intel_csr_ucode_fini() - unload the CSR firmware. |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 502 | * @dev_priv: i915 drm device. |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 503 | * |
Imre Deak | f74ed08 | 2016-04-18 14:48:21 +0300 | [diff] [blame] | 504 | * Firmmware unloading includes freeing the internal memory and reset the |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 505 | * firmware loading status. |
| 506 | */ |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 507 | void intel_csr_ucode_fini(struct drm_i915_private *dev_priv) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 508 | { |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 509 | if (!HAS_CSR(dev_priv)) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 510 | return; |
| 511 | |
Imre Deak | f74ed08 | 2016-04-18 14:48:21 +0300 | [diff] [blame] | 512 | intel_csr_ucode_suspend(dev_priv); |
Animesh Manna | 15e72c1 | 2015-10-28 23:59:05 +0200 | [diff] [blame] | 513 | |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 514 | kfree(dev_priv->csr.dmc_payload); |
| 515 | } |