Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 1 | /* |
| 2 | * Copyright © 2014 Intel Corporation |
| 3 | * |
| 4 | * Permission is hereby granted, free of charge, to any person obtaining a |
| 5 | * copy of this software and associated documentation files (the "Software"), |
| 6 | * to deal in the Software without restriction, including without limitation |
| 7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
| 8 | * and/or sell copies of the Software, and to permit persons to whom the |
| 9 | * Software is furnished to do so, subject to the following conditions: |
| 10 | * |
| 11 | * The above copyright notice and this permission notice (including the next |
| 12 | * paragraph) shall be included in all copies or substantial portions of the |
| 13 | * Software. |
| 14 | * |
| 15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
| 16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
| 18 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
| 19 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
| 20 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
| 21 | * IN THE SOFTWARE. |
| 22 | * |
| 23 | */ |
| 24 | #include <linux/firmware.h> |
| 25 | #include "i915_drv.h" |
| 26 | #include "i915_reg.h" |
| 27 | |
/**
 * DOC: csr support for dmc
 *
 * Display Context Save and Restore (CSR) firmware support added from gen9
 * onwards to drive the newly added DMC (Display microcontroller) in the
 * display engine, to save and restore the state of the display engine when
 * it enters a low-power state and comes back to normal.
 */
| 36 | |
/*
 * Per-platform DMC firmware blob names and the exact firmware version each
 * platform requires; parse_csr_fw() refuses to load any other version.
 */
#define I915_CSR_GLK "i915/glk_dmc_ver1_04.bin"
MODULE_FIRMWARE(I915_CSR_GLK);
#define GLK_CSR_VERSION_REQUIRED	CSR_VERSION(1, 4)

#define I915_CSR_CNL "i915/cnl_dmc_ver1_07.bin"
MODULE_FIRMWARE(I915_CSR_CNL);
#define CNL_CSR_VERSION_REQUIRED	CSR_VERSION(1, 7)

#define I915_CSR_KBL "i915/kbl_dmc_ver1_04.bin"
MODULE_FIRMWARE(I915_CSR_KBL);
#define KBL_CSR_VERSION_REQUIRED	CSR_VERSION(1, 4)

#define I915_CSR_SKL "i915/skl_dmc_ver1_27.bin"
MODULE_FIRMWARE(I915_CSR_SKL);
#define SKL_CSR_VERSION_REQUIRED	CSR_VERSION(1, 27)

#define I915_CSR_BXT "i915/bxt_dmc_ver1_07.bin"
MODULE_FIRMWARE(I915_CSR_BXT);
#define BXT_CSR_VERSION_REQUIRED	CSR_VERSION(1, 7)


/* Upper bound (in bytes) on the DMC program payload; larger images are rejected. */
#define CSR_MAX_FW_SIZE			0x2FFF
/* Sentinel meaning "no matching fw_info entry found in the package header". */
#define CSR_DEFAULT_FW_OFFSET		0xFFFFFFFF
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 60 | |
/*
 * CSS (Code Signing Schema) header found at the very start of the firmware
 * image.  Layout must match the binary format exactly, hence __packed.
 */
struct intel_css_header {
	/* 0x09 for DMC */
	uint32_t module_type;

	/* Includes the DMC specific header in dwords */
	uint32_t header_len;

	/* always value would be 0x10000 */
	uint32_t header_ver;

	/* Not used */
	uint32_t module_id;

	/* Not used */
	uint32_t module_vendor;

	/* in YYYYMMDD format */
	uint32_t date;

	/* Size in dwords (CSS_Headerlen + PackageHeaderLen + dmc FWsLen)/4 */
	uint32_t size;

	/* Not used */
	uint32_t key_size;

	/* Not used */
	uint32_t modulus_size;

	/* Not used */
	uint32_t exponent_size;

	/* Not used */
	uint32_t reserved1[12];

	/* Major Minor (firmware version; cached into csr->version) */
	uint32_t version;

	/* Not used */
	uint32_t reserved2[8];

	/* Not used */
	uint32_t kernel_header_info;
} __packed;
| 104 | |
/*
 * One entry of the package header's FWInfo array: maps a hardware stepping
 * to the location of its DMC payload inside the firmware image.
 */
struct intel_fw_info {
	uint16_t reserved1;

	/* Stepping (A, B, C, ..., *). * is a wildcard */
	char stepping;

	/* Sub-stepping (0, 1, ..., *). * is a wildcard */
	char substepping;

	/* Byte offset of this stepping's dmc header, counted from the end
	 * of the package header (parse_csr_fw() adds it to its byte cursor). */
	uint32_t offset;
	uint32_t reserved2;
} __packed;
| 117 | |
/*
 * Package header that directly follows the CSS header; describes which
 * per-stepping firmware payloads the image contains.
 */
struct intel_package_header {
	/* DMC container header length in dwords */
	unsigned char header_len;

	/* always value would be 0x01 */
	unsigned char header_ver;

	unsigned char reserved[10];

	/* Number of valid entries in the FWInfo array below */
	uint32_t num_entries;

	struct intel_fw_info fw_info[20];
} __packed;
| 132 | |
/*
 * Per-stepping DMC header located at fw_info[i].offset; immediately
 * followed by the actual DMC program payload.
 */
struct intel_dmc_header {
	/* always value would be 0x40403E3E */
	uint32_t signature;

	/* DMC binary header length (in bytes, unlike the headers above) */
	unsigned char header_len;

	/* 0x01 */
	unsigned char header_ver;

	/* Reserved */
	uint16_t dmcc_ver;

	/* Major, Minor */
	uint32_t project;

	/* Firmware program size (excluding header) in dwords */
	uint32_t fw_size;

	/* Major Minor version */
	uint32_t fw_version;

	/* Number of valid MMIO cycles present. */
	uint32_t mmio_count;

	/* MMIO address */
	uint32_t mmioaddr[8];

	/* MMIO data */
	uint32_t mmiodata[8];

	/* FW filename */
	unsigned char dfile[32];

	uint32_t reserved1[2];
} __packed;
| 169 | |
/* Hardware stepping identifier used to pick a payload from the package header. */
struct stepping_info {
	char stepping;		/* 'A', 'B', ... */
	char substepping;	/* '0', '1', ... */
};

/* Maps INTEL_REVID() to a stepping for SKL; array index == revision id. */
static const struct stepping_info skl_stepping_info[] = {
	{'A', '0'}, {'B', '0'}, {'C', '0'},
	{'D', '0'}, {'E', '0'}, {'F', '0'},
	{'G', '0'}, {'H', '0'}, {'I', '0'},
	{'J', '0'}, {'K', '0'}
};

/* Same revision-id-indexed mapping for BXT. */
static const struct stepping_info bxt_stepping_info[] = {
	{'A', '0'}, {'A', '1'}, {'A', '2'},
	{'B', '0'}, {'B', '1'}, {'B', '2'}
};

/* Fallback for unknown platforms/revisions: matches any wildcard entry. */
static const struct stepping_info no_stepping_info = { '*', '*' };
| 188 | |
| 189 | static const struct stepping_info * |
| 190 | intel_get_stepping_info(struct drm_i915_private *dev_priv) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 191 | { |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 192 | const struct stepping_info *si; |
| 193 | unsigned int size; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 194 | |
Anusha Srivatsa | 1c00164 | 2016-10-24 17:28:21 -0700 | [diff] [blame] | 195 | if (IS_SKYLAKE(dev_priv)) { |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 196 | size = ARRAY_SIZE(skl_stepping_info); |
| 197 | si = skl_stepping_info; |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 198 | } else if (IS_BROXTON(dev_priv)) { |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 199 | size = ARRAY_SIZE(bxt_stepping_info); |
| 200 | si = bxt_stepping_info; |
| 201 | } else { |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 202 | size = 0; |
Chris Wilson | 2f59f1b | 2017-11-07 14:53:34 +0000 | [diff] [blame] | 203 | si = NULL; |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 204 | } |
| 205 | |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 206 | if (INTEL_REVID(dev_priv) < size) |
| 207 | return si + INTEL_REVID(dev_priv); |
Jani Nikula | b1a14c6 | 2015-10-20 15:38:33 +0300 | [diff] [blame] | 208 | |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 209 | return &no_stepping_info; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 210 | } |
| 211 | |
Imre Deak | 2abc525 | 2016-03-04 21:57:41 +0200 | [diff] [blame] | 212 | static void gen9_set_dc_state_debugmask(struct drm_i915_private *dev_priv) |
| 213 | { |
| 214 | uint32_t val, mask; |
| 215 | |
| 216 | mask = DC_STATE_DEBUG_MASK_MEMORY_UP; |
| 217 | |
Imre Deak | b7208a3 | 2017-10-03 12:51:59 +0300 | [diff] [blame] | 218 | if (IS_GEN9_LP(dev_priv)) |
Imre Deak | 2abc525 | 2016-03-04 21:57:41 +0200 | [diff] [blame] | 219 | mask |= DC_STATE_DEBUG_MASK_CORES; |
| 220 | |
| 221 | /* The below bit doesn't need to be cleared ever afterwards */ |
| 222 | val = I915_READ(DC_STATE_DEBUG); |
| 223 | if ((val & mask) != mask) { |
| 224 | val |= mask; |
| 225 | I915_WRITE(DC_STATE_DEBUG, val); |
| 226 | POSTING_READ(DC_STATE_DEBUG); |
| 227 | } |
| 228 | } |
| 229 | |
/**
 * intel_csr_load_program() - write the firmware from memory to register.
 * @dev_priv: i915 drm device.
 *
 * CSR firmware is read from a .bin file and kept in internal memory one time.
 * Every time the display comes back from a low power state this function is
 * called to copy the firmware from internal memory to registers.
 */
void intel_csr_load_program(struct drm_i915_private *dev_priv)
{
	u32 *payload = dev_priv->csr.dmc_payload;
	uint32_t i, fw_size;

	if (!HAS_CSR(dev_priv)) {
		DRM_ERROR("No CSR support available for this platform\n");
		return;
	}

	/* Nothing parsed yet (or parsing failed): nothing to program. */
	if (!dev_priv->csr.dmc_payload) {
		DRM_ERROR("Tried to program CSR with empty payload\n");
		return;
	}

	fw_size = dev_priv->csr.dmc_fw_size;	/* in dwords, set by parse_csr_fw() */
	assert_rpm_wakelock_held(dev_priv);

	/*
	 * The payload is copied with raw I915_WRITE_FW accessors with
	 * preemption disabled so the copy loop runs uninterrupted.
	 * NOTE(review): presumably this is to keep the register burst
	 * short/atomic — confirm against the commit that added it.
	 */
	preempt_disable();

	for (i = 0; i < fw_size; i++)
		I915_WRITE_FW(CSR_PROGRAM(i), payload[i]);

	preempt_enable();

	/* Replay the MMIO (address, data) pairs cached from the dmc header. */
	for (i = 0; i < dev_priv->csr.mmio_count; i++) {
		I915_WRITE(dev_priv->csr.mmioaddr[i],
			   dev_priv->csr.mmiodata[i]);
	}

	/* Firmware reload resets the DC state tracking. */
	dev_priv->csr.dc_state = 0;

	gen9_set_dc_state_debugmask(dev_priv);
}
| 272 | |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 273 | static uint32_t *parse_csr_fw(struct drm_i915_private *dev_priv, |
| 274 | const struct firmware *fw) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 275 | { |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 276 | struct intel_css_header *css_header; |
| 277 | struct intel_package_header *package_header; |
| 278 | struct intel_dmc_header *dmc_header; |
| 279 | struct intel_csr *csr = &dev_priv->csr; |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 280 | const struct stepping_info *si = intel_get_stepping_info(dev_priv); |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 281 | uint32_t dmc_offset = CSR_DEFAULT_FW_OFFSET, readcount = 0, nbytes; |
| 282 | uint32_t i; |
Animesh Manna | a7f749f | 2015-08-03 21:55:32 +0530 | [diff] [blame] | 283 | uint32_t *dmc_payload; |
Patrik Jakobsson | 4aa7fb9 | 2016-05-16 11:30:57 +0200 | [diff] [blame] | 284 | uint32_t required_version; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 285 | |
Mika Kuoppala | 9c5308e | 2015-10-30 17:52:16 +0200 | [diff] [blame] | 286 | if (!fw) |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 287 | return NULL; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 288 | |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 289 | /* Extract CSS Header information*/ |
| 290 | css_header = (struct intel_css_header *)fw->data; |
| 291 | if (sizeof(struct intel_css_header) != |
Daniel Vetter | f98f70d | 2015-10-28 23:58:59 +0200 | [diff] [blame] | 292 | (css_header->header_len * 4)) { |
Michal Wajdeczko | f1e86ce | 2017-10-16 14:47:20 +0000 | [diff] [blame] | 293 | DRM_ERROR("DMC firmware has wrong CSS header length " |
| 294 | "(%u bytes)\n", |
Daniel Vetter | f98f70d | 2015-10-28 23:58:59 +0200 | [diff] [blame] | 295 | (css_header->header_len * 4)); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 296 | return NULL; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 297 | } |
Damien Lespiau | b6e7d89 | 2015-10-27 14:46:59 +0200 | [diff] [blame] | 298 | |
| 299 | csr->version = css_header->version; |
| 300 | |
Anusha Srivatsa | cebfcea | 2017-06-09 15:26:10 -0700 | [diff] [blame] | 301 | if (IS_CANNONLAKE(dev_priv)) { |
| 302 | required_version = CNL_CSR_VERSION_REQUIRED; |
| 303 | } else if (IS_GEMINILAKE(dev_priv)) { |
Anusha Srivatsa | dbb28b5 | 2016-12-16 17:42:24 +0200 | [diff] [blame] | 304 | required_version = GLK_CSR_VERSION_REQUIRED; |
Rodrigo Vivi | 84cd843 | 2017-06-09 13:02:30 -0700 | [diff] [blame] | 305 | } else if (IS_KABYLAKE(dev_priv) || IS_COFFEELAKE(dev_priv)) { |
Patrik Jakobsson | 4aa7fb9 | 2016-05-16 11:30:57 +0200 | [diff] [blame] | 306 | required_version = KBL_CSR_VERSION_REQUIRED; |
Rodrigo Vivi | 4922d49 | 2016-04-26 14:59:51 -0700 | [diff] [blame] | 307 | } else if (IS_SKYLAKE(dev_priv)) { |
Patrik Jakobsson | 4aa7fb9 | 2016-05-16 11:30:57 +0200 | [diff] [blame] | 308 | required_version = SKL_CSR_VERSION_REQUIRED; |
Imre Deak | e796853 | 2016-04-01 16:02:32 +0300 | [diff] [blame] | 309 | } else if (IS_BROXTON(dev_priv)) { |
Patrik Jakobsson | 4aa7fb9 | 2016-05-16 11:30:57 +0200 | [diff] [blame] | 310 | required_version = BXT_CSR_VERSION_REQUIRED; |
Imre Deak | e796853 | 2016-04-01 16:02:32 +0300 | [diff] [blame] | 311 | } else { |
| 312 | MISSING_CASE(INTEL_REVID(dev_priv)); |
Patrik Jakobsson | 4aa7fb9 | 2016-05-16 11:30:57 +0200 | [diff] [blame] | 313 | required_version = 0; |
Imre Deak | e796853 | 2016-04-01 16:02:32 +0300 | [diff] [blame] | 314 | } |
| 315 | |
Patrik Jakobsson | 4aa7fb9 | 2016-05-16 11:30:57 +0200 | [diff] [blame] | 316 | if (csr->version != required_version) { |
| 317 | DRM_INFO("Refusing to load DMC firmware v%u.%u," |
Michal Wajdeczko | f1e86ce | 2017-10-16 14:47:20 +0000 | [diff] [blame] | 318 | " please use v%u.%u\n", |
Mika Kuoppala | 9c5308e | 2015-10-30 17:52:16 +0200 | [diff] [blame] | 319 | CSR_VERSION_MAJOR(csr->version), |
| 320 | CSR_VERSION_MINOR(csr->version), |
Patrik Jakobsson | 4aa7fb9 | 2016-05-16 11:30:57 +0200 | [diff] [blame] | 321 | CSR_VERSION_MAJOR(required_version), |
| 322 | CSR_VERSION_MINOR(required_version)); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 323 | return NULL; |
Mika Kuoppala | 9c5308e | 2015-10-30 17:52:16 +0200 | [diff] [blame] | 324 | } |
| 325 | |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 326 | readcount += sizeof(struct intel_css_header); |
| 327 | |
| 328 | /* Extract Package Header information*/ |
| 329 | package_header = (struct intel_package_header *) |
Daniel Vetter | f98f70d | 2015-10-28 23:58:59 +0200 | [diff] [blame] | 330 | &fw->data[readcount]; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 331 | if (sizeof(struct intel_package_header) != |
Daniel Vetter | f98f70d | 2015-10-28 23:58:59 +0200 | [diff] [blame] | 332 | (package_header->header_len * 4)) { |
Michal Wajdeczko | f1e86ce | 2017-10-16 14:47:20 +0000 | [diff] [blame] | 333 | DRM_ERROR("DMC firmware has wrong package header length " |
| 334 | "(%u bytes)\n", |
Daniel Vetter | f98f70d | 2015-10-28 23:58:59 +0200 | [diff] [blame] | 335 | (package_header->header_len * 4)); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 336 | return NULL; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 337 | } |
| 338 | readcount += sizeof(struct intel_package_header); |
| 339 | |
| 340 | /* Search for dmc_offset to find firware binary. */ |
| 341 | for (i = 0; i < package_header->num_entries; i++) { |
| 342 | if (package_header->fw_info[i].substepping == '*' && |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 343 | si->stepping == package_header->fw_info[i].stepping) { |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 344 | dmc_offset = package_header->fw_info[i].offset; |
| 345 | break; |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 346 | } else if (si->stepping == package_header->fw_info[i].stepping && |
| 347 | si->substepping == package_header->fw_info[i].substepping) { |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 348 | dmc_offset = package_header->fw_info[i].offset; |
| 349 | break; |
| 350 | } else if (package_header->fw_info[i].stepping == '*' && |
Daniel Vetter | f98f70d | 2015-10-28 23:58:59 +0200 | [diff] [blame] | 351 | package_header->fw_info[i].substepping == '*') |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 352 | dmc_offset = package_header->fw_info[i].offset; |
| 353 | } |
| 354 | if (dmc_offset == CSR_DEFAULT_FW_OFFSET) { |
Michal Wajdeczko | f1e86ce | 2017-10-16 14:47:20 +0000 | [diff] [blame] | 355 | DRM_ERROR("DMC firmware not supported for %c stepping\n", |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 356 | si->stepping); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 357 | return NULL; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 358 | } |
| 359 | readcount += dmc_offset; |
| 360 | |
| 361 | /* Extract dmc_header information. */ |
| 362 | dmc_header = (struct intel_dmc_header *)&fw->data[readcount]; |
| 363 | if (sizeof(struct intel_dmc_header) != (dmc_header->header_len)) { |
Michal Wajdeczko | f1e86ce | 2017-10-16 14:47:20 +0000 | [diff] [blame] | 364 | DRM_ERROR("DMC firmware has wrong dmc header length " |
| 365 | "(%u bytes)\n", |
Daniel Vetter | f98f70d | 2015-10-28 23:58:59 +0200 | [diff] [blame] | 366 | (dmc_header->header_len)); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 367 | return NULL; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 368 | } |
| 369 | readcount += sizeof(struct intel_dmc_header); |
| 370 | |
| 371 | /* Cache the dmc header info. */ |
| 372 | if (dmc_header->mmio_count > ARRAY_SIZE(csr->mmioaddr)) { |
Michal Wajdeczko | f1e86ce | 2017-10-16 14:47:20 +0000 | [diff] [blame] | 373 | DRM_ERROR("DMC firmware has wrong mmio count %u\n", |
Daniel Vetter | f98f70d | 2015-10-28 23:58:59 +0200 | [diff] [blame] | 374 | dmc_header->mmio_count); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 375 | return NULL; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 376 | } |
| 377 | csr->mmio_count = dmc_header->mmio_count; |
| 378 | for (i = 0; i < dmc_header->mmio_count; i++) { |
Takashi Iwai | 982b0b2 | 2015-09-09 16:52:09 +0200 | [diff] [blame] | 379 | if (dmc_header->mmioaddr[i] < CSR_MMIO_START_RANGE || |
Daniel Vetter | f98f70d | 2015-10-28 23:58:59 +0200 | [diff] [blame] | 380 | dmc_header->mmioaddr[i] > CSR_MMIO_END_RANGE) { |
Michal Wajdeczko | f1e86ce | 2017-10-16 14:47:20 +0000 | [diff] [blame] | 381 | DRM_ERROR("DMC firmware has wrong mmio address 0x%x\n", |
Daniel Vetter | f98f70d | 2015-10-28 23:58:59 +0200 | [diff] [blame] | 382 | dmc_header->mmioaddr[i]); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 383 | return NULL; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 384 | } |
Ville Syrjälä | f0f59a0 | 2015-11-18 15:33:26 +0200 | [diff] [blame] | 385 | csr->mmioaddr[i] = _MMIO(dmc_header->mmioaddr[i]); |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 386 | csr->mmiodata[i] = dmc_header->mmiodata[i]; |
| 387 | } |
| 388 | |
| 389 | /* fw_size is in dwords, so multiplied by 4 to convert into bytes. */ |
| 390 | nbytes = dmc_header->fw_size * 4; |
| 391 | if (nbytes > CSR_MAX_FW_SIZE) { |
Michal Wajdeczko | f1e86ce | 2017-10-16 14:47:20 +0000 | [diff] [blame] | 392 | DRM_ERROR("DMC firmware too big (%u bytes)\n", nbytes); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 393 | return NULL; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 394 | } |
| 395 | csr->dmc_fw_size = dmc_header->fw_size; |
| 396 | |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 397 | dmc_payload = kmalloc(nbytes, GFP_KERNEL); |
| 398 | if (!dmc_payload) { |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 399 | DRM_ERROR("Memory allocation failed for dmc payload\n"); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 400 | return NULL; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 401 | } |
| 402 | |
Chris Wilson | 1bb4308 | 2016-03-07 12:05:57 +0000 | [diff] [blame] | 403 | return memcpy(dmc_payload, &fw->data[readcount], nbytes); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 404 | } |
| 405 | |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 406 | static void csr_load_work_fn(struct work_struct *work) |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 407 | { |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 408 | struct drm_i915_private *dev_priv; |
| 409 | struct intel_csr *csr; |
Jérémy Lefaure | 3aaa8ab | 2016-11-28 18:43:19 -0500 | [diff] [blame] | 410 | const struct firmware *fw = NULL; |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 411 | |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 412 | dev_priv = container_of(work, typeof(*dev_priv), csr.work); |
| 413 | csr = &dev_priv->csr; |
| 414 | |
Chris Wilson | ec78828 | 2017-01-18 12:18:08 +0000 | [diff] [blame] | 415 | request_firmware(&fw, dev_priv->csr.fw_path, &dev_priv->drm.pdev->dev); |
Imre Deak | 2abc525 | 2016-03-04 21:57:41 +0200 | [diff] [blame] | 416 | if (fw) |
| 417 | dev_priv->csr.dmc_payload = parse_csr_fw(dev_priv, fw); |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 418 | |
Daniel Vetter | 6a6582b | 2015-11-12 17:11:29 +0200 | [diff] [blame] | 419 | if (dev_priv->csr.dmc_payload) { |
Imre Deak | 2abc525 | 2016-03-04 21:57:41 +0200 | [diff] [blame] | 420 | intel_csr_load_program(dev_priv); |
| 421 | |
Daniel Vetter | 01a6908 | 2015-10-28 23:58:56 +0200 | [diff] [blame] | 422 | intel_display_power_put(dev_priv, POWER_DOMAIN_INIT); |
Mika Kuoppala | 9c5308e | 2015-10-30 17:52:16 +0200 | [diff] [blame] | 423 | |
Mika Kuoppala | b2251c0 | 2016-11-16 11:33:26 +0200 | [diff] [blame] | 424 | DRM_INFO("Finished loading DMC firmware %s (v%u.%u)\n", |
Mika Kuoppala | 9c5308e | 2015-10-30 17:52:16 +0200 | [diff] [blame] | 425 | dev_priv->csr.fw_path, |
| 426 | CSR_VERSION_MAJOR(csr->version), |
| 427 | CSR_VERSION_MINOR(csr->version)); |
| 428 | } else { |
Chris Wilson | 91c8a32 | 2016-07-05 10:40:23 +0100 | [diff] [blame] | 429 | dev_notice(dev_priv->drm.dev, |
Michal Wajdeczko | f1e86ce | 2017-10-16 14:47:20 +0000 | [diff] [blame] | 430 | "Failed to load DMC firmware %s." |
| 431 | " Disabling runtime power management.\n", |
| 432 | csr->fw_path); |
| 433 | dev_notice(dev_priv->drm.dev, "DMC firmware homepage: %s", |
| 434 | INTEL_UC_FIRMWARE_URL); |
Mika Kuoppala | 9c5308e | 2015-10-30 17:52:16 +0200 | [diff] [blame] | 435 | } |
| 436 | |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 437 | release_firmware(fw); |
| 438 | } |
| 439 | |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 440 | /** |
| 441 | * intel_csr_ucode_init() - initialize the firmware loading. |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 442 | * @dev_priv: i915 drm device. |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 443 | * |
| 444 | * This function is called at the time of loading the display driver to read |
| 445 | * firmware from a .bin file and copied into a internal memory. |
| 446 | */ |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 447 | void intel_csr_ucode_init(struct drm_i915_private *dev_priv) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 448 | { |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 449 | struct intel_csr *csr = &dev_priv->csr; |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 450 | |
| 451 | INIT_WORK(&dev_priv->csr.work, csr_load_work_fn); |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 452 | |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 453 | if (!HAS_CSR(dev_priv)) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 454 | return; |
| 455 | |
Anusha Srivatsa | cebfcea | 2017-06-09 15:26:10 -0700 | [diff] [blame] | 456 | if (IS_CANNONLAKE(dev_priv)) |
| 457 | csr->fw_path = I915_CSR_CNL; |
| 458 | else if (IS_GEMINILAKE(dev_priv)) |
Anusha Srivatsa | dbb28b5 | 2016-12-16 17:42:24 +0200 | [diff] [blame] | 459 | csr->fw_path = I915_CSR_GLK; |
Rodrigo Vivi | 84cd843 | 2017-06-09 13:02:30 -0700 | [diff] [blame] | 460 | else if (IS_KABYLAKE(dev_priv) || IS_COFFEELAKE(dev_priv)) |
Rodrigo Vivi | 4922d49 | 2016-04-26 14:59:51 -0700 | [diff] [blame] | 461 | csr->fw_path = I915_CSR_KBL; |
| 462 | else if (IS_SKYLAKE(dev_priv)) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 463 | csr->fw_path = I915_CSR_SKL; |
Animesh Manna | 18c237c | 2015-08-04 22:02:41 +0530 | [diff] [blame] | 464 | else if (IS_BROXTON(dev_priv)) |
| 465 | csr->fw_path = I915_CSR_BXT; |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 466 | else { |
| 467 | DRM_ERROR("Unexpected: no known CSR firmware for platform\n"); |
| 468 | return; |
| 469 | } |
| 470 | |
Damien Lespiau | abd41dc | 2015-06-04 16:42:16 +0100 | [diff] [blame] | 471 | DRM_DEBUG_KMS("Loading %s\n", csr->fw_path); |
| 472 | |
Suketu Shah | dc17430 | 2015-04-17 19:46:16 +0530 | [diff] [blame] | 473 | /* |
| 474 | * Obtain a runtime pm reference, until CSR is loaded, |
| 475 | * to avoid entering runtime-suspend. |
| 476 | */ |
Daniel Vetter | 01a6908 | 2015-10-28 23:58:56 +0200 | [diff] [blame] | 477 | intel_display_power_get(dev_priv, POWER_DOMAIN_INIT); |
Suketu Shah | dc17430 | 2015-04-17 19:46:16 +0530 | [diff] [blame] | 478 | |
Daniel Vetter | 8144ac5 | 2015-10-28 23:59:04 +0200 | [diff] [blame] | 479 | schedule_work(&dev_priv->csr.work); |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 480 | } |
| 481 | |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 482 | /** |
Imre Deak | f74ed08 | 2016-04-18 14:48:21 +0300 | [diff] [blame] | 483 | * intel_csr_ucode_suspend() - prepare CSR firmware before system suspend |
| 484 | * @dev_priv: i915 drm device |
| 485 | * |
| 486 | * Prepare the DMC firmware before entering system suspend. This includes |
| 487 | * flushing pending work items and releasing any resources acquired during |
| 488 | * init. |
| 489 | */ |
| 490 | void intel_csr_ucode_suspend(struct drm_i915_private *dev_priv) |
| 491 | { |
| 492 | if (!HAS_CSR(dev_priv)) |
| 493 | return; |
| 494 | |
| 495 | flush_work(&dev_priv->csr.work); |
| 496 | |
| 497 | /* Drop the reference held in case DMC isn't loaded. */ |
| 498 | if (!dev_priv->csr.dmc_payload) |
| 499 | intel_display_power_put(dev_priv, POWER_DOMAIN_INIT); |
| 500 | } |
| 501 | |
| 502 | /** |
| 503 | * intel_csr_ucode_resume() - init CSR firmware during system resume |
| 504 | * @dev_priv: i915 drm device |
| 505 | * |
| 506 | * Reinitialize the DMC firmware during system resume, reacquiring any |
| 507 | * resources released in intel_csr_ucode_suspend(). |
| 508 | */ |
| 509 | void intel_csr_ucode_resume(struct drm_i915_private *dev_priv) |
| 510 | { |
| 511 | if (!HAS_CSR(dev_priv)) |
| 512 | return; |
| 513 | |
| 514 | /* |
| 515 | * Reacquire the reference to keep RPM disabled in case DMC isn't |
| 516 | * loaded. |
| 517 | */ |
| 518 | if (!dev_priv->csr.dmc_payload) |
| 519 | intel_display_power_get(dev_priv, POWER_DOMAIN_INIT); |
| 520 | } |
| 521 | |
| 522 | /** |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 523 | * intel_csr_ucode_fini() - unload the CSR firmware. |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 524 | * @dev_priv: i915 drm device. |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 525 | * |
Imre Deak | f74ed08 | 2016-04-18 14:48:21 +0300 | [diff] [blame] | 526 | * Firmmware unloading includes freeing the internal memory and reset the |
Animesh Manna | aa9145c | 2015-05-13 22:13:29 +0530 | [diff] [blame] | 527 | * firmware loading status. |
| 528 | */ |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 529 | void intel_csr_ucode_fini(struct drm_i915_private *dev_priv) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 530 | { |
Daniel Vetter | f444837 | 2015-10-28 23:59:02 +0200 | [diff] [blame] | 531 | if (!HAS_CSR(dev_priv)) |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 532 | return; |
| 533 | |
Imre Deak | f74ed08 | 2016-04-18 14:48:21 +0300 | [diff] [blame] | 534 | intel_csr_ucode_suspend(dev_priv); |
Animesh Manna | 15e72c1 | 2015-10-28 23:59:05 +0200 | [diff] [blame] | 535 | |
Daniel Vetter | eb80562 | 2015-05-04 14:58:44 +0200 | [diff] [blame] | 536 | kfree(dev_priv->csr.dmc_payload); |
| 537 | } |