/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"

#include "atom.h"
#include "atom-bits.h"
#include <drm/drm_dp_helper.h>

/* move these to drm_dp_helper.c/h */
#define DP_LINK_CONFIGURATION_SIZE 9
#define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE

static char *voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static char *pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};

/***** radeon AUX functions *****/

/* Atom needs data in little endian format
 * so swap as appropriate when copying data to
 * or from atom. Note that atom operates on
 * dw units.
 */
void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le)
{
#ifdef __BIG_ENDIAN
	u8 src_tmp[20], dst_tmp[20]; /* used for byteswapping */
	u32 *dst32, *src32;
	int i;

	memcpy(src_tmp, src, num_bytes);
	src32 = (u32 *)src_tmp;
	dst32 = (u32 *)dst_tmp;
	if (to_le) {
		for (i = 0; i < ((num_bytes + 3) / 4); i++)
			dst32[i] = cpu_to_le32(src32[i]);
		memcpy(dst, dst_tmp, num_bytes);
	} else {
		u8 dws = num_bytes & ~3;
		for (i = 0; i < ((num_bytes + 3) / 4); i++)
			dst32[i] = le32_to_cpu(src32[i]);
		memcpy(dst, dst_tmp, dws);
		if (num_bytes % 4) {
			for (i = 0; i < (num_bytes % 4); i++)
				dst[dws+i] = dst_tmp[dws+i];
		}
	}
#else
	memcpy(dst, src, num_bytes);
#endif
}

union aux_channel_transaction {
	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
};

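/* Execute one AUX channel transaction through the ProcessAuxChannelTransaction
 * atom command table: the request is copied into the atom scratch buffer, the
 * table is executed, and any reply payload is copied back out.  Returns the
 * number of bytes received, or a negative error code on timeout/failure; the
 * raw atom reply status is handed back through @ack.
 */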
static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
				 u8 *send, int send_bytes,
				 u8 *recv, int recv_size,
				 u8 delay, u8 *ack)
{
	struct drm_device *dev = chan->dev;
	struct radeon_device *rdev = dev->dev_private;
	union aux_channel_transaction args;
	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
	unsigned char *base;
	int recv_bytes;
	int r = 0;

	memset(&args, 0, sizeof(args));

	mutex_lock(&chan->mutex);
	mutex_lock(&rdev->mode_info.atom_context->scratch_mutex);

	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);

	radeon_atom_copy_swap(base, send, send_bytes, true);

	args.v1.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
	args.v1.lpDataOut = cpu_to_le16((u16)(16 + 4));
	args.v1.ucDataOutLen = 0;
	args.v1.ucChannelID = chan->rec.i2c_id;
	args.v1.ucDelay = delay / 10;
	if (ASIC_IS_DCE4(rdev))
		args.v2.ucHPD_ID = chan->rec.hpd;

	atom_execute_table_scratch_unlocked(rdev->mode_info.atom_context, index, (uint32_t *)&args);

	*ack = args.v1.ucReplyStatus;

	/* timeout */
	if (args.v1.ucReplyStatus == 1) {
		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
		r = -ETIMEDOUT;
		goto done;
	}

	/* flags not zero */
	if (args.v1.ucReplyStatus == 2) {
		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
		r = -EIO;
		goto done;
	}

	/* error */
	if (args.v1.ucReplyStatus == 3) {
		DRM_DEBUG_KMS("dp_aux_ch error\n");
		r = -EIO;
		goto done;
	}

	recv_bytes = args.v1.ucDataOutLen;
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	if (recv && recv_size)
		radeon_atom_copy_swap(recv, base + 16, recv_bytes, false);

	r = recv_bytes;
done:
	mutex_unlock(&rdev->mode_info.atom_context->scratch_mutex);
	mutex_unlock(&chan->mutex);

	return r;
}

#define BARE_ADDRESS_SIZE 3
#define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)

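/* drm_dp_aux .transfer() hook: build the 4-byte AUX request header expected by
 * the atom table (address, request type, payload length), append any write
 * payload, and hand the packet to radeon_process_aux_ch().  The reply status
 * from atom is translated into msg->reply for the DP helper core.
 */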
static ssize_t
radeon_dp_aux_transfer(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
{
	struct radeon_i2c_chan *chan =
		container_of(aux, struct radeon_i2c_chan, aux);
	int ret;
	u8 tx_buf[20];
	size_t tx_size;
	u8 ack, delay = 0;

	if (WARN_ON(msg->size > 16))
		return -E2BIG;

	tx_buf[0] = msg->address & 0xff;
	tx_buf[1] = msg->address >> 8;
	tx_buf[2] = msg->request << 4;
	tx_buf[3] = msg->size ? (msg->size - 1) : 0;

	switch (msg->request & ~DP_AUX_I2C_MOT) {
	case DP_AUX_NATIVE_WRITE:
	case DP_AUX_I2C_WRITE:
		/* tx_size needs to be 4 even for bare address packets since the atom
		 * table needs the info in tx_buf[3].
		 */
		tx_size = HEADER_SIZE + msg->size;
		if (msg->size == 0)
			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
		else
			tx_buf[3] |= tx_size << 4;
		memcpy(tx_buf + HEADER_SIZE, msg->buffer, msg->size);
		ret = radeon_process_aux_ch(chan,
					    tx_buf, tx_size, NULL, 0, delay, &ack);
		if (ret >= 0)
			/* Return payload size. */
			ret = msg->size;
		break;
	case DP_AUX_NATIVE_READ:
	case DP_AUX_I2C_READ:
		/* tx_size needs to be 4 even for bare address packets since the atom
		 * table needs the info in tx_buf[3].
		 */
		tx_size = HEADER_SIZE;
		if (msg->size == 0)
			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
		else
			tx_buf[3] |= tx_size << 4;
		ret = radeon_process_aux_ch(chan,
					    tx_buf, tx_size, msg->buffer, msg->size, delay, &ack);
		break;
	default:
		ret = -EINVAL;
		break;
	}

	if (ret >= 0)
		msg->reply = ack >> 4;

	return ret;
}

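/* Hook up and register the connector's AUX channel with the DRM DP helper
 * core so the generic DPCD read/write helpers can be used on this connector.
 */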
void radeon_dp_aux_init(struct radeon_connector *radeon_connector)
{
	int ret;

	radeon_connector->ddc_bus->rec.hpd = radeon_connector->hpd.hpd;
	radeon_connector->ddc_bus->aux.dev = radeon_connector->base.kdev;
	radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer;

	ret = drm_dp_aux_register(&radeon_connector->ddc_bus->aux);
	if (!ret)
		radeon_connector->ddc_bus->has_aux = true;

	WARN(ret, "drm_dp_aux_register() failed with error %d\n", ret);
}

/***** general DP utility functions *****/

#define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3
#define DP_PRE_EMPHASIS_MAX DP_TRAIN_PRE_EMPH_LEVEL_3

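/* Scan the per-lane adjustment requests in the link status and take the
 * highest voltage swing and pre-emphasis requested by any lane; every lane is
 * then driven with that common setting, capped at the supported maximum.
 */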
static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
				int lane_count,
				u8 train_set[4])
{
	u8 v = 0;
	u8 p = 0;
	int lane;

	for (lane = 0; lane < lane_count; lane++) {
		u8 this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
		u8 this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);

		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
			      lane,
			      voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
			      pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

		if (this_v > v)
			v = this_v;
		if (this_p > p)
			p = this_p;
	}

	if (v >= DP_VOLTAGE_MAX)
		v |= DP_TRAIN_MAX_SWING_REACHED;

	if (p >= DP_PRE_EMPHASIS_MAX)
		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
		      voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
		      pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

	for (lane = 0; lane < 4; lane++)
		train_set[lane] = v | p;
}

/* convert bits per color (as reported by the EDID) to bits per pixel;
 * a bpc of 0 means fall back to the default of 24 bpp
 */
static int convert_bpc_to_bpp(int bpc)
{
	if (bpc == 0)
		return 24;
	else
		return bpc * 3;
}

/* get the max pix clock supported by the link rate and lane num */
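/* link_rate is the per-lane rate in the same kHz units as the pixel clock;
 * 8b/10b coding carries 8 data bits per 10-bit symbol, so the highest pixel
 * clock that fits is link_rate * lane_num * 8 / bpp.
 */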
static int dp_get_max_dp_pix_clock(int link_rate,
				   int lane_num,
				   int bpp)
{
	return (link_rate * lane_num * 8) / bpp;
}

/***** radeon specific DP functions *****/

static int radeon_dp_get_max_link_rate(struct drm_connector *connector,
				       u8 dpcd[DP_DPCD_SIZE])
{
	int max_link_rate;

	if (radeon_connector_is_dp12_capable(connector))
		max_link_rate = min(drm_dp_max_link_rate(dpcd), 540000);
	else
		max_link_rate = min(drm_dp_max_link_rate(dpcd), 270000);

	return max_link_rate;
}

/* Pick the smallest lane count that can carry the pixel clock at the
 * connector's maximum link rate, without exceeding the lane count reported
 * by the DP panel.
 */
static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
					u8 dpcd[DP_DPCD_SIZE],
					int pix_clock)
{
	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
	int max_link_rate = radeon_dp_get_max_link_rate(connector, dpcd);
	int max_lane_num = drm_dp_max_lane_count(dpcd);
	int lane_num;
	int max_dp_pix_clock;

	for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
		max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
		if (pix_clock <= max_dp_pix_clock)
			break;
	}

	return lane_num;
}

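/* Pick the lowest standard link rate (162000, 270000 or, on DP 1.2 capable
 * parts, 540000) that can carry the requested pixel clock at the selected
 * lane count.  DP bridge chips such as NUTMEG are fixed at 270000.
 */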
static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
				       u8 dpcd[DP_DPCD_SIZE],
				       int pix_clock)
{
	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
	int lane_num, max_pix_clock;

	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
	    ENCODER_OBJECT_ID_NUTMEG)
		return 270000;

	lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
	max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 162000;
	max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 270000;
	if (radeon_connector_is_dp12_capable(connector)) {
		max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
		if (pix_clock <= max_pix_clock)
			return 540000;
	}

	return radeon_dp_get_max_link_rate(connector, dpcd);
}

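/* Thin wrapper around the DPEncoderService atom command table; returns the
 * status byte reported by atom.
 */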
static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
				    int action, int dp_clock,
				    u8 ucconfig, u8 lane_num)
{
	DP_ENCODER_SERVICE_PARAMETERS args;
	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);

	memset(&args, 0, sizeof(args));
	args.ucLinkClock = dp_clock / 10;
	args.ucConfig = ucconfig;
	args.ucAction = action;
	args.ucLaneNum = lane_num;
	args.ucStatus = 0;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
	return args.ucStatus;
}

u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
{
	struct drm_device *dev = radeon_connector->base.dev;
	struct radeon_device *rdev = dev->dev_private;

	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
					 radeon_connector->ddc_bus->rec.i2c_id, 0);
}

static void radeon_dp_probe_oui(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 buf[3];

	if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
		return;

	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_SINK_OUI, buf, 3) == 3)
		DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
			      buf[0], buf[1], buf[2]);

	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_BRANCH_OUI, buf, 3) == 3)
		DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
			      buf[0], buf[1], buf[2]);
}

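/* Cache the sink's receiver capability block (DPCD) in the connector so the
 * link configuration and training code can consult it without re-reading it
 * over AUX.  Returns true if the DPCD was read successfully.
 */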
bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 msg[DP_DPCD_SIZE];
	int ret;

	ret = drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_DPCD_REV, msg,
			       DP_DPCD_SIZE);
	if (ret > 0) {
		memcpy(dig_connector->dpcd, msg, DP_DPCD_SIZE);

		DRM_DEBUG_KMS("DPCD: %*ph\n", (int)sizeof(dig_connector->dpcd),
			      dig_connector->dpcd);

		radeon_dp_probe_oui(radeon_connector);

		return true;
	}
	dig_connector->dpcd[0] = 0;
	return false;
}

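/* Decide which panel mode to program: on DCE4+ hardware, eDP panels and
 * DP-to-LVDS bridge chips (NUTMEG/TRAVIS) may need the internal DP1/DP2
 * panel modes depending on DP_EDP_CONFIGURATION_CAP; everything else uses
 * external DP mode.
 */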
int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
			     struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;
	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
	u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector);
	u8 tmp;

	if (!ASIC_IS_DCE4(rdev))
		return panel_mode;

	if (!radeon_connector->con_priv)
		return panel_mode;

	dig_connector = radeon_connector->con_priv;

	if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
		/* DP bridge chips */
		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
			if (tmp & 1)
				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
			else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
				 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
				panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
			else
				panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		}
	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
		/* eDP */
		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
			if (tmp & 1)
				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
		}
	}

	return panel_mode;
}

void radeon_dp_set_link_config(struct drm_connector *connector,
			       const struct drm_display_mode *mode)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;

	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
		dig_connector->dp_clock =
			radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
		dig_connector->dp_lane_count =
			radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
	}
}

int radeon_dp_mode_valid_helper(struct drm_connector *connector,
				struct drm_display_mode *mode)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;
	int dp_clock;

	if ((mode->clock > 340000) &&
	    (!radeon_connector_is_dp12_capable(connector)))
		return MODE_CLOCK_HIGH;

	if (!radeon_connector->con_priv)
		return MODE_CLOCK_HIGH;
	dig_connector = radeon_connector->con_priv;

	dp_clock =
		radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);

	if ((dp_clock == 540000) &&
	    (!radeon_connector_is_dp12_capable(connector)))
		return MODE_CLOCK_HIGH;

	return MODE_OK;
}

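/* Re-read the link status over AUX; if channel equalization is no longer OK
 * on all active lanes the link has to be retrained.
 */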
bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
{
	u8 link_status[DP_LINK_STATUS_SIZE];
	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;

	if (drm_dp_dpcd_read_link_status(&radeon_connector->ddc_bus->aux, link_status)
	    <= 0)
		return false;
	if (drm_dp_channel_eq_ok(link_status, dig->dp_lane_count))
		return false;
	return true;
}

void radeon_dp_set_rx_power_state(struct drm_connector *connector,
				  u8 power_state)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;

	if (!radeon_connector->con_priv)
		return;

	dig_connector = radeon_connector->con_priv;

	/* power up/down the sink */
	if (dig_connector->dpcd[0] >= 0x11) {
		drm_dp_dpcd_writeb(&radeon_connector->ddc_bus->aux,
				   DP_SET_POWER, power_state);
		usleep_range(1000, 2000);
	}
}


struct radeon_dp_link_train_info {
	struct radeon_device *rdev;
	struct drm_encoder *encoder;
	struct drm_connector *connector;
	int enc_id;
	int dp_clock;
	int dp_lane_count;
	bool tp3_supported;
	u8 dpcd[DP_RECEIVER_CAP_SIZE];
	u8 train_set[4];
	u8 link_status[DP_LINK_STATUS_SIZE];
	u8 tries;
	bool use_dpencoder;
	struct drm_dp_aux *aux;
};

static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
{
	/* set the initial vs/emph on the source */
	atombios_dig_transmitter_setup(dp_info->encoder,
				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
				       0, dp_info->train_set[0]); /* sets all lanes at once */

	/* set the vs/emph on the sink */
	drm_dp_dpcd_write(dp_info->aux, DP_TRAINING_LANE0_SET,
			  dp_info->train_set, dp_info->dp_lane_count);
}

static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
{
	int rtp = 0;

	/* set training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
			break;
		case DP_TRAINING_PATTERN_3:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
			break;
		}
		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
	} else {
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = 0;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = 1;
			break;
		}
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
					  dp_info->dp_clock, dp_info->enc_id, rtp);
	}

	/* enable training pattern on the sink */
	drm_dp_dpcd_writeb(dp_info->aux, DP_TRAINING_PATTERN_SET, tp);
}

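/* Prepare source and sink for training: power up the receiver, program
 * downspread, panel mode, lane count and link rate on the sink, kick off
 * training on the encoder, and make sure no training pattern is active yet.
 */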
static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
{
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
	u8 tmp;

	/* power up the sink */
	radeon_dp_set_rx_power_state(dp_info->connector, DP_SET_POWER_D0);

	/* possibly enable downspread on the sink */
	if (dp_info->dpcd[3] & 0x1)
		drm_dp_dpcd_writeb(dp_info->aux,
				   DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
	else
		drm_dp_dpcd_writeb(dp_info->aux,
				   DP_DOWNSPREAD_CTRL, 0);

	if (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)
		drm_dp_dpcd_writeb(dp_info->aux, DP_EDP_CONFIGURATION_SET, 1);

	/* set the lane count on the sink */
	tmp = dp_info->dp_lane_count;
	if (drm_dp_enhanced_frame_cap(dp_info->dpcd))
		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	drm_dp_dpcd_writeb(dp_info->aux, DP_LANE_COUNT_SET, tmp);

	/* set the link rate on the sink */
	tmp = drm_dp_link_rate_to_bw_code(dp_info->dp_clock);
	drm_dp_dpcd_writeb(dp_info->aux, DP_LINK_BW_SET, tmp);

	/* start training on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	/* disable the training pattern on the sink */
	drm_dp_dpcd_writeb(dp_info->aux,
			   DP_TRAINING_PATTERN_SET,
			   DP_TRAINING_PATTERN_DISABLE);

	return 0;
}

static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
{
	udelay(400);

	/* disable the training pattern on the sink */
	drm_dp_dpcd_writeb(dp_info->aux,
			   DP_TRAINING_PATTERN_SET,
			   DP_TRAINING_PATTERN_DISABLE);

	/* disable the training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	return 0;
}

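/* Clock recovery phase: transmit training pattern 1 and keep applying the
 * drive settings requested by the sink until every active lane reports CR
 * done, the maximum voltage swing is reached, or five attempts at the same
 * voltage level have failed.
 */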
static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
{
	bool clock_recovery;
	u8 voltage;
	int i;

	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
	memset(dp_info->train_set, 0, 4);
	radeon_dp_update_vs_emph(dp_info);

	udelay(400);

	/* clock recovery loop */
	clock_recovery = false;
	dp_info->tries = 0;
	voltage = 0xff;
	while (1) {
		drm_dp_link_train_clock_recovery_delay(dp_info->dpcd);

		if (drm_dp_dpcd_read_link_status(dp_info->aux,
						 dp_info->link_status) <= 0) {
			DRM_ERROR("displayport link status failed\n");
			break;
		}

		if (drm_dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			clock_recovery = true;
			break;
		}

		for (i = 0; i < dp_info->dp_lane_count; i++) {
			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		}
		if (i == dp_info->dp_lane_count) {
			DRM_ERROR("clock recovery reached max voltage\n");
			break;
		}

		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++dp_info->tries;
			if (dp_info->tries == 5) {
				DRM_ERROR("clock recovery tried 5 times\n");
				break;
			}
		} else
			dp_info->tries = 0;

		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
	}
	if (!clock_recovery) {
		DRM_ERROR("clock recovery failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
			      DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}

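/* Channel equalization phase: switch to training pattern 2 (or 3 when the
 * sink supports TPS3), then keep adjusting the drive settings until all lanes
 * report symbol lock and channel EQ, giving up after five tries.
 */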
static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
{
	bool channel_eq;

	if (dp_info->tp3_supported)
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
	else
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);

	/* channel equalization loop */
	dp_info->tries = 0;
	channel_eq = false;
	while (1) {
		drm_dp_link_train_channel_eq_delay(dp_info->dpcd);

		if (drm_dp_dpcd_read_link_status(dp_info->aux,
						 dp_info->link_status) <= 0) {
			DRM_ERROR("displayport link status failed\n");
			break;
		}

		if (drm_dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times */
		if (dp_info->tries > 5) {
			DRM_ERROR("channel eq failed: 5 tries\n");
			break;
		}

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
		dp_info->tries++;
	}

	if (!channel_eq) {
		DRM_ERROR("channel eq failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
			      >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}

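/* Top-level link training entry point: gather the link parameters from the
 * connector, decide whether the legacy DPEncoderService or the DIG encoder
 * control path must be used, then run the init, clock recovery, channel
 * equalization and finish steps in order.
 */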
void radeon_dp_link_train(struct drm_encoder *encoder,
			  struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_encoder_atom_dig *dig;
	struct radeon_connector *radeon_connector;
	struct radeon_connector_atom_dig *dig_connector;
	struct radeon_dp_link_train_info dp_info;
	int index;
	u8 tmp, frev, crev;

	if (!radeon_encoder->enc_priv)
		return;
	dig = radeon_encoder->enc_priv;

	radeon_connector = to_radeon_connector(connector);
	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
		return;

	/* DPEncoderService newer than 1.1 can't properly program the
	 * training pattern.  When facing such a version, use the
	 * DIGXEncoderControl (X == 1 | 2) path instead.
	 */
	dp_info.use_dpencoder = true;
	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
		if (crev > 1) {
			dp_info.use_dpencoder = false;
		}
	}

	dp_info.enc_id = 0;
	if (dig->dig_encoder)
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
	if (dig->linkb)
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;

	if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux, DP_MAX_LANE_COUNT, &tmp)
	    == 1) {
		if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
			dp_info.tp3_supported = true;
		else
			dp_info.tp3_supported = false;
	} else {
		dp_info.tp3_supported = false;
	}

	memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE);
	dp_info.rdev = rdev;
	dp_info.encoder = encoder;
	dp_info.connector = connector;
	dp_info.dp_lane_count = dig_connector->dp_lane_count;
	dp_info.dp_clock = dig_connector->dp_clock;
	dp_info.aux = &radeon_connector->ddc_bus->aux;

	if (radeon_dp_link_train_init(&dp_info))
		goto done;
	if (radeon_dp_link_train_cr(&dp_info))
		goto done;
	if (radeon_dp_link_train_ce(&dp_info))
		goto done;
done:
	if (radeon_dp_link_train_finish(&dp_info))
		return;
}