/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 */
28 #include <drm/amdgpu_drm.h>
32 #include "atom-bits.h"
33 #include "atombios_encoders.h"
34 #include "atombios_dp.h"
35 #include "amdgpu_connectors.h"
36 #include "amdgpu_atombios.h"
37 #include <drm/drm_dp_helper.h>
39 /* move these to drm_dp_helper.c/h */
40 #define DP_LINK_CONFIGURATION_SIZE 9
41 #define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE
/* Human-readable names for DPCD voltage-swing / pre-emphasis levels,
 * indexed by the raw training level (0-3); used only for debug output.
 * Made const: these tables are never written.
 */
static const char * const voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static const char * const pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};
50 /***** amdgpu AUX functions *****/
52 union aux_channel_transaction
{
53 PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1
;
54 PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2
;
57 static int amdgpu_atombios_dp_process_aux_ch(struct amdgpu_i2c_chan
*chan
,
58 u8
*send
, int send_bytes
,
59 u8
*recv
, int recv_size
,
62 struct drm_device
*dev
= chan
->dev
;
63 struct amdgpu_device
*adev
= dev
->dev_private
;
64 union aux_channel_transaction args
;
65 int index
= GetIndexIntoMasterTable(COMMAND
, ProcessAuxChannelTransaction
);
70 memset(&args
, 0, sizeof(args
));
72 mutex_lock(&chan
->mutex
);
74 base
= (unsigned char *)(adev
->mode_info
.atom_context
->scratch
+ 1);
76 amdgpu_atombios_copy_swap(base
, send
, send_bytes
, true);
78 args
.v2
.lpAuxRequest
= cpu_to_le16((u16
)(0 + 4));
79 args
.v2
.lpDataOut
= cpu_to_le16((u16
)(16 + 4));
80 args
.v2
.ucDataOutLen
= 0;
81 args
.v2
.ucChannelID
= chan
->rec
.i2c_id
;
82 args
.v2
.ucDelay
= delay
/ 10;
83 args
.v2
.ucHPD_ID
= chan
->rec
.hpd
;
85 amdgpu_atom_execute_table(adev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
87 *ack
= args
.v2
.ucReplyStatus
;
90 if (args
.v2
.ucReplyStatus
== 1) {
96 if (args
.v2
.ucReplyStatus
== 2) {
97 DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
103 if (args
.v2
.ucReplyStatus
== 3) {
104 DRM_DEBUG_KMS("dp_aux_ch error\n");
109 recv_bytes
= args
.v1
.ucDataOutLen
;
110 if (recv_bytes
> recv_size
)
111 recv_bytes
= recv_size
;
113 if (recv
&& recv_size
)
114 amdgpu_atombios_copy_swap(recv
, base
+ 16, recv_bytes
, false);
118 mutex_unlock(&chan
->mutex
);
123 #define BARE_ADDRESS_SIZE 3
124 #define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)
127 amdgpu_atombios_dp_aux_transfer(struct drm_dp_aux
*aux
, struct drm_dp_aux_msg
*msg
)
129 struct amdgpu_i2c_chan
*chan
=
130 container_of(aux
, struct amdgpu_i2c_chan
, aux
);
136 if (WARN_ON(msg
->size
> 16))
139 tx_buf
[0] = msg
->address
& 0xff;
140 tx_buf
[1] = msg
->address
>> 8;
141 tx_buf
[2] = (msg
->request
<< 4) |
142 ((msg
->address
>> 16) & 0xf);
143 tx_buf
[3] = msg
->size
? (msg
->size
- 1) : 0;
145 switch (msg
->request
& ~DP_AUX_I2C_MOT
) {
146 case DP_AUX_NATIVE_WRITE
:
147 case DP_AUX_I2C_WRITE
:
148 /* tx_size needs to be 4 even for bare address packets since the atom
149 * table needs the info in tx_buf[3].
151 tx_size
= HEADER_SIZE
+ msg
->size
;
153 tx_buf
[3] |= BARE_ADDRESS_SIZE
<< 4;
155 tx_buf
[3] |= tx_size
<< 4;
156 memcpy(tx_buf
+ HEADER_SIZE
, msg
->buffer
, msg
->size
);
157 ret
= amdgpu_atombios_dp_process_aux_ch(chan
,
158 tx_buf
, tx_size
, NULL
, 0, delay
, &ack
);
160 /* Return payload size. */
163 case DP_AUX_NATIVE_READ
:
164 case DP_AUX_I2C_READ
:
165 /* tx_size needs to be 4 even for bare address packets since the atom
166 * table needs the info in tx_buf[3].
168 tx_size
= HEADER_SIZE
;
170 tx_buf
[3] |= BARE_ADDRESS_SIZE
<< 4;
172 tx_buf
[3] |= tx_size
<< 4;
173 ret
= amdgpu_atombios_dp_process_aux_ch(chan
,
174 tx_buf
, tx_size
, msg
->buffer
, msg
->size
, delay
, &ack
);
182 msg
->reply
= ack
>> 4;
187 void amdgpu_atombios_dp_aux_init(struct amdgpu_connector
*amdgpu_connector
)
191 amdgpu_connector
->ddc_bus
->rec
.hpd
= amdgpu_connector
->hpd
.hpd
;
192 amdgpu_connector
->ddc_bus
->aux
.dev
= amdgpu_connector
->base
.kdev
;
193 amdgpu_connector
->ddc_bus
->aux
.transfer
= amdgpu_atombios_dp_aux_transfer
;
194 ret
= drm_dp_aux_register(&amdgpu_connector
->ddc_bus
->aux
);
196 amdgpu_connector
->ddc_bus
->has_aux
= true;
198 WARN(ret
, "drm_dp_aux_register_i2c_bus() failed with error %d\n", ret
);
201 /***** general DP utility functions *****/
203 #define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3
204 #define DP_PRE_EMPHASIS_MAX DP_TRAIN_PRE_EMPH_LEVEL_3
206 static void amdgpu_atombios_dp_get_adjust_train(const u8 link_status
[DP_LINK_STATUS_SIZE
],
214 for (lane
= 0; lane
< lane_count
; lane
++) {
215 u8 this_v
= drm_dp_get_adjust_request_voltage(link_status
, lane
);
216 u8 this_p
= drm_dp_get_adjust_request_pre_emphasis(link_status
, lane
);
218 DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
220 voltage_names
[this_v
>> DP_TRAIN_VOLTAGE_SWING_SHIFT
],
221 pre_emph_names
[this_p
>> DP_TRAIN_PRE_EMPHASIS_SHIFT
]);
229 if (v
>= DP_VOLTAGE_MAX
)
230 v
|= DP_TRAIN_MAX_SWING_REACHED
;
232 if (p
>= DP_PRE_EMPHASIS_MAX
)
233 p
|= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED
;
235 DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
236 voltage_names
[(v
& DP_TRAIN_VOLTAGE_SWING_MASK
) >> DP_TRAIN_VOLTAGE_SWING_SHIFT
],
237 pre_emph_names
[(p
& DP_TRAIN_PRE_EMPHASIS_MASK
) >> DP_TRAIN_PRE_EMPHASIS_SHIFT
]);
239 for (lane
= 0; lane
< 4; lane
++)
240 train_set
[lane
] = v
| p
;
/* Convert bits-per-color (from the EDID) to bits-per-pixel.
 * A bpc of 0 means "unknown"; assume 8 bpc (24 bpp) in that case.
 */
static unsigned amdgpu_atombios_dp_convert_bpc_to_bpp(int bpc)
{
	if (bpc == 0)
		return 24;
	else
		return bpc * 3;
}
253 /***** amdgpu specific DP functions *****/
255 static int amdgpu_atombios_dp_get_dp_link_config(struct drm_connector
*connector
,
256 const u8 dpcd
[DP_DPCD_SIZE
],
258 unsigned *dp_lanes
, unsigned *dp_rate
)
261 amdgpu_atombios_dp_convert_bpc_to_bpp(amdgpu_connector_get_monitor_bpc(connector
));
262 static const unsigned link_rates
[3] = { 162000, 270000, 540000 };
263 unsigned max_link_rate
= drm_dp_max_link_rate(dpcd
);
264 unsigned max_lane_num
= drm_dp_max_lane_count(dpcd
);
265 unsigned lane_num
, i
, max_pix_clock
;
267 if (amdgpu_connector_encoder_get_dp_bridge_encoder_id(connector
) ==
268 ENCODER_OBJECT_ID_NUTMEG
) {
269 for (lane_num
= 1; lane_num
<= max_lane_num
; lane_num
<<= 1) {
270 max_pix_clock
= (lane_num
* 270000 * 8) / bpp
;
271 if (max_pix_clock
>= pix_clock
) {
272 *dp_lanes
= lane_num
;
278 for (i
= 0; i
< ARRAY_SIZE(link_rates
) && link_rates
[i
] <= max_link_rate
; i
++) {
279 for (lane_num
= 1; lane_num
<= max_lane_num
; lane_num
<<= 1) {
280 max_pix_clock
= (lane_num
* link_rates
[i
] * 8) / bpp
;
281 if (max_pix_clock
>= pix_clock
) {
282 *dp_lanes
= lane_num
;
283 *dp_rate
= link_rates
[i
];
293 static u8
amdgpu_atombios_dp_encoder_service(struct amdgpu_device
*adev
,
294 int action
, int dp_clock
,
295 u8 ucconfig
, u8 lane_num
)
297 DP_ENCODER_SERVICE_PARAMETERS args
;
298 int index
= GetIndexIntoMasterTable(COMMAND
, DPEncoderService
);
300 memset(&args
, 0, sizeof(args
));
301 args
.ucLinkClock
= dp_clock
/ 10;
302 args
.ucConfig
= ucconfig
;
303 args
.ucAction
= action
;
304 args
.ucLaneNum
= lane_num
;
307 amdgpu_atom_execute_table(adev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
308 return args
.ucStatus
;
311 u8
amdgpu_atombios_dp_get_sinktype(struct amdgpu_connector
*amdgpu_connector
)
313 struct drm_device
*dev
= amdgpu_connector
->base
.dev
;
314 struct amdgpu_device
*adev
= dev
->dev_private
;
316 return amdgpu_atombios_dp_encoder_service(adev
, ATOM_DP_ACTION_GET_SINK_TYPE
, 0,
317 amdgpu_connector
->ddc_bus
->rec
.i2c_id
, 0);
320 static void amdgpu_atombios_dp_probe_oui(struct amdgpu_connector
*amdgpu_connector
)
322 struct amdgpu_connector_atom_dig
*dig_connector
= amdgpu_connector
->con_priv
;
325 if (!(dig_connector
->dpcd
[DP_DOWN_STREAM_PORT_COUNT
] & DP_OUI_SUPPORT
))
328 if (drm_dp_dpcd_read(&amdgpu_connector
->ddc_bus
->aux
, DP_SINK_OUI
, buf
, 3) == 3)
329 DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
330 buf
[0], buf
[1], buf
[2]);
332 if (drm_dp_dpcd_read(&amdgpu_connector
->ddc_bus
->aux
, DP_BRANCH_OUI
, buf
, 3) == 3)
333 DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
334 buf
[0], buf
[1], buf
[2]);
337 int amdgpu_atombios_dp_get_dpcd(struct amdgpu_connector
*amdgpu_connector
)
339 struct amdgpu_connector_atom_dig
*dig_connector
= amdgpu_connector
->con_priv
;
340 u8 msg
[DP_DPCD_SIZE
];
343 ret
= drm_dp_dpcd_read(&amdgpu_connector
->ddc_bus
->aux
, DP_DPCD_REV
,
345 if (ret
== DP_DPCD_SIZE
) {
346 memcpy(dig_connector
->dpcd
, msg
, DP_DPCD_SIZE
);
348 DRM_DEBUG_KMS("DPCD: %*ph\n", (int)sizeof(dig_connector
->dpcd
),
349 dig_connector
->dpcd
);
351 amdgpu_atombios_dp_probe_oui(amdgpu_connector
);
356 dig_connector
->dpcd
[0] = 0;
360 int amdgpu_atombios_dp_get_panel_mode(struct drm_encoder
*encoder
,
361 struct drm_connector
*connector
)
363 struct amdgpu_connector
*amdgpu_connector
= to_amdgpu_connector(connector
);
364 struct amdgpu_connector_atom_dig
*dig_connector
;
365 int panel_mode
= DP_PANEL_MODE_EXTERNAL_DP_MODE
;
366 u16 dp_bridge
= amdgpu_connector_encoder_get_dp_bridge_encoder_id(connector
);
369 if (!amdgpu_connector
->con_priv
)
372 dig_connector
= amdgpu_connector
->con_priv
;
374 if (dp_bridge
!= ENCODER_OBJECT_ID_NONE
) {
375 /* DP bridge chips */
376 if (drm_dp_dpcd_readb(&amdgpu_connector
->ddc_bus
->aux
,
377 DP_EDP_CONFIGURATION_CAP
, &tmp
) == 1) {
379 panel_mode
= DP_PANEL_MODE_INTERNAL_DP2_MODE
;
380 else if ((dp_bridge
== ENCODER_OBJECT_ID_NUTMEG
) ||
381 (dp_bridge
== ENCODER_OBJECT_ID_TRAVIS
))
382 panel_mode
= DP_PANEL_MODE_INTERNAL_DP1_MODE
;
384 panel_mode
= DP_PANEL_MODE_EXTERNAL_DP_MODE
;
386 } else if (connector
->connector_type
== DRM_MODE_CONNECTOR_eDP
) {
388 if (drm_dp_dpcd_readb(&amdgpu_connector
->ddc_bus
->aux
,
389 DP_EDP_CONFIGURATION_CAP
, &tmp
) == 1) {
391 panel_mode
= DP_PANEL_MODE_INTERNAL_DP2_MODE
;
398 void amdgpu_atombios_dp_set_link_config(struct drm_connector
*connector
,
399 const struct drm_display_mode
*mode
)
401 struct amdgpu_connector
*amdgpu_connector
= to_amdgpu_connector(connector
);
402 struct amdgpu_connector_atom_dig
*dig_connector
;
405 if (!amdgpu_connector
->con_priv
)
407 dig_connector
= amdgpu_connector
->con_priv
;
409 if ((dig_connector
->dp_sink_type
== CONNECTOR_OBJECT_ID_DISPLAYPORT
) ||
410 (dig_connector
->dp_sink_type
== CONNECTOR_OBJECT_ID_eDP
)) {
411 ret
= amdgpu_atombios_dp_get_dp_link_config(connector
, dig_connector
->dpcd
,
413 &dig_connector
->dp_lane_count
,
414 &dig_connector
->dp_clock
);
416 dig_connector
->dp_clock
= 0;
417 dig_connector
->dp_lane_count
= 0;
422 int amdgpu_atombios_dp_mode_valid_helper(struct drm_connector
*connector
,
423 struct drm_display_mode
*mode
)
425 struct amdgpu_connector
*amdgpu_connector
= to_amdgpu_connector(connector
);
426 struct amdgpu_connector_atom_dig
*dig_connector
;
427 unsigned dp_lanes
, dp_clock
;
430 if (!amdgpu_connector
->con_priv
)
431 return MODE_CLOCK_HIGH
;
432 dig_connector
= amdgpu_connector
->con_priv
;
434 ret
= amdgpu_atombios_dp_get_dp_link_config(connector
, dig_connector
->dpcd
,
435 mode
->clock
, &dp_lanes
, &dp_clock
);
437 return MODE_CLOCK_HIGH
;
439 if ((dp_clock
== 540000) &&
440 (!amdgpu_connector_is_dp12_capable(connector
)))
441 return MODE_CLOCK_HIGH
;
446 bool amdgpu_atombios_dp_needs_link_train(struct amdgpu_connector
*amdgpu_connector
)
448 u8 link_status
[DP_LINK_STATUS_SIZE
];
449 struct amdgpu_connector_atom_dig
*dig
= amdgpu_connector
->con_priv
;
451 if (drm_dp_dpcd_read_link_status(&amdgpu_connector
->ddc_bus
->aux
, link_status
)
454 if (drm_dp_channel_eq_ok(link_status
, dig
->dp_lane_count
))
459 void amdgpu_atombios_dp_set_rx_power_state(struct drm_connector
*connector
,
462 struct amdgpu_connector
*amdgpu_connector
= to_amdgpu_connector(connector
);
463 struct amdgpu_connector_atom_dig
*dig_connector
;
465 if (!amdgpu_connector
->con_priv
)
468 dig_connector
= amdgpu_connector
->con_priv
;
470 /* power up/down the sink */
471 if (dig_connector
->dpcd
[0] >= 0x11) {
472 drm_dp_dpcd_writeb(&amdgpu_connector
->ddc_bus
->aux
,
473 DP_SET_POWER
, power_state
);
474 usleep_range(1000, 2000);
478 struct amdgpu_atombios_dp_link_train_info
{
479 struct amdgpu_device
*adev
;
480 struct drm_encoder
*encoder
;
481 struct drm_connector
*connector
;
485 u8 dpcd
[DP_RECEIVER_CAP_SIZE
];
487 u8 link_status
[DP_LINK_STATUS_SIZE
];
489 struct drm_dp_aux
*aux
;
493 amdgpu_atombios_dp_update_vs_emph(struct amdgpu_atombios_dp_link_train_info
*dp_info
)
495 /* set the initial vs/emph on the source */
496 amdgpu_atombios_encoder_setup_dig_transmitter(dp_info
->encoder
,
497 ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH
,
498 0, dp_info
->train_set
[0]); /* sets all lanes at once */
500 /* set the vs/emph on the sink */
501 drm_dp_dpcd_write(dp_info
->aux
, DP_TRAINING_LANE0_SET
,
502 dp_info
->train_set
, dp_info
->dp_lane_count
);
506 amdgpu_atombios_dp_set_tp(struct amdgpu_atombios_dp_link_train_info
*dp_info
, int tp
)
510 /* set training pattern on the source */
512 case DP_TRAINING_PATTERN_1
:
513 rtp
= ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1
;
515 case DP_TRAINING_PATTERN_2
:
516 rtp
= ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2
;
518 case DP_TRAINING_PATTERN_3
:
519 rtp
= ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3
;
522 amdgpu_atombios_encoder_setup_dig_encoder(dp_info
->encoder
, rtp
, 0);
524 /* enable training pattern on the sink */
525 drm_dp_dpcd_writeb(dp_info
->aux
, DP_TRAINING_PATTERN_SET
, tp
);
529 amdgpu_atombios_dp_link_train_init(struct amdgpu_atombios_dp_link_train_info
*dp_info
)
531 struct amdgpu_encoder
*amdgpu_encoder
= to_amdgpu_encoder(dp_info
->encoder
);
532 struct amdgpu_encoder_atom_dig
*dig
= amdgpu_encoder
->enc_priv
;
535 /* power up the sink */
536 amdgpu_atombios_dp_set_rx_power_state(dp_info
->connector
, DP_SET_POWER_D0
);
538 /* possibly enable downspread on the sink */
539 if (dp_info
->dpcd
[3] & 0x1)
540 drm_dp_dpcd_writeb(dp_info
->aux
,
541 DP_DOWNSPREAD_CTRL
, DP_SPREAD_AMP_0_5
);
543 drm_dp_dpcd_writeb(dp_info
->aux
,
544 DP_DOWNSPREAD_CTRL
, 0);
546 if (dig
->panel_mode
== DP_PANEL_MODE_INTERNAL_DP2_MODE
)
547 drm_dp_dpcd_writeb(dp_info
->aux
, DP_EDP_CONFIGURATION_SET
, 1);
549 /* set the lane count on the sink */
550 tmp
= dp_info
->dp_lane_count
;
551 if (drm_dp_enhanced_frame_cap(dp_info
->dpcd
))
552 tmp
|= DP_LANE_COUNT_ENHANCED_FRAME_EN
;
553 drm_dp_dpcd_writeb(dp_info
->aux
, DP_LANE_COUNT_SET
, tmp
);
555 /* set the link rate on the sink */
556 tmp
= drm_dp_link_rate_to_bw_code(dp_info
->dp_clock
);
557 drm_dp_dpcd_writeb(dp_info
->aux
, DP_LINK_BW_SET
, tmp
);
559 /* start training on the source */
560 amdgpu_atombios_encoder_setup_dig_encoder(dp_info
->encoder
,
561 ATOM_ENCODER_CMD_DP_LINK_TRAINING_START
, 0);
563 /* disable the training pattern on the sink */
564 drm_dp_dpcd_writeb(dp_info
->aux
,
565 DP_TRAINING_PATTERN_SET
,
566 DP_TRAINING_PATTERN_DISABLE
);
572 amdgpu_atombios_dp_link_train_finish(struct amdgpu_atombios_dp_link_train_info
*dp_info
)
576 /* disable the training pattern on the sink */
577 drm_dp_dpcd_writeb(dp_info
->aux
,
578 DP_TRAINING_PATTERN_SET
,
579 DP_TRAINING_PATTERN_DISABLE
);
581 /* disable the training pattern on the source */
582 amdgpu_atombios_encoder_setup_dig_encoder(dp_info
->encoder
,
583 ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE
, 0);
589 amdgpu_atombios_dp_link_train_cr(struct amdgpu_atombios_dp_link_train_info
*dp_info
)
595 amdgpu_atombios_dp_set_tp(dp_info
, DP_TRAINING_PATTERN_1
);
596 memset(dp_info
->train_set
, 0, 4);
597 amdgpu_atombios_dp_update_vs_emph(dp_info
);
601 /* clock recovery loop */
602 clock_recovery
= false;
606 drm_dp_link_train_clock_recovery_delay(dp_info
->dpcd
);
608 if (drm_dp_dpcd_read_link_status(dp_info
->aux
,
609 dp_info
->link_status
) <= 0) {
610 DRM_ERROR("displayport link status failed\n");
614 if (drm_dp_clock_recovery_ok(dp_info
->link_status
, dp_info
->dp_lane_count
)) {
615 clock_recovery
= true;
619 for (i
= 0; i
< dp_info
->dp_lane_count
; i
++) {
620 if ((dp_info
->train_set
[i
] & DP_TRAIN_MAX_SWING_REACHED
) == 0)
623 if (i
== dp_info
->dp_lane_count
) {
624 DRM_ERROR("clock recovery reached max voltage\n");
628 if ((dp_info
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
) == voltage
) {
630 if (dp_info
->tries
== 5) {
631 DRM_ERROR("clock recovery tried 5 times\n");
637 voltage
= dp_info
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
;
639 /* Compute new train_set as requested by sink */
640 amdgpu_atombios_dp_get_adjust_train(dp_info
->link_status
, dp_info
->dp_lane_count
,
643 amdgpu_atombios_dp_update_vs_emph(dp_info
);
645 if (!clock_recovery
) {
646 DRM_ERROR("clock recovery failed\n");
649 DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
650 dp_info
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
,
651 (dp_info
->train_set
[0] & DP_TRAIN_PRE_EMPHASIS_MASK
) >>
652 DP_TRAIN_PRE_EMPHASIS_SHIFT
);
658 amdgpu_atombios_dp_link_train_ce(struct amdgpu_atombios_dp_link_train_info
*dp_info
)
662 if (dp_info
->tp3_supported
)
663 amdgpu_atombios_dp_set_tp(dp_info
, DP_TRAINING_PATTERN_3
);
665 amdgpu_atombios_dp_set_tp(dp_info
, DP_TRAINING_PATTERN_2
);
667 /* channel equalization loop */
671 drm_dp_link_train_channel_eq_delay(dp_info
->dpcd
);
673 if (drm_dp_dpcd_read_link_status(dp_info
->aux
,
674 dp_info
->link_status
) <= 0) {
675 DRM_ERROR("displayport link status failed\n");
679 if (drm_dp_channel_eq_ok(dp_info
->link_status
, dp_info
->dp_lane_count
)) {
685 if (dp_info
->tries
> 5) {
686 DRM_ERROR("channel eq failed: 5 tries\n");
690 /* Compute new train_set as requested by sink */
691 amdgpu_atombios_dp_get_adjust_train(dp_info
->link_status
, dp_info
->dp_lane_count
,
694 amdgpu_atombios_dp_update_vs_emph(dp_info
);
699 DRM_ERROR("channel eq failed\n");
702 DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
703 dp_info
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
,
704 (dp_info
->train_set
[0] & DP_TRAIN_PRE_EMPHASIS_MASK
)
705 >> DP_TRAIN_PRE_EMPHASIS_SHIFT
);
710 void amdgpu_atombios_dp_link_train(struct drm_encoder
*encoder
,
711 struct drm_connector
*connector
)
713 struct drm_device
*dev
= encoder
->dev
;
714 struct amdgpu_device
*adev
= dev
->dev_private
;
715 struct amdgpu_encoder
*amdgpu_encoder
= to_amdgpu_encoder(encoder
);
716 struct amdgpu_encoder_atom_dig
*dig
;
717 struct amdgpu_connector
*amdgpu_connector
;
718 struct amdgpu_connector_atom_dig
*dig_connector
;
719 struct amdgpu_atombios_dp_link_train_info dp_info
;
722 if (!amdgpu_encoder
->enc_priv
)
724 dig
= amdgpu_encoder
->enc_priv
;
726 amdgpu_connector
= to_amdgpu_connector(connector
);
727 if (!amdgpu_connector
->con_priv
)
729 dig_connector
= amdgpu_connector
->con_priv
;
731 if ((dig_connector
->dp_sink_type
!= CONNECTOR_OBJECT_ID_DISPLAYPORT
) &&
732 (dig_connector
->dp_sink_type
!= CONNECTOR_OBJECT_ID_eDP
))
735 if (drm_dp_dpcd_readb(&amdgpu_connector
->ddc_bus
->aux
, DP_MAX_LANE_COUNT
, &tmp
)
737 if (tmp
& DP_TPS3_SUPPORTED
)
738 dp_info
.tp3_supported
= true;
740 dp_info
.tp3_supported
= false;
742 dp_info
.tp3_supported
= false;
745 memcpy(dp_info
.dpcd
, dig_connector
->dpcd
, DP_RECEIVER_CAP_SIZE
);
747 dp_info
.encoder
= encoder
;
748 dp_info
.connector
= connector
;
749 dp_info
.dp_lane_count
= dig_connector
->dp_lane_count
;
750 dp_info
.dp_clock
= dig_connector
->dp_clock
;
751 dp_info
.aux
= &amdgpu_connector
->ddc_bus
->aux
;
753 if (amdgpu_atombios_dp_link_train_init(&dp_info
))
755 if (amdgpu_atombios_dp_link_train_cr(&dp_info
))
757 if (amdgpu_atombios_dp_link_train_ce(&dp_info
))
760 if (amdgpu_atombios_dp_link_train_finish(&dp_info
))