2 * Copyright 2015 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
24 #include "dm_services.h"
26 #include "dc_link_dp.h"
27 #include "dm_helpers.h"
32 #include "inc/core_types.h"
33 #include "link_hwss.h"
34 #include "dc_link_ddc.h"
35 #include "core_status.h"
36 #include "dpcd_defs.h"
37 #include "dc_dmub_srv.h"
38 #include "dce/dmub_hw_lock_mgr.h"
39 #include "inc/dc_link_dpia.h"
40 #include "inc/link_enc_cfg.h"
/* Branch-device ID strings for known DP-to-VGA/LVDS converter dongles.
 * NOTE(review): the strings read as "Travis" and "Almond" reversed —
 * presumably matching the byte order in which the DPCD branch device ID
 * is read back; confirm against the dongle-detection code that compares
 * against these arrays.
 */
43 static const uint8_t DP_VGA_LVDS_CONVERTER_ID_2[] = "sivarT";
45 static const uint8_t DP_VGA_LVDS_CONVERTER_ID_3[] = "dnomlA";
/* Tracing stub: expands to nothing so trace calls compile out in this
 * configuration. */
49 #define DC_TRACE_LEVEL_MESSAGE(...) /* do nothing */
51 #include "link_dpcd.h"
53 /* maximum pre emphasis level allowed for each voltage swing level*/
54 static const enum dc_pre_emphasis
55 voltage_swing_to_pre_emphasis[] = { PRE_EMPHASIS_LEVEL3,
/* NOTE(review): intermediate table entries appear to be missing from this
 * view of the file; the table is indexed by voltage swing level (see
 * get_max_pre_emphasis_for_voltage_swing below) and must have one entry
 * per swing level up to VOLTAGE_SWING_MAX_LEVEL. */
58 PRE_EMPHASIS_DISABLED };
/* Post-link-training adjust-request policy limits, used by
 * perform_post_lt_adj_req_sequence(): at most 6 adjust iterations, each
 * polled up to 200 iterations of the inner timer loop.
 * NOTE(review): the enclosing enum's opening/closing lines are not visible
 * in this view; units of the timeout (loop iterations vs ms) should be
 * confirmed against the polling loop's delay. */
61 POST_LT_ADJ_REQ_LIMIT = 6,
62 POST_LT_ADJ_REQ_TIMEOUT = 200
65 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* One candidate (lane count, link rate) pair for link-training fallback. */
66 struct dp_lt_fallback_entry {
67 enum dc_lane_count lane_count;
68 enum dc_link_rate link_rate;
/* NOTE(review): the struct's closing brace line is not visible in this
 * view of the file. */
71 static const struct dp_lt_fallback_entry dp_lt_fallbacks[] = {
72 /* This link training fallback array is ordered by
73 * link bandwidth from highest to lowest.
74 * DP specs makes it a normative policy to always
75 * choose the next highest link bandwidth during
76 * link training fallback.
/* Entries mix 8b/10b rates (LOW..HIGH3) and 128b/132b UHBR rates,
 * sorted purely by total bandwidth (lane_count * link_rate). */
78 {LANE_COUNT_FOUR, LINK_RATE_UHBR20},
79 {LANE_COUNT_FOUR, LINK_RATE_UHBR13_5},
80 {LANE_COUNT_TWO, LINK_RATE_UHBR20},
81 {LANE_COUNT_FOUR, LINK_RATE_UHBR10},
82 {LANE_COUNT_TWO, LINK_RATE_UHBR13_5},
83 {LANE_COUNT_FOUR, LINK_RATE_HIGH3},
84 {LANE_COUNT_ONE, LINK_RATE_UHBR20},
85 {LANE_COUNT_TWO, LINK_RATE_UHBR10},
86 {LANE_COUNT_FOUR, LINK_RATE_HIGH2},
87 {LANE_COUNT_ONE, LINK_RATE_UHBR13_5},
88 {LANE_COUNT_TWO, LINK_RATE_HIGH3},
89 {LANE_COUNT_ONE, LINK_RATE_UHBR10},
90 {LANE_COUNT_TWO, LINK_RATE_HIGH2},
91 {LANE_COUNT_FOUR, LINK_RATE_HIGH},
92 {LANE_COUNT_ONE, LINK_RATE_HIGH3},
93 {LANE_COUNT_FOUR, LINK_RATE_LOW},
94 {LANE_COUNT_ONE, LINK_RATE_HIGH2},
95 {LANE_COUNT_TWO, LINK_RATE_HIGH},
96 {LANE_COUNT_TWO, LINK_RATE_LOW},
97 {LANE_COUNT_ONE, LINK_RATE_HIGH},
98 {LANE_COUNT_ONE, LINK_RATE_LOW},
/* Forward declarations for static helpers defined later in this file. */
102 static bool decide_fallback_link_setting(
103 struct dc_link_settings initial_link_settings,
104 struct dc_link_settings *current_link_setting,
105 enum link_training_result training_result);
106 static struct dc_link_settings get_common_supported_link_settings(
107 struct dc_link_settings link_setting_a,
108 struct dc_link_settings link_setting_b);
109 static void maximize_lane_settings(const struct link_training_settings *lt_settings,
110 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX]);
111 static void override_lane_settings(const struct link_training_settings *lt_settings,
112 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX]);
/* Return the AUX read interval (in microseconds) to wait between lane-status
 * polls during the clock-recovery (CR) phase of link training.
 * Defaults to 100 us; for DPCD rev >= 1.2 with 8b/10b encoding the sink's
 * DP_TRAINING_AUX_RD_INTERVAL field overrides it, in units of 4 ms
 * (field value * 4000 us).
 * NOTE(review): the read call heads, the #else branch and closing lines are
 * missing from this view of the file; both visible branches read the same
 * DPCD register and apply the same scaling.
 */
114 static uint32_t get_cr_training_aux_rd_interval(struct dc_link *link,
115 const struct dc_link_settings *link_settings)
117 union training_aux_rd_interval training_rd_interval;
118 uint32_t wait_in_micro_secs = 100;
119 #if defined(CONFIG_DRM_AMD_DC_DCN)
120 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
121 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING &&
122 link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
125 DP_TRAINING_AUX_RD_INTERVAL,
126 (uint8_t *)&training_rd_interval,
127 sizeof(training_rd_interval));
128 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
129 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
134 DP_TRAINING_AUX_RD_INTERVAL,
135 (uint8_t *)&training_rd_interval,
136 sizeof(training_rd_interval));
137 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
138 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
140 return wait_in_micro_secs;
/* Return the AUX read interval (in microseconds) to wait between lane-status
 * polls during the channel-equalization (EQ) phase of link training.
 * 128b/132b links read DP_128b_132b_TRAINING_AUX_RD_INTERVAL; 8b/10b links
 * with DPCD rev >= 1.2 read DP_TRAINING_AUX_RD_INTERVAL. The raw field is
 * then mapped through the switch below (e.g. 3 -> 12 ms, 6 -> 64 ms).
 * The non-DCN build defaults to 400 us and scales the DPCD field by 4 ms.
 * NOTE(review): several lines (read call heads, early switch cases, #else /
 * #endif markers, closing braces) are missing from this view of the file.
 */
143 static uint32_t get_eq_training_aux_rd_interval(
144 struct dc_link *link,
145 const struct dc_link_settings *link_settings)
147 #if defined(CONFIG_DRM_AMD_DC_DCN)
148 union training_aux_rd_interval training_rd_interval;
150 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
151 if (dp_get_link_encoding_format(link_settings) == DP_128b_132b_ENCODING) {
154 DP_128b_132b_TRAINING_AUX_RD_INTERVAL,
155 (uint8_t *)&training_rd_interval,
156 sizeof(training_rd_interval));
157 } else if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING &&
158 link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
161 DP_TRAINING_AUX_RD_INTERVAL,
162 (uint8_t *)&training_rd_interval,
163 sizeof(training_rd_interval));
/* Map the raw DPCD interval code to microseconds. */
166 switch (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL) {
170 case 3: return 12000;
171 case 4: return 16000;
172 case 5: return 32000;
173 case 6: return 64000;
/* Fallback (non-DCN) implementation below. */
177 union training_aux_rd_interval training_rd_interval;
178 uint32_t wait_in_micro_secs = 400;
180 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
181 /* overwrite the delay if rev > 1.1*/
182 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
183 /* DP 1.2 or later - retrieve delay through
184 * "DPCD_ADDR_TRAINING_AUX_RD_INTERVAL" register */
187 DP_TRAINING_AUX_RD_INTERVAL,
188 (uint8_t *)&training_rd_interval,
189 sizeof(training_rd_interval));
191 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
192 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
195 return wait_in_micro_secs;
/* Block for the given training AUX read interval. On DCN builds, intervals
 * above 16 ms use msleep() (sleeping delay) instead of udelay() (busy-wait)
 * to avoid long busy loops; otherwise busy-wait the exact microsecond count.
 * NOTE(review): the #else/#endif lines and the log-argument lines are
 * missing from this view of the file.
 */
199 void dp_wait_for_training_aux_rd_interval(
200 struct dc_link *link,
201 uint32_t wait_in_micro_secs)
203 #if defined(CONFIG_DRM_AMD_DC_DCN)
204 if (wait_in_micro_secs > 16000)
205 msleep(wait_in_micro_secs/1000);
207 udelay(wait_in_micro_secs);
209 udelay(wait_in_micro_secs);
212 DC_LOG_HW_LINK_TRAINING("%s:\n wait = %d\n",
/* Translate a driver-internal training pattern enum into the value written
 * to the sink's DPCD TRAINING_PATTERN_SET field. Defaults to VIDEOIDLE and
 * logs an error for unrecognized patterns.
 * NOTE(review): the switch head, break statements, default label and #endif
 * are missing from this view of the file.
 */
217 enum dpcd_training_patterns
218 dc_dp_training_pattern_to_dpcd_training_pattern(
219 struct dc_link *link,
220 enum dc_dp_training_pattern pattern)
222 enum dpcd_training_patterns dpcd_tr_pattern =
223 DPCD_TRAINING_PATTERN_VIDEOIDLE;
226 case DP_TRAINING_PATTERN_SEQUENCE_1:
227 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_1;
229 case DP_TRAINING_PATTERN_SEQUENCE_2:
230 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_2;
232 case DP_TRAINING_PATTERN_SEQUENCE_3:
233 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_3;
235 case DP_TRAINING_PATTERN_SEQUENCE_4:
236 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_4;
238 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* 128b/132b (DP 2.x) training patterns. */
239 case DP_128b_132b_TPS1:
240 dpcd_tr_pattern = DPCD_128b_132b_TPS1;
242 case DP_128b_132b_TPS2:
243 dpcd_tr_pattern = DPCD_128b_132b_TPS2;
245 case DP_128b_132b_TPS2_CDS:
246 dpcd_tr_pattern = DPCD_128b_132b_TPS2_CDS;
249 case DP_TRAINING_PATTERN_VIDEOIDLE:
250 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_VIDEOIDLE;
254 DC_LOG_HW_LINK_TRAINING("%s: Invalid HW Training pattern: %d\n",
259 return dpcd_tr_pattern;
/* Write the requested training pattern to the sink's DP_TRAINING_PATTERN_SET
 * DPCD register (v1.4 layout), translating the driver enum first, then log
 * the register address and value written.
 */
262 static void dpcd_set_training_pattern(
263 struct dc_link *link,
264 enum dc_dp_training_pattern training_pattern)
266 union dpcd_training_pattern dpcd_pattern = {0};
268 dpcd_pattern.v1_4.TRAINING_PATTERN_SET =
269 dc_dp_training_pattern_to_dpcd_training_pattern(
270 link, training_pattern);
272 core_link_write_dpcd(
274 DP_TRAINING_PATTERN_SET,
278 DC_LOG_HW_LINK_TRAINING("%s\n %x pattern = %x\n",
280 DP_TRAINING_PATTERN_SET,
281 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
/* Select the training pattern for the clock-recovery phase based on the
 * link encoding: TPS1 for 8b/10b, 128b/132b TPS1 for DP 2.x links.
 * NOTE(review): the default case and closing lines are missing from this
 * view of the file.
 */
284 static enum dc_dp_training_pattern decide_cr_training_pattern(
285 const struct dc_link_settings *link_settings)
287 switch (dp_get_link_encoding_format(link_settings)) {
288 case DP_8b_10b_ENCODING:
290 return DP_TRAINING_PATTERN_SEQUENCE_1;
291 #if defined(CONFIG_DRM_AMD_DC_DCN)
292 case DP_128b_132b_ENCODING:
293 return DP_128b_132b_TPS1;
/* Select the training pattern for the channel-equalization phase: the
 * highest TPS supported by BOTH the link encoder (TX caps) and the sink
 * (DPCD RX caps). The link encoder is looked up dynamically when DIG
 * mapping is flexible, otherwise the statically assigned one is used.
 * The DCN build handles 8b/10b (TPS2/3/4) and 128b/132b (TPS2); the
 * fallback build computes the same TPS2/3/4 intersection for 8b/10b only.
 * NOTE(review): #else/#endif markers, break statements and returns are
 * missing from this view of the file — the two halves below are the two
 * sides of a CONFIG_DRM_AMD_DC_DCN conditional.
 */
298 static enum dc_dp_training_pattern decide_eq_training_pattern(struct dc_link *link,
299 const struct dc_link_settings *link_settings)
301 struct link_encoder *link_enc;
302 #if defined(CONFIG_DRM_AMD_DC_DCN)
303 struct encoder_feature_support *enc_caps;
304 struct dpcd_caps *rx_caps = &link->dpcd_caps;
305 enum dc_dp_training_pattern pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
307 /* Access link encoder capability based on whether it is statically
308 * or dynamically assigned to a link.
310 if (link->is_dig_mapping_flexible &&
311 link->dc->res_pool->funcs->link_encs_assign)
312 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
314 link_enc = link->link_enc;
316 enc_caps = &link_enc->features;
318 switch (dp_get_link_encoding_format(link_settings)) {
319 case DP_8b_10b_ENCODING:
/* Prefer TPS4, then TPS3, if both TX and RX support them. */
320 if (enc_caps->flags.bits.IS_TPS4_CAPABLE &&
321 rx_caps->max_down_spread.bits.TPS4_SUPPORTED)
322 pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
323 else if (enc_caps->flags.bits.IS_TPS3_CAPABLE &&
324 rx_caps->max_ln_count.bits.TPS3_SUPPORTED)
325 pattern = DP_TRAINING_PATTERN_SEQUENCE_3;
327 pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
329 case DP_128b_132b_ENCODING:
330 pattern = DP_128b_132b_TPS2;
333 pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
/* Non-DCN fallback: same TX/RX capability intersection for 8b/10b. */
338 enum dc_dp_training_pattern highest_tp = DP_TRAINING_PATTERN_SEQUENCE_2;
339 struct encoder_feature_support *features;
340 struct dpcd_caps *dpcd_caps = &link->dpcd_caps;
342 /* Access link encoder capability based on whether it is statically
343 * or dynamically assigned to a link.
345 if (link->is_dig_mapping_flexible &&
346 link->dc->res_pool->funcs->link_encs_assign)
347 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
349 link_enc = link->link_enc;
351 features = &link_enc->features;
353 if (features->flags.bits.IS_TPS3_CAPABLE)
354 highest_tp = DP_TRAINING_PATTERN_SEQUENCE_3;
356 if (features->flags.bits.IS_TPS4_CAPABLE)
357 highest_tp = DP_TRAINING_PATTERN_SEQUENCE_4;
359 if (dpcd_caps->max_down_spread.bits.TPS4_SUPPORTED &&
360 highest_tp >= DP_TRAINING_PATTERN_SEQUENCE_4)
361 return DP_TRAINING_PATTERN_SEQUENCE_4;
363 if (dpcd_caps->max_ln_count.bits.TPS3_SUPPORTED &&
364 highest_tp >= DP_TRAINING_PATTERN_SEQUENCE_3)
365 return DP_TRAINING_PATTERN_SEQUENCE_3;
367 return DP_TRAINING_PATTERN_SEQUENCE_2;
234 
371 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* Convert a driver link-rate enum to the raw value written to DPCD
 * DP_LINK_BW_SET: UHBR rates map to dedicated DPCD codes, while 8b/10b
 * rates are written through directly (the enum already matches the DPCD
 * encoding).
 * NOTE(review): the case-body assignment lines for the UHBR rates are
 * missing from this view of the file.
 */
372 static uint8_t get_dpcd_link_rate(const struct dc_link_settings *link_settings)
374 uint8_t link_rate = 0;
375 enum dp_link_encoding encoding = dp_get_link_encoding_format(link_settings);
377 if (encoding == DP_128b_132b_ENCODING)
378 switch (link_settings->link_rate) {
379 case LINK_RATE_UHBR10:
382 case LINK_RATE_UHBR20:
385 case LINK_RATE_UHBR13_5:
392 else if (encoding == DP_8b_10b_ENCODING)
393 link_rate = (uint8_t) link_settings->link_rate;
/* Program the sink's link configuration over DPCD before training:
 * downspread control, lane count (with enhanced framing and, for physical
 * endpoints training below TPS4, POST_LT_ADJ_REQ_GRANTED), and link rate —
 * either via DP_LINK_RATE_SET (eDP 1.4+ link-rate-table mode, with a MUX
 * workaround that re-reads DP_SUPPORTED_LINK_RATES first) or via
 * DP_LINK_BW_SET. Returns the status of the last DPCD write.
 * NOTE(review): the #else branch lines and several closing braces are
 * missing from this view of the file.
 */
401 enum dc_status dpcd_set_link_settings(
402 struct dc_link *link,
403 const struct link_training_settings *lt_settings)
406 enum dc_status status;
408 union down_spread_ctrl downspread = {0};
409 union lane_count_set lane_count_set = {0};
411 downspread.raw = (uint8_t)
412 (lt_settings->link_settings.link_spread);
414 lane_count_set.bits.LANE_COUNT_SET =
415 lt_settings->link_settings.lane_count;
417 lane_count_set.bits.ENHANCED_FRAMING = lt_settings->enhanced_framing;
418 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
/* Grant post-LT adjust requests only for physical endpoints that are not
 * training with TPS4 (where post-LT adjust does not apply). */
421 if (link->ep_type == DISPLAY_ENDPOINT_PHY &&
422 lt_settings->pattern_for_eq < DP_TRAINING_PATTERN_SEQUENCE_4) {
423 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED =
424 link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED;
427 status = core_link_write_dpcd(link, DP_DOWNSPREAD_CTRL,
428 &downspread.raw, sizeof(downspread));
430 status = core_link_write_dpcd(link, DP_LANE_COUNT_SET,
431 &lane_count_set.raw, 1);
433 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_14 &&
434 lt_settings->link_settings.use_link_rate_set == true) {
436 /* WA for some MUX chips that will power down with eDP and lose supported
437 * link rate set for eDP 1.4. Source reads DPCD 0x010 again to ensure
438 * MUX chip gets link rate set back before link training.
440 if (link->connector_signal == SIGNAL_TYPE_EDP) {
441 uint8_t supported_link_rates[16];
443 core_link_read_dpcd(link, DP_SUPPORTED_LINK_RATES,
444 supported_link_rates, sizeof(supported_link_rates));
446 status = core_link_write_dpcd(link, DP_LINK_BW_SET, &rate, 1);
447 status = core_link_write_dpcd(link, DP_LINK_RATE_SET,
448 <_settings->link_settings.link_rate_set, 1);
450 #if defined(CONFIG_DRM_AMD_DC_DCN)
451 rate = get_dpcd_link_rate(<_settings->link_settings);
453 rate = (uint8_t) (lt_settings->link_settings.link_rate);
455 status = core_link_write_dpcd(link, DP_LINK_BW_SET, &rate, 1);
/* Log what was programmed (rate vs rate-set path). */
459 DC_LOG_HW_LINK_TRAINING("%s\n %x rate = %x\n %x lane = %x framing = %x\n %x spread = %x\n",
462 lt_settings->link_settings.link_rate,
464 lt_settings->link_settings.lane_count,
465 lt_settings->enhanced_framing,
467 lt_settings->link_settings.link_spread);
469 DC_LOG_HW_LINK_TRAINING("%s\n %x rate set = %x\n %x lane = %x framing = %x\n %x spread = %x\n",
472 lt_settings->link_settings.link_rate_set,
474 lt_settings->link_settings.lane_count,
475 lt_settings->enhanced_framing,
477 lt_settings->link_settings.link_spread);
/* Return the SCRAMBLING_DISABLE bit value for the given training pattern:
 * 1 for 8b/10b TPS1-TPS3 (scrambling must be off during those patterns),
 * 0 for TPS4 and the 128b/132b patterns (which run with scrambling on).
 * Logs an error for unrecognized patterns.
 * NOTE(review): the switch head, break statements, default label and #endif
 * are missing from this view of the file.
 */
483 uint8_t dc_dp_initialize_scrambling_data_symbols(
484 struct dc_link *link,
485 enum dc_dp_training_pattern pattern)
487 uint8_t disable_scrabled_data_symbols = 0;
490 case DP_TRAINING_PATTERN_SEQUENCE_1:
491 case DP_TRAINING_PATTERN_SEQUENCE_2:
492 case DP_TRAINING_PATTERN_SEQUENCE_3:
493 disable_scrabled_data_symbols = 1;
495 case DP_TRAINING_PATTERN_SEQUENCE_4:
496 #if defined(CONFIG_DRM_AMD_DC_DCN)
497 case DP_128b_132b_TPS1:
498 case DP_128b_132b_TPS2:
500 disable_scrabled_data_symbols = 0;
504 DC_LOG_HW_LINK_TRAINING("%s: Invalid HW Training pattern: %d\n",
508 return disable_scrabled_data_symbols;
/* True when 'offset' addresses an LTTPR repeater (offset != 0) and the link
 * is operating LTTPRs in non-transparent mode; offset 0 means the sink. */
511 static inline bool is_repeater(struct dc_link *link, uint32_t offset)
513 return (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) && (offset != 0);
/* Write the training pattern and per-lane drive settings to the DPCD in a
 * single burst (pattern byte + one byte per lane), retargeting the base
 * address to the per-repeater register block when 'offset' addresses an
 * LTTPR. An eDP workaround splits the write into two parts, and 128b/132b
 * links write the whole staged buffer. Extensive logging covers both the
 * 8b/10b (VS/PE) and 128b/132b (FFE preset) layouts.
 * NOTE(review): many lines — the parameter list tail, log-argument lines,
 * #else/#endif markers and write-call arguments — are missing from this
 * view of the file.
 */
516 static void dpcd_set_lt_pattern_and_lane_settings(
517 struct dc_link *link,
518 const struct link_training_settings *lt_settings,
519 enum dc_dp_training_pattern pattern,
522 uint32_t dpcd_base_lt_offset;
524 uint8_t dpcd_lt_buffer[5] = {0};
525 union dpcd_training_pattern dpcd_pattern = { 0 };
526 uint32_t size_in_bytes;
527 bool edp_workaround = false; /* TODO link_prop.INTERNAL */
528 dpcd_base_lt_offset = DP_TRAINING_PATTERN_SET;
/* Repeaters have their own TRAINING_PATTERN_SET block per LTTPR index. */
530 if (is_repeater(link, offset))
531 dpcd_base_lt_offset = DP_TRAINING_PATTERN_SET_PHY_REPEATER1 +
532 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
534 /*****************************************************************
535 * DpcdAddress_TrainingPatternSet
536 *****************************************************************/
537 dpcd_pattern.v1_4.TRAINING_PATTERN_SET =
538 dc_dp_training_pattern_to_dpcd_training_pattern(link, pattern);
540 dpcd_pattern.v1_4.SCRAMBLING_DISABLE =
541 dc_dp_initialize_scrambling_data_symbols(link, pattern);
543 dpcd_lt_buffer[DP_TRAINING_PATTERN_SET - DP_TRAINING_PATTERN_SET]
546 if (is_repeater(link, offset)) {
547 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Repeater ID: %d\n 0x%X pattern = %x\n",
551 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
553 DC_LOG_HW_LINK_TRAINING("%s\n 0x%X pattern = %x\n",
556 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
559 /* concatenate everything into one buffer*/
560 size_in_bytes = lt_settings->link_settings.lane_count *
561 sizeof(lt_settings->dpcd_lane_settings[0]);
565 &dpcd_lt_buffer[DP_TRAINING_LANE0_SET - DP_TRAINING_PATTERN_SET],
566 lt_settings->dpcd_lane_settings,
/* Log per-lane settings: FFE preset for 128b/132b, VS/PE for 8b/10b. */
569 if (is_repeater(link, offset)) {
570 #if defined(CONFIG_DRM_AMD_DC_DCN)
571 if (dp_get_link_encoding_format(<_settings->link_settings) ==
572 DP_128b_132b_ENCODING)
573 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
574 " 0x%X TX_FFE_PRESET_VALUE = %x\n",
578 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
579 else if (dp_get_link_encoding_format(<_settings->link_settings) ==
582 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
583 " 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
587 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
588 lt_settings->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
589 lt_settings->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
590 lt_settings->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
592 #if defined(CONFIG_DRM_AMD_DC_DCN)
593 if (dp_get_link_encoding_format(<_settings->link_settings) ==
594 DP_128b_132b_ENCODING)
595 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X TX_FFE_PRESET_VALUE = %x\n",
598 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
599 else if (dp_get_link_encoding_format(<_settings->link_settings) ==
602 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
605 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
606 lt_settings->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
607 lt_settings->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
608 lt_settings->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
610 if (edp_workaround) {
611 /* for eDP write in 2 parts because the 5-byte burst is
612 * causing issues on some eDP panels (EPR#366724)
614 core_link_write_dpcd(
616 DP_TRAINING_PATTERN_SET,
618 sizeof(dpcd_pattern.raw));
620 core_link_write_dpcd(
622 DP_TRAINING_LANE0_SET,
623 (uint8_t *)(lt_settings->dpcd_lane_settings),
626 #if defined(CONFIG_DRM_AMD_DC_DCN)
627 } else if (dp_get_link_encoding_format(<_settings->link_settings) ==
628 DP_128b_132b_ENCODING) {
629 core_link_write_dpcd(
633 sizeof(dpcd_lt_buffer));
636 /* write it all in (1 + number-of-lanes)-byte burst*/
637 core_link_write_dpcd(
641 size_in_bytes + sizeof(dpcd_pattern.raw));
/* True when every active lane reports CR_DONE in its DPCD lane status.
 * NOTE(review): the loop body's return statements are missing from this
 * view of the file. */
644 bool dp_is_cr_done(enum dc_lane_count ln_count,
645 union lane_status *dpcd_lane_status)
648 /*LANEx_CR_DONE bits All 1's?*/
649 for (lane = 0; lane < (uint32_t)(ln_count); lane++) {
650 if (!dpcd_lane_status[lane].bits.CR_DONE_0)
/* True when every active lane reports CHANNEL_EQ_DONE in its lane status.
 * NOTE(review): return statements are missing from this view of the file. */
656 bool dp_is_ch_eq_done(enum dc_lane_count ln_count,
657 union lane_status *dpcd_lane_status)
661 for (lane = 0; lane < (uint32_t)(ln_count); lane++)
662 if (!dpcd_lane_status[lane].bits.CHANNEL_EQ_DONE_0)
/* True when every active lane reports SYMBOL_LOCKED in its lane status.
 * NOTE(review): return statements are missing from this view of the file. */
667 bool dp_is_symbol_locked(enum dc_lane_count ln_count,
668 union lane_status *dpcd_lane_status)
672 for (lane = 0; lane < (uint32_t)(ln_count); lane++)
673 if (!dpcd_lane_status[lane].bits.SYMBOL_LOCKED_0)
/* True when the sink reports INTERLANE_ALIGN_DONE in its aligned-status
 * DPCD byte. */
678 bool dp_is_interlane_aligned(union lane_align_status_updated align_status)
680 return align_status.bits.INTERLANE_ALIGN_DONE == 1;
/* Convert hardware lane settings to the DPCD training-lane byte layout for
 * all LANE_COUNT_DP_MAX lanes: 8b/10b links pack voltage swing /
 * pre-emphasis plus the "max reached" flags; 128b/132b links pack the
 * TX FFE preset instead.
 * NOTE(review): closing braces and the #endif are missing from this view of
 * the file.
 */
683 void dp_hw_to_dpcd_lane_settings(
684 const struct link_training_settings *lt_settings,
685 const struct dc_lane_settings hw_lane_settings[LANE_COUNT_DP_MAX],
686 union dpcd_training_lane dpcd_lane_settings[LANE_COUNT_DP_MAX])
690 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
691 if (dp_get_link_encoding_format(<_settings->link_settings) ==
692 DP_8b_10b_ENCODING) {
693 dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET =
694 (uint8_t)(hw_lane_settings[lane].VOLTAGE_SWING);
695 dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET =
696 (uint8_t)(hw_lane_settings[lane].PRE_EMPHASIS);
/* Max-reached flags tell the sink no higher level can be requested. */
697 dpcd_lane_settings[lane].bits.MAX_SWING_REACHED =
698 (hw_lane_settings[lane].VOLTAGE_SWING ==
699 VOLTAGE_SWING_MAX_LEVEL ? 1 : 0);
700 dpcd_lane_settings[lane].bits.MAX_PRE_EMPHASIS_REACHED =
701 (hw_lane_settings[lane].PRE_EMPHASIS ==
702 PRE_EMPHASIS_MAX_LEVEL ? 1 : 0);
704 #if defined(CONFIG_DRM_AMD_DC_DCN)
705 else if (dp_get_link_encoding_format(<_settings->link_settings) ==
706 DP_128b_132b_ENCODING) {
707 dpcd_lane_settings[lane].tx_ffe.PRESET_VALUE =
708 hw_lane_settings[lane].FFE_PRESET.settings.level;
/* Decide the next hardware lane settings from the sink's per-lane adjust
 * requests (VS/PE for 8b/10b, FFE preset for 128b/132b), then mirror them
 * into DPCD form. If per-lane settings are disallowed, the max across
 * lanes is applied uniformly and explicit overrides are honored before
 * re-deriving the DPCD bytes.
 * NOTE(review): some field-name tails and closing braces are missing from
 * this view of the file.
 */
714 void dp_decide_lane_settings(
715 const struct link_training_settings *lt_settings,
716 const union lane_adjust ln_adjust[LANE_COUNT_DP_MAX],
717 struct dc_lane_settings hw_lane_settings[LANE_COUNT_DP_MAX],
718 union dpcd_training_lane dpcd_lane_settings[LANE_COUNT_DP_MAX])
722 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
723 if (dp_get_link_encoding_format(<_settings->link_settings) ==
724 DP_8b_10b_ENCODING) {
725 hw_lane_settings[lane].VOLTAGE_SWING =
726 (enum dc_voltage_swing)(ln_adjust[lane].bits.
728 hw_lane_settings[lane].PRE_EMPHASIS =
729 (enum dc_pre_emphasis)(ln_adjust[lane].bits.
732 #if defined(CONFIG_DRM_AMD_DC_DCN)
733 else if (dp_get_link_encoding_format(<_settings->link_settings) ==
734 DP_128b_132b_ENCODING) {
735 hw_lane_settings[lane].FFE_PRESET.raw =
736 ln_adjust[lane].tx_ffe.PRESET_VALUE;
740 dp_hw_to_dpcd_lane_settings(lt_settings, hw_lane_settings, dpcd_lane_settings);
742 if (lt_settings->disallow_per_lane_settings) {
743 /* we find the maximum of the requested settings across all lanes*/
744 /* and set this maximum for all lanes*/
745 maximize_lane_settings(lt_settings, hw_lane_settings);
746 override_lane_settings(lt_settings, hw_lane_settings);
748 if (lt_settings->always_match_dpcd_with_hw_lane_settings)
749 dp_hw_to_dpcd_lane_settings(lt_settings, hw_lane_settings, dpcd_lane_settings);
/* Extract the 4-bit field for lane 'index' from a packed DPCD byte buffer
 * (two lanes per byte). NOTE(review): the shift/mask lines selecting the
 * high vs low nibble are missing from this view of the file. */
754 static uint8_t get_nibble_at_index(const uint8_t *buf,
758 nibble = buf[index / 2];
/* Return the maximum pre-emphasis level permitted at the given voltage
 * swing, from the voltage_swing_to_pre_emphasis[] table; out-of-range swing
 * values fall back to PRE_EMPHASIS_MAX_LEVEL.
 * NOTE(review): the return statement and closing brace are missing from
 * this view of the file. */
768 static enum dc_pre_emphasis get_max_pre_emphasis_for_voltage_swing(
769 enum dc_voltage_swing voltage)
771 enum dc_pre_emphasis pre_emphasis;
772 pre_emphasis = PRE_EMPHASIS_MAX_LEVEL;
774 if (voltage <= VOLTAGE_SWING_MAX_LEVEL)
775 pre_emphasis = voltage_swing_to_pre_emphasis[voltage];
/* Replace per-lane settings with the maximum requested across all active
 * lanes (VS, PE, and on DCN builds the FFE preset), clamped to the
 * hardware maxima, with pre-emphasis additionally capped by the maximum
 * allowed for the chosen voltage swing. The result is written back to
 * every lane so all lanes drive identically.
 * NOTE(review): #endif lines and closing braces are missing from this view
 * of the file.
 */
781 static void maximize_lane_settings(const struct link_training_settings *lt_settings,
782 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX])
785 struct dc_lane_settings max_requested;
787 max_requested.VOLTAGE_SWING = lane_settings[0].VOLTAGE_SWING;
788 max_requested.PRE_EMPHASIS = lane_settings[0].PRE_EMPHASIS;
789 #if defined(CONFIG_DRM_AMD_DC_DCN)
790 max_requested.FFE_PRESET = lane_settings[0].FFE_PRESET;
793 /* Determine what the maximum of the requested settings are*/
794 for (lane = 1; lane < lt_settings->link_settings.lane_count; lane++) {
795 if (lane_settings[lane].VOLTAGE_SWING > max_requested.VOLTAGE_SWING)
796 max_requested.VOLTAGE_SWING = lane_settings[lane].VOLTAGE_SWING;
798 if (lane_settings[lane].PRE_EMPHASIS > max_requested.PRE_EMPHASIS)
799 max_requested.PRE_EMPHASIS = lane_settings[lane].PRE_EMPHASIS;
800 #if defined(CONFIG_DRM_AMD_DC_DCN)
801 if (lane_settings[lane].FFE_PRESET.settings.level >
802 max_requested.FFE_PRESET.settings.level)
803 max_requested.FFE_PRESET.settings.level =
804 lane_settings[lane].FFE_PRESET.settings.level;
808 /* make sure the requested settings are
809 * not higher than maximum settings*/
810 if (max_requested.VOLTAGE_SWING > VOLTAGE_SWING_MAX_LEVEL)
811 max_requested.VOLTAGE_SWING = VOLTAGE_SWING_MAX_LEVEL;
813 if (max_requested.PRE_EMPHASIS > PRE_EMPHASIS_MAX_LEVEL)
814 max_requested.PRE_EMPHASIS = PRE_EMPHASIS_MAX_LEVEL;
815 #if defined(CONFIG_DRM_AMD_DC_DCN)
816 if (max_requested.FFE_PRESET.settings.level > DP_FFE_PRESET_MAX_LEVEL)
817 max_requested.FFE_PRESET.settings.level = DP_FFE_PRESET_MAX_LEVEL;
820 /* make sure the pre-emphasis matches the voltage swing*/
821 if (max_requested.PRE_EMPHASIS >
822 get_max_pre_emphasis_for_voltage_swing(
823 max_requested.VOLTAGE_SWING))
824 max_requested.PRE_EMPHASIS =
825 get_max_pre_emphasis_for_voltage_swing(
826 max_requested.VOLTAGE_SWING);
/* Apply the clamped maximum uniformly to every lane slot. */
828 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
829 lane_settings[lane].VOLTAGE_SWING = max_requested.VOLTAGE_SWING;
830 lane_settings[lane].PRE_EMPHASIS = max_requested.PRE_EMPHASIS;
831 #if defined(CONFIG_DRM_AMD_DC_DCN)
832 lane_settings[lane].FFE_PRESET = max_requested.FFE_PRESET;
/* Apply explicit debug/config overrides (voltage swing, pre-emphasis,
 * post-cursor2, and on DCN builds FFE preset) to the lane settings; returns
 * early when no override pointer is set.
 * NOTE(review): the early-return statement, #endif lines and closing braces
 * are missing from this view of the file. The override loop visibly starts
 * at lane = 1 — whether lane 0 is handled elsewhere cannot be determined
 * from this view; confirm against the complete source.
 */
837 static void override_lane_settings(const struct link_training_settings *lt_settings,
838 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX])
842 if (lt_settings->voltage_swing == NULL &&
843 lt_settings->pre_emphasis == NULL &&
844 #if defined(CONFIG_DRM_AMD_DC_DCN)
845 lt_settings->ffe_preset == NULL &&
847 lt_settings->post_cursor2 == NULL)
851 for (lane = 1; lane < LANE_COUNT_DP_MAX; lane++) {
852 if (lt_settings->voltage_swing)
853 lane_settings[lane].VOLTAGE_SWING = *lt_settings->voltage_swing;
854 if (lt_settings->pre_emphasis)
855 lane_settings[lane].PRE_EMPHASIS = *lt_settings->pre_emphasis;
856 if (lt_settings->post_cursor2)
857 lane_settings[lane].POST_CURSOR2 = *lt_settings->post_cursor2;
858 #if defined(CONFIG_DRM_AMD_DC_DCN)
859 if (lt_settings->ffe_preset)
860 lane_settings[lane].FFE_PRESET = *lt_settings->ffe_preset;
/* Read the lane status, alignment status, and per-lane adjust requests from
 * DPCD in one burst. For LTTPR repeaters the status base address moves to
 * the per-repeater block and the adjust bytes sit at a different offset
 * within the burst (3 vs 4). Lane status and adjust values are unpacked
 * nibble-by-nibble per lane; the alignment byte is dpcd_buf[2]. Everything
 * read is logged. Returns the status of the DPCD read.
 * NOTE(review): log-argument lines, the adjust-request unpack for the
 * repeater path, and closing lines are missing from this view of the file.
 */
865 enum dc_status dp_get_lane_status_and_lane_adjust(
866 struct dc_link *link,
867 const struct link_training_settings *link_training_setting,
868 union lane_status ln_status[LANE_COUNT_DP_MAX],
869 union lane_align_status_updated *ln_align,
870 union lane_adjust ln_adjust[LANE_COUNT_DP_MAX],
873 unsigned int lane01_status_address = DP_LANE0_1_STATUS;
874 uint8_t lane_adjust_offset = 4;
875 unsigned int lane01_adjust_address;
876 uint8_t dpcd_buf[6] = {0};
878 enum dc_status status;
880 if (is_repeater(link, offset)) {
881 lane01_status_address =
882 DP_LANE0_1_STATUS_PHY_REPEATER1 +
883 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
/* Repeater register layout packs adjust requests one byte earlier. */
884 lane_adjust_offset = 3;
887 status = core_link_read_dpcd(
889 lane01_status_address,
890 (uint8_t *)(dpcd_buf),
893 for (lane = 0; lane <
894 (uint32_t)(link_training_setting->link_settings.lane_count);
897 ln_status[lane].raw =
898 get_nibble_at_index(&dpcd_buf[0], lane);
899 ln_adjust[lane].raw =
900 get_nibble_at_index(&dpcd_buf[lane_adjust_offset], lane);
903 ln_align->raw = dpcd_buf[2];
905 if (is_repeater(link, offset)) {
906 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
907 " 0x%X Lane01Status = %x\n 0x%X Lane23Status = %x\n ",
910 lane01_status_address, dpcd_buf[0],
911 lane01_status_address + 1, dpcd_buf[1]);
913 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X Lane01Status = %x\n 0x%X Lane23Status = %x\n ",
915 lane01_status_address, dpcd_buf[0],
916 lane01_status_address + 1, dpcd_buf[1]);
918 lane01_adjust_address = DP_ADJUST_REQUEST_LANE0_1;
920 if (is_repeater(link, offset))
921 lane01_adjust_address = DP_ADJUST_REQUEST_LANE0_1_PHY_REPEATER1 +
922 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
924 if (is_repeater(link, offset)) {
925 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
926 " 0x%X Lane01AdjustRequest = %x\n 0x%X Lane23AdjustRequest = %x\n",
929 lane01_adjust_address,
930 dpcd_buf[lane_adjust_offset],
931 lane01_adjust_address + 1,
932 dpcd_buf[lane_adjust_offset + 1]);
934 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X Lane01AdjustRequest = %x\n 0x%X Lane23AdjustRequest = %x\n",
936 lane01_adjust_address,
937 dpcd_buf[lane_adjust_offset],
938 lane01_adjust_address + 1,
939 dpcd_buf[lane_adjust_offset + 1]);
/* Write the per-lane DPCD training settings (one byte per active lane) to
 * DP_TRAINING_LANE0_SET, retargeted to the per-repeater block when 'offset'
 * addresses an LTTPR. Logs either the FFE preset (128b/132b) or VS/PE with
 * max-reached flags (8b/10b) for lane 0. Returns the DPCD write status.
 * NOTE(review): log-argument lines, #else/#endif markers and closing braces
 * are missing from this view of the file.
 */
945 enum dc_status dpcd_set_lane_settings(
946 struct dc_link *link,
947 const struct link_training_settings *link_training_setting,
950 unsigned int lane0_set_address;
951 enum dc_status status;
953 lane0_set_address = DP_TRAINING_LANE0_SET;
955 if (is_repeater(link, offset))
956 lane0_set_address = DP_TRAINING_LANE0_SET_PHY_REPEATER1 +
957 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
959 status = core_link_write_dpcd(link,
961 (uint8_t *)(link_training_setting->dpcd_lane_settings),
962 link_training_setting->link_settings.lane_count);
964 if (is_repeater(link, offset)) {
965 #if defined(CONFIG_DRM_AMD_DC_DCN)
966 if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
967 DP_128b_132b_ENCODING)
968 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
969 " 0x%X TX_FFE_PRESET_VALUE = %x\n",
973 link_training_setting->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
974 else if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
977 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Repeater ID: %d\n"
978 " 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
982 link_training_setting->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
983 link_training_setting->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
984 link_training_setting->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
985 link_training_setting->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
988 #if defined(CONFIG_DRM_AMD_DC_DCN)
989 if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
990 DP_128b_132b_ENCODING)
991 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X TX_FFE_PRESET_VALUE = %x\n",
994 link_training_setting->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
995 else if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
998 DC_LOG_HW_LINK_TRAINING("%s\n 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
1001 link_training_setting->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
1002 link_training_setting->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
1003 link_training_setting->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
1004 link_training_setting->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
/* True when any active lane is already driven at VOLTAGE_SWING_MAX_LEVEL —
 * used to detect that clock recovery can no longer increase drive strength.
 * NOTE(review): the return statements and closing brace are missing from
 * this view of the file. */
1010 bool dp_is_max_vs_reached(
1011 const struct link_training_settings *lt_settings)
1014 for (lane = 0; lane <
1015 (uint32_t)(lt_settings->link_settings.lane_count);
1017 if (lt_settings->dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET
1018 == VOLTAGE_SWING_MAX_LEVEL)
/* Service the sink's post-link-training adjust requests: up to
 * POST_LT_ADJ_REQ_LIMIT rounds, each polling lane status for up to
 * POST_LT_ADJ_REQ_TIMEOUT iterations. Succeeds when the sink clears
 * POST_LT_ADJ_REQ_IN_PROGRESS; fails immediately if CR, EQ, symbol lock or
 * interlane alignment is lost. When the sink requests different VS/PE than
 * currently driven, new lane settings are decided and re-applied via
 * dc_link_dp_set_drive_settings(), and the round restarts.
 * NOTE(review): several lines (loop increments, break statements, return
 * paths, some call arguments) are missing from this view of the file.
 */
1025 static bool perform_post_lt_adj_req_sequence(
1026 struct dc_link *link,
1027 struct link_training_settings *lt_settings)
1029 enum dc_lane_count lane_count =
1030 lt_settings->link_settings.lane_count;
1032 uint32_t adj_req_count;
1033 uint32_t adj_req_timer;
1034 bool req_drv_setting_changed;
1037 req_drv_setting_changed = false;
1038 for (adj_req_count = 0; adj_req_count < POST_LT_ADJ_REQ_LIMIT;
1041 req_drv_setting_changed = false;
1043 for (adj_req_timer = 0;
1044 adj_req_timer < POST_LT_ADJ_REQ_TIMEOUT;
1047 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
1048 union lane_align_status_updated
1049 dpcd_lane_status_updated;
1050 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
1052 dp_get_lane_status_and_lane_adjust(
1056 &dpcd_lane_status_updated,
/* Sink has finished adjusting — sequence complete for this round. */
1060 if (dpcd_lane_status_updated.bits.
1061 POST_LT_ADJ_REQ_IN_PROGRESS == 0)
/* Any loss of CR/EQ/lock/alignment aborts the sequence as a failure. */
1064 if (!dp_is_cr_done(lane_count, dpcd_lane_status))
1067 if (!dp_is_ch_eq_done(lane_count, dpcd_lane_status) ||
1068 !dp_is_symbol_locked(lane_count, dpcd_lane_status) ||
1069 !dp_is_interlane_aligned(dpcd_lane_status_updated))
/* Detect whether the sink is requesting different drive settings. */
1072 for (lane = 0; lane < (uint32_t)(lane_count); lane++) {
1075 dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET !=
1076 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_LANE ||
1077 lt_settings->dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET !=
1078 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_LANE) {
1080 req_drv_setting_changed = true;
1085 if (req_drv_setting_changed) {
1086 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1087 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1089 dc_link_dp_set_drive_settings(link,
1097 if (!req_drv_setting_changed) {
1098 DC_LOG_WARNING("%s: Post Link Training Adjust Request Timed out\n",
1105 DC_LOG_WARNING("%s: Post Link Training Adjust Request limit reached\n",
1113 /* Only used for channel equalization */
/* Map a raw DPCD TRAINING_AUX_RD_INTERVAL code to microseconds; code 0
 * (and any unrecognized value) yields the 400 us default, codes 1-4 map to
 * 4/8/12/16 ms, and on DCN builds codes 5-6 (128b/132b) map to 32/64 ms.
 * NOTE(review): the case labels themselves and the #endif are missing from
 * this view of the file. */
1114 uint32_t dp_translate_training_aux_read_interval(uint32_t dpcd_aux_read_interval)
1116 unsigned int aux_rd_interval_us = 400;
1118 switch (dpcd_aux_read_interval) {
1120 aux_rd_interval_us = 4000;
1123 aux_rd_interval_us = 8000;
1126 aux_rd_interval_us = 12000;
1129 aux_rd_interval_us = 16000;
1131 #if defined(CONFIG_DRM_AMD_DC_DCN)
1133 aux_rd_interval_us = 32000;
1136 aux_rd_interval_us = 64000;
1143 return aux_rd_interval_us;
/*
 * Identify which lane failed clock recovery by checking CR_DONE per active
 * lane in ascending order.  Lanes 2 and 3 share the single LANE23 failure
 * code.  Defaults to LINK_TRAINING_SUCCESS when every checked lane is done.
 * NOTE(review): the trailing "return result;" line is not visible in this
 * extract.
 */
1146 enum link_training_result dp_get_cr_failure(enum dc_lane_count ln_count,
1147 union lane_status *dpcd_lane_status)
1149 enum link_training_result result = LINK_TRAINING_SUCCESS;
1151 if (ln_count >= LANE_COUNT_ONE && !dpcd_lane_status[0].bits.CR_DONE_0)
1152 result = LINK_TRAINING_CR_FAIL_LANE0;
1153 else if (ln_count >= LANE_COUNT_TWO && !dpcd_lane_status[1].bits.CR_DONE_0)
1154 result = LINK_TRAINING_CR_FAIL_LANE1;
1155 else if (ln_count >= LANE_COUNT_FOUR && !dpcd_lane_status[2].bits.CR_DONE_0)
1156 result = LINK_TRAINING_CR_FAIL_LANE23;
1157 else if (ln_count >= LANE_COUNT_FOUR && !dpcd_lane_status[3].bits.CR_DONE_0)
1158 result = LINK_TRAINING_CR_FAIL_LANE23;
/*
 * Channel-equalization (EQ) phase of DP link training for the hop selected
 * by @offset (DPRX sink or an LTTPR).  Loops up to
 * LINK_TRAINING_MAX_RETRY_COUNT: program HW lane settings, mirror them to
 * DPCD, wait the EQ aux-rd interval, then evaluate CR / channel-EQ /
 * symbol-lock / interlane-align status reported by the receiver.
 * Fix in this revision: restored "&lt_settings" where HTML-entity
 * corruption had turned it into "<_settings".
 * NOTE(review): several interior lines (braces, argument lines) are missing
 * from this extract; do not infer them from the comments alone.
 */
1162 static enum link_training_result perform_channel_equalization_sequence(
1163 struct dc_link *link,
1164 struct link_training_settings *lt_settings,
1167 enum dc_dp_training_pattern tr_pattern;
1168 uint32_t retries_ch_eq;
1169 uint32_t wait_time_microsec;
1170 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
1171 union lane_align_status_updated dpcd_lane_status_updated = {0};
1172 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
1173 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
1175 /* Note: also check that TPS4 is a supported feature*/
1176 tr_pattern = lt_settings->pattern_for_eq;
1178 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* LTTPR hops are trained with TPS4 when using 8b/10b encoding. */
1179 if (is_repeater(link, offset) && dp_get_link_encoding_format(&lt_settings->link_settings) == DP_8b_10b_ENCODING)
1180 tr_pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
1182 if (is_repeater(link, offset))
1183 tr_pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
1186 dp_set_hw_training_pattern(link, tr_pattern, offset);
1188 for (retries_ch_eq = 0; retries_ch_eq <= LINK_TRAINING_MAX_RETRY_COUNT;
/* 1. program drive settings on the TX side */
1191 dp_set_hw_lane_settings(link, lt_settings, offset);
1195 /* EPR #361076 - write as a 5-byte burst,
1196 * but only for the 1-st iteration
1199 dpcd_set_lt_pattern_and_lane_settings(
1202 tr_pattern, offset);
1204 dpcd_set_lane_settings(link, lt_settings, offset);
1206 /* 3. wait for receiver to lock-on*/
1207 wait_time_microsec = lt_settings->eq_pattern_time;
/* LTTPR hops publish their own AUX read interval in DPCD */
1209 if (is_repeater(link, offset))
1210 wait_time_microsec =
1211 dp_translate_training_aux_read_interval(
1212 link->dpcd_caps.lttpr_caps.aux_rd_interval[offset - 1]);
1214 dp_wait_for_training_aux_rd_interval(
1216 wait_time_microsec);
1218 /* 4. Read lane status and requested
1219 * drive settings as set by the sink*/
1221 dp_get_lane_status_and_lane_adjust(
1225 &dpcd_lane_status_updated,
1229 /* 5. check CR done*/
1230 if (!dp_is_cr_done(lane_count, dpcd_lane_status))
1231 return LINK_TRAINING_EQ_FAIL_CR;
1233 /* 6. check CHEQ done*/
1234 if (dp_is_ch_eq_done(lane_count, dpcd_lane_status) &&
1235 dp_is_symbol_locked(lane_count, dpcd_lane_status) &&
1236 dp_is_interlane_aligned(dpcd_lane_status_updated))
1237 return LINK_TRAINING_SUCCESS;
1239 /* 7. update VS/PE/PC2 in lt_settings*/
1240 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1241 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
/* Retries exhausted without achieving channel EQ */
1244 return LINK_TRAINING_EQ_FAIL_EQ;
/*
 * Start transmitting the clock-recovery training pattern (TPS1, per the log
 * string) and program HW lane settings before the usual DPCD programming —
 * used with the lt_early_cr_pattern workaround.
 */
1248 static void start_clock_recovery_pattern_early(struct dc_link *link,
1249 struct link_training_settings *lt_settings,
1252 DC_LOG_HW_LINK_TRAINING("%s\n GPU sends TPS1. Wait 400us.\n",
1254 dp_set_hw_training_pattern(link, lt_settings->pattern_for_cr, offset);
1255 dp_set_hw_lane_settings(link, lt_settings, offset);
/*
 * Clock-recovery (CR) phase of DP link training for the hop selected by
 * @offset.  Retries while neither the same-settings retry limit
 * (LINK_TRAINING_MAX_RETRY_COUNT) nor the overall CR retry limit
 * (LINK_TRAINING_MAX_CR_RETRY) is hit: drive lane settings, update DPCD,
 * wait the CR interval, read lane status, then bail out on CR done, max
 * voltage swing, or repeated identical adjust requests.
 * Fix in this revision: restored "&lt_settings" where HTML-entity
 * corruption had turned it into "<_settings" (three occurrences).
 * NOTE(review): the initializations of retries_cr/retry_count and several
 * interior lines are missing from this extract.
 */
1259 static enum link_training_result perform_clock_recovery_sequence(
1260 struct dc_link *link,
1261 struct link_training_settings *lt_settings,
1264 uint32_t retries_cr;
1265 uint32_t retry_count;
1266 uint32_t wait_time_microsec;
1267 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
1268 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
1269 union lane_align_status_updated dpcd_lane_status_updated;
1270 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
1275 if (!link->ctx->dc->work_arounds.lt_early_cr_pattern)
1276 dp_set_hw_training_pattern(link, lt_settings->pattern_for_cr, offset);
1278 /* najeeb - The synaptics MST hub can put the LT in
1279 * infinite loop by switching the VS
1281 /* between level 0 and level 1 continuously, here
1282 * we try for CR lock for LinkTrainingMaxCRRetry count*/
1283 while ((retries_cr < LINK_TRAINING_MAX_RETRY_COUNT) &&
1284 (retry_count < LINK_TRAINING_MAX_CR_RETRY)) {
1286 memset(&dpcd_lane_status, '\0', sizeof(dpcd_lane_status));
1287 memset(&dpcd_lane_status_updated, '\0',
1288 sizeof(dpcd_lane_status_updated));
1290 /* 1. call HWSS to set lane settings*/
1291 dp_set_hw_lane_settings(
1296 /* 2. update DPCD of the receiver*/
1298 /* EPR #361076 - write as a 5-byte burst,
1299 * but only for the 1-st iteration.*/
1300 dpcd_set_lt_pattern_and_lane_settings(
1303 lt_settings->pattern_for_cr,
1306 dpcd_set_lane_settings(
1311 /* 3. wait receiver to lock-on*/
1312 wait_time_microsec = lt_settings->cr_pattern_time;
1314 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT)
1315 wait_time_microsec = TRAINING_AUX_RD_INTERVAL;
1317 dp_wait_for_training_aux_rd_interval(
1319 wait_time_microsec);
1321 /* 4. Read lane status and requested drive
1322 * settings as set by the sink
1324 dp_get_lane_status_and_lane_adjust(
1328 &dpcd_lane_status_updated,
1332 /* 5. check CR done*/
1333 if (dp_is_cr_done(lane_count, dpcd_lane_status))
1334 return LINK_TRAINING_SUCCESS;
1336 /* 6. max VS reached*/
1337 #if defined(CONFIG_DRM_AMD_DC_DCN)
1338 if ((dp_get_link_encoding_format(&lt_settings->link_settings) ==
1339 DP_8b_10b_ENCODING) &&
1340 dp_is_max_vs_reached(lt_settings))
1343 if (dp_is_max_vs_reached(lt_settings))
1347 /* 7. same lane settings*/
1348 /* Note: settings are the same for all lanes,
1349 * so comparing first lane is sufficient*/
1350 if ((dp_get_link_encoding_format(&lt_settings->link_settings) == DP_8b_10b_ENCODING) &&
1351 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET ==
1352 dpcd_lane_adjust[0].bits.VOLTAGE_SWING_LANE)
1354 #if defined(CONFIG_DRM_AMD_DC_DCN)
1355 else if ((dp_get_link_encoding_format(&lt_settings->link_settings) == DP_128b_132b_ENCODING) &&
1356 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE ==
1357 dpcd_lane_adjust[0].tx_ffe.PRESET_VALUE)
1363 /* 8. update VS/PE/PC2 in lt_settings*/
1364 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1365 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1369 if (retry_count >= LINK_TRAINING_MAX_CR_RETRY) {
1371 DC_LOG_ERROR("%s: Link Training Error, could not get CR after %d tries. Possibly voltage swing issue",
1373 LINK_TRAINING_MAX_CR_RETRY);
/* Map the stuck lane(s) to a specific CR failure code */
1377 return dp_get_cr_failure(lane_count, dpcd_lane_status);
/*
 * After training, switch the main link to the idle/video pattern, optionally
 * run the post-LT adjust sequence (skipped when TPS4 was used, per spec),
 * check for link loss on non-eDP connectors, and rewrite LANE_COUNT_SET
 * with POST_LT_ADJ_REQ_GRANTED cleared.
 * NOTE(review): #else/#endif partners of the visible #if and the final
 * return are missing from this extract.
 */
1380 static inline enum link_training_result dp_transition_to_video_idle(
1381 struct dc_link *link,
1382 struct link_training_settings *lt_settings,
1383 enum link_training_result status)
1385 union lane_count_set lane_count_set = {0};
1387 /* 4. mainlink output idle pattern*/
1388 dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
1391 * 5. post training adjust if required
1392 * If the upstream DPTX and downstream DPRX both support TPS4,
1393 * TPS4 must be used instead of POST_LT_ADJ_REQ.
1395 if (link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED != 1 ||
1396 #if defined(CONFIG_DRM_AMD_DC_DCN)
1397 lt_settings->pattern_for_eq >= DP_TRAINING_PATTERN_SEQUENCE_4) {
1399 lt_settings->pattern_for_eq == DP_TRAINING_PATTERN_SEQUENCE_4) {
1401 /* delay 5ms after Main Link output idle pattern and then check
1404 if (link->connector_signal != SIGNAL_TYPE_EDP && status == LINK_TRAINING_SUCCESS) {
1406 status = dp_check_link_loss_status(link, lt_settings);
/* Post-LT adjust path: a failed sequence downgrades the result to LQA */
1411 if (status == LINK_TRAINING_SUCCESS &&
1412 perform_post_lt_adj_req_sequence(link, lt_settings) == false)
1413 status = LINK_TRAINING_LQA_FAIL;
1415 lane_count_set.bits.LANE_COUNT_SET = lt_settings->link_settings.lane_count;
1416 lane_count_set.bits.ENHANCED_FRAMING = lt_settings->enhanced_framing;
1417 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
1419 core_link_write_dpcd(
1422 &lane_count_set.raw,
1423 sizeof(lane_count_set));
/*
 * Read the sink's lane-status DPCD registers and report
 * LINK_TRAINING_LINK_LOSS if any active lane has dropped channel EQ, clock
 * recovery, or symbol lock; otherwise LINK_TRAINING_SUCCESS.
 * NOTE(review): the DPCD read address argument and the final return are
 * missing from this extract.
 */
1428 enum link_training_result dp_check_link_loss_status(
1429 struct dc_link *link,
1430 const struct link_training_settings *link_training_setting)
1432 enum link_training_result status = LINK_TRAINING_SUCCESS;
1433 union lane_status lane_status;
1434 uint8_t dpcd_buf[6] = {0};
1437 core_link_read_dpcd(
1440 (uint8_t *)(dpcd_buf),
1443 /*parse lane status*/
1444 for (lane = 0; lane < link->cur_link_settings.lane_count; lane++) {
1446 * check lanes status
/* Each lane's status is packed as a nibble starting at dpcd_buf[2] */
1448 lane_status.raw = get_nibble_at_index(&dpcd_buf[2], lane);
1450 if (!lane_status.bits.CHANNEL_EQ_DONE_0 ||
1451 !lane_status.bits.CR_DONE_0 ||
1452 !lane_status.bits.SYMBOL_LOCKED_0) {
1453 /* if one of the channel equalization, clock
1454 * recovery or symbol lock is dropped
1455 * consider it as (link has been
1456 * dropped) dp sink status has changed
1458 status = LINK_TRAINING_LINK_LOSS;
/*
 * Populate @lt_settings for 8b/10b link training from @link_setting:
 * copies rate/lane-count, derives link spread from dp_ss_off, picks CR/EQ
 * patterns and aux-rd intervals, enables enhanced framing and FEC-ready,
 * then seeds the DPCD lane settings from the HW lane settings.
 */
1466 static inline void decide_8b_10b_training_settings(
1467 struct dc_link *link,
1468 const struct dc_link_settings *link_setting,
1469 struct link_training_settings *lt_settings)
1471 memset(lt_settings, '\0', sizeof(struct link_training_settings));
1473 /* Initialize link settings */
1474 lt_settings->link_settings.use_link_rate_set = link_setting->use_link_rate_set;
1475 lt_settings->link_settings.link_rate_set = link_setting->link_rate_set;
1476 lt_settings->link_settings.link_rate = link_setting->link_rate;
1477 lt_settings->link_settings.lane_count = link_setting->lane_count;
1478 /* TODO hard coded to SS for now
1479 * lt_settings.link_settings.link_spread =
1480 * dal_display_path_is_ss_supported(
1481 * path_mode->display_path) ?
1482 * LINK_SPREAD_05_DOWNSPREAD_30KHZ :
1483 * LINK_SPREAD_DISABLED;
1485 lt_settings->link_settings.link_spread = link->dp_ss_off ?
1486 LINK_SPREAD_DISABLED : LINK_SPREAD_05_DOWNSPREAD_30KHZ;
1487 lt_settings->lttpr_mode = link->lttpr_mode;
1488 lt_settings->cr_pattern_time = get_cr_training_aux_rd_interval(link, link_setting);
1489 lt_settings->eq_pattern_time = get_eq_training_aux_rd_interval(link, link_setting);
1490 lt_settings->pattern_for_cr = decide_cr_training_pattern(link_setting);
1491 lt_settings->pattern_for_eq = decide_eq_training_pattern(link, link_setting);
1492 lt_settings->enhanced_framing = 1;
1493 lt_settings->should_set_fec_ready = true;
1494 lt_settings->disallow_per_lane_settings = true;
1495 lt_settings->always_match_dpcd_with_hw_lane_settings = true;
1496 dp_hw_to_dpcd_lane_settings(lt_settings, lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1499 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * Populate @lt_settings for DP 2.x 128b/132b training: copies the link
 * settings, derives spread from dp_ss_off, selects CR/EQ/CDS patterns, and
 * sets fixed pattern times plus EQ/CDS wait and loop limits (CDS wait limit
 * scales with LTTPR count).  LTTPR mode is non-transparent iff repeaters
 * are present.
 */
1500 static inline void decide_128b_132b_training_settings(struct dc_link *link,
1501 const struct dc_link_settings *link_settings,
1502 struct link_training_settings *lt_settings)
1504 memset(lt_settings, 0, sizeof(*lt_settings));
1506 lt_settings->link_settings = *link_settings;
1507 /* TODO: should decide link spread when populating link_settings */
1508 lt_settings->link_settings.link_spread = link->dp_ss_off ? LINK_SPREAD_DISABLED :
1509 LINK_SPREAD_05_DOWNSPREAD_30KHZ;
1511 lt_settings->pattern_for_cr = decide_cr_training_pattern(link_settings);
1512 lt_settings->pattern_for_eq = decide_eq_training_pattern(link, link_settings);
1513 lt_settings->eq_pattern_time = 2500;
1514 lt_settings->eq_wait_time_limit = 400000;
1515 lt_settings->eq_loop_count_limit = 20;
1516 lt_settings->pattern_for_cds = DP_128b_132b_TPS2_CDS;
1517 lt_settings->cds_pattern_time = 2500;
1518 lt_settings->cds_wait_time_limit = (dp_convert_to_count(
1519 link->dpcd_caps.lttpr_caps.phy_repeater_cnt) + 1) * 20000;
1520 lt_settings->lttpr_mode = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt) ?
1521 LTTPR_MODE_NON_TRANSPARENT : LTTPR_MODE_TRANSPARENT;
1522 lt_settings->disallow_per_lane_settings = true;
1523 dp_hw_to_dpcd_lane_settings(lt_settings,
1524 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
/*
 * Dispatch training-settings population by link encoding: 8b/10b vs
 * 128b/132b (the latter only when CONFIG_DRM_AMD_DC_DCN is enabled).
 */
1528 void dp_decide_training_settings(
1529 struct dc_link *link,
1530 const struct dc_link_settings *link_settings,
1531 struct link_training_settings *lt_settings)
1533 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING)
1534 decide_8b_10b_training_settings(link, link_settings, lt_settings);
1535 #if defined(CONFIG_DRM_AMD_DC_DCN)
1536 else if (dp_get_link_encoding_format(link_settings) == DP_128b_132b_ENCODING)
1537 decide_128b_132b_training_settings(link, link_settings, lt_settings);
/*
 * Apply caller-supplied override pointers (non-NULL means "override") on
 * top of the decided training settings: link spread, per-lane drive
 * parameters, BIOS-forced VS/PE (fixed-VS + transparent LTTPR case), timing
 * values, training patterns, enhanced framing, and FEC readiness.  Lane
 * settings fall back to level-0/disabled defaults when no override is set,
 * then are mirrored into the DPCD lane settings.
 * NOTE(review): the declaration of the loop variable "lane" is missing from
 * this extract.
 */
1541 static void override_training_settings(
1542 struct dc_link *link,
1543 const struct dc_link_training_overrides *overrides,
1544 struct link_training_settings *lt_settings)
1548 /* Override link spread */
1549 if (!link->dp_ss_off && overrides->downspread != NULL)
1550 lt_settings->link_settings.link_spread = *overrides->downspread ?
1551 LINK_SPREAD_05_DOWNSPREAD_30KHZ
1552 : LINK_SPREAD_DISABLED;
1554 /* Override lane settings */
1555 if (overrides->voltage_swing != NULL)
1556 lt_settings->voltage_swing = overrides->voltage_swing;
1557 if (overrides->pre_emphasis != NULL)
1558 lt_settings->pre_emphasis = overrides->pre_emphasis;
1559 if (overrides->post_cursor2 != NULL)
1560 lt_settings->post_cursor2 = overrides->post_cursor2;
1561 #if defined(CONFIG_DRM_AMD_DC_DCN)
1562 if (overrides->ffe_preset != NULL)
1563 lt_settings->ffe_preset = overrides->ffe_preset;
1565 /* Override HW lane settings with BIOS forced values if present */
1566 if (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN &&
1567 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1568 lt_settings->voltage_swing = &link->bios_forced_drive_settings.VOLTAGE_SWING;
1569 lt_settings->pre_emphasis = &link->bios_forced_drive_settings.PRE_EMPHASIS;
1570 lt_settings->always_match_dpcd_with_hw_lane_settings = false;
1572 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
1573 lt_settings->lane_settings[lane].VOLTAGE_SWING =
1574 lt_settings->voltage_swing != NULL ?
1575 *lt_settings->voltage_swing :
1576 VOLTAGE_SWING_LEVEL0;
1577 lt_settings->lane_settings[lane].PRE_EMPHASIS =
1578 lt_settings->pre_emphasis != NULL ?
1579 *lt_settings->pre_emphasis
1580 : PRE_EMPHASIS_DISABLED;
1581 lt_settings->lane_settings[lane].POST_CURSOR2 =
1582 lt_settings->post_cursor2 != NULL ?
1583 *lt_settings->post_cursor2
1584 : POST_CURSOR2_DISABLED;
1587 dp_hw_to_dpcd_lane_settings(lt_settings,
1588 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1590 /* Initialize training timings */
1591 if (overrides->cr_pattern_time != NULL)
1592 lt_settings->cr_pattern_time = *overrides->cr_pattern_time;
1594 if (overrides->eq_pattern_time != NULL)
1595 lt_settings->eq_pattern_time = *overrides->eq_pattern_time;
1597 if (overrides->pattern_for_cr != NULL)
1598 lt_settings->pattern_for_cr = *overrides->pattern_for_cr;
1599 if (overrides->pattern_for_eq != NULL)
1600 lt_settings->pattern_for_eq = *overrides->pattern_for_eq;
1602 if (overrides->enhanced_framing != NULL)
1603 lt_settings->enhanced_framing = *overrides->enhanced_framing;
1605 if (link->preferred_training_settings.fec_enable != NULL)
1606 lt_settings->should_set_fec_ready = *link->preferred_training_settings.fec_enable;
/*
 * Convert the DPCD PHY_REPEATER_CNT one-hot encoding (0x80 = 1 repeater,
 * each halving adds one, down to 0x01 = 8) into a plain count; returns 0
 * for any invalid value.
 * NOTE(review): the per-case "return N;" lines are missing from this
 * extract; the counts are inferred only from the case comments.
 */
1609 uint8_t dp_convert_to_count(uint8_t lttpr_repeater_count)
1611 switch (lttpr_repeater_count) {
1612 case 0x80: // 1 lttpr repeater
1614 case 0x40: // 2 lttpr repeaters
1616 case 0x20: // 3 lttpr repeaters
1618 case 0x10: // 4 lttpr repeaters
1620 case 0x08: // 5 lttpr repeaters
1622 case 0x04: // 6 lttpr repeaters
1624 case 0x02: // 7 lttpr repeaters
1626 case 0x01: // 8 lttpr repeaters
1631 return 0; // invalid value
/*
 * Write DP_PHY_REPEATER_MODE_TRANSPARENT to the LTTPR mode register and
 * return the DPCD write status.
 */
1634 static enum dc_status configure_lttpr_mode_transparent(struct dc_link *link)
1636 uint8_t repeater_mode = DP_PHY_REPEATER_MODE_TRANSPARENT;
1638 DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Transparent Mode\n", __func__);
1639 return core_link_write_dpcd(link,
1640 DP_PHY_REPEATER_MODE,
1641 (uint8_t *)&repeater_mode,
1642 sizeof(repeater_mode));
/*
 * Enable non-transparent LTTPR mode: for 8b/10b links the mode register is
 * first reset to transparent, then (when the link is in non-transparent
 * mode) set to non-transparent; on success the cached lttpr_caps.mode is
 * updated.  For 8b/10b links each repeater's AUX_RD_INTERVAL is then read
 * back (masked to 7 bits); the DPTX-to-DPIA hop is skipped for USB4 DPIA
 * endpoints.
 * Fix in this revision: restored "&lt_settings" where HTML-entity
 * corruption had turned it into "<_settings".
 * NOTE(review): closing braces and the final return are missing from this
 * extract.
 */
1645 static enum dc_status configure_lttpr_mode_non_transparent(
1646 struct dc_link *link,
1647 const struct link_training_settings *lt_settings)
1649 /* aux timeout is already set to extended */
1650 /* RESET/SET lttpr mode to enable non transparent mode */
1651 uint8_t repeater_cnt;
1652 uint32_t aux_interval_address;
1653 uint8_t repeater_id;
1654 enum dc_status result = DC_ERROR_UNEXPECTED;
1655 uint8_t repeater_mode = DP_PHY_REPEATER_MODE_TRANSPARENT;
1657 enum dp_link_encoding encoding = dp_get_link_encoding_format(&lt_settings->link_settings);
1659 if (encoding == DP_8b_10b_ENCODING) {
1660 DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Transparent Mode\n", __func__);
1661 result = core_link_write_dpcd(link,
1662 DP_PHY_REPEATER_MODE,
1663 (uint8_t *)&repeater_mode,
1664 sizeof(repeater_mode));
1668 if (result == DC_OK) {
1669 link->dpcd_caps.lttpr_caps.mode = repeater_mode;
1672 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
1674 DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Non Transparent Mode\n", __func__);
1676 repeater_mode = DP_PHY_REPEATER_MODE_NON_TRANSPARENT;
1677 result = core_link_write_dpcd(link,
1678 DP_PHY_REPEATER_MODE,
1679 (uint8_t *)&repeater_mode,
1680 sizeof(repeater_mode));
1682 if (result == DC_OK) {
1683 link->dpcd_caps.lttpr_caps.mode = repeater_mode;
1686 if (encoding == DP_8b_10b_ENCODING) {
1687 repeater_cnt = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
1689 /* Driver does not need to train the first hop. Skip DPCD read and clear
1690 * AUX_RD_INTERVAL for DPTX-to-DPIA hop.
1692 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA)
1693 link->dpcd_caps.lttpr_caps.aux_rd_interval[--repeater_cnt] = 0;
1695 for (repeater_id = repeater_cnt; repeater_id > 0; repeater_id--) {
1696 aux_interval_address = DP_TRAINING_AUX_RD_INTERVAL_PHY_REPEATER1 +
1697 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (repeater_id - 1));
1698 core_link_read_dpcd(
1700 aux_interval_address,
1701 (uint8_t *)&link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1],
1702 sizeof(link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1]));
/* Only the low 7 bits of AUX_RD_INTERVAL are the interval code */
1703 link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1] &= 0x7F;
/*
 * Signal to LTTPR @offset (1-based) that training is complete by writing
 * TRAINING_PATTERN_SET = VIDEOIDLE into that repeater's per-hop training
 * pattern register, then log the write.
 */
1711 static void repeater_training_done(struct dc_link *link, uint32_t offset)
1713 union dpcd_training_pattern dpcd_pattern = {0};
/* Per-repeater register block: PHY_REPEATER1 base plus a fixed stride */
1715 const uint32_t dpcd_base_lt_offset =
1716 DP_TRAINING_PATTERN_SET_PHY_REPEATER1 +
1717 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1718 /* Set training not in progress*/
1719 dpcd_pattern.v1_4.TRAINING_PATTERN_SET = DPCD_TRAINING_PATTERN_VIDEOIDLE;
1721 core_link_write_dpcd(
1723 dpcd_base_lt_offset,
1727 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Id: %d 0x%X pattern = %x\n",
1730 dpcd_base_lt_offset,
1731 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
/*
 * Emit a human-readable connectivity-log line describing the trained link:
 * link rate, lane count, training result, first-lane VS/PE, and downspread.
 * NOTE(review): most of the link_rate string assignments and all "break;"
 * lines are missing from this extract.
 */
1734 static void print_status_message(
1735 struct dc_link *link,
1736 const struct link_training_settings *lt_settings,
1737 enum link_training_result status)
1739 char *link_rate = "Unknown";
1740 char *lt_result = "Unknown";
1741 char *lt_spread = "Disabled";
1743 switch (lt_settings->link_settings.link_rate) {
1747 case LINK_RATE_RATE_2:
1750 case LINK_RATE_RATE_3:
1753 case LINK_RATE_HIGH:
1756 case LINK_RATE_RBR2:
1759 case LINK_RATE_RATE_6:
1762 case LINK_RATE_HIGH2:
1765 case LINK_RATE_HIGH3:
1768 #if defined(CONFIG_DRM_AMD_DC_DCN)
1769 case LINK_RATE_UHBR10:
1770 link_rate = "UHBR10";
1772 case LINK_RATE_UHBR13_5:
1773 link_rate = "UHBR13.5";
1775 case LINK_RATE_UHBR20:
1776 link_rate = "UHBR20";
1784 case LINK_TRAINING_SUCCESS:
1787 case LINK_TRAINING_CR_FAIL_LANE0:
1788 lt_result = "CR failed lane0";
1790 case LINK_TRAINING_CR_FAIL_LANE1:
1791 lt_result = "CR failed lane1";
1793 case LINK_TRAINING_CR_FAIL_LANE23:
1794 lt_result = "CR failed lane23";
1796 case LINK_TRAINING_EQ_FAIL_CR:
1797 lt_result = "CR failed in EQ";
1799 case LINK_TRAINING_EQ_FAIL_EQ:
1800 lt_result = "EQ failed";
1802 case LINK_TRAINING_LQA_FAIL:
1803 lt_result = "LQA failed";
1805 case LINK_TRAINING_LINK_LOSS:
1806 lt_result = "Link loss";
1808 #if defined(CONFIG_DRM_AMD_DC_DCN)
1809 case DP_128b_132b_LT_FAILED:
1810 lt_result = "LT_FAILED received";
1812 case DP_128b_132b_MAX_LOOP_COUNT_REACHED:
1813 lt_result = "max loop count reached";
1815 case DP_128b_132b_CHANNEL_EQ_DONE_TIMEOUT:
1816 lt_result = "channel EQ timeout";
1818 case DP_128b_132b_CDS_DONE_TIMEOUT:
1819 lt_result = "CDS timeout";
1826 switch (lt_settings->link_settings.link_spread) {
1827 case LINK_SPREAD_DISABLED:
1828 lt_spread = "Disabled";
1830 case LINK_SPREAD_05_DOWNSPREAD_30KHZ:
1831 lt_spread = "0.5% 30KHz";
1833 case LINK_SPREAD_05_DOWNSPREAD_33KHZ:
1834 lt_spread = "0.5% 33KHz";
1840 /* Connectivity log: link training */
1841 #if defined(CONFIG_DRM_AMD_DC_DCN)
1842 /* TODO - DP2.0 Log: add connectivity log for FFE PRESET */
1844 CONN_MSG_LT(link, "%sx%d %s VS=%d, PE=%d, DS=%s",
1846 lt_settings->link_settings.lane_count,
1848 lt_settings->lane_settings[0].VOLTAGE_SWING,
1849 lt_settings->lane_settings[0].PRE_EMPHASIS,
/*
 * Apply the current drive settings end-to-end: program the ASIC PHY, mirror
 * the HW lane settings into the DPCD lane-settings image, then notify the
 * sink (DPRX) over AUX.
 */
1853 void dc_link_dp_set_drive_settings(
1854 struct dc_link *link,
1855 struct link_training_settings *lt_settings)
1857 /* program ASIC PHY settings*/
1858 dp_set_hw_lane_settings(link, lt_settings, DPRX);
1860 dp_hw_to_dpcd_lane_settings(lt_settings,
1861 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1863 /* Notify DP sink the PHY settings from source */
1864 dpcd_set_lane_settings(link, lt_settings, DPRX);
/*
 * Perform link training without any AUX reads of sink status: blindly run
 * CR then EQ (transmit pattern, set lane settings, wait the pattern time),
 * then switch to the video/idle pattern and log success.  Used when AUX is
 * unavailable or must be skipped.
 * Fix in this revision: restored "&lt_settings" where HTML-entity
 * corruption had turned it into "<_settings" (three occurrences).
 * NOTE(review): some argument lines and the final return are missing from
 * this extract.
 */
1867 bool dc_link_dp_perform_link_training_skip_aux(
1868 struct dc_link *link,
1869 const struct dc_link_settings *link_setting)
1871 struct link_training_settings lt_settings = {0};
1873 dp_decide_training_settings(
1877 override_training_settings(
1879 &link->preferred_training_settings,
1882 /* 1. Perform_clock_recovery_sequence. */
1884 /* transmit training pattern for clock recovery */
1885 dp_set_hw_training_pattern(link, lt_settings.pattern_for_cr, DPRX);
1887 /* call HWSS to set lane settings*/
1888 dp_set_hw_lane_settings(link, &lt_settings, DPRX);
1890 /* wait receiver to lock-on*/
1891 dp_wait_for_training_aux_rd_interval(link, lt_settings.cr_pattern_time);
1893 /* 2. Perform_channel_equalization_sequence. */
1895 /* transmit training pattern for channel equalization. */
1896 dp_set_hw_training_pattern(link, lt_settings.pattern_for_eq, DPRX);
1898 /* call HWSS to set lane settings*/
1899 dp_set_hw_lane_settings(link, &lt_settings, DPRX);
1901 /* wait receiver to lock-on. */
1902 dp_wait_for_training_aux_rd_interval(link, lt_settings.eq_pattern_time);
1904 /* 3. Perform_link_training_int. */
1906 /* Mainlink output idle pattern. */
1907 dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
1909 print_status_message(link, &lt_settings, LINK_TRAINING_SUCCESS);
/*
 * Configure the LTTPR chain per the decided mode: transparent or
 * non-transparent.  Returns DC_OK when no configuration is needed.
 */
1914 enum dc_status dpcd_configure_lttpr_mode(struct dc_link *link, struct link_training_settings *lt_settings)
1916 enum dc_status status = DC_OK;
1918 if (lt_settings->lttpr_mode == LTTPR_MODE_TRANSPARENT)
1919 status = configure_lttpr_mode_transparent(link);
1921 else if (lt_settings->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT)
1922 status = configure_lttpr_mode_non_transparent(link, lt_settings);
/*
 * Leave training mode: clear TRAINING_PATTERN_SET to video-idle, then (DCN
 * builds only) poll DP_SINK_STATUS up to 10 times until the intra-hop AUX
 * reply indication deasserts.
 * NOTE(review): the loop-variable declaration, the delay between polls, and
 * the loop exit are missing from this extract.
 */
1927 static void dpcd_exit_training_mode(struct dc_link *link)
1929 #if defined(CONFIG_DRM_AMD_DC_DCN)
1930 uint8_t sink_status = 0;
1934 /* clear training pattern set */
1935 dpcd_set_training_pattern(link, DP_TRAINING_PATTERN_VIDEOIDLE);
1937 #if defined(CONFIG_DRM_AMD_DC_DCN)
1938 /* poll for intra-hop disable */
1939 for (i = 0; i < 10; i++) {
1940 if ((core_link_read_dpcd(link, DP_SINK_STATUS, &sink_status, 1) == DC_OK) &&
1941 (sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION) == 0)
/*
 * Write the link's channel-coding selection (8b/10b vs 128b/132b) to
 * DP_MAIN_LINK_CHANNEL_CODING_SET and log the write; returns the DPCD
 * write status.
 * Fix in this revision: restored "&lt_settings" where HTML-entity
 * corruption had turned it into "<_settings".
 */
1948 enum dc_status dpcd_configure_channel_coding(struct dc_link *link,
1949 struct link_training_settings *lt_settings)
1951 enum dp_link_encoding encoding =
1952 dp_get_link_encoding_format(
1953 &lt_settings->link_settings);
1954 enum dc_status status;
1956 status = core_link_write_dpcd(
1958 DP_MAIN_LINK_CHANNEL_CODING_SET,
1959 (uint8_t *) &encoding,
1961 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X MAIN_LINK_CHANNEL_CODING_SET = %x\n",
1963 DP_MAIN_LINK_CHANNEL_CODING_SET,
1969 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * Read DP_128b_132b_TRAINING_AUX_RD_INTERVAL and convert it to
 * microseconds: (VALUE + 1) * unit, where UNIT selects a 1ms or 2ms unit
 * (max 256 ms per the spec comment below).
 */
1970 static void dpcd_128b_132b_get_aux_rd_interval(struct dc_link *link,
1971 uint32_t *interval_in_us)
1973 union dp_128b_132b_training_aux_rd_interval dpcd_interval;
1974 uint32_t interval_unit = 0;
1976 dpcd_interval.raw = 0;
1977 core_link_read_dpcd(link, DP_128b_132b_TRAINING_AUX_RD_INTERVAL,
1978 &dpcd_interval.raw, sizeof(dpcd_interval.raw));
1979 interval_unit = dpcd_interval.bits.UNIT ? 1 : 2; /* 0b = 2 ms, 1b = 1 ms */
1980 /* (128b/132b_TRAINING_AUX_RD_INTERVAL value + 1) *
1981 * INTERVAL_UNIT. The maximum is 256 ms
1983 *interval_in_us = (dpcd_interval.bits.VALUE + 1) * interval_unit * 1000;
/*
 * 128b/132b channel-EQ phase: transmit TPS1, adopt the sink's requested
 * TX_FFE presets, switch to TPS2, then poll for CHANNEL_EQ done (bounded by
 * eq_loop_count_limit) and finally for EQ_INTERLANE_ALIGN done (bounded by
 * eq_wait_time_limit).  LT_FAILED from the sink aborts either loop.
 * NOTE(review): the loop_count declaration/increment and the final return
 * are missing from this extract.
 */
1986 static enum link_training_result dp_perform_128b_132b_channel_eq_done_sequence(
1987 struct dc_link *link,
1988 struct link_training_settings *lt_settings)
1991 uint32_t aux_rd_interval = 0;
1992 uint32_t wait_time = 0;
1993 union lane_align_status_updated dpcd_lane_status_updated = {0};
1994 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
1995 enum link_training_result status = LINK_TRAINING_SUCCESS;
1996 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
1998 /* Transmit 128b/132b_TPS1 over Main-Link */
1999 dp_set_hw_training_pattern(link, lt_settings->pattern_for_cr, DPRX);
2000 /* Set TRAINING_PATTERN_SET to 01h */
2001 dpcd_set_training_pattern(link, lt_settings->pattern_for_cr);
2003 /* Adjust TX_FFE_PRESET_VALUE and Transmit 128b/132b_TPS2 over Main-Link */
2004 dpcd_128b_132b_get_aux_rd_interval(link, &aux_rd_interval);
2005 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2006 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2007 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2008 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2009 dp_set_hw_lane_settings(link, lt_settings, DPRX);
2010 dp_set_hw_training_pattern(link, lt_settings->pattern_for_eq, DPRX);
2012 /* Set loop counter to start from 1 */
2015 /* Set TRAINING_PATTERN_SET to 02h and TX_FFE_PRESET_VALUE in one AUX transaction */
2016 dpcd_set_lt_pattern_and_lane_settings(link, lt_settings,
2017 lt_settings->pattern_for_eq, DPRX);
2019 /* poll for channel EQ done */
2020 while (status == LINK_TRAINING_SUCCESS) {
2021 dp_wait_for_training_aux_rd_interval(link, aux_rd_interval);
2022 wait_time += aux_rd_interval;
2023 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2024 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2025 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2026 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2027 dpcd_128b_132b_get_aux_rd_interval(link, &aux_rd_interval);
2028 if (dp_is_ch_eq_done(lt_settings->link_settings.lane_count,
2029 dpcd_lane_status)) {
2032 } else if (loop_count >= lt_settings->eq_loop_count_limit) {
2033 status = DP_128b_132b_MAX_LOOP_COUNT_REACHED;
2034 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2035 status = DP_128b_132b_LT_FAILED;
2037 dp_set_hw_lane_settings(link, lt_settings, DPRX);
2038 dpcd_set_lane_settings(link, lt_settings, DPRX);
2043 /* poll for EQ interlane align done */
2044 while (status == LINK_TRAINING_SUCCESS) {
2045 if (dpcd_lane_status_updated.bits.EQ_INTERLANE_ALIGN_DONE_128b_132b) {
2048 } else if (wait_time >= lt_settings->eq_wait_time_limit) {
2049 status = DP_128b_132b_CHANNEL_EQ_DONE_TIMEOUT;
2050 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2051 status = DP_128b_132b_LT_FAILED;
2053 dp_wait_for_training_aux_rd_interval(link,
2054 lt_settings->eq_pattern_time);
2055 wait_time += lt_settings->eq_pattern_time;
2056 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2057 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
/*
 * 128b/132b CDS phase: write the CDS training pattern, then poll until
 * all lanes are symbol-locked and CDS_INTERLANE_ALIGN_DONE is set, the
 * sink reports LT_FAILED, or cds_wait_time_limit elapses.  Assumes the EQ
 * pattern is already being transmitted.
 * NOTE(review): the loop-exit "break" and the final return are missing
 * from this extract.
 */
2064 static enum link_training_result dp_perform_128b_132b_cds_done_sequence(
2065 struct dc_link *link,
2066 struct link_training_settings *lt_settings)
2068 /* Assumption: assume hardware has transmitted eq pattern */
2069 enum link_training_result status = LINK_TRAINING_SUCCESS;
2070 union lane_align_status_updated dpcd_lane_status_updated = {0};
2071 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
2072 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
2073 uint32_t wait_time = 0;
2075 /* initiate CDS done sequence */
2076 dpcd_set_training_pattern(link, lt_settings->pattern_for_cds);
2078 /* poll for CDS interlane align done and symbol lock */
2079 while (status == LINK_TRAINING_SUCCESS) {
2080 dp_wait_for_training_aux_rd_interval(link,
2081 lt_settings->cds_pattern_time);
2082 wait_time += lt_settings->cds_pattern_time;
2083 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2084 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2085 if (dp_is_symbol_locked(lt_settings->link_settings.lane_count, dpcd_lane_status) &&
2086 dpcd_lane_status_updated.bits.CDS_INTERLANE_ALIGN_DONE_128b_132b) {
2089 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2090 status = DP_128b_132b_LT_FAILED;
2091 } else if (wait_time >= lt_settings->cds_wait_time_limit) {
2092 status = DP_128b_132b_CDS_DONE_TIMEOUT;
/*
 * Full 8b/10b training sequence: optionally start the CR pattern early
 * (workaround), program link rate/lane-count/spread in DPCD, then — in
 * non-transparent LTTPR mode — train each repeater from the farthest hop
 * inward (CR then EQ, then mark that hop done) before training the sink
 * (DPRX) itself.  DPCD lane settings are cleared between repeater and sink
 * training.
 * NOTE(review): the lane-variable declaration and the final return are
 * missing from this extract.
 */
2100 static enum link_training_result dp_perform_8b_10b_link_training(
2101 struct dc_link *link,
2102 struct link_training_settings *lt_settings)
2104 enum link_training_result status = LINK_TRAINING_SUCCESS;
2106 uint8_t repeater_cnt;
2107 uint8_t repeater_id;
2110 if (link->ctx->dc->work_arounds.lt_early_cr_pattern)
2111 start_clock_recovery_pattern_early(link, lt_settings, DPRX);
2113 /* 1. set link rate, lane count and spread. */
2114 dpcd_set_link_settings(link, lt_settings);
2116 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
2118 /* 2. perform link training (set link training done
2119 * to false is done as well)
2121 repeater_cnt = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
2123 for (repeater_id = repeater_cnt; (repeater_id > 0 && status == LINK_TRAINING_SUCCESS);
2125 status = perform_clock_recovery_sequence(link, lt_settings, repeater_id);
2127 if (status != LINK_TRAINING_SUCCESS)
2130 status = perform_channel_equalization_sequence(link,
2134 if (status != LINK_TRAINING_SUCCESS)
2137 repeater_training_done(link, repeater_id);
2140 for (lane = 0; lane < (uint8_t)lt_settings->link_settings.lane_count; lane++)
2141 lt_settings->dpcd_lane_settings[lane].raw = 0;
2144 if (status == LINK_TRAINING_SUCCESS) {
2145 status = perform_clock_recovery_sequence(link, lt_settings, DPRX);
2146 if (status == LINK_TRAINING_SUCCESS) {
2147 status = perform_channel_equalization_sequence(link,
2156 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * Full 128b/132b training sequence: under the legacy_dp2_lt debug flag,
 * fall back to the 8b/10b state machine with freshly decided settings;
 * otherwise program link settings in DPCD, then run the channel-EQ and CDS
 * phases in order, stopping at the first failure.
 * Fix in this revision: restored "&lt_settings" where HTML-entity
 * corruption had turned it into "<_settings".
 * NOTE(review): the final return is missing from this extract.
 */
2157 static enum link_training_result dp_perform_128b_132b_link_training(
2158 struct dc_link *link,
2159 struct link_training_settings *lt_settings)
2161 enum link_training_result result = LINK_TRAINING_SUCCESS;
2163 /* TODO - DP2.0 Link: remove legacy_dp2_lt logic */
2164 if (link->dc->debug.legacy_dp2_lt) {
2165 struct link_training_settings legacy_settings;
2167 decide_8b_10b_training_settings(link,
2168 &lt_settings->link_settings,
2170 return dp_perform_8b_10b_link_training(link, &legacy_settings);
2173 dpcd_set_link_settings(link, lt_settings);
2175 if (result == LINK_TRAINING_SUCCESS)
2176 result = dp_perform_128b_132b_channel_eq_done_sequence(link, lt_settings);
2178 if (result == LINK_TRAINING_SUCCESS)
2179 result = dp_perform_128b_132b_cds_done_sequence(link, lt_settings);
/*
 * Top-level DP link-training entry point.  Decides and overrides training
 * settings, resets any previous training state, configures LTTPR mode /
 * FEC readiness / channel coding, runs the encoding-specific training
 * state machine (8b/10b or 128b/132b), exits training mode, transitions to
 * video idle (unless a failed training is allowed to skip it), logs the
 * result, and bumps the failure counter on error.
 * Fix in this revision: restored "&lt_settings" where HTML-entity
 * corruption had turned it into "<_settings" (five occurrences).
 * NOTE(review): several argument lines and the final return are missing
 * from this extract.
 */
2185 enum link_training_result dc_link_dp_perform_link_training(
2186 struct dc_link *link,
2187 const struct dc_link_settings *link_settings,
2188 bool skip_video_pattern)
2190 enum link_training_result status = LINK_TRAINING_SUCCESS;
2191 struct link_training_settings lt_settings = {0};
2192 enum dp_link_encoding encoding =
2193 dp_get_link_encoding_format(link_settings);
2195 /* decide training settings */
2196 dp_decide_training_settings(
2200 override_training_settings(
2202 &link->preferred_training_settings,
2205 /* reset previous training states */
2206 dpcd_exit_training_mode(link);
2208 /* configure link prior to entering training mode */
2209 dpcd_configure_lttpr_mode(link, &lt_settings);
2210 dp_set_fec_ready(link, lt_settings.should_set_fec_ready);
2211 dpcd_configure_channel_coding(link, &lt_settings);
2213 /* enter training mode:
2214 * Per DP specs starting from here, DPTX device shall not issue
2215 * Non-LT AUX transactions inside training mode.
2217 if (encoding == DP_8b_10b_ENCODING)
2218 status = dp_perform_8b_10b_link_training(link, &lt_settings);
2219 #if defined(CONFIG_DRM_AMD_DC_DCN)
2220 else if (encoding == DP_128b_132b_ENCODING)
2221 status = dp_perform_128b_132b_link_training(link, &lt_settings);
2226 /* exit training mode and switch to video idle */
2227 dpcd_exit_training_mode(link);
2228 if ((status == LINK_TRAINING_SUCCESS) || !skip_video_pattern)
2229 status = dp_transition_to_video_idle(link,
2233 /* dump debug data */
2234 print_status_message(link, &lt_settings, status);
2235 if (status != LINK_TRAINING_SUCCESS)
2236 link->ctx->dc->debug_data.ltFailCount++;
/*
 * Retry DP link training up to a caller-supplied number of attempts,
 * optionally falling back to reduced link settings between attempts
 * (do_fallback). Handles eDP ASSR enablement, sink power-up delay quirks,
 * USB4 DPIA vs. legacy PHY training paths, and aborts early when the sink
 * has been unplugged. Returns true once an attempt succeeds.
 *
 * NOTE(review): this chunk is truncated and entity-mangled by extraction
 * (stray leading line numbers; "&current_setting" corrupted to
 * "¤t_setting"; several lines — e.g. the 'attempts'/'do_fallback'
 * parameters and loop-local declarations — are missing). Code left
 * byte-identical; restore from upstream before building.
 */
2240 bool perform_link_training_with_retries(
2241 const struct dc_link_settings *link_setting,
2242 bool skip_video_pattern,
2244 struct pipe_ctx *pipe_ctx,
2245 enum signal_type signal,
2249 uint8_t delay_between_attempts = LINK_TRAINING_RETRY_DELAY;
2250 struct dc_stream_state *stream = pipe_ctx->stream;
2251 struct dc_link *link = stream->link;
2252 enum dp_panel_mode panel_mode = dp_get_panel_mode(link);
2253 struct link_encoder *link_enc;
2254 enum link_training_result status = LINK_TRAINING_CR_FAIL_LANE0;
2255 struct dc_link_settings current_setting = *link_setting;
2257 /* Dynamically assigned link encoders associated with stream rather than
2260 if (link->is_dig_mapping_flexible && link->dc->res_pool->funcs->link_encs_assign)
2261 link_enc = link_enc_cfg_get_link_enc_used_by_stream(link->ctx->dc, pipe_ctx->stream)2263 link_enc = link->link_enc;
2265 /* We need to do this before the link training to ensure the idle pattern in SST
2266 * mode will be sent right after the link training
2268 if (dp_get_link_encoding_format(&current_setting) == DP_8b_10b_ENCODING) {
2269 link_enc->funcs->connect_dig_be_to_fe(link_enc,
2270 pipe_ctx->stream_res.stream_enc->id, true);
2271 dp_source_sequence_trace(link, DPCD_SOURCE_SEQ_AFTER_CONNECT_DIG_FE_BE);
2274 for (j = 0; j < attempts; ++j) {
2276 DC_LOG_HW_LINK_TRAINING("%s: Beginning link training attempt %u of %d\n",
2277 __func__, (unsigned int)j + 1, attempts);
2282 pipe_ctx->clock_source->id,
2285 if (stream->sink_patches.dppowerup_delay > 0) {
2286 int delay_dp_power_up_in_ms = stream->sink_patches.dppowerup_delay;
2288 msleep(delay_dp_power_up_in_ms);
2291 #ifdef CONFIG_DRM_AMD_DC_HDCP
2292 if (panel_mode == DP_PANEL_MODE_EDP) {
2293 struct cp_psp *cp_psp = &stream->ctx->cp_psp;
2295 if (cp_psp && cp_psp->funcs.enable_assr)
2296 /* ASSR is bound to fail with unsigned PSP
2297 * verstage used during devlopment phase.
2298 * Report and continue with eDP panel mode to
2299 * perform eDP link training with right settings
2301 cp_psp->funcs.enable_assr(cp_psp->handle, link);
2305 dp_set_panel_mode(link, panel_mode);
2307 if (link->aux_access_disabled) {
2308 dc_link_dp_perform_link_training_skip_aux(link, &current_setting);
2311 /** @todo Consolidate USB4 DP and DPx.x training. */
2312 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA) {
2313 status = dc_link_dpia_perform_link_training(link,
2315 skip_video_pattern);
2317 /* Transmit idle pattern once training successful. */
2318 if (status == LINK_TRAINING_SUCCESS)
2319 dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE,
2322 status = dc_link_dp_perform_link_training(link,
2324 skip_video_pattern);
2327 if (status == LINK_TRAINING_SUCCESS)
2331 /* latest link training still fail, skip delay and keep PHY on
2333 if (j == (attempts - 1) && link->ep_type == DISPLAY_ENDPOINT_PHY)
2336 DC_LOG_WARNING("%s: Link training attempt %u of %d failed\n",
2337 __func__, (unsigned int)j + 1, attempts);
2339 dp_disable_link_phy(link, signal);
2341 /* Abort link training if failure due to sink being unplugged. */
2342 if (status == LINK_TRAINING_ABORT) {
2343 enum dc_connection_type type = dc_connection_none;
2345 dc_link_detect_sink(link, &type);
2346 if (type == dc_connection_none)
2348 } else if (do_fallback) {
2352 decide_fallback_link_setting(*link_setting, &current_setting, status);
2353 /* Fail link training if reduced link bandwidth no longer meets
2354 * stream requirements.
2356 req_bw = dc_bandwidth_in_kbps_from_timing(&stream->timing);
2357 link_bw = dc_link_bandwidth_kbps(link, &current_setting);
2358 if (req_bw > link_bw)
2362 msleep(delay_between_attempts);
2364 delay_between_attempts += LINK_TRAINING_RETRY_DELAY;
2370 static enum clock_source_id get_clock_source_id(struct dc_link *link)
2372 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_UNDEFINED;
2373 struct clock_source *dp_cs = link->dc->res_pool->dp_clock_source;
2375 if (dp_cs != NULL) {
2376 dp_cs_id = dp_cs->id;
2379 * dp clock source is not initialized for some reason.
2380 * Should not happen, CLOCK_SOURCE_ID_EXTERNAL will be used
2388 static void set_dp_mst_mode(struct dc_link *link, bool mst_enable)
2390 if (mst_enable == false &&
2391 link->type == dc_connection_mst_branch) {
2392 /* Disable MST on link. Use only local sink. */
2393 dp_disable_link_phy_mst(link, link->connector_signal);
2395 link->type = dc_connection_single;
2396 link->local_sink = link->remote_sinks[0];
2397 link->local_sink->sink_signal = SIGNAL_TYPE_DISPLAY_PORT;
2398 dc_sink_retain(link->local_sink);
2399 dm_helpers_dp_mst_stop_top_mgr(link->ctx, link);
2400 } else if (mst_enable == true &&
2401 link->type == dc_connection_single &&
2402 link->remote_sinks[0] != NULL) {
2403 /* Re-enable MST on link. */
2404 dp_disable_link_phy(link, link->connector_signal);
2405 dp_enable_mst_on_sink(link, true);
2407 link->type = dc_connection_mst_branch;
2408 link->local_sink->sink_signal = SIGNAL_TYPE_DISPLAY_PORT_MST;
2412 bool dc_link_dp_sync_lt_begin(struct dc_link *link)
2414 /* Begin Sync LT. During this time,
2415 * DPCD:600h must not be powered down.
2417 link->sync_lt_in_progress = true;
2419 /*Clear any existing preferred settings.*/
2420 memset(&link->preferred_training_settings, 0,
2421 sizeof(struct dc_link_training_overrides));
2422 memset(&link->preferred_link_setting, 0,
2423 sizeof(struct dc_link_settings));
/*
 * Run one synchronous link-training attempt with caller-supplied overrides
 * (MST mode, FEC, alternate scrambler reset). Re-enables the PHY with the
 * requested settings, programs panel mode, then performs clock recovery and
 * channel equalization toward DPRX. Sync LT deliberately skips
 * TRAINING_PATTERN_SET:0 (the video pattern).
 *
 * NOTE(review): this chunk is truncated and entity-mangled by extraction
 * (stray leading line numbers; "&lt_settings" corrupted to "<_settings";
 * several lines including the dp_decide/override argument lists and the
 * final return are missing). Code left byte-identical; restore from
 * upstream before building.
 */
2428 enum link_training_result dc_link_dp_sync_lt_attempt(
2429 struct dc_link *link,
2430 struct dc_link_settings *link_settings,
2431 struct dc_link_training_overrides *lt_overrides)
2433 struct link_training_settings lt_settings = {0};
2434 enum link_training_result lt_status = LINK_TRAINING_SUCCESS;
2435 enum dp_panel_mode panel_mode = DP_PANEL_MODE_DEFAULT;
2436 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_EXTERNAL;
2437 bool fec_enable = false;
2439 dp_decide_training_settings(
2443 override_training_settings(
2447 /* Setup MST Mode */
2448 if (lt_overrides->mst_enable)
2449 set_dp_mst_mode(link, *lt_overrides->mst_enable);
2452 dp_disable_link_phy(link, link->connector_signal);
2455 dp_cs_id = get_clock_source_id(link);
2456 dp_enable_link_phy(link, link->connector_signal,
2457 dp_cs_id, link_settings);
2459 /* Set FEC enable */
2460 #if defined(CONFIG_DRM_AMD_DC_DCN)
2461 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING) {
2463 fec_enable = lt_overrides->fec_enable && *lt_overrides->fec_enable;
2464 dp_set_fec_ready(link, fec_enable);
2465 #if defined(CONFIG_DRM_AMD_DC_DCN)
2469 if (lt_overrides->alternate_scrambler_reset) {
2470 if (*lt_overrides->alternate_scrambler_reset)
2471 panel_mode = DP_PANEL_MODE_EDP;
2473 panel_mode = DP_PANEL_MODE_DEFAULT;
2475 panel_mode = dp_get_panel_mode(link);
2477 dp_set_panel_mode(link, panel_mode);
2479 /* Attempt to train with given link training settings */
2480 if (link->ctx->dc->work_arounds.lt_early_cr_pattern)
2481 start_clock_recovery_pattern_early(link, &lt_settings, DPRX);
2483 /* Set link rate, lane count and spread. */
2484 dpcd_set_link_settings(link, &lt_settings);
2486 /* 2. perform link training (set link training done
2487 * to false is done as well)
2489 lt_status = perform_clock_recovery_sequence(link, &lt_settings, DPRX);
2490 if (lt_status == LINK_TRAINING_SUCCESS) {
2491 lt_status = perform_channel_equalization_sequence(link,
2496 /* 3. Sync LT must skip TRAINING_PATTERN_SET:0 (video pattern)*/
2497 /* 4. print status message*/
2498 print_status_message(link, &lt_settings, lt_status);
2503 bool dc_link_dp_sync_lt_end(struct dc_link *link, bool link_down)
2505 /* If input parameter is set, shut down phy.
2506 * Still shouldn't turn off dp_receiver (DPCD:600h)
2508 if (link_down == true) {
2509 #if defined(CONFIG_DRM_AMD_DC_DCN)
2510 struct dc_link_settings link_settings = link->cur_link_settings;
2512 dp_disable_link_phy(link, link->connector_signal);
2513 #if defined(CONFIG_DRM_AMD_DC_DCN)
2514 if (dp_get_link_encoding_format(&link_settings) == DP_8b_10b_ENCODING)
2516 dp_set_fec_ready(link, false);
2519 link->sync_lt_in_progress = false;
#if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * Return the maximum link rate supported by the LTTPR chain.
 *
 * Starts from the LTTPR-reported 8b/10b max link rate and upgrades it to
 * the highest 128b/132b UHBR rate the repeater advertises (UHBR20 >
 * UHBR13.5 > UHBR10).
 */
static enum dc_link_rate get_lttpr_max_link_rate(struct dc_link *link)
{
	enum dc_link_rate lttpr_max_link_rate = link->dpcd_caps.lttpr_caps.max_link_rate;

	if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR20)
		lttpr_max_link_rate = LINK_RATE_UHBR20;
	else if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR13_5)
		lttpr_max_link_rate = LINK_RATE_UHBR13_5;
	else if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR10)
		lttpr_max_link_rate = LINK_RATE_UHBR10;

	return lttpr_max_link_rate;
}
#endif
2539 bool dc_link_dp_get_max_link_enc_cap(const struct dc_link *link, struct dc_link_settings *max_link_enc_cap)
2541 struct link_encoder *link_enc = NULL;
2543 if (!max_link_enc_cap) {
2544 DC_LOG_ERROR("%s: Could not return max link encoder caps", __func__);
2548 /* Links supporting dynamically assigned link encoder will be assigned next
2549 * available encoder if one not already assigned.
2551 if (link->is_dig_mapping_flexible &&
2552 link->dc->res_pool->funcs->link_encs_assign) {
2553 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
2554 if (link_enc == NULL)
2555 link_enc = link_enc_cfg_get_next_avail_link_enc(link->ctx->dc);
2557 link_enc = link->link_enc;
2560 if (link_enc && link_enc->funcs->get_max_link_cap) {
2561 link_enc->funcs->get_max_link_cap(link_enc, max_link_enc_cap);
2565 DC_LOG_ERROR("%s: Max link encoder caps unknown", __func__);
2566 max_link_enc_cap->lane_count = 1;
2567 max_link_enc_cap->link_rate = 6;
/*
 * Compute the maximum usable link settings for a link: start from the link
 * encoder's max caps (capped to HBR3 when no HPO DP encoder exists for
 * UHBR rates), then clamp by the sink's reported caps, and finally by the
 * LTTPR chain's caps when in non-transparent LTTPR mode.
 *
 * NOTE(review): truncated/garbled chunk — stray leading line numbers,
 * missing #else/#endif and null-check lines around the encoder resolution.
 * Code left byte-identical; restore from upstream before building.
 */
2571 static struct dc_link_settings get_max_link_cap(struct dc_link *link)
2573 struct dc_link_settings max_link_cap = {0};
2574 #if defined(CONFIG_DRM_AMD_DC_DCN)
2575 enum dc_link_rate lttpr_max_link_rate;
2577 struct link_encoder *link_enc = NULL;
2579 /* Links supporting dynamically assigned link encoder will be assigned next
2580 * available encoder if one not already assigned.
2582 if (link->is_dig_mapping_flexible &&
2583 link->dc->res_pool->funcs->link_encs_assign) {
2584 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
2585 if (link_enc == NULL)
2586 link_enc = link_enc_cfg_get_next_avail_link_enc(link->ctx->dc);
2588 link_enc = link->link_enc;
2591 /* get max link encoder capability */
2593 link_enc->funcs->get_max_link_cap(link_enc, &max_link_cap);
2594 #if defined(CONFIG_DRM_AMD_DC_DCN)
2595 if (max_link_cap.link_rate >= LINK_RATE_UHBR10 &&
2596 !link->hpo_dp_link_enc)
2597 max_link_cap.link_rate = LINK_RATE_HIGH3;
2600 /* Lower link settings based on sink's link cap */
2601 if (link->reported_link_cap.lane_count < max_link_cap.lane_count)
2602 max_link_cap.lane_count =
2603 link->reported_link_cap.lane_count;
2604 if (link->reported_link_cap.link_rate < max_link_cap.link_rate)
2605 max_link_cap.link_rate =
2606 link->reported_link_cap.link_rate;
2607 if (link->reported_link_cap.link_spread <
2608 max_link_cap.link_spread)
2609 max_link_cap.link_spread =
2610 link->reported_link_cap.link_spread;
2612 * account for lttpr repeaters cap
2613 * notes: repeaters do not snoop in the DPRX Capabilities addresses (3.6.3).
2615 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
2616 if (link->dpcd_caps.lttpr_caps.max_lane_count < max_link_cap.lane_count)
2617 max_link_cap.lane_count = link->dpcd_caps.lttpr_caps.max_lane_count;
2619 #if defined(CONFIG_DRM_AMD_DC_DCN)
2620 lttpr_max_link_rate = get_lttpr_max_link_rate(link);
2622 if (lttpr_max_link_rate < max_link_cap.link_rate)
2623 max_link_cap.link_rate = lttpr_max_link_rate;
2625 if (link->dpcd_caps.lttpr_caps.max_link_rate < max_link_cap.link_rate)
2626 max_link_cap.link_rate = link->dpcd_caps.lttpr_caps.max_link_rate;
2629 DC_LOG_HW_LINK_TRAINING("%s\n Training with LTTPR, max_lane count %d max_link rate %d \n",
2631 max_link_cap.lane_count,
2632 max_link_cap.link_rate);
2634 return max_link_cap;
/*
 * Read the HPD RX IRQ status block from the sink into *irq_data.
 * Pre-DP1.4 sinks: one 6-byte read from the 200h range (explicit size
 * because AUX_DEFER breaks the HW's 16-byte auto-read, per CTS 4.3.2.1).
 * DP1.4+ sinks: one 14-byte read from the 2002h ESI range, then the needed
 * fields are copied out.
 *
 * NOTE(review): 'retval' is declared 'static' — a function-local static
 * shared across calls; looks unintentional (harmless only if calls never
 * race) — verify against upstream.
 * NOTE(review): chunk is truncated (DPCD address arguments and the else/
 * return lines are missing); code left byte-identical.
 */
2637 static enum dc_status read_hpd_rx_irq_data(
2638 struct dc_link *link,
2639 union hpd_irq_data *irq_data)
2641 static enum dc_status retval;
2643 /* The HW reads 16 bytes from 200h on HPD,
2644 * but if we get an AUX_DEFER, the HW cannot retry
2645 * and this causes the CTS tests 4.3.2.1 - 3.2.4 to
2646 * fail, so we now explicitly read 6 bytes which is
2647 * the req from the above mentioned test cases.
2649 * For DP 1.4 we need to read those from 2002h range.
2651 if (link->dpcd_caps.dpcd_rev.raw < DPCD_REV_14)
2652 retval = core_link_read_dpcd(
2656 sizeof(union hpd_irq_data));
2658 /* Read 14 bytes in a single read and then copy only the required fields.
2659 * This is more efficient than doing it in two separate AUX reads. */
2661 uint8_t tmp[DP_SINK_STATUS_ESI - DP_SINK_COUNT_ESI + 1];
2663 retval = core_link_read_dpcd(
2669 if (retval != DC_OK)
2672 irq_data->bytes.sink_cnt.raw = tmp[DP_SINK_COUNT_ESI - DP_SINK_COUNT_ESI];
2673 irq_data->bytes.device_service_irq.raw = tmp[DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0 - DP_SINK_COUNT_ESI];
2674 irq_data->bytes.lane01_status.raw = tmp[DP_LANE0_1_STATUS_ESI - DP_SINK_COUNT_ESI];
2675 irq_data->bytes.lane23_status.raw = tmp[DP_LANE2_3_STATUS_ESI - DP_SINK_COUNT_ESI];
2676 irq_data->bytes.lane_status_updated.raw = tmp[DP_LANE_ALIGN_STATUS_UPDATED_ESI - DP_SINK_COUNT_ESI];
2677 irq_data->bytes.sink_status.raw = tmp[DP_SINK_STATUS_ESI - DP_SINK_COUNT_ESI];
/*
 * Decide whether an HPD RX IRQ indicates link loss requiring retraining.
 * Checks per-lane CR/EQ/symbol-lock bits and interlane align from the
 * already-read IRQ DPCD data; if status dropped, additionally confirms the
 * sink is in D0 power state (no point retraining a powered-down RX).
 *
 * NOTE(review): chunk is truncated (lane-loop brace structure, the
 * return_code assignments and final return are partially missing); code
 * left byte-identical; restore from upstream before building.
 */
2683 bool hpd_rx_irq_check_link_loss_status(
2684 struct dc_link *link,
2685 union hpd_irq_data *hpd_irq_dpcd_data)
2687 uint8_t irq_reg_rx_power_state = 0;
2688 enum dc_status dpcd_result = DC_ERROR_UNEXPECTED;
2689 union lane_status lane_status;
2691 bool sink_status_changed;
2694 sink_status_changed = false;
2695 return_code = false;
2697 if (link->cur_link_settings.lane_count == 0)
2700 /*1. Check that Link Status changed, before re-training.*/
2702 /*parse lane status*/
2703 for (lane = 0; lane < link->cur_link_settings.lane_count; lane++) {
2704 /* check status of lanes 0,1
2705 * changed DpcdAddress_Lane01Status (0x202)
2707 lane_status.raw = get_nibble_at_index(
2708 &hpd_irq_dpcd_data->bytes.lane01_status.raw,
2711 if (!lane_status.bits.CHANNEL_EQ_DONE_0 ||
2712 !lane_status.bits.CR_DONE_0 ||
2713 !lane_status.bits.SYMBOL_LOCKED_0) {
2714 /* if one of the channel equalization, clock
2715 * recovery or symbol lock is dropped
2716 * consider it as (link has been
2717 * dropped) dp sink status has changed
2719 sink_status_changed = true;
2724 /* Check interlane align.*/
2725 if (sink_status_changed ||
2726 !hpd_irq_dpcd_data->bytes.lane_status_updated.bits.INTERLANE_ALIGN_DONE) {
2728 DC_LOG_HW_HPD_IRQ("%s: Link Status changed.\n", __func__);
2732 /*2. Check that we can handle interrupt: Not in FS DOS,
2733 * Not in "Display Timeout" state, Link is trained.
2735 dpcd_result = core_link_read_dpcd(link,
2737 &irq_reg_rx_power_state,
2738 sizeof(irq_reg_rx_power_state));
2740 if (dpcd_result != DC_OK) {
2741 DC_LOG_HW_HPD_IRQ("%s: DPCD read failed to obtain power state.\n",
2744 if (irq_reg_rx_power_state != DP_SET_POWER_D0)
2745 return_code = false;
/*
 * Determine the link's actual trainable capability by attempting link
 * training from the highest common (sink ∩ ASIC) settings downward,
 * recording the first settings that train successfully into
 * link->verified_link_cap. Skips training entirely for flexible-encoder
 * links or when the skip_detection_link_training debug flag is set. On
 * total failure, falls back to 1 lane @ RBR, no spread.
 *
 * NOTE(review): heavily truncated chunk — stray leading line numbers; the
 * 'attempts'/fail-count parameters, the do{ } loop opener, dp_enable_link_phy
 * call and several success/fallback lines are missing. Code left
 * byte-identical; restore from upstream before building.
 */
2752 bool dp_verify_link_cap(
2753 struct dc_link *link,
2754 struct dc_link_settings *known_limit_link_setting,
2757 struct dc_link_settings max_link_cap = {0};
2758 struct dc_link_settings cur_link_setting = {0};
2759 struct dc_link_settings *cur = &cur_link_setting;
2760 struct dc_link_settings initial_link_settings = {0};
2762 bool skip_link_training;
2763 bool skip_video_pattern;
2764 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_EXTERNAL;
2765 enum link_training_result status;
2766 union hpd_irq_data irq_data;
2768 /* link training starts with the maximum common settings
2769 * supported by both sink and ASIC.
2771 max_link_cap = get_max_link_cap(link);
2772 initial_link_settings = get_common_supported_link_settings(
2773 *known_limit_link_setting,
2776 /* Accept reported capabilities if link supports flexible encoder mapping or encoder already in use. */
2777 if (link->dc->debug.skip_detection_link_training ||
2778 link->is_dig_mapping_flexible) {
2779 /* TODO - should we check link encoder's max link caps here?
2780 * How do we know which link encoder to check from?
2782 link->verified_link_cap = *known_limit_link_setting;
2784 } else if (link->link_enc && link->dc->res_pool->funcs->link_encs_assign &&
2785 !link_enc_cfg_is_link_enc_avail(link->ctx->dc, link->link_enc->preferred_engine, link)) {
2786 link->verified_link_cap = initial_link_settings;
2790 memset(&irq_data, 0, sizeof(irq_data));
2792 skip_link_training = false;
2794 /* Grant extended timeout request */
2795 if ((link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) && (link->dpcd_caps.lttpr_caps.max_ext_timeout > 0)) {
2796 uint8_t grant = link->dpcd_caps.lttpr_caps.max_ext_timeout & 0x80;
2798 core_link_write_dpcd(link, DP_PHY_REPEATER_EXTENDED_WAIT_TIMEOUT, &grant, sizeof(grant));
2801 #if defined(CONFIG_DRM_AMD_DC_DCN)
2802 if (dp_get_link_encoding_format(&link->cur_link_settings) == DP_128b_132b_ENCODING)
2803 reset_dp_hpo_stream_encoders_for_link(link);
2805 /* TODO implement override and monitor patch later */
2807 /* try to train the link from high to low to
2808 * find the physical link capability
2810 /* disable PHY done possible by BIOS, will be done by driver itself */
2811 dp_disable_link_phy(link, link->connector_signal);
2813 dp_cs_id = get_clock_source_id(link);
2815 cur_link_setting = initial_link_settings;
2817 /* Temporary Renoir-specific workaround for SWDEV-215184;
2818 * PHY will sometimes be in bad state on hotplugging display from certain USB-C dongle,
2819 * so add extra cycle of enabling and disabling the PHY before first link training.
2821 if (link->link_enc && link->link_enc->features.flags.bits.DP_IS_USB_C &&
2822 link->dc->debug.usbc_combo_phy_reset_wa) {
2823 dp_enable_link_phy(link, link->connector_signal, dp_cs_id, cur);
2824 dp_disable_link_phy(link, link->connector_signal);
2828 skip_video_pattern = true;
2830 if (cur->link_rate == LINK_RATE_LOW)
2831 skip_video_pattern = false;
2835 link->connector_signal,
2840 if (skip_link_training)
2843 status = dc_link_dp_perform_link_training(
2846 skip_video_pattern);
2847 if (status == LINK_TRAINING_SUCCESS)
2854 link->verified_link_cap = *cur;
2856 if (read_hpd_rx_irq_data(link, &irq_data) == DC_OK)
2857 if (hpd_rx_irq_check_link_loss_status(
2862 /* always disable the link before trying another
2863 * setting or before returning we'll enable it later
2864 * based on the actual mode we're driving
2866 dp_disable_link_phy(link, link->connector_signal);
2867 } while (!success && decide_fallback_link_setting(
2868 initial_link_settings, cur, status));
2870 /* Link Training failed for all Link Settings
2871 * (Lane Count is still unknown)
2874 /* If all LT fails for all settings,
2875 * set verified = failed safe (1 lane low)
2877 link->verified_link_cap.lane_count = LANE_COUNT_ONE;
2878 link->verified_link_cap.link_rate = LINK_RATE_LOW;
2880 link->verified_link_cap.link_spread =
2881 LINK_SPREAD_DISABLED;
/*
 * Retry dp_verify_link_cap up to 'attempts' times. If the sink is gone,
 * record a 1-lane RBR fail-safe verified cap and stop; succeed when a
 * verification pass completes with zero training failures.
 *
 * NOTE(review): truncated chunk — the 'attempts' parameter, fail_count
 * local, loop braces and final return are missing. Code left
 * byte-identical; restore from upstream before building.
 */
2888 bool dp_verify_link_cap_with_retries(
2889 struct dc_link *link,
2890 struct dc_link_settings *known_limit_link_setting,
2894 bool success = false;
2896 for (i = 0; i < attempts; i++) {
2898 enum dc_connection_type type = dc_connection_none;
2900 memset(&link->verified_link_cap, 0,
2901 sizeof(struct dc_link_settings));
2902 if (!dc_link_detect_sink(link, &type) || type == dc_connection_none) {
2903 link->verified_link_cap.lane_count = LANE_COUNT_ONE;
2904 link->verified_link_cap.link_rate = LINK_RATE_LOW;
2905 link->verified_link_cap.link_spread = LINK_SPREAD_DISABLED;
2907 } else if (dp_verify_link_cap(link,
2908 known_limit_link_setting,
2909 &fail_count) && fail_count == 0) {
/*
 * Verify link capability for an MST link. For 8b/10b links the verified
 * cap is simply the common subset of reported caps and max encoder caps
 * (no training performed); for 128b/132b links a full retried verification
 * is run.
 *
 * NOTE(review): truncated chunk — braces, #endif and the return are
 * missing. Code left byte-identical; restore from upstream.
 */
2918 bool dp_verify_mst_link_cap(
2919 struct dc_link *link)
2921 struct dc_link_settings max_link_cap = {0};
2923 if (dp_get_link_encoding_format(&link->reported_link_cap) ==
2924 DP_8b_10b_ENCODING) {
2925 max_link_cap = get_max_link_cap(link);
2926 link->verified_link_cap = get_common_supported_link_settings(
2927 link->reported_link_cap,
2930 #if defined(CONFIG_DRM_AMD_DC_DCN)
2931 else if (dp_get_link_encoding_format(&link->reported_link_cap) ==
2932 DP_128b_132b_ENCODING) {
2933 dp_verify_link_cap_with_retries(link,
2934 &link->reported_link_cap,
2935 LINK_TRAINING_MAX_VERIFY_RETRY);
/*
 * Intersect two link settings: take the smaller lane count and link rate,
 * force spread disabled, then snap any non-standard link rate down to the
 * nearest supported rate (guards against DPR-120 reporting random
 * MAX_LINK_BW values in compliance testing).
 *
 * NOTE(review): truncated chunk — the #else/#endif pairing around the
 * UHBR clamping and one assignment line (UHBR10 snap) are missing, and
 * the final return's enclosing brace is gone. Code left byte-identical;
 * restore from upstream before building.
 */
2941 static struct dc_link_settings get_common_supported_link_settings(
2942 struct dc_link_settings link_setting_a,
2943 struct dc_link_settings link_setting_b)
2945 struct dc_link_settings link_settings = {0};
2947 link_settings.lane_count =
2948 (link_setting_a.lane_count <=
2949 link_setting_b.lane_count) ?
2950 link_setting_a.lane_count :
2951 link_setting_b.lane_count;
2952 link_settings.link_rate =
2953 (link_setting_a.link_rate <=
2954 link_setting_b.link_rate) ?
2955 link_setting_a.link_rate :
2956 link_setting_b.link_rate;
2957 link_settings.link_spread = LINK_SPREAD_DISABLED;
2959 /* in DP compliance test, DPR-120 may have
2960 * a random value in its MAX_LINK_BW dpcd field.
2961 * We map it to the maximum supported link rate that
2962 * is smaller than MAX_LINK_BW in this case.
2964 #if defined(CONFIG_DRM_AMD_DC_DCN)
2965 if (link_settings.link_rate > LINK_RATE_UHBR20) {
2966 link_settings.link_rate = LINK_RATE_UHBR20;
2967 } else if (link_settings.link_rate < LINK_RATE_UHBR20 &&
2968 link_settings.link_rate > LINK_RATE_UHBR13_5) {
2969 link_settings.link_rate = LINK_RATE_UHBR13_5;
2970 } else if (link_settings.link_rate < LINK_RATE_UHBR10 &&
2971 link_settings.link_rate > LINK_RATE_HIGH3) {
2973 if (link_settings.link_rate > LINK_RATE_HIGH3) {
2975 link_settings.link_rate = LINK_RATE_HIGH3;
2976 } else if (link_settings.link_rate < LINK_RATE_HIGH3
2977 && link_settings.link_rate > LINK_RATE_HIGH2) {
2978 link_settings.link_rate = LINK_RATE_HIGH2;
2979 } else if (link_settings.link_rate < LINK_RATE_HIGH2
2980 && link_settings.link_rate > LINK_RATE_HIGH) {
2981 link_settings.link_rate = LINK_RATE_HIGH;
2982 } else if (link_settings.link_rate < LINK_RATE_HIGH
2983 && link_settings.link_rate > LINK_RATE_LOW) {
2984 link_settings.link_rate = LINK_RATE_LOW;
2985 } else if (link_settings.link_rate < LINK_RATE_LOW) {
2986 link_settings.link_rate = LINK_RATE_UNKNOWN;
2989 return link_settings;
2992 static inline bool reached_minimum_lane_count(enum dc_lane_count lane_count)
2994 return lane_count <= LANE_COUNT_ONE;
2997 static inline bool reached_minimum_link_rate(enum dc_link_rate link_rate)
2999 return link_rate <= LINK_RATE_LOW;
3002 static enum dc_lane_count reduce_lane_count(enum dc_lane_count lane_count)
3004 switch (lane_count) {
3005 case LANE_COUNT_FOUR:
3006 return LANE_COUNT_TWO;
3007 case LANE_COUNT_TWO:
3008 return LANE_COUNT_ONE;
3009 case LANE_COUNT_ONE:
3010 return LANE_COUNT_UNKNOWN;
3012 return LANE_COUNT_UNKNOWN;
3016 static enum dc_link_rate reduce_link_rate(enum dc_link_rate link_rate)
3018 switch (link_rate) {
3019 #if defined(CONFIG_DRM_AMD_DC_DCN)
3020 case LINK_RATE_UHBR20:
3021 return LINK_RATE_UHBR13_5;
3022 case LINK_RATE_UHBR13_5:
3023 return LINK_RATE_UHBR10;
3024 case LINK_RATE_UHBR10:
3025 return LINK_RATE_HIGH3;
3027 case LINK_RATE_HIGH3:
3028 return LINK_RATE_HIGH2;
3029 case LINK_RATE_HIGH2:
3030 return LINK_RATE_HIGH;
3031 case LINK_RATE_HIGH:
3032 return LINK_RATE_LOW;
3034 return LINK_RATE_UNKNOWN;
3036 return LINK_RATE_UNKNOWN;
3040 static enum dc_lane_count increase_lane_count(enum dc_lane_count lane_count)
3042 switch (lane_count) {
3043 case LANE_COUNT_ONE:
3044 return LANE_COUNT_TWO;
3045 case LANE_COUNT_TWO:
3046 return LANE_COUNT_FOUR;
3048 return LANE_COUNT_UNKNOWN;
3052 static enum dc_link_rate increase_link_rate(enum dc_link_rate link_rate)
3054 switch (link_rate) {
3056 return LINK_RATE_HIGH;
3057 case LINK_RATE_HIGH:
3058 return LINK_RATE_HIGH2;
3059 case LINK_RATE_HIGH2:
3060 return LINK_RATE_HIGH3;
3061 #if defined(CONFIG_DRM_AMD_DC_DCN)
3062 case LINK_RATE_HIGH3:
3063 return LINK_RATE_UHBR10;
3064 case LINK_RATE_UHBR10:
3065 return LINK_RATE_UHBR13_5;
3066 case LINK_RATE_UHBR13_5:
3067 return LINK_RATE_UHBR20;
3070 return LINK_RATE_UNKNOWN;
/*
 * 128b/132b fallback policy: walk the ordered dp_lt_fallbacks table,
 * locate the entry matching the current settings, and advance to the next
 * entry that still fits within the caller's max caps. Returns true when a
 * next entry was applied, false when the table is exhausted.
 *
 * NOTE(review): truncated chunk — loop bodies (cur_idx/next_idx increments,
 * breaks) and returns are missing. Code left byte-identical; restore from
 * upstream before building.
 */
3074 #if defined(CONFIG_DRM_AMD_DC_DCN)
3075 static bool decide_fallback_link_setting_max_bw_policy(
3076 const struct dc_link_settings *max,
3077 struct dc_link_settings *cur)
3079 uint8_t cur_idx = 0, next_idx;
3082 while (cur_idx < ARRAY_SIZE(dp_lt_fallbacks))
3083 /* find current index */
3084 if (dp_lt_fallbacks[cur_idx].lane_count == cur->lane_count &&
3085 dp_lt_fallbacks[cur_idx].link_rate == cur->link_rate)
3090 next_idx = cur_idx + 1;
3092 while (next_idx < ARRAY_SIZE(dp_lt_fallbacks))
3093 /* find next index */
3094 if (dp_lt_fallbacks[next_idx].lane_count <= max->lane_count &&
3095 dp_lt_fallbacks[next_idx].link_rate <= max->link_rate)
3100 if (next_idx < ARRAY_SIZE(dp_lt_fallbacks)) {
3101 cur->lane_count = dp_lt_fallbacks[next_idx].lane_count;
3102 cur->link_rate = dp_lt_fallbacks[next_idx].link_rate;
/*
 * Choose the next (lane count, link rate) combination to try after a
 * failed training attempt, based on which training phase failed:
 * CR failures drop link rate first, EQ failures drop lane count first.
 * 128b/132b links delegate to the max-bandwidth fallback table instead.
 * Returns true if a new setting was produced, false at the minimum.
 *
 * NOTE(review): truncated chunk — several assignment lines
 * (reduce_link_rate/reduce_lane_count call arguments, per-lane-failure
 * lane-count targets), the default case and returns are missing. Code
 * left byte-identical; restore from upstream before building.
 */
3111 * function: set link rate and lane count fallback based
3112 * on current link setting and last link training result
3114 * true - link setting could be set
3115 * false - has reached minimum setting
3116 * and no further fallback could be done
3118 static bool decide_fallback_link_setting(
3119 struct dc_link_settings initial_link_settings,
3120 struct dc_link_settings *current_link_setting,
3121 enum link_training_result training_result)
3123 if (!current_link_setting)
3125 #if defined(CONFIG_DRM_AMD_DC_DCN)
3126 if (dp_get_link_encoding_format(&initial_link_settings) == DP_128b_132b_ENCODING)
3127 return decide_fallback_link_setting_max_bw_policy(&initial_link_settings,
3128 current_link_setting);
3131 switch (training_result) {
3132 case LINK_TRAINING_CR_FAIL_LANE0:
3133 case LINK_TRAINING_CR_FAIL_LANE1:
3134 case LINK_TRAINING_CR_FAIL_LANE23:
3135 case LINK_TRAINING_LQA_FAIL:
3137 if (!reached_minimum_link_rate
3138 (current_link_setting->link_rate)) {
3139 current_link_setting->link_rate =
3141 current_link_setting->link_rate);
3142 } else if (!reached_minimum_lane_count
3143 (current_link_setting->lane_count)) {
3144 current_link_setting->link_rate =
3145 initial_link_settings.link_rate;
3146 if (training_result == LINK_TRAINING_CR_FAIL_LANE0)
3148 else if (training_result == LINK_TRAINING_CR_FAIL_LANE1)
3149 current_link_setting->lane_count =
3151 else if (training_result ==
3152 LINK_TRAINING_CR_FAIL_LANE23)
3153 current_link_setting->lane_count =
3156 current_link_setting->lane_count =
3158 current_link_setting->lane_count);
3164 case LINK_TRAINING_EQ_FAIL_EQ:
3166 if (!reached_minimum_lane_count
3167 (current_link_setting->lane_count)) {
3168 current_link_setting->lane_count =
3170 current_link_setting->lane_count);
3171 } else if (!reached_minimum_link_rate
3172 (current_link_setting->link_rate)) {
3173 current_link_setting->link_rate =
3175 current_link_setting->link_rate);
3181 case LINK_TRAINING_EQ_FAIL_CR:
3183 if (!reached_minimum_link_rate
3184 (current_link_setting->link_rate)) {
3185 current_link_setting->link_rate =
3187 current_link_setting->link_rate);
/*
 * Validate that a CRTC timing can be driven over this DP link: rejects
 * YCbCr420 without VSC SDP colorimetry support (non-virtual connectors),
 * always accepts the 640x480 fail-safe mode, and otherwise compares the
 * timing's required bandwidth against the link's capability.
 *
 * NOTE(review): truncated chunk — the req_bw/max_bw declarations, the
 * early 'return false/true' lines and final return are missing. Code left
 * byte-identical; restore from upstream before building.
 */
3199 bool dp_validate_mode_timing(
3200 struct dc_link *link,
3201 const struct dc_crtc_timing *timing)
3206 const struct dc_link_settings *link_setting;
3208 /* According to spec, VSC SDP should be used if pixel format is YCbCr420 */
3209 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420 &&
3210 !link->dpcd_caps.dprx_feature.bits.VSC_SDP_COLORIMETRY_SUPPORTED &&
3211 dal_graphics_object_id_get_connector_id(link->link_id) != CONNECTOR_ID_VIRTUAL)
3214 /*always DP fail safe mode*/
3215 if ((timing->pix_clk_100hz / 10) == (uint32_t) 25175 &&
3216 timing->h_addressable == (uint32_t) 640 &&
3217 timing->v_addressable == (uint32_t) 480)
3220 link_setting = dc_link_get_link_cap(link);
3222 /* TODO: DYNAMIC_VALIDATION needs to be implemented */
3223 /*if (flags.DYNAMIC_VALIDATION == 1 &&
3224 link->verified_link_cap.lane_count != LANE_COUNT_UNKNOWN)
3225 link_setting = &link->verified_link_cap;
3228 req_bw = dc_bandwidth_in_kbps_from_timing(timing);
3229 max_bw = dc_link_bandwidth_kbps(link, link_setting);
3231 if (req_bw <= max_bw) {
3232 /* remember the biggest mode here, during
3233 * initial link training (to get
3234 * verified_link_cap), LS sends event about
3235 * cannot train at reported cap to upper
3236 * layer and upper layer will re-enumerate modes.
3237 * this is not necessary if the lower
3238 * verified_link_cap is enough to drive
3241 /* TODO: DYNAMIC_VALIDATION needs to be implemented */
3242 /* if (flags.DYNAMIC_VALIDATION == 1)
3243 dpsst->max_req_bw_for_verified_linkcap = dal_max(
3244 dpsst->max_req_bw_for_verified_linkcap, req_bw); */
/*
 * Pick the minimum DP link settings (within the verified cap) whose
 * bandwidth covers req_bw, preferring more lanes at the current rate
 * before stepping the rate up. Returns false when even the verified cap
 * cannot carry the requested bandwidth.
 *
 * NOTE(review): truncated, entity-mangled chunk — "&current_link_setting"
 * was corrupted to "¤t_link_setting"; the link_bw declaration, loop
 * braces and returns are missing. Code left byte-identical; restore from
 * upstream before building.
 */
3250 static bool decide_dp_link_settings(struct dc_link *link, struct dc_link_settings *link_setting, uint32_t req_bw)
3252 struct dc_link_settings initial_link_setting = {
3253 LANE_COUNT_ONE, LINK_RATE_LOW, LINK_SPREAD_DISABLED, false, 0};
3254 struct dc_link_settings current_link_setting =
3255 initial_link_setting;
3258 if (req_bw > dc_link_bandwidth_kbps(link, &link->verified_link_cap))
3261 /* search for the minimum link setting that:
3262 * 1. is supported according to the link training result
3263 * 2. could support the b/w requested by the timing
3265 while (current_link_setting.link_rate <=
3266 link->verified_link_cap.link_rate) {
3267 link_bw = dc_link_bandwidth_kbps(
3269 &current_link_setting);
3270 if (req_bw <= link_bw) {
3271 *link_setting = current_link_setting;
3275 if (current_link_setting.lane_count <
3276 link->verified_link_cap.lane_count) {
3277 current_link_setting.lane_count =
3278 increase_lane_count(
3279 current_link_setting.lane_count);
3281 current_link_setting.link_rate =
3283 current_link_setting.link_rate);
3284 current_link_setting.lane_count =
3285 initial_link_setting.lane_count;
/*
 * Pick minimal eDP link settings covering req_bw. Panels older than eDP
 * v1.4 (DPCD rev < 13h) or without a supported-link-rates table just get
 * the verified cap; otherwise the search walks the panel's link-rate-set
 * table, increasing lanes before stepping to the next table entry.
 *
 * NOTE(review): truncated, entity-mangled chunk — "&current_link_setting"
 * was corrupted to "¤t_link_setting"; the link_bw declaration, loop
 * braces and returns are missing. Code left byte-identical; restore from
 * upstream before building.
 */
3292 bool decide_edp_link_settings(struct dc_link *link, struct dc_link_settings *link_setting, uint32_t req_bw)
3294 struct dc_link_settings initial_link_setting;
3295 struct dc_link_settings current_link_setting;
3299 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
3300 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
3302 if (link->dpcd_caps.dpcd_rev.raw < DPCD_REV_13 ||
3303 link->dpcd_caps.edp_supported_link_rates_count == 0) {
3304 *link_setting = link->verified_link_cap;
3308 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
3309 initial_link_setting.lane_count = LANE_COUNT_ONE;
3310 initial_link_setting.link_rate = link->dpcd_caps.edp_supported_link_rates[0];
3311 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
3312 initial_link_setting.use_link_rate_set = true;
3313 initial_link_setting.link_rate_set = 0;
3314 current_link_setting = initial_link_setting;
3316 /* search for the minimum link setting that:
3317 * 1. is supported according to the link training result
3318 * 2. could support the b/w requested by the timing
3320 while (current_link_setting.link_rate <=
3321 link->verified_link_cap.link_rate) {
3322 link_bw = dc_link_bandwidth_kbps(
3324 &current_link_setting);
3325 if (req_bw <= link_bw) {
3326 *link_setting = current_link_setting;
3330 if (current_link_setting.lane_count <
3331 link->verified_link_cap.lane_count) {
3332 current_link_setting.lane_count =
3333 increase_lane_count(
3334 current_link_setting.lane_count);
3336 if (current_link_setting.link_rate_set < link->dpcd_caps.edp_supported_link_rates_count) {
3337 current_link_setting.link_rate_set++;
3338 current_link_setting.link_rate =
3339 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
3340 current_link_setting.lane_count =
3341 initial_link_setting.lane_count;
3349 static bool decide_mst_link_settings(const struct dc_link *link, struct dc_link_settings *link_setting)
3351 *link_setting = link->verified_link_cap;
3355 void decide_link_settings(struct dc_stream_state *stream,
3356 struct dc_link_settings *link_setting)
3358 struct dc_link *link;
3361 req_bw = dc_bandwidth_in_kbps_from_timing(&stream->timing);
3363 link = stream->link;
3365 /* if preferred is specified through AMDDP, use it, if it's enough
3368 if (link->preferred_link_setting.lane_count !=
3369 LANE_COUNT_UNKNOWN &&
3370 link->preferred_link_setting.link_rate !=
3371 LINK_RATE_UNKNOWN) {
3372 *link_setting = link->preferred_link_setting;
3376 /* MST doesn't perform link training for now
3377 * TODO: add MST specific link training routine
3379 if (stream->signal == SIGNAL_TYPE_DISPLAY_PORT_MST) {
3380 if (decide_mst_link_settings(link, link_setting))
3382 } else if (link->connector_signal == SIGNAL_TYPE_EDP) {
3383 if (decide_edp_link_settings(link, link_setting, req_bw))
3385 } else if (decide_dp_link_settings(link, link_setting, req_bw))
3388 BREAK_TO_DEBUGGER();
3389 ASSERT(link->verified_link_cap.lane_count != LANE_COUNT_UNKNOWN);
3391 *link_setting = link->verified_link_cap;
3394 /*************************Short Pulse IRQ***************************/
3395 bool dc_link_dp_allow_hpd_rx_irq(const struct dc_link *link)
3398 * Don't handle RX IRQ unless one of following is met:
3399 * 1) The link is established (cur_link_settings != unknown)
3400 * 2) We know we're dealing with a branch device, SST or MST
3403 if ((link->cur_link_settings.lane_count != LANE_COUNT_UNKNOWN) ||
3404 is_dp_branch_device(link))
3410 static bool handle_hpd_irq_psr_sink(struct dc_link *link)
3412 union dpcd_psr_configuration psr_configuration;
3414 if (!link->psr_settings.psr_feature_enabled)
3417 dm_helpers_dp_read_dpcd(
3420 368,/*DpcdAddress_PSR_Enable_Cfg*/
3421 &psr_configuration.raw,
3422 sizeof(psr_configuration.raw));
3425 if (psr_configuration.bits.ENABLE) {
3426 unsigned char dpcdbuf[3] = {0};
3427 union psr_error_status psr_error_status;
3428 union psr_sink_psr_status psr_sink_psr_status;
3430 dm_helpers_dp_read_dpcd(
3433 0x2006, /*DpcdAddress_PSR_Error_Status*/
3434 (unsigned char *) dpcdbuf,
3437 /*DPCD 2006h ERROR STATUS*/
3438 psr_error_status.raw = dpcdbuf[0];
3439 /*DPCD 2008h SINK PANEL SELF REFRESH STATUS*/
3440 psr_sink_psr_status.raw = dpcdbuf[2];
3442 if (psr_error_status.bits.LINK_CRC_ERROR ||
3443 psr_error_status.bits.RFB_STORAGE_ERROR ||
3444 psr_error_status.bits.VSC_SDP_ERROR) {
3447 /* Acknowledge and clear error bits */
3448 dm_helpers_dp_write_dpcd(
3451 8198,/*DpcdAddress_PSR_Error_Status*/
3452 &psr_error_status.raw,
3453 sizeof(psr_error_status.raw));
3455 /* PSR error, disable and re-enable PSR */
3456 allow_active = false;
3457 dc_link_set_psr_allow_active(link, &allow_active, true, false, NULL);
3458 allow_active = true;
3459 dc_link_set_psr_allow_active(link, &allow_active, true, false, NULL);
3462 } else if (psr_sink_psr_status.bits.SINK_SELF_REFRESH_STATUS ==
3463 PSR_SINK_STATE_ACTIVE_DISPLAY_FROM_SINK_RFB){
3464 /* No error is detect, PSR is active.
3465 * We should return with IRQ_HPD handled without
3466 * checking for loss of sync since PSR would have
3467 * powered down main link.
3475 static void dp_test_send_link_training(struct dc_link *link)
3477 struct dc_link_settings link_settings = {0};
3479 core_link_read_dpcd(
3482 (unsigned char *)(&link_settings.lane_count),
3484 core_link_read_dpcd(
3487 (unsigned char *)(&link_settings.link_rate),
3490 /* Set preferred link settings */
3491 link->verified_link_cap.lane_count = link_settings.lane_count;
3492 link->verified_link_cap.link_rate = link_settings.link_rate;
3494 dp_retrain_link_dp_test(link, &link_settings, false);
3497 /* TODO Raven hbr2 compliance eye output is unstable
3498 * (toggling on and off) with debugger break
3499 * This caueses intermittent PHY automation failure
3500 * Need to look into the root cause */
/* Compliance (PHY layer): read the PHY test pattern and per-lane adjust
 * requests from the sink's DPCD, translate them into a DC test pattern
 * plus hardware lane settings, and forward the request downstream via
 * dc_link_dp_set_test_pattern(). Triggered from the automated-test HPD path.
 *
 * NOTE(review): this extract has elided lines (missing "break;"s after
 * the switch cases, missing call arguments, missing braces); do not
 * build from this text without diffing against upstream dc_link_dp.c.
 */
3501 static void dp_test_send_phy_test_pattern(struct dc_link *link)
3503 union phy_test_pattern dpcd_test_pattern;
3504 union lane_adjust dpcd_lane_adjustment[2];
3505 unsigned char dpcd_post_cursor_2_adjustment = 0;
3506 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* DCN supports 128b/132b patterns: custom buffer is 264 bits. */
3507 unsigned char test_pattern_buffer[
3508 (DP_TEST_264BIT_CUSTOM_PATTERN_263_256 -
3509 DP_TEST_264BIT_CUSTOM_PATTERN_7_0)+1] = {0};
/* Pre-DCN: 80-bit custom pattern buffer only. */
3511 unsigned char test_pattern_buffer[
3512 (DP_TEST_80BIT_CUSTOM_PATTERN_79_72 -
3513 DP_TEST_80BIT_CUSTOM_PATTERN_7_0)+1] = {0};
3515 unsigned int test_pattern_size = 0;
3516 enum dp_test_pattern test_pattern;
3517 union lane_adjust dpcd_lane_adjust;
3519 struct link_training_settings link_training_settings;
3521 dpcd_test_pattern.raw = 0;
3522 memset(dpcd_lane_adjustment, 0, sizeof(dpcd_lane_adjustment));
3523 memset(&link_training_settings, 0, sizeof(link_training_settings));
3525 /* get phy test pattern and pattern parameters from DP receiver */
3526 core_link_read_dpcd(
3528 DP_PHY_TEST_PATTERN,
3529 &dpcd_test_pattern.raw,
3530 sizeof(dpcd_test_pattern));
3531 core_link_read_dpcd(
3533 DP_ADJUST_REQUEST_LANE0_1,
3534 &dpcd_lane_adjustment[0].raw,
3535 sizeof(dpcd_lane_adjustment));
3537 /*get post cursor 2 parameters
3538 * For DP 1.1a or eariler, this DPCD register's value is 0
3539 * For DP 1.2 or later:
3540 * Bits 1:0 = POST_CURSOR2_LANE0; Bits 3:2 = POST_CURSOR2_LANE1
3541 * Bits 5:4 = POST_CURSOR2_LANE2; Bits 7:6 = POST_CURSOR2_LANE3
3543 core_link_read_dpcd(
3545 DP_ADJUST_REQUEST_POST_CURSOR2,
3546 &dpcd_post_cursor_2_adjustment,
3547 sizeof(dpcd_post_cursor_2_adjustment));
3549 /* translate request */
3550 switch (dpcd_test_pattern.bits.PATTERN) {
3551 case PHY_TEST_PATTERN_D10_2:
3552 test_pattern = DP_TEST_PATTERN_D102;
3554 case PHY_TEST_PATTERN_SYMBOL_ERROR:
3555 test_pattern = DP_TEST_PATTERN_SYMBOL_ERROR;
3557 case PHY_TEST_PATTERN_PRBS7:
3558 test_pattern = DP_TEST_PATTERN_PRBS7;
3560 case PHY_TEST_PATTERN_80BIT_CUSTOM:
3561 test_pattern = DP_TEST_PATTERN_80BIT_CUSTOM;
3563 case PHY_TEST_PATTERN_CP2520_1:
3564 /* CP2520 pattern is unstable, temporarily use TPS4 instead */
3565 test_pattern = (link->dc->caps.force_dp_tps4_for_cp2520 == 1) ?
3566 DP_TEST_PATTERN_TRAINING_PATTERN4 :
3567 DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE;
3569 case PHY_TEST_PATTERN_CP2520_2:
3570 /* CP2520 pattern is unstable, temporarily use TPS4 instead */
3571 test_pattern = (link->dc->caps.force_dp_tps4_for_cp2520 == 1) ?
3572 DP_TEST_PATTERN_TRAINING_PATTERN4 :
3573 DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE;
3575 case PHY_TEST_PATTERN_CP2520_3:
3576 test_pattern = DP_TEST_PATTERN_TRAINING_PATTERN4;
3578 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* 128b/132b (DP 2.0) pattern requests — DCN builds only. */
3579 case PHY_TEST_PATTERN_128b_132b_TPS1:
3580 test_pattern = DP_TEST_PATTERN_128b_132b_TPS1;
3582 case PHY_TEST_PATTERN_128b_132b_TPS2:
3583 test_pattern = DP_TEST_PATTERN_128b_132b_TPS2;
3585 case PHY_TEST_PATTERN_PRBS9:
3586 test_pattern = DP_TEST_PATTERN_PRBS9;
3588 case PHY_TEST_PATTERN_PRBS11:
3589 test_pattern = DP_TEST_PATTERN_PRBS11;
3591 case PHY_TEST_PATTERN_PRBS15:
3592 test_pattern = DP_TEST_PATTERN_PRBS15;
3594 case PHY_TEST_PATTERN_PRBS23:
3595 test_pattern = DP_TEST_PATTERN_PRBS23;
3597 case PHY_TEST_PATTERN_PRBS31:
3598 test_pattern = DP_TEST_PATTERN_PRBS31;
3600 case PHY_TEST_PATTERN_264BIT_CUSTOM:
3601 test_pattern = DP_TEST_PATTERN_264BIT_CUSTOM;
3603 case PHY_TEST_PATTERN_SQUARE_PULSE:
3604 test_pattern = DP_TEST_PATTERN_SQUARE_PULSE;
/* Unknown request: fall back to normal video. */
3608 test_pattern = DP_TEST_PATTERN_VIDEO_MODE;
/* For custom patterns, fetch the pattern payload from DPCD. */
3612 if (test_pattern == DP_TEST_PATTERN_80BIT_CUSTOM) {
3613 test_pattern_size = (DP_TEST_80BIT_CUSTOM_PATTERN_79_72 -
3614 DP_TEST_80BIT_CUSTOM_PATTERN_7_0) + 1;
3615 core_link_read_dpcd(
3617 DP_TEST_80BIT_CUSTOM_PATTERN_7_0,
3618 test_pattern_buffer,
3622 #if defined(CONFIG_DRM_AMD_DC_DCN)
3623 if (test_pattern == DP_TEST_PATTERN_SQUARE_PULSE) {
3624 test_pattern_size = 1; // Square pattern data is 1 byte (DP spec)
3625 core_link_read_dpcd(
3627 DP_PHY_SQUARE_PATTERN,
3628 test_pattern_buffer,
3632 if (test_pattern == DP_TEST_PATTERN_264BIT_CUSTOM) {
3633 test_pattern_size = (DP_TEST_264BIT_CUSTOM_PATTERN_263_256-
3634 DP_TEST_264BIT_CUSTOM_PATTERN_7_0) + 1;
3635 core_link_read_dpcd(
3637 DP_TEST_264BIT_CUSTOM_PATTERN_7_0,
3638 test_pattern_buffer,
3643 /* prepare link training settings */
3644 link_training_settings.link_settings = link->cur_link_settings;
/* Unpack the per-lane adjust nibbles into HW lane settings;
 * 8b/10b uses swing/pre-emphasis/post-cursor2, 128b/132b uses FFE preset.
 */
3646 for (lane = 0; lane <
3647 (unsigned int)(link->cur_link_settings.lane_count);
3649 dpcd_lane_adjust.raw =
3650 get_nibble_at_index(&dpcd_lane_adjustment[0].raw, lane);
3651 if (dp_get_link_encoding_format(&link->cur_link_settings) ==
3652 DP_8b_10b_ENCODING) {
3653 link_training_settings.hw_lane_settings[lane].VOLTAGE_SWING =
3654 (enum dc_voltage_swing)
3655 (dpcd_lane_adjust.bits.VOLTAGE_SWING_LANE);
3656 link_training_settings.hw_lane_settings[lane].PRE_EMPHASIS =
3657 (enum dc_pre_emphasis)
3658 (dpcd_lane_adjust.bits.PRE_EMPHASIS_LANE);
3659 link_training_settings.hw_lane_settings[lane].POST_CURSOR2 =
3660 (enum dc_post_cursor2)
3661 ((dpcd_post_cursor_2_adjustment >> (lane * 2)) & 0x03);
3663 #if defined(CONFIG_DRM_AMD_DC_DCN)
3664 else if (dp_get_link_encoding_format(&link->cur_link_settings) ==
3665 DP_128b_132b_ENCODING) {
3666 link_training_settings.hw_lane_settings[lane].FFE_PRESET.raw =
3667 dpcd_lane_adjust.tx_ffe.PRESET_VALUE;
3672 dp_hw_to_dpcd_lane_settings(&link_training_settings,
3673 link_training_settings.hw_lane_settings,
3674 link_training_settings.dpcd_lane_settings);
3675 /*Usage: Measure DP physical lane signal
3676 * by DP SI test equipment automatically.
3677 * PHY test pattern request is generated by equipment via HPD interrupt.
3678 * HPD needs to be active all the time. HPD should be active
3679 * all the time. Do not touch it.
3680 * forward request to DS
3682 dc_link_dp_set_test_pattern(
3685 DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED,
3686 &link_training_settings,
3687 test_pattern_buffer,
/* Compliance (link layer): read the requested video test pattern and its
 * parameters (color format, dynamic range, bpc) from DPCD, retarget the
 * active top pipe driving this link to the requested encoding/depth, and
 * program the pattern via dc_link_dp_set_test_pattern().
 *
 * NOTE(review): this extract has elided lines (missing "break;"s, "else",
 * case labels, early return, call arguments); diff against upstream
 * dc_link_dp.c before building.
 */
3691 static void dp_test_send_link_test_pattern(struct dc_link *link)
3693 union link_test_pattern dpcd_test_pattern;
3694 union test_misc dpcd_test_params;
3695 enum dp_test_pattern test_pattern;
3696 enum dp_test_pattern_color_space test_pattern_color_space =
3697 DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED;
3698 enum dc_color_depth requestColorDepth = COLOR_DEPTH_UNDEFINED;
3699 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
3700 struct pipe_ctx *pipe_ctx = NULL;
3703 memset(&dpcd_test_pattern, 0, sizeof(dpcd_test_pattern));
3704 memset(&dpcd_test_params, 0, sizeof(dpcd_test_params));
/* Find the first non-split pipe whose stream is on this link. */
3706 for (i = 0; i < MAX_PIPES; i++) {
3707 if (pipes[i].stream == NULL)
3710 if (pipes[i].stream->link == link && !pipes[i].top_pipe && !pipes[i].prev_odm_pipe) {
3711 pipe_ctx = &pipes[i];
3716 if (pipe_ctx == NULL)
3719 /* get link test pattern and pattern parameters */
3720 core_link_read_dpcd(
3723 &dpcd_test_pattern.raw,
3724 sizeof(dpcd_test_pattern));
3725 core_link_read_dpcd(
3728 &dpcd_test_params.raw,
3729 sizeof(dpcd_test_params));
/* Translate the DPCD pattern request into a DC test pattern. */
3731 switch (dpcd_test_pattern.bits.PATTERN) {
3732 case LINK_TEST_PATTERN_COLOR_RAMP:
3733 test_pattern = DP_TEST_PATTERN_COLOR_RAMP;
3735 case LINK_TEST_PATTERN_VERTICAL_BARS:
3736 test_pattern = DP_TEST_PATTERN_VERTICAL_BARS;
3737 break; /* black and white */
3738 case LINK_TEST_PATTERN_COLOR_SQUARES:
3739 test_pattern = (dpcd_test_params.bits.DYN_RANGE ==
3740 TEST_DYN_RANGE_VESA ?
3741 DP_TEST_PATTERN_COLOR_SQUARES :
3742 DP_TEST_PATTERN_COLOR_SQUARES_CEA);
3745 test_pattern = DP_TEST_PATTERN_VIDEO_MODE;
/* CLR_FORMAT 0 = RGB; otherwise YCbCr with 601/709 coefficients. */
3749 if (dpcd_test_params.bits.CLR_FORMAT == 0)
3750 test_pattern_color_space = DP_TEST_PATTERN_COLOR_SPACE_RGB;
3752 test_pattern_color_space = dpcd_test_params.bits.YCBCR_COEFS ?
3753 DP_TEST_PATTERN_COLOR_SPACE_YCBCR709 :
3754 DP_TEST_PATTERN_COLOR_SPACE_YCBCR601;
3756 switch (dpcd_test_params.bits.BPC) {
3758 requestColorDepth = COLOR_DEPTH_666;
3761 requestColorDepth = COLOR_DEPTH_888;
3764 requestColorDepth = COLOR_DEPTH_101010;
3767 requestColorDepth = COLOR_DEPTH_121212;
3773 switch (dpcd_test_params.bits.CLR_FORMAT) {
3775 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_RGB;
3778 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_YCBCR422;
3781 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_YCBCR444;
3784 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_RGB;
/* Apply the requested color depth and refresh DSC if it changed. */
3789 if (requestColorDepth != COLOR_DEPTH_UNDEFINED
3790 && pipe_ctx->stream->timing.display_color_depth != requestColorDepth) {
3791 DC_LOG_DEBUG("%s: original bpc %d, changing to %d\n",
3793 pipe_ctx->stream->timing.display_color_depth,
3795 pipe_ctx->stream->timing.display_color_depth = requestColorDepth;
3798 dp_update_dsc_config(pipe_ctx);
3800 dc_link_dp_set_test_pattern(
3803 test_pattern_color_space,
/* Compliance (audio): read the audio test mode (channel count, sampling
 * rate) and pattern type from DPCD, plus per-channel pattern periods for
 * sawtooth/operator-defined patterns, and stash the request in
 * link->audio_test_data for the DM/audio path to act on.
 *
 * @disable_video: from TEST_AUDIO_DISABLED_VIDEO in the test request.
 *
 * NOTE(review): this extract has elided lines (missing braces, DPCD
 * address arguments, "break;"s); diff against upstream before building.
 */
3809 static void dp_test_get_audio_test_data(struct dc_link *link, bool disable_video)
3811 union audio_test_mode dpcd_test_mode = {0};
3812 struct audio_test_pattern_type dpcd_pattern_type = {0};
3813 union audio_test_pattern_period dpcd_pattern_period[AUDIO_CHANNELS_COUNT] = {0};
3814 enum dp_test_pattern test_pattern = DP_TEST_PATTERN_AUDIO_OPERATOR_DEFINED;
3816 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
3817 struct pipe_ctx *pipe_ctx = &pipes[0];
3818 unsigned int channel_count;
3819 unsigned int channel = 0;
3820 unsigned int modes = 0;
3821 unsigned int sampling_rate_in_hz = 0;
3823 // get audio test mode and test pattern parameters
3824 core_link_read_dpcd(
3827 &dpcd_test_mode.raw,
3828 sizeof(dpcd_test_mode));
3830 core_link_read_dpcd(
3832 DP_TEST_AUDIO_PATTERN_TYPE,
3833 &dpcd_pattern_type.value,
3834 sizeof(dpcd_pattern_type));
/* DPCD field is channels-minus-one. */
3836 channel_count = dpcd_test_mode.bits.channel_count + 1;
3838 // read pattern periods for requested channels when sawTooth pattern is requested
3839 if (dpcd_pattern_type.value == AUDIO_TEST_PATTERN_SAWTOOTH ||
3840 dpcd_pattern_type.value == AUDIO_TEST_PATTERN_OPERATOR_DEFINED) {
3842 test_pattern = (dpcd_pattern_type.value == AUDIO_TEST_PATTERN_SAWTOOTH) ?
3843 DP_TEST_PATTERN_AUDIO_SAWTOOTH : DP_TEST_PATTERN_AUDIO_OPERATOR_DEFINED;
3844 // read period for each channel
3845 for (channel = 0; channel < channel_count; channel++) {
3846 core_link_read_dpcd(
3848 DP_TEST_AUDIO_PERIOD_CH1 + channel,
3849 &dpcd_pattern_period[channel].raw,
3850 sizeof(dpcd_pattern_period[channel]));
3854 // translate sampling rate
3855 switch (dpcd_test_mode.bits.sampling_rate) {
3856 case AUDIO_SAMPLING_RATE_32KHZ:
3857 sampling_rate_in_hz = 32000;
3859 case AUDIO_SAMPLING_RATE_44_1KHZ:
3860 sampling_rate_in_hz = 44100;
3862 case AUDIO_SAMPLING_RATE_48KHZ:
3863 sampling_rate_in_hz = 48000;
3865 case AUDIO_SAMPLING_RATE_88_2KHZ:
3866 sampling_rate_in_hz = 88200;
3868 case AUDIO_SAMPLING_RATE_96KHZ:
3869 sampling_rate_in_hz = 96000;
3871 case AUDIO_SAMPLING_RATE_176_4KHZ:
3872 sampling_rate_in_hz = 176400;
3874 case AUDIO_SAMPLING_RATE_192KHZ:
3875 sampling_rate_in_hz = 192000;
3878 sampling_rate_in_hz = 0;
/* Publish the decoded request for the audio/DM layer. */
3882 link->audio_test_data.flags.test_requested = 1;
3883 link->audio_test_data.flags.disable_video = disable_video;
3884 link->audio_test_data.sampling_rate = sampling_rate_in_hz;
3885 link->audio_test_data.channel_count = channel_count;
3886 link->audio_test_data.pattern_type = test_pattern;
3888 if (test_pattern == DP_TEST_PATTERN_AUDIO_SAWTOOTH) {
3889 for (modes = 0; modes < pipe_ctx->stream->audio_info.mode_count; modes++) {
3890 link->audio_test_data.pattern_period[modes] = dpcd_pattern_period[modes].bits.pattern_period;
3895 void dc_link_dp_handle_automated_test(struct dc_link *link)
3897 union test_request test_request;
3898 union test_response test_response;
3900 memset(&test_request, 0, sizeof(test_request));
3901 memset(&test_response, 0, sizeof(test_response));
3903 core_link_read_dpcd(
3907 sizeof(union test_request));
3908 if (test_request.bits.LINK_TRAINING) {
3909 /* ACK first to let DP RX test box monitor LT sequence */
3910 test_response.bits.ACK = 1;
3911 core_link_write_dpcd(
3915 sizeof(test_response));
3916 dp_test_send_link_training(link);
3917 /* no acknowledge request is needed again */
3918 test_response.bits.ACK = 0;
3920 if (test_request.bits.LINK_TEST_PATTRN) {
3921 dp_test_send_link_test_pattern(link);
3922 test_response.bits.ACK = 1;
3925 if (test_request.bits.AUDIO_TEST_PATTERN) {
3926 dp_test_get_audio_test_data(link, test_request.bits.TEST_AUDIO_DISABLED_VIDEO);
3927 test_response.bits.ACK = 1;
3930 if (test_request.bits.PHY_TEST_PATTERN) {
3931 dp_test_send_phy_test_pattern(link);
3932 test_response.bits.ACK = 1;
3935 /* send request acknowledgment */
3936 if (test_response.bits.ACK)
3937 core_link_write_dpcd(
3941 sizeof(test_response));
3944 void dc_link_dp_handle_link_loss(struct dc_link *link)
3947 struct pipe_ctx *pipe_ctx;
3949 for (i = 0; i < MAX_PIPES; i++) {
3950 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
3951 if (pipe_ctx && pipe_ctx->stream && pipe_ctx->stream->link == link)
3955 if (pipe_ctx == NULL || pipe_ctx->stream == NULL)
3958 for (i = 0; i < MAX_PIPES; i++) {
3959 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
3960 if (pipe_ctx && pipe_ctx->stream && !pipe_ctx->stream->dpms_off &&
3961 pipe_ctx->stream->link == link && !pipe_ctx->prev_odm_pipe) {
3962 core_link_disable_stream(pipe_ctx);
3966 for (i = 0; i < MAX_PIPES; i++) {
3967 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
3968 if (pipe_ctx && pipe_ctx->stream && !pipe_ctx->stream->dpms_off &&
3969 pipe_ctx->stream->link == link && !pipe_ctx->prev_odm_pipe) {
3970 core_link_enable_stream(link->dc->current_state, pipe_ctx);
/* Handle a short-pulse (RX) HPD interrupt on @link.
 * Reads the HPD IRQ DPCD block, then in order: services/acks automated
 * tests, PSR sink errors, MST up/down sideband message flags, link loss
 * (non-eDP only, triggering retrain via dc_link_dp_handle_link_loss),
 * and finally a sink-count change on SST branch devices.
 *
 * @out_hpd_irq_dpcd_data: optional out copy of the raw IRQ DPCD data.
 * @out_link_loss: set true when link loss was detected.
 * @defer_handling/@has_left_work: when deferring, heavy work is skipped
 *   and *has_left_work flags that a follow-up pass is needed.
 *
 * NOTE(review): this extract has elided lines (early returns, braces,
 * "else" arms, status assignments); diff against upstream before building.
 */
3975 bool dc_link_handle_hpd_rx_irq(struct dc_link *link, union hpd_irq_data *out_hpd_irq_dpcd_data, bool *out_link_loss,
3976 bool defer_handling, bool *has_left_work)
3978 union hpd_irq_data hpd_irq_dpcd_data = {0};
3979 union device_service_irq device_service_clear = {0};
3980 enum dc_status result;
3981 bool status = false;
3984 *out_link_loss = false;
3987 *has_left_work = false;
3988 /* For use cases related to down stream connection status change,
3989 * PSR and device auto test, refer to function handle_sst_hpd_irq
3992 DC_LOG_HW_HPD_IRQ("%s: Got short pulse HPD on link %d\n",
3993 __func__, link->link_index);
3996 /* All the "handle_hpd_irq_xxx()" methods
3997 * should be called only after
3998 * dal_dpsst_ls_read_hpd_irq_data
3999 * Order of calls is important too
4001 result = read_hpd_rx_irq_data(link, &hpd_irq_dpcd_data);
4002 if (out_hpd_irq_dpcd_data)
4003 *out_hpd_irq_dpcd_data = hpd_irq_dpcd_data;
4005 if (result != DC_OK) {
4006 DC_LOG_HW_HPD_IRQ("%s: DPCD read failed to obtain irq data\n",
/* Automated test: ack DEVICE_SERVICE_IRQ, then run (or defer) the test. */
4011 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.AUTOMATED_TEST) {
4012 device_service_clear.bits.AUTOMATED_TEST = 1;
4013 core_link_write_dpcd(
4015 DP_DEVICE_SERVICE_IRQ_VECTOR,
4016 &device_service_clear.raw,
4017 sizeof(device_service_clear.raw));
4018 device_service_clear.raw = 0;
4019 if (defer_handling && has_left_work)
4020 *has_left_work = true;
4022 dc_link_dp_handle_automated_test(link);
4026 if (!dc_link_dp_allow_hpd_rx_irq(link)) {
4027 DC_LOG_HW_HPD_IRQ("%s: skipping HPD handling on %d\n",
4028 __func__, link->link_index);
4032 if (handle_hpd_irq_psr_sink(link))
4033 /* PSR-related error was detected and handled */
4036 /* If PSR-related error handled, Main link may be off,
4037 * so do not handle as a normal sink status change interrupt.
4040 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.UP_REQ_MSG_RDY) {
4041 if (defer_handling && has_left_work)
4042 *has_left_work = true;
4046 /* check if we have MST msg and return since we poll for it */
4047 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.DOWN_REP_MSG_RDY) {
4048 if (defer_handling && has_left_work)
4049 *has_left_work = true;
4053 /* For now we only handle 'Downstream port status' case.
4054 * If we got sink count changed it means
4055 * Downstream port status changed,
4056 * then DM should call DC to do the detection.
4057 * NOTE: Do not handle link loss on eDP since it is internal link*/
4058 if ((link->connector_signal != SIGNAL_TYPE_EDP) &&
4059 hpd_rx_irq_check_link_loss_status(
4061 &hpd_irq_dpcd_data)) {
4062 /* Connectivity log: link loss */
4063 CONN_DATA_LINK_LOSS(link,
4064 hpd_irq_dpcd_data.raw,
4065 sizeof(hpd_irq_dpcd_data),
4068 if (defer_handling && has_left_work)
4069 *has_left_work = true;
4071 dc_link_dp_handle_link_loss(link);
4075 *out_link_loss = true;
/* SST branch: a changed sink count means downstream status changed. */
4078 if (link->type == dc_connection_sst_branch &&
4079 hpd_irq_dpcd_data.bytes.sink_cnt.bits.SINK_COUNT
4080 != link->dpcd_sink_count)
4083 /* reasons for HPD RX:
4084 * 1. Link Loss - ie Re-train the Link
4085 * 2. MST sideband message
4086 * 3. Automated Test - ie. Internal Commit
4087 * 4. CP (copy protection) - (not interesting for DM???)
4089 * 6. Downstream Port status changed
4090 * -ie. Detect - this the only one
4091 * which is interesting for DM because
4092 * it must call dc_link_detect.
4097 /*query dpcd for version and mst cap addresses*/
4098 bool is_mst_supported(struct dc_link *link)
4101 enum dc_status st = DC_OK;
4105 if (link->preferred_training_settings.mst_enable &&
4106 *link->preferred_training_settings.mst_enable == false) {
4113 st = core_link_read_dpcd(link, DP_DPCD_REV, &rev.raw,
4116 if (st == DC_OK && rev.raw >= DPCD_REV_12) {
4118 st = core_link_read_dpcd(link, DP_MSTM_CAP,
4119 &cap.raw, sizeof(cap));
4120 if (st == DC_OK && cap.bits.MST_CAP == 1)
4127 bool is_dp_active_dongle(const struct dc_link *link)
4129 return (link->dpcd_caps.dongle_type >= DISPLAY_DONGLE_DP_VGA_CONVERTER) &&
4130 (link->dpcd_caps.dongle_type <= DISPLAY_DONGLE_DP_HDMI_CONVERTER);
4133 bool is_dp_branch_device(const struct dc_link *link)
4135 return link->dpcd_caps.is_branch_dev;
4138 static int translate_dpcd_max_bpc(enum dpcd_downstream_port_max_bpc bpc)
4141 case DOWN_STREAM_MAX_8BPC:
4143 case DOWN_STREAM_MAX_10BPC:
4145 case DOWN_STREAM_MAX_12BPC:
4147 case DOWN_STREAM_MAX_16BPC:
4156 static void read_dp_device_vendor_id(struct dc_link *link)
4158 struct dp_device_vendor_id dp_id;
4160 /* read IEEE branch device id */
4161 core_link_read_dpcd(
4167 link->dpcd_caps.branch_dev_id =
4168 (dp_id.ieee_oui[0] << 16) +
4169 (dp_id.ieee_oui[1] << 8) +
4173 link->dpcd_caps.branch_dev_name,
4174 dp_id.ieee_device_id,
4175 sizeof(dp_id.ieee_device_id));
/* Decode the DOWNSTREAM_PORT_PRESENT byte (@data, DPCD 05h) plus the
 * detailed downstream-port capability block (80h-8Fh) into
 * link->dpcd_caps: dongle type, HDMI/VGA converter capabilities
 * (max TMDS clock, YCbCr pass-through/conversion, max bpc), branch
 * HW/FW revision, and — on DCN with DPCD >= 1.4 — the DFP capability
 * extension (max pixel rate, max H/V active, encoding/color-depth caps).
 *
 * NOTE(review): this extract has elided lines (braces, "break;"s after
 * cases, some field assignments); diff against upstream before building.
 */
4180 static void get_active_converter_info(
4181 uint8_t data, struct dc_link *link)
4183 union dp_downstream_port_present ds_port = { .byte = data };
4184 memset(&link->dpcd_caps.dongle_caps, 0, sizeof(link->dpcd_caps.dongle_caps));
4186 /* decode converter info*/
4187 if (!ds_port.fields.PORT_PRESENT) {
4188 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
4189 ddc_service_set_dongle_type(link->ddc,
4190 link->dpcd_caps.dongle_type);
4191 link->dpcd_caps.is_branch_dev = false;
4195 /* DPCD 0x5 bit 0 = 1, it indicate it's branch device */
4196 link->dpcd_caps.is_branch_dev = ds_port.fields.PORT_PRESENT;
4198 switch (ds_port.fields.PORT_TYPE) {
4199 case DOWNSTREAM_VGA:
4200 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_DP_VGA_CONVERTER;
4202 case DOWNSTREAM_DVI_HDMI_DP_PLUS_PLUS:
4203 /* At this point we don't know is it DVI or HDMI or DP++,
4205 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_DP_DVI_CONVERTER;
4208 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
/* DPCD 1.1+: refine the dongle type from the detailed caps block. */
4212 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_11) {
4213 uint8_t det_caps[16]; /* CTS 4.2.2.7 expects source to read Detailed Capabilities Info : 00080h-0008F.*/
4214 union dwnstream_port_caps_byte0 *port_caps =
4215 (union dwnstream_port_caps_byte0 *)det_caps;
4216 if (core_link_read_dpcd(link, DP_DOWNSTREAM_PORT_0,
4217 det_caps, sizeof(det_caps)) == DC_OK) {
4219 switch (port_caps->bits.DWN_STRM_PORTX_TYPE) {
4220 /*Handle DP case as DONGLE_NONE*/
4221 case DOWN_STREAM_DETAILED_DP:
4222 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
4224 case DOWN_STREAM_DETAILED_VGA:
4225 link->dpcd_caps.dongle_type =
4226 DISPLAY_DONGLE_DP_VGA_CONVERTER;
4228 case DOWN_STREAM_DETAILED_DVI:
4229 link->dpcd_caps.dongle_type =
4230 DISPLAY_DONGLE_DP_DVI_CONVERTER;
4232 case DOWN_STREAM_DETAILED_HDMI:
4233 case DOWN_STREAM_DETAILED_DP_PLUS_PLUS:
4234 /*Handle DP++ active converter case, process DP++ case as HDMI case according DP1.4 spec*/
4235 link->dpcd_caps.dongle_type =
4236 DISPLAY_DONGLE_DP_HDMI_CONVERTER;
4238 link->dpcd_caps.dongle_caps.dongle_type = link->dpcd_caps.dongle_type;
4239 if (ds_port.fields.DETAILED_CAPS) {
4241 union dwnstream_port_caps_byte3_hdmi
4242 hdmi_caps = {.raw = det_caps[3] };
4243 union dwnstream_port_caps_byte2
4244 hdmi_color_caps = {.raw = det_caps[2] };
4245 link->dpcd_caps.dongle_caps.dp_hdmi_max_pixel_clk_in_khz =
4248 link->dpcd_caps.dongle_caps.is_dp_hdmi_s3d_converter =
4249 hdmi_caps.bits.FRAME_SEQ_TO_FRAME_PACK;
4250 /*YCBCR capability only for HDMI case*/
4251 if (port_caps->bits.DWN_STRM_PORTX_TYPE
4252 == DOWN_STREAM_DETAILED_HDMI) {
4253 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr422_pass_through =
4254 hdmi_caps.bits.YCrCr422_PASS_THROUGH;
4255 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr420_pass_through =
4256 hdmi_caps.bits.YCrCr420_PASS_THROUGH;
4257 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr422_converter =
4258 hdmi_caps.bits.YCrCr422_CONVERSION;
4259 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr420_converter =
4260 hdmi_caps.bits.YCrCr420_CONVERSION;
4263 link->dpcd_caps.dongle_caps.dp_hdmi_max_bpc =
4264 translate_dpcd_max_bpc(
4265 hdmi_color_caps.bits.MAX_BITS_PER_COLOR_COMPONENT);
4267 if (link->dpcd_caps.dongle_caps.dp_hdmi_max_pixel_clk_in_khz != 0)
4268 link->dpcd_caps.dongle_caps.extendedCapValid = true;
4276 ddc_service_set_dongle_type(link->ddc, link->dpcd_caps.dongle_type);
/* Branch HW/FW revision (DPCD 509h-). */
4279 struct dp_sink_hw_fw_revision dp_hw_fw_revision;
4281 core_link_read_dpcd(
4283 DP_BRANCH_REVISION_START,
4284 (uint8_t *)&dp_hw_fw_revision,
4285 sizeof(dp_hw_fw_revision));
4287 link->dpcd_caps.branch_hw_revision =
4288 dp_hw_fw_revision.ieee_hw_rev;
4291 link->dpcd_caps.branch_fw_revision,
4292 dp_hw_fw_revision.ieee_fw_rev,
4293 sizeof(dp_hw_fw_revision.ieee_fw_rev));
4295 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* DFP capability extension (DPCD 0A3h-) — DCN + DPCD >= 1.4 only. */
4296 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_14 &&
4297 link->dpcd_caps.dongle_type != DISPLAY_DONGLE_NONE) {
4298 union dp_dfp_cap_ext dfp_cap_ext;
4299 memset(&dfp_cap_ext, '\0', sizeof (dfp_cap_ext));
4300 core_link_read_dpcd(
4302 DP_DFP_CAPABILITY_EXTENSION_SUPPORT,
4304 sizeof(dfp_cap_ext.raw));
4305 link->dpcd_caps.dongle_caps.dfp_cap_ext.supported = dfp_cap_ext.fields.supported;
/* Multi-byte fields are little-endian in DPCD: combine low + high bytes. */
4306 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_pixel_rate_in_mps =
4307 dfp_cap_ext.fields.max_pixel_rate_in_mps[0] +
4308 (dfp_cap_ext.fields.max_pixel_rate_in_mps[1] << 8);
4309 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_h_active_width =
4310 dfp_cap_ext.fields.max_video_h_active_width[0] +
4311 (dfp_cap_ext.fields.max_video_h_active_width[1] << 8);
4312 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_v_active_height =
4313 dfp_cap_ext.fields.max_video_v_active_height[0] +
4314 (dfp_cap_ext.fields.max_video_v_active_height[1] << 8);
4315 link->dpcd_caps.dongle_caps.dfp_cap_ext.encoding_format_caps =
4316 dfp_cap_ext.fields.encoding_format_caps;
4317 link->dpcd_caps.dongle_caps.dfp_cap_ext.rgb_color_depth_caps =
4318 dfp_cap_ext.fields.rgb_color_depth_caps;
4319 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr444_color_depth_caps =
4320 dfp_cap_ext.fields.ycbcr444_color_depth_caps;
4321 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr422_color_depth_caps =
4322 dfp_cap_ext.fields.ycbcr422_color_depth_caps;
4323 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr420_color_depth_caps =
4324 dfp_cap_ext.fields.ycbcr420_color_depth_caps;
4325 DC_LOG_DP2("DFP capability extension is read at link %d", link->link_index);
4326 DC_LOG_DP2("\tdfp_cap_ext.supported = %s", link->dpcd_caps.dongle_caps.dfp_cap_ext.supported ? "true" : "false");
4327 DC_LOG_DP2("\tdfp_cap_ext.max_pixel_rate_in_mps = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_pixel_rate_in_mps);
4328 DC_LOG_DP2("\tdfp_cap_ext.max_video_h_active_width = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_h_active_width);
4329 DC_LOG_DP2("\tdfp_cap_ext.max_video_v_active_height = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_v_active_height);
4334 static void dp_wa_power_up_0010FA(struct dc_link *link, uint8_t *dpcd_data,
4339 if (!link->dpcd_caps.dpcd_rev.raw) {
4341 dp_receiver_power_ctrl(link, true);
4342 core_link_read_dpcd(link, DP_DPCD_REV,
4344 link->dpcd_caps.dpcd_rev.raw = dpcd_data[
4347 } while (retry++ < 4 && !link->dpcd_caps.dpcd_rev.raw);
4350 if (link->dpcd_caps.dongle_type == DISPLAY_DONGLE_DP_VGA_CONVERTER) {
4351 switch (link->dpcd_caps.branch_dev_id) {
4352 /* 0010FA active dongles (DP-VGA, DP-DLDVI converters) power down
4353 * all internal circuits including AUX communication preventing
4354 * reading DPCD table and EDID (spec violation).
4355 * Encoder will skip DP RX power down on disable_output to
4356 * keep receiver powered all the time.*/
4357 case DP_BRANCH_DEVICE_ID_0010FA:
4358 case DP_BRANCH_DEVICE_ID_0080E1:
4359 case DP_BRANCH_DEVICE_ID_00E04C:
4360 link->wa_flags.dp_keep_receiver_powered = true;
4363 /* TODO: May need work around for other dongles. */
4365 link->wa_flags.dp_keep_receiver_powered = false;
4369 link->wa_flags.dp_keep_receiver_powered = false;
4372 /* Read additional sink caps defined in source specific DPCD area
4373 * This function currently only reads from SinkCapability address (DP_SOURCE_SINK_CAP)
4375 static bool dpcd_read_sink_ext_caps(struct dc_link *link)
4382 if (core_link_read_dpcd(link, DP_SOURCE_SINK_CAP, &dpcd_data, 1) != DC_OK)
4385 link->dpcd_sink_ext_caps.raw = dpcd_data;
4389 bool dp_retrieve_lttpr_cap(struct dc_link *link)
4391 #if defined(CONFIG_DRM_AMD_DC_DCN)
4392 uint8_t lttpr_dpcd_data[8];
4393 bool allow_lttpr_non_transparent_mode = 0;
4395 uint8_t lttpr_dpcd_data[6];
4397 bool vbios_lttpr_enable = link->dc->caps.vbios_lttpr_enable;
4398 bool vbios_lttpr_interop = link->dc->caps.vbios_lttpr_aware;
4399 enum dc_status status = DC_ERROR_UNEXPECTED;
4400 bool is_lttpr_present = false;
4402 memset(lttpr_dpcd_data, '\0', sizeof(lttpr_dpcd_data));
4404 #if defined(CONFIG_DRM_AMD_DC_DCN)
4405 if ((link->dc->config.allow_lttpr_non_transparent_mode.bits.DP2_0 &&
4406 link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED)) {
4407 allow_lttpr_non_transparent_mode = 1;
4408 } else if (link->dc->config.allow_lttpr_non_transparent_mode.bits.DP1_4A &&
4409 !link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED) {
4410 allow_lttpr_non_transparent_mode = 1;
4415 * Logic to determine LTTPR mode
4417 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
4418 if (vbios_lttpr_enable && vbios_lttpr_interop)
4419 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
4420 else if (!vbios_lttpr_enable && vbios_lttpr_interop) {
4421 #if defined(CONFIG_DRM_AMD_DC_DCN)
4422 if (allow_lttpr_non_transparent_mode)
4424 if (link->dc->config.allow_lttpr_non_transparent_mode)
4426 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
4428 link->lttpr_mode = LTTPR_MODE_TRANSPARENT;
4429 } else if (!vbios_lttpr_enable && !vbios_lttpr_interop) {
4430 #if defined(CONFIG_DRM_AMD_DC_DCN)
4431 if (!allow_lttpr_non_transparent_mode || !link->dc->caps.extended_aux_timeout_support)
4433 if (!link->dc->config.allow_lttpr_non_transparent_mode
4434 || !link->dc->caps.extended_aux_timeout_support)
4436 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
4438 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
4440 #if defined(CONFIG_DRM_AMD_DC_DCN)
4441 /* Check DP tunnel LTTPR mode debug option. */
4442 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA &&
4443 link->dc->debug.dpia_debug.bits.force_non_lttpr)
4444 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
4447 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT || link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
4448 /* By reading LTTPR capability, RX assumes that we will enable
4449 * LTTPR extended aux timeout if LTTPR is present.
4451 status = core_link_read_dpcd(
4453 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV,
4455 sizeof(lttpr_dpcd_data));
4456 if (status != DC_OK) {
4457 dm_error("%s: Read LTTPR caps data failed.\n", __func__);
4461 link->dpcd_caps.lttpr_caps.revision.raw =
4462 lttpr_dpcd_data[DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV -
4463 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4465 link->dpcd_caps.lttpr_caps.max_link_rate =
4466 lttpr_dpcd_data[DP_MAX_LINK_RATE_PHY_REPEATER -
4467 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4469 link->dpcd_caps.lttpr_caps.phy_repeater_cnt =
4470 lttpr_dpcd_data[DP_PHY_REPEATER_CNT -
4471 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4473 link->dpcd_caps.lttpr_caps.max_lane_count =
4474 lttpr_dpcd_data[DP_MAX_LANE_COUNT_PHY_REPEATER -
4475 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4477 link->dpcd_caps.lttpr_caps.mode =
4478 lttpr_dpcd_data[DP_PHY_REPEATER_MODE -
4479 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4481 link->dpcd_caps.lttpr_caps.max_ext_timeout =
4482 lttpr_dpcd_data[DP_PHY_REPEATER_EXTENDED_WAIT_TIMEOUT -
4483 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4485 #if defined(CONFIG_DRM_AMD_DC_DCN)
4486 link->dpcd_caps.lttpr_caps.main_link_channel_coding.raw =
4487 lttpr_dpcd_data[DP_MAIN_LINK_CHANNEL_CODING_PHY_REPEATER -
4488 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4490 link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.raw =
4491 lttpr_dpcd_data[DP_PHY_REPEATER_128b_132b_RATES -
4492 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4495 /* Attempt to train in LTTPR transparent mode if repeater count exceeds 8. */
4496 is_lttpr_present = (dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt) != 0 &&
4497 link->dpcd_caps.lttpr_caps.phy_repeater_cnt < 0xff &&
4498 link->dpcd_caps.lttpr_caps.max_lane_count > 0 &&
4499 link->dpcd_caps.lttpr_caps.max_lane_count <= 4 &&
4500 link->dpcd_caps.lttpr_caps.revision.raw >= 0x14);
4501 if (is_lttpr_present) {
4502 CONN_DATA_DETECT(link, lttpr_dpcd_data, sizeof(lttpr_dpcd_data), "LTTPR Caps: ");
4503 configure_lttpr_mode_transparent(link);
4505 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
4507 return is_lttpr_present;
4510 static bool retrieve_link_cap(struct dc_link *link)
4512 /* DP_ADAPTER_CAP - DP_DPCD_REV + 1 == 16 and also DP_DSC_BITS_PER_PIXEL_INC - DP_DSC_SUPPORT + 1 == 16,
4513 * which means size 16 will be good for both of those DPCD register block reads
4515 uint8_t dpcd_data[16];
4516 /*Only need to read 1 byte starting from DP_DPRX_FEATURE_ENUMERATION_LIST.
4518 uint8_t dpcd_dprx_data = '\0';
4519 uint8_t dpcd_power_state = '\0';
4521 struct dp_device_vendor_id sink_id;
4522 union down_stream_port_count down_strm_port_count;
4523 union edp_configuration_cap edp_config_cap;
4524 union dp_downstream_port_present ds_port = { 0 };
4525 enum dc_status status = DC_ERROR_UNEXPECTED;
4526 uint32_t read_dpcd_retry_cnt = 3;
4528 struct dp_sink_hw_fw_revision dp_hw_fw_revision;
4529 const uint32_t post_oui_delay = 30; // 30ms
4530 bool is_lttpr_present = false;
4532 memset(dpcd_data, '\0', sizeof(dpcd_data));
4533 memset(&down_strm_port_count,
4534 '\0', sizeof(union down_stream_port_count));
4535 memset(&edp_config_cap, '\0',
4536 sizeof(union edp_configuration_cap));
4538 /* if extended timeout is supported in hardware,
4539 * default to LTTPR timeout (3.2ms) first as a W/A for DP link layer
4540 * CTS 4.2.1.1 regression introduced by CTS specs requirement update.
4542 dc_link_aux_try_to_configure_timeout(link->ddc,
4543 LINK_AUX_DEFAULT_LTTPR_TIMEOUT_PERIOD);
4545 is_lttpr_present = dp_retrieve_lttpr_cap(link);
4546 /* Read DP tunneling information. */
4547 status = dpcd_get_tunneling_device_data(link);
4549 status = core_link_read_dpcd(link, DP_SET_POWER,
4550 &dpcd_power_state, sizeof(dpcd_power_state));
4552 /* Delay 1 ms if AUX CH is in power down state. Based on spec
4553 * section 2.3.1.2, if AUX CH may be powered down due to
4554 * write to DPCD 600h = 2. Sink AUX CH is monitoring differential
4555 * signal and may need up to 1 ms before being able to reply.
4557 if (status != DC_OK || dpcd_power_state == DP_SET_POWER_D3)
4560 dpcd_set_source_specific_data(link);
4561 /* Sink may need to configure internals based on vendor, so allow some
4562 * time before proceeding with possibly vendor specific transactions
4564 msleep(post_oui_delay);
4566 for (i = 0; i < read_dpcd_retry_cnt; i++) {
4567 status = core_link_read_dpcd(
4572 if (status == DC_OK)
4576 if (status != DC_OK) {
4577 dm_error("%s: Read receiver caps dpcd data failed.\n", __func__);
4581 if (!is_lttpr_present)
4582 dc_link_aux_try_to_configure_timeout(link->ddc, LINK_AUX_DEFAULT_TIMEOUT_PERIOD);
4585 union training_aux_rd_interval aux_rd_interval;
4587 aux_rd_interval.raw =
4588 dpcd_data[DP_TRAINING_AUX_RD_INTERVAL];
4590 link->dpcd_caps.ext_receiver_cap_field_present =
4591 aux_rd_interval.bits.EXT_RECEIVER_CAP_FIELD_PRESENT == 1;
4593 if (aux_rd_interval.bits.EXT_RECEIVER_CAP_FIELD_PRESENT == 1) {
4594 uint8_t ext_cap_data[16];
4596 memset(ext_cap_data, '\0', sizeof(ext_cap_data));
4597 for (i = 0; i < read_dpcd_retry_cnt; i++) {
4598 status = core_link_read_dpcd(
4602 sizeof(ext_cap_data));
4603 if (status == DC_OK) {
4604 memcpy(dpcd_data, ext_cap_data, sizeof(dpcd_data));
4608 if (status != DC_OK)
4609 dm_error("%s: Read extend caps data failed, use cap from dpcd 0.\n", __func__);
4613 link->dpcd_caps.dpcd_rev.raw =
4614 dpcd_data[DP_DPCD_REV - DP_DPCD_REV];
4616 if (link->dpcd_caps.ext_receiver_cap_field_present) {
4617 for (i = 0; i < read_dpcd_retry_cnt; i++) {
4618 status = core_link_read_dpcd(
4620 DP_DPRX_FEATURE_ENUMERATION_LIST,
4622 sizeof(dpcd_dprx_data));
4623 if (status == DC_OK)
4627 link->dpcd_caps.dprx_feature.raw = dpcd_dprx_data;
4629 if (status != DC_OK)
4630 dm_error("%s: Read DPRX caps data failed.\n", __func__);
4634 link->dpcd_caps.dprx_feature.raw = 0;
4638 /* Error condition checking...
4639 * It is impossible for Sink to report Max Lane Count = 0.
4640 * It is possible for Sink to report Max Link Rate = 0, if it is
4641 * an eDP device that is reporting specialized link rates in the
4642 * SUPPORTED_LINK_RATE table.
4644 if (dpcd_data[DP_MAX_LANE_COUNT - DP_DPCD_REV] == 0)
4647 ds_port.byte = dpcd_data[DP_DOWNSTREAMPORT_PRESENT -
4650 read_dp_device_vendor_id(link);
4652 get_active_converter_info(ds_port.byte, link);
4654 dp_wa_power_up_0010FA(link, dpcd_data, sizeof(dpcd_data));
4656 down_strm_port_count.raw = dpcd_data[DP_DOWN_STREAM_PORT_COUNT -
4659 link->dpcd_caps.allow_invalid_MSA_timing_param =
4660 down_strm_port_count.bits.IGNORE_MSA_TIMING_PARAM;
4662 link->dpcd_caps.max_ln_count.raw = dpcd_data[
4663 DP_MAX_LANE_COUNT - DP_DPCD_REV];
4665 link->dpcd_caps.max_down_spread.raw = dpcd_data[
4666 DP_MAX_DOWNSPREAD - DP_DPCD_REV];
4668 link->reported_link_cap.lane_count =
4669 link->dpcd_caps.max_ln_count.bits.MAX_LANE_COUNT;
4670 link->reported_link_cap.link_rate = dpcd_data[
4671 DP_MAX_LINK_RATE - DP_DPCD_REV];
4672 link->reported_link_cap.link_spread =
4673 link->dpcd_caps.max_down_spread.bits.MAX_DOWN_SPREAD ?
4674 LINK_SPREAD_05_DOWNSPREAD_30KHZ : LINK_SPREAD_DISABLED;
4676 edp_config_cap.raw = dpcd_data[
4677 DP_EDP_CONFIGURATION_CAP - DP_DPCD_REV];
4678 link->dpcd_caps.panel_mode_edp =
4679 edp_config_cap.bits.ALT_SCRAMBLER_RESET;
4680 link->dpcd_caps.dpcd_display_control_capable =
4681 edp_config_cap.bits.DPCD_DISPLAY_CONTROL_CAPABLE;
4683 link->test_pattern_enabled = false;
4684 link->compliance_test_state.raw = 0;
4686 /* read sink count */
4687 core_link_read_dpcd(link,
4689 &link->dpcd_caps.sink_count.raw,
4690 sizeof(link->dpcd_caps.sink_count.raw));
4692 /* read sink ieee oui */
4693 core_link_read_dpcd(link,
4695 (uint8_t *)(&sink_id),
4698 link->dpcd_caps.sink_dev_id =
4699 (sink_id.ieee_oui[0] << 16) +
4700 (sink_id.ieee_oui[1] << 8) +
4701 (sink_id.ieee_oui[2]);
4704 link->dpcd_caps.sink_dev_id_str,
4705 sink_id.ieee_device_id,
4706 sizeof(sink_id.ieee_device_id));
4708 /* Quirk Apple MBP 2017 15" Retina panel: Wrong DP_MAX_LINK_RATE */
4710 uint8_t str_mbp_2017[] = { 101, 68, 21, 101, 98, 97 };
4712 if ((link->dpcd_caps.sink_dev_id == 0x0010fa) &&
4713 !memcmp(link->dpcd_caps.sink_dev_id_str, str_mbp_2017,
4714 sizeof(str_mbp_2017))) {
4715 link->reported_link_cap.link_rate = 0x0c;
4719 core_link_read_dpcd(
4721 DP_SINK_HW_REVISION_START,
4722 (uint8_t *)&dp_hw_fw_revision,
4723 sizeof(dp_hw_fw_revision));
4725 link->dpcd_caps.sink_hw_revision =
4726 dp_hw_fw_revision.ieee_hw_rev;
4729 link->dpcd_caps.sink_fw_revision,
4730 dp_hw_fw_revision.ieee_fw_rev,
4731 sizeof(dp_hw_fw_revision.ieee_fw_rev));
4733 memset(&link->dpcd_caps.dsc_caps, '\0',
4734 sizeof(link->dpcd_caps.dsc_caps));
4735 memset(&link->dpcd_caps.fec_cap, '\0', sizeof(link->dpcd_caps.fec_cap));
4736 /* Read DSC and FEC sink capabilities if DP revision is 1.4 and up */
4737 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_14) {
4738 status = core_link_read_dpcd(
4741 &link->dpcd_caps.fec_cap.raw,
4742 sizeof(link->dpcd_caps.fec_cap.raw));
4743 status = core_link_read_dpcd(
4746 link->dpcd_caps.dsc_caps.dsc_basic_caps.raw,
4747 sizeof(link->dpcd_caps.dsc_caps.dsc_basic_caps.raw));
4748 #if defined(CONFIG_DRM_AMD_DC_DCN)
4749 if (link->dpcd_caps.dongle_type != DISPLAY_DONGLE_NONE) {
4750 status = core_link_read_dpcd(
4752 DP_DSC_BRANCH_OVERALL_THROUGHPUT_0,
4753 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw,
4754 sizeof(link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw));
4755 DC_LOG_DSC("DSC branch decoder capability is read at link %d", link->link_index);
4756 DC_LOG_DSC("\tBRANCH_OVERALL_THROUGHPUT_0 = 0x%02x",
4757 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_OVERALL_THROUGHPUT_0);
4758 DC_LOG_DSC("\tBRANCH_OVERALL_THROUGHPUT_1 = 0x%02x",
4759 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_OVERALL_THROUGHPUT_1);
4760 DC_LOG_DSC("\tBRANCH_MAX_LINE_WIDTH 0x%02x",
4761 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_MAX_LINE_WIDTH);
4764 status = core_link_read_dpcd(
4766 DP_DSC_BRANCH_OVERALL_THROUGHPUT_0,
4767 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw,
4768 sizeof(link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw));
4772 if (!dpcd_read_sink_ext_caps(link))
4773 link->dpcd_sink_ext_caps.raw = 0;
4775 #if defined(CONFIG_DRM_AMD_DC_DCN)
4776 link->dpcd_caps.channel_coding_cap.raw = dpcd_data[DP_MAIN_LINK_CHANNEL_CODING_CAP - DP_DPCD_REV];
4778 if (link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED) {
4779 DC_LOG_DP2("128b/132b encoding is supported at link %d", link->link_index);
4781 core_link_read_dpcd(link,
4782 DP_128b_132b_SUPPORTED_LINK_RATES,
4783 &link->dpcd_caps.dp_128b_132b_supported_link_rates.raw,
4784 sizeof(link->dpcd_caps.dp_128b_132b_supported_link_rates.raw));
4785 if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR20)
4786 link->reported_link_cap.link_rate = LINK_RATE_UHBR20;
4787 else if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR13_5)
4788 link->reported_link_cap.link_rate = LINK_RATE_UHBR13_5;
4789 else if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR10)
4790 link->reported_link_cap.link_rate = LINK_RATE_UHBR10;
4792 dm_error("%s: Invalid RX 128b_132b_supported_link_rates\n", __func__);
4793 DC_LOG_DP2("128b/132b supported link rates is read at link %d", link->link_index);
4794 DC_LOG_DP2("\tmax 128b/132b link rate support is %d.%d GHz",
4795 link->reported_link_cap.link_rate / 100,
4796 link->reported_link_cap.link_rate % 100);
4798 core_link_read_dpcd(link,
4799 DP_SINK_VIDEO_FALLBACK_FORMATS,
4800 &link->dpcd_caps.fallback_formats.raw,
4801 sizeof(link->dpcd_caps.fallback_formats.raw));
4802 DC_LOG_DP2("sink video fallback format is read at link %d", link->link_index);
4803 if (link->dpcd_caps.fallback_formats.bits.dp_1920x1080_60Hz_24bpp_support)
4804 DC_LOG_DP2("\t1920x1080@60Hz 24bpp fallback format supported");
4805 if (link->dpcd_caps.fallback_formats.bits.dp_1280x720_60Hz_24bpp_support)
4806 DC_LOG_DP2("\t1280x720@60Hz 24bpp fallback format supported");
4807 if (link->dpcd_caps.fallback_formats.bits.dp_1024x768_60Hz_24bpp_support)
4808 DC_LOG_DP2("\t1024x768@60Hz 24bpp fallback format supported");
4809 if (link->dpcd_caps.fallback_formats.raw == 0) {
4810 DC_LOG_DP2("\tno supported fallback formats, assume 1920x1080@60Hz 24bpp is supported");
4811 link->dpcd_caps.fallback_formats.bits.dp_1920x1080_60Hz_24bpp_support = 1;
4814 core_link_read_dpcd(link,
4815 DP_FEC_CAPABILITY_1,
4816 &link->dpcd_caps.fec_cap1.raw,
4817 sizeof(link->dpcd_caps.fec_cap1.raw));
4818 DC_LOG_DP2("FEC CAPABILITY 1 is read at link %d", link->link_index);
4819 if (link->dpcd_caps.fec_cap1.bits.AGGREGATED_ERROR_COUNTERS_CAPABLE)
4820 DC_LOG_DP2("\tFEC aggregated error counters are supported");
4824 /* Connectivity log: detection */
4825 CONN_DATA_DETECT(link, dpcd_data, sizeof(dpcd_data), "Rx Caps: ");
/*
 * dp_overwrite_extended_receiver_cap() - Re-read the base receiver caps
 * block and overwrite the cached link capabilities with it.
 *
 * Unlike retrieve_link_cap() this re-reads only the first caps block
 * (with retries) and refreshes dpcd_rev, downstream-port info, lane
 * count, downspread, reported link cap and the eDP configuration bits.
 *
 * NOTE(review): this listing is elided; the loop index declaration,
 * braces and the final return are not visible in this chunk.
 */
4830 bool dp_overwrite_extended_receiver_cap(struct dc_link *link)
4832 uint8_t dpcd_data[16];
4833 uint32_t read_dpcd_retry_cnt = 3;
4834 enum dc_status status = DC_ERROR_UNEXPECTED;
4835 union dp_downstream_port_present ds_port = { 0 };
4836 union down_stream_port_count down_strm_port_count;
4837 union edp_configuration_cap edp_config_cap;
/* Retry the caps block read; stop at the first successful AUX read. */
4841 for (i = 0; i < read_dpcd_retry_cnt; i++) {
4842 status = core_link_read_dpcd(
4847 if (status == DC_OK)
4851 link->dpcd_caps.dpcd_rev.raw =
4852 dpcd_data[DP_DPCD_REV - DP_DPCD_REV];
/* A sink can never legitimately report MAX_LANE_COUNT == 0. */
4854 if (dpcd_data[DP_MAX_LANE_COUNT - DP_DPCD_REV] == 0)
4857 ds_port.byte = dpcd_data[DP_DOWNSTREAMPORT_PRESENT -
4860 get_active_converter_info(ds_port.byte, link);
4862 down_strm_port_count.raw = dpcd_data[DP_DOWN_STREAM_PORT_COUNT -
4865 link->dpcd_caps.allow_invalid_MSA_timing_param =
4866 down_strm_port_count.bits.IGNORE_MSA_TIMING_PARAM;
4868 link->dpcd_caps.max_ln_count.raw = dpcd_data[
4869 DP_MAX_LANE_COUNT - DP_DPCD_REV];
4871 link->dpcd_caps.max_down_spread.raw = dpcd_data[
4872 DP_MAX_DOWNSPREAD - DP_DPCD_REV];
/* Refresh the sink-reported link capability from the new read. */
4874 link->reported_link_cap.lane_count =
4875 link->dpcd_caps.max_ln_count.bits.MAX_LANE_COUNT;
4876 link->reported_link_cap.link_rate = dpcd_data[
4877 DP_MAX_LINK_RATE - DP_DPCD_REV];
4878 link->reported_link_cap.link_spread =
4879 link->dpcd_caps.max_down_spread.bits.MAX_DOWN_SPREAD ?
4880 LINK_SPREAD_05_DOWNSPREAD_30KHZ : LINK_SPREAD_DISABLED;
4882 edp_config_cap.raw = dpcd_data[
4883 DP_EDP_CONFIGURATION_CAP - DP_DPCD_REV];
4884 link->dpcd_caps.panel_mode_edp =
4885 edp_config_cap.bits.ALT_SCRAMBLER_RESET;
4886 link->dpcd_caps.dpcd_display_control_capable =
4887 edp_config_cap.bits.DPCD_DISPLAY_CONTROL_CAPABLE;
/*
 * detect_dp_sink_caps() - Public wrapper around retrieve_link_cap().
 * Returns whatever retrieve_link_cap() reports for this link.
 */
4892 bool detect_dp_sink_caps(struct dc_link *link)
4894 return retrieve_link_cap(link);
4896 /* dc init_hw has power encoder using default
4897 * signal for connector. For native DP, no
4898 * need to power up encoder again. If not native
4899 * DP, hw_init may need check signal or power up
4902 /* TODO save sink caps in link->sink */
/*
 * linkRateInKHzToLinkRateMultiplier() - Map a per-lane link rate in kHz
 * (as computed from the eDP SUPPORTED_LINK_RATES table) to the driver's
 * dc_link_rate enum (a multiple of 0.27 Gbps per lane); any value not
 * in the table maps to LINK_RATE_UNKNOWN.
 *
 * NOTE(review): the 'case <khz>:' labels are elided in this listing;
 * only the assignments per rate are visible.
 */
4905 static enum dc_link_rate linkRateInKHzToLinkRateMultiplier(uint32_t link_rate_in_khz)
4907 enum dc_link_rate link_rate;
4908 // LinkRate is normally stored as a multiplier of 0.27 Gbps per lane. Do the translation.
4909 switch (link_rate_in_khz) {
4911 link_rate = LINK_RATE_LOW; // Rate_1 (RBR) - 1.62 Gbps/Lane
4914 link_rate = LINK_RATE_RATE_2; // Rate_2 - 2.16 Gbps/Lane
4917 link_rate = LINK_RATE_RATE_3; // Rate_3 - 2.43 Gbps/Lane
4920 link_rate = LINK_RATE_HIGH; // Rate_4 (HBR) - 2.70 Gbps/Lane
4923 link_rate = LINK_RATE_RBR2; // Rate_5 (RBR2) - 3.24 Gbps/Lane
4926 link_rate = LINK_RATE_RATE_6; // Rate_6 - 4.32 Gbps/Lane
4929 link_rate = LINK_RATE_HIGH2; // Rate_7 (HBR2) - 5.40 Gbps/Lane
4932 link_rate = LINK_RATE_HIGH3; // Rate_8 (HBR3) - 8.10 Gbps/Lane
4935 link_rate = LINK_RATE_UNKNOWN;
/*
 * detect_edp_sink_caps() - Detect capabilities specific to eDP panels.
 *
 * After the common DP cap read, parses the eDP v1.4+ per-lane
 * SUPPORTED_LINK_RATES table (DPCD 00010h-0001Fh, little-endian 16-bit
 * entries in units of 200 kHz), reads the backlight adjustment cap and
 * programs the default brightness over AUX.
 */
4941 void detect_edp_sink_caps(struct dc_link *link)
4943 uint8_t supported_link_rates[16];
4945 uint32_t link_rate_in_khz;
4946 enum dc_link_rate link_rate = LINK_RATE_UNKNOWN;
4947 uint8_t backlight_adj_cap;
4949 retrieve_link_cap(link);
4950 link->dpcd_caps.edp_supported_link_rates_count = 0;
4951 memset(supported_link_rates, 0, sizeof(supported_link_rates));
4954 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
4955 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
4957 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_13 &&
4958 (link->dc->debug.optimize_edp_link_rate ||
4959 link->reported_link_cap.link_rate == LINK_RATE_UNKNOWN)) {
4960 // Read DPCD 00010h - 0001Fh 16 bytes at one shot
4961 core_link_read_dpcd(link, DP_SUPPORTED_LINK_RATES,
4962 supported_link_rates, sizeof(supported_link_rates))
/* Each table entry is two bytes (low byte first); 8 entries max. */
4964 for (entry = 0; entry < 16; entry += 2) {
4965 // DPCD register reports per-lane link rate = 16-bit link rate capability
4966 // value X 200 kHz. Need multiplier to find link rate in kHz.
4967 link_rate_in_khz = (supported_link_rates[entry+1] * 0x100 +
4968 supported_link_rates[entry]) * 200;
4970 if (link_rate_in_khz != 0) {
4971 link_rate = linkRateInKHzToLinkRateMultiplier(link_rate_in_khz);
4972 link->dpcd_caps.edp_supported_link_rates[link->dpcd_caps.edp_supported_link_rates_count] = link_rate;
4973 link->dpcd_caps.edp_supported_link_rates_count++;
/* Track the highest table rate as the reported link rate. */
4975 if (link->reported_link_cap.link_rate < link_rate)
4976 link->reported_link_cap.link_rate = link_rate;
4980 link->verified_link_cap = link->reported_link_cap;
4982 core_link_read_dpcd(link, DP_EDP_BACKLIGHT_ADJUSTMENT_CAP,
4983 &backlight_adj_cap, sizeof(backlight_adj_cap));
4985 link->dpcd_caps.dynamic_backlight_capable_edp =
4986 (backlight_adj_cap & DP_EDP_DYNAMIC_BACKLIGHT_CAP) ? true:false;
4988 dc_link_set_default_brightness_aux(link);
4991 void dc_link_dp_enable_hpd(const struct dc_link *link)
4993 struct link_encoder *encoder = link->link_enc;
4995 if (encoder != NULL && encoder->funcs->enable_hpd != NULL)
4996 encoder->funcs->enable_hpd(encoder);
4999 void dc_link_dp_disable_hpd(const struct dc_link *link)
5001 struct link_encoder *encoder = link->link_enc;
5003 if (encoder != NULL && encoder->funcs->enable_hpd != NULL)
5004 encoder->funcs->disable_hpd(encoder);
/*
 * is_dp_phy_pattern() - True for PHY-layer test patterns.
 *
 * A pattern qualifies when it lies in the PHY pattern enum range
 * [DP_TEST_PATTERN_PHY_PATTERN_BEGIN, DP_TEST_PATTERN_PHY_PATTERN_END]
 * or is DP_TEST_PATTERN_VIDEO_MODE (which exits PHY test mode).
 * NOTE(review): the return statements are elided from this listing.
 */
5007 static bool is_dp_phy_pattern(enum dp_test_pattern test_pattern)
5009 if ((DP_TEST_PATTERN_PHY_PATTERN_BEGIN <= test_pattern &&
5010 test_pattern <= DP_TEST_PATTERN_PHY_PATTERN_END) ||
5011 test_pattern == DP_TEST_PATTERN_VIDEO_MODE)
/*
 * set_crtc_test_pattern() - Program a CRTC/DPG-generated test pattern.
 *
 * Maps the requested DP test pattern to the controller test-pattern
 * enum, disables bit-depth reduction while a pattern is active (and
 * restores it for VIDEO_MODE), and programs either the timing
 * generator's set_test_pattern hook or, on newer hardware, the display
 * pattern generator (set_disp_pattern_generator) — including each ODM
 * pipe with the width split across the ODM combine count.
 *
 * NOTE(review): this listing is elided (missing braces/breaks), and
 * '&params' appears mojibake'd as '¶ms' on several lines — the
 * original code passes the address of 'params'.
 */
5017 static void set_crtc_test_pattern(struct dc_link *link,
5018 struct pipe_ctx *pipe_ctx,
5019 enum dp_test_pattern test_pattern,
5020 enum dp_test_pattern_color_space test_pattern_color_space)
5022 enum controller_dp_test_pattern controller_test_pattern;
5023 enum dc_color_depth color_depth = pipe_ctx->
5024 stream->timing.display_color_depth;
5025 struct bit_depth_reduction_params params;
5026 struct output_pixel_processor *opp = pipe_ctx->stream_res.opp;
/* Pattern width/height cover the addressable area plus borders. */
5027 int width = pipe_ctx->stream->timing.h_addressable +
5028 pipe_ctx->stream->timing.h_border_left +
5029 pipe_ctx->stream->timing.h_border_right;
5030 int height = pipe_ctx->stream->timing.v_addressable +
5031 pipe_ctx->stream->timing.v_border_bottom +
5032 pipe_ctx->stream->timing.v_border_top;
5034 memset(¶ms, 0, sizeof(params));
/* First switch: translate the DP test pattern to the controller enum. */
5036 switch (test_pattern) {
5037 case DP_TEST_PATTERN_COLOR_SQUARES:
5038 controller_test_pattern =
5039 CONTROLLER_DP_TEST_PATTERN_COLORSQUARES;
5041 case DP_TEST_PATTERN_COLOR_SQUARES_CEA:
5042 controller_test_pattern =
5043 CONTROLLER_DP_TEST_PATTERN_COLORSQUARES_CEA;
5045 case DP_TEST_PATTERN_VERTICAL_BARS:
5046 controller_test_pattern =
5047 CONTROLLER_DP_TEST_PATTERN_VERTICALBARS;
5049 case DP_TEST_PATTERN_HORIZONTAL_BARS:
5050 controller_test_pattern =
5051 CONTROLLER_DP_TEST_PATTERN_HORIZONTALBARS;
5053 case DP_TEST_PATTERN_COLOR_RAMP:
5054 controller_test_pattern =
5055 CONTROLLER_DP_TEST_PATTERN_COLORRAMP;
5058 controller_test_pattern =
5059 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE;
/* Second switch: actually program the hardware per pattern class. */
5063 switch (test_pattern) {
5064 case DP_TEST_PATTERN_COLOR_SQUARES:
5065 case DP_TEST_PATTERN_COLOR_SQUARES_CEA:
5066 case DP_TEST_PATTERN_VERTICAL_BARS:
5067 case DP_TEST_PATTERN_HORIZONTAL_BARS:
5068 case DP_TEST_PATTERN_COLOR_RAMP:
5070 /* disable bit depth reduction */
5071 pipe_ctx->stream->bit_depth_params = params;
5072 opp->funcs->opp_program_bit_depth_reduction(opp, ¶ms);
5073 if (pipe_ctx->stream_res.tg->funcs->set_test_pattern)
5074 pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg,
5075 controller_test_pattern, color_depth);
5076 else if (link->dc->hwss.set_disp_pattern_generator) {
5077 struct pipe_ctx *odm_pipe;
5078 enum controller_dp_color_space controller_color_space;
5081 int dpg_width = width;
/* Map the requested test color space to the DPG color space enum. */
5083 switch (test_pattern_color_space) {
5084 case DP_TEST_PATTERN_COLOR_SPACE_RGB:
5085 controller_color_space = CONTROLLER_DP_COLOR_SPACE_RGB;
5087 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR601:
5088 controller_color_space = CONTROLLER_DP_COLOR_SPACE_YCBCR601;
5090 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR709:
5091 controller_color_space = CONTROLLER_DP_COLOR_SPACE_YCBCR709;
5093 case DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED:
5095 controller_color_space = CONTROLLER_DP_COLOR_SPACE_UDEFINED;
5096 DC_LOG_ERROR("%s: Color space must be defined for test pattern", __func__);
/* Count ODM pipes so the pattern width can be split evenly. */
5101 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe)
5103 dpg_width = width / opp_cnt;
5106 link->dc->hwss.set_disp_pattern_generator(link->dc,
5108 controller_test_pattern,
5109 controller_color_space,
/* Program every downstream ODM pipe with the same pattern. */
5116 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe) {
5117 struct output_pixel_processor *odm_opp = odm_pipe->stream_res.opp;
5119 odm_opp->funcs->opp_program_bit_depth_reduction(odm_opp, ¶ms);
5120 link->dc->hwss.set_disp_pattern_generator(link->dc,
5122 controller_test_pattern,
5123 controller_color_space,
5134 case DP_TEST_PATTERN_VIDEO_MODE:
5136 /* restore bitdepth reduction */
5137 resource_build_bit_depth_reduction_params(pipe_ctx->stream, ¶ms);
5138 pipe_ctx->stream->bit_depth_params = params;
5139 opp->funcs->opp_program_bit_depth_reduction(opp, ¶ms);
5140 if (pipe_ctx->stream_res.tg->funcs->set_test_pattern)
5141 pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg,
5142 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
5144 else if (link->dc->hwss.set_disp_pattern_generator) {
5145 struct pipe_ctx *odm_pipe;
5147 int dpg_width = width;
5149 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe)
5152 dpg_width = width / opp_cnt;
5153 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe) {
5154 struct output_pixel_processor *odm_opp = odm_pipe->stream_res.opp;
5156 odm_opp->funcs->opp_program_bit_depth_reduction(odm_opp, ¶ms);
5157 link->dc->hwss.set_disp_pattern_generator(link->dc,
5159 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
5160 CONTROLLER_DP_COLOR_SPACE_UDEFINED,
5167 link->dc->hwss.set_disp_pattern_generator(link->dc,
5169 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
5170 CONTROLLER_DP_COLOR_SPACE_UDEFINED,
/*
 * dc_link_dp_set_test_pattern() - Apply a DP compliance test pattern.
 *
 * @link:                     link under test.
 * @test_pattern:             PHY or CRTC pattern to program (VIDEO_MODE
 *                            exits test mode).
 * @test_pattern_color_space: color space for CRTC-generated patterns.
 * @p_link_settings:          optional lane/link settings to program
 *                            before running a PHY pattern.
 * @p_custom_pattern:         optional custom pattern payload.
 * @cust_pattern_size:        size of the custom payload in bytes.
 *
 * Finds the top-level pipe driving this link, then either programs a
 * PHY-layer pattern (blanking the stream, setting HW + DPCD pattern
 * registers per DPCD revision) or a CRTC pattern (updating MSA/VSC for
 * the requested color space under double-buffer lock).
 *
 * NOTE(review): this listing is elided — braces, 'break's, several
 * call arguments and returns are not visible; comments describe only
 * what the visible lines establish.
 */
5185 bool dc_link_dp_set_test_pattern(
5186 struct dc_link *link,
5187 enum dp_test_pattern test_pattern,
5188 enum dp_test_pattern_color_space test_pattern_color_space,
5189 const struct link_training_settings *p_link_settings,
5190 const unsigned char *p_custom_pattern,
5191 unsigned int cust_pattern_size)
5193 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
5194 struct pipe_ctx *pipe_ctx = NULL;
5197 unsigned char link_qual_pattern[LANE_COUNT_DP_MAX] = {0};
5198 union dpcd_training_pattern training_pattern;
5199 enum dpcd_phy_test_patterns pattern;
5201 memset(&training_pattern, 0, sizeof(training_pattern));
/* Locate the top-level (non-split, non-ODM-secondary) pipe on this link. */
5203 for (i = 0; i < MAX_PIPES; i++) {
5204 if (pipes[i].stream == NULL)
5207 if (pipes[i].stream->link == link && !pipes[i].top_pipe && !pipes[i].prev_odm_pipe) {
5208 pipe_ctx = &pipes[i];
5213 if (pipe_ctx == NULL)
5216 /* Reset CRTC Test Pattern if it is currently running and request is VideoMode */
5217 if (link->test_pattern_enabled && test_pattern ==
5218 DP_TEST_PATTERN_VIDEO_MODE) {
5219 /* Set CRTC Test Pattern */
5220 set_crtc_test_pattern(link, pipe_ctx, test_pattern, test_pattern_color_space)
5221 dp_set_hw_test_pattern(link, test_pattern,
5222 (uint8_t *)p_custom_pattern,
5223 (uint32_t)cust_pattern_size);
5225 /* Unblank Stream */
5226 link->dc->hwss.unblank_stream(
5228 &link->verified_link_cap);
5229 /* TODO:m_pHwss->MuteAudioEndpoint
5230 * (pPathMode->pDisplayPath, false);
5233 /* Reset Test Pattern state */
5234 link->test_pattern_enabled = false;
5239 /* Check for PHY Test Patterns */
5240 if (is_dp_phy_pattern(test_pattern)) {
5241 /* Set DPCD Lane Settings before running test pattern */
5242 if (p_link_settings != NULL) {
5243 dp_set_hw_lane_settings(link, p_link_settings, DPRX);
5244 dpcd_set_lane_settings(link, p_link_settings, DPRX);
5247 /* Blank stream if running test pattern */
5248 if (test_pattern != DP_TEST_PATTERN_VIDEO_MODE) {
5251 * MuteAudioEndpoint(pPathMode->pDisplayPath, true);
5254 pipes->stream_res.stream_enc->funcs->dp_blank(link, pipe_ctx->stream_res.stream_enc);
5257 dp_set_hw_test_pattern(link, test_pattern,
5258 (uint8_t *)p_custom_pattern,
5259 (uint32_t)cust_pattern_size);
5261 if (test_pattern != DP_TEST_PATTERN_VIDEO_MODE) {
5262 /* Set Test Pattern state */
5263 link->test_pattern_enabled = true;
5264 if (p_link_settings != NULL)
5265 dpcd_set_link_settings(link,
/* Translate the DP test pattern to the DPCD PHY_TEST_PATTERN code. */
5269 switch (test_pattern) {
5270 case DP_TEST_PATTERN_VIDEO_MODE:
5271 pattern = PHY_TEST_PATTERN_NONE;
5273 case DP_TEST_PATTERN_D102:
5274 pattern = PHY_TEST_PATTERN_D10_2;
5276 case DP_TEST_PATTERN_SYMBOL_ERROR:
5277 pattern = PHY_TEST_PATTERN_SYMBOL_ERROR;
5279 case DP_TEST_PATTERN_PRBS7:
5280 pattern = PHY_TEST_PATTERN_PRBS7;
5282 case DP_TEST_PATTERN_80BIT_CUSTOM:
5283 pattern = PHY_TEST_PATTERN_80BIT_CUSTOM;
5285 case DP_TEST_PATTERN_CP2520_1:
5286 pattern = PHY_TEST_PATTERN_CP2520_1;
5288 case DP_TEST_PATTERN_CP2520_2:
5289 pattern = PHY_TEST_PATTERN_CP2520_2;
5291 case DP_TEST_PATTERN_CP2520_3:
5292 pattern = PHY_TEST_PATTERN_CP2520_3;
5294 #if defined(CONFIG_DRM_AMD_DC_DCN)
5295 case DP_TEST_PATTERN_128b_132b_TPS1:
5296 pattern = PHY_TEST_PATTERN_128b_132b_TPS1;
5298 case DP_TEST_PATTERN_128b_132b_TPS2:
5299 pattern = PHY_TEST_PATTERN_128b_132b_TPS2;
5301 case DP_TEST_PATTERN_PRBS9:
5302 pattern = PHY_TEST_PATTERN_PRBS9;
5304 case DP_TEST_PATTERN_PRBS11:
5305 pattern = PHY_TEST_PATTERN_PRBS11;
5307 case DP_TEST_PATTERN_PRBS15:
5308 pattern = PHY_TEST_PATTERN_PRBS15;
5310 case DP_TEST_PATTERN_PRBS23:
5311 pattern = PHY_TEST_PATTERN_PRBS23;
5313 case DP_TEST_PATTERN_PRBS31:
5314 pattern = PHY_TEST_PATTERN_PRBS31;
5316 case DP_TEST_PATTERN_264BIT_CUSTOM:
5317 pattern = PHY_TEST_PATTERN_264BIT_CUSTOM;
5319 case DP_TEST_PATTERN_SQUARE_PULSE:
5320 pattern = PHY_TEST_PATTERN_SQUARE_PULSE;
5327 if (test_pattern == DP_TEST_PATTERN_VIDEO_MODE
5328 /*TODO:&& !pPathMode->pDisplayPath->IsTargetPoweredOn()*/)
/* DPCD 1.2+: notify RX via LINK_QUAL_LANEx_SET (0x10B-0x10E). */
5331 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
5332 #if defined(CONFIG_DRM_AMD_DC_DCN)
5333 if (test_pattern == DP_TEST_PATTERN_SQUARE_PULSE)
5334 core_link_write_dpcd(link,
5335 DP_LINK_SQUARE_PATTERN,
5340 /* tell receiver that we are sending qualification
5341 * pattern DP 1.2 or later - DP receiver's link quality
5342 * pattern is set using DPCD LINK_QUAL_LANEx_SET
5343 * register (0x10B~0x10E)\
5345 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++)
5346 link_qual_pattern[lane] =
5347 (unsigned char)(pattern);
5349 core_link_write_dpcd(link,
5350 DP_LINK_QUAL_LANE0_SET,
5352 sizeof(link_qual_pattern));
5353 } else if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_10 ||
5354 link->dpcd_caps.dpcd_rev.raw == 0) {
5355 /* tell receiver that we are sending qualification
5356 * pattern DP 1.1a or earlier - DP receiver's link
5357 * quality pattern is set using
5358 * DPCD TRAINING_PATTERN_SET -> LINK_QUAL_PATTERN_SET
5359 * register (0x102). We will use v_1.3 when we are
5360 * setting test pattern for DP 1.1.
5362 core_link_read_dpcd(link, DP_TRAINING_PATTERN_SET,
5363 &training_pattern.raw,
5364 sizeof(training_pattern));
5365 training_pattern.v1_3.LINK_QUAL_PATTERN_SET = pattern;
5366 core_link_write_dpcd(link, DP_TRAINING_PATTERN_SET,
5367 &training_pattern.raw,
5368 sizeof(training_pattern));
/* CRTC (non-PHY) pattern path: derive the stream color space. */
5371 enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
5373 switch (test_pattern_color_space) {
5374 case DP_TEST_PATTERN_COLOR_SPACE_RGB:
5375 color_space = COLOR_SPACE_SRGB;
5376 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
5377 color_space = COLOR_SPACE_SRGB_LIMITED;
5380 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR601:
5381 color_space = COLOR_SPACE_YCBCR601;
5382 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
5383 color_space = COLOR_SPACE_YCBCR601_LIMITED;
5385 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR709:
5386 color_space = COLOR_SPACE_YCBCR709;
5387 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
5388 color_space = COLOR_SPACE_YCBCR709_LIMITED;
/* Take the double-buffer lock (via DMUB when firmware owns it). */
5394 if (pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_enable) {
5395 if (pipe_ctx->stream && should_use_dmub_lock(pipe_ctx->stream->link)) {
5396 union dmub_hw_lock_flags hw_locks = { 0 };
5397 struct dmub_hw_lock_inst_flags inst_flags = { 0 };
5399 hw_locks.bits.lock_dig = 1;
5400 inst_flags.dig_inst = pipe_ctx->stream_res.tg->inst;
5402 dmub_hw_lock_mgr_cmd(link->ctx->dmub_srv,
5407 pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_enable(
5408 pipe_ctx->stream_res.tg);
5411 pipe_ctx->stream_res.tg->funcs->lock(pipe_ctx->stream_res.tg);
5412 /* update MSA to requested color space */
5413 pipe_ctx->stream_res.stream_enc->funcs->dp_set_stream_attribute(pipe_ctx->stream_res.stream_enc,
5414 &pipe_ctx->stream->timing,
5416 pipe_ctx->stream->use_vsc_sdp_for_colorimetry,
5417 link->dpcd_caps.dprx_feature.bits.SST_SPLIT_SDP_CAP);
5419 if (pipe_ctx->stream->use_vsc_sdp_for_colorimetry) {
5420 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
5421 pipe_ctx->stream->vsc_infopacket.sb[17] |= (1 << 7); // sb17 bit 7 Dynamic Range: 0 = VESA range, 1 = CTA range
5423 pipe_ctx->stream->vsc_infopacket.sb[17] &= ~(1 << 7);
5424 resource_build_info_frame(pipe_ctx);
5425 link->dc->hwss.update_info_frame(pipe_ctx);
5429 set_crtc_test_pattern(link, pipe_ctx, test_pattern, test_pattern_color_space);
/* Unlock and wait for the pattern to latch over a full frame. */
5430 pipe_ctx->stream_res.tg->funcs->unlock(pipe_ctx->stream_res.tg);
5431 pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
5432 CRTC_STATE_VACTIVE);
5433 pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
5435 pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
5436 CRTC_STATE_VACTIVE);
5438 if (pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_disable) {
5439 if (pipe_ctx->stream && should_use_dmub_lock(pipe_ctx->stream->link)) {
5440 union dmub_hw_lock_flags hw_locks = { 0 };
5441 struct dmub_hw_lock_inst_flags inst_flags = { 0 };
5443 hw_locks.bits.lock_dig = 1;
5444 inst_flags.dig_inst = pipe_ctx->stream_res.tg->inst;
5446 dmub_hw_lock_mgr_cmd(link->ctx->dmub_srv,
5451 pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_disable(
5452 pipe_ctx->stream_res.tg);
5455 /* Set Test Pattern state */
5456 link->test_pattern_enabled = true;
/*
 * dp_enable_mst_on_sink() - Set or clear the DP_MST_EN bit in the
 * sink's DP_MSTM_CTRL register via a read-modify-write over AUX.
 *
 * NOTE(review): the 'if (enable) / else' lines selecting between the
 * set (|=) and clear (&=) operations are elided from this listing;
 * presumably 'enable' chooses the branch — confirm against upstream.
 */
5462 void dp_enable_mst_on_sink(struct dc_link *link, bool enable)
5464 unsigned char mstmCntl;
5466 core_link_read_dpcd(link, DP_MSTM_CTRL, &mstmCntl, 1);
5468 mstmCntl |= DP_MST_EN;
5470 mstmCntl &= (~DP_MST_EN);
5472 core_link_write_dpcd(link, DP_MSTM_CTRL, &mstmCntl, 1);
/*
 * dp_set_panel_mode() - Program the sink's eDP panel mode.
 *
 * Translates the requested panel mode to the PANEL_MODE_EDP bit of
 * DPCD DP_EDP_CONFIGURATION_SET and writes it only when it differs
 * from the sink's current setting (read-compare-write over AUX).
 * DP_PANEL_MODE_DEFAULT leaves panel_mode_edp false.
 */
5475 void dp_set_panel_mode(struct dc_link *link, enum dp_panel_mode panel_mode)
5477 union dpcd_edp_config edp_config_set;
5478 bool panel_mode_edp = false;
5480 memset(&edp_config_set, '\0', sizeof(union dpcd_edp_config));
5482 if (panel_mode != DP_PANEL_MODE_DEFAULT) {
5484 switch (panel_mode) {
/* Both EDP and SPECIAL request the eDP alternate scrambler mode. */
5485 case DP_PANEL_MODE_EDP:
5486 case DP_PANEL_MODE_SPECIAL:
5487 panel_mode_edp = true;
5494 /*set edp panel mode in receiver*/
5495 core_link_read_dpcd(
5497 DP_EDP_CONFIGURATION_SET,
5498 &edp_config_set.raw,
5499 sizeof(edp_config_set.raw));
/* Only write DPCD when the desired mode differs from the current one. */
5501 if (edp_config_set.bits.PANEL_MODE_EDP
5502 != panel_mode_edp) {
5503 enum dc_status result;
5505 edp_config_set.bits.PANEL_MODE_EDP =
5507 result = core_link_write_dpcd(
5509 DP_EDP_CONFIGURATION_SET,
5510 &edp_config_set.raw,
5511 sizeof(edp_config_set.raw));
5513 ASSERT(result == DC_OK);
5516 DC_LOG_DETECTION_DP_CAPS("Link: %d eDP panel mode supported: %d "
5517 "eDP panel mode enabled: %d \n",
5519 link->dpcd_caps.panel_mode_edp,
/* Decide which panel mode to use for this link.
 * Returns DP_PANEL_MODE_SPECIAL for known VGA/LVDS converter branch
 * devices (Travis 0022B9, 00001A) that need the alternate scrambler
 * reset, DP_PANEL_MODE_EDP for eDP-capable internal panels, and
 * DP_PANEL_MODE_DEFAULT otherwise.
 * NOTE(review): lines are elided in this excerpt; the strncmp() length
 * arguments and some closing braces are not visible.
 */
5523 enum dp_panel_mode dp_get_panel_mode(struct dc_link *link)
5525 /* We need to explicitly check that connector
5526 * is not DP. Some Travis_VGA get reported
5527 * by video bios as DP.
5529 if (link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT) {
5531 switch (link->dpcd_caps.branch_dev_id) {
5532 case DP_BRANCH_DEVICE_ID_0022B9:
5533 /* alternate scrambler reset is required for Travis
5534 * for the case when external chip does not
5535 * provide sink device id, alternate scrambler
5536 * scheme will be overriden later by querying
/* match branch device name against the Travis converter ID string */
5540 link->dpcd_caps.branch_dev_name,
5541 DP_VGA_LVDS_CONVERTER_ID_2,
5544 branch_dev_name)) == 0) {
5545 return DP_PANEL_MODE_SPECIAL;
5548 case DP_BRANCH_DEVICE_ID_00001A:
5549 /* alternate scrambler reset is required for Travis
5550 * for the case when external chip does not provide
5551 * sink device id, alternate scrambler scheme will
5552 * be overriden later by querying Encoder feature
5554 if (strncmp(link->dpcd_caps.branch_dev_name,
5555 DP_VGA_LVDS_CONVERTER_ID_3,
5558 branch_dev_name)) == 0) {
5559 return DP_PANEL_MODE_SPECIAL;
/* eDP panel mode applies to eDP connectors and internal DP panels */
5567 if (link->dpcd_caps.panel_mode_edp &&
5568 (link->connector_signal == SIGNAL_TYPE_EDP ||
5569 (link->connector_signal == SIGNAL_TYPE_DISPLAY_PORT &&
5570 link->is_internal_display))) {
5571 return DP_PANEL_MODE_EDP;
5574 return DP_PANEL_MODE_DEFAULT;
/* Put the link's FEC into (or out of) the "ready" state ahead of link
 * training: writes DP_FEC_CONFIGURATION on the sink and mirrors the state
 * into the link encoder and link->fec_state.
 * NOTE(review): this excerpt elides lines (original line numbers jump);
 * the fec_config bit values written and some early-return paths are not
 * visible here.
 */
5577 enum dc_status dp_set_fec_ready(struct dc_link *link, bool ready)
5579 /* FEC has to be "set ready" before the link training.
5580 * The policy is to always train with FEC
5581 * if the sink supports it and leave it enabled on link.
5582 * If FEC is not supported, disable it.
5584 struct link_encoder *link_enc = NULL;
5585 enum dc_status status = DC_OK;
5586 uint8_t fec_config = 0;
5588 /* Access link encoder based on whether it is statically
5589 * or dynamically assigned to a link.
5591 if (link->is_dig_mapping_flexible &&
5592 link->dc->res_pool->funcs->link_encs_assign)
5593 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
/* statically mapped encoder */
5595 link_enc = link->link_enc;
/* policy gate: skip entirely when FEC should not be enabled */
5598 if (!dc_link_should_enable_fec(link))
5601 if (link_enc->funcs->fec_set_ready &&
5602 link->dpcd_caps.fec_cap.bits.FEC_CAPABLE) {
5605 status = core_link_write_dpcd(link,
5606 DP_FEC_CONFIGURATION,
5608 sizeof(fec_config));
/* only mark encoder/state ready if the DPCD write succeeded */
5609 if (status == DC_OK) {
5610 link_enc->funcs->fec_set_ready(link_enc, true);
5611 link->fec_state = dc_link_fec_ready;
5613 link_enc->funcs->fec_set_ready(link_enc, false);
5614 link->fec_state = dc_link_fec_not_ready;
5615 dm_error("dpcd write failed to set fec_ready");
/* !ready path: tear down a previously-ready FEC state */
5617 } else if (link->fec_state == dc_link_fec_ready) {
5619 status = core_link_write_dpcd(link,
5620 DP_FEC_CONFIGURATION,
5622 sizeof(fec_config));
5623 link_enc->funcs->fec_set_ready(link_enc, false);
5624 link->fec_state = dc_link_fec_not_ready;
/* Enable or disable FEC on the link encoder, tracking the state machine
 * ready -> enabled (on enable) and enabled -> ready (on disable).
 * NOTE(review): lines are elided in this excerpt; the else branch of the
 * encoder selection and the udelay before enabling are not fully visible.
 */
5631 void dp_set_fec_enable(struct dc_link *link, bool enable)
5633 struct link_encoder *link_enc = NULL;
5635 /* Access link encoder based on whether it is statically
5636 * or dynamically assigned to a link.
5638 if (link->is_dig_mapping_flexible &&
5639 link->dc->res_pool->funcs->link_encs_assign)
5640 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
/* statically mapped encoder */
5642 link_enc = link->link_enc;
/* policy gate: skip entirely when FEC should not be enabled */
5645 if (!dc_link_should_enable_fec(link))
5648 if (link_enc->funcs->fec_set_enable &&
5649 link->dpcd_caps.fec_cap.bits.FEC_CAPABLE) {
/* only enable from the "ready" state */
5650 if (link->fec_state == dc_link_fec_ready && enable) {
5651 /* Accord to DP spec, FEC enable sequence can first
5652 * be transmitted anytime after 1000 LL codes have
5653 * been transmitted on the link after link training
5654 * completion. Using 1 lane RBR should have the maximum
5655 * time for transmitting 1000 LL codes which is 6.173 us.
5656 * So use 7 microseconds delay instead.
5659 link_enc->funcs->fec_set_enable(link_enc, true);
5660 link->fec_state = dc_link_fec_enabled;
/* only disable from the "enabled" state; fall back to ready */
5661 } else if (link->fec_state == dc_link_fec_enabled && !enable) {
5662 link_enc->funcs->fec_set_enable(link_enc, false);
5663 link->fec_state = dc_link_fec_ready;
/* Write AMD source-specific data to the sink's DPCD:
 *  - the AMD IEEE OUI signature at DP_SOURCE_OUI (written only if not
 *    already present),
 *  - the AMD device id (chip id / dce version) at DP_SOURCE_OUI+3,
 *  - optionally the minimum horizontal blanking period at
 *    DP_SOURCE_MINIMUM_HBLANK_SUPPORTED (DCN2.0+ only).
 * If the dc has a valid vendor signature, that raw signature is written
 * instead (final branch).
 * NOTE(review): lines are elided in this excerpt; some braces and the
 * link index argument of the trace call are not visible.
 */
5668 void dpcd_set_source_specific_data(struct dc_link *link)
5670 if (!link->dc->vendor_signature.is_valid) {
5671 enum dc_status __maybe_unused result_write_min_hblank = DC_NOT_SUPPORTED;
5672 struct dpcd_amd_signature amd_signature = {0};
5673 struct dpcd_amd_device_id amd_device_id = {0};
/* pack the 16-bit chip id little-endian into two bytes */
5675 amd_device_id.device_id_byte1 =
5676 (uint8_t)(link->ctx->asic_id.chip_id);
5677 amd_device_id.device_id_byte2 =
5678 (uint8_t)(link->ctx->asic_id.chip_id >> 8);
5679 amd_device_id.dce_version =
5680 (uint8_t)(link->ctx->dce_version);
5681 amd_device_id.dal_version_byte1 = 0x0; // needed? where to get?
5682 amd_device_id.dal_version_byte2 = 0x0; // needed? where to get?
/* read back current OUI to avoid rewriting an already-set signature */
5684 core_link_read_dpcd(link, DP_SOURCE_OUI,
5685 (uint8_t *)(&amd_signature),
5686 sizeof(amd_signature));
/* AMD OUI is 00-00-1A; write it only when not already programmed */
5688 if (!((amd_signature.AMD_IEEE_TxSignature_byte1 == 0x0) &&
5689 (amd_signature.AMD_IEEE_TxSignature_byte2 == 0x0) &&
5690 (amd_signature.AMD_IEEE_TxSignature_byte3 == 0x1A))) {
5692 amd_signature.AMD_IEEE_TxSignature_byte1 = 0x0;
5693 amd_signature.AMD_IEEE_TxSignature_byte2 = 0x0;
5694 amd_signature.AMD_IEEE_TxSignature_byte3 = 0x1A;
5696 core_link_write_dpcd(link, DP_SOURCE_OUI,
5697 (uint8_t *)(&amd_signature),
5698 sizeof(amd_signature));
/* device id lives immediately after the 3-byte OUI */
5701 core_link_write_dpcd(link, DP_SOURCE_OUI+0x03,
5702 (uint8_t *)(&amd_device_id),
5703 sizeof(amd_device_id));
5705 if (link->ctx->dce_version >= DCN_VERSION_2_0 &&
5706 link->dc->caps.min_horizontal_blanking_period != 0) {
5708 uint8_t hblank_size = (uint8_t)link->dc->caps.min_horizontal_blanking_period;
/* some sinks NAK writes to 0x340; capability flag gates the attempt */
5710 if (link->preferred_link_setting.dpcd_source_device_specific_field_support) {
5711 result_write_min_hblank = core_link_write_dpcd(link,
5712 DP_SOURCE_MINIMUM_HBLANK_SUPPORTED, (uint8_t *)(&hblank_size),
5713 sizeof(hblank_size));
/* on unexpected failure, stop attempting this write in the future */
5715 if (result_write_min_hblank == DC_ERROR_UNEXPECTED)
5716 link->preferred_link_setting.dpcd_source_device_specific_field_support = false;
5718 DC_LOG_DC("Sink device does not support 00340h DPCD write. Skipping on purpose.\n");
5722 DC_TRACE_LEVEL_MESSAGE(DAL_TRACE_LEVEL_INFORMATION,
5723 WPP_BIT_FLAG_DC_DETECTION_DP_CAPS,
5724 "result=%u link_index=%u enum dce_version=%d DPCD=0x%04X min_hblank=%u branch_dev_id=0x%x branch_dev_name='%c%c%c%c%c%c'",
5725 result_write_min_hblank,
5727 link->ctx->dce_version,
5728 DP_SOURCE_MINIMUM_HBLANK_SUPPORTED,
5729 link->dc->caps.min_horizontal_blanking_period,
5730 link->dpcd_caps.branch_dev_id,
5731 link->dpcd_caps.branch_dev_name[0],
5732 link->dpcd_caps.branch_dev_name[1],
5733 link->dpcd_caps.branch_dev_name[2],
5734 link->dpcd_caps.branch_dev_name[3],
5735 link->dpcd_caps.branch_dev_name[4],
5736 link->dpcd_caps.branch_dev_name[5]);
/* vendor signature valid: write the caller-provided raw data instead */
5738 core_link_write_dpcd(link, DP_SOURCE_OUI,
5739 link->dc->vendor_signature.data.raw,
5740 sizeof(link->dc->vendor_signature.data.raw));
/* Set panel backlight level in millinits over DPCD AUX.
 * Writes the level + transition time to DP_SOURCE_BACKLIGHT_LEVEL, then
 * the control mode (AUX vs PWM) to DP_SOURCE_BACKLIGHT_CONTROL.
 * Returns false on invalid link/signal or a failed DPCD write.
 * NOTE(review): return statements between the visible lines are elided
 * in this excerpt.
 */
5744 bool dc_link_set_backlight_level_nits(struct dc_link *link,
5746 uint32_t backlight_millinits,
5747 uint32_t transition_time_in_ms)
5749 struct dpcd_source_backlight_set dpcd_backlight_set;
5750 uint8_t backlight_control = isHDR ? 1 : 0;
/* only eDP and DP links support AUX backlight control */
5752 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
5753 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
5756 // OLEDs have no PWM, they can only use AUX
5757 if (link->dpcd_sink_ext_caps.bits.oled == 1)
5758 backlight_control = 1;
/* NOTE(review): raw-pointer stores into the packed DPCD struct; assumes
 * little-endian byte order matches the DPCD layout — longstanding pattern
 * in this file */
5760 *(uint32_t *)&dpcd_backlight_set.backlight_level_millinits = backlight_millinits;
5761 *(uint16_t *)&dpcd_backlight_set.backlight_transition_time_ms = (uint16_t)transition_time_in_ms;
5764 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_LEVEL,
5765 (uint8_t *)(&dpcd_backlight_set),
5766 sizeof(dpcd_backlight_set)) != DC_OK)
5769 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_CONTROL,
5770 &backlight_control, 1) != DC_OK)
/* Read average and peak backlight levels (millinits) from the sink via
 * DP_SOURCE_BACKLIGHT_CURRENT_PEAK. Returns false on invalid link/signal,
 * failed read, or implausible values (avg == 0 or avg > peak, which is
 * what unsupported panels report).
 */
5776 bool dc_link_get_backlight_level_nits(struct dc_link *link,
5777 uint32_t *backlight_millinits_avg,
5778 uint32_t *backlight_millinits_peak)
5780 union dpcd_source_backlight_get dpcd_backlight_get;
5782 memset(&dpcd_backlight_get, 0, sizeof(union dpcd_source_backlight_get));
/* only eDP and DP links support AUX backlight readback */
5784 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
5785 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
5788 if (core_link_read_dpcd(link, DP_SOURCE_BACKLIGHT_CURRENT_PEAK,
5789 dpcd_backlight_get.raw,
5790 sizeof(union dpcd_source_backlight_get)) != DC_OK)
5793 *backlight_millinits_avg =
5794 dpcd_backlight_get.bytes.backlight_millinits_avg;
5795 *backlight_millinits_peak =
5796 dpcd_backlight_get.bytes.backlight_millinits_peak;
5798 /* On non-supported panels dpcd_read usually succeeds with 0 returned */
5799 if (*backlight_millinits_avg == 0 ||
5800 *backlight_millinits_avg > *backlight_millinits_peak)
/* Enable/disable the panel backlight over AUX via
 * DP_SOURCE_BACKLIGHT_ENABLE. Returns false on invalid link/signal or a
 * failed DPCD write (return lines elided in this excerpt).
 */
5806 bool dc_link_backlight_enable_aux(struct dc_link *link, bool enable)
5808 uint8_t backlight_enable = enable ? 1 : 0;
/* only eDP and DP links support AUX backlight control */
5810 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
5811 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
5814 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_ENABLE,
5815 &backlight_enable, 1) != DC_OK)
5821 // we read default from 0x320 because we expect BIOS wrote it there
5822 // regular get_backlight_nit reads from panel set at 0x326
/* Read the BIOS-programmed default backlight level (millinits) from
 * DP_SOURCE_BACKLIGHT_LEVEL into *backlight_millinits. Returns false on
 * invalid link/signal or a failed DPCD read (return lines elided here).
 */
5823 bool dc_link_read_default_bl_aux(struct dc_link *link, uint32_t *backlight_millinits)
5825 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
5826 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
/* NOTE(review): reads 4 bytes directly into the caller's uint32_t;
 * assumes little-endian layout matches the DPCD field */
5829 if (core_link_read_dpcd(link, DP_SOURCE_BACKLIGHT_LEVEL,
5830 (uint8_t *) backlight_millinits,
5831 sizeof(uint32_t)) != DC_OK)
/* Restore the panel's default brightness over AUX (OLED panels only).
 * Reads the BIOS default; falls back to 150000 millinits (150 nits) if
 * the read fails or the value is outside the plausible 5..5000 nit range.
 */
5837 bool dc_link_set_default_brightness_aux(struct dc_link *link)
5839 uint32_t default_backlight;
5841 if (link && link->dpcd_sink_ext_caps.bits.oled == 1) {
5842 if (!dc_link_read_default_bl_aux(link, &default_backlight))
5843 default_backlight = 150000;
5844 // if < 5 nits or > 5000, it might be wrong readback
5845 if (default_backlight < 5000 || default_backlight > 5000000)
5846 default_backlight = 150000; //
/* isHDR=true, no transition time */
5848 return dc_link_set_backlight_level_nits(link, true,
5849 default_backlight, 0);
/* Decide whether the eDP link needs retraining at an Intermediate Link
 * Rate (ILR): compares the VBIOS-programmed DPCD link settings
 * (DP_LINK_BW_SET / DP_LINK_RATE_SET / DP_LANE_COUNT_SET) against the
 * optimal settings for the timing's required bandwidth. Returns true if
 * they differ (optimization required).
 * NOTE(review): lines are elided in this excerpt; the declaration of
 * req_bw and the check on link_bw_set are not fully visible.
 */
5854 bool is_edp_ilr_optimization_required(struct dc_link *link, struct dc_crtc_timing *crtc_timing)
5856 struct dc_link_settings link_setting;
5857 uint8_t link_bw_set;
5858 uint8_t link_rate_set;
5860 union lane_count_set lane_count_set = {0};
5862 ASSERT(link || crtc_timing); // invalid input
/* ILR only applies when the panel advertises supported link rates and
 * the debug option is enabled */
5864 if (link->dpcd_caps.edp_supported_link_rates_count == 0 ||
5865 !link->dc->debug.optimize_edp_link_rate)
5869 // Read DPCD 00100h to find if standard link rates are set
5870 core_link_read_dpcd(link, DP_LINK_BW_SET,
5871 &link_bw_set, sizeof(link_bw_set));
5874 DC_LOG_EVENT_LINK_TRAINING("eDP ILR: Optimization required, VBIOS used link_bw_set\n");
5878 // Read DPCD 00115h to find the edp link rate set used
5879 core_link_read_dpcd(link, DP_LINK_RATE_SET,
5880 &link_rate_set, sizeof(link_rate_set));
5882 // Read DPCD 00101h to find out the number of lanes currently set
5883 core_link_read_dpcd(link, DP_LANE_COUNT_SET,
5884 &lane_count_set.raw, sizeof(lane_count_set));
5886 req_bw = dc_bandwidth_in_kbps_from_timing(crtc_timing);
/* compute the optimal eDP settings for this bandwidth */
5888 decide_edp_link_settings(link, &link_setting, req_bw);
5890 if (link->dpcd_caps.edp_supported_link_rates[link_rate_set] != link_setting.link_rate ||
5891 lane_count_set.bits.LANE_COUNT_SET != link_setting.lane_count) {
5892 DC_LOG_EVENT_LINK_TRAINING("eDP ILR: Optimization required, VBIOS link_rate_set not optimal\n");
5896 DC_LOG_EVENT_LINK_TRAINING("eDP ILR: No optimization required, VBIOS set optimal link_rate_set\n");
/* Map a link rate to its DP channel-coding scheme:
 * RBR..HBR3 -> 8b/10b, UHBR10..UHBR20 -> 128b/132b (DCN builds only),
 * anything else -> unknown.
 */
5900 enum dp_link_encoding dp_get_link_encoding_format(const struct dc_link_settings *link_settings)
5902 if ((link_settings->link_rate >= LINK_RATE_LOW) &&
5903 (link_settings->link_rate <= LINK_RATE_HIGH3))
5904 return DP_8b_10b_ENCODING;
5905 #if defined(CONFIG_DRM_AMD_DC_DCN)
5906 else if ((link_settings->link_rate >= LINK_RATE_UHBR10) &&
5907 (link_settings->link_rate <= LINK_RATE_UHBR20))
5908 return DP_128b_132b_ENCODING;
5910 return DP_UNKNOWN_ENCODING;
5913 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* Determine the channel coding an MST link will use: take the preferred
 * link settings if fully specified, otherwise let
 * decide_mst_link_settings() pick, then translate the resulting rate to
 * an encoding via dp_get_link_encoding_format().
 */
5914 enum dp_link_encoding dc_link_dp_mst_decide_link_encoding_format(const struct dc_link *link)
5916 struct dc_link_settings link_settings = {0};
/* non-DP connectors have no DP encoding */
5918 if (!dc_is_dp_signal(link->connector_signal))
5919 return DP_UNKNOWN_ENCODING;
5921 if (link->preferred_link_setting.lane_count !=
5922 LANE_COUNT_UNKNOWN &&
5923 link->preferred_link_setting.link_rate !=
5924 LINK_RATE_UNKNOWN) {
5925 link_settings = link->preferred_link_setting;
/* no usable preference: derive settings for MST */
5927 decide_mst_link_settings(link, &link_settings);
5930 return dp_get_link_encoding_format(&link_settings);
5933 // TODO - DP2.0 Link: Fix get_lane_status to handle LTTPR offset (SST and MST)
/* Read per-lane status nibbles plus the align-status byte from the sink
 * (3-byte DPCD read; register offset elided in this excerpt — presumably
 * DP_LANE0_1_STATUS). Outputs one union lane_status per lane and the
 * lane-align status in *status_updated.
 */
5934 static void get_lane_status(
5935 struct dc_link *link,
5936 uint32_t lane_count,
5937 union lane_status *status,
5938 union lane_align_status_updated *status_updated)
5941 uint8_t dpcd_buf[3] = {0};
/* nothing to fill in if caller passed no output buffers */
5943 if (status == NULL || status_updated == NULL) {
5947 core_link_read_dpcd(
/* each byte packs two lanes' status; extract 4-bit nibble per lane */
5953 for (lane = 0; lane < lane_count; lane++) {
5954 status[lane].raw = get_nibble_at_index(&dpcd_buf[0], lane);
5957 status_updated->raw = dpcd_buf[2];
/* Program the downstream payload allocation table for a 128b/132b SST
 * stream. Sequence: compute required time slots, arm the update by
 * writing DP_PAYLOAD_TABLE_UPDATE_STATUS, program DPCD 1C0-1C2 (VC id,
 * start slot, slot count), then poll up to 30 times (~150 ms) for the
 * sink to report the table updated. On success, fills proposed_table for
 * the single SST stream. Returns the poll result.
 * NOTE(review): this excerpt elides lines; the `allocate` parameter,
 * DPCD-rev fallback branch details, retry increments, and delay calls are
 * not fully visible.
 */
5960 bool dpcd_write_128b_132b_sst_payload_allocation_table(
5961 const struct dc_stream_state *stream,
5962 struct dc_link *link,
5963 struct link_mst_stream_allocation_table *proposed_table,
5966 const uint8_t vc_id = 1; /// VC ID always 1 for SST
5967 const uint8_t start_time_slot = 0; /// Always start at time slot 0 for SST
5968 bool result = false;
5969 uint8_t req_slot_count = 0;
5970 struct fixed31_32 avg_time_slots_per_mtp = { 0 };
5971 union payload_table_update_status update_status = { 0 };
5972 const uint32_t max_retries = 30;
5973 uint32_t retries = 0;
/* slots = ceil(average time slots per MTP for this stream) */
5976 avg_time_slots_per_mtp = calculate_sst_avg_time_slots_per_mtp(stream, link);
5977 req_slot_count = dc_fixpt_ceil(avg_time_slots_per_mtp);
5979 /// Leave req_slot_count = 0 if allocate is false.
5982 /// Write DPCD 2C0 = 1 to start updating
5983 update_status.bits.VC_PAYLOAD_TABLE_UPDATED = 1;
5984 core_link_write_dpcd(
5986 DP_PAYLOAD_TABLE_UPDATE_STATUS,
5990 /// Program the changes in DPCD 1C0 - 1C2
5992 core_link_write_dpcd(
5994 DP_PAYLOAD_ALLOCATE_SET,
5998 ASSERT(start_time_slot == 0);
5999 core_link_write_dpcd(
6001 DP_PAYLOAD_ALLOCATE_START_TIME_SLOT,
6005 ASSERT(req_slot_count <= MAX_MTP_SLOT_COUNT); /// Validation should filter out modes that exceed link BW
6006 core_link_write_dpcd(
6008 DP_PAYLOAD_ALLOCATE_TIME_SLOT_COUNT,
6012 /// Poll till DPCD 2C0 read 1
6013 /// Try for at least 150ms (30 retries, with 5ms delay after each attempt)
6015 while (retries < max_retries) {
6016 if (core_link_read_dpcd(
6018 DP_PAYLOAD_TABLE_UPDATE_STATUS,
6021 if (update_status.bits.VC_PAYLOAD_TABLE_UPDATED == 1) {
6022 DC_LOG_DP2("SST Update Payload: downstream payload table updated.");
/* status read failed: probe DPCD revision to distinguish a dead
 * AUX channel from a slow branch device */
6027 union dpcd_rev dpcdRev;
6029 if (core_link_read_dpcd(
6034 DC_LOG_ERROR("SST Update Payload: Unable to read DPCD revision "
6035 "of sink while polling payload table "
6036 "updated status bit.");
/* timed out: log and continue — branch device is misbehaving */
6044 if (!result && retries == max_retries) {
6045 DC_LOG_ERROR("SST Update Payload: Payload table not updated after retries, "
6046 "continue on. Something is wrong with the branch.");
6047 // TODO - DP2.0 Payload: Read and log the payload table from downstream branch
6050 proposed_table->stream_count = 1; /// Always 1 stream for SST
6051 proposed_table->stream_allocations[0].slot_count = req_slot_count;
6052 proposed_table->stream_allocations[0].vcp_id = vc_id;
/* Poll the sink (up to 30 tries) for the ACT_HANDLED bit in
 * DP_PAYLOAD_TABLE_UPDATE_STATUS after a payload allocation change.
 * Aborts early with ACT_LINK_LOST if lane status shows the link dropped
 * (CR/EQ/symbol-lock/inter-lane-align no longer good). Returns true only
 * on ACT_SUCCESS.
 * NOTE(review): loop-iteration delay and some braces are elided in this
 * excerpt.
 */
6057 bool dpcd_poll_for_allocation_change_trigger(struct dc_link *link)
6060 * wait for ACT handled
6063 const int act_retries = 30;
6064 enum act_return_status result = ACT_FAILED;
6065 union payload_table_update_status update_status = {0};
6066 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
6067 union lane_align_status_updated lane_status_updated;
6069 for (i = 0; i < act_retries; i++) {
6070 get_lane_status(link, link->cur_link_settings.lane_count, dpcd_lane_status, &lane_status_updated);
/* bail out if any training criterion is no longer met */
6072 if (!dp_is_cr_done(link->cur_link_settings.lane_count, dpcd_lane_status) ||
6073 !dp_is_ch_eq_done(link->cur_link_settings.lane_count, dpcd_lane_status) ||
6074 !dp_is_symbol_locked(link->cur_link_settings.lane_count, dpcd_lane_status) ||
6075 !dp_is_interlane_aligned(lane_status_updated)) {
6076 DC_LOG_ERROR("SST Update Payload: Link loss occurred while "
6077 "polling for ACT handled.");
6078 result = ACT_LINK_LOST;
6081 core_link_read_dpcd(
6083 DP_PAYLOAD_TABLE_UPDATE_STATUS,
6087 if (update_status.bits.ACT_HANDLED == 1) {
6088 DC_LOG_DP2("SST Update Payload: ACT handled by downstream.");
6089 result = ACT_SUCCESS;
/* still ACT_FAILED after all retries: log and continue anyway */
6096 if (result == ACT_FAILED) {
6097 DC_LOG_ERROR("SST Update Payload: ACT still not handled after retries, "
6098 "continue on. Something is wrong with the branch.");
6101 return (result == ACT_SUCCESS);
/* Compute average time slots per MTP for an SST stream in fixed-point:
 *   (stream bandwidth) / (effective link bandwidth / MAX_MTP_SLOT_COUNT).
 * NOTE(review): the wrapping call around dc_link_bandwidth_kbps /
 * dc_bandwidth_in_kbps_from_timing (presumably an int-to-fixed
 * conversion) is elided in this excerpt.
 */
6104 struct fixed31_32 calculate_sst_avg_time_slots_per_mtp(
6105 const struct dc_stream_state *stream,
6106 const struct dc_link *link)
6108 struct fixed31_32 link_bw_effective =
6110 dc_link_bandwidth_kbps(link, &link->cur_link_settings));
/* bandwidth available per time slot */
6111 struct fixed31_32 timeslot_bw_effective =
6112 dc_fixpt_div_int(link_bw_effective, MAX_MTP_SLOT_COUNT);
6113 struct fixed31_32 timing_bw =
6115 dc_bandwidth_in_kbps_from_timing(&stream->timing));
6116 struct fixed31_32 avg_time_slots_per_mtp =
6117 dc_fixpt_div(timing_bw, timeslot_bw_effective);
6119 return avg_time_slots_per_mtp;
6122 bool is_dp_128b_132b_signal(struct pipe_ctx *pipe_ctx)
6124 return (pipe_ctx->stream_res.hpo_dp_stream_enc &&
6125 pipe_ctx->stream->link->hpo_dp_link_enc &&
6126 dc_is_dp_signal(pipe_ctx->stream->signal));