2 * Copyright 2015 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
24 #include "dm_services.h"
26 #include "dc_link_dp.h"
27 #include "dm_helpers.h"
32 #include "inc/core_types.h"
33 #include "link_hwss.h"
34 #include "dc_link_ddc.h"
35 #include "core_status.h"
36 #include "dpcd_defs.h"
37 #include "dc_dmub_srv.h"
38 #include "dce/dmub_hw_lock_mgr.h"
39 #include "inc/dc_link_dpia.h"
40 #include "inc/link_enc_cfg.h"
/* DPCD branch-device ID strings used to recognize specific DP-to-VGA/LVDS
 * converter dongles.  The literals are byte-reversed human-readable names
 * ("sivarT" -> "Travis", "dnomlA" -> "Almond").
 * NOTE(review): the reversal convention is inferred from the literals;
 * confirm at the DPCD branch-ID comparison site, which is not visible in
 * this excerpt.
 */
43 static const uint8_t DP_VGA_LVDS_CONVERTER_ID_2[] = "sivarT";
45 static const uint8_t DP_VGA_LVDS_CONVERTER_ID_3[] = "dnomlA";
/* Tracing is compiled out of this file: the macro expands to nothing. */
49 #define DC_TRACE_LEVEL_MESSAGE(...) /* do nothing */
51 #include "link_dpcd.h"
53 /* maximum pre emphasis level allowed for each voltage swing level*/
54 static const enum dc_pre_emphasis
55 voltage_swing_to_pre_emphasis[] = { PRE_EMPHASIS_LEVEL3,
/* Indexed by enum dc_voltage_swing; consumed by
 * get_max_pre_emphasis_for_voltage_swing() later in this file.
 * NOTE(review): the entries for the intermediate swing levels are not
 * visible in this excerpt.
 */
58 PRE_EMPHASIS_DISABLED };
/* Post-link-training adjustment-request handling:
 * POST_LT_ADJ_REQ_LIMIT   - maximum number of adjust requests honored.
 * POST_LT_ADJ_REQ_TIMEOUT - timeout for the adjust-request phase;
 *   NOTE(review): unit not visible here (presumably milliseconds) —
 *   confirm at the POST_LT_ADJ polling loop.  The enclosing enum
 *   header/footer lines are not visible in this excerpt.
 */
61 POST_LT_ADJ_REQ_LIMIT = 6,
62 POST_LT_ADJ_REQ_TIMEOUT = 200
65 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* One candidate (lane count, link rate) combination tried during
 * link-training fallback.  NOTE(review): the struct's closing brace is
 * not visible in this excerpt.
 */
66 struct dp_lt_fallback_entry {
67 enum dc_lane_count lane_count;
68 enum dc_link_rate link_rate;
71 static const struct dp_lt_fallback_entry dp_lt_fallbacks[] = {
72 /* This link training fallback array is ordered by
73 * link bandwidth from highest to lowest.
74 * DP specs makes it a normative policy to always
75 * choose the next highest link bandwidth during
76 * link training fallback.
/* Covers both 128b/132b UHBR rates (UHBR10/13.5/20) and 8b/10b rates
 * (RBR..HBR3).  NOTE(review): the array's closing brace is not visible
 * in this excerpt. */
78 {LANE_COUNT_FOUR, LINK_RATE_UHBR20},
79 {LANE_COUNT_FOUR, LINK_RATE_UHBR13_5},
80 {LANE_COUNT_TWO, LINK_RATE_UHBR20},
81 {LANE_COUNT_FOUR, LINK_RATE_UHBR10},
82 {LANE_COUNT_TWO, LINK_RATE_UHBR13_5},
83 {LANE_COUNT_FOUR, LINK_RATE_HIGH3},
84 {LANE_COUNT_ONE, LINK_RATE_UHBR20},
85 {LANE_COUNT_TWO, LINK_RATE_UHBR10},
86 {LANE_COUNT_FOUR, LINK_RATE_HIGH2},
87 {LANE_COUNT_ONE, LINK_RATE_UHBR13_5},
88 {LANE_COUNT_TWO, LINK_RATE_HIGH3},
89 {LANE_COUNT_ONE, LINK_RATE_UHBR10},
90 {LANE_COUNT_TWO, LINK_RATE_HIGH2},
91 {LANE_COUNT_FOUR, LINK_RATE_HIGH},
92 {LANE_COUNT_ONE, LINK_RATE_HIGH3},
93 {LANE_COUNT_FOUR, LINK_RATE_LOW},
94 {LANE_COUNT_ONE, LINK_RATE_HIGH2},
95 {LANE_COUNT_TWO, LINK_RATE_HIGH},
96 {LANE_COUNT_TWO, LINK_RATE_LOW},
97 {LANE_COUNT_ONE, LINK_RATE_HIGH},
98 {LANE_COUNT_ONE, LINK_RATE_LOW},
/* Forward declarations for static helpers defined later in this file. */
102 static bool decide_fallback_link_setting(
103 struct dc_link_settings initial_link_settings,
104 struct dc_link_settings *current_link_setting,
105 enum link_training_result training_result);
106 static struct dc_link_settings get_common_supported_link_settings(
107 struct dc_link_settings link_setting_a,
108 struct dc_link_settings link_setting_b);
109 static void maximize_lane_settings(const struct link_training_settings *lt_settings,
110 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX]);
111 static void override_lane_settings(const struct link_training_settings *lt_settings,
112 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX]);
/* Return the AUX-read poll interval, in microseconds, to use between
 * clock-recovery (CR) training iterations.
 *
 * Defaults to 100 us.  For 8b/10b links on DPCD rev 1.2+ sinks the value
 * is overridden from DP_TRAINING_AUX_RD_INTERVAL, where each unit of the
 * TRAINIG_AUX_RD_INTERVAL field represents 4 ms (hence * 4000 us).
 *
 * NOTE(review): the core_link_read_dpcd() call line(s) and the
 * #else/#endif of the CONFIG_DRM_AMD_DC_DCN block are not visible in
 * this excerpt.
 */
114 static uint32_t get_cr_training_aux_rd_interval(struct dc_link *link,
115 const struct dc_link_settings *link_settings)
117 union training_aux_rd_interval training_rd_interval;
118 uint32_t wait_in_micro_secs = 100;
119 #if defined(CONFIG_DRM_AMD_DC_DCN)
120 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
121 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING &&
122 link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
125 DP_TRAINING_AUX_RD_INTERVAL,
126 (uint8_t *)&training_rd_interval,
127 sizeof(training_rd_interval));
128 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
129 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
/* Non-DCN build path: same DPCD read and 4 ms-per-unit conversion. */
134 DP_TRAINING_AUX_RD_INTERVAL,
135 (uint8_t *)&training_rd_interval,
136 sizeof(training_rd_interval));
137 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
138 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
140 return wait_in_micro_secs;
/* Return the AUX-read poll interval, in microseconds, for the channel-
 * equalization (EQ) training phase.
 *
 * DCN builds: 128b/132b links read DP_128b_132b_TRAINING_AUX_RD_INTERVAL,
 * 8b/10b links on DPCD 1.2+ read DP_TRAINING_AUX_RD_INTERVAL; the coded
 * field value is then mapped to a delay (3 -> 12 ms, 4 -> 16 ms,
 * 5 -> 32 ms, 6 -> 64 ms; lower codes not visible here).
 * Non-DCN builds default to 400 us, overridden for DPCD 1.2+ where each
 * register unit represents 4 ms.
 *
 * NOTE(review): the read calls, switch cases 0-2/default, and the
 * #else/#endif separating the two build variants are not visible in this
 * excerpt.
 */
143 static uint32_t get_eq_training_aux_rd_interval(
144 struct dc_link *link,
145 const struct dc_link_settings *link_settings)
147 #if defined(CONFIG_DRM_AMD_DC_DCN)
148 union training_aux_rd_interval training_rd_interval;
150 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
151 if (dp_get_link_encoding_format(link_settings) == DP_128b_132b_ENCODING) {
154 DP_128b_132b_TRAINING_AUX_RD_INTERVAL,
155 (uint8_t *)&training_rd_interval,
156 sizeof(training_rd_interval));
157 } else if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING &&
158 link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
161 DP_TRAINING_AUX_RD_INTERVAL,
162 (uint8_t *)&training_rd_interval,
163 sizeof(training_rd_interval));
166 switch (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL) {
170 case 3: return 12000;
171 case 4: return 16000;
172 case 5: return 32000;
173 case 6: return 64000;
/* Non-DCN build path below. */
177 union training_aux_rd_interval training_rd_interval;
178 uint32_t wait_in_micro_secs = 400;
180 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
181 /* overwrite the delay if rev > 1.1*/
182 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
183 /* DP 1.2 or later - retrieve delay through
184 * "DPCD_ADDR_TRAINING_AUX_RD_INTERVAL" register */
187 DP_TRAINING_AUX_RD_INTERVAL,
188 (uint8_t *)&training_rd_interval,
189 sizeof(training_rd_interval));
191 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
192 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
195 return wait_in_micro_secs;
/* Block for the given training AUX-read interval.  On DCN builds,
 * intervals above 16 ms use msleep() (scheduler-friendly, may oversleep)
 * while shorter ones busy-wait with udelay(); other builds always
 * udelay().  Logs the wait for HW link-training tracing.
 * NOTE(review): the #else/#endif lines and the log's argument lines are
 * not visible in this excerpt.
 */
199 void dp_wait_for_training_aux_rd_interval(
200 struct dc_link *link,
201 uint32_t wait_in_micro_secs)
203 #if defined(CONFIG_DRM_AMD_DC_DCN)
204 if (wait_in_micro_secs > 16000)
205 msleep(wait_in_micro_secs/1000);
207 udelay(wait_in_micro_secs);
209 udelay(wait_in_micro_secs);
212 DC_LOG_HW_LINK_TRAINING("%s:\n wait = %d\n",
/* Translate a driver training-pattern enum into the value written to the
 * sink's DPCD TRAINING_PATTERN_SET register.  Unrecognized patterns are
 * logged and fall back to the video-idle (no pattern) value.
 * NOTE(review): the switch header, break statements and closing braces
 * are not visible in this excerpt.
 */
217 enum dpcd_training_patterns
218 dc_dp_training_pattern_to_dpcd_training_pattern(
219 struct dc_link *link,
220 enum dc_dp_training_pattern pattern)
222 enum dpcd_training_patterns dpcd_tr_pattern =
223 DPCD_TRAINING_PATTERN_VIDEOIDLE;
226 case DP_TRAINING_PATTERN_SEQUENCE_1:
227 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_1;
229 case DP_TRAINING_PATTERN_SEQUENCE_2:
230 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_2;
232 case DP_TRAINING_PATTERN_SEQUENCE_3:
233 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_3;
235 case DP_TRAINING_PATTERN_SEQUENCE_4:
236 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_4;
238 #if defined(CONFIG_DRM_AMD_DC_DCN)
239 case DP_128b_132b_TPS1:
240 dpcd_tr_pattern = DPCD_128b_132b_TPS1;
242 case DP_128b_132b_TPS2:
243 dpcd_tr_pattern = DPCD_128b_132b_TPS2;
245 case DP_128b_132b_TPS2_CDS:
246 dpcd_tr_pattern = DPCD_128b_132b_TPS2_CDS;
249 case DP_TRAINING_PATTERN_VIDEOIDLE:
250 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_VIDEOIDLE;
254 DC_LOG_HW_LINK_TRAINING("%s: Invalid HW Training pattern: %d\n",
259 return dpcd_tr_pattern;
/* Write the requested training pattern to the sink's
 * DP_TRAINING_PATTERN_SET DPCD register and log the value programmed.
 * NOTE(review): the write's data/size argument lines are not visible in
 * this excerpt.
 */
262 static void dpcd_set_training_pattern(
263 struct dc_link *link,
264 enum dc_dp_training_pattern training_pattern)
266 union dpcd_training_pattern dpcd_pattern = {0};
268 dpcd_pattern.v1_4.TRAINING_PATTERN_SET =
269 dc_dp_training_pattern_to_dpcd_training_pattern(
270 link, training_pattern);
272 core_link_write_dpcd(
274 DP_TRAINING_PATTERN_SET,
278 DC_LOG_HW_LINK_TRAINING("%s\n %x pattern = %x\n",
280 DP_TRAINING_PATTERN_SET,
281 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
/* Choose the training pattern for the clock-recovery phase:
 * TPS1 for 8b/10b links, 128b/132b TPS1 for 128b/132b links (DCN only).
 * NOTE(review): the default case, #endif and closing braces are not
 * visible in this excerpt.
 */
284 static enum dc_dp_training_pattern decide_cr_training_pattern(
285 const struct dc_link_settings *link_settings)
287 switch (dp_get_link_encoding_format(link_settings)) {
288 case DP_8b_10b_ENCODING:
290 return DP_TRAINING_PATTERN_SEQUENCE_1;
291 #if defined(CONFIG_DRM_AMD_DC_DCN)
292 case DP_128b_132b_ENCODING:
293 return DP_128b_132b_TPS1;
/* Choose the training pattern for the channel-equalization phase: the
 * highest TPS supported by BOTH the link encoder (TX caps) and the sink
 * (RX caps reported in DPCD), falling back to TPS2.  128b/132b links
 * always use 128b/132b TPS2.  The link encoder is looked up dynamically
 * when DIG mapping is flexible, otherwise the statically assigned one is
 * used.
 * NOTE(review): this excerpt appears to contain BOTH the DCN and the
 * non-DCN variants of the body with the separating #else/#endif (and
 * several braces/breaks) missing.
 */
298 static enum dc_dp_training_pattern decide_eq_training_pattern(struct dc_link *link,
299 const struct dc_link_settings *link_settings)
301 struct link_encoder *link_enc;
302 #if defined(CONFIG_DRM_AMD_DC_DCN)
303 struct encoder_feature_support *enc_caps;
304 struct dpcd_caps *rx_caps = &link->dpcd_caps;
305 enum dc_dp_training_pattern pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
307 /* Access link encoder capability based on whether it is statically
308 * or dynamically assigned to a link.
310 if (link->is_dig_mapping_flexible &&
311 link->dc->res_pool->funcs->link_encs_assign)
312 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
314 link_enc = link->link_enc;
316 enc_caps = &link_enc->features;
318 switch (dp_get_link_encoding_format(link_settings)) {
319 case DP_8b_10b_ENCODING:
320 if (enc_caps->flags.bits.IS_TPS4_CAPABLE &&
321 rx_caps->max_down_spread.bits.TPS4_SUPPORTED)
322 pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
323 else if (enc_caps->flags.bits.IS_TPS3_CAPABLE &&
324 rx_caps->max_ln_count.bits.TPS3_SUPPORTED)
325 pattern = DP_TRAINING_PATTERN_SEQUENCE_3;
327 pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
329 case DP_128b_132b_ENCODING:
330 pattern = DP_128b_132b_TPS2;
333 pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
/* Non-DCN build variant below: compute the highest TX-capable TPS first,
 * then clamp by what the sink advertises. */
338 enum dc_dp_training_pattern highest_tp = DP_TRAINING_PATTERN_SEQUENCE_2;
339 struct encoder_feature_support *features;
340 struct dpcd_caps *dpcd_caps = &link->dpcd_caps;
342 /* Access link encoder capability based on whether it is statically
343 * or dynamically assigned to a link.
345 if (link->is_dig_mapping_flexible &&
346 link->dc->res_pool->funcs->link_encs_assign)
347 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
349 link_enc = link->link_enc;
351 features = &link_enc->features;
353 if (features->flags.bits.IS_TPS3_CAPABLE)
354 highest_tp = DP_TRAINING_PATTERN_SEQUENCE_3;
356 if (features->flags.bits.IS_TPS4_CAPABLE)
357 highest_tp = DP_TRAINING_PATTERN_SEQUENCE_4;
359 if (dpcd_caps->max_down_spread.bits.TPS4_SUPPORTED &&
360 highest_tp >= DP_TRAINING_PATTERN_SEQUENCE_4)
361 return DP_TRAINING_PATTERN_SEQUENCE_4;
363 if (dpcd_caps->max_ln_count.bits.TPS3_SUPPORTED &&
364 highest_tp >= DP_TRAINING_PATTERN_SEQUENCE_3)
365 return DP_TRAINING_PATTERN_SEQUENCE_3;
367 return DP_TRAINING_PATTERN_SEQUENCE_2;
371 #if defined(CONFIG_DRM_AMD_DC_DCN)
372 static uint8_t get_dpcd_link_rate(const struct dc_link_settings *link_settings)
374 uint8_t link_rate = 0;
375 enum dp_link_encoding encoding = dp_get_link_encoding_format(link_settings);
377 if (encoding == DP_128b_132b_ENCODING)
378 switch (link_settings->link_rate) {
379 case LINK_RATE_UHBR10:
382 case LINK_RATE_UHBR20:
385 case LINK_RATE_UHBR13_5:
392 else if (encoding == DP_8b_10b_ENCODING)
393 link_rate = (uint8_t) link_settings->link_rate;
/* Vendor-specific LTTPR workaround, part one of two: write a magic AUX
 * sequence at vendor register 0xF004F (shifted to the addressed repeater
 * instance) that makes certain LTTPRs reset their lane settings.
 * NOTE(review): the function's opening brace and the guard around the
 * address adjustment (likely an offset validity check) are not visible
 * in this excerpt.
 */
401 static void vendor_specific_lttpr_wa_one_start(struct dc_link *link)
403 const uint8_t vendor_lttpr_write_data[4] = {0x1, 0x50, 0x63, 0xff};
404 const uint8_t offset = dp_convert_to_count(
405 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
406 uint32_t vendor_lttpr_write_address = 0xF004F;
409 vendor_lttpr_write_address +=
410 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
412 /* W/A for certain LTTPR to reset their lane settings, part one of two */
413 core_link_write_dpcd(
415 vendor_lttpr_write_address,
416 &vendor_lttpr_write_data[0],
417 sizeof(vendor_lttpr_write_data));
/* Vendor-specific LTTPR workaround, part two of two: write the closing
 * magic AUX sequence (same register, terminating data byte 0x0) to end
 * the lane-settings reset started by vendor_specific_lttpr_wa_one_start().
 * NOTE(review): the second parameter's declaration line and the guard
 * around the address adjustment are not visible in this excerpt.
 */
420 static void vendor_specific_lttpr_wa_one_end(
421 struct dc_link *link,
424 const uint8_t vendor_lttpr_write_data[4] = {0x1, 0x50, 0x63, 0x0};
425 const uint8_t offset = dp_convert_to_count(
426 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
427 uint32_t vendor_lttpr_write_address = 0xF004F;
431 vendor_lttpr_write_address +=
432 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
434 /* W/A for certain LTTPR to reset their lane settings, part two of two */
435 core_link_write_dpcd(
437 vendor_lttpr_write_address,
438 &vendor_lttpr_write_data[0],
439 sizeof(vendor_lttpr_write_data));
/* Vendor-specific LTTPR workaround: when the link-rate workaround is
 * armed and the same rate is attempted again, write a DPCD sequence that
 * resets the LTTPR's internal link-training state; the last attempted
 * rate is always recorded on the link for the next comparison.
 * NOTE(review): the rate parameter's declaration, the toggle-rate
 * computation and the write's argument lines are not visible in this
 * excerpt.
 */
443 static void vendor_specific_lttpr_wa_one_two(
444 struct dc_link *link,
447 if (link->apply_vendor_specific_lttpr_link_rate_wa) {
448 uint8_t toggle_rate = 0x0;
455 if (link->vendor_specific_lttpr_link_rate_wa == rate) {
456 /* W/A for certain LTTPR to reset internal state for link training */
457 core_link_write_dpcd(
464 /* Store the last attempted link rate for this link */
465 link->vendor_specific_lttpr_link_rate_wa = rate;
/* Vendor-specific LTTPR workaround: read back the voltage-swing and
 * pre-emphasis lane settings actually requested by the DPRX through
 * vendor AUX registers (write 0xF004F to select, read from 0xF0053, both
 * shifted per repeater instance unless offset is 0xFF) and repack the
 * two bits per lane into dpcd_lane_adjust[].
 * NOTE(review): the declarations of lane/dprx_vs/dprx_pe and the
 * core_link_read_dpcd() call lines are not visible in this excerpt.
 */
469 static void vendor_specific_lttpr_wa_three(
470 struct dc_link *link,
471 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX])
473 const uint8_t vendor_lttpr_write_data_vs[3] = {0x0, 0x53, 0x63};
474 const uint8_t vendor_lttpr_write_data_pe[3] = {0x0, 0x54, 0x63};
475 const uint8_t offset = dp_convert_to_count(
476 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
477 uint32_t vendor_lttpr_write_address = 0xF004F;
478 uint32_t vendor_lttpr_read_address = 0xF0053;
483 if (offset != 0xFF) {
484 vendor_lttpr_write_address +=
485 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
486 vendor_lttpr_read_address +=
487 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
490 /* W/A to read lane settings requested by DPRX */
491 core_link_write_dpcd(
493 vendor_lttpr_write_address,
494 &vendor_lttpr_write_data_vs[0],
495 sizeof(vendor_lttpr_write_data_vs));
498 vendor_lttpr_read_address,
501 core_link_write_dpcd(
503 vendor_lttpr_write_address,
504 &vendor_lttpr_write_data_pe[0],
505 sizeof(vendor_lttpr_write_data_pe));
508 vendor_lttpr_read_address,
/* Unpack 2 bits per lane from the read-back VS/PE bytes. */
512 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
513 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_LANE = (dprx_vs >> (2 * lane)) & 0x3;
514 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_LANE = (dprx_pe >> (2 * lane)) & 0x3;
/* Vendor-specific LTTPR workaround used while clearing the training
 * pattern: bracket the TPS=0 DPCD write with vendor AUX writes so it is
 * passed through to the DPRX, then (on DP2.0 builds) poll DP_SINK_STATUS
 * up to 10 times for the intra-hop AUX reply indication to clear.
 * NOTE(review): the second parameter's declaration, the conditional
 * around the passthrough writes, the loop-index declaration and the
 * closing control flow are not visible in this excerpt.
 */
518 static void vendor_specific_lttpr_wa_four(
519 struct dc_link *link,
522 const uint8_t vendor_lttpr_write_data_one[4] = {0x1, 0x55, 0x63, 0x8};
523 const uint8_t vendor_lttpr_write_data_two[4] = {0x1, 0x55, 0x63, 0x0};
524 const uint8_t offset = dp_convert_to_count(
525 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
526 uint32_t vendor_lttpr_write_address = 0xF004F;
527 #if defined(CONFIG_DRM_AMD_DC_DP2_0)
528 uint8_t sink_status = 0;
533 vendor_lttpr_write_address +=
534 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
536 /* W/A to pass through DPCD write of TPS=0 to DPRX */
538 core_link_write_dpcd(
540 vendor_lttpr_write_address,
541 &vendor_lttpr_write_data_one[0],
542 sizeof(vendor_lttpr_write_data_one));
545 /* clear training pattern set */
546 dpcd_set_training_pattern(link, DP_TRAINING_PATTERN_VIDEOIDLE);
549 core_link_write_dpcd(
551 vendor_lttpr_write_address,
552 &vendor_lttpr_write_data_two[0],
553 sizeof(vendor_lttpr_write_data_two));
556 #if defined(CONFIG_DRM_AMD_DC_DP2_0)
557 /* poll for intra-hop disable */
558 for (i = 0; i < 10; i++) {
559 if ((core_link_read_dpcd(link, DP_SINK_STATUS, &sink_status, 1) == DC_OK) &&
560 (sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION) == 0)
/* Program the sink's link configuration over DPCD before training:
 * - DOWNSPREAD_CTRL from the chosen link spread;
 * - LANE_COUNT_SET with enhanced framing, granting POST_LT_ADJ only for
 *   physical endpoints that are not using TPS4;
 * - link rate, either via DP_LINK_RATE_SET (DPCD 1.3+ with
 *   use_link_rate_set, including an eDP MUX-chip workaround that
 *   re-reads DP_SUPPORTED_LINK_RATES first so the MUX regains its table)
 *   or via DP_LINK_BW_SET, with vendor-specific fixed-VS LTTPR
 *   workarounds applied around the write.
 * Returns the status of the last DPCD write performed.
 * NOTE(review): the opening brace, the "rate" variable declaration,
 * several log-argument lines, #else/#endif and the final return are not
 * visible in this excerpt.
 */
567 enum dc_status dpcd_set_link_settings(
568 struct dc_link *link,
569 const struct link_training_settings *lt_settings)
572 enum dc_status status;
574 union down_spread_ctrl downspread = {0};
575 union lane_count_set lane_count_set = {0};
577 downspread.raw = (uint8_t)
578 (lt_settings->link_settings.link_spread);
580 lane_count_set.bits.LANE_COUNT_SET =
581 lt_settings->link_settings.lane_count;
583 lane_count_set.bits.ENHANCED_FRAMING = lt_settings->enhanced_framing;
584 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
587 if (link->ep_type == DISPLAY_ENDPOINT_PHY &&
588 lt_settings->pattern_for_eq < DP_TRAINING_PATTERN_SEQUENCE_4) {
589 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED =
590 link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED;
593 status = core_link_write_dpcd(link, DP_DOWNSPREAD_CTRL,
594 &downspread.raw, sizeof(downspread));
596 status = core_link_write_dpcd(link, DP_LANE_COUNT_SET,
597 &lane_count_set.raw, 1);
599 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_13 &&
600 lt_settings->link_settings.use_link_rate_set == true) {
602 /* WA for some MUX chips that will power down with eDP and lose supported
603 * link rate set for eDP 1.4. Source reads DPCD 0x010 again to ensure
604 * MUX chip gets link rate set back before link training.
606 if (link->connector_signal == SIGNAL_TYPE_EDP) {
607 uint8_t supported_link_rates[16];
609 core_link_read_dpcd(link, DP_SUPPORTED_LINK_RATES,
610 supported_link_rates, sizeof(supported_link_rates));
612 status = core_link_write_dpcd(link, DP_LINK_BW_SET, &rate, 1);
613 status = core_link_write_dpcd(link, DP_LINK_RATE_SET,
614 &lt_settings->link_settings.link_rate_set, 1);
616 #if defined(CONFIG_DRM_AMD_DC_DCN)
617 rate = get_dpcd_link_rate(&lt_settings->link_settings);
619 rate = (uint8_t) (lt_settings->link_settings.link_rate);
621 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
622 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
623 link->lttpr_mode == LTTPR_MODE_TRANSPARENT)
624 vendor_specific_lttpr_wa_one_start(link);
626 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
627 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN))
628 vendor_specific_lttpr_wa_one_two(link, rate);
630 status = core_link_write_dpcd(link, DP_LINK_BW_SET, &rate, 1);
634 DC_LOG_HW_LINK_TRAINING("%s\n %x rate = %x\n %x lane = %x framing = %x\n %x spread = %x\n",
637 lt_settings->link_settings.link_rate,
639 lt_settings->link_settings.lane_count,
640 lt_settings->enhanced_framing,
642 lt_settings->link_settings.link_spread);
644 DC_LOG_HW_LINK_TRAINING("%s\n %x rate set = %x\n %x lane = %x framing = %x\n %x spread = %x\n",
647 lt_settings->link_settings.link_rate_set,
649 lt_settings->link_settings.lane_count,
650 lt_settings->enhanced_framing,
652 lt_settings->link_settings.link_spread);
/* Return the SCRAMBLING_DISABLE bit to program for the given training
 * pattern: 1 for TPS1-TPS3 (8b/10b patterns are transmitted
 * unscrambled), 0 for TPS4 and the 128b/132b patterns.  Unknown patterns
 * are logged and leave the bit at 0.
 * NOTE(review): the switch header, break statements and closing braces
 * are not visible in this excerpt.
 */
658 uint8_t dc_dp_initialize_scrambling_data_symbols(
659 struct dc_link *link,
660 enum dc_dp_training_pattern pattern)
662 uint8_t disable_scrabled_data_symbols = 0;
665 case DP_TRAINING_PATTERN_SEQUENCE_1:
666 case DP_TRAINING_PATTERN_SEQUENCE_2:
667 case DP_TRAINING_PATTERN_SEQUENCE_3:
668 disable_scrabled_data_symbols = 1;
670 case DP_TRAINING_PATTERN_SEQUENCE_4:
671 #if defined(CONFIG_DRM_AMD_DC_DCN)
672 case DP_128b_132b_TPS1:
673 case DP_128b_132b_TPS2:
675 disable_scrabled_data_symbols = 0;
679 DC_LOG_HW_LINK_TRAINING("%s: Invalid HW Training pattern: %d\n",
683 return disable_scrabled_data_symbols;
686 static inline bool is_repeater(struct dc_link *link, uint32_t offset)
688 return (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) && (offset != 0);
/* Write the training pattern and the per-lane drive settings to the DPRX
 * (or to an LTTPR instance when 'offset' addresses a repeater) in one
 * DPCD transaction where possible:
 * - The pattern byte (including the scrambling-disable bit) and the lane
 *   settings are packed into dpcd_lt_buffer[].
 * - eDP panels hit by EPR#366724 get two separate writes instead of the
 *   5-byte burst.
 * - 128b/132b links write the whole buffer; otherwise a
 *   (1 + lane_count)-byte burst is used.
 * Logging differs for repeater vs. DPRX targets and for 128b/132b (FFE
 * preset) vs. 8b/10b (VS/PE) encodings.
 * NOTE(review): the opening brace, the copy into dpcd_lt_buffer, several
 * log-argument lines and some #else/#endif lines are not visible in this
 * excerpt.
 */
691 static void dpcd_set_lt_pattern_and_lane_settings(
692 struct dc_link *link,
693 const struct link_training_settings *lt_settings,
694 enum dc_dp_training_pattern pattern,
697 uint32_t dpcd_base_lt_offset;
699 uint8_t dpcd_lt_buffer[5] = {0};
700 union dpcd_training_pattern dpcd_pattern = { 0 };
701 uint32_t size_in_bytes;
702 bool edp_workaround = false; /* TODO link_prop.INTERNAL */
703 dpcd_base_lt_offset = DP_TRAINING_PATTERN_SET;
705 if (is_repeater(link, offset))
706 dpcd_base_lt_offset = DP_TRAINING_PATTERN_SET_PHY_REPEATER1 +
707 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
709 /*****************************************************************
710 * DpcdAddress_TrainingPatternSet
711 *****************************************************************/
712 dpcd_pattern.v1_4.TRAINING_PATTERN_SET =
713 dc_dp_training_pattern_to_dpcd_training_pattern(link, pattern);
715 dpcd_pattern.v1_4.SCRAMBLING_DISABLE =
716 dc_dp_initialize_scrambling_data_symbols(link, pattern);
718 dpcd_lt_buffer[DP_TRAINING_PATTERN_SET - DP_TRAINING_PATTERN_SET]
721 if (is_repeater(link, offset)) {
722 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Repeater ID: %d\n 0x%X pattern = %x\n",
726 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
728 DC_LOG_HW_LINK_TRAINING("%s\n 0x%X pattern = %x\n",
731 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
734 /* concatenate everything into one buffer*/
735 size_in_bytes = lt_settings->link_settings.lane_count *
736 sizeof(lt_settings->dpcd_lane_settings[0]);
740 &dpcd_lt_buffer[DP_TRAINING_LANE0_SET - DP_TRAINING_PATTERN_SET],
741 lt_settings->dpcd_lane_settings,
744 if (is_repeater(link, offset)) {
745 #if defined(CONFIG_DRM_AMD_DC_DCN)
746 if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
747 DP_128b_132b_ENCODING)
748 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
749 " 0x%X TX_FFE_PRESET_VALUE = %x\n",
753 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
754 else if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
757 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
758 " 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
762 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
763 lt_settings->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
764 lt_settings->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
765 lt_settings->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
767 #if defined(CONFIG_DRM_AMD_DC_DCN)
768 if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
769 DP_128b_132b_ENCODING)
770 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X TX_FFE_PRESET_VALUE = %x\n",
773 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
774 else if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
777 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
780 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
781 lt_settings->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
782 lt_settings->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
783 lt_settings->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
785 if (edp_workaround) {
786 /* for eDP write in 2 parts because the 5-byte burst is
787 * causing issues on some eDP panels (EPR#366724)
789 core_link_write_dpcd(
791 DP_TRAINING_PATTERN_SET,
793 sizeof(dpcd_pattern.raw));
795 core_link_write_dpcd(
797 DP_TRAINING_LANE0_SET,
798 (uint8_t *)(lt_settings->dpcd_lane_settings),
801 #if defined(CONFIG_DRM_AMD_DC_DCN)
802 } else if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
803 DP_128b_132b_ENCODING) {
804 core_link_write_dpcd(
808 sizeof(dpcd_lt_buffer));
811 /* write it all in (1 + number-of-lanes)-byte burst*/
812 core_link_write_dpcd(
816 size_in_bytes + sizeof(dpcd_pattern.raw));
819 bool dp_is_cr_done(enum dc_lane_count ln_count,
820 union lane_status *dpcd_lane_status)
823 /*LANEx_CR_DONE bits All 1's?*/
824 for (lane = 0; lane < (uint32_t)(ln_count); lane++) {
825 if (!dpcd_lane_status[lane].bits.CR_DONE_0)
831 bool dp_is_ch_eq_done(enum dc_lane_count ln_count,
832 union lane_status *dpcd_lane_status)
836 for (lane = 0; lane < (uint32_t)(ln_count); lane++)
837 if (!dpcd_lane_status[lane].bits.CHANNEL_EQ_DONE_0)
842 bool dp_is_symbol_locked(enum dc_lane_count ln_count,
843 union lane_status *dpcd_lane_status)
847 for (lane = 0; lane < (uint32_t)(ln_count); lane++)
848 if (!dpcd_lane_status[lane].bits.SYMBOL_LOCKED_0)
853 bool dp_is_interlane_aligned(union lane_align_status_updated align_status)
855 return align_status.bits.INTERLANE_ALIGN_DONE == 1;
/* Translate HW lane settings into the DPCD TRAINING_LANEx_SET encoding
 * for every lane: 8b/10b copies voltage swing / pre-emphasis and sets
 * the MAX_*_REACHED flags when the corresponding setting is at its
 * maximum level; 128b/132b copies the TX FFE preset level.
 * NOTE(review): the opening brace, lane-index declaration and closing
 * braces/#endif are not visible in this excerpt.
 */
858 void dp_hw_to_dpcd_lane_settings(
859 const struct link_training_settings *lt_settings,
860 const struct dc_lane_settings hw_lane_settings[LANE_COUNT_DP_MAX],
861 union dpcd_training_lane dpcd_lane_settings[LANE_COUNT_DP_MAX])
865 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
866 if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
867 DP_8b_10b_ENCODING) {
868 dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET =
869 (uint8_t)(hw_lane_settings[lane].VOLTAGE_SWING);
870 dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET =
871 (uint8_t)(hw_lane_settings[lane].PRE_EMPHASIS);
872 dpcd_lane_settings[lane].bits.MAX_SWING_REACHED =
873 (hw_lane_settings[lane].VOLTAGE_SWING ==
874 VOLTAGE_SWING_MAX_LEVEL ? 1 : 0);
875 dpcd_lane_settings[lane].bits.MAX_PRE_EMPHASIS_REACHED =
876 (hw_lane_settings[lane].PRE_EMPHASIS ==
877 PRE_EMPHASIS_MAX_LEVEL ? 1 : 0);
879 #if defined(CONFIG_DRM_AMD_DC_DCN)
880 else if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
881 DP_128b_132b_ENCODING) {
882 dpcd_lane_settings[lane].tx_ffe.PRESET_VALUE =
883 hw_lane_settings[lane].FFE_PRESET.settings.level;
/* Derive the next HW and DPCD lane settings from the sink's per-lane
 * adjust requests: 8b/10b takes the requested voltage swing and
 * pre-emphasis, 128b/132b takes the requested TX FFE preset.  When
 * per-lane settings are disallowed, the requests are collapsed to their
 * maximum across lanes and any debug overrides are applied; finally the
 * DPCD copy can be re-synced with the HW copy when
 * always_match_dpcd_with_hw_lane_settings is set.
 * NOTE(review): the opening brace, lane-index declaration and several
 * closing braces/#endif are not visible in this excerpt.
 */
889 void dp_decide_lane_settings(
890 const struct link_training_settings *lt_settings,
891 const union lane_adjust ln_adjust[LANE_COUNT_DP_MAX],
892 struct dc_lane_settings hw_lane_settings[LANE_COUNT_DP_MAX],
893 union dpcd_training_lane dpcd_lane_settings[LANE_COUNT_DP_MAX])
897 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
898 if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
899 DP_8b_10b_ENCODING) {
900 hw_lane_settings[lane].VOLTAGE_SWING =
901 (enum dc_voltage_swing)(ln_adjust[lane].bits.
903 hw_lane_settings[lane].PRE_EMPHASIS =
904 (enum dc_pre_emphasis)(ln_adjust[lane].bits.
907 #if defined(CONFIG_DRM_AMD_DC_DCN)
908 else if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
909 DP_128b_132b_ENCODING) {
910 hw_lane_settings[lane].FFE_PRESET.raw =
911 ln_adjust[lane].tx_ffe.PRESET_VALUE;
915 dp_hw_to_dpcd_lane_settings(lt_settings, hw_lane_settings, dpcd_lane_settings);
917 if (lt_settings->disallow_per_lane_settings) {
918 /* we find the maximum of the requested settings across all lanes*/
919 /* and set this maximum for all lanes*/
920 maximize_lane_settings(lt_settings, hw_lane_settings);
921 override_lane_settings(lt_settings, hw_lane_settings);
923 if (lt_settings->always_match_dpcd_with_hw_lane_settings)
924 dp_hw_to_dpcd_lane_settings(lt_settings, hw_lane_settings, dpcd_lane_settings);
/* Extract the 4-bit per-lane field from a packed DPCD status buffer,
 * where two lanes share one byte (even lane in the low nibble, odd lane
 * in the high nibble per the DP spec layout).
 * NOTE(review): the index parameter's declaration and the shift/mask
 * logic are not visible in this excerpt — confirm the nibble-ordering
 * claim against the full function body.
 */
929 static uint8_t get_nibble_at_index(const uint8_t *buf,
933 nibble = buf[index / 2];
943 static enum dc_pre_emphasis get_max_pre_emphasis_for_voltage_swing(
944 enum dc_voltage_swing voltage)
946 enum dc_pre_emphasis pre_emphasis;
947 pre_emphasis = PRE_EMPHASIS_MAX_LEVEL;
949 if (voltage <= VOLTAGE_SWING_MAX_LEVEL)
950 pre_emphasis = voltage_swing_to_pre_emphasis[voltage];
/* Collapse per-lane requested settings to a single per-link value: take
 * the maximum voltage swing / pre-emphasis (and FFE preset on DCN)
 * across the active lanes, clamp each to its maximum level, cap
 * pre-emphasis by the table limit for the chosen swing, then write the
 * result into every lane slot.  Lane 0 seeds the maximum, so the scan
 * starts at lane 1 intentionally.
 * NOTE(review): the opening brace, lane-index declaration and several
 * closing braces/#endif are not visible in this excerpt.
 */
956 static void maximize_lane_settings(const struct link_training_settings *lt_settings,
957 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX])
960 struct dc_lane_settings max_requested;
962 max_requested.VOLTAGE_SWING = lane_settings[0].VOLTAGE_SWING;
963 max_requested.PRE_EMPHASIS = lane_settings[0].PRE_EMPHASIS;
964 #if defined(CONFIG_DRM_AMD_DC_DCN)
965 max_requested.FFE_PRESET = lane_settings[0].FFE_PRESET;
968 /* Determine what the maximum of the requested settings are*/
969 for (lane = 1; lane < lt_settings->link_settings.lane_count; lane++) {
970 if (lane_settings[lane].VOLTAGE_SWING > max_requested.VOLTAGE_SWING)
971 max_requested.VOLTAGE_SWING = lane_settings[lane].VOLTAGE_SWING;
973 if (lane_settings[lane].PRE_EMPHASIS > max_requested.PRE_EMPHASIS)
974 max_requested.PRE_EMPHASIS = lane_settings[lane].PRE_EMPHASIS;
975 #if defined(CONFIG_DRM_AMD_DC_DCN)
976 if (lane_settings[lane].FFE_PRESET.settings.level >
977 max_requested.FFE_PRESET.settings.level)
978 max_requested.FFE_PRESET.settings.level =
979 lane_settings[lane].FFE_PRESET.settings.level;
983 /* make sure the requested settings are
984 * not higher than maximum settings*/
985 if (max_requested.VOLTAGE_SWING > VOLTAGE_SWING_MAX_LEVEL)
986 max_requested.VOLTAGE_SWING = VOLTAGE_SWING_MAX_LEVEL;
988 if (max_requested.PRE_EMPHASIS > PRE_EMPHASIS_MAX_LEVEL)
989 max_requested.PRE_EMPHASIS = PRE_EMPHASIS_MAX_LEVEL;
990 #if defined(CONFIG_DRM_AMD_DC_DCN)
991 if (max_requested.FFE_PRESET.settings.level > DP_FFE_PRESET_MAX_LEVEL)
992 max_requested.FFE_PRESET.settings.level = DP_FFE_PRESET_MAX_LEVEL;
995 /* make sure the pre-emphasis matches the voltage swing*/
996 if (max_requested.PRE_EMPHASIS >
997 get_max_pre_emphasis_for_voltage_swing(
998 max_requested.VOLTAGE_SWING))
999 max_requested.PRE_EMPHASIS =
1000 get_max_pre_emphasis_for_voltage_swing(
1001 max_requested.VOLTAGE_SWING);
1003 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
1004 lane_settings[lane].VOLTAGE_SWING = max_requested.VOLTAGE_SWING;
1005 lane_settings[lane].PRE_EMPHASIS = max_requested.PRE_EMPHASIS;
1006 #if defined(CONFIG_DRM_AMD_DC_DCN)
1007 lane_settings[lane].FFE_PRESET = max_requested.FFE_PRESET;
1012 static void override_lane_settings(const struct link_training_settings *lt_settings,
1013 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX])
1017 if (lt_settings->voltage_swing == NULL &&
1018 lt_settings->pre_emphasis == NULL &&
1019 #if defined(CONFIG_DRM_AMD_DC_DCN)
1020 lt_settings->ffe_preset == NULL &&
1022 lt_settings->post_cursor2 == NULL)
1026 for (lane = 1; lane < LANE_COUNT_DP_MAX; lane++) {
1027 if (lt_settings->voltage_swing)
1028 lane_settings[lane].VOLTAGE_SWING = *lt_settings->voltage_swing;
1029 if (lt_settings->pre_emphasis)
1030 lane_settings[lane].PRE_EMPHASIS = *lt_settings->pre_emphasis;
1031 if (lt_settings->post_cursor2)
1032 lane_settings[lane].POST_CURSOR2 = *lt_settings->post_cursor2;
1033 #if defined(CONFIG_DRM_AMD_DC_DCN)
1034 if (lt_settings->ffe_preset)
1035 lane_settings[lane].FFE_PRESET = *lt_settings->ffe_preset;
/* Read lane status, interlane-align status and per-lane adjust requests
 * in one DPCD burst starting at DP_LANE0_1_STATUS (or at the
 * per-repeater mirror when 'offset' addresses an LTTPR, where the adjust
 * bytes sit one byte earlier — lane_adjust_offset 3 instead of 4).  One
 * nibble per lane is unpacked into ln_status[]/ln_adjust[], the align
 * byte into ln_align, and both register pairs are logged.  Returns the
 * status of the DPCD read.
 * NOTE(review): the opening brace, lane-index declaration, read-size
 * argument, several log-argument lines and the final return are not
 * visible in this excerpt.
 */
1040 enum dc_status dp_get_lane_status_and_lane_adjust(
1041 struct dc_link *link,
1042 const struct link_training_settings *link_training_setting,
1043 union lane_status ln_status[LANE_COUNT_DP_MAX],
1044 union lane_align_status_updated *ln_align,
1045 union lane_adjust ln_adjust[LANE_COUNT_DP_MAX],
1048 unsigned int lane01_status_address = DP_LANE0_1_STATUS;
1049 uint8_t lane_adjust_offset = 4;
1050 unsigned int lane01_adjust_address;
1051 uint8_t dpcd_buf[6] = {0};
1053 enum dc_status status;
1055 if (is_repeater(link, offset)) {
1056 lane01_status_address =
1057 DP_LANE0_1_STATUS_PHY_REPEATER1 +
1058 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1059 lane_adjust_offset = 3;
1062 status = core_link_read_dpcd(
1064 lane01_status_address,
1065 (uint8_t *)(dpcd_buf),
1068 for (lane = 0; lane <
1069 (uint32_t)(link_training_setting->link_settings.lane_count);
1072 ln_status[lane].raw =
1073 get_nibble_at_index(&dpcd_buf[0], lane);
1074 ln_adjust[lane].raw =
1075 get_nibble_at_index(&dpcd_buf[lane_adjust_offset], lane);
1078 ln_align->raw = dpcd_buf[2];
1080 if (is_repeater(link, offset)) {
1081 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
1082 " 0x%X Lane01Status = %x\n 0x%X Lane23Status = %x\n ",
1085 lane01_status_address, dpcd_buf[0],
1086 lane01_status_address + 1, dpcd_buf[1]);
1088 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X Lane01Status = %x\n 0x%X Lane23Status = %x\n ",
1090 lane01_status_address, dpcd_buf[0],
1091 lane01_status_address + 1, dpcd_buf[1]);
1093 lane01_adjust_address = DP_ADJUST_REQUEST_LANE0_1;
1095 if (is_repeater(link, offset))
1096 lane01_adjust_address = DP_ADJUST_REQUEST_LANE0_1_PHY_REPEATER1 +
1097 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1099 if (is_repeater(link, offset)) {
1100 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
1101 " 0x%X Lane01AdjustRequest = %x\n 0x%X Lane23AdjustRequest = %x\n",
1104 lane01_adjust_address,
1105 dpcd_buf[lane_adjust_offset],
1106 lane01_adjust_address + 1,
1107 dpcd_buf[lane_adjust_offset + 1]);
1109 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X Lane01AdjustRequest = %x\n 0x%X Lane23AdjustRequest = %x\n",
1111 lane01_adjust_address,
1112 dpcd_buf[lane_adjust_offset],
1113 lane01_adjust_address + 1,
1114 dpcd_buf[lane_adjust_offset + 1]);
/*
 * Write the driver's per-lane drive settings (dpcd_lane_settings) to the
 * receiver's TRAINING_LANEx_SET DPCD registers, targeting either the DPRX
 * or an LTTPR repeater selected by 'offset', then log what was written
 * (FFE preset for 128b/132b links, VS/PE for 8b/10b links).
 * NOTE(review): decimated listing — original lines are elided between the
 * numbered lines below; code left byte-identical, comments only added.
 */
1120 enum dc_status dpcd_set_lane_settings(
1121 struct dc_link *link,
1122 const struct link_training_settings *link_training_setting,
1125 unsigned int lane0_set_address;
1126 enum dc_status status;
/* Default to the DPRX lane-set address; repeaters use a per-repeater
 * address block offset by (offset - 1). */
1128 lane0_set_address = DP_TRAINING_LANE0_SET;
1130 if (is_repeater(link, offset))
1131 lane0_set_address = DP_TRAINING_LANE0_SET_PHY_REPEATER1 +
1132 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
/* One byte per enabled lane is written in a single AUX burst. */
1134 status = core_link_write_dpcd(link,
1136 (uint8_t *)(link_training_setting->dpcd_lane_settings),
1137 link_training_setting->link_settings.lane_count);
/* Logging only from here on: repeater vs. DPRX, and encoding-specific
 * fields (TX_FFE preset for 128b/132b, VS/PE bits for 8b/10b). */
1139 if (is_repeater(link, offset)) {
1140 #if defined(CONFIG_DRM_AMD_DC_DCN)
1141 if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1142 DP_128b_132b_ENCODING)
1143 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
1144 " 0x%X TX_FFE_PRESET_VALUE = %x\n",
1148 link_training_setting->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
1149 else if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1152 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Repeater ID: %d\n"
1153 " 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
1157 link_training_setting->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
1158 link_training_setting->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
1159 link_training_setting->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
1160 link_training_setting->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
1163 #if defined(CONFIG_DRM_AMD_DC_DCN)
1164 if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1165 DP_128b_132b_ENCODING)
1166 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X TX_FFE_PRESET_VALUE = %x\n",
1169 link_training_setting->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
1170 else if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1173 DC_LOG_HW_LINK_TRAINING("%s\n 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
1176 link_training_setting->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
1177 link_training_setting->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
1178 link_training_setting->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
1179 link_training_setting->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
/*
 * Check whether any enabled lane has its VOLTAGE_SWING_SET at the maximum
 * level (VOLTAGE_SWING_MAX_LEVEL). Used to abort clock recovery when the
 * swing can no longer be raised.
 * NOTE(review): decimated listing — loop body/return lines are elided.
 */
1185 bool dp_is_max_vs_reached(
1186 const struct link_training_settings *lt_settings)
1189 for (lane = 0; lane <
1190 (uint32_t)(lt_settings->link_settings.lane_count);
1192 if (lt_settings->dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET
1193 == VOLTAGE_SWING_MAX_LEVEL)
/*
 * Post-link-training adjustment-request loop (DP POST_LT_ADJ_REQ).
 * Up to POST_LT_ADJ_REQ_LIMIT (6) adjustment rounds; within each round the
 * sink is polled up to POST_LT_ADJ_REQ_TIMEOUT (200) times. If the sink
 * requests different VS/PE than currently driven, the new settings are
 * decided and re-applied via dc_link_dp_set_drive_settings().
 * NOTE(review): decimated listing — loop increments, breaks and returns
 * are elided; code left byte-identical, comments only added.
 */
1200 static bool perform_post_lt_adj_req_sequence(
1201 struct dc_link *link,
1202 struct link_training_settings *lt_settings)
1204 enum dc_lane_count lane_count =
1205 lt_settings->link_settings.lane_count;
1207 uint32_t adj_req_count;
1208 uint32_t adj_req_timer;
1209 bool req_drv_setting_changed;
1212 req_drv_setting_changed = false;
1213 for (adj_req_count = 0; adj_req_count < POST_LT_ADJ_REQ_LIMIT;
1216 req_drv_setting_changed = false;
/* Inner poll loop: wait for the sink to either clear
 * POST_LT_ADJ_REQ_IN_PROGRESS or request new drive settings. */
1218 for (adj_req_timer = 0;
1219 adj_req_timer < POST_LT_ADJ_REQ_TIMEOUT;
1222 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
1223 union lane_align_status_updated
1224 dpcd_lane_status_updated;
1225 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
1227 dp_get_lane_status_and_lane_adjust(
1231 &dpcd_lane_status_updated,
/* Sink no longer requesting adjustment: nothing to do. */
1235 if (dpcd_lane_status_updated.bits.
1236 POST_LT_ADJ_REQ_IN_PROGRESS == 0)
/* Link quality dropped (CR/EQ/symbol-lock/alignment lost). */
1239 if (!dp_is_cr_done(lane_count, dpcd_lane_status))
1242 if (!dp_is_ch_eq_done(lane_count, dpcd_lane_status) ||
1243 !dp_is_symbol_locked(lane_count, dpcd_lane_status) ||
1244 !dp_is_interlane_aligned(dpcd_lane_status_updated))
/* Detect whether the sink asks for different VS/PE on any lane. */
1247 for (lane = 0; lane < (uint32_t)(lane_count); lane++) {
1250 dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET !=
1251 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_LANE ||
1252 lt_settings->dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET !=
1253 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_LANE) {
1255 req_drv_setting_changed = true;
/* Apply the sink-requested settings and start a new round. */
1260 if (req_drv_setting_changed) {
1261 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1262 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1264 dc_link_dp_set_drive_settings(link,
1272 if (!req_drv_setting_changed) {
1273 DC_LOG_WARNING("%s: Post Link Training Adjust Request Timed out\n",
1280 DC_LOG_WARNING("%s: Post Link Training Adjust Request limit reached\n",
1288 /* Only used for channel equalization */
/*
 * Translate a DPCD TRAINING_AUX_RD_INTERVAL code into microseconds.
 * Default (code 0 / unknown) is 400us; other codes map to 4/8/12/16ms,
 * with DCN builds additionally handling 32ms and 64ms codes.
 * NOTE(review): decimated listing — the case labels for each interval
 * code are elided; mapping below is what the visible lines show.
 */
1289 uint32_t dp_translate_training_aux_read_interval(uint32_t dpcd_aux_read_interval)
1291 unsigned int aux_rd_interval_us = 400;
1293 switch (dpcd_aux_read_interval) {
1295 aux_rd_interval_us = 4000;
1298 aux_rd_interval_us = 8000;
1301 aux_rd_interval_us = 12000;
1304 aux_rd_interval_us = 16000;
1306 #if defined(CONFIG_DRM_AMD_DC_DCN)
1308 aux_rd_interval_us = 32000;
1311 aux_rd_interval_us = 64000;
1318 return aux_rd_interval_us;
/*
 * Map per-lane CR_DONE status to a specific clock-recovery failure code:
 * the first lane (in order 0, 1, 2, 3) without CR_DONE determines the
 * result; lanes 2 and 3 share LINK_TRAINING_CR_FAIL_LANE23.
 * Returns LINK_TRAINING_SUCCESS if all enabled lanes have CR done.
 */
1321 enum link_training_result dp_get_cr_failure(enum dc_lane_count ln_count,
1322 union lane_status *dpcd_lane_status)
1324 enum link_training_result result = LINK_TRAINING_SUCCESS;
1326 if (ln_count >= LANE_COUNT_ONE && !dpcd_lane_status[0].bits.CR_DONE_0)
1327 result = LINK_TRAINING_CR_FAIL_LANE0;
1328 else if (ln_count >= LANE_COUNT_TWO && !dpcd_lane_status[1].bits.CR_DONE_0)
1329 result = LINK_TRAINING_CR_FAIL_LANE1;
1330 else if (ln_count >= LANE_COUNT_FOUR && !dpcd_lane_status[2].bits.CR_DONE_0)
1331 result = LINK_TRAINING_CR_FAIL_LANE23;
1332 else if (ln_count >= LANE_COUNT_FOUR && !dpcd_lane_status[3].bits.CR_DONE_0)
1333 result = LINK_TRAINING_CR_FAIL_LANE23;
/*
 * Channel-equalization phase of 8b/10b link training for the DPRX or an
 * LTTPR (selected by 'offset'). Repeats up to LINK_TRAINING_MAX_RETRY_COUNT:
 * program HW lane settings, write pattern+settings to DPCD (burst on first
 * iteration only), wait the EQ aux-rd interval, then check CR still held and
 * EQ/symbol-lock/alignment done. Returns EQ_FAIL_CR if CR drops, SUCCESS on
 * full EQ, EQ_FAIL_EQ after retries are exhausted.
 * NOTE(review): decimated listing — some statements are elided between the
 * numbered lines; code left byte-identical, comments only added.
 */
1337 static enum link_training_result perform_channel_equalization_sequence(
1338 struct dc_link *link,
1339 struct link_training_settings *lt_settings,
1342 enum dc_dp_training_pattern tr_pattern;
1343 uint32_t retries_ch_eq;
1344 uint32_t wait_time_microsec;
1345 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
1346 union lane_align_status_updated dpcd_lane_status_updated = {0};
1347 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
1348 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
1350 /* Note: also check that TPS4 is a supported feature*/
1351 tr_pattern = lt_settings->pattern_for_eq;
/* LTTPR hops train EQ with TPS4 (8b/10b only on DCN builds). */
1353 #if defined(CONFIG_DRM_AMD_DC_DCN)
1354 if (is_repeater(link, offset) && dp_get_link_encoding_format(&lt_settings->link_settings) == DP_8b_10b_ENCODING)
1355 tr_pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
1357 if (is_repeater(link, offset))
1358 tr_pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
1361 dp_set_hw_training_pattern(link, tr_pattern, offset);
1363 for (retries_ch_eq = 0; retries_ch_eq <= LINK_TRAINING_MAX_RETRY_COUNT;
1366 dp_set_hw_lane_settings(link, lt_settings, offset);
1370 /* EPR #361076 - write as a 5-byte burst,
1371 * but only for the 1-st iteration
1374 dpcd_set_lt_pattern_and_lane_settings(
1377 tr_pattern, offset);
1379 dpcd_set_lane_settings(link, lt_settings, offset);
1381 /* 3. wait for receiver to lock-on*/
1382 wait_time_microsec = lt_settings->eq_pattern_time;
/* For repeater hops, use the hop's advertised AUX read interval. */
1384 if (is_repeater(link, offset))
1385 wait_time_microsec =
1386 dp_translate_training_aux_read_interval(
1387 link->dpcd_caps.lttpr_caps.aux_rd_interval[offset - 1]);
/* Vendor workaround: force a 16ms wait on fixed-VS transparent links. */
1389 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
1390 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
1391 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1392 wait_time_microsec = 16000;
1395 dp_wait_for_training_aux_rd_interval(
1397 wait_time_microsec);
1399 /* 4. Read lane status and requested
1400 * drive settings as set by the sink*/
1402 dp_get_lane_status_and_lane_adjust(
1406 &dpcd_lane_status_updated,
1410 /* 5. check CR done*/
1411 if (!dp_is_cr_done(lane_count, dpcd_lane_status))
1412 return LINK_TRAINING_EQ_FAIL_CR;
1414 /* 6. check CHEQ done*/
1415 if (dp_is_ch_eq_done(lane_count, dpcd_lane_status) &&
1416 dp_is_symbol_locked(lane_count, dpcd_lane_status) &&
1417 dp_is_interlane_aligned(dpcd_lane_status_updated))
1418 return LINK_TRAINING_SUCCESS;
1420 /* 7. update VS/PE/PC2 in lt_settings*/
1421 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1422 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1425 return LINK_TRAINING_EQ_FAIL_EQ;
/*
 * Start transmitting the clock-recovery pattern (TPS1) and program lane
 * settings ahead of the normal CR sequence (lt_early_cr_pattern
 * workaround path).
 */
1429 static void start_clock_recovery_pattern_early(struct dc_link *link,
1430 struct link_training_settings *lt_settings,
1433 DC_LOG_HW_LINK_TRAINING("%s\n GPU sends TPS1. Wait 400us.\n",
1435 dp_set_hw_training_pattern(link, lt_settings->pattern_for_cr, offset);
1436 dp_set_hw_lane_settings(link, lt_settings, offset);
/*
 * Clock-recovery phase of link training for the DPRX or an LTTPR hop
 * ('offset'). Loops until CR done on all lanes, bounded by
 * LINK_TRAINING_MAX_RETRY_COUNT same-setting retries and
 * LINK_TRAINING_MAX_CR_RETRY total iterations; aborts early when max
 * voltage swing is reached (8b/10b). On failure returns the lane-specific
 * code from dp_get_cr_failure().
 * NOTE(review): decimated listing — retry-counter increments, some retry
 * bookkeeping and returns are elided; code left byte-identical, comments
 * only added. retries_cr/retry_count appear uninitialized in the visible
 * lines — presumably initialized in an elided line; verify against the
 * full source.
 */
1440 static enum link_training_result perform_clock_recovery_sequence(
1441 struct dc_link *link,
1442 struct link_training_settings *lt_settings,
1445 uint32_t retries_cr;
1446 uint32_t retry_count;
1447 uint32_t wait_time_microsec;
1448 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
1449 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
1450 union lane_align_status_updated dpcd_lane_status_updated;
1451 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
/* Skip setting the CR pattern if it was already started early. */
1456 if (!link->ctx->dc->work_arounds.lt_early_cr_pattern)
1457 dp_set_hw_training_pattern(link, lt_settings->pattern_for_cr, offset);
1459 /* najeeb - The synaptics MST hub can put the LT in
1460 * infinite loop by switching the VS
1462 /* between level 0 and level 1 continuously, here
1463 * we try for CR lock for LinkTrainingMaxCRRetry count*/
1464 while ((retries_cr < LINK_TRAINING_MAX_RETRY_COUNT) &&
1465 (retry_count < LINK_TRAINING_MAX_CR_RETRY)) {
1467 memset(&dpcd_lane_status, '\0', sizeof(dpcd_lane_status));
1468 memset(&dpcd_lane_status_updated, '\0',
1469 sizeof(dpcd_lane_status_updated));
1471 /* 1. call HWSS to set lane settings*/
1472 dp_set_hw_lane_settings(
1477 /* 2. update DPCD of the receiver*/
1479 /* EPR #361076 - write as a 5-byte burst,
1480 * but only for the 1-st iteration.*/
1481 dpcd_set_lt_pattern_and_lane_settings(
1484 lt_settings->pattern_for_cr,
1487 dpcd_set_lane_settings(
1492 /* 3. wait receiver to lock-on*/
1493 wait_time_microsec = lt_settings->cr_pattern_time;
1495 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT)
1496 wait_time_microsec = TRAINING_AUX_RD_INTERVAL;
/* Vendor workaround: fixed 16ms wait for fixed-VS parts. */
1498 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
1499 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN)) {
1500 wait_time_microsec = 16000;
1503 dp_wait_for_training_aux_rd_interval(
1505 wait_time_microsec);
1507 /* 4. Read lane status and requested drive
1508 * settings as set by the sink
1510 dp_get_lane_status_and_lane_adjust(
1514 &dpcd_lane_status_updated,
/* Vendor workarounds applied after each status read. */
1518 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
1519 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
1520 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1521 vendor_specific_lttpr_wa_one_end(link, retry_count);
1522 vendor_specific_lttpr_wa_three(link, dpcd_lane_adjust);
1525 /* 5. check CR done*/
1526 if (dp_is_cr_done(lane_count, dpcd_lane_status))
1527 return LINK_TRAINING_SUCCESS;
1529 /* 6. max VS reached*/
1530 #if defined(CONFIG_DRM_AMD_DC_DCN)
1531 if ((dp_get_link_encoding_format(&lt_settings->link_settings) ==
1532 DP_8b_10b_ENCODING) &&
1533 dp_is_max_vs_reached(lt_settings))
1536 if (dp_is_max_vs_reached(lt_settings))
1540 /* 7. same lane settings*/
1541 /* Note: settings are the same for all lanes,
1542 * so comparing first lane is sufficient*/
1543 if ((dp_get_link_encoding_format(&lt_settings->link_settings) == DP_8b_10b_ENCODING) &&
1544 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET ==
1545 dpcd_lane_adjust[0].bits.VOLTAGE_SWING_LANE)
1547 #if defined(CONFIG_DRM_AMD_DC_DCN)
1548 else if ((dp_get_link_encoding_format(&lt_settings->link_settings) == DP_128b_132b_ENCODING) &&
1549 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE ==
1550 dpcd_lane_adjust[0].tx_ffe.PRESET_VALUE)
1556 /* 8. update VS/PE/PC2 in lt_settings*/
1557 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1558 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1562 if (retry_count >= LINK_TRAINING_MAX_CR_RETRY) {
1564 DC_LOG_ERROR("%s: Link Training Error, could not get CR after %d tries. Possibly voltage swing issue",
1566 LINK_TRAINING_MAX_CR_RETRY);
1570 return dp_get_cr_failure(lane_count, dpcd_lane_status);
/*
 * Finish link training: switch the main link to the video-idle pattern,
 * optionally run the POST_LT_ADJ_REQ sequence (only when the sink supports
 * it and TPS4 was not used for EQ), verify the link did not drop for
 * non-eDP sinks, then write LANE_COUNT_SET with POST_LT_ADJ_REQ_GRANTED
 * cleared. Returns the (possibly downgraded) training status.
 * NOTE(review): decimated listing — some lines are elided between the
 * numbered lines below.
 */
1573 static inline enum link_training_result dp_transition_to_video_idle(
1574 struct dc_link *link,
1575 struct link_training_settings *lt_settings,
1576 enum link_training_result status)
1578 union lane_count_set lane_count_set = {0};
1580 /* 4. mainlink output idle pattern*/
1581 dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
1584 * 5. post training adjust if required
1585 * If the upstream DPTX and downstream DPRX both support TPS4,
1586 * TPS4 must be used instead of POST_LT_ADJ_REQ.
1588 if (link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED != 1 ||
1589 #if defined(CONFIG_DRM_AMD_DC_DCN)
1590 lt_settings->pattern_for_eq >= DP_TRAINING_PATTERN_SEQUENCE_4) {
1592 lt_settings->pattern_for_eq == DP_TRAINING_PATTERN_SEQUENCE_4) {
1594 /* delay 5ms after Main Link output idle pattern and then check
1597 if (link->connector_signal != SIGNAL_TYPE_EDP && status == LINK_TRAINING_SUCCESS) {
1599 status = dp_check_link_loss_status(link, lt_settings);
/* Run POST_LT_ADJ_REQ; failure downgrades to LQA_FAIL. */
1604 if (status == LINK_TRAINING_SUCCESS &&
1605 perform_post_lt_adj_req_sequence(link, lt_settings) == false)
1606 status = LINK_TRAINING_LQA_FAIL;
1608 lane_count_set.bits.LANE_COUNT_SET = lt_settings->link_settings.lane_count;
1609 lane_count_set.bits.ENHANCED_FRAMING = lt_settings->enhanced_framing;
1610 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
1612 core_link_write_dpcd(
1615 &lane_count_set.raw,
1616 sizeof(lane_count_set));
/*
 * Re-read the sink's lane status registers and report
 * LINK_TRAINING_LINK_LOSS if any active lane has lost channel EQ, clock
 * recovery or symbol lock; otherwise LINK_TRAINING_SUCCESS.
 * NOTE(review): decimated listing — the DPCD read address and loop
 * closing lines are elided.
 */
1621 enum link_training_result dp_check_link_loss_status(
1622 struct dc_link *link,
1623 const struct link_training_settings *link_training_setting)
1625 enum link_training_result status = LINK_TRAINING_SUCCESS;
1626 union lane_status lane_status;
1627 uint8_t dpcd_buf[6] = {0};
1630 core_link_read_dpcd(
1633 (uint8_t *)(dpcd_buf),
1636 /*parse lane status*/
1637 for (lane = 0; lane < link->cur_link_settings.lane_count; lane++) {
1639 * check lanes status
/* Lane status nibbles start at dpcd_buf[2] in this buffer layout. */
1641 lane_status.raw = get_nibble_at_index(&dpcd_buf[2], lane);
1643 if (!lane_status.bits.CHANNEL_EQ_DONE_0 ||
1644 !lane_status.bits.CR_DONE_0 ||
1645 !lane_status.bits.SYMBOL_LOCKED_0) {
1646 /* if one of the channel equalization, clock
1647 * recovery or symbol lock is dropped
1648 * consider it as (link has been
1649 * dropped) dp sink status has changed
1651 status = LINK_TRAINING_LINK_LOSS;
/*
 * Populate lt_settings with defaults for an 8b/10b link-training session:
 * copy the requested link settings, pick spread based on link->dp_ss_off,
 * derive CR/EQ aux-read intervals and training patterns, enable enhanced
 * framing and FEC-ready, and seed the DPCD lane settings from the HW lane
 * settings.
 */
1659 static inline void decide_8b_10b_training_settings(
1660 struct dc_link *link,
1661 const struct dc_link_settings *link_setting,
1662 struct link_training_settings *lt_settings)
1664 memset(lt_settings, '\0', sizeof(struct link_training_settings));
1666 /* Initialize link settings */
1667 lt_settings->link_settings.use_link_rate_set = link_setting->use_link_rate_set;
1668 lt_settings->link_settings.link_rate_set = link_setting->link_rate_set;
1669 lt_settings->link_settings.link_rate = link_setting->link_rate;
1670 lt_settings->link_settings.lane_count = link_setting->lane_count;
1671 /* TODO hard coded to SS for now
1672 * lt_settings.link_settings.link_spread =
1673 * dal_display_path_is_ss_supported(
1674 * path_mode->display_path) ?
1675 * LINK_SPREAD_05_DOWNSPREAD_30KHZ :
1676 * LINK_SPREAD_DISABLED;
1678 lt_settings->link_settings.link_spread = link->dp_ss_off ?
1679 LINK_SPREAD_DISABLED : LINK_SPREAD_05_DOWNSPREAD_30KHZ;
1680 lt_settings->lttpr_mode = link->lttpr_mode;
1681 lt_settings->cr_pattern_time = get_cr_training_aux_rd_interval(link, link_setting);
1682 lt_settings->eq_pattern_time = get_eq_training_aux_rd_interval(link, link_setting);
1683 lt_settings->pattern_for_cr = decide_cr_training_pattern(link_setting);
1684 lt_settings->pattern_for_eq = decide_eq_training_pattern(link, link_setting);
1685 lt_settings->enhanced_framing = 1;
1686 lt_settings->should_set_fec_ready = true;
1687 lt_settings->disallow_per_lane_settings = true;
1688 lt_settings->always_match_dpcd_with_hw_lane_settings = true;
1689 dp_hw_to_dpcd_lane_settings(lt_settings, lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1692 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * Populate lt_settings for a 128b/132b (DP 2.x) training session: fixed
 * EQ/CDS pattern times (2.5ms), EQ wait limit 400ms, EQ loop limit 20,
 * CDS wait limit scaled by (repeater count + 1) * 20ms, and LTTPR mode
 * chosen non-transparent when any repeaters are present.
 */
1693 static inline void decide_128b_132b_training_settings(struct dc_link *link,
1694 const struct dc_link_settings *link_settings,
1695 struct link_training_settings *lt_settings)
1697 memset(lt_settings, 0, sizeof(*lt_settings));
1699 lt_settings->link_settings = *link_settings;
1700 /* TODO: should decide link spread when populating link_settings */
1701 lt_settings->link_settings.link_spread = link->dp_ss_off ? LINK_SPREAD_DISABLED :
1702 LINK_SPREAD_05_DOWNSPREAD_30KHZ;
1704 lt_settings->pattern_for_cr = decide_cr_training_pattern(link_settings);
1705 lt_settings->pattern_for_eq = decide_eq_training_pattern(link, link_settings);
1706 lt_settings->eq_pattern_time = 2500;
1707 lt_settings->eq_wait_time_limit = 400000;
1708 lt_settings->eq_loop_count_limit = 20;
1709 lt_settings->pattern_for_cds = DP_128b_132b_TPS2_CDS;
1710 lt_settings->cds_pattern_time = 2500;
1711 lt_settings->cds_wait_time_limit = (dp_convert_to_count(
1712 link->dpcd_caps.lttpr_caps.phy_repeater_cnt) + 1) * 20000;
1713 lt_settings->lttpr_mode = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt) ?
1714 LTTPR_MODE_NON_TRANSPARENT : LTTPR_MODE_TRANSPARENT;
1715 lt_settings->disallow_per_lane_settings = true;
1716 dp_hw_to_dpcd_lane_settings(lt_settings,
1717 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
/*
 * Dispatch to the encoding-specific settings initializer based on the
 * link's channel coding (8b/10b vs. 128b/132b, the latter DCN-only).
 */
1721 void dp_decide_training_settings(
1722 struct dc_link *link,
1723 const struct dc_link_settings *link_settings,
1724 struct link_training_settings *lt_settings)
1726 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING)
1727 decide_8b_10b_training_settings(link, link_settings, lt_settings);
1728 #if defined(CONFIG_DRM_AMD_DC_DCN)
1729 else if (dp_get_link_encoding_format(link_settings) == DP_128b_132b_ENCODING)
1730 decide_128b_132b_training_settings(link, link_settings, lt_settings);
/*
 * Apply caller/debug overrides on top of the decided training settings:
 * link spread, per-lane VS/PE/PC2 (and FFE preset on DCN), BIOS-forced
 * drive values for fixed-VS transparent-LTTPR links, pattern/timing
 * overrides, and the preferred FEC setting. Re-derives DPCD lane settings
 * from the (possibly overridden) HW lane settings at the end.
 * NOTE(review): decimated listing — some lines (e.g. lane loop variable
 * declaration) are elided.
 */
1734 static void override_training_settings(
1735 struct dc_link *link,
1736 const struct dc_link_training_overrides *overrides,
1737 struct link_training_settings *lt_settings)
1741 /* Override link spread */
1742 if (!link->dp_ss_off && overrides->downspread != NULL)
1743 lt_settings->link_settings.link_spread = *overrides->downspread ?
1744 LINK_SPREAD_05_DOWNSPREAD_30KHZ
1745 : LINK_SPREAD_DISABLED;
1747 /* Override lane settings */
1748 if (overrides->voltage_swing != NULL)
1749 lt_settings->voltage_swing = overrides->voltage_swing;
1750 if (overrides->pre_emphasis != NULL)
1751 lt_settings->pre_emphasis = overrides->pre_emphasis;
1752 if (overrides->post_cursor2 != NULL)
1753 lt_settings->post_cursor2 = overrides->post_cursor2;
1754 #if defined(CONFIG_DRM_AMD_DC_DCN)
1755 if (overrides->ffe_preset != NULL)
1756 lt_settings->ffe_preset = overrides->ffe_preset;
1758 /* Override HW lane settings with BIOS forced values if present */
1759 if (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN &&
1760 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1761 lt_settings->voltage_swing = &link->bios_forced_drive_settings.VOLTAGE_SWING;
1762 lt_settings->pre_emphasis = &link->bios_forced_drive_settings.PRE_EMPHASIS;
1763 lt_settings->always_match_dpcd_with_hw_lane_settings = false;
/* Expand the single override pointers into all per-lane entries,
 * falling back to level-0/disabled when no override is set. */
1765 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
1766 lt_settings->lane_settings[lane].VOLTAGE_SWING =
1767 lt_settings->voltage_swing != NULL ?
1768 *lt_settings->voltage_swing :
1769 VOLTAGE_SWING_LEVEL0;
1770 lt_settings->lane_settings[lane].PRE_EMPHASIS =
1771 lt_settings->pre_emphasis != NULL ?
1772 *lt_settings->pre_emphasis
1773 : PRE_EMPHASIS_DISABLED;
1774 lt_settings->lane_settings[lane].POST_CURSOR2 =
1775 lt_settings->post_cursor2 != NULL ?
1776 *lt_settings->post_cursor2
1777 : POST_CURSOR2_DISABLED;
1780 dp_hw_to_dpcd_lane_settings(lt_settings,
1781 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1783 /* Initialize training timings */
1784 if (overrides->cr_pattern_time != NULL)
1785 lt_settings->cr_pattern_time = *overrides->cr_pattern_time;
1787 if (overrides->eq_pattern_time != NULL)
1788 lt_settings->eq_pattern_time = *overrides->eq_pattern_time;
1790 if (overrides->pattern_for_cr != NULL)
1791 lt_settings->pattern_for_cr = *overrides->pattern_for_cr;
1792 if (overrides->pattern_for_eq != NULL)
1793 lt_settings->pattern_for_eq = *overrides->pattern_for_eq;
1795 if (overrides->enhanced_framing != NULL)
1796 lt_settings->enhanced_framing = *overrides->enhanced_framing;
1798 if (link->preferred_training_settings.fec_enable != NULL)
1799 lt_settings->should_set_fec_ready = *link->preferred_training_settings.fec_enable;
/*
 * Convert the one-hot PHY_REPEATER_CNT DPCD encoding (0x80 = 1 repeater,
 * 0x40 = 2, ... 0x01 = 8) into a plain repeater count; any other value is
 * invalid and yields 0.
 * NOTE(review): decimated listing — the per-case return statements are
 * elided; only the case labels and the final fallback return are visible.
 */
1802 uint8_t dp_convert_to_count(uint8_t lttpr_repeater_count)
1804 switch (lttpr_repeater_count) {
1805 case 0x80: // 1 lttpr repeater
1807 case 0x40: // 2 lttpr repeaters
1809 case 0x20: // 3 lttpr repeaters
1811 case 0x10: // 4 lttpr repeaters
1813 case 0x08: // 5 lttpr repeaters
1815 case 0x04: // 6 lttpr repeaters
1817 case 0x02: // 7 lttpr repeaters
1819 case 0x01: // 8 lttpr repeaters
1824 return 0; // invalid value
/*
 * Program the LTTPR chain into transparent mode by writing
 * DP_PHY_REPEATER_MODE_TRANSPARENT to the PHY_REPEATER_MODE DPCD register.
 * Returns the AUX write status.
 */
1827 static enum dc_status configure_lttpr_mode_transparent(struct dc_link *link)
1829 uint8_t repeater_mode = DP_PHY_REPEATER_MODE_TRANSPARENT;
1831 DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Transparent Mode\n", __func__);
1832 return core_link_write_dpcd(link,
1833 DP_PHY_REPEATER_MODE,
1834 (uint8_t *)&repeater_mode,
1835 sizeof(repeater_mode));
/*
 * Switch the LTTPR chain into non-transparent mode. For 8b/10b links the
 * mode register is first reset to transparent, then (if the link is in
 * non-transparent mode) set to non-transparent, and each repeater's
 * TRAINING_AUX_RD_INTERVAL is read and cached (masked to 7 bits). For a
 * USB4 DPIA link the first (DPTX-to-DPIA) hop's interval is cleared
 * instead of being trained.
 * NOTE(review): decimated listing — some closing braces/early statements
 * are elided between the numbered lines below.
 */
1838 static enum dc_status configure_lttpr_mode_non_transparent(
1839 struct dc_link *link,
1840 const struct link_training_settings *lt_settings)
1842 /* aux timeout is already set to extended */
1843 /* RESET/SET lttpr mode to enable non transparent mode */
1844 uint8_t repeater_cnt;
1845 uint32_t aux_interval_address;
1846 uint8_t repeater_id;
1847 enum dc_status result = DC_ERROR_UNEXPECTED;
1848 uint8_t repeater_mode = DP_PHY_REPEATER_MODE_TRANSPARENT;
1850 enum dp_link_encoding encoding = dp_get_link_encoding_format(&lt_settings->link_settings);
/* 8b/10b requires a transparent-mode write before enabling
 * non-transparent mode. */
1852 if (encoding == DP_8b_10b_ENCODING) {
1853 DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Transparent Mode\n", __func__);
1854 result = core_link_write_dpcd(link,
1855 DP_PHY_REPEATER_MODE,
1856 (uint8_t *)&repeater_mode,
1857 sizeof(repeater_mode));
1861 if (result == DC_OK) {
1862 link->dpcd_caps.lttpr_caps.mode = repeater_mode;
1865 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
1867 DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Non Transparent Mode\n", __func__);
1869 repeater_mode = DP_PHY_REPEATER_MODE_NON_TRANSPARENT;
1870 result = core_link_write_dpcd(link,
1871 DP_PHY_REPEATER_MODE,
1872 (uint8_t *)&repeater_mode,
1873 sizeof(repeater_mode));
1875 if (result == DC_OK) {
1876 link->dpcd_caps.lttpr_caps.mode = repeater_mode;
1879 if (encoding == DP_8b_10b_ENCODING) {
1880 repeater_cnt = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
1882 /* Driver does not need to train the first hop. Skip DPCD read and clear
1883 * AUX_RD_INTERVAL for DPTX-to-DPIA hop.
1885 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA)
1886 link->dpcd_caps.lttpr_caps.aux_rd_interval[--repeater_cnt] = 0;
/* Cache each remaining repeater's advertised AUX read interval. */
1888 for (repeater_id = repeater_cnt; repeater_id > 0; repeater_id--) {
1889 aux_interval_address = DP_TRAINING_AUX_RD_INTERVAL_PHY_REPEATER1 +
1890 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (repeater_id - 1));
1891 core_link_read_dpcd(
1893 aux_interval_address,
1894 (uint8_t *)&link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1],
1895 sizeof(link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1]));
1896 link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1] &= 0x7F;
/*
 * Mark training complete on one LTTPR hop: write
 * DPCD_TRAINING_PATTERN_VIDEOIDLE to that repeater's
 * TRAINING_PATTERN_SET register and log the write.
 */
1904 static void repeater_training_done(struct dc_link *link, uint32_t offset)
1906 union dpcd_training_pattern dpcd_pattern = {0};
1908 const uint32_t dpcd_base_lt_offset =
1909 DP_TRAINING_PATTERN_SET_PHY_REPEATER1 +
1910 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1911 /* Set training not in progress*/
1912 dpcd_pattern.v1_4.TRAINING_PATTERN_SET = DPCD_TRAINING_PATTERN_VIDEOIDLE;
1914 core_link_write_dpcd(
1916 dpcd_base_lt_offset,
1920 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Id: %d 0x%X pattern = %x\n",
1923 dpcd_base_lt_offset,
1924 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
/*
 * Emit a one-line connectivity log summarizing the training outcome:
 * link rate, lane count, result string, first-lane VS/PE, and downspread.
 * Pure logging — no hardware or DPCD access.
 * NOTE(review): decimated listing — most string assignments and 'break'
 * statements inside the switches are elided.
 */
1927 static void print_status_message(
1928 struct dc_link *link,
1929 const struct link_training_settings *lt_settings,
1930 enum link_training_result status)
1932 char *link_rate = "Unknown";
1933 char *lt_result = "Unknown";
1934 char *lt_spread = "Disabled";
/* Map link rate enum to a human-readable string. */
1936 switch (lt_settings->link_settings.link_rate) {
1940 case LINK_RATE_RATE_2:
1943 case LINK_RATE_RATE_3:
1946 case LINK_RATE_HIGH:
1949 case LINK_RATE_RBR2:
1952 case LINK_RATE_RATE_6:
1955 case LINK_RATE_HIGH2:
1958 case LINK_RATE_HIGH3:
1961 #if defined(CONFIG_DRM_AMD_DC_DCN)
1962 case LINK_RATE_UHBR10:
1963 link_rate = "UHBR10";
1965 case LINK_RATE_UHBR13_5:
1966 link_rate = "UHBR13.5";
1968 case LINK_RATE_UHBR20:
1969 link_rate = "UHBR20";
/* Map training result enum to a human-readable string. */
1977 case LINK_TRAINING_SUCCESS:
1980 case LINK_TRAINING_CR_FAIL_LANE0:
1981 lt_result = "CR failed lane0";
1983 case LINK_TRAINING_CR_FAIL_LANE1:
1984 lt_result = "CR failed lane1";
1986 case LINK_TRAINING_CR_FAIL_LANE23:
1987 lt_result = "CR failed lane23";
1989 case LINK_TRAINING_EQ_FAIL_CR:
1990 lt_result = "CR failed in EQ";
1992 case LINK_TRAINING_EQ_FAIL_EQ:
1993 lt_result = "EQ failed";
1995 case LINK_TRAINING_LQA_FAIL:
1996 lt_result = "LQA failed";
1998 case LINK_TRAINING_LINK_LOSS:
1999 lt_result = "Link loss";
2001 #if defined(CONFIG_DRM_AMD_DC_DCN)
2002 case DP_128b_132b_LT_FAILED:
2003 lt_result = "LT_FAILED received";
2005 case DP_128b_132b_MAX_LOOP_COUNT_REACHED:
2006 lt_result = "max loop count reached";
2008 case DP_128b_132b_CHANNEL_EQ_DONE_TIMEOUT:
2009 lt_result = "channel EQ timeout";
2011 case DP_128b_132b_CDS_DONE_TIMEOUT:
2012 lt_result = "CDS timeout";
/* Map spread setting to a human-readable string. */
2019 switch (lt_settings->link_settings.link_spread) {
2020 case LINK_SPREAD_DISABLED:
2021 lt_spread = "Disabled";
2023 case LINK_SPREAD_05_DOWNSPREAD_30KHZ:
2024 lt_spread = "0.5% 30KHz";
2026 case LINK_SPREAD_05_DOWNSPREAD_33KHZ:
2027 lt_spread = "0.5% 33KHz";
2033 /* Connectivity log: link training */
2034 #if defined(CONFIG_DRM_AMD_DC_DCN)
2035 /* TODO - DP2.0 Log: add connectivity log for FFE PRESET */
2037 CONN_MSG_LT(link, "%sx%d %s VS=%d, PE=%d, DS=%s",
2039 lt_settings->link_settings.lane_count,
2041 lt_settings->lane_settings[0].VOLTAGE_SWING,
2042 lt_settings->lane_settings[0].PRE_EMPHASIS,
/*
 * Push the current drive settings to hardware and to the sink: program
 * the ASIC PHY, mirror HW lane settings into DPCD lane settings, then
 * write them to the DPRX via dpcd_set_lane_settings().
 */
2046 void dc_link_dp_set_drive_settings(
2047 struct dc_link *link,
2048 struct link_training_settings *lt_settings)
2050 /* program ASIC PHY settings*/
2051 dp_set_hw_lane_settings(link, lt_settings, DPRX);
2053 dp_hw_to_dpcd_lane_settings(lt_settings,
2054 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2056 /* Notify DP sink the PHY settings from source */
2057 dpcd_set_lane_settings(link, lt_settings, DPRX);
/*
 * "Blind" link training that never touches AUX/DPCD: transmit the CR and
 * EQ patterns for their configured durations with fixed lane settings,
 * then switch to video idle and log success. Useful when the sink cannot
 * be reached over AUX but is known to train with defaults.
 * NOTE(review): decimated listing — argument lists of the first two calls
 * and the final return are elided.
 */
2060 bool dc_link_dp_perform_link_training_skip_aux(
2061 struct dc_link *link,
2062 const struct dc_link_settings *link_setting)
2064 struct link_training_settings lt_settings = {0};
2066 dp_decide_training_settings(
2070 override_training_settings(
2072 &link->preferred_training_settings,
2075 /* 1. Perform_clock_recovery_sequence. */
2077 /* transmit training pattern for clock recovery */
2078 dp_set_hw_training_pattern(link, lt_settings.pattern_for_cr, DPRX);
2080 /* call HWSS to set lane settings*/
2081 dp_set_hw_lane_settings(link, &lt_settings, DPRX);
2083 /* wait receiver to lock-on*/
2084 dp_wait_for_training_aux_rd_interval(link, lt_settings.cr_pattern_time);
2086 /* 2. Perform_channel_equalization_sequence. */
2088 /* transmit training pattern for channel equalization. */
2089 dp_set_hw_training_pattern(link, lt_settings.pattern_for_eq, DPRX);
2091 /* call HWSS to set lane settings*/
2092 dp_set_hw_lane_settings(link, &lt_settings, DPRX);
2094 /* wait receiver to lock-on. */
2095 dp_wait_for_training_aux_rd_interval(link, lt_settings.eq_pattern_time);
2097 /* 3. Perform_link_training_int. */
2099 /* Mainlink output idle pattern. */
2100 dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
2102 print_status_message(link, &lt_settings, LINK_TRAINING_SUCCESS);
/*
 * Configure the LTTPR chain per the decided lttpr_mode: transparent or
 * non-transparent. Returns DC_OK unchanged when no LTTPR configuration
 * is needed.
 */
2107 enum dc_status dpcd_configure_lttpr_mode(struct dc_link *link, struct link_training_settings *lt_settings)
2109 enum dc_status status = DC_OK;
2111 if (lt_settings->lttpr_mode == LTTPR_MODE_TRANSPARENT)
2112 status = configure_lttpr_mode_transparent(link);
2114 else if (lt_settings->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT)
2115 status = configure_lttpr_mode_non_transparent(link, lt_settings);
/*
 * Leave training mode: clear TRAINING_PATTERN_SET to video idle, and on
 * DCN builds poll SINK_STATUS (up to 10 reads) until the intra-hop AUX
 * reply indication clears.
 * NOTE(review): decimated listing — loop variable declaration and
 * delay/break lines are elided.
 */
2120 static void dpcd_exit_training_mode(struct dc_link *link)
2122 #if defined(CONFIG_DRM_AMD_DC_DCN)
2123 uint8_t sink_status = 0;
2127 /* clear training pattern set */
2128 dpcd_set_training_pattern(link, DP_TRAINING_PATTERN_VIDEOIDLE);
2130 #if defined(CONFIG_DRM_AMD_DC_DCN)
2131 /* poll for intra-hop disable */
2132 for (i = 0; i < 10; i++) {
2133 if ((core_link_read_dpcd(link, DP_SINK_STATUS, &sink_status, 1) == DC_OK) &&
2134 (sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION) == 0)
/*
 * Write the selected channel coding (8b/10b vs. 128b/132b) to the sink's
 * MAIN_LINK_CHANNEL_CODING_SET DPCD register and log the write. Returns
 * the AUX write status.
 */
2141 enum dc_status dpcd_configure_channel_coding(struct dc_link *link,
2142 struct link_training_settings *lt_settings)
2144 enum dp_link_encoding encoding =
2145 dp_get_link_encoding_format(
2146 &lt_settings->link_settings);
2147 enum dc_status status;
2149 status = core_link_write_dpcd(
2151 DP_MAIN_LINK_CHANNEL_CODING_SET,
2152 (uint8_t *) &encoding,
2154 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X MAIN_LINK_CHANNEL_CODING_SET = %x\n",
2156 DP_MAIN_LINK_CHANNEL_CODING_SET,
2162 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * Read the 128b/132b TRAINING_AUX_RD_INTERVAL DPCD register and convert
 * it to microseconds: (VALUE + 1) * unit, where UNIT selects a 1ms or 2ms
 * unit (max 256ms per spec).
 */
2163 static void dpcd_128b_132b_get_aux_rd_interval(struct dc_link *link,
2164 uint32_t *interval_in_us)
2166 union dp_128b_132b_training_aux_rd_interval dpcd_interval;
2167 uint32_t interval_unit = 0;
2169 dpcd_interval.raw = 0;
2170 core_link_read_dpcd(link, DP_128b_132b_TRAINING_AUX_RD_INTERVAL,
2171 &dpcd_interval.raw, sizeof(dpcd_interval.raw));
2172 interval_unit = dpcd_interval.bits.UNIT ? 1 : 2; /* 0b = 2 ms, 1b = 1 ms */
2173 /* (128b/132b_TRAINING_AUX_RD_INTERVAL value + 1) *
2174 * INTERVAL_UNIT. The maximum is 256 ms
2176 *interval_in_us = (dpcd_interval.bits.VALUE + 1) * interval_unit * 1000;
/*
 * 128b/132b channel-EQ phase: send TPS1 then TPS2 while iterating
 * sink-requested TX FFE presets. First loop polls for per-lane channel-EQ
 * done (bounded by eq_loop_count_limit and LT_FAILED); second loop waits
 * for EQ interlane alignment (bounded by eq_wait_time_limit). Returns the
 * first failure encountered or LINK_TRAINING_SUCCESS.
 * NOTE(review): decimated listing — loop_count declaration/increment and
 * some break statements are elided; code left byte-identical, comments
 * only added.
 */
2179 static enum link_training_result dp_perform_128b_132b_channel_eq_done_sequence(
2180 struct dc_link *link,
2181 struct link_training_settings *lt_settings)
2184 uint32_t aux_rd_interval = 0;
2185 uint32_t wait_time = 0;
2186 union lane_align_status_updated dpcd_lane_status_updated = {0};
2187 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
2188 enum link_training_result status = LINK_TRAINING_SUCCESS;
2189 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
2191 /* Transmit 128b/132b_TPS1 over Main-Link */
2192 dp_set_hw_training_pattern(link, lt_settings->pattern_for_cr, DPRX);
2193 /* Set TRAINING_PATTERN_SET to 01h */
2194 dpcd_set_training_pattern(link, lt_settings->pattern_for_cr);
2196 /* Adjust TX_FFE_PRESET_VALUE and Transmit 128b/132b_TPS2 over Main-Link */
2197 dpcd_128b_132b_get_aux_rd_interval(link, &aux_rd_interval);
2198 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2199 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2200 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2201 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2202 dp_set_hw_lane_settings(link, lt_settings, DPRX);
2203 dp_set_hw_training_pattern(link, lt_settings->pattern_for_eq, DPRX);
2205 /* Set loop counter to start from 1 */
2208 /* Set TRAINING_PATTERN_SET to 02h and TX_FFE_PRESET_VALUE in one AUX transaction */
2209 dpcd_set_lt_pattern_and_lane_settings(link, lt_settings,
2210 lt_settings->pattern_for_eq, DPRX);
2212 /* poll for channel EQ done */
2213 while (status == LINK_TRAINING_SUCCESS) {
2214 dp_wait_for_training_aux_rd_interval(link, aux_rd_interval);
2215 wait_time += aux_rd_interval;
2216 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2217 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2218 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2219 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2220 dpcd_128b_132b_get_aux_rd_interval(link, &aux_rd_interval);
2221 if (dp_is_ch_eq_done(lt_settings->link_settings.lane_count,
2222 dpcd_lane_status)) {
2225 } else if (loop_count >= lt_settings->eq_loop_count_limit) {
2226 status = DP_128b_132b_MAX_LOOP_COUNT_REACHED;
2227 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2228 status = DP_128b_132b_LT_FAILED;
/* Sink asked for different FFE: re-apply and loop again. */
2230 dp_set_hw_lane_settings(link, lt_settings, DPRX);
2231 dpcd_set_lane_settings(link, lt_settings, DPRX);
2236 /* poll for EQ interlane align done */
2237 while (status == LINK_TRAINING_SUCCESS) {
2238 if (dpcd_lane_status_updated.bits.EQ_INTERLANE_ALIGN_DONE_128b_132b) {
2241 } else if (wait_time >= lt_settings->eq_wait_time_limit) {
2242 status = DP_128b_132b_CHANNEL_EQ_DONE_TIMEOUT;
2243 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2244 status = DP_128b_132b_LT_FAILED;
2246 dp_wait_for_training_aux_rd_interval(link,
2247 lt_settings->eq_pattern_time);
2248 wait_time += lt_settings->eq_pattern_time;
2249 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2250 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
/*
 * dp_perform_128b_132b_cds_done_sequence() - CDS (clock/data switch) phase
 * of DP 128b/132b link training.
 *
 * Assumes the EQ pattern is already on the main link; sets the CDS training
 * pattern in DPCD and polls until symbol lock + CDS interlane align are
 * reported, LT_FAILED is raised, or cds_wait_time_limit is exceeded.
 *
 * NOTE(review): break statements / closing braces are missing from this
 * listing relative to the line numbers it carries.
 */
2257 static enum link_training_result dp_perform_128b_132b_cds_done_sequence(
2258 struct dc_link *link,
2259 struct link_training_settings *lt_settings)
2261 /* Assumption: assume hardware has transmitted eq pattern */
2262 enum link_training_result status = LINK_TRAINING_SUCCESS;
2263 union lane_align_status_updated dpcd_lane_status_updated = {0};
2264 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
2265 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
2266 uint32_t wait_time = 0;
2268 /* initiate CDS done sequence */
2269 dpcd_set_training_pattern(link, lt_settings->pattern_for_cds);
2271 /* poll for CDS interlane align done and symbol lock */
2272 while (status == LINK_TRAINING_SUCCESS) {
2273 dp_wait_for_training_aux_rd_interval(link,
2274 lt_settings->cds_pattern_time);
2275 wait_time += lt_settings->cds_pattern_time;
2276 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2277 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2278 if (dp_is_symbol_locked(lt_settings->link_settings.lane_count, dpcd_lane_status) &&
2279 dpcd_lane_status_updated.bits.CDS_INTERLANE_ALIGN_DONE_128b_132b) {
/* Success path: loop exit (break) is on a missing line. */
2282 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2283 status = DP_128b_132b_LT_FAILED;
2284 } else if (wait_time >= lt_settings->cds_wait_time_limit) {
2285 status = DP_128b_132b_CDS_DONE_TIMEOUT;
/*
 * dp_perform_8b_10b_link_training() - Legacy DP 8b/10b link training.
 *
 * Writes link rate/lane count/spread to DPCD, then runs clock recovery and
 * channel equalization. In non-transparent LTTPR mode, each repeater is
 * trained in turn (highest ID first) before the DPRX itself.
 *
 * NOTE(review): several lines are missing from this listing (the `lane`
 * variable declaration, loop decrement, break statements, closing braces).
 */
2293 static enum link_training_result dp_perform_8b_10b_link_training(
2294 struct dc_link *link,
2295 struct link_training_settings *lt_settings)
2297 enum link_training_result status = LINK_TRAINING_SUCCESS;
2299 uint8_t repeater_cnt;
2300 uint8_t repeater_id;
/* Workaround: some sinks need the CR pattern before link settings. */
2303 if (link->ctx->dc->work_arounds.lt_early_cr_pattern)
2304 start_clock_recovery_pattern_early(link, lt_settings, DPRX);
2306 /* 1. set link rate, lane count and spread. */
2307 dpcd_set_link_settings(link, lt_settings);
2309 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
2311 /* 2. perform link training (set link training done
2312 * to false is done as well)
2314 repeater_cnt = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
/* Train repeaters from the one closest to the source downward. */
2316 for (repeater_id = repeater_cnt; (repeater_id > 0 && status == LINK_TRAINING_SUCCESS);
2318 status = perform_clock_recovery_sequence(link, lt_settings, repeater_id);
2320 if (status != LINK_TRAINING_SUCCESS)
2323 status = perform_channel_equalization_sequence(link,
2327 if (status != LINK_TRAINING_SUCCESS)
2330 repeater_training_done(link, repeater_id);
/* Clear cached per-lane DPCD settings before training the DPRX. */
2333 for (lane = 0; lane < (uint8_t)lt_settings->link_settings.lane_count; lane++)
2334 lt_settings->dpcd_lane_settings[lane].raw = 0;
/* 3. Train the sink (DPRX) itself: CR then channel EQ. */
2337 if (status == LINK_TRAINING_SUCCESS) {
2338 status = perform_clock_recovery_sequence(link, lt_settings, DPRX);
2339 if (status == LINK_TRAINING_SUCCESS) {
2340 status = perform_channel_equalization_sequence(link,
2349 #if defined(CONFIG_DRM_AMD_DC_DCN)
2350 static enum link_training_result dp_perform_128b_132b_link_training(
2351 struct dc_link *link,
2352 struct link_training_settings *lt_settings)
2354 enum link_training_result result = LINK_TRAINING_SUCCESS;
2356 /* TODO - DP2.0 Link: remove legacy_dp2_lt logic */
2357 if (link->dc->debug.legacy_dp2_lt) {
2358 struct link_training_settings legacy_settings;
2360 decide_8b_10b_training_settings(link,
2361 <_settings->link_settings,
2363 return dp_perform_8b_10b_link_training(link, &legacy_settings);
2366 dpcd_set_link_settings(link, lt_settings);
2368 if (result == LINK_TRAINING_SUCCESS)
2369 result = dp_perform_128b_132b_channel_eq_done_sequence(link, lt_settings);
2371 if (result == LINK_TRAINING_SUCCESS)
2372 result = dp_perform_128b_132b_cds_done_sequence(link, lt_settings);
/*
 * dc_link_dp_perform_link_training() - Public entry point for DP link
 * training on a legacy (non-USB4) DP connection.
 *
 * Decides/overrides training settings, configures LTTPR mode, FEC
 * readiness and channel coding, dispatches to the 8b/10b or 128b/132b
 * trainer based on the link encoding, then exits training mode and
 * optionally transitions to video idle.
 *
 * NOTE(review): occurrences of "<_settings" below are "&lt_settings"
 * mangled by an HTML-entity stripping pass — restore before compiling.
 * Several lines (braces, argument lines, final return) are also missing
 * from this listing.
 */
2378 enum link_training_result dc_link_dp_perform_link_training(
2379 struct dc_link *link,
2380 const struct dc_link_settings *link_settings,
2381 bool skip_video_pattern)
2383 enum link_training_result status = LINK_TRAINING_SUCCESS;
2384 struct link_training_settings lt_settings = {0};
2385 enum dp_link_encoding encoding =
2386 dp_get_link_encoding_format(link_settings);
2388 /* decide training settings */
2389 dp_decide_training_settings(
2393 override_training_settings(
2395 &link->preferred_training_settings,
/* Vendor-specific LTTPR workaround applies only to fixed-VS paths in
 * transparent mode; enabled here and disabled again after training. */
2398 /* reset previous training states */
2399 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
2400 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
2401 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
2402 link->apply_vendor_specific_lttpr_link_rate_wa = true;
2403 vendor_specific_lttpr_wa_four(link, true);
2405 dpcd_exit_training_mode(link);
2408 /* configure link prior to entering training mode */
2409 dpcd_configure_lttpr_mode(link, <_settings);
2410 dp_set_fec_ready(link, lt_settings.should_set_fec_ready);
2411 dpcd_configure_channel_coding(link, <_settings);
2413 /* enter training mode:
2414 * Per DP specs starting from here, DPTX device shall not issue
2415 * Non-LT AUX transactions inside training mode.
2417 if (encoding == DP_8b_10b_ENCODING)
2418 status = dp_perform_8b_10b_link_training(link, <_settings);
2419 #if defined(CONFIG_DRM_AMD_DC_DCN)
2420 else if (encoding == DP_128b_132b_ENCODING)
2421 status = dp_perform_128b_132b_link_training(link, <_settings);
2426 /* exit training mode */
2427 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
2428 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
2429 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
2430 link->apply_vendor_specific_lttpr_link_rate_wa = false;
2431 vendor_specific_lttpr_wa_four(link, (status != LINK_TRAINING_SUCCESS));
2433 dpcd_exit_training_mode(link);
2436 /* switch to video idle */
2437 if ((status == LINK_TRAINING_SUCCESS) || !skip_video_pattern)
2438 status = dp_transition_to_video_idle(link,
2442 /* dump debug data */
2443 print_status_message(link, <_settings, status);
2444 if (status != LINK_TRAINING_SUCCESS)
2445 link->ctx->dc->debug_data.ltFailCount++;
/*
 * perform_link_training_with_retries() - Retry wrapper around DP link
 * training with progressive delay and optional bandwidth fallback.
 *
 * Runs up to @attempts training attempts. On failure: disables the PHY,
 * aborts early if the sink was unplugged, optionally falls back to a
 * lower link setting (failing outright if the reduced bandwidth can no
 * longer carry the stream), and sleeps an increasing delay between tries.
 *
 * NOTE(review): "¤t_setting" below is "&current_setting" mangled by an
 * HTML-entity stripping pass; various lines (parameter lines, braces,
 * `break`s, returns) are missing from this listing.
 */
2449 bool perform_link_training_with_retries(
2450 const struct dc_link_settings *link_setting,
2451 bool skip_video_pattern,
2453 struct pipe_ctx *pipe_ctx,
2454 enum signal_type signal,
2458 uint8_t delay_between_attempts = LINK_TRAINING_RETRY_DELAY;
2459 struct dc_stream_state *stream = pipe_ctx->stream;
2460 struct dc_link *link = stream->link;
2461 enum dp_panel_mode panel_mode = dp_get_panel_mode(link);
2462 struct link_encoder *link_enc;
2463 enum link_training_result status = LINK_TRAINING_CR_FAIL_LANE0;
2464 struct dc_link_settings current_setting = *link_setting;
2466 /* Dynamically assigned link encoders associated with stream rather than
2469 if (link->is_dig_mapping_flexible && link->dc->res_pool->funcs->link_encs_assign)
2470 link_enc = link_enc_cfg_get_link_enc_used_by_stream(link->ctx->dc, pipe_ctx->stream)
2472 link_enc = link->link_enc;
2474 /* We need to do this before the link training to ensure the idle pattern in SST
2475 * mode will be sent right after the link training
2477 if (dp_get_link_encoding_format(¤t_setting) == DP_8b_10b_ENCODING) {
2478 link_enc->funcs->connect_dig_be_to_fe(link_enc,
2479 pipe_ctx->stream_res.stream_enc->id, true);
2480 dp_source_sequence_trace(link, DPCD_SOURCE_SEQ_AFTER_CONNECT_DIG_FE_BE);
2483 for (j = 0; j < attempts; ++j) {
2485 DC_LOG_HW_LINK_TRAINING("%s: Beginning link training attempt %u of %d\n",
2486 __func__, (unsigned int)j + 1, attempts);
2491 pipe_ctx->clock_source->id,
/* Sink quirk: optional power-up delay before training. */
2494 if (stream->sink_patches.dppowerup_delay > 0) {
2495 int delay_dp_power_up_in_ms = stream->sink_patches.dppowerup_delay;
2497 msleep(delay_dp_power_up_in_ms);
2500 #ifdef CONFIG_DRM_AMD_DC_HDCP
2501 if (panel_mode == DP_PANEL_MODE_EDP) {
2502 struct cp_psp *cp_psp = &stream->ctx->cp_psp;
2504 if (cp_psp && cp_psp->funcs.enable_assr)
2505 /* ASSR is bound to fail with unsigned PSP
2506 * verstage used during devlopment phase.
2507 * Report and continue with eDP panel mode to
2508 * perform eDP link training with right settings
2510 cp_psp->funcs.enable_assr(cp_psp->handle, link);
2514 dp_set_panel_mode(link, panel_mode);
2516 if (link->aux_access_disabled) {
2517 dc_link_dp_perform_link_training_skip_aux(link, ¤t_setting);
2520 /** @todo Consolidate USB4 DP and DPx.x training. */
2521 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA) {
2522 status = dc_link_dpia_perform_link_training(link,
2524 skip_video_pattern);
2526 /* Transmit idle pattern once training successful. */
2527 if (status == LINK_TRAINING_SUCCESS)
2528 dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE,
2531 status = dc_link_dp_perform_link_training(link,
2533 skip_video_pattern);
2536 if (status == LINK_TRAINING_SUCCESS)
2540 /* latest link training still fail, skip delay and keep PHY on
2542 if (j == (attempts - 1) && link->ep_type == DISPLAY_ENDPOINT_PHY)
2545 DC_LOG_WARNING("%s: Link training attempt %u of %d failed\n",
2546 __func__, (unsigned int)j + 1, attempts);
2548 dp_disable_link_phy(link, signal);
2550 /* Abort link training if failure due to sink being unplugged. */
2551 if (status == LINK_TRAINING_ABORT) {
2552 enum dc_connection_type type = dc_connection_none;
2554 dc_link_detect_sink(link, &type);
2555 if (type == dc_connection_none)
2557 } else if (do_fallback) {
2561 decide_fallback_link_setting(*link_setting, ¤t_setting, status);
2562 /* Fail link training if reduced link bandwidth no longer meets
2563 * stream requirements.
2565 req_bw = dc_bandwidth_in_kbps_from_timing(&stream->timing);
2566 link_bw = dc_link_bandwidth_kbps(link, ¤t_setting);
2567 if (req_bw > link_bw)
2571 msleep(delay_between_attempts);
/* Linear backoff between attempts. */
2573 delay_between_attempts += LINK_TRAINING_RETRY_DELAY;
2579 static enum clock_source_id get_clock_source_id(struct dc_link *link)
2581 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_UNDEFINED;
2582 struct clock_source *dp_cs = link->dc->res_pool->dp_clock_source;
2584 if (dp_cs != NULL) {
2585 dp_cs_id = dp_cs->id;
2588 * dp clock source is not initialized for some reason.
2589 * Should not happen, CLOCK_SOURCE_ID_EXTERNAL will be used
2597 static void set_dp_mst_mode(struct dc_link *link, bool mst_enable)
2599 if (mst_enable == false &&
2600 link->type == dc_connection_mst_branch) {
2601 /* Disable MST on link. Use only local sink. */
2602 dp_disable_link_phy_mst(link, link->connector_signal);
2604 link->type = dc_connection_single;
2605 link->local_sink = link->remote_sinks[0];
2606 link->local_sink->sink_signal = SIGNAL_TYPE_DISPLAY_PORT;
2607 dc_sink_retain(link->local_sink);
2608 dm_helpers_dp_mst_stop_top_mgr(link->ctx, link);
2609 } else if (mst_enable == true &&
2610 link->type == dc_connection_single &&
2611 link->remote_sinks[0] != NULL) {
2612 /* Re-enable MST on link. */
2613 dp_disable_link_phy(link, link->connector_signal);
2614 dp_enable_mst_on_sink(link, true);
2616 link->type = dc_connection_mst_branch;
2617 link->local_sink->sink_signal = SIGNAL_TYPE_DISPLAY_PORT_MST;
2621 bool dc_link_dp_sync_lt_begin(struct dc_link *link)
2623 /* Begin Sync LT. During this time,
2624 * DPCD:600h must not be powered down.
2626 link->sync_lt_in_progress = true;
2628 /*Clear any existing preferred settings.*/
2629 memset(&link->preferred_training_settings, 0,
2630 sizeof(struct dc_link_training_overrides));
2631 memset(&link->preferred_link_setting, 0,
2632 sizeof(struct dc_link_settings));
/*
 * dc_link_dp_sync_lt_attempt() - One synchronized link-training attempt
 * with caller-supplied overrides (MST mode, FEC, scrambler reset).
 *
 * Re-enables the PHY with the requested settings, applies panel mode,
 * then runs 8b/10b clock recovery and channel equalization directly
 * (skipping the video-pattern transition, per sync LT rules).
 *
 * NOTE(review): "<_settings" below is "&lt_settings" mangled by an
 * HTML-entity stripping pass; argument lines, braces and the final
 * return are missing from this listing.
 */
2637 enum link_training_result dc_link_dp_sync_lt_attempt(
2638 struct dc_link *link,
2639 struct dc_link_settings *link_settings,
2640 struct dc_link_training_overrides *lt_overrides)
2642 struct link_training_settings lt_settings = {0};
2643 enum link_training_result lt_status = LINK_TRAINING_SUCCESS;
2644 enum dp_panel_mode panel_mode = DP_PANEL_MODE_DEFAULT;
2645 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_EXTERNAL;
2646 bool fec_enable = false;
2648 dp_decide_training_settings(
2652 override_training_settings(
2656 /* Setup MST Mode */
2657 if (lt_overrides->mst_enable)
2658 set_dp_mst_mode(link, *lt_overrides->mst_enable);
/* Cycle the PHY with the requested clock source and settings. */
2661 dp_disable_link_phy(link, link->connector_signal);
2664 dp_cs_id = get_clock_source_id(link);
2665 dp_enable_link_phy(link, link->connector_signal,
2666 dp_cs_id, link_settings);
2668 /* Set FEC enable */
2669 #if defined(CONFIG_DRM_AMD_DC_DCN)
2670 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING) {
2672 fec_enable = lt_overrides->fec_enable && *lt_overrides->fec_enable;
2673 dp_set_fec_ready(link, fec_enable);
2674 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* Alternate scrambler reset override forces eDP panel mode on/off. */
2678 if (lt_overrides->alternate_scrambler_reset) {
2679 if (*lt_overrides->alternate_scrambler_reset)
2680 panel_mode = DP_PANEL_MODE_EDP;
2682 panel_mode = DP_PANEL_MODE_DEFAULT;
2684 panel_mode = dp_get_panel_mode(link);
2686 dp_set_panel_mode(link, panel_mode);
2688 /* Attempt to train with given link training settings */
2689 if (link->ctx->dc->work_arounds.lt_early_cr_pattern)
2690 start_clock_recovery_pattern_early(link, <_settings, DPRX);
2692 /* Set link rate, lane count and spread. */
2693 dpcd_set_link_settings(link, <_settings);
2695 /* 2. perform link training (set link training done
2696 * to false is done as well)
2698 lt_status = perform_clock_recovery_sequence(link, <_settings, DPRX);
2699 if (lt_status == LINK_TRAINING_SUCCESS) {
2700 lt_status = perform_channel_equalization_sequence(link,
2705 /* 3. Sync LT must skip TRAINING_PATTERN_SET:0 (video pattern)*/
2706 /* 4. print status message*/
2707 print_status_message(link, <_settings, lt_status);
2712 bool dc_link_dp_sync_lt_end(struct dc_link *link, bool link_down)
2714 /* If input parameter is set, shut down phy.
2715 * Still shouldn't turn off dp_receiver (DPCD:600h)
2717 if (link_down == true) {
2718 #if defined(CONFIG_DRM_AMD_DC_DCN)
2719 struct dc_link_settings link_settings = link->cur_link_settings;
2721 dp_disable_link_phy(link, link->connector_signal);
2722 #if defined(CONFIG_DRM_AMD_DC_DCN)
2723 if (dp_get_link_encoding_format(&link_settings) == DP_8b_10b_ENCODING)
2725 dp_set_fec_ready(link, false);
2728 link->sync_lt_in_progress = false;
2732 #if defined(CONFIG_DRM_AMD_DC_DCN)
2733 static enum dc_link_rate get_lttpr_max_link_rate(struct dc_link *link)
2735 enum dc_link_rate lttpr_max_link_rate = link->dpcd_caps.lttpr_caps.max_link_rate;
2737 if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR20)
2738 lttpr_max_link_rate = LINK_RATE_UHBR20;
2739 else if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR13_5)
2740 lttpr_max_link_rate = LINK_RATE_UHBR13_5;
2741 else if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR10)
2742 lttpr_max_link_rate = LINK_RATE_UHBR10;
2744 return lttpr_max_link_rate;
2748 bool dc_link_dp_get_max_link_enc_cap(const struct dc_link *link, struct dc_link_settings *max_link_enc_cap)
2750 struct link_encoder *link_enc = NULL;
2752 if (!max_link_enc_cap) {
2753 DC_LOG_ERROR("%s: Could not return max link encoder caps", __func__);
2757 /* Links supporting dynamically assigned link encoder will be assigned next
2758 * available encoder if one not already assigned.
2760 if (link->is_dig_mapping_flexible &&
2761 link->dc->res_pool->funcs->link_encs_assign) {
2762 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
2763 if (link_enc == NULL)
2764 link_enc = link_enc_cfg_get_next_avail_link_enc(link->ctx->dc);
2766 link_enc = link->link_enc;
2769 if (link_enc && link_enc->funcs->get_max_link_cap) {
2770 link_enc->funcs->get_max_link_cap(link_enc, max_link_enc_cap);
2774 DC_LOG_ERROR("%s: Max link encoder caps unknown", __func__);
2775 max_link_enc_cap->lane_count = 1;
2776 max_link_enc_cap->link_rate = 6;
/*
 * get_max_link_cap() - Compute the maximum link settings for @link.
 *
 * Takes the link encoder's max capability, caps UHBR rates when no HPO DP
 * link encoder is present, then lowers lane count / rate / spread to the
 * sink's reported caps, and finally to the LTTPR chain's caps when in
 * non-transparent mode.
 *
 * NOTE(review): #else/#endif lines and closing braces are missing from
 * this listing relative to the line numbers it carries.
 */
2780 static struct dc_link_settings get_max_link_cap(struct dc_link *link)
2782 struct dc_link_settings max_link_cap = {0};
2783 #if defined(CONFIG_DRM_AMD_DC_DCN)
2784 enum dc_link_rate lttpr_max_link_rate;
2786 struct link_encoder *link_enc = NULL;
2788 /* Links supporting dynamically assigned link encoder will be assigned next
2789 * available encoder if one not already assigned.
2791 if (link->is_dig_mapping_flexible &&
2792 link->dc->res_pool->funcs->link_encs_assign) {
2793 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
2794 if (link_enc == NULL)
2795 link_enc = link_enc_cfg_get_next_avail_link_enc(link->ctx->dc);
2797 link_enc = link->link_enc;
2800 /* get max link encoder capability */
2802 link_enc->funcs->get_max_link_cap(link_enc, &max_link_cap);
2803 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* UHBR rates require the HPO DP link encoder; cap at HBR3 without it. */
2804 if (max_link_cap.link_rate >= LINK_RATE_UHBR10 &&
2805 !link->hpo_dp_link_enc)
2806 max_link_cap.link_rate = LINK_RATE_HIGH3;
2809 /* Lower link settings based on sink's link cap */
2810 if (link->reported_link_cap.lane_count < max_link_cap.lane_count)
2811 max_link_cap.lane_count =
2812 link->reported_link_cap.lane_count;
2813 if (link->reported_link_cap.link_rate < max_link_cap.link_rate)
2814 max_link_cap.link_rate =
2815 link->reported_link_cap.link_rate;
2816 if (link->reported_link_cap.link_spread <
2817 max_link_cap.link_spread)
2818 max_link_cap.link_spread =
2819 link->reported_link_cap.link_spread;
2821 * account for lttpr repeaters cap
2822 * notes: repeaters do not snoop in the DPRX Capabilities addresses (3.6.3).
2824 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
2825 if (link->dpcd_caps.lttpr_caps.max_lane_count < max_link_cap.lane_count)
2826 max_link_cap.lane_count = link->dpcd_caps.lttpr_caps.max_lane_count;
2828 #if defined(CONFIG_DRM_AMD_DC_DCN)
2829 lttpr_max_link_rate = get_lttpr_max_link_rate(link);
2831 if (lttpr_max_link_rate < max_link_cap.link_rate)
2832 max_link_cap.link_rate = lttpr_max_link_rate;
/* Non-DCN path (under the missing #else): clamp to raw LTTPR max rate. */
2834 if (link->dpcd_caps.lttpr_caps.max_link_rate < max_link_cap.link_rate)
2835 max_link_cap.link_rate = link->dpcd_caps.lttpr_caps.max_link_rate;
2838 DC_LOG_HW_LINK_TRAINING("%s\n Training with LTTPR, max_lane count %d max_link rate %d \n",
2840 max_link_cap.lane_count,
2841 max_link_cap.link_rate);
2843 return max_link_cap;
/*
 * read_hpd_rx_irq_data() - Read HPD RX IRQ status from the sink's DPCD.
 *
 * Pre-DP1.4 sinks are read from the 0x200 range; DP1.4+ sinks from the
 * 0x2002 (ESI) range, in one 14-byte AUX read that is then unpacked into
 * the legacy @irq_data layout.
 *
 * NOTE(review): `retval` is declared `static` here — that makes it shared
 * across calls/links and is almost certainly unintended (not thread-safe);
 * confirm against upstream and drop the `static` if so.
 */
2846 static enum dc_status read_hpd_rx_irq_data(
2847 struct dc_link *link,
2848 union hpd_irq_data *irq_data)
2850 static enum dc_status retval;
2852 /* The HW reads 16 bytes from 200h on HPD,
2853 * but if we get an AUX_DEFER, the HW cannot retry
2854 * and this causes the CTS tests 4.3.2.1 - 3.2.4 to
2855 * fail, so we now explicitly read 6 bytes which is
2856 * the req from the above mentioned test cases.
2858 * For DP 1.4 we need to read those from 2002h range.
2860 if (link->dpcd_caps.dpcd_rev.raw < DPCD_REV_14)
2861 retval = core_link_read_dpcd(
2865 sizeof(union hpd_irq_data));
2867 /* Read 14 bytes in a single read and then copy only the required fields.
2868 * This is more efficient than doing it in two separate AUX reads. */
2870 uint8_t tmp[DP_SINK_STATUS_ESI - DP_SINK_COUNT_ESI + 1];
2872 retval = core_link_read_dpcd(
2878 if (retval != DC_OK)
/* Unpack ESI layout into the legacy irq_data byte layout. */
2881 irq_data->bytes.sink_cnt.raw = tmp[DP_SINK_COUNT_ESI - DP_SINK_COUNT_ESI];
2882 irq_data->bytes.device_service_irq.raw = tmp[DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0 - DP_SINK_COUNT_ESI];
2883 irq_data->bytes.lane01_status.raw = tmp[DP_LANE0_1_STATUS_ESI - DP_SINK_COUNT_ESI];
2884 irq_data->bytes.lane23_status.raw = tmp[DP_LANE2_3_STATUS_ESI - DP_SINK_COUNT_ESI];
2885 irq_data->bytes.lane_status_updated.raw = tmp[DP_LANE_ALIGN_STATUS_UPDATED_ESI - DP_SINK_COUNT_ESI];
2886 irq_data->bytes.sink_status.raw = tmp[DP_SINK_STATUS_ESI - DP_SINK_COUNT_ESI];
/*
 * hpd_rx_irq_check_link_loss_status() - Decide whether an HPD RX IRQ
 * indicates link loss that requires retraining.
 *
 * Scans per-lane CR/EQ/symbol-lock bits and the interlane-align bit from
 * the already-read @hpd_irq_dpcd_data; if any dropped, additionally
 * verifies via DPCD that the sink is in D0 before reporting link loss.
 *
 * NOTE(review): braces, `break`s, the `lane`/`return_code` declarations
 * and the final return are missing from this listing.
 */
2892 bool hpd_rx_irq_check_link_loss_status(
2893 struct dc_link *link,
2894 union hpd_irq_data *hpd_irq_dpcd_data)
2896 uint8_t irq_reg_rx_power_state = 0;
2897 enum dc_status dpcd_result = DC_ERROR_UNEXPECTED;
2898 union lane_status lane_status;
2900 bool sink_status_changed;
2903 sink_status_changed = false;
2904 return_code = false;
/* Untrained link (lane_count == 0): nothing to check. */
2906 if (link->cur_link_settings.lane_count == 0)
2909 /*1. Check that Link Status changed, before re-training.*/
2911 /*parse lane status*/
2912 for (lane = 0; lane < link->cur_link_settings.lane_count; lane++) {
2913 /* check status of lanes 0,1
2914 * changed DpcdAddress_Lane01Status (0x202)
2916 lane_status.raw = get_nibble_at_index(
2917 &hpd_irq_dpcd_data->bytes.lane01_status.raw,
2920 if (!lane_status.bits.CHANNEL_EQ_DONE_0 ||
2921 !lane_status.bits.CR_DONE_0 ||
2922 !lane_status.bits.SYMBOL_LOCKED_0) {
2923 /* if one of the channel equalization, clock
2924 * recovery or symbol lock is dropped
2925 * consider it as (link has been
2926 * dropped) dp sink status has changed
2928 sink_status_changed = true;
2933 /* Check interlane align.*/
2934 if (sink_status_changed ||
2935 !hpd_irq_dpcd_data->bytes.lane_status_updated.bits.INTERLANE_ALIGN_DONE) {
2937 DC_LOG_HW_HPD_IRQ("%s: Link Status changed.\n", __func__);
2941 /*2. Check that we can handle interrupt: Not in FS DOS,
2942 * Not in "Display Timeout" state, Link is trained.
2944 dpcd_result = core_link_read_dpcd(link,
2946 &irq_reg_rx_power_state,
2947 sizeof(irq_reg_rx_power_state));
2949 if (dpcd_result != DC_OK) {
2950 DC_LOG_HW_HPD_IRQ("%s: DPCD read failed to obtain power state.\n",
/* Link loss is only actionable when the sink is powered (D0). */
2953 if (irq_reg_rx_power_state != DP_SET_POWER_D0)
2954 return_code = false;
/*
 * dp_verify_link_cap() - Determine the actually-trainable link capability.
 *
 * Trains the link from the highest common setting downward (using
 * decide_fallback_link_setting) until an attempt succeeds, recording the
 * result in link->verified_link_cap. Skips training entirely when the
 * debug flag is set or the link uses flexible encoder mapping. If every
 * setting fails, falls back to a 1-lane / RBR "failed safe" cap.
 *
 * NOTE(review): many lines are missing from this listing (parameter
 * lines, the do/while open, `success`/`fail_count` handling, braces and
 * returns); comments describe only what is visible.
 */
2961 bool dp_verify_link_cap(
2962 struct dc_link *link,
2963 struct dc_link_settings *known_limit_link_setting,
2966 struct dc_link_settings max_link_cap = {0};
2967 struct dc_link_settings cur_link_setting = {0};
2968 struct dc_link_settings *cur = &cur_link_setting;
2969 struct dc_link_settings initial_link_settings = {0};
2971 bool skip_link_training;
2972 bool skip_video_pattern;
2973 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_EXTERNAL;
2974 enum link_training_result status;
2975 union hpd_irq_data irq_data;
2977 /* link training starts with the maximum common settings
2978 * supported by both sink and ASIC.
2980 max_link_cap = get_max_link_cap(link);
2981 initial_link_settings = get_common_supported_link_settings(
2982 *known_limit_link_setting,
2985 /* Accept reported capabilities if link supports flexible encoder mapping or encoder already in use. */
2986 if (link->dc->debug.skip_detection_link_training ||
2987 link->is_dig_mapping_flexible) {
2988 /* TODO - should we check link encoder's max link caps here?
2989 * How do we know which link encoder to check from?
2991 link->verified_link_cap = *known_limit_link_setting;
2993 } else if (link->link_enc && link->dc->res_pool->funcs->link_encs_assign &&
2994 !link_enc_cfg_is_link_enc_avail(link->ctx->dc, link->link_enc->preferred_engine, link)) {
2995 link->verified_link_cap = initial_link_settings;
2999 memset(&irq_data, 0, sizeof(irq_data));
3001 skip_link_training = false;
3003 /* Grant extended timeout request */
3004 if ((link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) && (link->dpcd_caps.lttpr_caps.max_ext_timeout > 0)) {
3005 uint8_t grant = link->dpcd_caps.lttpr_caps.max_ext_timeout & 0x80;
3007 core_link_write_dpcd(link, DP_PHY_REPEATER_EXTENDED_WAIT_TIMEOUT, &grant, sizeof(grant));
3010 #if defined(CONFIG_DRM_AMD_DC_DCN)
3011 if (dp_get_link_encoding_format(&link->cur_link_settings) == DP_128b_132b_ENCODING)
3012 reset_dp_hpo_stream_encoders_for_link(link);
3014 /* TODO implement override and monitor patch later */
3016 /* try to train the link from high to low to
3017 * find the physical link capability
3019 /* disable PHY done possible by BIOS, will be done by driver itself */
3020 dp_disable_link_phy(link, link->connector_signal);
3022 dp_cs_id = get_clock_source_id(link);
3024 cur_link_setting = initial_link_settings;
3026 /* Temporary Renoir-specific workaround for SWDEV-215184;
3027 * PHY will sometimes be in bad state on hotplugging display from certain USB-C dongle,
3028 * so add extra cycle of enabling and disabling the PHY before first link training.
3030 if (link->link_enc && link->link_enc->features.flags.bits.DP_IS_USB_C &&
3031 link->dc->debug.usbc_combo_phy_reset_wa) {
3032 dp_enable_link_phy(link, link->connector_signal, dp_cs_id, cur);
3033 dp_disable_link_phy(link, link->connector_signal);
/* Fallback loop body (do/while over link-setting fallbacks). */
3037 skip_video_pattern = true;
3039 if (cur->link_rate == LINK_RATE_LOW)
3040 skip_video_pattern = false;
3044 link->connector_signal,
3049 if (skip_link_training)
3052 status = dc_link_dp_perform_link_training(
3055 skip_video_pattern);
3056 if (status == LINK_TRAINING_SUCCESS)
/* On success, record this setting and check for immediate link loss. */
3063 link->verified_link_cap = *cur;
3065 if (read_hpd_rx_irq_data(link, &irq_data) == DC_OK)
3066 if (hpd_rx_irq_check_link_loss_status(
3071 /* always disable the link before trying another
3072 * setting or before returning we'll enable it later
3073 * based on the actual mode we're driving
3075 dp_disable_link_phy(link, link->connector_signal);
3076 } while (!success && decide_fallback_link_setting(
3077 initial_link_settings, cur, status));
3079 /* Link Training failed for all Link Settings
3080 * (Lane Count is still unknown)
3083 /* If all LT fails for all settings,
3084 * set verified = failed safe (1 lane low)
3086 link->verified_link_cap.lane_count = LANE_COUNT_ONE;
3087 link->verified_link_cap.link_rate = LINK_RATE_LOW;
3089 link->verified_link_cap.link_spread =
3090 LINK_SPREAD_DISABLED;
/*
 * dp_verify_link_cap_with_retries() - Retry wrapper around
 * dp_verify_link_cap().
 *
 * Up to @attempts times: re-detect the sink (falling back to 1-lane RBR
 * and stopping if it is gone), then verify the link cap; succeeds when a
 * verification pass completes with zero fallback failures.
 *
 * NOTE(review): parameter lines, braces, `break`s and the final return
 * are missing from this listing.
 */
3097 bool dp_verify_link_cap_with_retries(
3098 struct dc_link *link,
3099 struct dc_link_settings *known_limit_link_setting,
3103 bool success = false;
3105 for (i = 0; i < attempts; i++) {
3107 enum dc_connection_type type = dc_connection_none;
3109 memset(&link->verified_link_cap, 0,
3110 sizeof(struct dc_link_settings));
3111 if (!dc_link_detect_sink(link, &type) || type == dc_connection_none) {
/* Sink unplugged: record failed-safe cap and stop retrying. */
3112 link->verified_link_cap.lane_count = LANE_COUNT_ONE;
3113 link->verified_link_cap.link_rate = LINK_RATE_LOW;
3114 link->verified_link_cap.link_spread = LINK_SPREAD_DISABLED;
3116 } else if (dp_verify_link_cap(link,
3117 known_limit_link_setting,
3118 &fail_count) && fail_count == 0) {
/*
 * dp_verify_mst_link_cap() - Set verified_link_cap for an MST link.
 *
 * 8b/10b MST links take the intersection of the reported cap and the
 * encoder's max cap without training; 128b/132b links run the full
 * (retried) link-cap verification instead.
 *
 * NOTE(review): braces, the #endif and the final return are missing from
 * this listing.
 */
3127 bool dp_verify_mst_link_cap(
3128 struct dc_link *link)
3130 struct dc_link_settings max_link_cap = {0};
3132 if (dp_get_link_encoding_format(&link->reported_link_cap) ==
3133 DP_8b_10b_ENCODING) {
3134 max_link_cap = get_max_link_cap(link);
3135 link->verified_link_cap = get_common_supported_link_settings(
3136 link->reported_link_cap,
3139 #if defined(CONFIG_DRM_AMD_DC_DCN)
3140 else if (dp_get_link_encoding_format(&link->reported_link_cap) ==
3141 DP_128b_132b_ENCODING) {
3142 dp_verify_link_cap_with_retries(link,
3143 &link->reported_link_cap,
3144 LINK_TRAINING_MAX_VERIFY_RETRY);
/*
 * get_common_supported_link_settings() - Intersection of two link settings.
 *
 * Takes the minimum lane count and link rate of @link_setting_a and
 * @link_setting_b (spread forced to disabled), then snaps a nonstandard
 * link rate down to the nearest supported rate (handles DPR-120 compliance
 * testers reporting random MAX_LINK_BW values).
 *
 * NOTE(review): the #else/#endif structure of the rate-snapping ladder is
 * partially missing from this listing; the DCN branch handles UHBR rates,
 * the visible line 3182 belongs to the non-DCN branch.
 */
3150 static struct dc_link_settings get_common_supported_link_settings(
3151 struct dc_link_settings link_setting_a,
3152 struct dc_link_settings link_setting_b)
3154 struct dc_link_settings link_settings = {0};
3156 link_settings.lane_count =
3157 (link_setting_a.lane_count <=
3158 link_setting_b.lane_count) ?
3159 link_setting_a.lane_count :
3160 link_setting_b.lane_count;
3161 link_settings.link_rate =
3162 (link_setting_a.link_rate <=
3163 link_setting_b.link_rate) ?
3164 link_setting_a.link_rate :
3165 link_setting_b.link_rate;
3166 link_settings.link_spread = LINK_SPREAD_DISABLED;
3168 /* in DP compliance test, DPR-120 may have
3169 * a random value in its MAX_LINK_BW dpcd field.
3170 * We map it to the maximum supported link rate that
3171 * is smaller than MAX_LINK_BW in this case.
3173 #if defined(CONFIG_DRM_AMD_DC_DCN)
3174 if (link_settings.link_rate > LINK_RATE_UHBR20) {
3175 link_settings.link_rate = LINK_RATE_UHBR20;
3176 } else if (link_settings.link_rate < LINK_RATE_UHBR20 &&
3177 link_settings.link_rate > LINK_RATE_UHBR13_5) {
3178 link_settings.link_rate = LINK_RATE_UHBR13_5;
3179 } else if (link_settings.link_rate < LINK_RATE_UHBR10 &&
3180 link_settings.link_rate > LINK_RATE_HIGH3) {
3182 if (link_settings.link_rate > LINK_RATE_HIGH3) {
3184 link_settings.link_rate = LINK_RATE_HIGH3;
3185 } else if (link_settings.link_rate < LINK_RATE_HIGH3
3186 && link_settings.link_rate > LINK_RATE_HIGH2) {
3187 link_settings.link_rate = LINK_RATE_HIGH2;
3188 } else if (link_settings.link_rate < LINK_RATE_HIGH2
3189 && link_settings.link_rate > LINK_RATE_HIGH) {
3190 link_settings.link_rate = LINK_RATE_HIGH;
3191 } else if (link_settings.link_rate < LINK_RATE_HIGH
3192 && link_settings.link_rate > LINK_RATE_LOW) {
3193 link_settings.link_rate = LINK_RATE_LOW;
3194 } else if (link_settings.link_rate < LINK_RATE_LOW) {
3195 link_settings.link_rate = LINK_RATE_UNKNOWN;
3198 return link_settings;
3201 static inline bool reached_minimum_lane_count(enum dc_lane_count lane_count)
3203 return lane_count <= LANE_COUNT_ONE;
3206 static inline bool reached_minimum_link_rate(enum dc_link_rate link_rate)
3208 return link_rate <= LINK_RATE_LOW;
3211 static enum dc_lane_count reduce_lane_count(enum dc_lane_count lane_count)
3213 switch (lane_count) {
3214 case LANE_COUNT_FOUR:
3215 return LANE_COUNT_TWO;
3216 case LANE_COUNT_TWO:
3217 return LANE_COUNT_ONE;
3218 case LANE_COUNT_ONE:
3219 return LANE_COUNT_UNKNOWN;
3221 return LANE_COUNT_UNKNOWN;
3225 static enum dc_link_rate reduce_link_rate(enum dc_link_rate link_rate)
3227 switch (link_rate) {
3228 #if defined(CONFIG_DRM_AMD_DC_DCN)
3229 case LINK_RATE_UHBR20:
3230 return LINK_RATE_UHBR13_5;
3231 case LINK_RATE_UHBR13_5:
3232 return LINK_RATE_UHBR10;
3233 case LINK_RATE_UHBR10:
3234 return LINK_RATE_HIGH3;
3236 case LINK_RATE_HIGH3:
3237 return LINK_RATE_HIGH2;
3238 case LINK_RATE_HIGH2:
3239 return LINK_RATE_HIGH;
3240 case LINK_RATE_HIGH:
3241 return LINK_RATE_LOW;
3243 return LINK_RATE_UNKNOWN;
3245 return LINK_RATE_UNKNOWN;
3249 static enum dc_lane_count increase_lane_count(enum dc_lane_count lane_count)
3251 switch (lane_count) {
3252 case LANE_COUNT_ONE:
3253 return LANE_COUNT_TWO;
3254 case LANE_COUNT_TWO:
3255 return LANE_COUNT_FOUR;
3257 return LANE_COUNT_UNKNOWN;
3261 static enum dc_link_rate increase_link_rate(enum dc_link_rate link_rate)
3263 switch (link_rate) {
3265 return LINK_RATE_HIGH;
3266 case LINK_RATE_HIGH:
3267 return LINK_RATE_HIGH2;
3268 case LINK_RATE_HIGH2:
3269 return LINK_RATE_HIGH3;
3270 #if defined(CONFIG_DRM_AMD_DC_DCN)
3271 case LINK_RATE_HIGH3:
3272 return LINK_RATE_UHBR10;
3273 case LINK_RATE_UHBR10:
3274 return LINK_RATE_UHBR13_5;
3275 case LINK_RATE_UHBR13_5:
3276 return LINK_RATE_UHBR20;
3279 return LINK_RATE_UNKNOWN;
3283 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * decide_fallback_link_setting_max_bw_policy() - Fallback by walking the
 * dp_lt_fallbacks table (max-bandwidth-first policy, used for 128b/132b).
 *
 * Locates the entry matching the current setting, then advances to the
 * next entry whose lane count and rate fit within @max.
 *
 * Return (per visible tail): true when a next entry was found and written
 * to @cur; otherwise false.
 *
 * NOTE(review): loop bodies are missing their brace/increment/break lines
 * in this listing; as shown the while loops would not terminate.
 */
3284 static bool decide_fallback_link_setting_max_bw_policy(
3285 const struct dc_link_settings *max,
3286 struct dc_link_settings *cur)
3288 uint8_t cur_idx = 0, next_idx;
3291 while (cur_idx < ARRAY_SIZE(dp_lt_fallbacks))
3292 /* find current index */
3293 if (dp_lt_fallbacks[cur_idx].lane_count == cur->lane_count &&
3294 dp_lt_fallbacks[cur_idx].link_rate == cur->link_rate)
3299 next_idx = cur_idx + 1;
3301 while (next_idx < ARRAY_SIZE(dp_lt_fallbacks))
3302 /* find next index */
3303 if (dp_lt_fallbacks[next_idx].lane_count <= max->lane_count &&
3304 dp_lt_fallbacks[next_idx].link_rate <= max->link_rate)
3309 if (next_idx < ARRAY_SIZE(dp_lt_fallbacks)) {
3310 cur->lane_count = dp_lt_fallbacks[next_idx].lane_count;
3311 cur->link_rate = dp_lt_fallbacks[next_idx].link_rate;
3320 * function: set link rate and lane count fallback based
3321 * on current link setting and last link training result
3323 * true - link setting could be set
3324 * false - has reached minimum setting
3325 * and no further fallback could be done
/*
 * For 128b/132b links this delegates to the max-bandwidth fallback table;
 * for 8b/10b links the fallback direction depends on which training phase
 * failed: CR failures drop the rate first, EQ failures drop the lane
 * count first, and EQ-phase CR failures drop the rate only.
 *
 * NOTE(review): several lines are missing from this listing (break
 * statements, some assignments such as the LANE0/LANE1/LANE23 targets,
 * braces and the final return).
 */
3327 static bool decide_fallback_link_setting(
3328 struct dc_link_settings initial_link_settings,
3329 struct dc_link_settings *current_link_setting,
3330 enum link_training_result training_result)
3332 if (!current_link_setting)
3334 #if defined(CONFIG_DRM_AMD_DC_DCN)
3335 if (dp_get_link_encoding_format(&initial_link_settings) == DP_128b_132b_ENCODING)
3336 return decide_fallback_link_setting_max_bw_policy(&initial_link_settings,
3337 current_link_setting);
3340 switch (training_result) {
3341 case LINK_TRAINING_CR_FAIL_LANE0:
3342 case LINK_TRAINING_CR_FAIL_LANE1:
3343 case LINK_TRAINING_CR_FAIL_LANE23:
3344 case LINK_TRAINING_LQA_FAIL:
/* CR failed: reduce rate first; once at minimum rate, restore the
 * initial rate and reduce the lane count according to which lane
 * group failed clock recovery. */
3346 if (!reached_minimum_link_rate
3347 (current_link_setting->link_rate)) {
3348 current_link_setting->link_rate =
3350 current_link_setting->link_rate);
3351 } else if (!reached_minimum_lane_count
3352 (current_link_setting->lane_count)) {
3353 current_link_setting->link_rate =
3354 initial_link_settings.link_rate;
3355 if (training_result == LINK_TRAINING_CR_FAIL_LANE0)
3357 else if (training_result == LINK_TRAINING_CR_FAIL_LANE1)
3358 current_link_setting->lane_count =
3360 else if (training_result ==
3361 LINK_TRAINING_CR_FAIL_LANE23)
3362 current_link_setting->lane_count =
3365 current_link_setting->lane_count =
3367 current_link_setting->lane_count);
/* EQ failed: reduce lane count first, then rate. */
3373 case LINK_TRAINING_EQ_FAIL_EQ:
3375 if (!reached_minimum_lane_count
3376 (current_link_setting->lane_count)) {
3377 current_link_setting->lane_count =
3379 current_link_setting->lane_count);
3380 } else if (!reached_minimum_link_rate
3381 (current_link_setting->link_rate)) {
3382 current_link_setting->link_rate =
3384 current_link_setting->link_rate);
/* CR dropped during EQ phase: only reducing rate can help. */
3390 case LINK_TRAINING_EQ_FAIL_CR:
3392 if (!reached_minimum_link_rate
3393 (current_link_setting->link_rate)) {
3394 current_link_setting->link_rate =
3396 current_link_setting->link_rate);
/*
 * Validate that a CRTC timing can be driven over this DP link.
 * Rejects YCbCr 4:2:0 when the sink lacks VSC SDP colorimetry support
 * (except on virtual connectors), always accepts the 640x480@25.175MHz
 * fail-safe mode, and otherwise compares the timing's required bandwidth
 * against the link's reported capability.
 */
3408 bool dp_validate_mode_timing(
3409 struct dc_link *link,
3410 const struct dc_crtc_timing *timing)
3415 const struct dc_link_settings *link_setting;
3417 /* According to spec, VSC SDP should be used if pixel format is YCbCr420 */
3418 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420 &&
3419 !link->dpcd_caps.dprx_feature.bits.VSC_SDP_COLORIMETRY_SUPPORTED &&
3420 dal_graphics_object_id_get_connector_id(link->link_id) != CONNECTOR_ID_VIRTUAL)
3423 /*always DP fail safe mode*/
3424 if ((timing->pix_clk_100hz / 10) == (uint32_t) 25175 &&
3425 timing->h_addressable == (uint32_t) 640 &&
3426 timing->v_addressable == (uint32_t) 480)
3429 link_setting = dc_link_get_link_cap(link);
3431 /* TODO: DYNAMIC_VALIDATION needs to be implemented */
3432 /*if (flags.DYNAMIC_VALIDATION == 1 &&
3433 link->verified_link_cap.lane_count != LANE_COUNT_UNKNOWN)
3434 link_setting = &link->verified_link_cap;
/* The mode fits when its required bandwidth is within the link maximum. */
3437 req_bw = dc_bandwidth_in_kbps_from_timing(timing);
3438 max_bw = dc_link_bandwidth_kbps(link, link_setting);
3440 if (req_bw <= max_bw) {
3441 /* remember the biggest mode here, during
3442 * initial link training (to get
3443 * verified_link_cap), LS sends event about
3444 * cannot train at reported cap to upper
3445 * layer and upper layer will re-enumerate modes.
3446 * this is not necessary if the lower
3447 * verified_link_cap is enough to drive
3450 /* TODO: DYNAMIC_VALIDATION needs to be implemented */
3451 /* if (flags.DYNAMIC_VALIDATION == 1)
3452 dpsst->max_req_bw_for_verified_linkcap = dal_max(
3453 dpsst->max_req_bw_for_verified_linkcap, req_bw); */
3459 static bool decide_dp_link_settings(struct dc_link *link, struct dc_link_settings *link_setting, uint32_t req_bw)
3461 struct dc_link_settings initial_link_setting = {
3462 LANE_COUNT_ONE, LINK_RATE_LOW, LINK_SPREAD_DISABLED, false, 0};
3463 struct dc_link_settings current_link_setting =
3464 initial_link_setting;
3467 if (req_bw > dc_link_bandwidth_kbps(link, &link->verified_link_cap))
3470 /* search for the minimum link setting that:
3471 * 1. is supported according to the link training result
3472 * 2. could support the b/w requested by the timing
3474 while (current_link_setting.link_rate <=
3475 link->verified_link_cap.link_rate) {
3476 link_bw = dc_link_bandwidth_kbps(
3478 ¤t_link_setting);
3479 if (req_bw <= link_bw) {
3480 *link_setting = current_link_setting;
3484 if (current_link_setting.lane_count <
3485 link->verified_link_cap.lane_count) {
3486 current_link_setting.lane_count =
3487 increase_lane_count(
3488 current_link_setting.lane_count);
3490 current_link_setting.link_rate =
3492 current_link_setting.link_rate);
3493 current_link_setting.lane_count =
3494 initial_link_setting.lane_count;
3501 bool decide_edp_link_settings(struct dc_link *link, struct dc_link_settings *link_setting, uint32_t req_bw)
3503 struct dc_link_settings initial_link_setting;
3504 struct dc_link_settings current_link_setting;
3508 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
3509 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
3511 if (link->dpcd_caps.dpcd_rev.raw < DPCD_REV_13 ||
3512 link->dpcd_caps.edp_supported_link_rates_count == 0) {
3513 *link_setting = link->verified_link_cap;
3517 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
3518 initial_link_setting.lane_count = LANE_COUNT_ONE;
3519 initial_link_setting.link_rate = link->dpcd_caps.edp_supported_link_rates[0];
3520 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
3521 initial_link_setting.use_link_rate_set = true;
3522 initial_link_setting.link_rate_set = 0;
3523 current_link_setting = initial_link_setting;
3525 /* search for the minimum link setting that:
3526 * 1. is supported according to the link training result
3527 * 2. could support the b/w requested by the timing
3529 while (current_link_setting.link_rate <=
3530 link->verified_link_cap.link_rate) {
3531 link_bw = dc_link_bandwidth_kbps(
3533 ¤t_link_setting);
3534 if (req_bw <= link_bw) {
3535 *link_setting = current_link_setting;
3539 if (current_link_setting.lane_count <
3540 link->verified_link_cap.lane_count) {
3541 current_link_setting.lane_count =
3542 increase_lane_count(
3543 current_link_setting.lane_count);
3545 if (current_link_setting.link_rate_set < link->dpcd_caps.edp_supported_link_rates_count) {
3546 current_link_setting.link_rate_set++;
3547 current_link_setting.link_rate =
3548 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
3549 current_link_setting.lane_count =
3550 initial_link_setting.lane_count;
3558 static bool decide_edp_link_settings_with_dsc(struct dc_link *link,
3559 struct dc_link_settings *link_setting,
3561 enum dc_link_rate max_link_rate)
3563 struct dc_link_settings initial_link_setting;
3564 struct dc_link_settings current_link_setting;
3567 unsigned int policy = 0;
3569 policy = link->ctx->dc->debug.force_dsc_edp_policy;
3570 if (max_link_rate == LINK_RATE_UNKNOWN)
3571 max_link_rate = link->verified_link_cap.link_rate;
3573 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
3574 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
3576 if ((link->dpcd_caps.dpcd_rev.raw < DPCD_REV_13 ||
3577 link->dpcd_caps.edp_supported_link_rates_count == 0)) {
3578 /* for DSC enabled case, we search for minimum lane count */
3579 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
3580 initial_link_setting.lane_count = LANE_COUNT_ONE;
3581 initial_link_setting.link_rate = LINK_RATE_LOW;
3582 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
3583 initial_link_setting.use_link_rate_set = false;
3584 initial_link_setting.link_rate_set = 0;
3585 current_link_setting = initial_link_setting;
3586 if (req_bw > dc_link_bandwidth_kbps(link, &link->verified_link_cap))
3589 /* search for the minimum link setting that:
3590 * 1. is supported according to the link training result
3591 * 2. could support the b/w requested by the timing
3593 while (current_link_setting.link_rate <=
3595 link_bw = dc_link_bandwidth_kbps(
3597 ¤t_link_setting);
3598 if (req_bw <= link_bw) {
3599 *link_setting = current_link_setting;
3604 if (current_link_setting.link_rate < max_link_rate) {
3605 current_link_setting.link_rate =
3607 current_link_setting.link_rate);
3609 if (current_link_setting.lane_count <
3610 link->verified_link_cap.lane_count) {
3611 current_link_setting.lane_count =
3612 increase_lane_count(
3613 current_link_setting.lane_count);
3614 current_link_setting.link_rate = initial_link_setting.link_rate;
3619 /* minimize link rate */
3620 if (current_link_setting.lane_count <
3621 link->verified_link_cap.lane_count) {
3622 current_link_setting.lane_count =
3623 increase_lane_count(
3624 current_link_setting.lane_count);
3626 current_link_setting.link_rate =
3628 current_link_setting.link_rate);
3629 current_link_setting.lane_count =
3630 initial_link_setting.lane_count;
3637 /* if optimize edp link is supported */
3638 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
3639 initial_link_setting.lane_count = LANE_COUNT_ONE;
3640 initial_link_setting.link_rate = link->dpcd_caps.edp_supported_link_rates[0];
3641 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
3642 initial_link_setting.use_link_rate_set = true;
3643 initial_link_setting.link_rate_set = 0;
3644 current_link_setting = initial_link_setting;
3646 /* search for the minimum link setting that:
3647 * 1. is supported according to the link training result
3648 * 2. could support the b/w requested by the timing
3650 while (current_link_setting.link_rate <=
3652 link_bw = dc_link_bandwidth_kbps(
3654 ¤t_link_setting);
3655 if (req_bw <= link_bw) {
3656 *link_setting = current_link_setting;
3661 if (current_link_setting.link_rate_set <
3662 link->dpcd_caps.edp_supported_link_rates_count
3663 && current_link_setting.link_rate < max_link_rate) {
3664 current_link_setting.link_rate_set++;
3665 current_link_setting.link_rate =
3666 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
3668 if (current_link_setting.lane_count < link->verified_link_cap.lane_count) {
3669 current_link_setting.lane_count =
3670 increase_lane_count(
3671 current_link_setting.lane_count);
3672 current_link_setting.link_rate_set = initial_link_setting.link_rate_set;
3673 current_link_setting.link_rate =
3674 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
3679 /* minimize link rate */
3680 if (current_link_setting.lane_count <
3681 link->verified_link_cap.lane_count) {
3682 current_link_setting.lane_count =
3683 increase_lane_count(
3684 current_link_setting.lane_count);
3686 if (current_link_setting.link_rate_set < link->dpcd_caps.edp_supported_link_rates_count) {
3687 current_link_setting.link_rate_set++;
3688 current_link_setting.link_rate =
3689 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
3690 current_link_setting.lane_count =
3691 initial_link_setting.lane_count;
3700 static bool decide_mst_link_settings(const struct dc_link *link, struct dc_link_settings *link_setting)
3702 *link_setting = link->verified_link_cap;
/*
 * Top-level link-setting selection for a stream. Priority:
 *   1) an explicit preferred setting (set via debugfs/AMDDP),
 *   2) MST: verified cap, 3) eDP (with or without DSC), 4) regular DP.
 * Falls back to the verified link capability (with a debugger break)
 * if no decide_* helper succeeds.
 */
3706 void decide_link_settings(struct dc_stream_state *stream,
3707 struct dc_link_settings *link_setting)
3709 struct dc_link *link;
3712 req_bw = dc_bandwidth_in_kbps_from_timing(&stream->timing);
3714 link = stream->link;
3716 /* if preferred is specified through AMDDP, use it, if it's enough
3719 if (link->preferred_link_setting.lane_count !=
3720 LANE_COUNT_UNKNOWN &&
3721 link->preferred_link_setting.link_rate !=
3722 LINK_RATE_UNKNOWN) {
3723 *link_setting = link->preferred_link_setting;
3727 /* MST doesn't perform link training for now
3728 * TODO: add MST specific link training routine
3730 if (stream->signal == SIGNAL_TYPE_DISPLAY_PORT_MST) {
3731 if (decide_mst_link_settings(link, link_setting))
3733 } else if (link->connector_signal == SIGNAL_TYPE_EDP) {
3734 /* enable edp link optimization for DSC eDP case */
3735 if (stream->timing.flags.DSC) {
3736 enum dc_link_rate max_link_rate = LINK_RATE_UNKNOWN;
3738 if (link->ctx->dc->debug.force_dsc_edp_policy) {
/* Derive the rate cap from the same timing with DSC disabled, so the
 * DSC search cannot pick a rate above what the uncompressed mode needs.
 */
3739 /* calculate link max link rate cap*/
3740 struct dc_link_settings tmp_link_setting;
3741 struct dc_crtc_timing tmp_timing = stream->timing;
3742 uint32_t orig_req_bw;
3744 tmp_link_setting.link_rate = LINK_RATE_UNKNOWN;
3745 tmp_timing.flags.DSC = 0;
3746 orig_req_bw = dc_bandwidth_in_kbps_from_timing(&tmp_timing);
3747 decide_edp_link_settings(link, &tmp_link_setting, orig_req_bw);
3748 max_link_rate = tmp_link_setting.link_rate;
3750 if (decide_edp_link_settings_with_dsc(link, link_setting, req_bw, max_link_rate))
3752 } else if (decide_edp_link_settings(link, link_setting, req_bw))
3754 } else if (decide_dp_link_settings(link, link_setting, req_bw))
/* No helper could satisfy req_bw: flag it and fall back to verified cap. */
3757 BREAK_TO_DEBUGGER();
3758 ASSERT(link->verified_link_cap.lane_count != LANE_COUNT_UNKNOWN);
3760 *link_setting = link->verified_link_cap;
3763 /*************************Short Pulse IRQ***************************/
3764 bool dc_link_dp_allow_hpd_rx_irq(const struct dc_link *link)
3767 * Don't handle RX IRQ unless one of following is met:
3768 * 1) The link is established (cur_link_settings != unknown)
3769 * 2) We know we're dealing with a branch device, SST or MST
3772 if ((link->cur_link_settings.lane_count != LANE_COUNT_UNKNOWN) ||
3773 is_dp_branch_device(link))
/*
 * Service a short-pulse IRQ for a PSR-enabled sink. Reads the sink's PSR
 * configuration and status over DPCD; on a PSR error, clears the error
 * bits and toggles PSR off/on. Returns true when the IRQ was fully
 * handled here (error recovered, or sink confirmed active in self-
 * refresh), so the caller should skip normal link-loss handling.
 */
3779 static bool handle_hpd_irq_psr_sink(struct dc_link *link)
3781 union dpcd_psr_configuration psr_configuration;
3783 if (!link->psr_settings.psr_feature_enabled)
3786 dm_helpers_dp_read_dpcd(
/* 368 == 0x170, the DP_PSR_EN_CFG DPCD address. */
3789 368,/*DpcdAddress_PSR_Enable_Cfg*/
3790 &psr_configuration.raw,
3791 sizeof(psr_configuration.raw))
3794 if (psr_configuration.bits.ENABLE) {
3795 unsigned char dpcdbuf[3] = {0};
3796 union psr_error_status psr_error_status;
3797 union psr_sink_psr_status psr_sink_psr_status;
/* Burst-read 0x2006..0x2008: error status, (skipped 0x2007), sink status. */
3799 dm_helpers_dp_read_dpcd(
3802 0x2006, /*DpcdAddress_PSR_Error_Status*/
3803 (unsigned char *) dpcdbuf,
3806 /*DPCD 2006h ERROR STATUS*/
3807 psr_error_status.raw = dpcdbuf[0];
3808 /*DPCD 2008h SINK PANEL SELF REFRESH STATUS*/
3809 psr_sink_psr_status.raw = dpcdbuf[2];
3811 if (psr_error_status.bits.LINK_CRC_ERROR ||
3812 psr_error_status.bits.RFB_STORAGE_ERROR ||
3813 psr_error_status.bits.VSC_SDP_ERROR) {
3816 /* Acknowledge and clear error bits */
3817 dm_helpers_dp_write_dpcd(
/* 8198 == 0x2006 again (decimal form of DP_PSR_ERROR_STATUS). */
3820 8198,/*DpcdAddress_PSR_Error_Status*/
3821 &psr_error_status.raw,
3822 sizeof(psr_error_status.raw));
3824 /* PSR error, disable and re-enable PSR */
3825 allow_active = false;
3826 dc_link_set_psr_allow_active(link, &allow_active, true, false, NULL);
3827 allow_active = true;
3828 dc_link_set_psr_allow_active(link, &allow_active, true, false, NULL);
3831 } else if (psr_sink_psr_status.bits.SINK_SELF_REFRESH_STATUS ==
3832 PSR_SINK_STATE_ACTIVE_DISPLAY_FROM_SINK_RFB){
3833 /* No error is detect, PSR is active.
3834 * We should return with IRQ_HPD handled without
3835 * checking for loss of sync since PSR would have
3836 * powered down main link.
/*
 * Automated-test handler: read the lane count and link rate requested by
 * the DP test box from DPCD (addresses elided in this extract -- presumably
 * DP_TEST_LANE_COUNT / DP_TEST_LINK_RATE, confirm against full source),
 * adopt them as the verified capability, and retrain the link with them.
 */
3844 static void dp_test_send_link_training(struct dc_link *link)
3846 struct dc_link_settings link_settings = {0};
3848 core_link_read_dpcd(
3851 (unsigned char *)(&link_settings.lane_count),
3853 core_link_read_dpcd(
3856 (unsigned char *)(&link_settings.link_rate),
3859 /* Set preferred link settings */
3860 link->verified_link_cap.lane_count = link_settings.lane_count;
3861 link->verified_link_cap.link_rate = link_settings.link_rate;
3863 dp_retrain_link_dp_test(link, &link_settings, false);
3866 /* TODO Raven hbr2 compliance eye output is unstable
3867 * (toggling on and off) with debugger break
3868 * This caueses intermittent PHY automation failure
3869 * Need to look into the root cause */
/*
 * Automated-test handler for PHY-layer test patterns: read the requested
 * pattern and per-lane drive parameters from the sink's DPCD, translate
 * them into the driver's internal test-pattern and lane-setting types,
 * then program the pattern via dc_link_dp_set_test_pattern().
 */
3870 static void dp_test_send_phy_test_pattern(struct dc_link *link)
3872 union phy_test_pattern dpcd_test_pattern;
3873 union lane_adjust dpcd_lane_adjustment[2];
3874 unsigned char dpcd_post_cursor_2_adjustment = 0;
/* Custom-pattern buffer: 264 bits for 128b/132b builds, else 80 bits. */
3875 #if defined(CONFIG_DRM_AMD_DC_DCN)
3876 unsigned char test_pattern_buffer[
3877 (DP_TEST_264BIT_CUSTOM_PATTERN_263_256 -
3878 DP_TEST_264BIT_CUSTOM_PATTERN_7_0)+1] = {0};
3880 unsigned char test_pattern_buffer[
3881 (DP_TEST_80BIT_CUSTOM_PATTERN_79_72 -
3882 DP_TEST_80BIT_CUSTOM_PATTERN_7_0)+1] = {0};
3884 unsigned int test_pattern_size = 0;
3885 enum dp_test_pattern test_pattern;
3886 union lane_adjust dpcd_lane_adjust;
3888 struct link_training_settings link_training_settings;
3890 dpcd_test_pattern.raw = 0;
3891 memset(dpcd_lane_adjustment, 0, sizeof(dpcd_lane_adjustment));
3892 memset(&link_training_settings, 0, sizeof(link_training_settings));
3894 /* get phy test pattern and pattern parameters from DP receiver */
3895 core_link_read_dpcd(
3897 DP_PHY_TEST_PATTERN,
3898 &dpcd_test_pattern.raw,
3899 sizeof(dpcd_test_pattern));
3900 core_link_read_dpcd(
3902 DP_ADJUST_REQUEST_LANE0_1,
3903 &dpcd_lane_adjustment[0].raw,
3904 sizeof(dpcd_lane_adjustment));
3906 /*get post cursor 2 parameters
3907 * For DP 1.1a or eariler, this DPCD register's value is 0
3908 * For DP 1.2 or later:
3909 * Bits 1:0 = POST_CURSOR2_LANE0; Bits 3:2 = POST_CURSOR2_LANE1
3910 * Bits 5:4 = POST_CURSOR2_LANE2; Bits 7:6 = POST_CURSOR2_LANE3
3912 core_link_read_dpcd(
3914 DP_ADJUST_REQUEST_POST_CURSOR2,
3915 &dpcd_post_cursor_2_adjustment,
3916 sizeof(dpcd_post_cursor_2_adjustment));
3918 /* translate request */
3919 switch (dpcd_test_pattern.bits.PATTERN) {
3920 case PHY_TEST_PATTERN_D10_2:
3921 test_pattern = DP_TEST_PATTERN_D102;
3923 case PHY_TEST_PATTERN_SYMBOL_ERROR:
3924 test_pattern = DP_TEST_PATTERN_SYMBOL_ERROR;
3926 case PHY_TEST_PATTERN_PRBS7:
3927 test_pattern = DP_TEST_PATTERN_PRBS7;
3929 case PHY_TEST_PATTERN_80BIT_CUSTOM:
3930 test_pattern = DP_TEST_PATTERN_80BIT_CUSTOM;
3932 case PHY_TEST_PATTERN_CP2520_1:
3933 /* CP2520 pattern is unstable, temporarily use TPS4 instead */
3934 test_pattern = (link->dc->caps.force_dp_tps4_for_cp2520 == 1) ?
3935 DP_TEST_PATTERN_TRAINING_PATTERN4 :
3936 DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE;
3938 case PHY_TEST_PATTERN_CP2520_2:
3939 /* CP2520 pattern is unstable, temporarily use TPS4 instead */
3940 test_pattern = (link->dc->caps.force_dp_tps4_for_cp2520 == 1) ?
3941 DP_TEST_PATTERN_TRAINING_PATTERN4 :
3942 DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE;
3944 case PHY_TEST_PATTERN_CP2520_3:
3945 test_pattern = DP_TEST_PATTERN_TRAINING_PATTERN4;
/* 128b/132b (DP 2.x) pattern requests. */
3947 #if defined(CONFIG_DRM_AMD_DC_DCN)
3948 case PHY_TEST_PATTERN_128b_132b_TPS1:
3949 test_pattern = DP_TEST_PATTERN_128b_132b_TPS1;
3951 case PHY_TEST_PATTERN_128b_132b_TPS2:
3952 test_pattern = DP_TEST_PATTERN_128b_132b_TPS2;
3954 case PHY_TEST_PATTERN_PRBS9:
3955 test_pattern = DP_TEST_PATTERN_PRBS9;
3957 case PHY_TEST_PATTERN_PRBS11:
3958 test_pattern = DP_TEST_PATTERN_PRBS11;
3960 case PHY_TEST_PATTERN_PRBS15:
3961 test_pattern = DP_TEST_PATTERN_PRBS15;
3963 case PHY_TEST_PATTERN_PRBS23:
3964 test_pattern = DP_TEST_PATTERN_PRBS23;
3966 case PHY_TEST_PATTERN_PRBS31:
3967 test_pattern = DP_TEST_PATTERN_PRBS31;
3969 case PHY_TEST_PATTERN_264BIT_CUSTOM:
3970 test_pattern = DP_TEST_PATTERN_264BIT_CUSTOM;
3972 case PHY_TEST_PATTERN_SQUARE_PULSE:
3973 test_pattern = DP_TEST_PATTERN_SQUARE_PULSE;
3977 test_pattern = DP_TEST_PATTERN_VIDEO_MODE;
/* Custom patterns carry payload data that must also be fetched. */
3981 if (test_pattern == DP_TEST_PATTERN_80BIT_CUSTOM) {
3982 test_pattern_size = (DP_TEST_80BIT_CUSTOM_PATTERN_79_72 -
3983 DP_TEST_80BIT_CUSTOM_PATTERN_7_0) + 1;
3984 core_link_read_dpcd(
3986 DP_TEST_80BIT_CUSTOM_PATTERN_7_0,
3987 test_pattern_buffer,
3991 #if defined(CONFIG_DRM_AMD_DC_DCN)
3992 if (test_pattern == DP_TEST_PATTERN_SQUARE_PULSE) {
3993 test_pattern_size = 1; // Square pattern data is 1 byte (DP spec)
3994 core_link_read_dpcd(
3996 DP_PHY_SQUARE_PATTERN,
3997 test_pattern_buffer,
4001 if (test_pattern == DP_TEST_PATTERN_264BIT_CUSTOM) {
4002 test_pattern_size = (DP_TEST_264BIT_CUSTOM_PATTERN_263_256-
4003 DP_TEST_264BIT_CUSTOM_PATTERN_7_0) + 1;
4004 core_link_read_dpcd(
4006 DP_TEST_264BIT_CUSTOM_PATTERN_7_0,
4007 test_pattern_buffer,
4012 /* prepare link training settings */
4013 link_training_settings.link_settings = link->cur_link_settings;
/* Per lane: unpack the requested swing/pre-emphasis (8b/10b) or FFE
 * preset (128b/132b) from the adjust-request nibbles.
 */
4015 for (lane = 0; lane <
4016 (unsigned int)(link->cur_link_settings.lane_count);
4018 dpcd_lane_adjust.raw =
4019 get_nibble_at_index(&dpcd_lane_adjustment[0].raw, lane);
4020 if (dp_get_link_encoding_format(&link->cur_link_settings) ==
4021 DP_8b_10b_ENCODING) {
4022 link_training_settings.hw_lane_settings[lane].VOLTAGE_SWING =
4023 (enum dc_voltage_swing)
4024 (dpcd_lane_adjust.bits.VOLTAGE_SWING_LANE);
4025 link_training_settings.hw_lane_settings[lane].PRE_EMPHASIS =
4026 (enum dc_pre_emphasis)
4027 (dpcd_lane_adjust.bits.PRE_EMPHASIS_LANE);
4028 link_training_settings.hw_lane_settings[lane].POST_CURSOR2 =
4029 (enum dc_post_cursor2)
4030 ((dpcd_post_cursor_2_adjustment >> (lane * 2)) & 0x03);
4032 #if defined(CONFIG_DRM_AMD_DC_DCN)
4033 else if (dp_get_link_encoding_format(&link->cur_link_settings) ==
4034 DP_128b_132b_ENCODING) {
4035 link_training_settings.hw_lane_settings[lane].FFE_PRESET.raw =
4036 dpcd_lane_adjust.tx_ffe.PRESET_VALUE;
4041 dp_hw_to_dpcd_lane_settings(&link_training_settings,
4042 link_training_settings.hw_lane_settings,
4043 link_training_settings.dpcd_lane_settings);
4044 /*Usage: Measure DP physical lane signal
4045 * by DP SI test equipment automatically.
4046 * PHY test pattern request is generated by equipment via HPD interrupt.
4047 * HPD needs to be active all the time. HPD should be active
4048 * all the time. Do not touch it.
4049 * forward request to DS
4051 dc_link_dp_set_test_pattern(
4054 DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED,
4055 &link_training_settings,
4056 test_pattern_buffer,
/*
 * Automated-test handler for link-layer (video) test patterns: read the
 * requested pattern, color space, bit depth and pixel encoding from
 * DPCD, retime the first top-level pipe driven by this link accordingly,
 * and program the pattern via dc_link_dp_set_test_pattern().
 */
4060 static void dp_test_send_link_test_pattern(struct dc_link *link)
4062 union link_test_pattern dpcd_test_pattern;
4063 union test_misc dpcd_test_params;
4064 enum dp_test_pattern test_pattern;
4065 enum dp_test_pattern_color_space test_pattern_color_space =
4066 DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED;
4067 enum dc_color_depth requestColorDepth = COLOR_DEPTH_UNDEFINED;
4068 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
4069 struct pipe_ctx *pipe_ctx = NULL;
4072 memset(&dpcd_test_pattern, 0, sizeof(dpcd_test_pattern));
4073 memset(&dpcd_test_params, 0, sizeof(dpcd_test_params));
/* Find the first non-split (top, non-ODM) pipe driving this link. */
4075 for (i = 0; i < MAX_PIPES; i++) {
4076 if (pipes[i].stream == NULL)
4079 if (pipes[i].stream->link == link && !pipes[i].top_pipe && !pipes[i].prev_odm_pipe) {
4080 pipe_ctx = &pipes[i];
4085 if (pipe_ctx == NULL)
4088 /* get link test pattern and pattern parameters */
4089 core_link_read_dpcd(
4092 &dpcd_test_pattern.raw,
4093 sizeof(dpcd_test_pattern));
4094 core_link_read_dpcd(
4097 &dpcd_test_params.raw,
4098 sizeof(dpcd_test_params));
4100 switch (dpcd_test_pattern.bits.PATTERN) {
4101 case LINK_TEST_PATTERN_COLOR_RAMP:
4102 test_pattern = DP_TEST_PATTERN_COLOR_RAMP;
4104 case LINK_TEST_PATTERN_VERTICAL_BARS:
4105 test_pattern = DP_TEST_PATTERN_VERTICAL_BARS;
4106 break; /* black and white */
4107 case LINK_TEST_PATTERN_COLOR_SQUARES:
4108 test_pattern = (dpcd_test_params.bits.DYN_RANGE ==
4109 TEST_DYN_RANGE_VESA ?
4110 DP_TEST_PATTERN_COLOR_SQUARES :
4111 DP_TEST_PATTERN_COLOR_SQUARES_CEA);
4114 test_pattern = DP_TEST_PATTERN_VIDEO_MODE;
/* CLR_FORMAT 0 is RGB; otherwise pick YCbCr 601/709 per the coefs bit. */
4118 if (dpcd_test_params.bits.CLR_FORMAT == 0)
4119 test_pattern_color_space = DP_TEST_PATTERN_COLOR_SPACE_RGB;
4121 test_pattern_color_space = dpcd_test_params.bits.YCBCR_COEFS ?
4122 DP_TEST_PATTERN_COLOR_SPACE_YCBCR709 :
4123 DP_TEST_PATTERN_COLOR_SPACE_YCBCR601;
/* Requested bits-per-component (case labels elided in this extract). */
4125 switch (dpcd_test_params.bits.BPC) {
4127 requestColorDepth = COLOR_DEPTH_666;
4130 requestColorDepth = COLOR_DEPTH_888;
4133 requestColorDepth = COLOR_DEPTH_101010;
4136 requestColorDepth = COLOR_DEPTH_121212;
/* Requested pixel encoding (case labels elided in this extract). */
4142 switch (dpcd_test_params.bits.CLR_FORMAT) {
4144 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_RGB;
4147 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_YCBCR422;
4150 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_YCBCR444;
4153 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_RGB;
/* Apply a color-depth change and keep any DSC config consistent. */
4158 if (requestColorDepth != COLOR_DEPTH_UNDEFINED
4159 && pipe_ctx->stream->timing.display_color_depth != requestColorDepth) {
4160 DC_LOG_DEBUG("%s: original bpc %d, changing to %d\n",
4162 pipe_ctx->stream->timing.display_color_depth,
4164 pipe_ctx->stream->timing.display_color_depth = requestColorDepth;
4167 dp_update_dsc_config(pipe_ctx);
4169 dc_link_dp_set_test_pattern(
4172 test_pattern_color_space,
/*
 * Automated-test handler for audio patterns: read the audio test mode,
 * pattern type, and (for sawtooth/operator-defined patterns) per-channel
 * periods from DPCD, then stash the decoded request in
 * link->audio_test_data for the DM layer to act on.
 */
4178 static void dp_test_get_audio_test_data(struct dc_link *link, bool disable_video)
4180 union audio_test_mode dpcd_test_mode = {0};
4181 struct audio_test_pattern_type dpcd_pattern_type = {0};
4182 union audio_test_pattern_period dpcd_pattern_period[AUDIO_CHANNELS_COUNT] = {0};
4183 enum dp_test_pattern test_pattern = DP_TEST_PATTERN_AUDIO_OPERATOR_DEFINED;
4185 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
/* NOTE(review): uses pipes[0] unconditionally, unlike the video-pattern
 * handler which searches for the pipe driving this link -- confirm intent.
 */
4186 struct pipe_ctx *pipe_ctx = &pipes[0];
4187 unsigned int channel_count;
4188 unsigned int channel = 0;
4189 unsigned int modes = 0;
4190 unsigned int sampling_rate_in_hz = 0;
4192 // get audio test mode and test pattern parameters
4193 core_link_read_dpcd(
4196 &dpcd_test_mode.raw,
4197 sizeof(dpcd_test_mode));
4199 core_link_read_dpcd(
4201 DP_TEST_AUDIO_PATTERN_TYPE,
4202 &dpcd_pattern_type.value,
4203 sizeof(dpcd_pattern_type));
/* DPCD encodes channel count as (count - 1). */
4205 channel_count = dpcd_test_mode.bits.channel_count + 1;
4207 // read pattern periods for requested channels when sawTooth pattern is requested
4208 if (dpcd_pattern_type.value == AUDIO_TEST_PATTERN_SAWTOOTH ||
4209 dpcd_pattern_type.value == AUDIO_TEST_PATTERN_OPERATOR_DEFINED) {
4211 test_pattern = (dpcd_pattern_type.value == AUDIO_TEST_PATTERN_SAWTOOTH) ?
4212 DP_TEST_PATTERN_AUDIO_SAWTOOTH : DP_TEST_PATTERN_AUDIO_OPERATOR_DEFINED;
4213 // read period for each channel
4214 for (channel = 0; channel < channel_count; channel++) {
4215 core_link_read_dpcd(
4217 DP_TEST_AUDIO_PERIOD_CH1 + channel,
4218 &dpcd_pattern_period[channel].raw,
4219 sizeof(dpcd_pattern_period[channel]));
4223 // translate sampling rate
4224 switch (dpcd_test_mode.bits.sampling_rate) {
4225 case AUDIO_SAMPLING_RATE_32KHZ:
4226 sampling_rate_in_hz = 32000;
4228 case AUDIO_SAMPLING_RATE_44_1KHZ:
4229 sampling_rate_in_hz = 44100;
4231 case AUDIO_SAMPLING_RATE_48KHZ:
4232 sampling_rate_in_hz = 48000;
4234 case AUDIO_SAMPLING_RATE_88_2KHZ:
4235 sampling_rate_in_hz = 88200;
4237 case AUDIO_SAMPLING_RATE_96KHZ:
4238 sampling_rate_in_hz = 96000;
4240 case AUDIO_SAMPLING_RATE_176_4KHZ:
4241 sampling_rate_in_hz = 176400;
4243 case AUDIO_SAMPLING_RATE_192KHZ:
4244 sampling_rate_in_hz = 192000;
4247 sampling_rate_in_hz = 0;
/* Publish the decoded request for the DM layer. */
4251 link->audio_test_data.flags.test_requested = 1;
4252 link->audio_test_data.flags.disable_video = disable_video;
4253 link->audio_test_data.sampling_rate = sampling_rate_in_hz;
4254 link->audio_test_data.channel_count = channel_count;
4255 link->audio_test_data.pattern_type = test_pattern;
4257 if (test_pattern == DP_TEST_PATTERN_AUDIO_SAWTOOTH) {
4258 for (modes = 0; modes < pipe_ctx->stream->audio_info.mode_count; modes++) {
4259 link->audio_test_data.pattern_period[modes] = dpcd_pattern_period[modes].bits.pattern_period;
/*
 * Dispatch a sink-initiated automated-test request (DP_TEST_REQUEST):
 * link training, link/video pattern, audio pattern, or PHY pattern.
 * Writes an ACK to DP_TEST_RESPONSE when any request was handled.
 */
4264 void dc_link_dp_handle_automated_test(struct dc_link *link)
4266 union test_request test_request;
4267 union test_response test_response;
4269 memset(&test_request, 0, sizeof(test_request));
4270 memset(&test_response, 0, sizeof(test_response));
4272 core_link_read_dpcd(
4276 sizeof(union test_request));
4277 if (test_request.bits.LINK_TRAINING) {
4278 /* ACK first to let DP RX test box monitor LT sequence */
4279 test_response.bits.ACK = 1;
4280 core_link_write_dpcd(
4284 sizeof(test_response));
4285 dp_test_send_link_training(link);
4286 /* no acknowledge request is needed again */
4287 test_response.bits.ACK = 0;
4289 if (test_request.bits.LINK_TEST_PATTRN) {
4290 dp_test_send_link_test_pattern(link);
4291 test_response.bits.ACK = 1;
4294 if (test_request.bits.AUDIO_TEST_PATTERN) {
4295 dp_test_get_audio_test_data(link, test_request.bits.TEST_AUDIO_DISABLED_VIDEO);
4296 test_response.bits.ACK = 1;
4299 if (test_request.bits.PHY_TEST_PATTERN) {
4300 dp_test_send_phy_test_pattern(link);
4301 test_response.bits.ACK = 1;
4304 /* send request acknowledgment */
4305 if (test_response.bits.ACK)
4306 core_link_write_dpcd(
4310 sizeof(test_response));
/*
 * Recover from a detected link loss: disable then re-enable every
 * active, non-ODM stream pipe driven by this link, which retrains the
 * link as part of stream re-enable. Bails out early if no pipe on this
 * link has a stream.
 */
4313 void dc_link_dp_handle_link_loss(struct dc_link *link)
4316 struct pipe_ctx *pipe_ctx;
/* First pass: check that at least one pipe is driven by this link. */
4318 for (i = 0; i < MAX_PIPES; i++) {
4319 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
4320 if (pipe_ctx && pipe_ctx->stream && pipe_ctx->stream->link == link)
4324 if (pipe_ctx == NULL || pipe_ctx->stream == NULL)
/* Disable all affected streams before re-enabling any of them. */
4327 for (i = 0; i < MAX_PIPES; i++) {
4328 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
4329 if (pipe_ctx && pipe_ctx->stream && !pipe_ctx->stream->dpms_off &&
4330 pipe_ctx->stream->link == link && !pipe_ctx->prev_odm_pipe) {
4331 core_link_disable_stream(pipe_ctx);
4335 for (i = 0; i < MAX_PIPES; i++) {
4336 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
4337 if (pipe_ctx && pipe_ctx->stream && !pipe_ctx->stream->dpms_off &&
4338 pipe_ctx->stream->link == link && !pipe_ctx->prev_odm_pipe) {
4339 core_link_enable_stream(link->dc->current_state, pipe_ctx);
/*
 * Main short-pulse (HPD RX) IRQ handler. Reads the IRQ data from DPCD
 * and dispatches, in order: automated test, PSR sink status, MST
 * up/down-request messages, and link loss (non-eDP only). When
 * defer_handling is set, work is flagged through *has_left_work instead
 * of being executed inline. *out_link_loss is set when link loss was
 * detected; *out_hpd_irq_dpcd_data (optional) receives the raw IRQ data.
 */
4344 bool dc_link_handle_hpd_rx_irq(struct dc_link *link, union hpd_irq_data *out_hpd_irq_dpcd_data, bool *out_link_loss,
4345 bool defer_handling, bool *has_left_work)
4347 union hpd_irq_data hpd_irq_dpcd_data = {0};
4348 union device_service_irq device_service_clear = {0};
4349 enum dc_status result;
4350 bool status = false;
4353 *out_link_loss = false;
4356 *has_left_work = false;
4357 /* For use cases related to down stream connection status change,
4358 * PSR and device auto test, refer to function handle_sst_hpd_irq
4361 DC_LOG_HW_HPD_IRQ("%s: Got short pulse HPD on link %d\n",
4362 __func__, link->link_index);
4365 /* All the "handle_hpd_irq_xxx()" methods
4366 * should be called only after
4367 * dal_dpsst_ls_read_hpd_irq_data
4368 * Order of calls is important too
4370 result = read_hpd_rx_irq_data(link, &hpd_irq_dpcd_data);
4371 if (out_hpd_irq_dpcd_data)
4372 *out_hpd_irq_dpcd_data = hpd_irq_dpcd_data;
4374 if (result != DC_OK) {
4375 DC_LOG_HW_HPD_IRQ("%s: DPCD read failed to obtain irq data\n",
/* Automated test: ack/clear the IRQ bit, then run or defer the test. */
4380 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.AUTOMATED_TEST) {
4381 device_service_clear.bits.AUTOMATED_TEST = 1;
4382 core_link_write_dpcd(
4384 DP_DEVICE_SERVICE_IRQ_VECTOR,
4385 &device_service_clear.raw,
4386 sizeof(device_service_clear.raw));
4387 device_service_clear.raw = 0;
4388 if (defer_handling && has_left_work)
4389 *has_left_work = true;
4391 dc_link_dp_handle_automated_test(link);
4395 if (!dc_link_dp_allow_hpd_rx_irq(link)) {
4396 DC_LOG_HW_HPD_IRQ("%s: skipping HPD handling on %d\n",
4397 __func__, link->link_index);
4401 if (handle_hpd_irq_psr_sink(link))
4402 /* PSR-related error was detected and handled */
4405 /* If PSR-related error handled, Main link may be off,
4406 * so do not handle as a normal sink status change interrupt.
4409 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.UP_REQ_MSG_RDY) {
4410 if (defer_handling && has_left_work)
4411 *has_left_work = true;
4415 /* check if we have MST msg and return since we poll for it */
4416 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.DOWN_REP_MSG_RDY) {
4417 if (defer_handling && has_left_work)
4418 *has_left_work = true;
4422 /* For now we only handle 'Downstream port status' case.
4423 * If we got sink count changed it means
4424 * Downstream port status changed,
4425 * then DM should call DC to do the detection.
4426 * NOTE: Do not handle link loss on eDP since it is internal link*/
4427 if ((link->connector_signal != SIGNAL_TYPE_EDP) &&
4428 hpd_rx_irq_check_link_loss_status(
4430 &hpd_irq_dpcd_data)) {
4431 /* Connectivity log: link loss */
4432 CONN_DATA_LINK_LOSS(link,
4433 hpd_irq_dpcd_data.raw,
4434 sizeof(hpd_irq_dpcd_data),
4437 if (defer_handling && has_left_work)
4438 *has_left_work = true;
4440 dc_link_dp_handle_link_loss(link);
4444 *out_link_loss = true;
/* SST branch: a sink-count change means downstream status changed. */
4447 if (link->type == dc_connection_sst_branch &&
4448 hpd_irq_dpcd_data.bytes.sink_cnt.bits.SINK_COUNT
4449 != link->dpcd_sink_count)
4452 /* reasons for HPD RX:
4453 * 1. Link Loss - ie Re-train the Link
4454 * 2. MST sideband message
4455 * 3. Automated Test - ie. Internal Commit
4456 * 4. CP (copy protection) - (not interesting for DM???)
4458 * 6. Downstream Port status changed
4459 * -ie. Detect - this the only one
4460 * which is interesting for DM because
4461 * it must call dc_link_detect.
4466 /*query dpcd for version and mst cap addresses*/
/* is_mst_supported() - report whether the attached sink supports DP MST.
 *
 * An explicit preferred-training-settings override can force MST off.
 * Otherwise DP_DPCD_REV is read: only DPCD rev 1.2+ sinks may support MST,
 * in which case DP_MSTM_CAP is read and its MST_CAP bit decides the answer.
 */
4467 bool is_mst_supported(struct dc_link *link)
4470 enum dc_status st = DC_OK;
/* Honor a caller-provided override that disables MST regardless of caps. */
4474 if (link->preferred_training_settings.mst_enable &&
4475 *link->preferred_training_settings.mst_enable == false) {
/* Read the DPCD revision first; MST requires DPCD 1.2 or later. */
4482 st = core_link_read_dpcd(link, DP_DPCD_REV, &rev.raw,
4485 if (st == DC_OK && rev.raw >= DPCD_REV_12) {
/* DPCD 1.2+: query the MSTM_CAP register for the MST capability bit. */
4487 st = core_link_read_dpcd(link, DP_MSTM_CAP,
4488 &cap.raw, sizeof(cap));
4489 if (st == DC_OK && cap.bits.MST_CAP == 1)
/* is_dp_active_dongle() - true when the branch device is an active converter,
 * i.e. its dongle_type falls in the DP-to-VGA .. DP-to-HDMI converter range.
 */
4496 bool is_dp_active_dongle(const struct dc_link *link)
4498 return (link->dpcd_caps.dongle_type >= DISPLAY_DONGLE_DP_VGA_CONVERTER) &&
4499 (link->dpcd_caps.dongle_type <= DISPLAY_DONGLE_DP_HDMI_CONVERTER);
/* is_dp_branch_device() - true when DPCD reported a downstream branch device
 * (cached in dpcd_caps by get_active_converter_info()).
 */
4502 bool is_dp_branch_device(const struct dc_link *link)
4504 return link->dpcd_caps.is_branch_dev;
/* translate_dpcd_max_bpc() - map the DPCD "max bits per color component"
 * encoding to an integer bpc value (return values for each case are outside
 * this view; presumably 8/10/12/16 with a default fallback — confirm in full
 * source).
 */
4507 static int translate_dpcd_max_bpc(enum dpcd_downstream_port_max_bpc bpc)
4510 case DOWN_STREAM_MAX_8BPC:
4512 case DOWN_STREAM_MAX_10BPC:
4514 case DOWN_STREAM_MAX_12BPC:
4516 case DOWN_STREAM_MAX_16BPC:
4525 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* dc_link_bw_kbps_from_raw_frl_link_rate_data() - convert a raw DPCD FRL
 * link-rate capability byte into a bandwidth in kbps. Body is not visible in
 * this chunk; callers below use it on MAX_ENCODED_LINK_BW_SUPPORT.
 */
4526 uint32_t dc_link_bw_kbps_from_raw_frl_link_rate_data(uint8_t bw)
4547  * Return PCON's post FRL link training supported BW if its non-zero, otherwise return max_supported_frl_bw.
/* intersect_frl_link_bw_support() - clamp the advertised max FRL bandwidth to
 * what the PCON actually achieved after FRL link training.
 *
 * @max_supported_frl_bw_in_kbps: bandwidth from the PCON's capability field.
 * @hdmi_encoded_link_bw: DP_PCON_HDMI_POST_FRL_STATUS contents.
 *
 * Returns the trained FRL bandwidth in kbps when FRL mode is active and a
 * BW_* bit is set; otherwise returns the capability value unchanged.
 */
4549 static uint32_t intersect_frl_link_bw_support(
4550 const uint32_t max_supported_frl_bw_in_kbps,
4551 const union hdmi_encoded_link_bw hdmi_encoded_link_bw)
4553 uint32_t supported_bw_in_kbps = max_supported_frl_bw_in_kbps;
4555 // HDMI_ENCODED_LINK_BW bits are only valid if HDMI Link Configuration bit is 1 (FRL mode)
4556 if (hdmi_encoded_link_bw.bits.FRL_MODE) {
/* Pick the highest trained rate; bits are checked from fastest to slowest. */
4557 if (hdmi_encoded_link_bw.bits.BW_48Gbps)
4558 supported_bw_in_kbps = 48000000;
4559 else if (hdmi_encoded_link_bw.bits.BW_40Gbps)
4560 supported_bw_in_kbps = 40000000;
4561 else if (hdmi_encoded_link_bw.bits.BW_32Gbps)
4562 supported_bw_in_kbps = 32000000;
4563 else if (hdmi_encoded_link_bw.bits.BW_24Gbps)
4564 supported_bw_in_kbps = 24000000;
4565 else if (hdmi_encoded_link_bw.bits.BW_18Gbps)
4566 supported_bw_in_kbps = 18000000;
4567 else if (hdmi_encoded_link_bw.bits.BW_9Gbps)
4568 supported_bw_in_kbps = 9000000;
4571 return supported_bw_in_kbps;
/* read_dp_device_vendor_id() - read the branch device's IEEE OUI and device
 * id string from DPCD and cache them in link->dpcd_caps (branch_dev_id and
 * branch_dev_name).
 */
4575 static void read_dp_device_vendor_id(struct dc_link *link)
4577 struct dp_device_vendor_id dp_id;
4579 /* read IEEE branch device id */
4580 core_link_read_dpcd(
/* Pack the 3 OUI bytes big-endian into a single 24-bit vendor id. */
4586 link->dpcd_caps.branch_dev_id =
4587 (dp_id.ieee_oui[0] << 16) +
4588 (dp_id.ieee_oui[1] << 8) +
/* Copy the raw 6-byte device-id string alongside the numeric id. */
4592 link->dpcd_caps.branch_dev_name,
4593 dp_id.ieee_device_id,
4594 sizeof(dp_id.ieee_device_id));
/* get_active_converter_info() - decode DP_DOWNSTREAMPORT_PRESENT (@data) and
 * the detailed downstream-port capability block to classify the attached
 * dongle/branch device, then populate link->dpcd_caps.dongle_caps (pixel
 * clock, bpc, YCbCr pass-through/conversion, FRL bandwidth, DFP cap
 * extension) and record the branch HW/FW revision.
 */
4599 static void get_active_converter_info(
4600 uint8_t data, struct dc_link *link)
4602 union dp_downstream_port_present ds_port = { .byte = data };
4603 memset(&link->dpcd_caps.dongle_caps, 0, sizeof(link->dpcd_caps.dongle_caps));
4605 /* decode converter info*/
4606 if (!ds_port.fields.PORT_PRESENT) {
/* No downstream port: clear dongle state and bail out early. */
4607 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
4608 ddc_service_set_dongle_type(link->ddc,
4609 link->dpcd_caps.dongle_type);
4610 link->dpcd_caps.is_branch_dev = false;
4614 /* DPCD 0x5 bit 0 = 1, it indicate it's branch device */
4615 link->dpcd_caps.is_branch_dev = ds_port.fields.PORT_PRESENT;
/* Coarse classification from the legacy PORT_TYPE field. */
4617 switch (ds_port.fields.PORT_TYPE) {
4618 case DOWNSTREAM_VGA:
4619 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_DP_VGA_CONVERTER;
4621 case DOWNSTREAM_DVI_HDMI_DP_PLUS_PLUS:
4622 /* At this point we don't know is it DVI or HDMI or DP++,
4624 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_DP_DVI_CONVERTER;
4627 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
/* DPCD 1.1+: refine the classification using the detailed caps block. */
4631 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_11) {
4632 uint8_t det_caps[16]; /* CTS 4.2.2.7 expects source to read Detailed Capabilities Info : 00080h-0008F.*/
4633 union dwnstream_port_caps_byte0 *port_caps =
4634 (union dwnstream_port_caps_byte0 *)det_caps;
4635 if (core_link_read_dpcd(link, DP_DOWNSTREAM_PORT_0,
4636 det_caps, sizeof(det_caps)) == DC_OK) {
4638 switch (port_caps->bits.DWN_STRM_PORTX_TYPE) {
4639 /*Handle DP case as DONGLE_NONE*/
4640 case DOWN_STREAM_DETAILED_DP:
4641 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
4643 case DOWN_STREAM_DETAILED_VGA:
4644 link->dpcd_caps.dongle_type =
4645 DISPLAY_DONGLE_DP_VGA_CONVERTER;
4647 case DOWN_STREAM_DETAILED_DVI:
4648 link->dpcd_caps.dongle_type =
4649 DISPLAY_DONGLE_DP_DVI_CONVERTER;
4651 case DOWN_STREAM_DETAILED_HDMI:
4652 case DOWN_STREAM_DETAILED_DP_PLUS_PLUS:
4653 /*Handle DP++ active converter case, process DP++ case as HDMI case according DP1.4 spec*/
4654 link->dpcd_caps.dongle_type =
4655 DISPLAY_DONGLE_DP_HDMI_CONVERTER;
4657 link->dpcd_caps.dongle_caps.dongle_type = link->dpcd_caps.dongle_type;
4658 if (ds_port.fields.DETAILED_CAPS) {
/* Parse HDMI-specific caps from detailed caps bytes 2 and 3. */
4660 union dwnstream_port_caps_byte3_hdmi
4661 hdmi_caps = {.raw = det_caps[3] };
4662 union dwnstream_port_caps_byte2
4663 hdmi_color_caps = {.raw = det_caps[2] };
4664 link->dpcd_caps.dongle_caps.dp_hdmi_max_pixel_clk_in_khz =
4667 link->dpcd_caps.dongle_caps.is_dp_hdmi_s3d_converter =
4668 hdmi_caps.bits.FRAME_SEQ_TO_FRAME_PACK;
4669 /*YCBCR capability only for HDMI case*/
4670 if (port_caps->bits.DWN_STRM_PORTX_TYPE
4671 == DOWN_STREAM_DETAILED_HDMI) {
4672 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr422_pass_through =
4673 hdmi_caps.bits.YCrCr422_PASS_THROUGH;
4674 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr420_pass_through =
4675 hdmi_caps.bits.YCrCr420_PASS_THROUGH;
4676 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr422_converter =
4677 hdmi_caps.bits.YCrCr422_CONVERSION;
4678 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr420_converter =
4679 hdmi_caps.bits.YCrCr420_CONVERSION;
4682 link->dpcd_caps.dongle_caps.dp_hdmi_max_bpc =
4683 translate_dpcd_max_bpc(
4684 hdmi_color_caps.bits.MAX_BITS_PER_COLOR_COMPONENT);
4686 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* PCON FRL path: intersect advertised max FRL BW with the rate the
 * PCON actually trained to (DP_PCON_HDMI_POST_FRL_STATUS). */
4687 if (link->dc->caps.hdmi_frl_pcon_support) {
4688 union hdmi_encoded_link_bw hdmi_encoded_link_bw;
4690 link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps =
4691 dc_link_bw_kbps_from_raw_frl_link_rate_data(
4692 hdmi_color_caps.bits.MAX_ENCODED_LINK_BW_SUPPORT);
4694 // Intersect reported max link bw support with the supported link rate post FRL link training
4695 if (core_link_read_dpcd(link, DP_PCON_HDMI_POST_FRL_STATUS,
4696 &hdmi_encoded_link_bw.raw, sizeof(hdmi_encoded_link_bw)) == DC_OK) {
4697 link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps = intersect_frl_link_bw_support(
4698 link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps,
4699 hdmi_encoded_link_bw);
4702 if (link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps > 0)
4703 link->dpcd_caps.dongle_caps.extendedCapValid = true;
4707 if (link->dpcd_caps.dongle_caps.dp_hdmi_max_pixel_clk_in_khz != 0)
4708 link->dpcd_caps.dongle_caps.extendedCapValid = true;
4716 ddc_service_set_dongle_type(link->ddc, link->dpcd_caps.dongle_type);
/* Record the branch device's hardware and firmware revision. */
4719 struct dp_sink_hw_fw_revision dp_hw_fw_revision;
4721 core_link_read_dpcd(
4723 DP_BRANCH_REVISION_START,
4724 (uint8_t *)&dp_hw_fw_revision,
4725 sizeof(dp_hw_fw_revision));
4727 link->dpcd_caps.branch_hw_revision =
4728 dp_hw_fw_revision.ieee_hw_rev;
4731 link->dpcd_caps.branch_fw_revision,
4732 dp_hw_fw_revision.ieee_fw_rev,
4733 sizeof(dp_hw_fw_revision.ieee_fw_rev));
4735 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* DPCD 1.4+ branch devices: read the DFP capability extension block and
 * unpack its little-endian 16-bit fields and per-format caps. */
4736 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_14 &&
4737 link->dpcd_caps.dongle_type != DISPLAY_DONGLE_NONE) {
4738 union dp_dfp_cap_ext dfp_cap_ext;
4739 memset(&dfp_cap_ext, '\0', sizeof (dfp_cap_ext));
4740 core_link_read_dpcd(
4742 DP_DFP_CAPABILITY_EXTENSION_SUPPORT,
4744 sizeof(dfp_cap_ext.raw));
4745 link->dpcd_caps.dongle_caps.dfp_cap_ext.supported = dfp_cap_ext.fields.supported;
4746 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_pixel_rate_in_mps =
4747 dfp_cap_ext.fields.max_pixel_rate_in_mps[0] +
4748 (dfp_cap_ext.fields.max_pixel_rate_in_mps[1] << 8);
4749 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_h_active_width =
4750 dfp_cap_ext.fields.max_video_h_active_width[0] +
4751 (dfp_cap_ext.fields.max_video_h_active_width[1] << 8);
4752 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_v_active_height =
4753 dfp_cap_ext.fields.max_video_v_active_height[0] +
4754 (dfp_cap_ext.fields.max_video_v_active_height[1] << 8);
4755 link->dpcd_caps.dongle_caps.dfp_cap_ext.encoding_format_caps =
4756 dfp_cap_ext.fields.encoding_format_caps;
4757 link->dpcd_caps.dongle_caps.dfp_cap_ext.rgb_color_depth_caps =
4758 dfp_cap_ext.fields.rgb_color_depth_caps;
4759 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr444_color_depth_caps =
4760 dfp_cap_ext.fields.ycbcr444_color_depth_caps;
4761 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr422_color_depth_caps =
4762 dfp_cap_ext.fields.ycbcr422_color_depth_caps;
4763 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr420_color_depth_caps =
4764 dfp_cap_ext.fields.ycbcr420_color_depth_caps;
4765 DC_LOG_DP2("DFP capability extension is read at link %d", link->link_index);
4766 DC_LOG_DP2("\tdfp_cap_ext.supported = %s", link->dpcd_caps.dongle_caps.dfp_cap_ext.supported ? "true" : "false");
4767 DC_LOG_DP2("\tdfp_cap_ext.max_pixel_rate_in_mps = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_pixel_rate_in_mps);
4768 DC_LOG_DP2("\tdfp_cap_ext.max_video_h_active_width = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_h_active_width);
4769 DC_LOG_DP2("\tdfp_cap_ext.max_video_v_active_height = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_v_active_height);
/* dp_wa_power_up_0010FA() - workaround for dongles (e.g. branch dev id
 * 0010FA) that power down their AUX circuits, violating the spec. Retries the
 * DP_DPCD_REV read after forcing receiver power, then flags known branch
 * device ids so the encoder keeps the receiver powered on disable_output.
 */
4774 static void dp_wa_power_up_0010FA(struct dc_link *link, uint8_t *dpcd_data,
/* If DPCD_REV read as 0 the receiver may be powered down: power it up and
 * retry the read a few times. */
4779 if (!link->dpcd_caps.dpcd_rev.raw) {
4781 dp_receiver_power_ctrl(link, true);
4782 core_link_read_dpcd(link, DP_DPCD_REV,
4784 link->dpcd_caps.dpcd_rev.raw = dpcd_data[
4787 } while (retry++ < 4 && !link->dpcd_caps.dpcd_rev.raw);
4790 if (link->dpcd_caps.dongle_type == DISPLAY_DONGLE_DP_VGA_CONVERTER) {
4791 switch (link->dpcd_caps.branch_dev_id) {
4792 /* 0010FA active dongles (DP-VGA, DP-DLDVI converters) power down
4793 * all internal circuits including AUX communication preventing
4794 * reading DPCD table and EDID (spec violation).
4795 * Encoder will skip DP RX power down on disable_output to
4796 * keep receiver powered all the time.*/
4797 case DP_BRANCH_DEVICE_ID_0010FA:
4798 case DP_BRANCH_DEVICE_ID_0080E1:
4799 case DP_BRANCH_DEVICE_ID_00E04C:
4800 link->wa_flags.dp_keep_receiver_powered = true;
4803 /* TODO: May need work around for other dongles. */
4805 link->wa_flags.dp_keep_receiver_powered = false;
/* Not a DP-VGA converter: no workaround needed. */
4809 link->wa_flags.dp_keep_receiver_powered = false;
4812 /* Read additional sink caps defined in source specific DPCD area
4813 * This function currently only reads from SinkCapability address (DP_SOURCE_SINK_CAP)
/* Returns false when the DPCD read fails; on success caches the byte in
 * link->dpcd_sink_ext_caps.
 */
4815 static bool dpcd_read_sink_ext_caps(struct dc_link *link)
4822 if (core_link_read_dpcd(link, DP_SOURCE_SINK_CAP, &dpcd_data, 1) != DC_OK)
4825 link->dpcd_sink_ext_caps.raw = dpcd_data;
/* dp_retrieve_lttpr_cap() - determine the LTTPR (Link Training Tunable PHY
 * Repeater) operating mode from VBIOS flags and driver config, then read and
 * cache the LTTPR capability registers (F0000h block) when repeaters may be
 * present.
 *
 * Returns true when a valid LTTPR chain was detected; on detection failure
 * the link falls back to LTTPR_MODE_NON_LTTPR.
 */
4829 bool dp_retrieve_lttpr_cap(struct dc_link *link)
4831 #if defined(CONFIG_DRM_AMD_DC_DCN)
4832 uint8_t lttpr_dpcd_data[8];
4833 bool allow_lttpr_non_transparent_mode = 0;
4835 uint8_t lttpr_dpcd_data[6];
4837 bool vbios_lttpr_enable = link->dc->caps.vbios_lttpr_enable;
4838 bool vbios_lttpr_interop = link->dc->caps.vbios_lttpr_aware;
4839 enum dc_status status = DC_ERROR_UNEXPECTED;
4840 bool is_lttpr_present = false;
4842 memset(lttpr_dpcd_data, '\0', sizeof(lttpr_dpcd_data));
4844 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* Non-transparent mode is allowed per channel-coding class: DP2_0 config bit
 * applies to 128b/132b-capable sinks, DP1_4A bit to 8b/10b-only sinks. */
4845 if ((link->dc->config.allow_lttpr_non_transparent_mode.bits.DP2_0 &&
4846 link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED)) {
4847 allow_lttpr_non_transparent_mode = 1;
4848 } else if (link->dc->config.allow_lttpr_non_transparent_mode.bits.DP1_4A &&
4849 !link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED) {
4850 allow_lttpr_non_transparent_mode = 1;
4855 * Logic to determine LTTPR mode
/* Resolve mode from the two VBIOS flags; default is non-LTTPR. */
4857 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
4858 if (vbios_lttpr_enable && vbios_lttpr_interop)
4859 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
4860 else if (!vbios_lttpr_enable && vbios_lttpr_interop) {
4861 #if defined(CONFIG_DRM_AMD_DC_DCN)
4862 if (allow_lttpr_non_transparent_mode)
4864 if (link->dc->config.allow_lttpr_non_transparent_mode)
4866 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
4868 link->lttpr_mode = LTTPR_MODE_TRANSPARENT;
4869 } else if (!vbios_lttpr_enable && !vbios_lttpr_interop) {
4870 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* Non-transparent mode also requires extended AUX timeout support. */
4871 if (!allow_lttpr_non_transparent_mode || !link->dc->caps.extended_aux_timeout_support)
4873 if (!link->dc->config.allow_lttpr_non_transparent_mode
4874 || !link->dc->caps.extended_aux_timeout_support)
4876 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
4878 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
4880 #if defined(CONFIG_DRM_AMD_DC_DCN)
4881 /* Check DP tunnel LTTPR mode debug option. */
4882 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA &&
4883 link->dc->debug.dpia_debug.bits.force_non_lttpr)
4884 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
4887 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT || link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
4888 /* By reading LTTPR capability, RX assumes that we will enable
4889 * LTTPR extended aux timeout if LTTPR is present.
/* Read the whole LTTPR capability block in a single AUX transaction. */
4891 status = core_link_read_dpcd(
4893 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV,
4895 sizeof(lttpr_dpcd_data));
4896 if (status != DC_OK) {
4897 DC_LOG_DP2("%s: Read LTTPR caps data failed.\n", __func__);
/* Unpack individual registers; indices are offsets from the block base. */
4901 link->dpcd_caps.lttpr_caps.revision.raw =
4902 lttpr_dpcd_data[DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV -
4903 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4905 link->dpcd_caps.lttpr_caps.max_link_rate =
4906 lttpr_dpcd_data[DP_MAX_LINK_RATE_PHY_REPEATER -
4907 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4909 link->dpcd_caps.lttpr_caps.phy_repeater_cnt =
4910 lttpr_dpcd_data[DP_PHY_REPEATER_CNT -
4911 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4913 link->dpcd_caps.lttpr_caps.max_lane_count =
4914 lttpr_dpcd_data[DP_MAX_LANE_COUNT_PHY_REPEATER -
4915 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4917 link->dpcd_caps.lttpr_caps.mode =
4918 lttpr_dpcd_data[DP_PHY_REPEATER_MODE -
4919 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4921 link->dpcd_caps.lttpr_caps.max_ext_timeout =
4922 lttpr_dpcd_data[DP_PHY_REPEATER_EXTENDED_WAIT_TIMEOUT -
4923 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4925 #if defined(CONFIG_DRM_AMD_DC_DCN)
4926 link->dpcd_caps.lttpr_caps.main_link_channel_coding.raw =
4927 lttpr_dpcd_data[DP_MAIN_LINK_CHANNEL_CODING_PHY_REPEATER -
4928 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4930 link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.raw =
4931 lttpr_dpcd_data[DP_PHY_REPEATER_128b_132b_RATES -
4932 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4935 /* Attempt to train in LTTPR transparent mode if repeater count exceeds 8. */
/* Sanity-check the reported caps before trusting the repeater chain. */
4936 is_lttpr_present = (dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt) != 0 &&
4937 link->dpcd_caps.lttpr_caps.phy_repeater_cnt < 0xff &&
4938 link->dpcd_caps.lttpr_caps.max_lane_count > 0 &&
4939 link->dpcd_caps.lttpr_caps.max_lane_count <= 4 &&
4940 link->dpcd_caps.lttpr_caps.revision.raw >= 0x14);
4941 if (is_lttpr_present) {
4942 CONN_DATA_DETECT(link, lttpr_dpcd_data, sizeof(lttpr_dpcd_data), "LTTPR Caps: ");
4943 configure_lttpr_mode_transparent(link);
4945 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
4947 return is_lttpr_present;
/* retrieve_link_cap() - read and cache the full receiver capability set for a
 * DP link: LTTPR caps, tunneling data, base/extended receiver caps, branch
 * device info, max lane count / link rate / downspread, eDP config, sink
 * count and ids, DSC/FEC caps, and (DCN builds) 128b/132b link rates.
 * Returns false on unrecoverable DPCD read failures or invalid caps.
 */
4950 static bool retrieve_link_cap(struct dc_link *link)
4952 /* DP_ADAPTER_CAP - DP_DPCD_REV + 1 == 16 and also DP_DSC_BITS_PER_PIXEL_INC - DP_DSC_SUPPORT + 1 == 16,
4953 * which means size 16 will be good for both of those DPCD register block reads
4955 uint8_t dpcd_data[16];
4956 /*Only need to read 1 byte starting from DP_DPRX_FEATURE_ENUMERATION_LIST.
4958 uint8_t dpcd_dprx_data = '\0';
4959 uint8_t dpcd_power_state = '\0';
4961 struct dp_device_vendor_id sink_id;
4962 union down_stream_port_count down_strm_port_count;
4963 union edp_configuration_cap edp_config_cap;
4964 union dp_downstream_port_present ds_port = { 0 };
4965 enum dc_status status = DC_ERROR_UNEXPECTED;
4966 uint32_t read_dpcd_retry_cnt = 3;
4968 struct dp_sink_hw_fw_revision dp_hw_fw_revision;
4969 const uint32_t post_oui_delay = 30; // 30ms
4970 bool is_lttpr_present = false;
4972 memset(dpcd_data, '\0', sizeof(dpcd_data));
4973 memset(&down_strm_port_count,
4974 '\0', sizeof(union down_stream_port_count));
4975 memset(&edp_config_cap, '\0',
4976 sizeof(union edp_configuration_cap));
4978 /* if extended timeout is supported in hardware,
4979 * default to LTTPR timeout (3.2ms) first as a W/A for DP link layer
4980 * CTS 4.2.1.1 regression introduced by CTS specs requirement update.
4982 dc_link_aux_try_to_configure_timeout(link->ddc,
4983 LINK_AUX_DEFAULT_LTTPR_TIMEOUT_PERIOD);
4985 is_lttpr_present = dp_retrieve_lttpr_cap(link);
4986 /* Read DP tunneling information. */
4987 status = dpcd_get_tunneling_device_data(link);
4989 status = core_link_read_dpcd(link, DP_SET_POWER,
4990 &dpcd_power_state, sizeof(dpcd_power_state));
4992 /* Delay 1 ms if AUX CH is in power down state. Based on spec
4993 * section 2.3.1.2, if AUX CH may be powered down due to
4994 * write to DPCD 600h = 2. Sink AUX CH is monitoring differential
4995 * signal and may need up to 1 ms before being able to reply.
4997 if (status != DC_OK || dpcd_power_state == DP_SET_POWER_D3)
5000 dpcd_set_source_specific_data(link);
5001 /* Sink may need to configure internals based on vendor, so allow some
5002 * time before proceeding with possibly vendor specific transactions
5004 msleep(post_oui_delay);
/* Read the 16-byte base receiver capability block, with retries. */
5006 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5007 status = core_link_read_dpcd(
5012 if (status == DC_OK)
5016 if (status != DC_OK) {
5017 dm_error("%s: Read receiver caps dpcd data failed.\n", __func__);
/* No LTTPR detected: drop back to the default AUX timeout. */
5021 if (!is_lttpr_present)
5022 dc_link_aux_try_to_configure_timeout(link->ddc, LINK_AUX_DEFAULT_TIMEOUT_PERIOD);
5025 union training_aux_rd_interval aux_rd_interval;
5027 aux_rd_interval.raw =
5028 dpcd_data[DP_TRAINING_AUX_RD_INTERVAL];
5030 link->dpcd_caps.ext_receiver_cap_field_present =
5031 aux_rd_interval.bits.EXT_RECEIVER_CAP_FIELD_PRESENT == 1;
/* When the extended receiver cap field exists, it supersedes the base
 * block; re-read from the extended address and overwrite dpcd_data. */
5033 if (aux_rd_interval.bits.EXT_RECEIVER_CAP_FIELD_PRESENT == 1) {
5034 uint8_t ext_cap_data[16];
5036 memset(ext_cap_data, '\0', sizeof(ext_cap_data));
5037 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5038 status = core_link_read_dpcd(
5042 sizeof(ext_cap_data));
5043 if (status == DC_OK) {
5044 memcpy(dpcd_data, ext_cap_data, sizeof(dpcd_data));
5048 if (status != DC_OK)
5049 dm_error("%s: Read extend caps data failed, use cap from dpcd 0.\n", __func__);
5053 link->dpcd_caps.dpcd_rev.raw =
5054 dpcd_data[DP_DPCD_REV - DP_DPCD_REV];
5056 if (link->dpcd_caps.ext_receiver_cap_field_present) {
5057 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5058 status = core_link_read_dpcd(
5060 DP_DPRX_FEATURE_ENUMERATION_LIST,
5062 sizeof(dpcd_dprx_data));
5063 if (status == DC_OK)
5067 link->dpcd_caps.dprx_feature.raw = dpcd_dprx_data;
5069 if (status != DC_OK)
5070 dm_error("%s: Read DPRX caps data failed.\n", __func__);
5074 link->dpcd_caps.dprx_feature.raw = 0;
5078 /* Error condition checking...
5079 * It is impossible for Sink to report Max Lane Count = 0.
5080 * It is possible for Sink to report Max Link Rate = 0, if it is
5081 * an eDP device that is reporting specialized link rates in the
5082 * SUPPORTED_LINK_RATE table.
5084 if (dpcd_data[DP_MAX_LANE_COUNT - DP_DPCD_REV] == 0)
5087 ds_port.byte = dpcd_data[DP_DOWNSTREAMPORT_PRESENT -
5090 read_dp_device_vendor_id(link);
5092 get_active_converter_info(ds_port.byte, link);
5094 dp_wa_power_up_0010FA(link, dpcd_data, sizeof(dpcd_data));
5096 down_strm_port_count.raw = dpcd_data[DP_DOWN_STREAM_PORT_COUNT -
5099 link->dpcd_caps.allow_invalid_MSA_timing_param =
5100 down_strm_port_count.bits.IGNORE_MSA_TIMING_PARAM;
5102 link->dpcd_caps.max_ln_count.raw = dpcd_data[
5103 DP_MAX_LANE_COUNT - DP_DPCD_REV];
5105 link->dpcd_caps.max_down_spread.raw = dpcd_data[
5106 DP_MAX_DOWNSPREAD - DP_DPCD_REV];
/* Publish the sink-reported link capability (lanes, rate, downspread). */
5108 link->reported_link_cap.lane_count =
5109 link->dpcd_caps.max_ln_count.bits.MAX_LANE_COUNT;
5110 link->reported_link_cap.link_rate = dpcd_data[
5111 DP_MAX_LINK_RATE - DP_DPCD_REV];
5112 link->reported_link_cap.link_spread =
5113 link->dpcd_caps.max_down_spread.bits.MAX_DOWN_SPREAD ?
5114 LINK_SPREAD_05_DOWNSPREAD_30KHZ : LINK_SPREAD_DISABLED;
5116 edp_config_cap.raw = dpcd_data[
5117 DP_EDP_CONFIGURATION_CAP - DP_DPCD_REV];
5118 link->dpcd_caps.panel_mode_edp =
5119 edp_config_cap.bits.ALT_SCRAMBLER_RESET;
5120 link->dpcd_caps.dpcd_display_control_capable =
5121 edp_config_cap.bits.DPCD_DISPLAY_CONTROL_CAPABLE;
5123 link->test_pattern_enabled = false;
5124 link->compliance_test_state.raw = 0;
5126 /* read sink count */
5127 core_link_read_dpcd(link,
5129 &link->dpcd_caps.sink_count.raw,
5130 sizeof(link->dpcd_caps.sink_count.raw));
5132 /* read sink ieee oui */
5133 core_link_read_dpcd(link,
5135 (uint8_t *)(&sink_id),
/* Pack the 3 OUI bytes big-endian into the 24-bit sink device id. */
5138 link->dpcd_caps.sink_dev_id =
5139 (sink_id.ieee_oui[0] << 16) +
5140 (sink_id.ieee_oui[1] << 8) +
5141 (sink_id.ieee_oui[2]);
5144 link->dpcd_caps.sink_dev_id_str,
5145 sink_id.ieee_device_id,
5146 sizeof(sink_id.ieee_device_id));
5148 /* Quirk Apple MBP 2017 15" Retina panel: Wrong DP_MAX_LINK_RATE */
5150 uint8_t str_mbp_2017[] = { 101, 68, 21, 101, 98, 97 };
5152 if ((link->dpcd_caps.sink_dev_id == 0x0010fa) &&
5153 !memcmp(link->dpcd_caps.sink_dev_id_str, str_mbp_2017,
5154 sizeof(str_mbp_2017))) {
5155 link->reported_link_cap.link_rate = 0x0c;
/* Record the sink's hardware and firmware revision. */
5159 core_link_read_dpcd(
5161 DP_SINK_HW_REVISION_START,
5162 (uint8_t *)&dp_hw_fw_revision,
5163 sizeof(dp_hw_fw_revision));
5165 link->dpcd_caps.sink_hw_revision =
5166 dp_hw_fw_revision.ieee_hw_rev;
5169 link->dpcd_caps.sink_fw_revision,
5170 dp_hw_fw_revision.ieee_fw_rev,
5171 sizeof(dp_hw_fw_revision.ieee_fw_rev));
5173 memset(&link->dpcd_caps.dsc_caps, '\0',
5174 sizeof(link->dpcd_caps.dsc_caps));
5175 memset(&link->dpcd_caps.fec_cap, '\0', sizeof(link->dpcd_caps.fec_cap));
5176 /* Read DSC and FEC sink capabilities if DP revision is 1.4 and up */
5177 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_14) {
5178 status = core_link_read_dpcd(
5181 &link->dpcd_caps.fec_cap.raw,
5182 sizeof(link->dpcd_caps.fec_cap.raw));
5183 status = core_link_read_dpcd(
5186 link->dpcd_caps.dsc_caps.dsc_basic_caps.raw,
5187 sizeof(link->dpcd_caps.dsc_caps.dsc_basic_caps.raw));
5188 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* Branch devices additionally expose DSC branch decoder throughput caps. */
5189 if (link->dpcd_caps.dongle_type != DISPLAY_DONGLE_NONE) {
5190 status = core_link_read_dpcd(
5192 DP_DSC_BRANCH_OVERALL_THROUGHPUT_0,
5193 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw,
5194 sizeof(link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw));
5195 DC_LOG_DSC("DSC branch decoder capability is read at link %d", link->link_index);
5196 DC_LOG_DSC("\tBRANCH_OVERALL_THROUGHPUT_0 = 0x%02x",
5197 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_OVERALL_THROUGHPUT_0);
5198 DC_LOG_DSC("\tBRANCH_OVERALL_THROUGHPUT_1 = 0x%02x",
5199 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_OVERALL_THROUGHPUT_1);
5200 DC_LOG_DSC("\tBRANCH_MAX_LINE_WIDTH 0x%02x",
5201 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_MAX_LINE_WIDTH);
5204 status = core_link_read_dpcd(
5206 DP_DSC_BRANCH_OVERALL_THROUGHPUT_0,
5207 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw,
5208 sizeof(link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw));
5212 if (!dpcd_read_sink_ext_caps(link))
5213 link->dpcd_sink_ext_caps.raw = 0;
5215 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* DCN: decode 128b/132b channel coding support and UHBR link rates. */
5216 link->dpcd_caps.channel_coding_cap.raw = dpcd_data[DP_MAIN_LINK_CHANNEL_CODING_CAP - DP_DPCD_REV];
5218 if (link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED) {
5219 DC_LOG_DP2("128b/132b encoding is supported at link %d", link->link_index);
5221 core_link_read_dpcd(link,
5222 DP_128b_132b_SUPPORTED_LINK_RATES,
5223 &link->dpcd_caps.dp_128b_132b_supported_link_rates.raw,
5224 sizeof(link->dpcd_caps.dp_128b_132b_supported_link_rates.raw));
5225 if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR20)
5226 link->reported_link_cap.link_rate = LINK_RATE_UHBR20;
5227 else if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR13_5)
5228 link->reported_link_cap.link_rate = LINK_RATE_UHBR13_5;
5229 else if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR10)
5230 link->reported_link_cap.link_rate = LINK_RATE_UHBR10;
5232 dm_error("%s: Invalid RX 128b_132b_supported_link_rates\n", __func__);
5233 DC_LOG_DP2("128b/132b supported link rates is read at link %d", link->link_index);
5234 DC_LOG_DP2("\tmax 128b/132b link rate support is %d.%d GHz",
5235 link->reported_link_cap.link_rate / 100,
5236 link->reported_link_cap.link_rate % 100);
5238 core_link_read_dpcd(link,
5239 DP_SINK_VIDEO_FALLBACK_FORMATS,
5240 &link->dpcd_caps.fallback_formats.raw,
5241 sizeof(link->dpcd_caps.fallback_formats.raw));
5242 DC_LOG_DP2("sink video fallback format is read at link %d", link->link_index);
5243 if (link->dpcd_caps.fallback_formats.bits.dp_1920x1080_60Hz_24bpp_support)
5244 DC_LOG_DP2("\t1920x1080@60Hz 24bpp fallback format supported");
5245 if (link->dpcd_caps.fallback_formats.bits.dp_1280x720_60Hz_24bpp_support)
5246 DC_LOG_DP2("\t1280x720@60Hz 24bpp fallback format supported");
5247 if (link->dpcd_caps.fallback_formats.bits.dp_1024x768_60Hz_24bpp_support)
5248 DC_LOG_DP2("\t1024x768@60Hz 24bpp fallback format supported");
5249 if (link->dpcd_caps.fallback_formats.raw == 0) {
5250 DC_LOG_DP2("\tno supported fallback formats, assume 1920x1080@60Hz 24bpp is supported");
5251 link->dpcd_caps.fallback_formats.bits.dp_1920x1080_60Hz_24bpp_support = 1;
5254 core_link_read_dpcd(link,
5255 DP_FEC_CAPABILITY_1,
5256 &link->dpcd_caps.fec_cap1.raw,
5257 sizeof(link->dpcd_caps.fec_cap1.raw));
5258 DC_LOG_DP2("FEC CAPABILITY 1 is read at link %d", link->link_index);
5259 if (link->dpcd_caps.fec_cap1.bits.AGGREGATED_ERROR_COUNTERS_CAPABLE)
5260 DC_LOG_DP2("\tFEC aggregated error counters are supported");
5264 /* Connectivity log: detection */
5265 CONN_DATA_DETECT(link, dpcd_data, sizeof(dpcd_data), "Rx Caps: ")
/* dp_overwrite_extended_receiver_cap() - re-read the receiver capability
 * block and overwrite the cached caps in link->dpcd_caps / reported_link_cap.
 * A trimmed-down variant of retrieve_link_cap(): same DPCD fields decoded,
 * no LTTPR / DSC / FEC handling.
 */
5270 bool dp_overwrite_extended_receiver_cap(struct dc_link *link)
5272 uint8_t dpcd_data[16];
5273 uint32_t read_dpcd_retry_cnt = 3;
5274 enum dc_status status = DC_ERROR_UNEXPECTED;
5275 union dp_downstream_port_present ds_port = { 0 };
5276 union down_stream_port_count down_strm_port_count;
5277 union edp_configuration_cap edp_config_cap;
/* Read the capability block with retries, same as retrieve_link_cap(). */
5281 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5282 status = core_link_read_dpcd(
5287 if (status == DC_OK)
5291 link->dpcd_caps.dpcd_rev.raw =
5292 dpcd_data[DP_DPCD_REV - DP_DPCD_REV];
/* Max lane count of 0 is invalid per spec. */
5294 if (dpcd_data[DP_MAX_LANE_COUNT - DP_DPCD_REV] == 0)
5297 ds_port.byte = dpcd_data[DP_DOWNSTREAMPORT_PRESENT -
5300 get_active_converter_info(ds_port.byte, link);
5302 down_strm_port_count.raw = dpcd_data[DP_DOWN_STREAM_PORT_COUNT -
5305 link->dpcd_caps.allow_invalid_MSA_timing_param =
5306 down_strm_port_count.bits.IGNORE_MSA_TIMING_PARAM;
5308 link->dpcd_caps.max_ln_count.raw = dpcd_data[
5309 DP_MAX_LANE_COUNT - DP_DPCD_REV];
5311 link->dpcd_caps.max_down_spread.raw = dpcd_data[
5312 DP_MAX_DOWNSPREAD - DP_DPCD_REV];
/* Refresh the reported link capability from the new data. */
5314 link->reported_link_cap.lane_count =
5315 link->dpcd_caps.max_ln_count.bits.MAX_LANE_COUNT;
5316 link->reported_link_cap.link_rate = dpcd_data[
5317 DP_MAX_LINK_RATE - DP_DPCD_REV];
5318 link->reported_link_cap.link_spread =
5319 link->dpcd_caps.max_down_spread.bits.MAX_DOWN_SPREAD ?
5320 LINK_SPREAD_05_DOWNSPREAD_30KHZ : LINK_SPREAD_DISABLED;
5322 edp_config_cap.raw = dpcd_data[
5323 DP_EDP_CONFIGURATION_CAP - DP_DPCD_REV];
5324 link->dpcd_caps.panel_mode_edp =
5325 edp_config_cap.bits.ALT_SCRAMBLER_RESET;
5326 link->dpcd_caps.dpcd_display_control_capable =
5327 edp_config_cap.bits.DPCD_DISPLAY_CONTROL_CAPABLE;
/* detect_dp_sink_caps() - public wrapper around retrieve_link_cap(). */
5332 bool detect_dp_sink_caps(struct dc_link *link)
5334 return retrieve_link_cap(link);
5336 /* dc init_hw has power encoder using default
5337 * signal for connector. For native DP, no
5338 * need to power up encoder again. If not native
5339 * DP, hw_init may need check signal or power up
5342 /* TODO save sink caps in link->sink */
/* linkRateInKHzToLinkRateMultiplier() - map an eDP per-lane link rate in kHz
 * to the dc_link_rate enum (a multiplier of 0.27 Gbps/lane). Unrecognized
 * rates map to LINK_RATE_UNKNOWN. (The kHz case labels are outside this
 * view.)
 */
5345 static enum dc_link_rate linkRateInKHzToLinkRateMultiplier(uint32_t link_rate_in_khz)
5347 enum dc_link_rate link_rate;
5348 // LinkRate is normally stored as a multiplier of 0.27 Gbps per lane. Do the translation.
5349 switch (link_rate_in_khz) {
5351 link_rate = LINK_RATE_LOW; // Rate_1 (RBR) - 1.62 Gbps/Lane
5354 link_rate = LINK_RATE_RATE_2; // Rate_2 - 2.16 Gbps/Lane
5357 link_rate = LINK_RATE_RATE_3; // Rate_3 - 2.43 Gbps/Lane
5360 link_rate = LINK_RATE_HIGH; // Rate_4 (HBR) - 2.70 Gbps/Lane
5363 link_rate = LINK_RATE_RBR2; // Rate_5 (RBR2) - 3.24 Gbps/Lane
5366 link_rate = LINK_RATE_RATE_6; // Rate_6 - 4.32 Gbps/Lane
5369 link_rate = LINK_RATE_HIGH2; // Rate_7 (HBR2) - 5.40 Gbps/Lane
5372 link_rate = LINK_RATE_HIGH3; // Rate_8 (HBR3) - 8.10 Gbps/Lane
5375 link_rate = LINK_RATE_UNKNOWN;
/* detect_edp_sink_caps() - eDP-specific capability detection: runs the
 * common retrieve_link_cap(), then (for eDP v1.4+ / DPCD 1.3+) parses the
 * SUPPORTED_LINK_RATES table, promotes the highest rate into
 * reported_link_cap, reads the backlight-adjustment capability, and sets the
 * default brightness over AUX.
 */
5381 void detect_edp_sink_caps(struct dc_link *link)
5383 uint8_t supported_link_rates[16];
5385 uint32_t link_rate_in_khz;
5386 enum dc_link_rate link_rate = LINK_RATE_UNKNOWN;
5387 uint8_t backlight_adj_cap;
5389 retrieve_link_cap(link);
5390 link->dpcd_caps.edp_supported_link_rates_count = 0;
5391 memset(supported_link_rates, 0, sizeof(supported_link_rates));
5394 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
5395 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
5397 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_13 &&
5398 (link->dc->debug.optimize_edp_link_rate ||
5399 link->reported_link_cap.link_rate == LINK_RATE_UNKNOWN)) {
5400 // Read DPCD 00010h - 0001Fh 16 bytes at one shot
5401 core_link_read_dpcd(link, DP_SUPPORTED_LINK_RATES,
5402 supported_link_rates, sizeof(supported_link_rates));
/* Each table entry is a 16-bit little-endian value in 200 kHz units;
 * a value of 0 marks an unused slot. */
5404 for (entry = 0; entry < 16; entry += 2) {
5405 // DPCD register reports per-lane link rate = 16-bit link rate capability
5406 // value X 200 kHz. Need multiplier to find link rate in kHz.
5407 link_rate_in_khz = (supported_link_rates[entry+1] * 0x100 +
5408 supported_link_rates[entry]) * 200;
5410 if (link_rate_in_khz != 0) {
5411 link_rate = linkRateInKHzToLinkRateMultiplier(link_rate_in_khz);
5412 link->dpcd_caps.edp_supported_link_rates[link->dpcd_caps.edp_supported_link_rates_count] = link_rate;
5413 link->dpcd_caps.edp_supported_link_rates_count++;
/* Keep reported_link_cap at the fastest table rate seen. */
5415 if (link->reported_link_cap.link_rate < link_rate)
5416 link->reported_link_cap.link_rate = link_rate;
5420 link->verified_link_cap = link->reported_link_cap;
5422 core_link_read_dpcd(link, DP_EDP_BACKLIGHT_ADJUSTMENT_CAP,
5423 &backlight_adj_cap, sizeof(backlight_adj_cap));
5425 link->dpcd_caps.dynamic_backlight_capable_edp =
5426 (backlight_adj_cap & DP_EDP_DYNAMIC_BACKLIGHT_CAP) ? true:false;
5428 dc_link_set_default_brightness_aux(link);
/* dc_link_dp_enable_hpd() - enable HPD on the link's encoder, guarding both
 * the encoder pointer and the enable_hpd hook before calling it.
 */
5431 void dc_link_dp_enable_hpd(const struct dc_link *link)
5433 struct link_encoder *encoder = link->link_enc;
5435 if (encoder != NULL && encoder->funcs->enable_hpd != NULL)
5436 encoder->funcs->enable_hpd(encoder);
5439 void dc_link_dp_disable_hpd(const struct dc_link *link)
5441 struct link_encoder *encoder = link->link_enc;
5443 if (encoder != NULL && encoder->funcs->enable_hpd != NULL)
5444 encoder->funcs->disable_hpd(encoder);
/* is_dp_phy_pattern() - true when @test_pattern is a PHY-layer pattern
 * (within the PHY_PATTERN_BEGIN..END range) or the video-mode pattern.
 */
5447 static bool is_dp_phy_pattern(enum dp_test_pattern test_pattern)
5449 if ((DP_TEST_PATTERN_PHY_PATTERN_BEGIN <= test_pattern &&
5450 test_pattern <= DP_TEST_PATTERN_PHY_PATTERN_END) ||
5451 test_pattern == DP_TEST_PATTERN_VIDEO_MODE)
/* set_crtc_test_pattern() - program a controller-generated (CRTC/OPP/DPG)
 * test pattern for the pipe driving @link.
 *
 * Maps the requested DP test pattern to the controller pattern enum, then
 * either uses the timing generator's legacy set_test_pattern hook or the
 * newer display pattern generator (DPG) path, splitting the width across
 * ODM pipes. DP_TEST_PATTERN_VIDEO_MODE restores normal output and the
 * stream's bit-depth reduction parameters.
 *
 * NOTE(review): in this copy "¶ms" appears where "&params" is expected
 * (encoding mangling of '&'), and several structural lines (break
 * statements, braces, DPG argument lists) look dropped — compare against
 * upstream dc_link_dp.c before relying on this text.
 */
5457 static void set_crtc_test_pattern(struct dc_link *link,
5458 struct pipe_ctx *pipe_ctx,
5459 enum dp_test_pattern test_pattern,
5460 enum dp_test_pattern_color_space test_pattern_color_space)
5462 enum controller_dp_test_pattern controller_test_pattern;
5463 enum dc_color_depth color_depth = pipe_ctx->
5464 stream->timing.display_color_depth;
5465 struct bit_depth_reduction_params params;
5466 struct output_pixel_processor *opp = pipe_ctx->stream_res.opp;
/* Pattern spans the full addressable width/height including borders. */
5467 int width = pipe_ctx->stream->timing.h_addressable +
5468 pipe_ctx->stream->timing.h_border_left +
5469 pipe_ctx->stream->timing.h_border_right;
5470 int height = pipe_ctx->stream->timing.v_addressable +
5471 pipe_ctx->stream->timing.v_border_bottom +
5472 pipe_ctx->stream->timing.v_border_top;
5474 memset(¶ms, 0, sizeof(params));
/* Translate the DP test pattern request to the controller enum. */
5476 switch (test_pattern) {
5477 case DP_TEST_PATTERN_COLOR_SQUARES:
5478 controller_test_pattern =
5479 CONTROLLER_DP_TEST_PATTERN_COLORSQUARES;
5481 case DP_TEST_PATTERN_COLOR_SQUARES_CEA:
5482 controller_test_pattern =
5483 CONTROLLER_DP_TEST_PATTERN_COLORSQUARES_CEA;
5485 case DP_TEST_PATTERN_VERTICAL_BARS:
5486 controller_test_pattern =
5487 CONTROLLER_DP_TEST_PATTERN_VERTICALBARS;
5489 case DP_TEST_PATTERN_HORIZONTAL_BARS:
5490 controller_test_pattern =
5491 CONTROLLER_DP_TEST_PATTERN_HORIZONTALBARS;
5493 case DP_TEST_PATTERN_COLOR_RAMP:
5494 controller_test_pattern =
5495 CONTROLLER_DP_TEST_PATTERN_COLORRAMP;
/* Anything else falls back to normal video output. */
5498 controller_test_pattern =
5499 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE;
5503 switch (test_pattern) {
5504 case DP_TEST_PATTERN_COLOR_SQUARES:
5505 case DP_TEST_PATTERN_COLOR_SQUARES_CEA:
5506 case DP_TEST_PATTERN_VERTICAL_BARS:
5507 case DP_TEST_PATTERN_HORIZONTAL_BARS:
5508 case DP_TEST_PATTERN_COLOR_RAMP:
5510 /* disable bit depth reduction */
5511 pipe_ctx->stream->bit_depth_params = params;
5512 opp->funcs->opp_program_bit_depth_reduction(opp, ¶ms);
/* Prefer the timing generator's legacy hook when available... */
5513 if (pipe_ctx->stream_res.tg->funcs->set_test_pattern)
5514 pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg,
5515 controller_test_pattern, color_depth);
/* ...otherwise drive the DPG through the hwss hook. */
5516 else if (link->dc->hwss.set_disp_pattern_generator) {
5517 struct pipe_ctx *odm_pipe;
5518 enum controller_dp_color_space controller_color_space;
5521 int dpg_width = width;
5523 switch (test_pattern_color_space) {
5524 case DP_TEST_PATTERN_COLOR_SPACE_RGB:
5525 controller_color_space = CONTROLLER_DP_COLOR_SPACE_RGB;
5527 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR601:
5528 controller_color_space = CONTROLLER_DP_COLOR_SPACE_YCBCR601;
5530 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR709:
5531 controller_color_space = CONTROLLER_DP_COLOR_SPACE_YCBCR709;
5533 case DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED:
5535 controller_color_space = CONTROLLER_DP_COLOR_SPACE_UDEFINED;
5536 DC_LOG_ERROR("%s: Color space must be defined for test pattern", __func__);
/* Divide the pattern width evenly across ODM-combined pipes. */
5541 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe)
5543 dpg_width = width / opp_cnt;
5546 link->dc->hwss.set_disp_pattern_generator(link->dc,
5548 controller_test_pattern,
5549 controller_color_space,
/* Repeat for each trailing ODM pipe, each with its own OPP. */
5556 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe) {
5557 struct output_pixel_processor *odm_opp = odm_pipe->stream_res.opp;
5559 odm_opp->funcs->opp_program_bit_depth_reduction(odm_opp, ¶ms);
5560 link->dc->hwss.set_disp_pattern_generator(link->dc,
5562 controller_test_pattern,
5563 controller_color_space,
5574 case DP_TEST_PATTERN_VIDEO_MODE:
5576 /* restore bitdepth reduction */
5577 resource_build_bit_depth_reduction_params(pipe_ctx->stream, ¶ms);
5578 pipe_ctx->stream->bit_depth_params = params;
5579 opp->funcs->opp_program_bit_depth_reduction(opp, ¶ms);
5580 if (pipe_ctx->stream_res.tg->funcs->set_test_pattern)
5581 pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg,
5582 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
5584 else if (link->dc->hwss.set_disp_pattern_generator) {
5585 struct pipe_ctx *odm_pipe;
5587 int dpg_width = width;
5589 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe)
5592 dpg_width = width / opp_cnt;
5593 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe) {
5594 struct output_pixel_processor *odm_opp = odm_pipe->stream_res.opp;
5596 odm_opp->funcs->opp_program_bit_depth_reduction(odm_opp, ¶ms);
5597 link->dc->hwss.set_disp_pattern_generator(link->dc,
5599 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
5600 CONTROLLER_DP_COLOR_SPACE_UDEFINED,
/* Finally restore the primary pipe's DPG to video mode as well. */
5607 link->dc->hwss.set_disp_pattern_generator(link->dc,
5609 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
5610 CONTROLLER_DP_COLOR_SPACE_UDEFINED,
/* dc_link_dp_set_test_pattern() - apply a DP compliance test pattern on @link.
 *
 * Handles both PHY-layer patterns (D10.2, PRBS, CP2520, 128b/132b TPS, custom
 * patterns) and CRTC/controller-generated video patterns. Also notifies the
 * sink of the active link-quality pattern through DPCD, using the DP 1.2+
 * LINK_QUAL_LANEx_SET registers when supported, or the DP 1.1a
 * TRAINING_PATTERN_SET field otherwise.
 *
 * @p_link_settings: optional lane/link settings to program before the pattern.
 * @p_custom_pattern/@cust_pattern_size: payload for custom (80-bit/264-bit)
 * patterns; passed through to dp_set_hw_test_pattern().
 *
 * NOTE(review): this copy is missing physical lines (loop/if bodies, some
 * argument lists, returns) — verify against upstream before editing logic.
 */
5625 bool dc_link_dp_set_test_pattern(
5626 struct dc_link *link,
5627 enum dp_test_pattern test_pattern,
5628 enum dp_test_pattern_color_space test_pattern_color_space,
5629 const struct link_training_settings *p_link_settings,
5630 const unsigned char *p_custom_pattern,
5631 unsigned int cust_pattern_size)
5633 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
5634 struct pipe_ctx *pipe_ctx = NULL;
5637 unsigned char link_qual_pattern[LANE_COUNT_DP_MAX] = {0};
5638 union dpcd_training_pattern training_pattern;
5639 enum dpcd_phy_test_patterns pattern;
5641 memset(&training_pattern, 0, sizeof(training_pattern));
/* Find the top-level (non-ODM-secondary) pipe driving this link. */
5643 for (i = 0; i < MAX_PIPES; i++) {
5644 if (pipes[i].stream == NULL)
5647 if (pipes[i].stream->link == link && !pipes[i].top_pipe && !pipes[i].prev_odm_pipe) {
5648 pipe_ctx = &pipes[i];
5653 if (pipe_ctx == NULL)
5656 /* Reset CRTC Test Pattern if it is currently running and request is VideoMode */
5657 if (link->test_pattern_enabled && test_pattern ==
5658 DP_TEST_PATTERN_VIDEO_MODE) {
5659 /* Set CRTC Test Pattern */
5660 set_crtc_test_pattern(link, pipe_ctx, test_pattern, test_pattern_color_space);
5661 dp_set_hw_test_pattern(link, test_pattern,
5662 (uint8_t *)p_custom_pattern,
5663 (uint32_t)cust_pattern_size);
5665 /* Unblank Stream */
5666 link->dc->hwss.unblank_stream(
5668 &link->verified_link_cap);
5669 /* TODO:m_pHwss->MuteAudioEndpoint
5670 * (pPathMode->pDisplayPath, false);
5673 /* Reset Test Pattern state */
5674 link->test_pattern_enabled = false;
5679 /* Check for PHY Test Patterns */
5680 if (is_dp_phy_pattern(test_pattern)) {
5681 /* Set DPCD Lane Settings before running test pattern */
5682 if (p_link_settings != NULL) {
5683 dp_set_hw_lane_settings(link, p_link_settings, DPRX);
5684 dpcd_set_lane_settings(link, p_link_settings, DPRX);
5687 /* Blank stream if running test pattern */
5688 if (test_pattern != DP_TEST_PATTERN_VIDEO_MODE) {
5691 * MuteAudioEndpoint(pPathMode->pDisplayPath, true);
5694 pipes->stream_res.stream_enc->funcs->dp_blank(link, pipe_ctx->stream_res.stream_enc);
/* Program the pattern into the transmitter PHY. */
5697 dp_set_hw_test_pattern(link, test_pattern,
5698 (uint8_t *)p_custom_pattern,
5699 (uint32_t)cust_pattern_size);
5701 if (test_pattern != DP_TEST_PATTERN_VIDEO_MODE) {
5702 /* Set Test Pattern state */
5703 link->test_pattern_enabled = true;
5704 if (p_link_settings != NULL)
5705 dpcd_set_link_settings(link,
/* Map the DC test pattern to the DPCD PHY_TEST_PATTERN encoding. */
5709 switch (test_pattern) {
5710 case DP_TEST_PATTERN_VIDEO_MODE:
5711 pattern = PHY_TEST_PATTERN_NONE;
5713 case DP_TEST_PATTERN_D102:
5714 pattern = PHY_TEST_PATTERN_D10_2;
5716 case DP_TEST_PATTERN_SYMBOL_ERROR:
5717 pattern = PHY_TEST_PATTERN_SYMBOL_ERROR;
5719 case DP_TEST_PATTERN_PRBS7:
5720 pattern = PHY_TEST_PATTERN_PRBS7;
5722 case DP_TEST_PATTERN_80BIT_CUSTOM:
5723 pattern = PHY_TEST_PATTERN_80BIT_CUSTOM;
5725 case DP_TEST_PATTERN_CP2520_1:
5726 pattern = PHY_TEST_PATTERN_CP2520_1;
5728 case DP_TEST_PATTERN_CP2520_2:
5729 pattern = PHY_TEST_PATTERN_CP2520_2;
5731 case DP_TEST_PATTERN_CP2520_3:
5732 pattern = PHY_TEST_PATTERN_CP2520_3;
5734 #if defined(CONFIG_DRM_AMD_DC_DCN)
5735 case DP_TEST_PATTERN_128b_132b_TPS1:
5736 pattern = PHY_TEST_PATTERN_128b_132b_TPS1;
5738 case DP_TEST_PATTERN_128b_132b_TPS2:
5739 pattern = PHY_TEST_PATTERN_128b_132b_TPS2;
5741 case DP_TEST_PATTERN_PRBS9:
5742 pattern = PHY_TEST_PATTERN_PRBS9;
5744 case DP_TEST_PATTERN_PRBS11:
5745 pattern = PHY_TEST_PATTERN_PRBS11;
5747 case DP_TEST_PATTERN_PRBS15:
5748 pattern = PHY_TEST_PATTERN_PRBS15;
5750 case DP_TEST_PATTERN_PRBS23:
5751 pattern = PHY_TEST_PATTERN_PRBS23;
5753 case DP_TEST_PATTERN_PRBS31:
5754 pattern = PHY_TEST_PATTERN_PRBS31;
5756 case DP_TEST_PATTERN_264BIT_CUSTOM:
5757 pattern = PHY_TEST_PATTERN_264BIT_CUSTOM;
5759 case DP_TEST_PATTERN_SQUARE_PULSE:
5760 pattern = PHY_TEST_PATTERN_SQUARE_PULSE;
5767 if (test_pattern == DP_TEST_PATTERN_VIDEO_MODE
5768 /*TODO:&& !pPathMode->pDisplayPath->IsTargetPoweredOn()*/)
/* Notify the sink which qualification pattern is being driven. */
5771 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
5772 #if defined(CONFIG_DRM_AMD_DC_DCN)
5773 if (test_pattern == DP_TEST_PATTERN_SQUARE_PULSE)
5774 core_link_write_dpcd(link,
5775 DP_LINK_SQUARE_PATTERN,
5780 /* tell receiver that we are sending qualification
5781 * pattern DP 1.2 or later - DP receiver's link quality
5782 * pattern is set using DPCD LINK_QUAL_LANEx_SET
5783 * register (0x10B~0x10E)\
5785 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++)
5786 link_qual_pattern[lane] =
5787 (unsigned char)(pattern);
5789 core_link_write_dpcd(link,
5790 DP_LINK_QUAL_LANE0_SET,
5792 sizeof(link_qual_pattern));
5793 } else if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_10 ||
5794 link->dpcd_caps.dpcd_rev.raw == 0) {
5795 /* tell receiver that we are sending qualification
5796 * pattern DP 1.1a or earlier - DP receiver's link
5797 * quality pattern is set using
5798 * DPCD TRAINING_PATTERN_SET -> LINK_QUAL_PATTERN_SET
5799 * register (0x102). We will use v_1.3 when we are
5800 * setting test pattern for DP 1.1.
5802 core_link_read_dpcd(link, DP_TRAINING_PATTERN_SET,
5803 &training_pattern.raw,
5804 sizeof(training_pattern));
5805 training_pattern.v1_3.LINK_QUAL_PATTERN_SET = pattern;
5806 core_link_write_dpcd(link, DP_TRAINING_PATTERN_SET,
5807 &training_pattern.raw,
5808 sizeof(training_pattern));
/* Non-PHY pattern: pick the MSA color space, then drive the CRTC path. */
5811 enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
5813 switch (test_pattern_color_space) {
5814 case DP_TEST_PATTERN_COLOR_SPACE_RGB:
5815 color_space = COLOR_SPACE_SRGB;
5816 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
5817 color_space = COLOR_SPACE_SRGB_LIMITED;
5820 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR601:
5821 color_space = COLOR_SPACE_YCBCR601;
5822 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
5823 color_space = COLOR_SPACE_YCBCR601_LIMITED;
5825 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR709:
5826 color_space = COLOR_SPACE_YCBCR709;
5827 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
5828 color_space = COLOR_SPACE_YCBCR709_LIMITED;
/* Take the double-buffer lock (via DMUB when firmware owns it). */
5834 if (pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_enable) {
5835 if (pipe_ctx->stream && should_use_dmub_lock(pipe_ctx->stream->link)) {
5836 union dmub_hw_lock_flags hw_locks = { 0 };
5837 struct dmub_hw_lock_inst_flags inst_flags = { 0 };
5839 hw_locks.bits.lock_dig = 1;
5840 inst_flags.dig_inst = pipe_ctx->stream_res.tg->inst;
5842 dmub_hw_lock_mgr_cmd(link->ctx->dmub_srv,
5847 pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_enable(
5848 pipe_ctx->stream_res.tg);
5851 pipe_ctx->stream_res.tg->funcs->lock(pipe_ctx->stream_res.tg);
5852 /* update MSA to requested color space */
5853 pipe_ctx->stream_res.stream_enc->funcs->dp_set_stream_attribute(pipe_ctx->stream_res.stream_enc,
5854 &pipe_ctx->stream->timing,
5856 pipe_ctx->stream->use_vsc_sdp_for_colorimetry,
5857 link->dpcd_caps.dprx_feature.bits.SST_SPLIT_SDP_CAP);
/* When colorimetry travels in the VSC SDP, update its dynamic-range bit. */
5859 if (pipe_ctx->stream->use_vsc_sdp_for_colorimetry) {
5860 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
5861 pipe_ctx->stream->vsc_infopacket.sb[17] |= (1 << 7); // sb17 bit 7 Dynamic Range: 0 = VESA range, 1 = CTA range
5863 pipe_ctx->stream->vsc_infopacket.sb[17] &= ~(1 << 7);
5864 resource_build_info_frame(pipe_ctx);
5865 link->dc->hwss.update_info_frame(pipe_ctx);
5869 set_crtc_test_pattern(link, pipe_ctx, test_pattern, test_pattern_color_space);
5870 pipe_ctx->stream_res.tg->funcs->unlock(pipe_ctx->stream_res.tg);
/* Wait through VACTIVE transitions so the pattern latches cleanly. */
5871 pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
5872 CRTC_STATE_VACTIVE);
5873 pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
5875 pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
5876 CRTC_STATE_VACTIVE);
5878 if (pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_disable) {
5879 if (pipe_ctx->stream && should_use_dmub_lock(pipe_ctx->stream->link)) {
5880 union dmub_hw_lock_flags hw_locks = { 0 };
5881 struct dmub_hw_lock_inst_flags inst_flags = { 0 };
5883 hw_locks.bits.lock_dig = 1;
5884 inst_flags.dig_inst = pipe_ctx->stream_res.tg->inst;
5886 dmub_hw_lock_mgr_cmd(link->ctx->dmub_srv,
5891 pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_disable(
5892 pipe_ctx->stream_res.tg);
5895 /* Set Test Pattern state */
5896 link->test_pattern_enabled = true;
5902 void dp_enable_mst_on_sink(struct dc_link *link, bool enable)
5904 unsigned char mstmCntl;
5906 core_link_read_dpcd(link, DP_MSTM_CTRL, &mstmCntl, 1);
5908 mstmCntl |= DP_MST_EN;
5910 mstmCntl &= (~DP_MST_EN);
5912 core_link_write_dpcd(link, DP_MSTM_CTRL, &mstmCntl, 1);
/* dp_set_panel_mode() - program the sink's eDP panel mode.
 *
 * For DP_PANEL_MODE_EDP / DP_PANEL_MODE_SPECIAL the PANEL_MODE_EDP bit of
 * DPCD DP_EDP_CONFIGURATION_SET is enabled; the register is only written
 * when the current value differs from the requested one.
 * DP_PANEL_MODE_DEFAULT leaves the register untouched.
 */
5915 void dp_set_panel_mode(struct dc_link *link, enum dp_panel_mode panel_mode)
5917 union dpcd_edp_config edp_config_set;
5918 bool panel_mode_edp = false;
5920 memset(&edp_config_set, '\0', sizeof(union dpcd_edp_config));
5922 if (panel_mode != DP_PANEL_MODE_DEFAULT) {
5924 switch (panel_mode) {
5925 case DP_PANEL_MODE_EDP:
5926 case DP_PANEL_MODE_SPECIAL:
5927 panel_mode_edp = true;
5934 /*set edp panel mode in receiver*/
5935 core_link_read_dpcd(
5937 DP_EDP_CONFIGURATION_SET,
5938 &edp_config_set.raw,
5939 sizeof(edp_config_set.raw));
/* Only write DPCD when the stored mode differs from the request. */
5941 if (edp_config_set.bits.PANEL_MODE_EDP
5942 != panel_mode_edp) {
5943 enum dc_status result;
5945 edp_config_set.bits.PANEL_MODE_EDP =
5947 result = core_link_write_dpcd(
5949 DP_EDP_CONFIGURATION_SET,
5950 &edp_config_set.raw,
5951 sizeof(edp_config_set.raw));
5953 ASSERT(result == DC_OK);
5956 DC_LOG_DETECTION_DP_CAPS("Link: %d eDP panel mode supported: %d "
5957 "eDP panel mode enabled: %d \n",
5959 link->dpcd_caps.panel_mode_edp,
/* dp_get_panel_mode() - decide which panel mode @link should use.
 *
 * Returns DP_PANEL_MODE_SPECIAL for known Travis/Nutmeg VGA-LVDS converter
 * branch devices (matched by branch device ID plus device-name string),
 * DP_PANEL_MODE_EDP for eDP connectors (or internal DP panels that report
 * eDP panel-mode support), and DP_PANEL_MODE_DEFAULT otherwise.
 */
5963 enum dp_panel_mode dp_get_panel_mode(struct dc_link *link)
5965 /* We need to explicitly check that connector
5966 * is not DP. Some Travis_VGA get reported
5967 * by video bios as DP.
5969 if (link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT) {
5971 switch (link->dpcd_caps.branch_dev_id) {
5972 case DP_BRANCH_DEVICE_ID_0022B9:
5973 /* alternate scrambler reset is required for Travis
5974 * for the case when external chip does not
5975 * provide sink device id, alternate scrambler
5976 * scheme will be overriden later by querying
5980 link->dpcd_caps.branch_dev_name,
5981 DP_VGA_LVDS_CONVERTER_ID_2,
5984 branch_dev_name)) == 0) {
5985 return DP_PANEL_MODE_SPECIAL;
5988 case DP_BRANCH_DEVICE_ID_00001A:
5989 /* alternate scrambler reset is required for Travis
5990 * for the case when external chip does not provide
5991 * sink device id, alternate scrambler scheme will
5992 * be overriden later by querying Encoder feature
5994 if (strncmp(link->dpcd_caps.branch_dev_name,
5995 DP_VGA_LVDS_CONVERTER_ID_3,
5998 branch_dev_name)) == 0) {
5999 return DP_PANEL_MODE_SPECIAL;
/* eDP connectors, and internal DP panels advertising eDP panel mode,
 * use eDP panel mode.
 */
6007 if (link->dpcd_caps.panel_mode_edp &&
6008 (link->connector_signal == SIGNAL_TYPE_EDP ||
6009 (link->connector_signal == SIGNAL_TYPE_DISPLAY_PORT &&
6010 link->is_internal_display))) {
6011 return DP_PANEL_MODE_EDP;
6014 return DP_PANEL_MODE_DEFAULT;
/* dp_set_fec_ready() - move FEC to/from the "ready" state around training.
 *
 * Writes DPCD DP_FEC_CONFIGURATION and mirrors the state into the link
 * encoder via funcs->fec_set_ready, tracking progress in link->fec_state.
 * On a failed DPCD write the encoder is forced back to not-ready.
 *
 * Returns the status of the DPCD write (DC_OK when nothing needed doing).
 */
6017 enum dc_status dp_set_fec_ready(struct dc_link *link, bool ready)
6019 /* FEC has to be "set ready" before the link training.
6020 * The policy is to always train with FEC
6021 * if the sink supports it and leave it enabled on link.
6022 * If FEC is not supported, disable it.
6024 struct link_encoder *link_enc = NULL;
6025 enum dc_status status = DC_OK;
6026 uint8_t fec_config = 0;
6028 /* Access link encoder based on whether it is statically
6029 * or dynamically assigned to a link.
6031 if (link->is_dig_mapping_flexible &&
6032 link->dc->res_pool->funcs->link_encs_assign)
6033 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
6035 link_enc = link->link_enc;
6038 if (!dc_link_should_enable_fec(link))
6041 if (link_enc->funcs->fec_set_ready &&
6042 link->dpcd_caps.fec_cap.bits.FEC_CAPABLE) {
/* ready == true: tell the sink, then arm the encoder on success. */
6045 status = core_link_write_dpcd(link,
6046 DP_FEC_CONFIGURATION,
6048 sizeof(fec_config));
6049 if (status == DC_OK) {
6050 link_enc->funcs->fec_set_ready(link_enc, true);
6051 link->fec_state = dc_link_fec_ready;
6053 link_enc->funcs->fec_set_ready(link_enc, false);
6054 link->fec_state = dc_link_fec_not_ready;
6055 dm_error("dpcd write failed to set fec_ready");
6057 } else if (link->fec_state == dc_link_fec_ready) {
/* ready == false: clear the sink's FEC config and stand down. */
6059 status = core_link_write_dpcd(link,
6060 DP_FEC_CONFIGURATION,
6062 sizeof(fec_config));
6063 link_enc->funcs->fec_set_ready(link_enc, false);
6064 link->fec_state = dc_link_fec_not_ready;
/* dp_set_fec_enable() - turn FEC on/off at the link encoder.
 *
 * Only transitions ready -> enabled (when @enable) or enabled -> ready
 * (when !@enable); any other starting state is left alone. Requires the
 * sink to report FEC_CAPABLE and the encoder to implement fec_set_enable.
 */
6071 void dp_set_fec_enable(struct dc_link *link, bool enable)
6073 struct link_encoder *link_enc = NULL;
6075 /* Access link encoder based on whether it is statically
6076 * or dynamically assigned to a link.
6078 if (link->is_dig_mapping_flexible &&
6079 link->dc->res_pool->funcs->link_encs_assign)
6080 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
6082 link_enc = link->link_enc;
6085 if (!dc_link_should_enable_fec(link))
6088 if (link_enc->funcs->fec_set_enable &&
6089 link->dpcd_caps.fec_cap.bits.FEC_CAPABLE) {
6090 if (link->fec_state == dc_link_fec_ready && enable) {
6091 /* Accord to DP spec, FEC enable sequence can first
6092 * be transmitted anytime after 1000 LL codes have
6093 * been transmitted on the link after link training
6094 * completion. Using 1 lane RBR should have the maximum
6095 * time for transmitting 1000 LL codes which is 6.173 us.
6096 * So use 7 microseconds delay instead.
6099 link_enc->funcs->fec_set_enable(link_enc, true);
6100 link->fec_state = dc_link_fec_enabled;
6101 } else if (link->fec_state == dc_link_fec_enabled && !enable) {
6102 link_enc->funcs->fec_set_enable(link_enc, false);
6103 link->fec_state = dc_link_fec_ready;
/* dpcd_set_source_specific_data() - publish source (GPU) identity to the sink.
 *
 * Without a board-vendor signature: write the AMD IEEE OUI signature (00 00 1A)
 * to DP_SOURCE_OUI if not already present, follow it with an AMD device-ID
 * blob, and on DCN2.0+ optionally advertise the source's minimum horizontal
 * blanking period at DPCD 00340h (disabled for sinks that reject the write).
 * With a valid vendor signature: write that raw signature to DP_SOURCE_OUI
 * instead.
 */
6108 void dpcd_set_source_specific_data(struct dc_link *link)
6110 if (!link->dc->vendor_signature.is_valid) {
6111 enum dc_status __maybe_unused result_write_min_hblank = DC_NOT_SUPPORTED;
6112 struct dpcd_amd_signature amd_signature = {0};
6113 struct dpcd_amd_device_id amd_device_id = {0};
/* Low/high bytes of the ASIC chip id plus the DCE/DCN version. */
6115 amd_device_id.device_id_byte1 =
6116 (uint8_t)(link->ctx->asic_id.chip_id);
6117 amd_device_id.device_id_byte2 =
6118 (uint8_t)(link->ctx->asic_id.chip_id >> 8);
6119 amd_device_id.dce_version =
6120 (uint8_t)(link->ctx->dce_version);
6121 amd_device_id.dal_version_byte1 = 0x0; // needed? where to get?
6122 amd_device_id.dal_version_byte2 = 0x0; // needed? where to get?
6124 core_link_read_dpcd(link, DP_SOURCE_OUI,
6125 (uint8_t *)(&amd_signature),
6126 sizeof(amd_signature));
/* (Re)write the AMD OUI only when the sink doesn't already hold it. */
6128 if (!((amd_signature.AMD_IEEE_TxSignature_byte1 == 0x0) &&
6129 (amd_signature.AMD_IEEE_TxSignature_byte2 == 0x0) &&
6130 (amd_signature.AMD_IEEE_TxSignature_byte3 == 0x1A))) {
6132 amd_signature.AMD_IEEE_TxSignature_byte1 = 0x0;
6133 amd_signature.AMD_IEEE_TxSignature_byte2 = 0x0;
6134 amd_signature.AMD_IEEE_TxSignature_byte3 = 0x1A;
6136 core_link_write_dpcd(link, DP_SOURCE_OUI,
6137 (uint8_t *)(&amd_signature),
6138 sizeof(amd_signature));
/* Device-id blob follows the 3-byte OUI at DP_SOURCE_OUI + 3. */
6141 core_link_write_dpcd(link, DP_SOURCE_OUI+0x03,
6142 (uint8_t *)(&amd_device_id),
6143 sizeof(amd_device_id));
6145 if (link->ctx->dce_version >= DCN_VERSION_2_0 &&
6146 link->dc->caps.min_horizontal_blanking_period != 0) {
6148 uint8_t hblank_size = (uint8_t)link->dc->caps.min_horizontal_blanking_period;
6150 if (link->preferred_link_setting.dpcd_source_device_specific_field_support) {
6151 result_write_min_hblank = core_link_write_dpcd(link,
6152 DP_SOURCE_MINIMUM_HBLANK_SUPPORTED, (uint8_t *)(&hblank_size),
6153 sizeof(hblank_size));
/* Sink rejected 00340h: stop attempting this write on later calls. */
6155 if (result_write_min_hblank == DC_ERROR_UNEXPECTED)
6156 link->preferred_link_setting.dpcd_source_device_specific_field_support = false;
6158 DC_LOG_DC("Sink device does not support 00340h DPCD write. Skipping on purpose.\n");
6162 DC_TRACE_LEVEL_MESSAGE(DAL_TRACE_LEVEL_INFORMATION,
6163 WPP_BIT_FLAG_DC_DETECTION_DP_CAPS,
6164 "result=%u link_index=%u enum dce_version=%d DPCD=0x%04X min_hblank=%u branch_dev_id=0x%x branch_dev_name='%c%c%c%c%c%c'",
6165 result_write_min_hblank,
6167 link->ctx->dce_version,
6168 DP_SOURCE_MINIMUM_HBLANK_SUPPORTED,
6169 link->dc->caps.min_horizontal_blanking_period,
6170 link->dpcd_caps.branch_dev_id,
6171 link->dpcd_caps.branch_dev_name[0],
6172 link->dpcd_caps.branch_dev_name[1],
6173 link->dpcd_caps.branch_dev_name[2],
6174 link->dpcd_caps.branch_dev_name[3],
6175 link->dpcd_caps.branch_dev_name[4],
6176 link->dpcd_caps.branch_dev_name[5]);
/* Board vendor supplied its own signature: write it verbatim. */
6178 core_link_write_dpcd(link, DP_SOURCE_OUI,
6179 link->dc->vendor_signature.data.raw,
6180 sizeof(link->dc->vendor_signature.data.raw));
6184 bool dc_link_set_backlight_level_nits(struct dc_link *link,
6186 uint32_t backlight_millinits,
6187 uint32_t transition_time_in_ms)
6189 struct dpcd_source_backlight_set dpcd_backlight_set;
6190 uint8_t backlight_control = isHDR ? 1 : 0;
6192 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6193 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6196 // OLEDs have no PWM, they can only use AUX
6197 if (link->dpcd_sink_ext_caps.bits.oled == 1)
6198 backlight_control = 1;
6200 *(uint32_t *)&dpcd_backlight_set.backlight_level_millinits = backlight_millinits;
6201 *(uint16_t *)&dpcd_backlight_set.backlight_transition_time_ms = (uint16_t)transition_time_in_ms;
6204 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_LEVEL,
6205 (uint8_t *)(&dpcd_backlight_set),
6206 sizeof(dpcd_backlight_set)) != DC_OK)
6209 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_CONTROL,
6210 &backlight_control, 1) != DC_OK)
6216 bool dc_link_get_backlight_level_nits(struct dc_link *link,
6217 uint32_t *backlight_millinits_avg,
6218 uint32_t *backlight_millinits_peak)
6220 union dpcd_source_backlight_get dpcd_backlight_get;
6222 memset(&dpcd_backlight_get, 0, sizeof(union dpcd_source_backlight_get));
6224 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6225 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6228 if (core_link_read_dpcd(link, DP_SOURCE_BACKLIGHT_CURRENT_PEAK,
6229 dpcd_backlight_get.raw,
6230 sizeof(union dpcd_source_backlight_get)) != DC_OK)
6233 *backlight_millinits_avg =
6234 dpcd_backlight_get.bytes.backlight_millinits_avg;
6235 *backlight_millinits_peak =
6236 dpcd_backlight_get.bytes.backlight_millinits_peak;
6238 /* On non-supported panels dpcd_read usually succeeds with 0 returned */
6239 if (*backlight_millinits_avg == 0 ||
6240 *backlight_millinits_avg > *backlight_millinits_peak)
6246 bool dc_link_backlight_enable_aux(struct dc_link *link, bool enable)
6248 uint8_t backlight_enable = enable ? 1 : 0;
6250 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6251 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6254 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_ENABLE,
6255 &backlight_enable, 1) != DC_OK)
// We read the default backlight level from DPCD 0x320 because the BIOS is
// expected to have written it there; the regular get_backlight_nits path
// reads the panel-programmed value at 0x326 instead.
6263 bool dc_link_read_default_bl_aux(struct dc_link *link, uint32_t *backlight_millinits)
6265 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6266 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6269 if (core_link_read_dpcd(link, DP_SOURCE_BACKLIGHT_LEVEL,
6270 (uint8_t *) backlight_millinits,
6271 sizeof(uint32_t)) != DC_OK)
6277 bool dc_link_set_default_brightness_aux(struct dc_link *link)
6279 uint32_t default_backlight;
6281 if (link && link->dpcd_sink_ext_caps.bits.oled == 1) {
6282 if (!dc_link_read_default_bl_aux(link, &default_backlight))
6283 default_backlight = 150000;
6284 // if < 5 nits or > 5000, it might be wrong readback
6285 if (default_backlight < 5000 || default_backlight > 5000000)
6286 default_backlight = 150000; //
6288 return dc_link_set_backlight_level_nits(link, true,
6289 default_backlight, 0);
/* is_edp_ilr_optimization_required() - decide whether the eDP link should be
 * retrained using an Intermediate Link Rate (ILR).
 *
 * Compares the link rate/lane count currently programmed by VBIOS (DPCD
 * 00100h/00115h/00101h) against what decide_edp_link_settings*() would pick
 * for @crtc_timing's bandwidth. Returns true when they differ (or when VBIOS
 * used the legacy LINK_BW_SET path), i.e. retraining is worthwhile.
 */
6294 bool is_edp_ilr_optimization_required(struct dc_link *link, struct dc_crtc_timing *crtc_timing)
6296 struct dc_link_settings link_setting;
6297 uint8_t link_bw_set;
6298 uint8_t link_rate_set;
6300 union lane_count_set lane_count_set = {0};
6302 ASSERT(link || crtc_timing); // invalid input
/* ILR only applies when the panel reports supported link rates and the
 * optimization is enabled in debug options.
 */
6304 if (link->dpcd_caps.edp_supported_link_rates_count == 0 ||
6305 !link->dc->debug.optimize_edp_link_rate)
6309 // Read DPCD 00100h to find if standard link rates are set
6310 core_link_read_dpcd(link, DP_LINK_BW_SET,
6311 &link_bw_set, sizeof(link_bw_set));
6314 DC_LOG_EVENT_LINK_TRAINING("eDP ILR: Optimization required, VBIOS used link_bw_set\n");
6318 // Read DPCD 00115h to find the edp link rate set used
6319 core_link_read_dpcd(link, DP_LINK_RATE_SET,
6320 &link_rate_set, sizeof(link_rate_set));
6322 // Read DPCD 00101h to find out the number of lanes currently set
6323 core_link_read_dpcd(link, DP_LANE_COUNT_SET,
6324 &lane_count_set.raw, sizeof(lane_count_set));
6326 req_bw = dc_bandwidth_in_kbps_from_timing(crtc_timing);
/* Pick ideal settings for this timing, with or without DSC. */
6328 if (!crtc_timing->flags.DSC)
6329 decide_edp_link_settings(link, &link_setting, req_bw);
6331 decide_edp_link_settings_with_dsc(link, &link_setting, req_bw, LINK_RATE_UNKNOWN);
6333 if (link->dpcd_caps.edp_supported_link_rates[link_rate_set] != link_setting.link_rate ||
6334 lane_count_set.bits.LANE_COUNT_SET != link_setting.lane_count) {
6335 DC_LOG_EVENT_LINK_TRAINING("eDP ILR: Optimization required, VBIOS link_rate_set not optimal\n");
6339 DC_LOG_EVENT_LINK_TRAINING("eDP ILR: No optimization required, VBIOS set optimal link_rate_set\n");
6343 enum dp_link_encoding dp_get_link_encoding_format(const struct dc_link_settings *link_settings)
6345 if ((link_settings->link_rate >= LINK_RATE_LOW) &&
6346 (link_settings->link_rate <= LINK_RATE_HIGH3))
6347 return DP_8b_10b_ENCODING;
6348 #if defined(CONFIG_DRM_AMD_DC_DCN)
6349 else if ((link_settings->link_rate >= LINK_RATE_UHBR10) &&
6350 (link_settings->link_rate <= LINK_RATE_UHBR20))
6351 return DP_128b_132b_ENCODING;
6353 return DP_UNKNOWN_ENCODING;
6356 #if defined(CONFIG_DRM_AMD_DC_DCN)
6357 enum dp_link_encoding dc_link_dp_mst_decide_link_encoding_format(const struct dc_link *link)
6359 struct dc_link_settings link_settings = {0};
6361 if (!dc_is_dp_signal(link->connector_signal))
6362 return DP_UNKNOWN_ENCODING;
6364 if (link->preferred_link_setting.lane_count !=
6365 LANE_COUNT_UNKNOWN &&
6366 link->preferred_link_setting.link_rate !=
6367 LINK_RATE_UNKNOWN) {
6368 link_settings = link->preferred_link_setting;
6370 decide_mst_link_settings(link, &link_settings);
6373 return dp_get_link_encoding_format(&link_settings);
6376 // TODO - DP2.0 Link: Fix get_lane_status to handle LTTPR offset (SST and MST)
/* get_lane_status() - read per-lane training status plus the alignment
 * byte from the sink's lane-status DPCD block.
 *
 * @status: out array, one nibble of raw status per lane (up to @lane_count).
 * @status_updated: out, raw alignment/status-updated byte (dpcd_buf[2]).
 * Both out pointers must be non-NULL or the function does nothing.
 */
6377 static void get_lane_status(
6378 struct dc_link *link,
6379 uint32_t lane_count,
6380 union lane_status *status,
6381 union lane_align_status_updated *status_updated)
6384 uint8_t dpcd_buf[3] = {0};
6386 if (status == NULL || status_updated == NULL) {
6390 core_link_read_dpcd(
/* Two lanes share each DPCD byte; unpack one nibble per lane. */
6396 for (lane = 0; lane < lane_count; lane++) {
6397 status[lane].raw = get_nibble_at_index(&dpcd_buf[0], lane);
6400 status_updated->raw = dpcd_buf[2];
/* dpcd_write_128b_132b_sst_payload_allocation_table() - program the DP2.x
 * SST payload table in the downstream branch and poll for its update.
 *
 * Computes the required time-slot count from the stream's average MTP
 * occupancy, writes the allocation registers (DPCD 1C0h-1C2h), triggers the
 * update via DPCD 2C0h, and polls up to 30 times for the branch to confirm.
 * On success fills @proposed_table with the single SST allocation.
 *
 * Returns true when the branch acknowledged the table update.
 */
6403 bool dpcd_write_128b_132b_sst_payload_allocation_table(
6404 const struct dc_stream_state *stream,
6405 struct dc_link *link,
6406 struct link_mst_stream_allocation_table *proposed_table,
6409 const uint8_t vc_id = 1; /// VC ID always 1 for SST
6410 const uint8_t start_time_slot = 0; /// Always start at time slot 0 for SST
6411 bool result = false;
6412 uint8_t req_slot_count = 0;
6413 struct fixed31_32 avg_time_slots_per_mtp = { 0 };
6414 union payload_table_update_status update_status = { 0 };
6415 const uint32_t max_retries = 30;
6416 uint32_t retries = 0;
6419 avg_time_slots_per_mtp = calculate_sst_avg_time_slots_per_mtp(stream, link);
6420 req_slot_count = dc_fixpt_ceil(avg_time_slots_per_mtp);
6422 /// Leave req_slot_count = 0 if allocate is false.
6425 /// Write DPCD 2C0 = 1 to start updating
6426 update_status.bits.VC_PAYLOAD_TABLE_UPDATED = 1;
6427 core_link_write_dpcd(
6429 DP_PAYLOAD_TABLE_UPDATE_STATUS,
6433 /// Program the changes in DPCD 1C0 - 1C2
6435 core_link_write_dpcd(
6437 DP_PAYLOAD_ALLOCATE_SET,
6441 ASSERT(start_time_slot == 0);
6442 core_link_write_dpcd(
6444 DP_PAYLOAD_ALLOCATE_START_TIME_SLOT,
6448 ASSERT(req_slot_count <= MAX_MTP_SLOT_COUNT); /// Validation should filter out modes that exceed link BW
6449 core_link_write_dpcd(
6451 DP_PAYLOAD_ALLOCATE_TIME_SLOT_COUNT,
6455 /// Poll till DPCD 2C0 read 1
6456 /// Try for at least 150ms (30 retries, with 5ms delay after each attempt)
6458 while (retries < max_retries) {
6459 if (core_link_read_dpcd(
6461 DP_PAYLOAD_TABLE_UPDATE_STATUS,
6464 if (update_status.bits.VC_PAYLOAD_TABLE_UPDATED == 1) {
6465 DC_LOG_DP2("SST Update Payload: downstream payload table updated.");
/* Read failed: probe DPCD revision to see if the sink responds at all. */
6470 union dpcd_rev dpcdRev;
6472 if (core_link_read_dpcd(
6477 DC_LOG_ERROR("SST Update Payload: Unable to read DPCD revision "
6478 "of sink while polling payload table "
6479 "updated status bit.");
/* Branch never confirmed: log and carry on with our local bookkeeping. */
6487 if (!result && retries == max_retries) {
6488 DC_LOG_ERROR("SST Update Payload: Payload table not updated after retries, "
6489 "continue on. Something is wrong with the branch.");
6490 // TODO - DP2.0 Payload: Read and log the payload table from downstream branch
6493 proposed_table->stream_count = 1; /// Always 1 stream for SST
6494 proposed_table->stream_allocations[0].slot_count = req_slot_count;
6495 proposed_table->stream_allocations[0].vcp_id = vc_id;
/* dpcd_poll_for_allocation_change_trigger() - wait for the downstream
 * branch to acknowledge an Allocation Change Trigger (ACT).
 *
 * Polls DPCD DP_PAYLOAD_TABLE_UPDATE_STATUS up to 30 times for the
 * ACT_HANDLED bit, aborting early if lane status shows the link has lost
 * CR/EQ/symbol-lock/alignment. Returns true only on an ACT_SUCCESS result.
 */
6500 bool dpcd_poll_for_allocation_change_trigger(struct dc_link *link)
6503 * wait for ACT handled
6506 const int act_retries = 30;
6507 enum act_return_status result = ACT_FAILED;
6508 union payload_table_update_status update_status = {0};
6509 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
6510 union lane_align_status_updated lane_status_updated;
6512 for (i = 0; i < act_retries; i++) {
6513 get_lane_status(link, link->cur_link_settings.lane_count, dpcd_lane_status, &lane_status_updated);
/* Bail out on link loss: ACT will never be handled on a dead link. */
6515 if (!dp_is_cr_done(link->cur_link_settings.lane_count, dpcd_lane_status) ||
6516 !dp_is_ch_eq_done(link->cur_link_settings.lane_count, dpcd_lane_status) ||
6517 !dp_is_symbol_locked(link->cur_link_settings.lane_count, dpcd_lane_status) ||
6518 !dp_is_interlane_aligned(lane_status_updated)) {
6519 DC_LOG_ERROR("SST Update Payload: Link loss occurred while "
6520 "polling for ACT handled.");
6521 result = ACT_LINK_LOST;
6524 core_link_read_dpcd(
6526 DP_PAYLOAD_TABLE_UPDATE_STATUS,
6530 if (update_status.bits.ACT_HANDLED == 1) {
6531 DC_LOG_DP2("SST Update Payload: ACT handled by downstream.");
6532 result = ACT_SUCCESS;
6539 if (result == ACT_FAILED) {
6540 DC_LOG_ERROR("SST Update Payload: ACT still not handled after retries, "
6541 "continue on. Something is wrong with the branch.");
6544 return (result == ACT_SUCCESS);
/* calculate_sst_avg_time_slots_per_mtp() - average MTP time slots needed by
 * @stream on @link, in fixed 31.32 arithmetic.
 *
 * avg slots = stream bandwidth / (link bandwidth / MAX_MTP_SLOT_COUNT).
 */
6547 struct fixed31_32 calculate_sst_avg_time_slots_per_mtp(
6548 const struct dc_stream_state *stream,
6549 const struct dc_link *link)
6551 struct fixed31_32 link_bw_effective =
6553 dc_link_bandwidth_kbps(link, &link->cur_link_settings));
/* Bandwidth available per time slot = link bandwidth / slot count. */
6554 struct fixed31_32 timeslot_bw_effective =
6555 dc_fixpt_div_int(link_bw_effective, MAX_MTP_SLOT_COUNT);
6556 struct fixed31_32 timing_bw =
6558 dc_bandwidth_in_kbps_from_timing(&stream->timing));
6559 struct fixed31_32 avg_time_slots_per_mtp =
6560 dc_fixpt_div(timing_bw, timeslot_bw_effective);
6562 return avg_time_slots_per_mtp;
6565 bool is_dp_128b_132b_signal(struct pipe_ctx *pipe_ctx)
6567 return (pipe_ctx->stream_res.hpo_dp_stream_enc &&
6568 pipe_ctx->stream->link->hpo_dp_link_enc &&
6569 dc_is_dp_signal(pipe_ctx->stream->signal));
6573 void edp_panel_backlight_power_on(struct dc_link *link)
6575 if (link->connector_signal != SIGNAL_TYPE_EDP)
6578 link->dc->hwss.edp_power_control(link, true);
6579 link->dc->hwss.edp_wait_for_hpd_ready(link, true);
6580 if (link->dc->hwss.edp_backlight_control)
6581 link->dc->hwss.edp_backlight_control(link, true);