2 * Copyright 2015 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
24 #include "dm_services.h"
26 #include "dc_link_dp.h"
27 #include "dm_helpers.h"
32 #include "inc/core_types.h"
33 #include "link_hwss.h"
34 #include "dc_link_ddc.h"
35 #include "core_status.h"
36 #include "dpcd_defs.h"
37 #include "dc_dmub_srv.h"
38 #include "dce/dmub_hw_lock_mgr.h"
39 #include "inc/dc_link_dpia.h"
40 #include "inc/link_enc_cfg.h"
/* Byte-reversed branch-device ID strings ("sivarT" = "Travis",
 * "dnomlA" = "Almond") for DP-to-VGA/LVDS converter dongles; compared
 * against the 6-byte device ID read from the DPCD branch device fields.
 */
static const uint8_t DP_VGA_LVDS_CONVERTER_ID_2[] = "sivarT";
static const uint8_t DP_VGA_LVDS_CONVERTER_ID_3[] = "dnomlA";
/* Trace-level logging is compiled out in this translation unit. */
#define DC_TRACE_LEVEL_MESSAGE(...) /* do nothing */
51 #include "link_dpcd.h"
53 /* maximum pre emphasis level allowed for each voltage swing level*/
54 static const enum dc_pre_emphasis
55 voltage_swing_to_pre_emphasis[] = { PRE_EMPHASIS_LEVEL3,
58 PRE_EMPHASIS_DISABLED };
/* Limits for post-link-training adjustment requests:
 * at most 6 requests, within a 200 ms window.
 */
enum {
	POST_LT_ADJ_REQ_LIMIT = 6,
	POST_LT_ADJ_REQ_TIMEOUT = 200
};
#if defined(CONFIG_DRM_AMD_DC_DCN)
/* One (lane count, link rate) candidate tried during link training fallback. */
struct dp_lt_fallback_entry {
	enum dc_lane_count lane_count;
	enum dc_link_rate link_rate;
};

static const struct dp_lt_fallback_entry dp_lt_fallbacks[] = {
		/* This link training fallback array is ordered by
		 * link bandwidth from highest to lowest.
		 * DP specs makes it a normative policy to always
		 * choose the next highest link bandwidth during
		 * link training fallback.
		 */
		{LANE_COUNT_FOUR, LINK_RATE_UHBR20},
		{LANE_COUNT_FOUR, LINK_RATE_UHBR13_5},
		{LANE_COUNT_TWO, LINK_RATE_UHBR20},
		{LANE_COUNT_FOUR, LINK_RATE_UHBR10},
		{LANE_COUNT_TWO, LINK_RATE_UHBR13_5},
		{LANE_COUNT_FOUR, LINK_RATE_HIGH3},
		{LANE_COUNT_ONE, LINK_RATE_UHBR20},
		{LANE_COUNT_TWO, LINK_RATE_UHBR10},
		{LANE_COUNT_FOUR, LINK_RATE_HIGH2},
		{LANE_COUNT_ONE, LINK_RATE_UHBR13_5},
		{LANE_COUNT_TWO, LINK_RATE_HIGH3},
		{LANE_COUNT_ONE, LINK_RATE_UHBR10},
		{LANE_COUNT_TWO, LINK_RATE_HIGH2},
		{LANE_COUNT_FOUR, LINK_RATE_HIGH},
		{LANE_COUNT_ONE, LINK_RATE_HIGH3},
		{LANE_COUNT_FOUR, LINK_RATE_LOW},
		{LANE_COUNT_ONE, LINK_RATE_HIGH2},
		{LANE_COUNT_TWO, LINK_RATE_HIGH},
		{LANE_COUNT_TWO, LINK_RATE_LOW},
		{LANE_COUNT_ONE, LINK_RATE_HIGH},
		{LANE_COUNT_ONE, LINK_RATE_LOW},
};
#endif
102 static bool decide_fallback_link_setting(
103 struct dc_link *link,
104 struct dc_link_settings initial_link_settings,
105 struct dc_link_settings *current_link_setting,
106 enum link_training_result training_result);
107 static struct dc_link_settings get_common_supported_link_settings(
108 struct dc_link_settings link_setting_a,
109 struct dc_link_settings link_setting_b);
110 static void maximize_lane_settings(const struct link_training_settings *lt_settings,
111 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX]);
112 static void override_lane_settings(const struct link_training_settings *lt_settings,
113 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX]);
115 static uint32_t get_cr_training_aux_rd_interval(struct dc_link *link,
116 const struct dc_link_settings *link_settings)
118 union training_aux_rd_interval training_rd_interval;
119 uint32_t wait_in_micro_secs = 100;
120 #if defined(CONFIG_DRM_AMD_DC_DCN)
121 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
122 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING &&
123 link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
126 DP_TRAINING_AUX_RD_INTERVAL,
127 (uint8_t *)&training_rd_interval,
128 sizeof(training_rd_interval));
129 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
130 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
135 DP_TRAINING_AUX_RD_INTERVAL,
136 (uint8_t *)&training_rd_interval,
137 sizeof(training_rd_interval));
138 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
139 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
141 return wait_in_micro_secs;
144 static uint32_t get_eq_training_aux_rd_interval(
145 struct dc_link *link,
146 const struct dc_link_settings *link_settings)
148 #if defined(CONFIG_DRM_AMD_DC_DCN)
149 union training_aux_rd_interval training_rd_interval;
151 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
152 if (dp_get_link_encoding_format(link_settings) == DP_128b_132b_ENCODING) {
155 DP_128b_132b_TRAINING_AUX_RD_INTERVAL,
156 (uint8_t *)&training_rd_interval,
157 sizeof(training_rd_interval));
158 } else if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING &&
159 link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
162 DP_TRAINING_AUX_RD_INTERVAL,
163 (uint8_t *)&training_rd_interval,
164 sizeof(training_rd_interval));
167 switch (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL) {
171 case 3: return 12000;
172 case 4: return 16000;
173 case 5: return 32000;
174 case 6: return 64000;
178 union training_aux_rd_interval training_rd_interval;
179 uint32_t wait_in_micro_secs = 400;
181 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
182 /* overwrite the delay if rev > 1.1*/
183 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
184 /* DP 1.2 or later - retrieve delay through
185 * "DPCD_ADDR_TRAINING_AUX_RD_INTERVAL" register */
188 DP_TRAINING_AUX_RD_INTERVAL,
189 (uint8_t *)&training_rd_interval,
190 sizeof(training_rd_interval));
192 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
193 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
196 return wait_in_micro_secs;
/* Busy-wait (or sleep, for long intervals) for the training AUX read
 * interval before polling link training status.
 */
void dp_wait_for_training_aux_rd_interval(
	struct dc_link *link,
	uint32_t wait_in_micro_secs)
{
#if defined(CONFIG_DRM_AMD_DC_DCN)
	/* intervals above 16 ms are too long to busy-wait */
	if (wait_in_micro_secs > 16000)
		msleep(wait_in_micro_secs/1000);
	else
		udelay(wait_in_micro_secs);
#else
	udelay(wait_in_micro_secs);
#endif

	DC_LOG_HW_LINK_TRAINING("%s:\n wait = %d\n",
		__func__,
		wait_in_micro_secs);
}
218 enum dpcd_training_patterns
219 dc_dp_training_pattern_to_dpcd_training_pattern(
220 struct dc_link *link,
221 enum dc_dp_training_pattern pattern)
223 enum dpcd_training_patterns dpcd_tr_pattern =
224 DPCD_TRAINING_PATTERN_VIDEOIDLE;
227 case DP_TRAINING_PATTERN_SEQUENCE_1:
228 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_1;
230 case DP_TRAINING_PATTERN_SEQUENCE_2:
231 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_2;
233 case DP_TRAINING_PATTERN_SEQUENCE_3:
234 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_3;
236 case DP_TRAINING_PATTERN_SEQUENCE_4:
237 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_4;
239 #if defined(CONFIG_DRM_AMD_DC_DCN)
240 case DP_128b_132b_TPS1:
241 dpcd_tr_pattern = DPCD_128b_132b_TPS1;
243 case DP_128b_132b_TPS2:
244 dpcd_tr_pattern = DPCD_128b_132b_TPS2;
246 case DP_128b_132b_TPS2_CDS:
247 dpcd_tr_pattern = DPCD_128b_132b_TPS2_CDS;
250 case DP_TRAINING_PATTERN_VIDEOIDLE:
251 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_VIDEOIDLE;
255 DC_LOG_HW_LINK_TRAINING("%s: Invalid HW Training pattern: %d\n",
260 return dpcd_tr_pattern;
263 static void dpcd_set_training_pattern(
264 struct dc_link *link,
265 enum dc_dp_training_pattern training_pattern)
267 union dpcd_training_pattern dpcd_pattern = {0};
269 dpcd_pattern.v1_4.TRAINING_PATTERN_SET =
270 dc_dp_training_pattern_to_dpcd_training_pattern(
271 link, training_pattern);
273 core_link_write_dpcd(
275 DP_TRAINING_PATTERN_SET,
279 DC_LOG_HW_LINK_TRAINING("%s\n %x pattern = %x\n",
281 DP_TRAINING_PATTERN_SET,
282 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
285 static enum dc_dp_training_pattern decide_cr_training_pattern(
286 const struct dc_link_settings *link_settings)
288 switch (dp_get_link_encoding_format(link_settings)) {
289 case DP_8b_10b_ENCODING:
291 return DP_TRAINING_PATTERN_SEQUENCE_1;
292 #if defined(CONFIG_DRM_AMD_DC_DCN)
293 case DP_128b_132b_ENCODING:
294 return DP_128b_132b_TPS1;
299 static enum dc_dp_training_pattern decide_eq_training_pattern(struct dc_link *link,
300 const struct dc_link_settings *link_settings)
302 struct link_encoder *link_enc;
303 #if defined(CONFIG_DRM_AMD_DC_DCN)
304 struct encoder_feature_support *enc_caps;
305 struct dpcd_caps *rx_caps = &link->dpcd_caps;
306 enum dc_dp_training_pattern pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
308 /* Access link encoder capability based on whether it is statically
309 * or dynamically assigned to a link.
311 if (link->is_dig_mapping_flexible &&
312 link->dc->res_pool->funcs->link_encs_assign)
313 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
315 link_enc = link->link_enc;
317 enc_caps = &link_enc->features;
319 switch (dp_get_link_encoding_format(link_settings)) {
320 case DP_8b_10b_ENCODING:
321 if (enc_caps->flags.bits.IS_TPS4_CAPABLE &&
322 rx_caps->max_down_spread.bits.TPS4_SUPPORTED)
323 pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
324 else if (enc_caps->flags.bits.IS_TPS3_CAPABLE &&
325 rx_caps->max_ln_count.bits.TPS3_SUPPORTED)
326 pattern = DP_TRAINING_PATTERN_SEQUENCE_3;
328 pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
330 case DP_128b_132b_ENCODING:
331 pattern = DP_128b_132b_TPS2;
334 pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
339 enum dc_dp_training_pattern highest_tp = DP_TRAINING_PATTERN_SEQUENCE_2;
340 struct encoder_feature_support *features;
341 struct dpcd_caps *dpcd_caps = &link->dpcd_caps;
343 /* Access link encoder capability based on whether it is statically
344 * or dynamically assigned to a link.
346 if (link->is_dig_mapping_flexible &&
347 link->dc->res_pool->funcs->link_encs_assign)
348 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
350 link_enc = link->link_enc;
352 features = &link_enc->features;
354 if (features->flags.bits.IS_TPS3_CAPABLE)
355 highest_tp = DP_TRAINING_PATTERN_SEQUENCE_3;
357 if (features->flags.bits.IS_TPS4_CAPABLE)
358 highest_tp = DP_TRAINING_PATTERN_SEQUENCE_4;
360 if (dpcd_caps->max_down_spread.bits.TPS4_SUPPORTED &&
361 highest_tp >= DP_TRAINING_PATTERN_SEQUENCE_4)
362 return DP_TRAINING_PATTERN_SEQUENCE_4;
364 if (dpcd_caps->max_ln_count.bits.TPS3_SUPPORTED &&
365 highest_tp >= DP_TRAINING_PATTERN_SEQUENCE_3)
366 return DP_TRAINING_PATTERN_SEQUENCE_3;
368 return DP_TRAINING_PATTERN_SEQUENCE_2;
#if defined(CONFIG_DRM_AMD_DC_DCN)
/* Convert driver link-rate enum to the value written to DPCD LINK_BW_SET.
 * For 128b/132b, LINK_BW_SET uses discrete codes (0x1 = UHBR10,
 * 0x2 = UHBR20, 0x4 = UHBR13.5 per DP 2.0); for 8b/10b the enum value is
 * already the DPCD encoding (link rate in 0.27 Gbps units).
 */
static uint8_t get_dpcd_link_rate(const struct dc_link_settings *link_settings)
{
	uint8_t link_rate = 0;
	enum dp_link_encoding encoding = dp_get_link_encoding_format(link_settings);

	if (encoding == DP_128b_132b_ENCODING)
		switch (link_settings->link_rate) {
		case LINK_RATE_UHBR10:
			link_rate = 0x1;
			break;
		case LINK_RATE_UHBR20:
			link_rate = 0x2;
			break;
		case LINK_RATE_UHBR13_5:
			link_rate = 0x4;
			break;
		default:
			link_rate = 0;
			break;
		}
	else if (encoding == DP_8b_10b_ENCODING)
		link_rate = (uint8_t) link_settings->link_rate;
	else
		link_rate = 0;

	return link_rate;
}
#endif
402 static void vendor_specific_lttpr_wa_one_start(struct dc_link *link)
404 const uint8_t vendor_lttpr_write_data[4] = {0x1, 0x50, 0x63, 0xff};
405 const uint8_t offset = dp_convert_to_count(
406 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
407 uint32_t vendor_lttpr_write_address = 0xF004F;
410 vendor_lttpr_write_address +=
411 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
413 /* W/A for certain LTTPR to reset their lane settings, part one of two */
414 core_link_write_dpcd(
416 vendor_lttpr_write_address,
417 &vendor_lttpr_write_data[0],
418 sizeof(vendor_lttpr_write_data));
421 static void vendor_specific_lttpr_wa_one_end(
422 struct dc_link *link,
425 const uint8_t vendor_lttpr_write_data[4] = {0x1, 0x50, 0x63, 0x0};
426 const uint8_t offset = dp_convert_to_count(
427 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
428 uint32_t vendor_lttpr_write_address = 0xF004F;
432 vendor_lttpr_write_address +=
433 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
435 /* W/A for certain LTTPR to reset their lane settings, part two of two */
436 core_link_write_dpcd(
438 vendor_lttpr_write_address,
439 &vendor_lttpr_write_data[0],
440 sizeof(vendor_lttpr_write_data));
444 static void vendor_specific_lttpr_wa_one_two(
445 struct dc_link *link,
448 if (link->apply_vendor_specific_lttpr_link_rate_wa) {
449 uint8_t toggle_rate = 0x0;
456 if (link->vendor_specific_lttpr_link_rate_wa == rate) {
457 /* W/A for certain LTTPR to reset internal state for link training */
458 core_link_write_dpcd(
465 /* Store the last attempted link rate for this link */
466 link->vendor_specific_lttpr_link_rate_wa = rate;
470 static void vendor_specific_lttpr_wa_three(
471 struct dc_link *link,
472 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX])
474 const uint8_t vendor_lttpr_write_data_vs[3] = {0x0, 0x53, 0x63};
475 const uint8_t vendor_lttpr_write_data_pe[3] = {0x0, 0x54, 0x63};
476 const uint8_t offset = dp_convert_to_count(
477 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
478 uint32_t vendor_lttpr_write_address = 0xF004F;
479 uint32_t vendor_lttpr_read_address = 0xF0053;
484 if (offset != 0xFF) {
485 vendor_lttpr_write_address +=
486 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
487 vendor_lttpr_read_address +=
488 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
491 /* W/A to read lane settings requested by DPRX */
492 core_link_write_dpcd(
494 vendor_lttpr_write_address,
495 &vendor_lttpr_write_data_vs[0],
496 sizeof(vendor_lttpr_write_data_vs));
499 vendor_lttpr_read_address,
502 core_link_write_dpcd(
504 vendor_lttpr_write_address,
505 &vendor_lttpr_write_data_pe[0],
506 sizeof(vendor_lttpr_write_data_pe));
509 vendor_lttpr_read_address,
513 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
514 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_LANE = (dprx_vs >> (2 * lane)) & 0x3;
515 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_LANE = (dprx_pe >> (2 * lane)) & 0x3;
519 static void vendor_specific_lttpr_wa_three_dpcd(
520 struct dc_link *link,
521 union dpcd_training_lane dpcd_lane_adjust[LANE_COUNT_DP_MAX])
523 union lane_adjust lane_adjust[LANE_COUNT_DP_MAX];
526 vendor_specific_lttpr_wa_three(link, lane_adjust);
528 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
529 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_SET = lane_adjust[lane].bits.VOLTAGE_SWING_LANE;
530 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_SET = lane_adjust[lane].bits.PRE_EMPHASIS_LANE;
534 static void vendor_specific_lttpr_wa_four(
535 struct dc_link *link,
538 const uint8_t vendor_lttpr_write_data_one[4] = {0x1, 0x55, 0x63, 0x8};
539 const uint8_t vendor_lttpr_write_data_two[4] = {0x1, 0x55, 0x63, 0x0};
540 const uint8_t offset = dp_convert_to_count(
541 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
542 uint32_t vendor_lttpr_write_address = 0xF004F;
543 #if defined(CONFIG_DRM_AMD_DC_DP2_0)
544 uint8_t sink_status = 0;
549 vendor_lttpr_write_address +=
550 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
552 /* W/A to pass through DPCD write of TPS=0 to DPRX */
554 core_link_write_dpcd(
556 vendor_lttpr_write_address,
557 &vendor_lttpr_write_data_one[0],
558 sizeof(vendor_lttpr_write_data_one));
561 /* clear training pattern set */
562 dpcd_set_training_pattern(link, DP_TRAINING_PATTERN_VIDEOIDLE);
565 core_link_write_dpcd(
567 vendor_lttpr_write_address,
568 &vendor_lttpr_write_data_two[0],
569 sizeof(vendor_lttpr_write_data_two));
572 #if defined(CONFIG_DRM_AMD_DC_DP2_0)
573 /* poll for intra-hop disable */
574 for (i = 0; i < 10; i++) {
575 if ((core_link_read_dpcd(link, DP_SINK_STATUS, &sink_status, 1) == DC_OK) &&
576 (sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION) == 0)
583 static void vendor_specific_lttpr_wa_five(
584 struct dc_link *link,
585 const union dpcd_training_lane dpcd_lane_adjust[LANE_COUNT_DP_MAX],
588 const uint32_t vendor_lttpr_write_address = 0xF004F;
589 const uint8_t vendor_lttpr_write_data_reset[4] = {0x1, 0x50, 0x63, 0xFF};
590 uint8_t vendor_lttpr_write_data_vs[4] = {0x1, 0x51, 0x63, 0x0};
591 uint8_t vendor_lttpr_write_data_pe[4] = {0x1, 0x52, 0x63, 0x0};
594 for (lane = 0; lane < lane_count; lane++) {
595 vendor_lttpr_write_data_vs[3] |=
596 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_SET << (2 * lane);
597 vendor_lttpr_write_data_pe[3] |=
598 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_SET << (2 * lane);
601 /* Force LTTPR to output desired VS and PE */
602 core_link_write_dpcd(
604 vendor_lttpr_write_address,
605 &vendor_lttpr_write_data_reset[0],
606 sizeof(vendor_lttpr_write_data_reset));
607 core_link_write_dpcd(
609 vendor_lttpr_write_address,
610 &vendor_lttpr_write_data_vs[0],
611 sizeof(vendor_lttpr_write_data_vs));
612 core_link_write_dpcd(
614 vendor_lttpr_write_address,
615 &vendor_lttpr_write_data_pe[0],
616 sizeof(vendor_lttpr_write_data_pe));
619 enum dc_status dpcd_set_link_settings(
620 struct dc_link *link,
621 const struct link_training_settings *lt_settings)
624 enum dc_status status;
626 union down_spread_ctrl downspread = {0};
627 union lane_count_set lane_count_set = {0};
629 downspread.raw = (uint8_t)
630 (lt_settings->link_settings.link_spread);
632 lane_count_set.bits.LANE_COUNT_SET =
633 lt_settings->link_settings.lane_count;
635 lane_count_set.bits.ENHANCED_FRAMING = lt_settings->enhanced_framing;
636 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
639 if (link->ep_type == DISPLAY_ENDPOINT_PHY &&
640 lt_settings->pattern_for_eq < DP_TRAINING_PATTERN_SEQUENCE_4) {
641 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED =
642 link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED;
645 status = core_link_write_dpcd(link, DP_DOWNSPREAD_CTRL,
646 &downspread.raw, sizeof(downspread));
648 status = core_link_write_dpcd(link, DP_LANE_COUNT_SET,
649 &lane_count_set.raw, 1);
651 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_13 &&
652 lt_settings->link_settings.use_link_rate_set == true) {
654 /* WA for some MUX chips that will power down with eDP and lose supported
655 * link rate set for eDP 1.4. Source reads DPCD 0x010 again to ensure
656 * MUX chip gets link rate set back before link training.
658 if (link->connector_signal == SIGNAL_TYPE_EDP) {
659 uint8_t supported_link_rates[16];
661 core_link_read_dpcd(link, DP_SUPPORTED_LINK_RATES,
662 supported_link_rates, sizeof(supported_link_rates));
664 status = core_link_write_dpcd(link, DP_LINK_BW_SET, &rate, 1);
665 status = core_link_write_dpcd(link, DP_LINK_RATE_SET,
666 <_settings->link_settings.link_rate_set, 1);
668 #if defined(CONFIG_DRM_AMD_DC_DCN)
669 rate = get_dpcd_link_rate(<_settings->link_settings);
671 rate = (uint8_t) (lt_settings->link_settings.link_rate);
673 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
674 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
675 link->lttpr_mode == LTTPR_MODE_TRANSPARENT)
676 vendor_specific_lttpr_wa_one_start(link);
678 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
679 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN))
680 vendor_specific_lttpr_wa_one_two(link, rate);
682 status = core_link_write_dpcd(link, DP_LINK_BW_SET, &rate, 1);
686 DC_LOG_HW_LINK_TRAINING("%s\n %x rate = %x\n %x lane = %x framing = %x\n %x spread = %x\n",
689 lt_settings->link_settings.link_rate,
691 lt_settings->link_settings.lane_count,
692 lt_settings->enhanced_framing,
694 lt_settings->link_settings.link_spread);
696 DC_LOG_HW_LINK_TRAINING("%s\n %x rate set = %x\n %x lane = %x framing = %x\n %x spread = %x\n",
699 lt_settings->link_settings.link_rate_set,
701 lt_settings->link_settings.lane_count,
702 lt_settings->enhanced_framing,
704 lt_settings->link_settings.link_spread);
710 uint8_t dc_dp_initialize_scrambling_data_symbols(
711 struct dc_link *link,
712 enum dc_dp_training_pattern pattern)
714 uint8_t disable_scrabled_data_symbols = 0;
717 case DP_TRAINING_PATTERN_SEQUENCE_1:
718 case DP_TRAINING_PATTERN_SEQUENCE_2:
719 case DP_TRAINING_PATTERN_SEQUENCE_3:
720 disable_scrabled_data_symbols = 1;
722 case DP_TRAINING_PATTERN_SEQUENCE_4:
723 #if defined(CONFIG_DRM_AMD_DC_DCN)
724 case DP_128b_132b_TPS1:
725 case DP_128b_132b_TPS2:
727 disable_scrabled_data_symbols = 0;
731 DC_LOG_HW_LINK_TRAINING("%s: Invalid HW Training pattern: %d\n",
735 return disable_scrabled_data_symbols;
738 static inline bool is_repeater(struct dc_link *link, uint32_t offset)
740 return (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) && (offset != 0);
743 static void dpcd_set_lt_pattern_and_lane_settings(
744 struct dc_link *link,
745 const struct link_training_settings *lt_settings,
746 enum dc_dp_training_pattern pattern,
749 uint32_t dpcd_base_lt_offset;
751 uint8_t dpcd_lt_buffer[5] = {0};
752 union dpcd_training_pattern dpcd_pattern = { 0 };
753 uint32_t size_in_bytes;
754 bool edp_workaround = false; /* TODO link_prop.INTERNAL */
755 dpcd_base_lt_offset = DP_TRAINING_PATTERN_SET;
757 if (is_repeater(link, offset))
758 dpcd_base_lt_offset = DP_TRAINING_PATTERN_SET_PHY_REPEATER1 +
759 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
761 /*****************************************************************
762 * DpcdAddress_TrainingPatternSet
763 *****************************************************************/
764 dpcd_pattern.v1_4.TRAINING_PATTERN_SET =
765 dc_dp_training_pattern_to_dpcd_training_pattern(link, pattern);
767 dpcd_pattern.v1_4.SCRAMBLING_DISABLE =
768 dc_dp_initialize_scrambling_data_symbols(link, pattern);
770 dpcd_lt_buffer[DP_TRAINING_PATTERN_SET - DP_TRAINING_PATTERN_SET]
773 if (is_repeater(link, offset)) {
774 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Repeater ID: %d\n 0x%X pattern = %x\n",
778 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
780 DC_LOG_HW_LINK_TRAINING("%s\n 0x%X pattern = %x\n",
783 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
786 /* concatenate everything into one buffer*/
787 size_in_bytes = lt_settings->link_settings.lane_count *
788 sizeof(lt_settings->dpcd_lane_settings[0]);
792 &dpcd_lt_buffer[DP_TRAINING_LANE0_SET - DP_TRAINING_PATTERN_SET],
793 lt_settings->dpcd_lane_settings,
796 if (is_repeater(link, offset)) {
797 #if defined(CONFIG_DRM_AMD_DC_DCN)
798 if (dp_get_link_encoding_format(<_settings->link_settings) ==
799 DP_128b_132b_ENCODING)
800 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
801 " 0x%X TX_FFE_PRESET_VALUE = %x\n",
805 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
806 else if (dp_get_link_encoding_format(<_settings->link_settings) ==
809 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
810 " 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
814 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
815 lt_settings->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
816 lt_settings->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
817 lt_settings->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
819 #if defined(CONFIG_DRM_AMD_DC_DCN)
820 if (dp_get_link_encoding_format(<_settings->link_settings) ==
821 DP_128b_132b_ENCODING)
822 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X TX_FFE_PRESET_VALUE = %x\n",
825 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
826 else if (dp_get_link_encoding_format(<_settings->link_settings) ==
829 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
832 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
833 lt_settings->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
834 lt_settings->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
835 lt_settings->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
837 if (edp_workaround) {
838 /* for eDP write in 2 parts because the 5-byte burst is
839 * causing issues on some eDP panels (EPR#366724)
841 core_link_write_dpcd(
843 DP_TRAINING_PATTERN_SET,
845 sizeof(dpcd_pattern.raw));
847 core_link_write_dpcd(
849 DP_TRAINING_LANE0_SET,
850 (uint8_t *)(lt_settings->dpcd_lane_settings),
853 #if defined(CONFIG_DRM_AMD_DC_DCN)
854 } else if (dp_get_link_encoding_format(<_settings->link_settings) ==
855 DP_128b_132b_ENCODING) {
856 core_link_write_dpcd(
860 sizeof(dpcd_lt_buffer));
863 /* write it all in (1 + number-of-lanes)-byte burst*/
864 core_link_write_dpcd(
868 size_in_bytes + sizeof(dpcd_pattern.raw));
871 bool dp_is_cr_done(enum dc_lane_count ln_count,
872 union lane_status *dpcd_lane_status)
875 /*LANEx_CR_DONE bits All 1's?*/
876 for (lane = 0; lane < (uint32_t)(ln_count); lane++) {
877 if (!dpcd_lane_status[lane].bits.CR_DONE_0)
883 bool dp_is_ch_eq_done(enum dc_lane_count ln_count,
884 union lane_status *dpcd_lane_status)
888 for (lane = 0; lane < (uint32_t)(ln_count); lane++)
889 if (!dpcd_lane_status[lane].bits.CHANNEL_EQ_DONE_0)
894 bool dp_is_symbol_locked(enum dc_lane_count ln_count,
895 union lane_status *dpcd_lane_status)
899 for (lane = 0; lane < (uint32_t)(ln_count); lane++)
900 if (!dpcd_lane_status[lane].bits.SYMBOL_LOCKED_0)
905 bool dp_is_interlane_aligned(union lane_align_status_updated align_status)
907 return align_status.bits.INTERLANE_ALIGN_DONE == 1;
910 void dp_hw_to_dpcd_lane_settings(
911 const struct link_training_settings *lt_settings,
912 const struct dc_lane_settings hw_lane_settings[LANE_COUNT_DP_MAX],
913 union dpcd_training_lane dpcd_lane_settings[LANE_COUNT_DP_MAX])
917 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
918 if (dp_get_link_encoding_format(<_settings->link_settings) ==
919 DP_8b_10b_ENCODING) {
920 dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET =
921 (uint8_t)(hw_lane_settings[lane].VOLTAGE_SWING);
922 dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET =
923 (uint8_t)(hw_lane_settings[lane].PRE_EMPHASIS);
924 dpcd_lane_settings[lane].bits.MAX_SWING_REACHED =
925 (hw_lane_settings[lane].VOLTAGE_SWING ==
926 VOLTAGE_SWING_MAX_LEVEL ? 1 : 0);
927 dpcd_lane_settings[lane].bits.MAX_PRE_EMPHASIS_REACHED =
928 (hw_lane_settings[lane].PRE_EMPHASIS ==
929 PRE_EMPHASIS_MAX_LEVEL ? 1 : 0);
931 #if defined(CONFIG_DRM_AMD_DC_DCN)
932 else if (dp_get_link_encoding_format(<_settings->link_settings) ==
933 DP_128b_132b_ENCODING) {
934 dpcd_lane_settings[lane].tx_ffe.PRESET_VALUE =
935 hw_lane_settings[lane].FFE_PRESET.settings.level;
941 void dp_decide_lane_settings(
942 const struct link_training_settings *lt_settings,
943 const union lane_adjust ln_adjust[LANE_COUNT_DP_MAX],
944 struct dc_lane_settings hw_lane_settings[LANE_COUNT_DP_MAX],
945 union dpcd_training_lane dpcd_lane_settings[LANE_COUNT_DP_MAX])
949 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
950 if (dp_get_link_encoding_format(<_settings->link_settings) ==
951 DP_8b_10b_ENCODING) {
952 hw_lane_settings[lane].VOLTAGE_SWING =
953 (enum dc_voltage_swing)(ln_adjust[lane].bits.
955 hw_lane_settings[lane].PRE_EMPHASIS =
956 (enum dc_pre_emphasis)(ln_adjust[lane].bits.
959 #if defined(CONFIG_DRM_AMD_DC_DCN)
960 else if (dp_get_link_encoding_format(<_settings->link_settings) ==
961 DP_128b_132b_ENCODING) {
962 hw_lane_settings[lane].FFE_PRESET.raw =
963 ln_adjust[lane].tx_ffe.PRESET_VALUE;
967 dp_hw_to_dpcd_lane_settings(lt_settings, hw_lane_settings, dpcd_lane_settings);
969 if (lt_settings->disallow_per_lane_settings) {
970 /* we find the maximum of the requested settings across all lanes*/
971 /* and set this maximum for all lanes*/
972 maximize_lane_settings(lt_settings, hw_lane_settings);
973 override_lane_settings(lt_settings, hw_lane_settings);
975 if (lt_settings->always_match_dpcd_with_hw_lane_settings)
976 dp_hw_to_dpcd_lane_settings(lt_settings, hw_lane_settings, dpcd_lane_settings);
/* Return the 4-bit nibble at position 'index' from a packed byte buffer:
 * even indices take the low nibble of buf[index/2], odd indices the high
 * nibble. Used to unpack per-lane DPCD status/adjust fields.
 */
static uint8_t get_nibble_at_index(const uint8_t *buf,
	uint32_t index)
{
	uint8_t nibble;
	nibble = buf[index / 2];

	if (index % 2)
		nibble >>= 4;
	else
		nibble &= 0x0F;

	return nibble;
}
995 static enum dc_pre_emphasis get_max_pre_emphasis_for_voltage_swing(
996 enum dc_voltage_swing voltage)
998 enum dc_pre_emphasis pre_emphasis;
999 pre_emphasis = PRE_EMPHASIS_MAX_LEVEL;
1001 if (voltage <= VOLTAGE_SWING_MAX_LEVEL)
1002 pre_emphasis = voltage_swing_to_pre_emphasis[voltage];
1004 return pre_emphasis;
1008 static void maximize_lane_settings(const struct link_training_settings *lt_settings,
1009 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX])
1012 struct dc_lane_settings max_requested;
1014 max_requested.VOLTAGE_SWING = lane_settings[0].VOLTAGE_SWING;
1015 max_requested.PRE_EMPHASIS = lane_settings[0].PRE_EMPHASIS;
1016 #if defined(CONFIG_DRM_AMD_DC_DCN)
1017 max_requested.FFE_PRESET = lane_settings[0].FFE_PRESET;
1020 /* Determine what the maximum of the requested settings are*/
1021 for (lane = 1; lane < lt_settings->link_settings.lane_count; lane++) {
1022 if (lane_settings[lane].VOLTAGE_SWING > max_requested.VOLTAGE_SWING)
1023 max_requested.VOLTAGE_SWING = lane_settings[lane].VOLTAGE_SWING;
1025 if (lane_settings[lane].PRE_EMPHASIS > max_requested.PRE_EMPHASIS)
1026 max_requested.PRE_EMPHASIS = lane_settings[lane].PRE_EMPHASIS;
1027 #if defined(CONFIG_DRM_AMD_DC_DCN)
1028 if (lane_settings[lane].FFE_PRESET.settings.level >
1029 max_requested.FFE_PRESET.settings.level)
1030 max_requested.FFE_PRESET.settings.level =
1031 lane_settings[lane].FFE_PRESET.settings.level;
1035 /* make sure the requested settings are
1036 * not higher than maximum settings*/
1037 if (max_requested.VOLTAGE_SWING > VOLTAGE_SWING_MAX_LEVEL)
1038 max_requested.VOLTAGE_SWING = VOLTAGE_SWING_MAX_LEVEL;
1040 if (max_requested.PRE_EMPHASIS > PRE_EMPHASIS_MAX_LEVEL)
1041 max_requested.PRE_EMPHASIS = PRE_EMPHASIS_MAX_LEVEL;
1042 #if defined(CONFIG_DRM_AMD_DC_DCN)
1043 if (max_requested.FFE_PRESET.settings.level > DP_FFE_PRESET_MAX_LEVEL)
1044 max_requested.FFE_PRESET.settings.level = DP_FFE_PRESET_MAX_LEVEL;
1047 /* make sure the pre-emphasis matches the voltage swing*/
1048 if (max_requested.PRE_EMPHASIS >
1049 get_max_pre_emphasis_for_voltage_swing(
1050 max_requested.VOLTAGE_SWING))
1051 max_requested.PRE_EMPHASIS =
1052 get_max_pre_emphasis_for_voltage_swing(
1053 max_requested.VOLTAGE_SWING);
1055 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
1056 lane_settings[lane].VOLTAGE_SWING = max_requested.VOLTAGE_SWING;
1057 lane_settings[lane].PRE_EMPHASIS = max_requested.PRE_EMPHASIS;
1058 #if defined(CONFIG_DRM_AMD_DC_DCN)
1059 lane_settings[lane].FFE_PRESET = max_requested.FFE_PRESET;
1064 static void override_lane_settings(const struct link_training_settings *lt_settings,
1065 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX])
1069 if (lt_settings->voltage_swing == NULL &&
1070 lt_settings->pre_emphasis == NULL &&
1071 #if defined(CONFIG_DRM_AMD_DC_DCN)
1072 lt_settings->ffe_preset == NULL &&
1074 lt_settings->post_cursor2 == NULL)
1078 for (lane = 1; lane < LANE_COUNT_DP_MAX; lane++) {
1079 if (lt_settings->voltage_swing)
1080 lane_settings[lane].VOLTAGE_SWING = *lt_settings->voltage_swing;
1081 if (lt_settings->pre_emphasis)
1082 lane_settings[lane].PRE_EMPHASIS = *lt_settings->pre_emphasis;
1083 if (lt_settings->post_cursor2)
1084 lane_settings[lane].POST_CURSOR2 = *lt_settings->post_cursor2;
1085 #if defined(CONFIG_DRM_AMD_DC_DCN)
1086 if (lt_settings->ffe_preset)
1087 lane_settings[lane].FFE_PRESET = *lt_settings->ffe_preset;
/*
 * Read the lane status, inter-lane alignment status and requested drive
 * settings from the sink (or from an LTTPR repeater when 'offset' addresses
 * one) in a single 6-byte DPCD burst starting at DP_LANE0_1_STATUS.
 * Results are unpacked into ln_status[], ln_align and ln_adjust[].
 * NOTE(review): interior lines (braces, some call arguments, the final
 * 'return status;') appear elided in this copy — verify against upstream.
 */
1092 enum dc_status dp_get_lane_status_and_lane_adjust(
1093 struct dc_link *link,
1094 const struct link_training_settings *link_training_setting,
1095 union lane_status ln_status[LANE_COUNT_DP_MAX],
1096 union lane_align_status_updated *ln_align,
1097 union lane_adjust ln_adjust[LANE_COUNT_DP_MAX],
1100 unsigned int lane01_status_address = DP_LANE0_1_STATUS;
/* For DPRX reads the adjust-request bytes sit 4 bytes into the burst. */
1101 uint8_t lane_adjust_offset = 4;
1102 unsigned int lane01_adjust_address;
1103 uint8_t dpcd_buf[6] = {0};
1105 enum dc_status status;
/* Repeaters use their own per-repeater status register block. */
1107 if (is_repeater(link, offset)) {
1108 lane01_status_address =
1109 DP_LANE0_1_STATUS_PHY_REPEATER1 +
1110 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
/* Repeater layout packs the adjust-request bytes one byte earlier. */
1111 lane_adjust_offset = 3;
1114 status = core_link_read_dpcd(
1116 lane01_status_address,
1117 (uint8_t *)(dpcd_buf),
/* Unpack one 4-bit status/adjust nibble per active lane. */
1120 for (lane = 0; lane <
1121 (uint32_t)(link_training_setting->link_settings.lane_count);
1124 ln_status[lane].raw =
1125 get_nibble_at_index(&dpcd_buf[0], lane);
1126 ln_adjust[lane].raw =
1127 get_nibble_at_index(&dpcd_buf[lane_adjust_offset], lane);
/* Byte 2 of the burst is LANE_ALIGN_STATUS_UPDATED. */
1130 ln_align->raw = dpcd_buf[2];
1132 if (is_repeater(link, offset)) {
1133 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
1134 " 0x%X Lane01Status = %x\n 0x%X Lane23Status = %x\n ",
1137 lane01_status_address, dpcd_buf[0],
1138 lane01_status_address + 1, dpcd_buf[1]);
1140 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X Lane01Status = %x\n 0x%X Lane23Status = %x\n ",
1142 lane01_status_address, dpcd_buf[0],
1143 lane01_status_address + 1, dpcd_buf[1]);
/* Compute the adjust-request register address purely for logging. */
1145 lane01_adjust_address = DP_ADJUST_REQUEST_LANE0_1;
1147 if (is_repeater(link, offset))
1148 lane01_adjust_address = DP_ADJUST_REQUEST_LANE0_1_PHY_REPEATER1 +
1149 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1151 if (is_repeater(link, offset)) {
1152 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
1153 " 0x%X Lane01AdjustRequest = %x\n 0x%X Lane23AdjustRequest = %x\n",
1156 lane01_adjust_address,
1157 dpcd_buf[lane_adjust_offset],
1158 lane01_adjust_address + 1,
1159 dpcd_buf[lane_adjust_offset + 1]);
1161 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X Lane01AdjustRequest = %x\n 0x%X Lane23AdjustRequest = %x\n",
1163 lane01_adjust_address,
1164 dpcd_buf[lane_adjust_offset],
1165 lane01_adjust_address + 1,
1166 dpcd_buf[lane_adjust_offset + 1]);
/*
 * Write the per-lane drive settings (one DPCD byte per active lane) to
 * DP_TRAINING_LANE0_SET, or to the corresponding per-repeater register
 * block when 'offset' addresses an LTTPR, then log what was written.
 * On DCN builds the log format differs for 128b/132b (FFE preset) vs
 * 8b/10b (VS/PE) encoding.
 * NOTE(review): braces, some log arguments and the final return appear
 * elided in this copy — verify against upstream.
 */
1172 enum dc_status dpcd_set_lane_settings(
1173 struct dc_link *link,
1174 const struct link_training_settings *link_training_setting,
1177 unsigned int lane0_set_address;
1178 enum dc_status status;
1180 lane0_set_address = DP_TRAINING_LANE0_SET;
/* Repeaters expose their own TRAINING_LANEx_SET register block. */
1182 if (is_repeater(link, offset))
1183 lane0_set_address = DP_TRAINING_LANE0_SET_PHY_REPEATER1 +
1184 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1186 status = core_link_write_dpcd(link,
1188 (uint8_t *)(link_training_setting->dpcd_lane_settings),
1189 link_training_setting->link_settings.lane_count);
1191 if (is_repeater(link, offset)) {
1192 #if defined(CONFIG_DRM_AMD_DC_DCN)
1193 if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1194 DP_128b_132b_ENCODING)
1195 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
1196 " 0x%X TX_FFE_PRESET_VALUE = %x\n",
1200 link_training_setting->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE)
1201 else if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
/* Settings are identical across lanes, so lane 0 is logged as representative. */
1204 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Repeater ID: %d\n"
1205 " 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
1209 link_training_setting->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
1210 link_training_setting->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
1211 link_training_setting->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
1212 link_training_setting->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
1215 #if defined(CONFIG_DRM_AMD_DC_DCN)
1216 if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1217 DP_128b_132b_ENCODING)
1218 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X TX_FFE_PRESET_VALUE = %x\n",
1221 link_training_setting->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE)
1222 else if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1225 DC_LOG_HW_LINK_TRAINING("%s\n 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
1228 link_training_setting->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
1229 link_training_setting->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
1230 link_training_setting->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
1231 link_training_setting->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
/*
 * Return whether any active lane has its voltage swing programmed at the
 * maximum level (VOLTAGE_SWING_MAX_LEVEL).
 * NOTE(review): braces, the 'return true;'/'return false;' lines and the
 * loop increment are elided in this copy — verify against upstream.
 */
1237 bool dp_is_max_vs_reached(
1238 const struct link_training_settings *lt_settings)
1241 for (lane = 0; lane <
1242 (uint32_t)(lt_settings->link_settings.lane_count);
1244 if (lt_settings->dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET
1245 == VOLTAGE_SWING_MAX_LEVEL)
/*
 * Run the optional Post-Link-Training Adjust Request sequence: while the
 * sink keeps POST_LT_ADJ_REQ_IN_PROGRESS set, poll lane status and, when
 * the sink requests different VS/PE values, re-program the drive settings.
 * Bounded by POST_LT_ADJ_REQ_LIMIT adjustment rounds and a
 * POST_LT_ADJ_REQ_TIMEOUT polling window per round.
 * NOTE(review): loop increments, braces, some call arguments and the
 * return statements are elided in this copy — verify against upstream.
 */
1252 static bool perform_post_lt_adj_req_sequence(
1253 struct dc_link *link,
1254 struct link_training_settings *lt_settings)
1256 enum dc_lane_count lane_count =
1257 lt_settings->link_settings.lane_count;
1259 uint32_t adj_req_count;
1260 uint32_t adj_req_timer;
1261 bool req_drv_setting_changed;
1264 req_drv_setting_changed = false;
/* Outer loop: one iteration per granted adjustment request. */
1265 for (adj_req_count = 0; adj_req_count < POST_LT_ADJ_REQ_LIMIT;
1268 req_drv_setting_changed = false;
/* Inner loop: poll until the sink raises or drops the adjust request. */
1270 for (adj_req_timer = 0;
1271 adj_req_timer < POST_LT_ADJ_REQ_TIMEOUT;
1274 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
1275 union lane_align_status_updated
1276 dpcd_lane_status_updated;
1277 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
1279 dp_get_lane_status_and_lane_adjust(
1283 &dpcd_lane_status_updated,
/* Sink cleared the in-progress flag: this adjustment round is over. */
1287 if (dpcd_lane_status_updated.bits.
1288 POST_LT_ADJ_REQ_IN_PROGRESS == 0)
/* Training is considered lost if CR/EQ/symbol-lock/alignment dropped. */
1291 if (!dp_is_cr_done(lane_count, dpcd_lane_status))
1294 if (!dp_is_ch_eq_done(lane_count, dpcd_lane_status) ||
1295 !dp_is_symbol_locked(lane_count, dpcd_lane_status) ||
1296 !dp_is_interlane_aligned(dpcd_lane_status_updated))
/* Detect whether the sink is asking for different VS/PE on any lane. */
1299 for (lane = 0; lane < (uint32_t)(lane_count); lane++) {
1302 dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET !=
1303 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_LANE ||
1304 lt_settings->dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET !=
1305 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_LANE) {
1307 req_drv_setting_changed = true;
/* Apply the newly requested settings and go another round. */
1312 if (req_drv_setting_changed) {
1313 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1314 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1316 dc_link_dp_set_drive_settings(link,
1324 if (!req_drv_setting_changed) {
1325 DC_LOG_WARNING("%s: Post Link Training Adjust Request Timed out\n",
1332 DC_LOG_WARNING("%s: Post Link Training Adjust Request limit reached\n",
1340 /* Only used for channel equalization */
/*
 * Translate a raw DPCD TRAINING_AUX_RD_INTERVAL code into the wait time
 * in microseconds; defaults to 400us when the code is unrecognized.
 * NOTE(review): the 'case'/'break' lines of this switch are elided in
 * this copy, so the code-to-interval mapping cannot be confirmed here —
 * verify against upstream and the DP spec's AUX_RD_INTERVAL encoding.
 */
1341 uint32_t dp_translate_training_aux_read_interval(uint32_t dpcd_aux_read_interval)
1343 unsigned int aux_rd_interval_us = 400;
1345 switch (dpcd_aux_read_interval) {
1347 aux_rd_interval_us = 4000;
1350 aux_rd_interval_us = 8000;
1353 aux_rd_interval_us = 12000;
1356 aux_rd_interval_us = 16000;
1358 #if defined(CONFIG_DRM_AMD_DC_DCN)
1360 aux_rd_interval_us = 32000;
1363 aux_rd_interval_us = 64000;
1370 return aux_rd_interval_us;
/*
 * Map per-lane clock-recovery status to a specific CR failure code: the
 * first lane (in order 0, 1, 2/3) whose CR_DONE bit is clear determines
 * the result; lanes 2 and 3 share a single LANE23 failure code.
 * NOTE(review): the trailing 'return result;' is elided in this copy.
 */
1373 enum link_training_result dp_get_cr_failure(enum dc_lane_count ln_count,
1374 union lane_status *dpcd_lane_status)
1376 enum link_training_result result = LINK_TRAINING_SUCCESS;
1378 if (ln_count >= LANE_COUNT_ONE && !dpcd_lane_status[0].bits.CR_DONE_0)
1379 result = LINK_TRAINING_CR_FAIL_LANE0;
1380 else if (ln_count >= LANE_COUNT_TWO && !dpcd_lane_status[1].bits.CR_DONE_0)
1381 result = LINK_TRAINING_CR_FAIL_LANE1;
1382 else if (ln_count >= LANE_COUNT_FOUR && !dpcd_lane_status[2].bits.CR_DONE_0)
1383 result = LINK_TRAINING_CR_FAIL_LANE23;
1384 else if (ln_count >= LANE_COUNT_FOUR && !dpcd_lane_status[3].bits.CR_DONE_0)
1385 result = LINK_TRAINING_CR_FAIL_LANE23;
/*
 * Channel-equalization phase of 8b/10b link training: transmit the EQ
 * training pattern, program lane settings, wait the AUX read interval,
 * then poll lane status until CR is still good and EQ/symbol-lock/
 * inter-lane alignment are all done, retrying up to
 * LINK_TRAINING_MAX_RETRY_COUNT times with sink-requested adjustments.
 * 'offset' selects DPRX or an LTTPR repeater hop.
 * NOTE(review): several lines are elided in this copy and
 * '<_settings' looks like an entity-mangled '&lt_settings' — verify
 * against upstream before relying on this text.
 */
1389 static enum link_training_result perform_channel_equalization_sequence(
1390 struct dc_link *link,
1391 struct link_training_settings *lt_settings,
1394 enum dc_dp_training_pattern tr_pattern;
1395 uint32_t retries_ch_eq;
1396 uint32_t wait_time_microsec;
1397 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
1398 union lane_align_status_updated dpcd_lane_status_updated = {0};
1399 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
1400 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
1402 /* Note: also check that TPS4 is a supported feature*/
1403 tr_pattern = lt_settings->pattern_for_eq;
/* LTTPR hops are always trained with TPS4 in 8b/10b mode. */
1405 #if defined(CONFIG_DRM_AMD_DC_DCN)
1406 if (is_repeater(link, offset) && dp_get_link_encoding_format(<_settings->link_settings) == DP_8b_10b_ENCODING)
1407 tr_pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
1409 if (is_repeater(link, offset))
1410 tr_pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
1413 dp_set_hw_training_pattern(link, tr_pattern, offset);
1415 for (retries_ch_eq = 0; retries_ch_eq <= LINK_TRAINING_MAX_RETRY_COUNT;
1418 dp_set_hw_lane_settings(link, lt_settings, offset);
1422 /* EPR #361076 - write as a 5-byte burst,
1423 * but only for the 1-st iteration
1426 dpcd_set_lt_pattern_and_lane_settings(
1429 tr_pattern, offset);
1431 dpcd_set_lane_settings(link, lt_settings, offset);
1433 /* 3. wait for receiver to lock-on*/
1434 wait_time_microsec = lt_settings->eq_pattern_time;
/* Repeater hops advertise their own AUX read interval. */
1436 if (is_repeater(link, offset))
1437 wait_time_microsec =
1438 dp_translate_training_aux_read_interval(
1439 link->dpcd_caps.lttpr_caps.aux_rd_interval[offset - 1]);
/* Vendor-specific workaround: force a 16ms wait on fixed-VS parts. */
1441 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
1442 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
1443 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1444 wait_time_microsec = 16000;
1447 dp_wait_for_training_aux_rd_interval(
1449 wait_time_microsec);
1451 /* 4. Read lane status and requested
1452 * drive settings as set by the sink*/
1454 dp_get_lane_status_and_lane_adjust(
1458 &dpcd_lane_status_updated,
1462 /* 5. check CR done*/
1463 if (!dp_is_cr_done(lane_count, dpcd_lane_status))
1464 return LINK_TRAINING_EQ_FAIL_CR;
1466 /* 6. check CHEQ done*/
1467 if (dp_is_ch_eq_done(lane_count, dpcd_lane_status) &&
1468 dp_is_symbol_locked(lane_count, dpcd_lane_status) &&
1469 dp_is_interlane_aligned(dpcd_lane_status_updated))
1470 return LINK_TRAINING_SUCCESS;
1472 /* 7. update VS/PE/PC2 in lt_settings*/
1473 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1474 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
/* All retries exhausted without EQ completion. */
1477 return LINK_TRAINING_EQ_FAIL_EQ;
/*
 * Workaround path: start transmitting the clock-recovery pattern (TPS1)
 * and program lane settings on the GPU side early, before the normal CR
 * sequence runs.
 * NOTE(review): the trailing udelay/brace lines are elided in this copy.
 */
1481 static void start_clock_recovery_pattern_early(struct dc_link *link,
1482 struct link_training_settings *lt_settings,
1485 DC_LOG_HW_LINK_TRAINING("%s\n GPU sends TPS1. Wait 400us.\n",
1487 dp_set_hw_training_pattern(link, lt_settings->pattern_for_cr, offset);
1488 dp_set_hw_lane_settings(link, lt_settings, offset);
/*
 * Clock-recovery phase of link training: transmit the CR pattern, program
 * drive settings, wait the CR interval, poll lane status, and loop with
 * sink-requested adjustments until CR locks or a retry/abort condition is
 * hit (max VS reached for 8b/10b, repeated identical settings, or the
 * LINK_TRAINING_MAX_CR_RETRY cap — a guard against sinks that oscillate
 * between VS levels forever).
 * NOTE(review): initialization of retries_cr/retry_count, loop
 * increments, several braces and failure returns are elided in this
 * copy, and '<_settings' looks like an entity-mangled '&lt_settings' —
 * verify against upstream.
 */
1492 static enum link_training_result perform_clock_recovery_sequence(
1493 struct dc_link *link,
1494 struct link_training_settings *lt_settings,
1497 uint32_t retries_cr;
1498 uint32_t retry_count;
1499 uint32_t wait_time_microsec;
1500 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
1501 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
1502 union lane_align_status_updated dpcd_lane_status_updated;
1503 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
/* Skip pattern programming when the early-CR workaround already did it. */
1508 if (!link->ctx->dc->work_arounds.lt_early_cr_pattern)
1509 dp_set_hw_training_pattern(link, lt_settings->pattern_for_cr, offset);
1511 /* najeeb - The synaptics MST hub can put the LT in
1512 * infinite loop by switching the VS
1514 /* between level 0 and level 1 continuously, here
1515 * we try for CR lock for LinkTrainingMaxCRRetry count*/
1516 while ((retries_cr < LINK_TRAINING_MAX_RETRY_COUNT) &&
1517 (retry_count < LINK_TRAINING_MAX_CR_RETRY)) {
1519 memset(&dpcd_lane_status, '\0', sizeof(dpcd_lane_status));
1520 memset(&dpcd_lane_status_updated, '\0',
1521 sizeof(dpcd_lane_status_updated));
1523 /* 1. call HWSS to set lane settings*/
1524 dp_set_hw_lane_settings(
1529 /* 2. update DPCD of the receiver*/
1531 /* EPR #361076 - write as a 5-byte burst,
1532 * but only for the 1-st iteration.*/
1533 dpcd_set_lt_pattern_and_lane_settings(
1536 lt_settings->pattern_for_cr,
1539 dpcd_set_lane_settings(
1544 /* 3. wait receiver to lock-on*/
1545 wait_time_microsec = lt_settings->cr_pattern_time;
/* Vendor-specific workaround: force a 16ms wait on fixed-VS parts. */
1547 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
1548 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN)) {
1549 wait_time_microsec = 16000;
1552 dp_wait_for_training_aux_rd_interval(
1554 wait_time_microsec);
1556 /* 4. Read lane status and requested drive
1557 * settings as set by the sink
1559 dp_get_lane_status_and_lane_adjust(
1563 &dpcd_lane_status_updated,
1567 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
1568 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
1569 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1570 vendor_specific_lttpr_wa_one_end(link, retry_count);
1571 vendor_specific_lttpr_wa_three(link, dpcd_lane_adjust);
1574 /* 5. check CR done*/
1575 if (dp_is_cr_done(lane_count, dpcd_lane_status))
1576 return LINK_TRAINING_SUCCESS;
1578 /* 6. max VS reached*/
1579 #if defined(CONFIG_DRM_AMD_DC_DCN)
1580 if ((dp_get_link_encoding_format(<_settings->link_settings) ==
1581 DP_8b_10b_ENCODING) &&
1582 dp_is_max_vs_reached(lt_settings))
1585 if (dp_is_max_vs_reached(lt_settings))
1589 /* 7. same lane settings*/
1590 /* Note: settings are the same for all lanes,
1591 * so comparing first lane is sufficient*/
1592 if ((dp_get_link_encoding_format(<_settings->link_settings) == DP_8b_10b_ENCODING) &&
1593 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET ==
1594 dpcd_lane_adjust[0].bits.VOLTAGE_SWING_LANE)
1596 #if defined(CONFIG_DRM_AMD_DC_DCN)
1597 else if ((dp_get_link_encoding_format(<_settings->link_settings) == DP_128b_132b_ENCODING) &&
1598 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE ==
1599 dpcd_lane_adjust[0].tx_ffe.PRESET_VALUE)
1605 /* 8. update VS/PE/PC2 in lt_settings*/
1606 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1607 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1611 if (retry_count >= LINK_TRAINING_MAX_CR_RETRY) {
1613 DC_LOG_ERROR("%s: Link Training Error, could not get CR after %d tries. Possibly voltage swing issue",
1615 LINK_TRAINING_MAX_CR_RETRY);
/* Report which lane(s) failed clock recovery. */
1619 return dp_get_cr_failure(lane_count, dpcd_lane_status);
/*
 * Finish link training: switch the main link to the idle/video pattern,
 * optionally verify the link did not drop (non-eDP), run the optional
 * POST_LT_ADJ_REQ sequence when the sink supports it and TPS4 was not
 * used, then write the final LANE_COUNT_SET byte (with POST_LT_ADJ_REQ
 * grant cleared) back to the sink. Returns the (possibly updated)
 * training status.
 * NOTE(review): several lines (braces, delay call, final return) are
 * elided in this copy — verify against upstream.
 */
1622 static inline enum link_training_result dp_transition_to_video_idle(
1623 struct dc_link *link,
1624 struct link_training_settings *lt_settings,
1625 enum link_training_result status)
1627 union lane_count_set lane_count_set = {0};
1629 /* 4. mainlink output idle pattern*/
1630 dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
1633 * 5. post training adjust if required
1634 * If the upstream DPTX and downstream DPRX both support TPS4,
1635 * TPS4 must be used instead of POST_LT_ADJ_REQ.
1637 if (link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED != 1 ||
1638 #if defined(CONFIG_DRM_AMD_DC_DCN)
1639 lt_settings->pattern_for_eq >= DP_TRAINING_PATTERN_SEQUENCE_4) {
1641 lt_settings->pattern_for_eq == DP_TRAINING_PATTERN_SEQUENCE_4) {
1643 /* delay 5ms after Main Link output idle pattern and then check
1646 if (link->connector_signal != SIGNAL_TYPE_EDP && status == LINK_TRAINING_SUCCESS) {
1648 status = dp_check_link_loss_status(link, lt_settings);
/* POST_LT_ADJ_REQ path: failure downgrades the result to LQA_FAIL. */
1653 if (status == LINK_TRAINING_SUCCESS &&
1654 perform_post_lt_adj_req_sequence(link, lt_settings) == false)
1655 status = LINK_TRAINING_LQA_FAIL;
1657 lane_count_set.bits.LANE_COUNT_SET = lt_settings->link_settings.lane_count;
1658 lane_count_set.bits.ENHANCED_FRAMING = lt_settings->enhanced_framing;
1659 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
1661 core_link_write_dpcd(
1664 &lane_count_set.raw,
1665 sizeof(lane_count_set));
/*
 * Re-read sink lane status after training and report
 * LINK_TRAINING_LINK_LOSS if any active lane has lost channel EQ, clock
 * recovery or symbol lock; otherwise the status remains SUCCESS.
 * NOTE(review): the DPCD read address/size arguments, loop break and
 * final return are elided in this copy — verify against upstream.
 */
1670 enum link_training_result dp_check_link_loss_status(
1671 struct dc_link *link,
1672 const struct link_training_settings *link_training_setting)
1674 enum link_training_result status = LINK_TRAINING_SUCCESS;
1675 union lane_status lane_status;
1676 uint8_t dpcd_buf[6] = {0};
1679 core_link_read_dpcd(
1682 (uint8_t *)(dpcd_buf),
1685 /*parse lane status*/
1686 for (lane = 0; lane < link->cur_link_settings.lane_count; lane++) {
1688 * check lanes status
1690 lane_status.raw = get_nibble_at_index(&dpcd_buf[2], lane);
1692 if (!lane_status.bits.CHANNEL_EQ_DONE_0 ||
1693 !lane_status.bits.CR_DONE_0 ||
1694 !lane_status.bits.SYMBOL_LOCKED_0) {
1695 /* if one of the channel equalization, clock
1696 * recovery or symbol lock is dropped
1697 * consider it as (link has been
1698 * dropped) dp sink status has changed
1700 status = LINK_TRAINING_LINK_LOSS;
/*
 * Populate lt_settings with the defaults for 8b/10b link training from
 * the requested link settings: copy rate/lane-count, pick downspread
 * per link->dp_ss_off, select CR/EQ patterns and AUX read intervals,
 * enable enhanced framing and FEC-ready, and derive the initial DPCD
 * lane settings from the HW lane settings.
 */
1708 static inline void decide_8b_10b_training_settings(
1709 struct dc_link *link,
1710 const struct dc_link_settings *link_setting,
1711 struct link_training_settings *lt_settings)
1713 memset(lt_settings, '\0', sizeof(struct link_training_settings));
1715 /* Initialize link settings */
1716 lt_settings->link_settings.use_link_rate_set = link_setting->use_link_rate_set;
1717 lt_settings->link_settings.link_rate_set = link_setting->link_rate_set;
1718 lt_settings->link_settings.link_rate = link_setting->link_rate;
1719 lt_settings->link_settings.lane_count = link_setting->lane_count;
1720 /* TODO hard coded to SS for now
1721 * lt_settings.link_settings.link_spread =
1722 * dal_display_path_is_ss_supported(
1723 * path_mode->display_path) ?
1724 * LINK_SPREAD_05_DOWNSPREAD_30KHZ :
1725 * LINK_SPREAD_DISABLED;
1727 lt_settings->link_settings.link_spread = link->dp_ss_off ?
1728 LINK_SPREAD_DISABLED : LINK_SPREAD_05_DOWNSPREAD_30KHZ;
1729 lt_settings->lttpr_mode = link->lttpr_mode;
/* Per-phase AUX read intervals and training patterns. */
1730 lt_settings->cr_pattern_time = get_cr_training_aux_rd_interval(link, link_setting);
1731 lt_settings->eq_pattern_time = get_eq_training_aux_rd_interval(link, link_setting);
1732 lt_settings->pattern_for_cr = decide_cr_training_pattern(link_setting);
1733 lt_settings->pattern_for_eq = decide_eq_training_pattern(link, link_setting);
1734 lt_settings->enhanced_framing = 1;
1735 lt_settings->should_set_fec_ready = true;
1736 lt_settings->disallow_per_lane_settings = true;
1737 lt_settings->always_match_dpcd_with_hw_lane_settings = true;
1738 dp_hw_to_dpcd_lane_settings(lt_settings, lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1741 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * Populate lt_settings with the defaults for 128b/132b (DP 2.x) link
 * training: copy the requested link settings, pick downspread per
 * link->dp_ss_off, select CR/EQ/CDS patterns and the fixed EQ/CDS
 * timing limits, and choose LTTPR mode from the repeater count.
 * The CDS wait limit scales with (repeater count + 1) hops.
 */
1742 static inline void decide_128b_132b_training_settings(struct dc_link *link,
1743 const struct dc_link_settings *link_settings,
1744 struct link_training_settings *lt_settings)
1746 memset(lt_settings, 0, sizeof(*lt_settings));
1748 lt_settings->link_settings = *link_settings;
1749 /* TODO: should decide link spread when populating link_settings */
1750 lt_settings->link_settings.link_spread = link->dp_ss_off ? LINK_SPREAD_DISABLED :
1751 LINK_SPREAD_05_DOWNSPREAD_30KHZ;
1753 lt_settings->pattern_for_cr = decide_cr_training_pattern(link_settings);
1754 lt_settings->pattern_for_eq = decide_eq_training_pattern(link, link_settings);
1755 lt_settings->eq_pattern_time = 2500;
1756 lt_settings->eq_wait_time_limit = 400000;
1757 lt_settings->eq_loop_count_limit = 20;
1758 lt_settings->pattern_for_cds = DP_128b_132b_TPS2_CDS;
1759 lt_settings->cds_pattern_time = 2500;
1760 lt_settings->cds_wait_time_limit = (dp_convert_to_count(
1761 link->dpcd_caps.lttpr_caps.phy_repeater_cnt) + 1) * 20000;
/* Any repeater present forces non-transparent LTTPR mode. */
1762 lt_settings->lttpr_mode = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt) ?
1763 LTTPR_MODE_NON_TRANSPARENT : LTTPR_MODE_TRANSPARENT;
1764 lt_settings->disallow_per_lane_settings = true;
1765 dp_hw_to_dpcd_lane_settings(lt_settings,
1766 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
/*
 * Dispatch to the encoding-specific settings builder: 8b/10b or (on DCN
 * builds) 128b/132b, based on the requested link settings.
 */
1770 void dp_decide_training_settings(
1771 struct dc_link *link,
1772 const struct dc_link_settings *link_settings,
1773 struct link_training_settings *lt_settings)
1775 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING)
1776 decide_8b_10b_training_settings(link, link_settings, lt_settings);
1777 #if defined(CONFIG_DRM_AMD_DC_DCN)
1778 else if (dp_get_link_encoding_format(link_settings) == DP_128b_132b_ENCODING)
1779 decide_128b_132b_training_settings(link, link_settings, lt_settings);
/*
 * Apply caller/preference overrides on top of the decided training
 * settings: downspread, per-parameter drive-setting overrides (VS/PE/
 * PC2/FFE), BIOS-forced drive values for fixed-VS parts in transparent
 * LTTPR mode, then rebuild the per-lane settings array, re-derive the
 * DPCD lane settings, and finally override the training timings,
 * patterns, enhanced framing and FEC-ready flag where requested.
 * NOTE(review): braces and some lines are elided in this copy — verify
 * against upstream.
 */
1783 static void override_training_settings(
1784 struct dc_link *link,
1785 const struct dc_link_training_overrides *overrides,
1786 struct link_training_settings *lt_settings)
1790 /* Override link spread */
1791 if (!link->dp_ss_off && overrides->downspread != NULL)
1792 lt_settings->link_settings.link_spread = *overrides->downspread ?
1793 LINK_SPREAD_05_DOWNSPREAD_30KHZ
1794 : LINK_SPREAD_DISABLED;
1796 /* Override lane settings */
1797 if (overrides->voltage_swing != NULL)
1798 lt_settings->voltage_swing = overrides->voltage_swing;
1799 if (overrides->pre_emphasis != NULL)
1800 lt_settings->pre_emphasis = overrides->pre_emphasis;
1801 if (overrides->post_cursor2 != NULL)
1802 lt_settings->post_cursor2 = overrides->post_cursor2;
1803 #if defined(CONFIG_DRM_AMD_DC_DCN)
1804 if (overrides->ffe_preset != NULL)
1805 lt_settings->ffe_preset = overrides->ffe_preset;
1807 /* Override HW lane settings with BIOS forced values if present */
1808 if (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN &&
1809 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1810 lt_settings->voltage_swing = &link->bios_forced_drive_settings.VOLTAGE_SWING;
1811 lt_settings->pre_emphasis = &link->bios_forced_drive_settings.PRE_EMPHASIS;
/* BIOS-forced values must not be re-synced from HW state later. */
1812 lt_settings->always_match_dpcd_with_hw_lane_settings = false;
/* Materialize the (possibly overridden) settings on every lane. */
1814 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
1815 lt_settings->lane_settings[lane].VOLTAGE_SWING =
1816 lt_settings->voltage_swing != NULL ?
1817 *lt_settings->voltage_swing :
1818 VOLTAGE_SWING_LEVEL0;
1819 lt_settings->lane_settings[lane].PRE_EMPHASIS =
1820 lt_settings->pre_emphasis != NULL ?
1821 *lt_settings->pre_emphasis
1822 : PRE_EMPHASIS_DISABLED;
1823 lt_settings->lane_settings[lane].POST_CURSOR2 =
1824 lt_settings->post_cursor2 != NULL ?
1825 *lt_settings->post_cursor2
1826 : POST_CURSOR2_DISABLED;
1829 dp_hw_to_dpcd_lane_settings(lt_settings,
1830 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1832 /* Initialize training timings */
1833 if (overrides->cr_pattern_time != NULL)
1834 lt_settings->cr_pattern_time = *overrides->cr_pattern_time;
1836 if (overrides->eq_pattern_time != NULL)
1837 lt_settings->eq_pattern_time = *overrides->eq_pattern_time;
1839 if (overrides->pattern_for_cr != NULL)
1840 lt_settings->pattern_for_cr = *overrides->pattern_for_cr;
1841 if (overrides->pattern_for_eq != NULL)
1842 lt_settings->pattern_for_eq = *overrides->pattern_for_eq;
1844 if (overrides->enhanced_framing != NULL)
1845 lt_settings->enhanced_framing = *overrides->enhanced_framing;
1847 if (link->preferred_training_settings.fec_enable != NULL)
1848 lt_settings->should_set_fec_ready = *link->preferred_training_settings.fec_enable;
/*
 * Convert the DPCD PHY_REPEATER_CNT one-hot encoding (0x80 = 1 repeater
 * down to 0x01 = 8 repeaters) into a plain count; 0 for any other value.
 * NOTE(review): the per-case 'return N;' lines are elided in this copy —
 * only the case labels and the fall-through default remain visible.
 */
1851 uint8_t dp_convert_to_count(uint8_t lttpr_repeater_count)
1853 switch (lttpr_repeater_count) {
1854 case 0x80: // 1 lttpr repeater
1856 case 0x40: // 2 lttpr repeaters
1858 case 0x20: // 3 lttpr repeaters
1860 case 0x10: // 4 lttpr repeaters
1862 case 0x08: // 5 lttpr repeaters
1864 case 0x04: // 6 lttpr repeaters
1866 case 0x02: // 7 lttpr repeaters
1868 case 0x01: // 8 lttpr repeaters
1873 return 0; // invalid value
/*
 * Put the LTTPR chain into transparent mode by writing
 * DP_PHY_REPEATER_MODE_TRANSPARENT to the DP_PHY_REPEATER_MODE DPCD
 * register; returns the AUX write status.
 */
1876 static enum dc_status configure_lttpr_mode_transparent(struct dc_link *link)
1878 uint8_t repeater_mode = DP_PHY_REPEATER_MODE_TRANSPARENT;
1880 DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Transparent Mode\n", __func__);
1881 return core_link_write_dpcd(link,
1882 DP_PHY_REPEATER_MODE,
1883 (uint8_t *)&repeater_mode,
1884 sizeof(repeater_mode));
/*
 * Switch the LTTPR chain into non-transparent mode. For 8b/10b links the
 * mode register is first reset to transparent, then (when the link is in
 * non-transparent LTTPR mode) set to non-transparent, and each repeater's
 * TRAINING_AUX_RD_INTERVAL is cached (masked to 7 bits). The first hop's
 * interval is cleared for USB4 DPIA links, whose first hop needs no
 * driver-side training. Caches the programmed mode in dpcd_caps.
 * NOTE(review): several braces and the final 'return result;' are elided
 * in this copy — verify against upstream.
 */
1887 static enum dc_status configure_lttpr_mode_non_transparent(
1888 struct dc_link *link,
1889 const struct link_training_settings *lt_settings)
1891 /* aux timeout is already set to extended */
1892 /* RESET/SET lttpr mode to enable non transparent mode */
1893 uint8_t repeater_cnt;
1894 uint32_t aux_interval_address;
1895 uint8_t repeater_id;
1896 enum dc_status result = DC_ERROR_UNEXPECTED;
1897 uint8_t repeater_mode = DP_PHY_REPEATER_MODE_TRANSPARENT;
1899 enum dp_link_encoding encoding = dp_get_link_encoding_format(<_settings->link_settings);
/* 8b/10b requires a reset to transparent before enabling non-transparent. */
1901 if (encoding == DP_8b_10b_ENCODING) {
1902 DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Transparent Mode\n", __func__);
1903 result = core_link_write_dpcd(link,
1904 DP_PHY_REPEATER_MODE,
1905 (uint8_t *)&repeater_mode,
1906 sizeof(repeater_mode));
1910 if (result == DC_OK) {
1911 link->dpcd_caps.lttpr_caps.mode = repeater_mode;
1914 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
1916 DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Non Transparent Mode\n", __func__);
1918 repeater_mode = DP_PHY_REPEATER_MODE_NON_TRANSPARENT;
1919 result = core_link_write_dpcd(link,
1920 DP_PHY_REPEATER_MODE,
1921 (uint8_t *)&repeater_mode,
1922 sizeof(repeater_mode));
1924 if (result == DC_OK) {
1925 link->dpcd_caps.lttpr_caps.mode = repeater_mode;
1928 if (encoding == DP_8b_10b_ENCODING) {
1929 repeater_cnt = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
1931 /* Driver does not need to train the first hop. Skip DPCD read and clear
1932 * AUX_RD_INTERVAL for DPTX-to-DPIA hop.
1934 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA)
1935 link->dpcd_caps.lttpr_caps.aux_rd_interval[--repeater_cnt] = 0;
/* Cache each repeater's advertised AUX read interval (bit 7 masked off). */
1937 for (repeater_id = repeater_cnt; repeater_id > 0; repeater_id--) {
1938 aux_interval_address = DP_TRAINING_AUX_RD_INTERVAL_PHY_REPEATER1 +
1939 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (repeater_id - 1));
1940 core_link_read_dpcd(
1942 aux_interval_address,
1943 (uint8_t *)&link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1],
1944 sizeof(link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1]));
1945 link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1] &= 0x7F;
/*
 * Tell an LTTPR repeater that training of its hop is finished by writing
 * TRAINING_PATTERN_SET = VIDEOIDLE into that repeater's per-hop register
 * block, then log the write. 'offset' is the 1-based repeater index.
 */
1953 static void repeater_training_done(struct dc_link *link, uint32_t offset)
1955 union dpcd_training_pattern dpcd_pattern = {0};
1957 const uint32_t dpcd_base_lt_offset =
1958 DP_TRAINING_PATTERN_SET_PHY_REPEATER1 +
1959 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1960 /* Set training not in progress*/
1961 dpcd_pattern.v1_4.TRAINING_PATTERN_SET = DPCD_TRAINING_PATTERN_VIDEOIDLE;
1963 core_link_write_dpcd(
1965 dpcd_base_lt_offset,
1969 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Id: %d 0x%X pattern = %x\n",
1972 dpcd_base_lt_offset,
1973 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
/*
 * Emit a one-line connectivity-log summary of a finished link training:
 * link rate, lane count, human-readable result, lane-0 VS/PE, and the
 * downspread setting. Purely informational; no hardware access.
 * NOTE(review): many 'case' labels, the string assignments for the
 * common link rates/results, 'break;'s and closing braces are elided in
 * this copy — verify against upstream.
 */
1976 static void print_status_message(
1977 struct dc_link *link,
1978 const struct link_training_settings *lt_settings,
1979 enum link_training_result status)
1981 char *link_rate = "Unknown";
1982 char *lt_result = "Unknown";
1983 char *lt_spread = "Disabled";
/* Translate the trained link rate to a display string. */
1985 switch (lt_settings->link_settings.link_rate) {
1989 case LINK_RATE_RATE_2:
1992 case LINK_RATE_RATE_3:
1995 case LINK_RATE_HIGH:
1998 case LINK_RATE_RBR2:
2001 case LINK_RATE_RATE_6:
2004 case LINK_RATE_HIGH2:
2007 case LINK_RATE_HIGH3:
2010 #if defined(CONFIG_DRM_AMD_DC_DCN)
2011 case LINK_RATE_UHBR10:
2012 link_rate = "UHBR10";
2014 case LINK_RATE_UHBR13_5:
2015 link_rate = "UHBR13.5";
2017 case LINK_RATE_UHBR20:
2018 link_rate = "UHBR20";
/* Translate the training result to a display string. */
2026 case LINK_TRAINING_SUCCESS:
2029 case LINK_TRAINING_CR_FAIL_LANE0:
2030 lt_result = "CR failed lane0";
2032 case LINK_TRAINING_CR_FAIL_LANE1:
2033 lt_result = "CR failed lane1";
2035 case LINK_TRAINING_CR_FAIL_LANE23:
2036 lt_result = "CR failed lane23";
2038 case LINK_TRAINING_EQ_FAIL_CR:
2039 lt_result = "CR failed in EQ";
2041 case LINK_TRAINING_EQ_FAIL_EQ:
2042 lt_result = "EQ failed";
2044 case LINK_TRAINING_LQA_FAIL:
2045 lt_result = "LQA failed";
2047 case LINK_TRAINING_LINK_LOSS:
2048 lt_result = "Link loss";
2050 #if defined(CONFIG_DRM_AMD_DC_DCN)
2051 case DP_128b_132b_LT_FAILED:
2052 lt_result = "LT_FAILED received";
2054 case DP_128b_132b_MAX_LOOP_COUNT_REACHED:
2055 lt_result = "max loop count reached";
2057 case DP_128b_132b_CHANNEL_EQ_DONE_TIMEOUT:
2058 lt_result = "channel EQ timeout";
2060 case DP_128b_132b_CDS_DONE_TIMEOUT:
2061 lt_result = "CDS timeout";
/* Translate the downspread setting to a display string. */
2068 switch (lt_settings->link_settings.link_spread) {
2069 case LINK_SPREAD_DISABLED:
2070 lt_spread = "Disabled";
2072 case LINK_SPREAD_05_DOWNSPREAD_30KHZ:
2073 lt_spread = "0.5% 30KHz";
2075 case LINK_SPREAD_05_DOWNSPREAD_33KHZ:
2076 lt_spread = "0.5% 33KHz";
2082 /* Connectivity log: link training */
2083 #if defined(CONFIG_DRM_AMD_DC_DCN)
2084 /* TODO - DP2.0 Log: add connectivity log for FFE PRESET */
2086 CONN_MSG_LT(link, "%sx%d %s VS=%d, PE=%d, DS=%s",
2088 lt_settings->link_settings.lane_count,
2090 lt_settings->lane_settings[0].VOLTAGE_SWING,
2091 lt_settings->lane_settings[0].PRE_EMPHASIS,
/*
 * Program the current drive settings on the local PHY (DPRX scope),
 * re-derive the DPCD lane settings from the HW settings, and notify the
 * sink of the new values over AUX.
 */
2095 void dc_link_dp_set_drive_settings(
2096 struct dc_link *link,
2097 struct link_training_settings *lt_settings)
2099 /* program ASIC PHY settings*/
2100 dp_set_hw_lane_settings(link, lt_settings, DPRX)
2102 dp_hw_to_dpcd_lane_settings(lt_settings,
2103 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2105 /* Notify DP sink the PHY settings from source */
2106 dpcd_set_lane_settings(link, lt_settings, DPRX);
/*
 * Open-loop link training with no AUX feedback: decide and override the
 * training settings, then blindly transmit the CR pattern, wait, transmit
 * the EQ pattern, wait, and drop to the idle pattern — without ever
 * reading sink status. Used when the sink's AUX channel cannot be relied
 * on; success is always reported.
 * NOTE(review): some call arguments and the final 'return true;' are
 * elided, and '<_settings' looks like an entity-mangled '&lt_settings'
 * in this copy — verify against upstream.
 */
2109 bool dc_link_dp_perform_link_training_skip_aux(
2110 struct dc_link *link,
2111 const struct dc_link_settings *link_setting)
2113 struct link_training_settings lt_settings = {0};
2115 dp_decide_training_settings(
2119 override_training_settings(
2121 &link->preferred_training_settings,
2124 /* 1. Perform_clock_recovery_sequence. */
2126 /* transmit training pattern for clock recovery */
2127 dp_set_hw_training_pattern(link, lt_settings.pattern_for_cr, DPRX);
2129 /* call HWSS to set lane settings*/
2130 dp_set_hw_lane_settings(link, <_settings, DPRX);
2132 /* wait receiver to lock-on*/
2133 dp_wait_for_training_aux_rd_interval(link, lt_settings.cr_pattern_time);
2135 /* 2. Perform_channel_equalization_sequence. */
2137 /* transmit training pattern for channel equalization. */
2138 dp_set_hw_training_pattern(link, lt_settings.pattern_for_eq, DPRX);
2140 /* call HWSS to set lane settings*/
2141 dp_set_hw_lane_settings(link, <_settings, DPRX);
2143 /* wait receiver to lock-on. */
2144 dp_wait_for_training_aux_rd_interval(link, lt_settings.eq_pattern_time);
2146 /* 3. Perform_link_training_int. */
2148 /* Mainlink output idle pattern. */
2149 dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
2151 print_status_message(link, <_settings, LINK_TRAINING_SUCCESS);
/*
 * Program the LTTPR chain into the mode selected in lt_settings
 * (transparent or non-transparent); DC_OK for any other mode value.
 * NOTE(review): the trailing 'return status;' is elided in this copy.
 */
2156 enum dc_status dpcd_configure_lttpr_mode(struct dc_link *link, struct link_training_settings *lt_settings)
2158 enum dc_status status = DC_OK;
2160 if (lt_settings->lttpr_mode == LTTPR_MODE_TRANSPARENT)
2161 status = configure_lttpr_mode_transparent(link);
2163 else if (lt_settings->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT)
2164 status = configure_lttpr_mode_non_transparent(link, lt_settings);
/*
 * Leave training mode: clear TRAINING_PATTERN_SET back to video/idle and,
 * on DCN builds, poll DP_SINK_STATUS up to 10 times for the intra-hop AUX
 * reply indication to clear (required before 128b/132b retraining).
 * NOTE(review): the loop-body break/delay and closing braces are elided
 * in this copy — verify against upstream.
 */
2169 static void dpcd_exit_training_mode(struct dc_link *link)
2171 #if defined(CONFIG_DRM_AMD_DC_DCN)
2172 uint8_t sink_status = 0;
2176 /* clear training pattern set */
2177 dpcd_set_training_pattern(link, DP_TRAINING_PATTERN_VIDEOIDLE);
2179 #if defined(CONFIG_DRM_AMD_DC_DCN)
2180 /* poll for intra-hop disable */
2181 for (i = 0; i < 10; i++) {
2182 if ((core_link_read_dpcd(link, DP_SINK_STATUS, &sink_status, 1) == DC_OK) &&
2183 (sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION) == 0)
/*
 * Write the selected channel-coding (8b/10b vs 128b/132b encoding value)
 * to DP_MAIN_LINK_CHANNEL_CODING_SET and log it.
 * NOTE(review): the write size argument, part of the log call and the
 * trailing 'return status;' are elided in this copy — verify upstream.
 */
2190 enum dc_status dpcd_configure_channel_coding(struct dc_link *link,
2191 struct link_training_settings *lt_settings)
2193 enum dp_link_encoding encoding =
2194 dp_get_link_encoding_format(
2195 <_settings->link_settings);
2196 enum dc_status status;
2198 status = core_link_write_dpcd(
2200 DP_MAIN_LINK_CHANNEL_CODING_SET,
2201 (uint8_t *) &encoding,
2203 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X MAIN_LINK_CHANNEL_CODING_SET = %x\n",
2205 DP_MAIN_LINK_CHANNEL_CODING_SET,
2211 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * Read DP_128b_132b_TRAINING_AUX_RD_INTERVAL and convert it to
 * microseconds: (VALUE + 1) * unit, where the UNIT bit selects a 1ms
 * (1b) or 2ms (0b) unit; the spec maximum works out to 256 ms.
 */
2212 static void dpcd_128b_132b_get_aux_rd_interval(struct dc_link *link,
2213 uint32_t *interval_in_us)
2215 union dp_128b_132b_training_aux_rd_interval dpcd_interval;
2216 uint32_t interval_unit = 0;
2218 dpcd_interval.raw = 0;
2219 core_link_read_dpcd(link, DP_128b_132b_TRAINING_AUX_RD_INTERVAL,
2220 &dpcd_interval.raw, sizeof(dpcd_interval.raw));
2221 interval_unit = dpcd_interval.bits.UNIT ? 1 : 2; /* 0b = 2 ms, 1b = 1 ms */
2222 /* (128b/132b_TRAINING_AUX_RD_INTERVAL value + 1) *
2223 * INTERVAL_UNIT. The maximum is 256 ms
2225 *interval_in_us = (dpcd_interval.bits.VALUE + 1) * interval_unit * 1000;
/*
 * dp_perform_128b_132b_channel_eq_done_sequence() - 128b/132b channel
 * equalization phase of DP 2.x link training.
 *
 * Sequence: transmit TPS1, set TRAINING_PATTERN_SET=01h, read the AUX rd
 * interval and initial lane adjust requests, switch to TPS2, then poll for
 * per-lane channel-EQ done followed by EQ interlane-align done.  Failure
 * paths: loop-count limit, EQ wait-time limit, or the sink raising
 * LT_FAILED_128b_132b.
 *
 * NOTE(review): loop bodies are missing lines in this extract (break
 * statements, loop_count increment, closing braces); kept verbatim.
 */
2228 static enum link_training_result dp_perform_128b_132b_channel_eq_done_sequence(
2229 struct dc_link *link,
2230 struct link_training_settings *lt_settings)
2233 uint32_t aux_rd_interval = 0;
2234 uint32_t wait_time = 0;
2235 union lane_align_status_updated dpcd_lane_status_updated = {0};
2236 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
2237 enum link_training_result status = LINK_TRAINING_SUCCESS;
2238 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
2240 /* Transmit 128b/132b_TPS1 over Main-Link */
2241 dp_set_hw_training_pattern(link, lt_settings->pattern_for_cr, DPRX);
2242 /* Set TRAINING_PATTERN_SET to 01h */
2243 dpcd_set_training_pattern(link, lt_settings->pattern_for_cr);
2245 /* Adjust TX_FFE_PRESET_VALUE and Transmit 128b/132b_TPS2 over Main-Link */
2246 dpcd_128b_132b_get_aux_rd_interval(link, &aux_rd_interval);
2247 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2248 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2249 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2250 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2251 dp_set_hw_lane_settings(link, lt_settings, DPRX);
2252 dp_set_hw_training_pattern(link, lt_settings->pattern_for_eq, DPRX);
2254 /* Set loop counter to start from 1 */
2257 /* Set TRAINING_PATTERN_SET to 02h and TX_FFE_PRESET_VALUE in one AUX transaction */
2258 dpcd_set_lt_pattern_and_lane_settings(link, lt_settings,
2259 lt_settings->pattern_for_eq, DPRX);
2261 /* poll for channel EQ done */
2262 while (status == LINK_TRAINING_SUCCESS) {
2263 dp_wait_for_training_aux_rd_interval(link, aux_rd_interval);
2264 wait_time += aux_rd_interval;
2265 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2266 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2267 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2268 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2269 dpcd_128b_132b_get_aux_rd_interval(link, &aux_rd_interval);
2270 if (dp_is_ch_eq_done(lt_settings->link_settings.lane_count,
2271 dpcd_lane_status)) {
2274 } else if (loop_count >= lt_settings->eq_loop_count_limit) {
2275 status = DP_128b_132b_MAX_LOOP_COUNT_REACHED;
2276 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2277 status = DP_128b_132b_LT_FAILED;
2279 dp_set_hw_lane_settings(link, lt_settings, DPRX);
2280 dpcd_set_lane_settings(link, lt_settings, DPRX);
2285 /* poll for EQ interlane align done */
2286 while (status == LINK_TRAINING_SUCCESS) {
2287 if (dpcd_lane_status_updated.bits.EQ_INTERLANE_ALIGN_DONE_128b_132b) {
2290 } else if (wait_time >= lt_settings->eq_wait_time_limit) {
2291 status = DP_128b_132b_CHANNEL_EQ_DONE_TIMEOUT;
2292 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2293 status = DP_128b_132b_LT_FAILED;
2295 dp_wait_for_training_aux_rd_interval(link,
2296 lt_settings->eq_pattern_time);
2297 wait_time += lt_settings->eq_pattern_time;
2298 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2299 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
/*
 * dp_perform_128b_132b_cds_done_sequence() - CDS (clock/data switch) phase
 * of 128b/132b link training.
 *
 * Assumes the EQ pattern is already on the wire; sets TRAINING_PATTERN_SET
 * to the CDS pattern and polls until all lanes report symbol lock and
 * CDS_INTERLANE_ALIGN_DONE, or until the sink flags LT_FAILED_128b_132b,
 * or the cds_wait_time_limit expires (timeout result).
 *
 * NOTE(review): loop-exit lines and closing braces are missing from this
 * extract; kept verbatim.
 */
2306 static enum link_training_result dp_perform_128b_132b_cds_done_sequence(
2307 struct dc_link *link,
2308 struct link_training_settings *lt_settings)
2310 /* Assumption: assume hardware has transmitted eq pattern */
2311 enum link_training_result status = LINK_TRAINING_SUCCESS;
2312 union lane_align_status_updated dpcd_lane_status_updated = {0};
2313 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
2314 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
2315 uint32_t wait_time = 0;
2317 /* initiate CDS done sequence */
2318 dpcd_set_training_pattern(link, lt_settings->pattern_for_cds);
2320 /* poll for CDS interlane align done and symbol lock */
2321 while (status == LINK_TRAINING_SUCCESS) {
2322 dp_wait_for_training_aux_rd_interval(link,
2323 lt_settings->cds_pattern_time);
2324 wait_time += lt_settings->cds_pattern_time;
2325 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2326 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2327 if (dp_is_symbol_locked(lt_settings->link_settings.lane_count, dpcd_lane_status) &&
2328 dpcd_lane_status_updated.bits.CDS_INTERLANE_ALIGN_DONE_128b_132b) {
2331 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2332 status = DP_128b_132b_LT_FAILED;
2333 } else if (wait_time >= lt_settings->cds_wait_time_limit) {
2334 status = DP_128b_132b_CDS_DONE_TIMEOUT;
/*
 * dp_perform_8b_10b_link_training() - classic DP 1.x (8b/10b) training.
 *
 * Programs link rate/lane count/spread, then runs clock recovery followed
 * by channel equalization.  In non-transparent LTTPR mode the CR/EQ pair
 * is run once per repeater (from the farthest repeater inward), resetting
 * the cached DPCD lane settings between repeaters, before finally training
 * against the DPRX itself.
 *
 * NOTE(review): opening brace, some locals and closing braces are missing
 * from this extract; kept verbatim.
 */
2342 static enum link_training_result dp_perform_8b_10b_link_training(
2343 struct dc_link *link,
2344 struct link_training_settings *lt_settings)
2346 enum link_training_result status = LINK_TRAINING_SUCCESS;
2348 uint8_t repeater_cnt;
2349 uint8_t repeater_id;
2352 if (link->ctx->dc->work_arounds.lt_early_cr_pattern)
2353 start_clock_recovery_pattern_early(link, lt_settings, DPRX);
2355 /* 1. set link rate, lane count and spread. */
2356 dpcd_set_link_settings(link, lt_settings);
2358 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
2360 /* 2. perform link training (set link training done
2361 * to false is done as well)
2363 repeater_cnt = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
2365 for (repeater_id = repeater_cnt; (repeater_id > 0 && status == LINK_TRAINING_SUCCESS);
2367 status = perform_clock_recovery_sequence(link, lt_settings, repeater_id);
2369 if (status != LINK_TRAINING_SUCCESS)
2372 status = perform_channel_equalization_sequence(link,
2376 if (status != LINK_TRAINING_SUCCESS)
2379 repeater_training_done(link, repeater_id);
/* clear cached DPCD lane settings before training the DPRX hop */
2382 for (lane = 0; lane < (uint8_t)lt_settings->link_settings.lane_count; lane++)
2383 lt_settings->dpcd_lane_settings[lane].raw = 0;
2386 if (status == LINK_TRAINING_SUCCESS) {
2387 status = perform_clock_recovery_sequence(link, lt_settings, DPRX);
2388 if (status == LINK_TRAINING_SUCCESS) {
2389 status = perform_channel_equalization_sequence(link,
2398 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * dp_perform_128b_132b_link_training() - top-level DP 2.x training flow.
 *
 * If the legacy_dp2_lt debug override is set, falls back to the 8b/10b
 * sequence with 8b/10b-decided settings.  Otherwise programs link
 * settings and runs the channel-EQ phase followed by the CDS phase.
 *
 * Fix: restore "&lt_settings" (corrupted to "<_settings" by HTML-entity
 * decoding) in the decide_8b_10b_training_settings() call.
 *
 * NOTE(review): some lines (braces, legacy_settings argument) are missing
 * from this extract; kept verbatim.
 */
2399 static enum link_training_result dp_perform_128b_132b_link_training(
2400 struct dc_link *link,
2401 struct link_training_settings *lt_settings)
2403 enum link_training_result result = LINK_TRAINING_SUCCESS;
2405 /* TODO - DP2.0 Link: remove legacy_dp2_lt logic */
2406 if (link->dc->debug.legacy_dp2_lt) {
2407 struct link_training_settings legacy_settings;
2409 decide_8b_10b_training_settings(link,
2410 &lt_settings->link_settings,
2412 return dp_perform_8b_10b_link_training(link, &legacy_settings);
2415 dpcd_set_link_settings(link, lt_settings);
2417 if (result == LINK_TRAINING_SUCCESS)
2418 result = dp_perform_128b_132b_channel_eq_done_sequence(link, lt_settings);
2420 if (result == LINK_TRAINING_SUCCESS)
2421 result = dp_perform_128b_132b_cds_done_sequence(link, lt_settings);
/*
 * dc_link_dp_perform_fixed_vs_pe_training_sequence() - 8b/10b training
 * variant for LTTPRs with fixed VS/PE, driven through vendor-specific
 * DPCD writes at 0xF004F (+ per-repeater offset).
 *
 * Flow: reset vendor lane settings and enable the vendor "intercept",
 * program downspread/lane count/link rate (toggling the rate via a
 * vendor workaround when it matches the cached WA rate), then run clock
 * recovery and channel EQ, mirroring the decided VS/PE values into the
 * vendor DPCD registers on every iteration.
 *
 * Fixes: restore "&lt_settings" in the ASSERT() and in
 * get_dpcd_link_rate() — both had been corrupted to "<_settings" by
 * HTML-entity decoding.
 *
 * NOTE(review): many original lines are missing from this extract
 * (braces, some call arguments, counters such as retries_cr/retry_count
 * initialization and increments); body kept verbatim otherwise.
 */
2427 static enum link_training_result dc_link_dp_perform_fixed_vs_pe_training_sequence(
2428 struct dc_link *link,
2429 struct link_training_settings *lt_settings)
2431 const uint8_t vendor_lttpr_write_data_reset[4] = {0x1, 0x50, 0x63, 0xFF};
2432 const uint8_t offset = dp_convert_to_count(
2433 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
2434 const uint8_t vendor_lttpr_write_data_intercept_en[4] = {0x1, 0x55, 0x63, 0x0};
2435 const uint8_t vendor_lttpr_write_data_intercept_dis[4] = {0x1, 0x55, 0x63, 0x68};
2436 uint8_t vendor_lttpr_write_data_vs[4] = {0x1, 0x51, 0x63, 0x0};
2437 uint8_t vendor_lttpr_write_data_pe[4] = {0x1, 0x52, 0x63, 0x0};
2438 uint32_t vendor_lttpr_write_address = 0xF004F;
2439 enum link_training_result status = LINK_TRAINING_SUCCESS;
2441 union down_spread_ctrl downspread = {0};
2442 union lane_count_set lane_count_set = {0};
2443 uint8_t toggle_rate;
2446 /* Only 8b/10b is supported */
2447 ASSERT(dp_get_link_encoding_format(&lt_settings->link_settings) ==
2448 DP_8b_10b_ENCODING);
2450 if (offset != 0xFF) {
2451 vendor_lttpr_write_address +=
2452 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
2455 /* Vendor specific: Reset lane settings */
2456 core_link_write_dpcd(
2458 vendor_lttpr_write_address,
2459 &vendor_lttpr_write_data_reset[0],
2460 sizeof(vendor_lttpr_write_data_reset));
2461 core_link_write_dpcd(
2463 vendor_lttpr_write_address,
2464 &vendor_lttpr_write_data_vs[0],
2465 sizeof(vendor_lttpr_write_data_vs));
2466 core_link_write_dpcd(
2468 vendor_lttpr_write_address,
2469 &vendor_lttpr_write_data_pe[0],
2470 sizeof(vendor_lttpr_write_data_pe));
2472 /* Vendor specific: Enable intercept */
2473 core_link_write_dpcd(
2475 vendor_lttpr_write_address,
2476 &vendor_lttpr_write_data_intercept_en[0],
2477 sizeof(vendor_lttpr_write_data_intercept_en));
2479 /* 1. set link rate, lane count and spread. */
2481 downspread.raw = (uint8_t)(lt_settings->link_settings.link_spread);
2483 lane_count_set.bits.LANE_COUNT_SET =
2484 lt_settings->link_settings.lane_count;
2486 lane_count_set.bits.ENHANCED_FRAMING = lt_settings->enhanced_framing;
2487 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
2490 if (lt_settings->pattern_for_eq < DP_TRAINING_PATTERN_SEQUENCE_4) {
2491 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED =
2492 link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED;
2495 core_link_write_dpcd(link, DP_DOWNSPREAD_CTRL,
2496 &downspread.raw, sizeof(downspread));
2498 core_link_write_dpcd(link, DP_LANE_COUNT_SET,
2499 &lane_count_set.raw, 1);
2501 #if defined(CONFIG_DRM_AMD_DC_DCN)
2502 rate = get_dpcd_link_rate(&lt_settings->link_settings);
2504 rate = (uint8_t) (lt_settings->link_settings.link_rate);
2507 /* Vendor specific: Toggle link rate */
2508 toggle_rate = (rate == 0x6) ? 0xA : 0x6;
2510 if (link->vendor_specific_lttpr_link_rate_wa == rate) {
2511 core_link_write_dpcd(
2518 link->vendor_specific_lttpr_link_rate_wa = rate;
2520 core_link_write_dpcd(link, DP_LINK_BW_SET, &rate, 1);
2522 DC_LOG_HW_LINK_TRAINING("%s\n %x rate = %x\n %x lane = %x framing = %x\n %x spread = %x\n",
2525 lt_settings->link_settings.link_rate,
2527 lt_settings->link_settings.lane_count,
2528 lt_settings->enhanced_framing,
2530 lt_settings->link_settings.link_spread);
2532 /* 2. Perform link training */
2534 /* Perform Clock Recovery Sequence */
2535 if (status == LINK_TRAINING_SUCCESS) {
2536 uint32_t retries_cr;
2537 uint32_t retry_count;
2538 uint32_t wait_time_microsec;
2539 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
2540 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
2541 union lane_align_status_updated dpcd_lane_status_updated;
2542 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
2547 while ((retries_cr < LINK_TRAINING_MAX_RETRY_COUNT) &&
2548 (retry_count < LINK_TRAINING_MAX_CR_RETRY)) {
2550 memset(&dpcd_lane_status, '\0', sizeof(dpcd_lane_status));
2551 memset(&dpcd_lane_status_updated, '\0',
2552 sizeof(dpcd_lane_status_updated));
2554 /* 1. call HWSS to set lane settings */
2555 dp_set_hw_lane_settings(
2560 /* 2. update DPCD of the receiver */
2562 /* EPR #361076 - write as a 5-byte burst,
2563 * but only for the 1-st iteration.
2565 dpcd_set_lt_pattern_and_lane_settings(
2568 lt_settings->pattern_for_cr,
2570 /* Vendor specific: Disable intercept */
2571 core_link_write_dpcd(
2573 vendor_lttpr_write_address,
2574 &vendor_lttpr_write_data_intercept_dis[0],
2575 sizeof(vendor_lttpr_write_data_intercept_dis));
2577 vendor_lttpr_write_data_vs[3] = 0;
2578 vendor_lttpr_write_data_pe[3] = 0;
2580 for (lane = 0; lane < lane_count; lane++) {
2581 vendor_lttpr_write_data_vs[3] |=
2582 lt_settings->dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET << (2 * lane);
2583 vendor_lttpr_write_data_pe[3] |=
2584 lt_settings->dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET << (2 * lane);
2587 /* Vendor specific: Update VS and PE to DPRX requested value */
2588 core_link_write_dpcd(
2590 vendor_lttpr_write_address,
2591 &vendor_lttpr_write_data_vs[0],
2592 sizeof(vendor_lttpr_write_data_vs));
2593 core_link_write_dpcd(
2595 vendor_lttpr_write_address,
2596 &vendor_lttpr_write_data_pe[0],
2597 sizeof(vendor_lttpr_write_data_pe));
2599 dpcd_set_lane_settings(
2605 /* 3. wait receiver to lock-on*/
2606 wait_time_microsec = lt_settings->cr_pattern_time;
2608 dp_wait_for_training_aux_rd_interval(
2610 wait_time_microsec);
2612 /* 4. Read lane status and requested drive
2613 * settings as set by the sink
2615 dp_get_lane_status_and_lane_adjust(
2619 &dpcd_lane_status_updated,
2623 /* 5. check CR done*/
2624 if (dp_is_cr_done(lane_count, dpcd_lane_status)) {
2625 status = LINK_TRAINING_SUCCESS;
2629 /* 6. max VS reached*/
2630 if (dp_is_max_vs_reached(lt_settings))
2633 /* 7. same lane settings */
2634 /* Note: settings are the same for all lanes,
2635 * so comparing first lane is sufficient
2637 if (lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET ==
2638 dpcd_lane_adjust[0].bits.VOLTAGE_SWING_LANE)
2643 /* 8. update VS/PE/PC2 in lt_settings*/
2644 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2645 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2649 if (retry_count >= LINK_TRAINING_MAX_CR_RETRY) {
2651 DC_LOG_ERROR("%s: Link Training Error, could not get CR after %d tries. Possibly voltage swing issue",
2653 LINK_TRAINING_MAX_CR_RETRY);
2657 status = dp_get_cr_failure(lane_count, dpcd_lane_status);
2660 /* Perform Channel EQ Sequence */
2661 if (status == LINK_TRAINING_SUCCESS) {
2662 enum dc_dp_training_pattern tr_pattern;
2663 uint32_t retries_ch_eq;
2664 uint32_t wait_time_microsec;
2665 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
2666 union lane_align_status_updated dpcd_lane_status_updated = {0};
2667 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
2668 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
2670 /* Note: also check that TPS4 is a supported feature*/
2671 tr_pattern = lt_settings->pattern_for_eq;
2673 dp_set_hw_training_pattern(link, tr_pattern, 0);
2675 status = LINK_TRAINING_EQ_FAIL_EQ;
2677 for (retries_ch_eq = 0; retries_ch_eq <= LINK_TRAINING_MAX_RETRY_COUNT;
2680 dp_set_hw_lane_settings(link, lt_settings, 0);
2682 vendor_lttpr_write_data_vs[3] = 0;
2683 vendor_lttpr_write_data_pe[3] = 0;
2685 for (lane = 0; lane < lane_count; lane++) {
2686 vendor_lttpr_write_data_vs[3] |=
2687 lt_settings->dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET << (2 * lane);
2688 vendor_lttpr_write_data_pe[3] |=
2689 lt_settings->dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET << (2 * lane);
2692 /* Vendor specific: Update VS and PE to DPRX requested value */
2693 core_link_write_dpcd(
2695 vendor_lttpr_write_address,
2696 &vendor_lttpr_write_data_vs[0],
2697 sizeof(vendor_lttpr_write_data_vs));
2698 core_link_write_dpcd(
2700 vendor_lttpr_write_address,
2701 &vendor_lttpr_write_data_pe[0],
2702 sizeof(vendor_lttpr_write_data_pe));
2706 /* EPR #361076 - write as a 5-byte burst,
2707 * but only for the 1-st iteration
2710 dpcd_set_lt_pattern_and_lane_settings(
2715 dpcd_set_lane_settings(link, lt_settings, 0);
2717 /* 3. wait for receiver to lock-on*/
2718 wait_time_microsec = lt_settings->eq_pattern_time;
2720 dp_wait_for_training_aux_rd_interval(
2722 wait_time_microsec);
2724 /* 4. Read lane status and requested
2725 * drive settings as set by the sink
2727 dp_get_lane_status_and_lane_adjust(
2731 &dpcd_lane_status_updated,
2735 /* 5. check CR done*/
2736 if (!dp_is_cr_done(lane_count, dpcd_lane_status)) {
2737 status = LINK_TRAINING_EQ_FAIL_CR;
2741 /* 6. check CHEQ done*/
2742 if (dp_is_ch_eq_done(lane_count, dpcd_lane_status) &&
2743 dp_is_symbol_locked(lane_count, dpcd_lane_status) &&
2744 dp_is_interlane_aligned(dpcd_lane_status_updated)) {
2745 status = LINK_TRAINING_SUCCESS;
2749 /* 7. update VS/PE/PC2 in lt_settings*/
2750 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2751 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
/*
 * dc_link_dp_perform_link_training() - top-level per-attempt training entry.
 *
 * Decides and overrides training settings, applies/clears the
 * vendor-specific fixed-VS LTTPR workaround around the attempt, configures
 * LTTPR mode, FEC readiness and channel coding, dispatches to the
 * fixed-VS/PE, 8b/10b or 128b/132b sequence based on encoding, exits
 * training mode, optionally transitions to video idle, and logs the result
 * (bumping ltFailCount on failure).
 *
 * Fix: restore "&lt_settings" in six call sites where HTML-entity decoding
 * corrupted the address-of expression to "<_settings".
 *
 * NOTE(review): braces, some call arguments and #else/#endif lines are
 * missing from this extract; kept verbatim otherwise.
 */
2759 enum link_training_result dc_link_dp_perform_link_training(
2760 struct dc_link *link,
2761 const struct dc_link_settings *link_settings,
2762 bool skip_video_pattern)
2764 enum link_training_result status = LINK_TRAINING_SUCCESS;
2765 struct link_training_settings lt_settings = {0};
2766 enum dp_link_encoding encoding =
2767 dp_get_link_encoding_format(link_settings);
2769 /* decide training settings */
2770 dp_decide_training_settings(
2774 override_training_settings(
2776 &link->preferred_training_settings,
2779 /* reset previous training states */
2780 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
2781 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
2782 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
2783 link->apply_vendor_specific_lttpr_link_rate_wa = true;
2784 vendor_specific_lttpr_wa_four(link, true);
2786 dpcd_exit_training_mode(link);
2789 /* configure link prior to entering training mode */
2790 dpcd_configure_lttpr_mode(link, &lt_settings);
2791 dp_set_fec_ready(link, lt_settings.should_set_fec_ready);
2792 dpcd_configure_channel_coding(link, &lt_settings);
2794 /* enter training mode:
2795 * Per DP specs starting from here, DPTX device shall not issue
2796 * Non-LT AUX transactions inside training mode.
2798 if (!link->dc->debug.apply_vendor_specific_lttpr_wa &&
2799 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
2800 link->lttpr_mode == LTTPR_MODE_TRANSPARENT)
2801 status = dc_link_dp_perform_fixed_vs_pe_training_sequence(link, &lt_settings);
2802 else if (encoding == DP_8b_10b_ENCODING)
2803 status = dp_perform_8b_10b_link_training(link, &lt_settings);
2804 #if defined(CONFIG_DRM_AMD_DC_DCN)
2805 else if (encoding == DP_128b_132b_ENCODING)
2806 status = dp_perform_128b_132b_link_training(link, &lt_settings);
2811 /* exit training mode */
2812 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
2813 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
2814 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
2815 link->apply_vendor_specific_lttpr_link_rate_wa = false;
2816 vendor_specific_lttpr_wa_four(link, (status != LINK_TRAINING_SUCCESS));
2818 dpcd_exit_training_mode(link);
2821 /* switch to video idle */
2822 if ((status == LINK_TRAINING_SUCCESS) || !skip_video_pattern)
2823 status = dp_transition_to_video_idle(link,
2827 /* dump debug data */
2828 print_status_message(link, &lt_settings, status);
2829 if (status != LINK_TRAINING_SUCCESS)
2830 link->ctx->dc->debug_data.ltFailCount++;
/*
 * perform_link_training_with_retries() - retry wrapper around link training.
 *
 * Runs up to @attempts training attempts: resolves the link encoder
 * (flexible DIG mapping or fixed), pre-connects DIG BE->FE for 8b/10b,
 * applies sink power-up delay quirks and eDP ASSR/panel-mode setup, then
 * trains via the DPIA, skip-AUX, or normal path.  On failure it disables
 * the PHY, aborts if the sink was unplugged, optionally falls back to a
 * reduced link setting (failing outright if the reduced bandwidth no
 * longer meets the stream requirement), and backs off with an increasing
 * delay between attempts.
 *
 * Fix: restore "&current_setting" in four call sites where HTML-entity
 * decoding corrupted the expression to the '¤' glyph ("&curren;").
 *
 * NOTE(review): various lines (braces, some locals such as j/req_bw/
 * link_bw, break/continue statements, #endif lines) are missing from this
 * extract; kept verbatim otherwise.
 */
2834 bool perform_link_training_with_retries(
2835 const struct dc_link_settings *link_setting,
2836 bool skip_video_pattern,
2838 struct pipe_ctx *pipe_ctx,
2839 enum signal_type signal,
2843 uint8_t delay_between_attempts = LINK_TRAINING_RETRY_DELAY;
2844 struct dc_stream_state *stream = pipe_ctx->stream;
2845 struct dc_link *link = stream->link;
2846 enum dp_panel_mode panel_mode = dp_get_panel_mode(link);
2847 struct link_encoder *link_enc;
2848 enum link_training_result status = LINK_TRAINING_CR_FAIL_LANE0;
2849 struct dc_link_settings current_setting = *link_setting;
2851 /* Dynamically assigned link encoders associated with stream rather than
2854 if (link->is_dig_mapping_flexible && link->dc->res_pool->funcs->link_encs_assign)
2855 link_enc = link_enc_cfg_get_link_enc_used_by_stream(link->ctx->dc, pipe_ctx->stream);
2857 link_enc = link->link_enc;
2859 /* We need to do this before the link training to ensure the idle pattern in SST
2860 * mode will be sent right after the link training
2862 if (dp_get_link_encoding_format(&current_setting) == DP_8b_10b_ENCODING) {
2863 link_enc->funcs->connect_dig_be_to_fe(link_enc,
2864 pipe_ctx->stream_res.stream_enc->id, true);
2865 dp_source_sequence_trace(link, DPCD_SOURCE_SEQ_AFTER_CONNECT_DIG_FE_BE);
2868 for (j = 0; j < attempts; ++j) {
2870 DC_LOG_HW_LINK_TRAINING("%s: Beginning link training attempt %u of %d\n",
2871 __func__, (unsigned int)j + 1, attempts);
2876 pipe_ctx->clock_source->id,
2879 if (stream->sink_patches.dppowerup_delay > 0) {
2880 int delay_dp_power_up_in_ms = stream->sink_patches.dppowerup_delay;
2882 msleep(delay_dp_power_up_in_ms);
2885 #ifdef CONFIG_DRM_AMD_DC_HDCP
2886 if (panel_mode == DP_PANEL_MODE_EDP) {
2887 struct cp_psp *cp_psp = &stream->ctx->cp_psp;
2889 if (cp_psp && cp_psp->funcs.enable_assr)
2890 /* ASSR is bound to fail with unsigned PSP
2891 * verstage used during devlopment phase.
2892 * Report and continue with eDP panel mode to
2893 * perform eDP link training with right settings
2895 cp_psp->funcs.enable_assr(cp_psp->handle, link);
2899 dp_set_panel_mode(link, panel_mode);
2901 if (link->aux_access_disabled) {
2902 dc_link_dp_perform_link_training_skip_aux(link, &current_setting);
2905 /** @todo Consolidate USB4 DP and DPx.x training. */
2906 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA) {
2907 status = dc_link_dpia_perform_link_training(link,
2909 skip_video_pattern);
2911 /* Transmit idle pattern once training successful. */
2912 if (status == LINK_TRAINING_SUCCESS)
2913 dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE,
2916 status = dc_link_dp_perform_link_training(link,
2918 skip_video_pattern);
2921 if (status == LINK_TRAINING_SUCCESS)
2925 /* latest link training still fail, skip delay and keep PHY on
2927 if (j == (attempts - 1) && link->ep_type == DISPLAY_ENDPOINT_PHY)
2930 DC_LOG_WARNING("%s: Link training attempt %u of %d failed\n",
2931 __func__, (unsigned int)j + 1, attempts);
2933 dp_disable_link_phy(link, signal);
2935 /* Abort link training if failure due to sink being unplugged. */
2936 if (status == LINK_TRAINING_ABORT) {
2937 enum dc_connection_type type = dc_connection_none;
2939 dc_link_detect_sink(link, &type);
2940 if (type == dc_connection_none)
2942 } else if (do_fallback) {
2946 decide_fallback_link_setting(link, *link_setting, &current_setting, status);
2947 /* Fail link training if reduced link bandwidth no longer meets
2948 * stream requirements.
2950 req_bw = dc_bandwidth_in_kbps_from_timing(&stream->timing);
2951 link_bw = dc_link_bandwidth_kbps(link, &current_setting);
2952 if (req_bw > link_bw)
2956 msleep(delay_between_attempts);
2958 delay_between_attempts += LINK_TRAINING_RETRY_DELAY;
/*
 * get_clock_source_id() - return the id of the pool's DP clock source.
 *
 * Falls back to CLOCK_SOURCE_ID_UNDEFINED (and, per the in-code note,
 * CLOCK_SOURCE_ID_EXTERNAL at the caller) if the DP clock source was
 * never initialized.
 *
 * NOTE(review): else-branch and return lines are missing from this
 * extract; kept verbatim.
 */
2964 static enum clock_source_id get_clock_source_id(struct dc_link *link)
2966 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_UNDEFINED;
2967 struct clock_source *dp_cs = link->dc->res_pool->dp_clock_source;
2969 if (dp_cs != NULL) {
2970 dp_cs_id = dp_cs->id;
2973 * dp clock source is not initialized for some reason.
2974 * Should not happen, CLOCK_SOURCE_ID_EXTERNAL will be used
/*
 * set_dp_mst_mode() - switch a link between MST and SST operation.
 *
 * Disabling MST: tear down the MST PHY, demote the link to a single
 * connection using remote_sinks[0] as the local sink, and stop the MST
 * topology manager.  Enabling MST: disable the SST PHY, re-enable MST on
 * the sink, and promote the link back to an MST branch.
 */
2982 static void set_dp_mst_mode(struct dc_link *link, bool mst_enable)
2984 if (mst_enable == false &&
2985 link->type == dc_connection_mst_branch) {
2986 /* Disable MST on link. Use only local sink. */
2987 dp_disable_link_phy_mst(link, link->connector_signal);
2989 link->type = dc_connection_single;
2990 link->local_sink = link->remote_sinks[0];
2991 link->local_sink->sink_signal = SIGNAL_TYPE_DISPLAY_PORT;
2992 dc_sink_retain(link->local_sink);
2993 dm_helpers_dp_mst_stop_top_mgr(link->ctx, link);
2994 } else if (mst_enable == true &&
2995 link->type == dc_connection_single &&
2996 link->remote_sinks[0] != NULL) {
2997 /* Re-enable MST on link. */
2998 dp_disable_link_phy(link, link->connector_signal);
2999 dp_enable_mst_on_sink(link, true);
3001 link->type = dc_connection_mst_branch;
3002 link->local_sink->sink_signal = SIGNAL_TYPE_DISPLAY_PORT_MST;
/*
 * dc_link_dp_sync_lt_begin() - enter synchronized link-training mode.
 *
 * Marks sync LT in progress (DPCD 600h must stay powered during this
 * window) and clears any previously stored preferred training settings
 * and preferred link settings.
 *
 * NOTE(review): the return statement is missing from this extract.
 */
3006 bool dc_link_dp_sync_lt_begin(struct dc_link *link)
3008 /* Begin Sync LT. During this time,
3009 * DPCD:600h must not be powered down.
3011 link->sync_lt_in_progress = true;
3013 /*Clear any existing preferred settings.*/
3014 memset(&link->preferred_training_settings, 0,
3015 sizeof(struct dc_link_training_overrides));
3016 memset(&link->preferred_link_setting, 0,
3017 sizeof(struct dc_link_settings));
/*
 * dc_link_dp_sync_lt_attempt() - one synchronized link-training attempt
 * with explicit overrides.
 *
 * Decides/overrides training settings, applies the MST-mode override,
 * cycles the PHY with the pool's DP clock source, sets FEC readiness
 * (8b/10b only) and panel mode (honoring the alternate-scrambler-reset
 * override), then runs clock recovery and channel equalization directly.
 * Sync LT deliberately skips TRAINING_PATTERN_SET:0 (video pattern).
 *
 * Fix: restore "&lt_settings" in four call sites where HTML-entity
 * decoding corrupted the address-of expression to "<_settings".
 *
 * NOTE(review): call-argument lines, braces and #endif lines are missing
 * from this extract; kept verbatim otherwise.
 */
3022 enum link_training_result dc_link_dp_sync_lt_attempt(
3023 struct dc_link *link,
3024 struct dc_link_settings *link_settings,
3025 struct dc_link_training_overrides *lt_overrides)
3027 struct link_training_settings lt_settings = {0};
3028 enum link_training_result lt_status = LINK_TRAINING_SUCCESS;
3029 enum dp_panel_mode panel_mode = DP_PANEL_MODE_DEFAULT;
3030 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_EXTERNAL;
3031 bool fec_enable = false;
3033 dp_decide_training_settings(
3037 override_training_settings(
3041 /* Setup MST Mode */
3042 if (lt_overrides->mst_enable)
3043 set_dp_mst_mode(link, *lt_overrides->mst_enable);
3046 dp_disable_link_phy(link, link->connector_signal);
3049 dp_cs_id = get_clock_source_id(link);
3050 dp_enable_link_phy(link, link->connector_signal,
3051 dp_cs_id, link_settings);
3053 /* Set FEC enable */
3054 #if defined(CONFIG_DRM_AMD_DC_DCN)
3055 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING) {
3057 fec_enable = lt_overrides->fec_enable && *lt_overrides->fec_enable;
3058 dp_set_fec_ready(link, fec_enable);
3059 #if defined(CONFIG_DRM_AMD_DC_DCN)
3063 if (lt_overrides->alternate_scrambler_reset) {
3064 if (*lt_overrides->alternate_scrambler_reset)
3065 panel_mode = DP_PANEL_MODE_EDP;
3067 panel_mode = DP_PANEL_MODE_DEFAULT;
3069 panel_mode = dp_get_panel_mode(link);
3071 dp_set_panel_mode(link, panel_mode);
3073 /* Attempt to train with given link training settings */
3074 if (link->ctx->dc->work_arounds.lt_early_cr_pattern)
3075 start_clock_recovery_pattern_early(link, &lt_settings, DPRX);
3077 /* Set link rate, lane count and spread. */
3078 dpcd_set_link_settings(link, &lt_settings);
3080 /* 2. perform link training (set link training done
3081 * to false is done as well)
3083 lt_status = perform_clock_recovery_sequence(link, &lt_settings, DPRX);
3084 if (lt_status == LINK_TRAINING_SUCCESS) {
3085 lt_status = perform_channel_equalization_sequence(link,
3090 /* 3. Sync LT must skip TRAINING_PATTERN_SET:0 (video pattern)*/
3091 /* 4. print status message*/
3092 print_status_message(link, &lt_settings, lt_status);
/*
 * dc_link_dp_sync_lt_end() - leave synchronized link-training mode.
 *
 * Optionally shuts down the PHY (@link_down), un-readying FEC first for
 * 8b/10b links, while leaving the DP receiver (DPCD 600h) powered.
 * Always clears the sync_lt_in_progress flag.
 *
 * NOTE(review): #else/#endif lines and the return statement are missing
 * from this extract; kept verbatim.
 */
3097 bool dc_link_dp_sync_lt_end(struct dc_link *link, bool link_down)
3099 /* If input parameter is set, shut down phy.
3100 * Still shouldn't turn off dp_receiver (DPCD:600h)
3102 if (link_down == true) {
3103 #if defined(CONFIG_DRM_AMD_DC_DCN)
3104 struct dc_link_settings link_settings = link->cur_link_settings;
3106 dp_disable_link_phy(link, link->connector_signal);
3107 #if defined(CONFIG_DRM_AMD_DC_DCN)
3108 if (dp_get_link_encoding_format(&link_settings) == DP_8b_10b_ENCODING)
3110 dp_set_fec_ready(link, false);
3113 link->sync_lt_in_progress = false;
3117 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * get_lttpr_max_link_rate() - max link rate supported by the LTTPR chain.
 *
 * Starts from the LTTPR-reported 8b/10b max rate and upgrades it to the
 * highest 128b/132b UHBR rate (20 > 13.5 > 10) the repeater advertises.
 */
3118 static enum dc_link_rate get_lttpr_max_link_rate(struct dc_link *link)
3120 enum dc_link_rate lttpr_max_link_rate = link->dpcd_caps.lttpr_caps.max_link_rate;
3122 if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR20)
3123 lttpr_max_link_rate = LINK_RATE_UHBR20;
3124 else if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR13_5)
3125 lttpr_max_link_rate = LINK_RATE_UHBR13_5;
3126 else if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR10)
3127 lttpr_max_link_rate = LINK_RATE_UHBR10;
3129 return lttpr_max_link_rate;
/*
 * dc_link_dp_get_max_link_enc_cap() - query the link encoder's max caps.
 *
 * Resolves the link encoder (dynamically assigned when DIG mapping is
 * flexible, otherwise the link's fixed encoder) and fills
 * @max_link_enc_cap via its get_max_link_cap hook.  On failure logs an
 * error and reports a conservative 1-lane fallback.
 *
 * NOTE(review): return statements and closing braces are missing from
 * this extract; also max_link_enc_cap->link_rate = 6 is a raw value —
 * presumably LINK_RATE_HIGH2-equivalent; confirm against the enum.
 */
3133 bool dc_link_dp_get_max_link_enc_cap(const struct dc_link *link, struct dc_link_settings *max_link_enc_cap)
3135 struct link_encoder *link_enc = NULL;
3137 if (!max_link_enc_cap) {
3138 DC_LOG_ERROR("%s: Could not return max link encoder caps", __func__);
3142 /* Links supporting dynamically assigned link encoder will be assigned next
3143 * available encoder if one not already assigned.
3145 if (link->is_dig_mapping_flexible &&
3146 link->dc->res_pool->funcs->link_encs_assign) {
3147 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link)
3148 if (link_enc == NULL)
3149 link_enc = link_enc_cfg_get_next_avail_link_enc(link->ctx->dc);
3151 link_enc = link->link_enc;
3154 if (link_enc && link_enc->funcs->get_max_link_cap) {
3155 link_enc->funcs->get_max_link_cap(link_enc, max_link_enc_cap);
3159 DC_LOG_ERROR("%s: Max link encoder caps unknown", __func__);
3160 max_link_enc_cap->lane_count = 1;
3161 max_link_enc_cap->link_rate = 6;
/*
 * get_max_link_cap() - compute the effective max link settings.
 *
 * Starts from the link encoder's max caps (demoting UHBR rates to HBR3
 * when no HPO DP link encoder exists), then clamps lane count, rate and
 * spread by the sink's reported caps, and finally by the LTTPR chain's
 * caps when repeaters are present (repeaters do not snoop DPRX cap
 * addresses, per DP spec 3.6.3).
 *
 * NOTE(review): encoder-resolution fallback lines and closing braces are
 * missing from this extract; kept verbatim.
 */
3165 static struct dc_link_settings get_max_link_cap(struct dc_link *link)
3167 struct dc_link_settings max_link_cap = {0};
3168 #if defined(CONFIG_DRM_AMD_DC_DCN)
3169 enum dc_link_rate lttpr_max_link_rate;
3171 struct link_encoder *link_enc = NULL;
3173 /* Links supporting dynamically assigned link encoder will be assigned next
3174 * available encoder if one not already assigned.
3176 if (link->is_dig_mapping_flexible &&
3177 link->dc->res_pool->funcs->link_encs_assign) {
3178 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
3179 if (link_enc == NULL)
3180 link_enc = link_enc_cfg_get_next_avail_link_enc(link->ctx->dc);
3182 link_enc = link->link_enc;
3185 /* get max link encoder capability */
3187 link_enc->funcs->get_max_link_cap(link_enc, &max_link_cap);
3188 #if defined(CONFIG_DRM_AMD_DC_DCN)
3189 if (max_link_cap.link_rate >= LINK_RATE_UHBR10 &&
3190 !link->hpo_dp_link_enc)
3191 max_link_cap.link_rate = LINK_RATE_HIGH3;
3194 /* Lower link settings based on sink's link cap */
3195 if (link->reported_link_cap.lane_count < max_link_cap.lane_count)
3196 max_link_cap.lane_count =
3197 link->reported_link_cap.lane_count;
3198 if (link->reported_link_cap.link_rate < max_link_cap.link_rate)
3199 max_link_cap.link_rate =
3200 link->reported_link_cap.link_rate;
3201 if (link->reported_link_cap.link_spread <
3202 max_link_cap.link_spread)
3203 max_link_cap.link_spread =
3204 link->reported_link_cap.link_spread;
3206 * account for lttpr repeaters cap
3207 * notes: repeaters do not snoop in the DPRX Capabilities addresses (3.6.3).
3209 if (link->lttpr_mode != LTTPR_MODE_NON_LTTPR) {
3210 if (link->dpcd_caps.lttpr_caps.max_lane_count < max_link_cap.lane_count)
3211 max_link_cap.lane_count = link->dpcd_caps.lttpr_caps.max_lane_count;
3213 #if defined(CONFIG_DRM_AMD_DC_DCN)
3214 lttpr_max_link_rate = get_lttpr_max_link_rate(link);
3216 if (lttpr_max_link_rate < max_link_cap.link_rate)
3217 max_link_cap.link_rate = lttpr_max_link_rate;
3219 if (link->dpcd_caps.lttpr_caps.max_link_rate < max_link_cap.link_rate)
3220 max_link_cap.link_rate = link->dpcd_caps.lttpr_caps.max_link_rate;
3223 DC_LOG_HW_LINK_TRAINING("%s\n Training with LTTPR, max_lane count %d max_link rate %d \n",
3225 max_link_cap.lane_count,
3226 max_link_cap.link_rate);
3228 return max_link_cap;
/*
 * read_hpd_rx_irq_data() - read the HPD RX IRQ status block from DPCD.
 *
 * Pre-DP1.4 sinks: read the classic block starting at DP_SINK_COUNT
 * (6 bytes rather than 16, to survive AUX_DEFER per CTS 4.3.2.1-3.2.4).
 * DP1.4+ sinks: one bulk read of the ESI range (2002h) and scatter the
 * bytes into the union fields.
 *
 * NOTE(review): "static enum dc_status retval;" makes retval persist
 * across calls — looks unintentional for a pure out-parameter helper;
 * confirm against upstream before relying on it.  Several lines (read
 * address arguments, closing braces, final return) are missing from this
 * extract; kept verbatim.
 */
3231 static enum dc_status read_hpd_rx_irq_data(
3232 struct dc_link *link,
3233 union hpd_irq_data *irq_data)
3235 static enum dc_status retval;
3237 /* The HW reads 16 bytes from 200h on HPD,
3238 * but if we get an AUX_DEFER, the HW cannot retry
3239 * and this causes the CTS tests 4.3.2.1 - 3.2.4 to
3240 * fail, so we now explicitly read 6 bytes which is
3241 * the req from the above mentioned test cases.
3243 * For DP 1.4 we need to read those from 2002h range.
3245 if (link->dpcd_caps.dpcd_rev.raw < DPCD_REV_14)
3246 retval = core_link_read_dpcd(
3250 sizeof(union hpd_irq_data));
3252 /* Read 14 bytes in a single read and then copy only the required fields.
3253 * This is more efficient than doing it in two separate AUX reads. */
3255 uint8_t tmp[DP_SINK_STATUS_ESI - DP_SINK_COUNT_ESI + 1];
3257 retval = core_link_read_dpcd(
3263 if (retval != DC_OK)
3266 irq_data->bytes.sink_cnt.raw = tmp[DP_SINK_COUNT_ESI - DP_SINK_COUNT_ESI];
3267 irq_data->bytes.device_service_irq.raw = tmp[DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0 - DP_SINK_COUNT_ESI];
3268 irq_data->bytes.lane01_status.raw = tmp[DP_LANE0_1_STATUS_ESI - DP_SINK_COUNT_ESI];
3269 irq_data->bytes.lane23_status.raw = tmp[DP_LANE2_3_STATUS_ESI - DP_SINK_COUNT_ESI];
3270 irq_data->bytes.lane_status_updated.raw = tmp[DP_LANE_ALIGN_STATUS_UPDATED_ESI - DP_SINK_COUNT_ESI];
3271 irq_data->bytes.sink_status.raw = tmp[DP_SINK_STATUS_ESI - DP_SINK_COUNT_ESI];
/*
 * hpd_rx_irq_check_link_loss_status() - decide whether an HPD RX IRQ
 * indicates link loss requiring retraining.
 *
 * Scans per-lane status nibbles for dropped CR / channel-EQ / symbol
 * lock, also checks INTERLANE_ALIGN_DONE, and then verifies the sink is
 * in the D0 power state (a sink not in D0 is not treated as link loss).
 *
 * NOTE(review): return-path lines, brace closures and the statement that
 * sets return_code true are missing from this extract; kept verbatim.
 */
3277 bool hpd_rx_irq_check_link_loss_status(
3278 struct dc_link *link,
3279 union hpd_irq_data *hpd_irq_dpcd_data)
3281 uint8_t irq_reg_rx_power_state = 0;
3282 enum dc_status dpcd_result = DC_ERROR_UNEXPECTED;
3283 union lane_status lane_status;
3285 bool sink_status_changed;
3288 sink_status_changed = false;
3289 return_code = false;
3291 if (link->cur_link_settings.lane_count == 0)
3294 /*1. Check that Link Status changed, before re-training.*/
3296 /*parse lane status*/
3297 for (lane = 0; lane < link->cur_link_settings.lane_count; lane++) {
3298 /* check status of lanes 0,1
3299 * changed DpcdAddress_Lane01Status (0x202)
3301 lane_status.raw = get_nibble_at_index(
3302 &hpd_irq_dpcd_data->bytes.lane01_status.raw,
3305 if (!lane_status.bits.CHANNEL_EQ_DONE_0 ||
3306 !lane_status.bits.CR_DONE_0 ||
3307 !lane_status.bits.SYMBOL_LOCKED_0) {
3308 /* if one of the channel equalization, clock
3309 * recovery or symbol lock is dropped
3310 * consider it as (link has been
3311 * dropped) dp sink status has changed
3313 sink_status_changed = true;
3318 /* Check interlane align.*/
3319 if (sink_status_changed ||
3320 !hpd_irq_dpcd_data->bytes.lane_status_updated.bits.INTERLANE_ALIGN_DONE) {
3322 DC_LOG_HW_HPD_IRQ("%s: Link Status changed.\n", __func__);
3326 /*2. Check that we can handle interrupt: Not in FS DOS,
3327 * Not in "Display Timeout" state, Link is trained.
3329 dpcd_result = core_link_read_dpcd(link,
3331 &irq_reg_rx_power_state,
3332 sizeof(irq_reg_rx_power_state));
3334 if (dpcd_result != DC_OK) {
3335 DC_LOG_HW_HPD_IRQ("%s: DPCD read failed to obtain power state.\n",
3338 if (irq_reg_rx_power_state != DP_SET_POWER_D0)
3339 return_code = false;
/* Determine the physically achievable link capability by training the link
 * from the highest common (sink/ASIC) setting downward, storing the first
 * setting that trains successfully into link->verified_link_cap.
 * Falls back to 1-lane RBR when every setting fails.
 * NOTE(review): this chunk is missing lines from extraction; code kept as-is.
 */
3346 bool dp_verify_link_cap(
3347 struct dc_link *link,
3348 struct dc_link_settings *known_limit_link_setting,
3351 struct dc_link_settings max_link_cap = {0};
3352 struct dc_link_settings cur_link_setting = {0};
3353 struct dc_link_settings *cur = &cur_link_setting;
3354 struct dc_link_settings initial_link_settings = {0};
3356 bool skip_link_training;
3357 bool skip_video_pattern;
3358 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_EXTERNAL;
3359 enum link_training_result status;
3360 union hpd_irq_data irq_data;
3362 /* link training starts with the maximum common settings
3363 * supported by both sink and ASIC.
3365 max_link_cap = get_max_link_cap(link);
3366 initial_link_settings = get_common_supported_link_settings(
3367 *known_limit_link_setting,
3370 /* Accept reported capabilities if link supports flexible encoder mapping or encoder already in use. */
3371 if (link->dc->debug.skip_detection_link_training ||
3372 link->is_dig_mapping_flexible) {
3373 /* TODO - should we check link encoder's max link caps here?
3374 * How do we know which link encoder to check from?
3376 link->verified_link_cap = *known_limit_link_setting;
3378 } else if (link->link_enc && link->dc->res_pool->funcs->link_encs_assign &&
3379 !link_enc_cfg_is_link_enc_avail(link->ctx->dc, link->link_enc->preferred_engine, link)) {
3380 link->verified_link_cap = initial_link_settings;
3384 memset(&irq_data, 0, sizeof(irq_data));
3386 skip_link_training = false;
3388 /* Grant extended timeout request */
/* Bit 7 of max_ext_timeout is the grant flag written back to the LTTPR. */
3389 if ((link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) && (link->dpcd_caps.lttpr_caps.max_ext_timeout > 0)) {
3390 uint8_t grant = link->dpcd_caps.lttpr_caps.max_ext_timeout & 0x80;
3392 core_link_write_dpcd(link, DP_PHY_REPEATER_EXTENDED_WAIT_TIMEOUT, &grant, sizeof(grant));
3395 #if defined(CONFIG_DRM_AMD_DC_DCN)
3396 if (dp_get_link_encoding_format(&link->cur_link_settings) == DP_128b_132b_ENCODING)
3397 reset_dp_hpo_stream_encoders_for_link(link);
3399 /* TODO implement override and monitor patch later */
3401 /* try to train the link from high to low to
3402 * find the physical link capability
3404 /* disable PHY done possible by BIOS, will be done by driver itself */
3405 dp_disable_link_phy(link, link->connector_signal);
3407 dp_cs_id = get_clock_source_id(link);
3409 cur_link_setting = initial_link_settings;
3411 /* Temporary Renoir-specific workaround for SWDEV-215184;
3412 * PHY will sometimes be in bad state on hotplugging display from certain USB-C dongle,
3413 * so add extra cycle of enabling and disabling the PHY before first link training.
3415 if (link->link_enc && link->link_enc->features.flags.bits.DP_IS_USB_C &&
3416 link->dc->debug.usbc_combo_phy_reset_wa) {
3417 dp_enable_link_phy(link, link->connector_signal, dp_cs_id, cur);
3418 dp_disable_link_phy(link, link->connector_signal);
/* The video pattern phase is only exercised at the lowest (RBR) rate. */
3422 skip_video_pattern = true;
3424 if (cur->link_rate == LINK_RATE_LOW)
3425 skip_video_pattern = false;
3429 link->connector_signal,
3434 if (skip_link_training)
3437 status = dc_link_dp_perform_link_training(
3440 skip_video_pattern);
3441 if (status == LINK_TRAINING_SUCCESS)
3448 link->verified_link_cap = *cur;
/* Double-check via HPD-RX IRQ data that the freshly trained link did not
 * immediately drop.
 */
3450 if (read_hpd_rx_irq_data(link, &irq_data) == DC_OK)
3451 if (hpd_rx_irq_check_link_loss_status(
3456 /* always disable the link before trying another
3457 * setting or before returning we'll enable it later
3458 * based on the actual mode we're driving
3460 dp_disable_link_phy(link, link->connector_signal);
3461 } while (!success && decide_fallback_link_setting(link,
3462 initial_link_settings, cur, status));
3464 /* Link Training failed for all Link Settings
3465 * (Lane Count is still unknown)
3468 /* If all LT fails for all settings,
3469 * set verified = failed safe (1 lane low)
3471 link->verified_link_cap.lane_count = LANE_COUNT_ONE;
3472 link->verified_link_cap.link_rate = LINK_RATE_LOW;
3474 link->verified_link_cap.link_spread =
3475 LINK_SPREAD_DISABLED;
/* Retry wrapper around dp_verify_link_cap(): re-detects the sink and retries
 * verification up to 'attempts' times; success requires a verify pass with
 * zero training failures. A vanished sink gets the 1-lane RBR fail-safe cap.
 * NOTE(review): this chunk is missing lines from extraction; code kept as-is.
 */
3482 bool dp_verify_link_cap_with_retries(
3483 struct dc_link *link,
3484 struct dc_link_settings *known_limit_link_setting,
3488 bool success = false;
3490 for (i = 0; i < attempts; i++) {
3492 enum dc_connection_type type = dc_connection_none;
/* Start each attempt from a clean, unknown capability. */
3494 memset(&link->verified_link_cap, 0,
3495 sizeof(struct dc_link_settings))
3496 if (!dc_link_detect_sink(link, &type) || type == dc_connection_none) {
3497 link->verified_link_cap.lane_count = LANE_COUNT_ONE;
3498 link->verified_link_cap.link_rate = LINK_RATE_LOW;
3499 link->verified_link_cap.link_spread = LINK_SPREAD_DISABLED;
3501 } else if (dp_verify_link_cap(link,
3502 known_limit_link_setting,
3503 &fail_count) && fail_count == 0) {
/* Establish verified_link_cap for an MST link: 8b/10b links take the common
 * subset of reported and max caps without training; 128b/132b links go
 * through the full retried verification path.
 * NOTE(review): this chunk is missing lines from extraction; code kept as-is.
 */
3512 bool dp_verify_mst_link_cap(
3513 struct dc_link *link)
3515 struct dc_link_settings max_link_cap = {0};
3517 if (dp_get_link_encoding_format(&link->reported_link_cap) ==
3518 DP_8b_10b_ENCODING) {
3519 max_link_cap = get_max_link_cap(link);
3520 link->verified_link_cap = get_common_supported_link_settings(
3521 link->reported_link_cap,
3524 #if defined(CONFIG_DRM_AMD_DC_DCN)
3525 else if (dp_get_link_encoding_format(&link->reported_link_cap) ==
3526 DP_128b_132b_ENCODING) {
3527 dp_verify_link_cap_with_retries(link,
3528 &link->reported_link_cap,
3529 LINK_TRAINING_MAX_VERIFY_RETRY);
/* Return the element-wise minimum of two link settings (lane count and link
 * rate), with spread disabled, then clamp a nonstandard link rate down to the
 * nearest standard rate below it.
 * NOTE(review): this chunk is missing lines from extraction; code kept as-is.
 */
3535 static struct dc_link_settings get_common_supported_link_settings(
3536 struct dc_link_settings link_setting_a,
3537 struct dc_link_settings link_setting_b)
3539 struct dc_link_settings link_settings = {0};
3541 link_settings.lane_count =
3542 (link_setting_a.lane_count <=
3543 link_setting_b.lane_count) ?
3544 link_setting_a.lane_count :
3545 link_setting_b.lane_count;
3546 link_settings.link_rate =
3547 (link_setting_a.link_rate <=
3548 link_setting_b.link_rate) ?
3549 link_setting_a.link_rate :
3550 link_setting_b.link_rate;
3551 link_settings.link_spread = LINK_SPREAD_DISABLED;
3553 /* in DP compliance test, DPR-120 may have
3554 * a random value in its MAX_LINK_BW dpcd field.
3555 * We map it to the maximum supported link rate that
3556 * is smaller than MAX_LINK_BW in this case.
/* DCN builds additionally know the UHBR (DP 2.x) rates and clamp within
 * that larger set; non-DCN builds top out at HBR3.
 */
3558 #if defined(CONFIG_DRM_AMD_DC_DCN)
3559 if (link_settings.link_rate > LINK_RATE_UHBR20) {
3560 link_settings.link_rate = LINK_RATE_UHBR20;
3561 } else if (link_settings.link_rate < LINK_RATE_UHBR20 &&
3562 link_settings.link_rate > LINK_RATE_UHBR13_5) {
3563 link_settings.link_rate = LINK_RATE_UHBR13_5;
3564 } else if (link_settings.link_rate < LINK_RATE_UHBR10 &&
3565 link_settings.link_rate > LINK_RATE_HIGH3) {
3567 if (link_settings.link_rate > LINK_RATE_HIGH3) {
3569 link_settings.link_rate = LINK_RATE_HIGH3;
3570 } else if (link_settings.link_rate < LINK_RATE_HIGH3
3571 && link_settings.link_rate > LINK_RATE_HIGH2) {
3572 link_settings.link_rate = LINK_RATE_HIGH2;
3573 } else if (link_settings.link_rate < LINK_RATE_HIGH2
3574 && link_settings.link_rate > LINK_RATE_HIGH) {
3575 link_settings.link_rate = LINK_RATE_HIGH;
3576 } else if (link_settings.link_rate < LINK_RATE_HIGH
3577 && link_settings.link_rate > LINK_RATE_LOW) {
3578 link_settings.link_rate = LINK_RATE_LOW;
3579 } else if (link_settings.link_rate < LINK_RATE_LOW) {
3580 link_settings.link_rate = LINK_RATE_UNKNOWN;
3583 return link_settings;
3586 static inline bool reached_minimum_lane_count(enum dc_lane_count lane_count)
3588 return lane_count <= LANE_COUNT_ONE;
3591 static inline bool reached_minimum_link_rate(enum dc_link_rate link_rate)
3593 return link_rate <= LINK_RATE_LOW;
3596 static enum dc_lane_count reduce_lane_count(enum dc_lane_count lane_count)
3598 switch (lane_count) {
3599 case LANE_COUNT_FOUR:
3600 return LANE_COUNT_TWO;
3601 case LANE_COUNT_TWO:
3602 return LANE_COUNT_ONE;
3603 case LANE_COUNT_ONE:
3604 return LANE_COUNT_UNKNOWN;
3606 return LANE_COUNT_UNKNOWN;
3610 static enum dc_link_rate reduce_link_rate(enum dc_link_rate link_rate)
3612 switch (link_rate) {
3613 #if defined(CONFIG_DRM_AMD_DC_DCN)
3614 case LINK_RATE_UHBR20:
3615 return LINK_RATE_UHBR13_5;
3616 case LINK_RATE_UHBR13_5:
3617 return LINK_RATE_UHBR10;
3618 case LINK_RATE_UHBR10:
3619 return LINK_RATE_HIGH3;
3621 case LINK_RATE_HIGH3:
3622 return LINK_RATE_HIGH2;
3623 case LINK_RATE_HIGH2:
3624 return LINK_RATE_HIGH;
3625 case LINK_RATE_HIGH:
3626 return LINK_RATE_LOW;
3628 return LINK_RATE_UNKNOWN;
3630 return LINK_RATE_UNKNOWN;
3634 static enum dc_lane_count increase_lane_count(enum dc_lane_count lane_count)
3636 switch (lane_count) {
3637 case LANE_COUNT_ONE:
3638 return LANE_COUNT_TWO;
3639 case LANE_COUNT_TWO:
3640 return LANE_COUNT_FOUR;
3642 return LANE_COUNT_UNKNOWN;
3646 static enum dc_link_rate increase_link_rate(enum dc_link_rate link_rate)
3648 switch (link_rate) {
3650 return LINK_RATE_HIGH;
3651 case LINK_RATE_HIGH:
3652 return LINK_RATE_HIGH2;
3653 case LINK_RATE_HIGH2:
3654 return LINK_RATE_HIGH3;
3655 #if defined(CONFIG_DRM_AMD_DC_DCN)
3656 case LINK_RATE_HIGH3:
3657 return LINK_RATE_UHBR10;
3658 case LINK_RATE_UHBR10:
3659 return LINK_RATE_UHBR13_5;
3660 case LINK_RATE_UHBR13_5:
3661 return LINK_RATE_UHBR20;
3664 return LINK_RATE_UNKNOWN;
3668 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* DP 2.x style fallback: walk the dp_lt_fallbacks table (ordered by total
 * bandwidth) from the entry matching *cur to the next entry that still fits
 * within *max; write it into *cur. Returns true when a next entry was found.
 * NOTE(review): this chunk is missing lines from extraction; code kept as-is.
 */
3669 static bool decide_fallback_link_setting_max_bw_policy(
3670 const struct dc_link_settings *max,
3671 struct dc_link_settings *cur)
3673 uint8_t cur_idx = 0, next_idx;
3676 while (cur_idx < ARRAY_SIZE(dp_lt_fallbacks))
3677 /* find current index */
3678 if (dp_lt_fallbacks[cur_idx].lane_count == cur->lane_count &&
3679 dp_lt_fallbacks[cur_idx].link_rate == cur->link_rate)
3684 next_idx = cur_idx + 1;
3686 while (next_idx < ARRAY_SIZE(dp_lt_fallbacks))
3687 /* find next index */
3688 if (dp_lt_fallbacks[next_idx].lane_count <= max->lane_count &&
3689 dp_lt_fallbacks[next_idx].link_rate <= max->link_rate)
3694 if (next_idx < ARRAY_SIZE(dp_lt_fallbacks)) {
3695 cur->lane_count = dp_lt_fallbacks[next_idx].lane_count;
3696 cur->link_rate = dp_lt_fallbacks[next_idx].link_rate;
3705 * function: decide the next link rate / lane count fallback to try,
3706 * based on the current link setting and the last link training result
3708 * true - a further fallback link setting was chosen
3709 * false - has reached the minimum setting
3710 * and no further fallback could be done
/* Pick the next fallback link setting after a failed training attempt.
 * 128b/132b links (or when forced by debug flag) use the max-bandwidth-table
 * policy; 8b/10b links reduce rate or lane count depending on which training
 * phase failed. Mutates *current_link_setting in place.
 * NOTE(review): this chunk is missing lines from extraction; code kept as-is.
 */
3712 static bool decide_fallback_link_setting(
3713 struct dc_link *link,
3714 struct dc_link_settings initial_link_settings,
3715 struct dc_link_settings *current_link_setting,
3716 enum link_training_result training_result)
3718 if (!current_link_setting)
3720 #if defined(CONFIG_DRM_AMD_DC_DCN)
3721 if (dp_get_link_encoding_format(&initial_link_settings) == DP_128b_132b_ENCODING ||
3722 link->dc->debug.force_dp2_lt_fallback_method)
3723 return decide_fallback_link_setting_max_bw_policy(&initial_link_settings,
3724 current_link_setting)
3727 switch (training_result) {
/* Clock-recovery failures: prefer dropping the link rate first; once at
 * minimum rate, restore the initial rate and drop the lane count instead.
 */
3728 case LINK_TRAINING_CR_FAIL_LANE0:
3729 case LINK_TRAINING_CR_FAIL_LANE1:
3730 case LINK_TRAINING_CR_FAIL_LANE23:
3731 case LINK_TRAINING_LQA_FAIL:
3733 if (!reached_minimum_link_rate
3734 (current_link_setting->link_rate)) {
3735 current_link_setting->link_rate =
3737 current_link_setting->link_rate);
3738 } else if (!reached_minimum_lane_count
3739 (current_link_setting->lane_count)) {
3740 current_link_setting->link_rate =
3741 initial_link_settings.link_rate;
/* The failing lane tells us how far the lane count can be cut. */
3742 if (training_result == LINK_TRAINING_CR_FAIL_LANE0)
3744 else if (training_result == LINK_TRAINING_CR_FAIL_LANE1)
3745 current_link_setting->lane_count =
3747 else if (training_result ==
3748 LINK_TRAINING_CR_FAIL_LANE23)
3749 current_link_setting->lane_count =
3752 current_link_setting->lane_count =
3754 current_link_setting->lane_count);
/* EQ-phase failure with CR still good: drop lanes first, then rate. */
3760 case LINK_TRAINING_EQ_FAIL_EQ:
3762 if (!reached_minimum_lane_count
3763 (current_link_setting->lane_count)) {
3764 current_link_setting->lane_count =
3766 current_link_setting->lane_count);
3767 } else if (!reached_minimum_link_rate
3768 (current_link_setting->link_rate)) {
3769 current_link_setting->link_rate =
3771 current_link_setting->link_rate);
/* CR lost during EQ phase: only a lower link rate can help. */
3777 case LINK_TRAINING_EQ_FAIL_CR:
3779 if (!reached_minimum_link_rate
3780 (current_link_setting->link_rate)) {
3781 current_link_setting->link_rate =
3783 current_link_setting->link_rate);
/* Validate that a CRTC timing can be driven over this DP link: reject
 * YCbCr420 without VSC SDP support, always accept the 640x480@25.175MHz
 * fail-safe mode, otherwise compare required vs available bandwidth.
 * NOTE(review): this chunk is missing lines from extraction; code kept as-is.
 */
3795 bool dp_validate_mode_timing(
3796 struct dc_link *link,
3797 const struct dc_crtc_timing *timing)
3802 const struct dc_link_settings *link_setting;
3804 /* According to spec, VSC SDP should be used if pixel format is YCbCr420 */
3805 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420 &&
3806 !link->dpcd_caps.dprx_feature.bits.VSC_SDP_COLORIMETRY_SUPPORTED &&
3807 dal_graphics_object_id_get_connector_id(link->link_id) != CONNECTOR_ID_VIRTUAL)
3810 /*always DP fail safe mode*/
3811 if ((timing->pix_clk_100hz / 10) == (uint32_t) 25175 &&
3812 timing->h_addressable == (uint32_t) 640 &&
3813 timing->v_addressable == (uint32_t) 480)
3816 link_setting = dc_link_get_link_cap(link);
3818 /* TODO: DYNAMIC_VALIDATION needs to be implemented */
3819 /*if (flags.DYNAMIC_VALIDATION == 1 &&
3820 link->verified_link_cap.lane_count != LANE_COUNT_UNKNOWN)
3821 link_setting = &link->verified_link_cap;
3824 req_bw = dc_bandwidth_in_kbps_from_timing(timing);
3825 max_bw = dc_link_bandwidth_kbps(link, link_setting);
3827 if (req_bw <= max_bw) {
3828 /* remember the biggest mode here, during
3829 * initial link training (to get
3830 * verified_link_cap), LS sends event about
3831 * cannot train at reported cap to upper
3832 * layer and upper layer will re-enumerate modes.
3833 * this is not necessary if the lower
3834 * verified_link_cap is enough to drive
3837 /* TODO: DYNAMIC_VALIDATION needs to be implemented */
3838 /* if (flags.DYNAMIC_VALIDATION == 1)
3839 dpsst->max_req_bw_for_verified_linkcap = dal_max(
3840 dpsst->max_req_bw_for_verified_linkcap, req_bw); */
3846 static bool decide_dp_link_settings(struct dc_link *link, struct dc_link_settings *link_setting, uint32_t req_bw)
3848 struct dc_link_settings initial_link_setting = {
3849 LANE_COUNT_ONE, LINK_RATE_LOW, LINK_SPREAD_DISABLED, false, 0};
3850 struct dc_link_settings current_link_setting =
3851 initial_link_setting;
3854 if (req_bw > dc_link_bandwidth_kbps(link, &link->verified_link_cap))
3857 /* search for the minimum link setting that:
3858 * 1. is supported according to the link training result
3859 * 2. could support the b/w requested by the timing
3861 while (current_link_setting.link_rate <=
3862 link->verified_link_cap.link_rate) {
3863 link_bw = dc_link_bandwidth_kbps(
3865 ¤t_link_setting);
3866 if (req_bw <= link_bw) {
3867 *link_setting = current_link_setting;
3871 if (current_link_setting.lane_count <
3872 link->verified_link_cap.lane_count) {
3873 current_link_setting.lane_count =
3874 increase_lane_count(
3875 current_link_setting.lane_count);
3877 current_link_setting.link_rate =
3879 current_link_setting.link_rate);
3880 current_link_setting.lane_count =
3881 initial_link_setting.lane_count;
3888 bool decide_edp_link_settings(struct dc_link *link, struct dc_link_settings *link_setting, uint32_t req_bw)
3890 struct dc_link_settings initial_link_setting;
3891 struct dc_link_settings current_link_setting;
3895 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
3896 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
3898 if (link->dpcd_caps.dpcd_rev.raw < DPCD_REV_13 ||
3899 link->dpcd_caps.edp_supported_link_rates_count == 0) {
3900 *link_setting = link->verified_link_cap;
3904 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
3905 initial_link_setting.lane_count = LANE_COUNT_ONE;
3906 initial_link_setting.link_rate = link->dpcd_caps.edp_supported_link_rates[0];
3907 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
3908 initial_link_setting.use_link_rate_set = true;
3909 initial_link_setting.link_rate_set = 0;
3910 current_link_setting = initial_link_setting;
3912 /* search for the minimum link setting that:
3913 * 1. is supported according to the link training result
3914 * 2. could support the b/w requested by the timing
3916 while (current_link_setting.link_rate <=
3917 link->verified_link_cap.link_rate) {
3918 link_bw = dc_link_bandwidth_kbps(
3920 ¤t_link_setting);
3921 if (req_bw <= link_bw) {
3922 *link_setting = current_link_setting;
3926 if (current_link_setting.lane_count <
3927 link->verified_link_cap.lane_count) {
3928 current_link_setting.lane_count =
3929 increase_lane_count(
3930 current_link_setting.lane_count);
3932 if (current_link_setting.link_rate_set < link->dpcd_caps.edp_supported_link_rates_count) {
3933 current_link_setting.link_rate_set++;
3934 current_link_setting.link_rate =
3935 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
3936 current_link_setting.lane_count =
3937 initial_link_setting.lane_count;
3945 static bool decide_edp_link_settings_with_dsc(struct dc_link *link,
3946 struct dc_link_settings *link_setting,
3948 enum dc_link_rate max_link_rate)
3950 struct dc_link_settings initial_link_setting;
3951 struct dc_link_settings current_link_setting;
3954 unsigned int policy = 0;
3956 policy = link->ctx->dc->debug.force_dsc_edp_policy;
3957 if (max_link_rate == LINK_RATE_UNKNOWN)
3958 max_link_rate = link->verified_link_cap.link_rate;
3960 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
3961 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
3963 if ((link->dpcd_caps.dpcd_rev.raw < DPCD_REV_13 ||
3964 link->dpcd_caps.edp_supported_link_rates_count == 0)) {
3965 /* for DSC enabled case, we search for minimum lane count */
3966 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
3967 initial_link_setting.lane_count = LANE_COUNT_ONE;
3968 initial_link_setting.link_rate = LINK_RATE_LOW;
3969 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
3970 initial_link_setting.use_link_rate_set = false;
3971 initial_link_setting.link_rate_set = 0;
3972 current_link_setting = initial_link_setting;
3973 if (req_bw > dc_link_bandwidth_kbps(link, &link->verified_link_cap))
3976 /* search for the minimum link setting that:
3977 * 1. is supported according to the link training result
3978 * 2. could support the b/w requested by the timing
3980 while (current_link_setting.link_rate <=
3982 link_bw = dc_link_bandwidth_kbps(
3984 ¤t_link_setting);
3985 if (req_bw <= link_bw) {
3986 *link_setting = current_link_setting;
3991 if (current_link_setting.link_rate < max_link_rate) {
3992 current_link_setting.link_rate =
3994 current_link_setting.link_rate);
3996 if (current_link_setting.lane_count <
3997 link->verified_link_cap.lane_count) {
3998 current_link_setting.lane_count =
3999 increase_lane_count(
4000 current_link_setting.lane_count);
4001 current_link_setting.link_rate = initial_link_setting.link_rate;
4006 /* minimize link rate */
4007 if (current_link_setting.lane_count <
4008 link->verified_link_cap.lane_count) {
4009 current_link_setting.lane_count =
4010 increase_lane_count(
4011 current_link_setting.lane_count);
4013 current_link_setting.link_rate =
4015 current_link_setting.link_rate);
4016 current_link_setting.lane_count =
4017 initial_link_setting.lane_count;
4024 /* if optimize edp link is supported */
4025 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
4026 initial_link_setting.lane_count = LANE_COUNT_ONE;
4027 initial_link_setting.link_rate = link->dpcd_caps.edp_supported_link_rates[0];
4028 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
4029 initial_link_setting.use_link_rate_set = true;
4030 initial_link_setting.link_rate_set = 0;
4031 current_link_setting = initial_link_setting;
4033 /* search for the minimum link setting that:
4034 * 1. is supported according to the link training result
4035 * 2. could support the b/w requested by the timing
4037 while (current_link_setting.link_rate <=
4039 link_bw = dc_link_bandwidth_kbps(
4041 ¤t_link_setting);
4042 if (req_bw <= link_bw) {
4043 *link_setting = current_link_setting;
4048 if (current_link_setting.link_rate_set <
4049 link->dpcd_caps.edp_supported_link_rates_count
4050 && current_link_setting.link_rate < max_link_rate) {
4051 current_link_setting.link_rate_set++;
4052 current_link_setting.link_rate =
4053 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
4055 if (current_link_setting.lane_count < link->verified_link_cap.lane_count) {
4056 current_link_setting.lane_count =
4057 increase_lane_count(
4058 current_link_setting.lane_count);
4059 current_link_setting.link_rate_set = initial_link_setting.link_rate_set;
4060 current_link_setting.link_rate =
4061 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
4066 /* minimize link rate */
4067 if (current_link_setting.lane_count <
4068 link->verified_link_cap.lane_count) {
4069 current_link_setting.lane_count =
4070 increase_lane_count(
4071 current_link_setting.lane_count);
4073 if (current_link_setting.link_rate_set < link->dpcd_caps.edp_supported_link_rates_count) {
4074 current_link_setting.link_rate_set++;
4075 current_link_setting.link_rate =
4076 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
4077 current_link_setting.lane_count =
4078 initial_link_setting.lane_count;
4087 static bool decide_mst_link_settings(const struct dc_link *link, struct dc_link_settings *link_setting)
4089 *link_setting = link->verified_link_cap;
/* Top-level link-setting decision for a stream: prefer an explicit preferred
 * setting, then dispatch to the MST / eDP(+DSC) / SST-DP deciders; fall back
 * to the verified capability when all deciders fail.
 * NOTE(review): this chunk is missing lines from extraction; code kept as-is.
 */
4093 void decide_link_settings(struct dc_stream_state *stream,
4094 struct dc_link_settings *link_setting)
4096 struct dc_link *link;
4099 req_bw = dc_bandwidth_in_kbps_from_timing(&stream->timing);
4101 link = stream->link;
4103 /* if preferred is specified through AMDDP, use it, if it's enough
4106 if (link->preferred_link_setting.lane_count !=
4107 LANE_COUNT_UNKNOWN &&
4108 link->preferred_link_setting.link_rate !=
4109 LINK_RATE_UNKNOWN) {
4110 *link_setting = link->preferred_link_setting;
4114 /* MST doesn't perform link training for now
4115 * TODO: add MST specific link training routine
4117 if (stream->signal == SIGNAL_TYPE_DISPLAY_PORT_MST) {
4118 if (decide_mst_link_settings(link, link_setting))
4120 } else if (link->connector_signal == SIGNAL_TYPE_EDP) {
4121 /* enable edp link optimization for DSC eDP case */
4122 if (stream->timing.flags.DSC) {
4123 enum dc_link_rate max_link_rate = LINK_RATE_UNKNOWN;
4125 if (link->ctx->dc->debug.force_dsc_edp_policy) {
4126 /* calculate link max link rate cap*/
4127 struct dc_link_settings tmp_link_setting;
4128 struct dc_crtc_timing tmp_timing = stream->timing;
4129 uint32_t orig_req_bw;
/* Derive the rate cap from the bandwidth the timing would need WITHOUT
 * DSC compression.
 */
4131 tmp_link_setting.link_rate = LINK_RATE_UNKNOWN;
4132 tmp_timing.flags.DSC = 0;
4133 orig_req_bw = dc_bandwidth_in_kbps_from_timing(&tmp_timing);
4134 decide_edp_link_settings(link, &tmp_link_setting, orig_req_bw);
4135 max_link_rate = tmp_link_setting.link_rate;
4137 if (decide_edp_link_settings_with_dsc(link, link_setting, req_bw, max_link_rate))
4139 } else if (decide_edp_link_settings(link, link_setting, req_bw))
4141 } else if (decide_dp_link_settings(link, link_setting, req_bw))
/* No decider succeeded — this is unexpected; flag it and use the
 * verified capability as a last resort.
 */
4144 BREAK_TO_DEBUGGER();
4145 ASSERT(link->verified_link_cap.lane_count != LANE_COUNT_UNKNOWN);
4147 *link_setting = link->verified_link_cap;
4150 /*************************Short Pulse IRQ***************************/
4151 bool dc_link_dp_allow_hpd_rx_irq(const struct dc_link *link)
4154 * Don't handle RX IRQ unless one of following is met:
4155 * 1) The link is established (cur_link_settings != unknown)
4156 * 2) We know we're dealing with a branch device, SST or MST
4159 if ((link->cur_link_settings.lane_count != LANE_COUNT_UNKNOWN) ||
4160 is_dp_branch_device(link))
/* Handle the PSR portion of an HPD-RX IRQ: read the sink's PSR config and
 * error status; on CRC/RFB/VSC-SDP errors clear the status bits and bounce
 * PSR off/on; when PSR is cleanly active the IRQ can be treated as handled.
 * NOTE(review): this chunk is missing lines from extraction; code kept as-is.
 */
4166 static bool handle_hpd_irq_psr_sink(struct dc_link *link)
4168 union dpcd_psr_configuration psr_configuration;
4170 if (!link->psr_settings.psr_feature_enabled)
4173 dm_helpers_dp_read_dpcd(
4176 368,/*DpcdAddress_PSR_Enable_Cfg (368 == 0x170)*/
4177 &psr_configuration.raw,
4178 sizeof(psr_configuration.raw));
4180 if (psr_configuration.bits.ENABLE) {
4181 unsigned char dpcdbuf[3] = {0};
4182 union psr_error_status psr_error_status;
4183 union psr_sink_psr_status psr_sink_psr_status;
/* Single burst read covering DPCD 2006h..2008h. */
4185 dm_helpers_dp_read_dpcd(
4188 0x2006, /*DpcdAddress_PSR_Error_Status*/
4189 (unsigned char *) dpcdbuf,
4192 /*DPCD 2006h ERROR STATUS*/
4193 psr_error_status.raw = dpcdbuf[0];
4194 /*DPCD 2008h SINK PANEL SELF REFRESH STATUS*/
4195 psr_sink_psr_status.raw = dpcdbuf[2];
4197 if (psr_error_status.bits.LINK_CRC_ERROR ||
4198 psr_error_status.bits.RFB_STORAGE_ERROR ||
4199 psr_error_status.bits.VSC_SDP_ERROR) {
4202 /* Acknowledge and clear error bits */
4203 dm_helpers_dp_write_dpcd(
4206 8198,/*DpcdAddress_PSR_Error_Status (8198 == 0x2006)*/
4207 &psr_error_status.raw,
4208 sizeof(psr_error_status.raw));
4210 /* PSR error, disable and re-enable PSR */
4211 if (link->psr_settings.psr_allow_active) {
4212 allow_active = false;
4213 dc_link_set_psr_allow_active(link, &allow_active, true, false, NULL);
4214 allow_active = true;
4215 dc_link_set_psr_allow_active(link, &allow_active, true, false, NULL);
4219 } else if (psr_sink_psr_status.bits.SINK_SELF_REFRESH_STATUS ==
4220 PSR_SINK_STATE_ACTIVE_DISPLAY_FROM_SINK_RFB){
4221 /* No error is detected, PSR is active.
4222 * We should return with IRQ_HPD handled without
4223 * checking for loss of sync since PSR would have
4224 * powered down main link.
/* DP compliance: read the lane count and link rate requested by the test
 * equipment from DPCD, adopt them as the verified capability, and retrain.
 * NOTE(review): this chunk is missing lines from extraction; code kept as-is.
 */
4232 static void dp_test_send_link_training(struct dc_link *link)
4234 struct dc_link_settings link_settings = {0};
4236 core_link_read_dpcd(
4239 (unsigned char *)(&link_settings.lane_count),
4241 core_link_read_dpcd(
4244 (unsigned char *)(&link_settings.link_rate),
4247 /* Set preferred link settings */
4248 link->verified_link_cap.lane_count = link_settings.lane_count;
4249 link->verified_link_cap.link_rate = link_settings.link_rate;
4251 dp_retrain_link_dp_test(link, &link_settings, false);
4254 /* TODO Raven hbr2 compliance eye output is unstable
4255 * (toggling on and off) with debugger break
4256 * This causes intermittent PHY automation failure
4257 * Need to look into the root cause */
/* DP compliance: service a PHY-test-pattern request from the sink. Reads the
 * requested pattern and per-lane drive parameters from DPCD, translates them
 * to driver enums/settings, then programs the pattern via
 * dc_link_dp_set_test_pattern().
 * NOTE(review): this chunk is missing lines from extraction; code kept as-is.
 */
4258 static void dp_test_send_phy_test_pattern(struct dc_link *link)
4260 union phy_test_pattern dpcd_test_pattern;
4261 union lane_adjust dpcd_lane_adjustment[2];
4262 unsigned char dpcd_post_cursor_2_adjustment = 0;
4263 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* DCN builds must be able to hold the larger 264-bit custom pattern. */
4264 unsigned char test_pattern_buffer[
4265 (DP_TEST_264BIT_CUSTOM_PATTERN_263_256 -
4266 DP_TEST_264BIT_CUSTOM_PATTERN_7_0)+1] = {0};
4268 unsigned char test_pattern_buffer[
4269 (DP_TEST_80BIT_CUSTOM_PATTERN_79_72 -
4270 DP_TEST_80BIT_CUSTOM_PATTERN_7_0)+1] = {0};
4272 unsigned int test_pattern_size = 0;
4273 enum dp_test_pattern test_pattern;
4274 union lane_adjust dpcd_lane_adjust;
4276 struct link_training_settings link_training_settings;
4278 dpcd_test_pattern.raw = 0;
4279 memset(dpcd_lane_adjustment, 0, sizeof(dpcd_lane_adjustment));
4280 memset(&link_training_settings, 0, sizeof(link_training_settings));
4282 /* get phy test pattern and pattern parameters from DP receiver */
4283 core_link_read_dpcd(
4285 DP_PHY_TEST_PATTERN,
4286 &dpcd_test_pattern.raw,
4287 sizeof(dpcd_test_pattern));
4288 core_link_read_dpcd(
4290 DP_ADJUST_REQUEST_LANE0_1,
4291 &dpcd_lane_adjustment[0].raw,
4292 sizeof(dpcd_lane_adjustment));
/* Vendor-specific LTTPR workaround for fixed-VS external display paths. */
4294 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
4295 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
4296 link->lttpr_mode == LTTPR_MODE_TRANSPARENT)
4297 vendor_specific_lttpr_wa_three_dpcd(
4299 link_training_settings.dpcd_lane_settings);
4301 /*get post cursor 2 parameters
4302 * For DP 1.1a or earlier, this DPCD register's value is 0
4303 * For DP 1.2 or later:
4304 * Bits 1:0 = POST_CURSOR2_LANE0; Bits 3:2 = POST_CURSOR2_LANE1
4305 * Bits 5:4 = POST_CURSOR2_LANE2; Bits 7:6 = POST_CURSOR2_LANE3
4307 core_link_read_dpcd(
4309 DP_ADJUST_REQUEST_POST_CURSOR2,
4310 &dpcd_post_cursor_2_adjustment,
4311 sizeof(dpcd_post_cursor_2_adjustment));
4313 /* translate request */
4314 switch (dpcd_test_pattern.bits.PATTERN) {
4315 case PHY_TEST_PATTERN_D10_2:
4316 test_pattern = DP_TEST_PATTERN_D102;
4318 case PHY_TEST_PATTERN_SYMBOL_ERROR:
4319 test_pattern = DP_TEST_PATTERN_SYMBOL_ERROR;
4321 case PHY_TEST_PATTERN_PRBS7:
4322 test_pattern = DP_TEST_PATTERN_PRBS7;
4324 case PHY_TEST_PATTERN_80BIT_CUSTOM:
4325 test_pattern = DP_TEST_PATTERN_80BIT_CUSTOM;
4327 case PHY_TEST_PATTERN_CP2520_1:
4328 /* CP2520 pattern is unstable, temporarily use TPS4 instead */
4329 test_pattern = (link->dc->caps.force_dp_tps4_for_cp2520 == 1) ?
4330 DP_TEST_PATTERN_TRAINING_PATTERN4 :
4331 DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE;
4333 case PHY_TEST_PATTERN_CP2520_2:
4334 /* CP2520 pattern is unstable, temporarily use TPS4 instead */
4335 test_pattern = (link->dc->caps.force_dp_tps4_for_cp2520 == 1) ?
4336 DP_TEST_PATTERN_TRAINING_PATTERN4 :
4337 DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE;
4339 case PHY_TEST_PATTERN_CP2520_3:
4340 test_pattern = DP_TEST_PATTERN_TRAINING_PATTERN4;
4342 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* DP 2.x (128b/132b) compliance patterns, DCN only. */
4343 case PHY_TEST_PATTERN_128b_132b_TPS1:
4344 test_pattern = DP_TEST_PATTERN_128b_132b_TPS1;
4346 case PHY_TEST_PATTERN_128b_132b_TPS2:
4347 test_pattern = DP_TEST_PATTERN_128b_132b_TPS2;
4349 case PHY_TEST_PATTERN_PRBS9:
4350 test_pattern = DP_TEST_PATTERN_PRBS9;
4352 case PHY_TEST_PATTERN_PRBS11:
4353 test_pattern = DP_TEST_PATTERN_PRBS11;
4355 case PHY_TEST_PATTERN_PRBS15:
4356 test_pattern = DP_TEST_PATTERN_PRBS15;
4358 case PHY_TEST_PATTERN_PRBS23:
4359 test_pattern = DP_TEST_PATTERN_PRBS23;
4361 case PHY_TEST_PATTERN_PRBS31:
4362 test_pattern = DP_TEST_PATTERN_PRBS31;
4364 case PHY_TEST_PATTERN_264BIT_CUSTOM:
4365 test_pattern = DP_TEST_PATTERN_264BIT_CUSTOM;
4367 case PHY_TEST_PATTERN_SQUARE_PULSE:
4368 test_pattern = DP_TEST_PATTERN_SQUARE_PULSE;
4372 test_pattern = DP_TEST_PATTERN_VIDEO_MODE;
/* Custom patterns carry payload bytes that must be fetched separately. */
4376 if (test_pattern == DP_TEST_PATTERN_80BIT_CUSTOM) {
4377 test_pattern_size = (DP_TEST_80BIT_CUSTOM_PATTERN_79_72 -
4378 DP_TEST_80BIT_CUSTOM_PATTERN_7_0) + 1;
4379 core_link_read_dpcd(
4381 DP_TEST_80BIT_CUSTOM_PATTERN_7_0,
4382 test_pattern_buffer,
4386 #if defined(CONFIG_DRM_AMD_DC_DCN)
4387 if (test_pattern == DP_TEST_PATTERN_SQUARE_PULSE) {
4388 test_pattern_size = 1; // Square pattern data is 1 byte (DP spec)
4389 core_link_read_dpcd(
4391 DP_PHY_SQUARE_PATTERN,
4392 test_pattern_buffer,
4396 if (test_pattern == DP_TEST_PATTERN_264BIT_CUSTOM) {
4397 test_pattern_size = (DP_TEST_264BIT_CUSTOM_PATTERN_263_256-
4398 DP_TEST_264BIT_CUSTOM_PATTERN_7_0) + 1;
4399 core_link_read_dpcd(
4401 DP_TEST_264BIT_CUSTOM_PATTERN_7_0,
4402 test_pattern_buffer,
4407 /* prepare link training settings */
4408 link_training_settings.link_settings = link->cur_link_settings;
/* Per-lane drive settings: VS/PE/PC2 for 8b/10b links, FFE preset for
 * 128b/132b links.
 */
4410 for (lane = 0; lane <
4411 (unsigned int)(link->cur_link_settings.lane_count);
4413 dpcd_lane_adjust.raw =
4414 get_nibble_at_index(&dpcd_lane_adjustment[0].raw, lane);
4415 if (dp_get_link_encoding_format(&link->cur_link_settings) ==
4416 DP_8b_10b_ENCODING) {
4417 link_training_settings.hw_lane_settings[lane].VOLTAGE_SWING =
4418 (enum dc_voltage_swing)
4419 (dpcd_lane_adjust.bits.VOLTAGE_SWING_LANE);
4420 link_training_settings.hw_lane_settings[lane].PRE_EMPHASIS =
4421 (enum dc_pre_emphasis)
4422 (dpcd_lane_adjust.bits.PRE_EMPHASIS_LANE);
4423 link_training_settings.hw_lane_settings[lane].POST_CURSOR2 =
4424 (enum dc_post_cursor2)
4425 ((dpcd_post_cursor_2_adjustment >> (lane * 2)) & 0x03);
4427 #if defined(CONFIG_DRM_AMD_DC_DCN)
4428 else if (dp_get_link_encoding_format(&link->cur_link_settings) ==
4429 DP_128b_132b_ENCODING) {
4430 link_training_settings.hw_lane_settings[lane].FFE_PRESET.raw =
4431 dpcd_lane_adjust.tx_ffe.PRESET_VALUE;
4436 dp_hw_to_dpcd_lane_settings(&link_training_settings,
4437 link_training_settings.hw_lane_settings,
4438 link_training_settings.dpcd_lane_settings);
4439 /*Usage: Measure DP physical lane signal
4440 * by DP SI test equipment automatically.
4441 * PHY test pattern request is generated by equipment via HPD interrupt.
4442 * HPD needs to be active all the time. HPD should be active
4443 * all the time. Do not touch it.
4444 * forward request to DS
4446 dc_link_dp_set_test_pattern(
4449 DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED,
4450 &link_training_settings,
4451 test_pattern_buffer,
/* Service a sink-requested link (video) test pattern during DP automated
 * testing: locate the top-level pipe driving this link, read the requested
 * pattern and its parameters from the DPCD test registers, adapt the
 * stream's pixel encoding and color depth to the request, and forward the
 * result to dc_link_dp_set_test_pattern().
 * NOTE(review): this extract is missing several original source lines
 * (braces, breaks, some call arguments); comments cover visible logic only.
 */
4455 static void dp_test_send_link_test_pattern(struct dc_link *link)
4457 union link_test_pattern dpcd_test_pattern;
4458 union test_misc dpcd_test_params;
4459 enum dp_test_pattern test_pattern;
4460 enum dp_test_pattern_color_space test_pattern_color_space =
4461 DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED;
4462 enum dc_color_depth requestColorDepth = COLOR_DEPTH_UNDEFINED;
4463 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
4464 struct pipe_ctx *pipe_ctx = NULL;
4467 memset(&dpcd_test_pattern, 0, sizeof(dpcd_test_pattern));
4468 memset(&dpcd_test_params, 0, sizeof(dpcd_test_params));
/* Find the first top-level (not ODM-secondary) pipe that drives this link. */
4470 for (i = 0; i < MAX_PIPES; i++) {
4471 if (pipes[i].stream == NULL)
4474 if (pipes[i].stream->link == link && !pipes[i].top_pipe && !pipes[i].prev_odm_pipe) {
4475 pipe_ctx = &pipes[i];
/* No pipe carries a stream for this link: nothing to program. */
4480 if (pipe_ctx == NULL)
4483 /* get link test pattern and pattern parameters */
4484 core_link_read_dpcd(
4487 &dpcd_test_pattern.raw,
4488 sizeof(dpcd_test_pattern));
4489 core_link_read_dpcd(
4492 &dpcd_test_params.raw,
4493 sizeof(dpcd_test_params));
/* Map the DPCD pattern request onto the internal test-pattern enum. */
4495 switch (dpcd_test_pattern.bits.PATTERN) {
4496 case LINK_TEST_PATTERN_COLOR_RAMP:
4497 test_pattern = DP_TEST_PATTERN_COLOR_RAMP;
4499 case LINK_TEST_PATTERN_VERTICAL_BARS:
4500 test_pattern = DP_TEST_PATTERN_VERTICAL_BARS;
4501 break; /* black and white */
4502 case LINK_TEST_PATTERN_COLOR_SQUARES:
/* Requested dynamic range selects the VESA or CEA color-squares variant. */
4503 test_pattern = (dpcd_test_params.bits.DYN_RANGE ==
4504 TEST_DYN_RANGE_VESA ?
4505 DP_TEST_PATTERN_COLOR_SQUARES :
4506 DP_TEST_PATTERN_COLOR_SQUARES_CEA);
4509 test_pattern = DP_TEST_PATTERN_VIDEO_MODE;
/* CLR_FORMAT 0 means RGB; otherwise pick YCbCr 709/601 by the coefficients bit. */
4513 if (dpcd_test_params.bits.CLR_FORMAT == 0)
4514 test_pattern_color_space = DP_TEST_PATTERN_COLOR_SPACE_RGB;
4516 test_pattern_color_space = dpcd_test_params.bits.YCBCR_COEFS ?
4517 DP_TEST_PATTERN_COLOR_SPACE_YCBCR709 :
4518 DP_TEST_PATTERN_COLOR_SPACE_YCBCR601;
/* Translate the requested bits-per-component code into a dc color depth. */
4520 switch (dpcd_test_params.bits.BPC) {
4522 requestColorDepth = COLOR_DEPTH_666;
4525 requestColorDepth = COLOR_DEPTH_888;
4528 requestColorDepth = COLOR_DEPTH_101010;
4531 requestColorDepth = COLOR_DEPTH_121212;
/* Apply the requested pixel encoding to the active stream's timing. */
4537 switch (dpcd_test_params.bits.CLR_FORMAT) {
4539 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_RGB;
4542 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_YCBCR422;
4545 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_YCBCR444;
4548 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_RGB;
/* Change stream bpc only when a valid, different depth was requested. */
4553 if (requestColorDepth != COLOR_DEPTH_UNDEFINED
4554 && pipe_ctx->stream->timing.display_color_depth != requestColorDepth) {
4555 DC_LOG_DEBUG("%s: original bpc %d, changing to %d\n",
4557 pipe_ctx->stream->timing.display_color_depth,
4559 pipe_ctx->stream->timing.display_color_depth = requestColorDepth;
/* Recompute the DSC configuration for the updated timing. */
4562 dp_update_dsc_config(pipe_ctx);
4564 dc_link_dp_set_test_pattern(
4567 test_pattern_color_space,
4573 static void dp_test_get_audio_test_data(struct dc_link *link, bool disable_video)
4575 union audio_test_mode dpcd_test_mode = {0};
4576 struct audio_test_pattern_type dpcd_pattern_type = {0};
4577 union audio_test_pattern_period dpcd_pattern_period[AUDIO_CHANNELS_COUNT] = {0};
4578 enum dp_test_pattern test_pattern = DP_TEST_PATTERN_AUDIO_OPERATOR_DEFINED;
4580 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
4581 struct pipe_ctx *pipe_ctx = &pipes[0];
4582 unsigned int channel_count;
4583 unsigned int channel = 0;
4584 unsigned int modes = 0;
4585 unsigned int sampling_rate_in_hz = 0;
4587 // get audio test mode and test pattern parameters
4588 core_link_read_dpcd(
4591 &dpcd_test_mode.raw,
4592 sizeof(dpcd_test_mode));
4594 core_link_read_dpcd(
4596 DP_TEST_AUDIO_PATTERN_TYPE,
4597 &dpcd_pattern_type.value,
4598 sizeof(dpcd_pattern_type));
4600 channel_count = dpcd_test_mode.bits.channel_count + 1;
4602 // read pattern periods for requested channels when sawTooth pattern is requested
4603 if (dpcd_pattern_type.value == AUDIO_TEST_PATTERN_SAWTOOTH ||
4604 dpcd_pattern_type.value == AUDIO_TEST_PATTERN_OPERATOR_DEFINED) {
4606 test_pattern = (dpcd_pattern_type.value == AUDIO_TEST_PATTERN_SAWTOOTH) ?
4607 DP_TEST_PATTERN_AUDIO_SAWTOOTH : DP_TEST_PATTERN_AUDIO_OPERATOR_DEFINED;
4608 // read period for each channel
4609 for (channel = 0; channel < channel_count; channel++) {
4610 core_link_read_dpcd(
4612 DP_TEST_AUDIO_PERIOD_CH1 + channel,
4613 &dpcd_pattern_period[channel].raw,
4614 sizeof(dpcd_pattern_period[channel]));
4618 // translate sampling rate
4619 switch (dpcd_test_mode.bits.sampling_rate) {
4620 case AUDIO_SAMPLING_RATE_32KHZ:
4621 sampling_rate_in_hz = 32000;
4623 case AUDIO_SAMPLING_RATE_44_1KHZ:
4624 sampling_rate_in_hz = 44100;
4626 case AUDIO_SAMPLING_RATE_48KHZ:
4627 sampling_rate_in_hz = 48000;
4629 case AUDIO_SAMPLING_RATE_88_2KHZ:
4630 sampling_rate_in_hz = 88200;
4632 case AUDIO_SAMPLING_RATE_96KHZ:
4633 sampling_rate_in_hz = 96000;
4635 case AUDIO_SAMPLING_RATE_176_4KHZ:
4636 sampling_rate_in_hz = 176400;
4638 case AUDIO_SAMPLING_RATE_192KHZ:
4639 sampling_rate_in_hz = 192000;
4642 sampling_rate_in_hz = 0;
4646 link->audio_test_data.flags.test_requested = 1;
4647 link->audio_test_data.flags.disable_video = disable_video;
4648 link->audio_test_data.sampling_rate = sampling_rate_in_hz;
4649 link->audio_test_data.channel_count = channel_count;
4650 link->audio_test_data.pattern_type = test_pattern;
4652 if (test_pattern == DP_TEST_PATTERN_AUDIO_SAWTOOTH) {
4653 for (modes = 0; modes < pipe_ctx->stream->audio_info.mode_count; modes++) {
4654 link->audio_test_data.pattern_period[modes] = dpcd_pattern_period[modes].bits.pattern_period;
4659 void dc_link_dp_handle_automated_test(struct dc_link *link)
4661 union test_request test_request;
4662 union test_response test_response;
4664 memset(&test_request, 0, sizeof(test_request));
4665 memset(&test_response, 0, sizeof(test_response));
4667 core_link_read_dpcd(
4671 sizeof(union test_request));
4672 if (test_request.bits.LINK_TRAINING) {
4673 /* ACK first to let DP RX test box monitor LT sequence */
4674 test_response.bits.ACK = 1;
4675 core_link_write_dpcd(
4679 sizeof(test_response));
4680 dp_test_send_link_training(link);
4681 /* no acknowledge request is needed again */
4682 test_response.bits.ACK = 0;
4684 if (test_request.bits.LINK_TEST_PATTRN) {
4685 dp_test_send_link_test_pattern(link);
4686 test_response.bits.ACK = 1;
4689 if (test_request.bits.AUDIO_TEST_PATTERN) {
4690 dp_test_get_audio_test_data(link, test_request.bits.TEST_AUDIO_DISABLED_VIDEO);
4691 test_response.bits.ACK = 1;
4694 if (test_request.bits.PHY_TEST_PATTERN) {
4695 dp_test_send_phy_test_pattern(link);
4696 test_response.bits.ACK = 1;
4699 /* send request acknowledgment */
4700 if (test_response.bits.ACK)
4701 core_link_write_dpcd(
4705 sizeof(test_response));
4708 void dc_link_dp_handle_link_loss(struct dc_link *link)
4711 struct pipe_ctx *pipe_ctx;
4713 for (i = 0; i < MAX_PIPES; i++) {
4714 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
4715 if (pipe_ctx && pipe_ctx->stream && pipe_ctx->stream->link == link)
4719 if (pipe_ctx == NULL || pipe_ctx->stream == NULL)
4722 for (i = 0; i < MAX_PIPES; i++) {
4723 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
4724 if (pipe_ctx && pipe_ctx->stream && !pipe_ctx->stream->dpms_off &&
4725 pipe_ctx->stream->link == link && !pipe_ctx->prev_odm_pipe) {
4726 core_link_disable_stream(pipe_ctx);
4730 for (i = 0; i < MAX_PIPES; i++) {
4731 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
4732 if (pipe_ctx && pipe_ctx->stream && !pipe_ctx->stream->dpms_off &&
4733 pipe_ctx->stream->link == link && !pipe_ctx->prev_odm_pipe) {
4734 core_link_enable_stream(link->dc->current_state, pipe_ctx);
/* Handle a short-pulse (HPD RX) interrupt on a DP link: read the DPCD IRQ
 * vector data, then service automated-test requests, PSR errors, MST
 * sideband messages and (for non-eDP links) link loss.  When
 * defer_handling is set, work is flagged through *has_left_work for the
 * caller instead of being performed inline; *out_link_loss reports a
 * detected link loss and *out_hpd_irq_dpcd_data returns the raw IRQ data.
 * NOTE(review): several original lines (braces, returns, the assignments
 * to 'status', some call arguments) are missing from this extract;
 * comments cover visible logic only.
 */
4739 bool dc_link_handle_hpd_rx_irq(struct dc_link *link, union hpd_irq_data *out_hpd_irq_dpcd_data, bool *out_link_loss,
4740 bool defer_handling, bool *has_left_work)
4742 union hpd_irq_data hpd_irq_dpcd_data = {0};
4743 union device_service_irq device_service_clear = {0};
4744 enum dc_status result;
4745 bool status = false;
4748 *out_link_loss = false;
4751 *has_left_work = false;
4752 /* For use cases related to down stream connection status change,
4753 * PSR and device auto test, refer to function handle_sst_hpd_irq
4756 DC_LOG_HW_HPD_IRQ("%s: Got short pulse HPD on link %d\n",
4757 __func__, link->link_index);
4760 /* All the "handle_hpd_irq_xxx()" methods
4761 * should be called only after
4762 * dal_dpsst_ls_read_hpd_irq_data
4763 * Order of calls is important too
4765 result = read_hpd_rx_irq_data(link, &hpd_irq_dpcd_data);
/* Hand the raw IRQ data back to the caller when a buffer was provided. */
4766 if (out_hpd_irq_dpcd_data)
4767 *out_hpd_irq_dpcd_data = hpd_irq_dpcd_data;
4769 if (result != DC_OK) {
4770 DC_LOG_HW_HPD_IRQ("%s: DPCD read failed to obtain irq data\n",
/* Automated test: clear the IRQ bit in DPCD first, then either defer to
 * the caller or run the requested test sequence now. */
4775 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.AUTOMATED_TEST) {
4776 device_service_clear.bits.AUTOMATED_TEST = 1;
4777 core_link_write_dpcd(
4779 DP_DEVICE_SERVICE_IRQ_VECTOR,
4780 &device_service_clear.raw,
4781 sizeof(device_service_clear.raw));
4782 device_service_clear.raw = 0;
4783 if (defer_handling && has_left_work)
4784 *has_left_work = true;
4786 dc_link_dp_handle_automated_test(link);
4790 if (!dc_link_dp_allow_hpd_rx_irq(link)) {
4791 DC_LOG_HW_HPD_IRQ("%s: skipping HPD handling on %d\n",
4792 __func__, link->link_index);
4796 if (handle_hpd_irq_psr_sink(link))
4797 /* PSR-related error was detected and handled */
4800 /* If PSR-related error handled, Main link may be off,
4801 * so do not handle as a normal sink status change interrupt.
/* MST up-request sideband message ready: defer to caller when requested. */
4804 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.UP_REQ_MSG_RDY) {
4805 if (defer_handling && has_left_work)
4806 *has_left_work = true;
4810 /* check if we have MST msg and return since we poll for it */
4811 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.DOWN_REP_MSG_RDY) {
4812 if (defer_handling && has_left_work)
4813 *has_left_work = true;
4817 /* For now we only handle 'Downstream port status' case.
4818 * If we got sink count changed it means
4819 * Downstream port status changed,
4820 * then DM should call DC to do the detection.
4821 * NOTE: Do not handle link loss on eDP since it is internal link*/
4822 if ((link->connector_signal != SIGNAL_TYPE_EDP) &&
4823 hpd_rx_irq_check_link_loss_status(
4825 &hpd_irq_dpcd_data)) {
4826 /* Connectivity log: link loss */
4827 CONN_DATA_LINK_LOSS(link,
4828 hpd_irq_dpcd_data.raw,
4829 sizeof(hpd_irq_dpcd_data),
4832 if (defer_handling && has_left_work)
4833 *has_left_work = true;
4835 dc_link_dp_handle_link_loss(link);
4839 *out_link_loss = true;
/* A sink-count change on an SST branch device also needs DM re-detection. */
4842 if (link->type == dc_connection_sst_branch &&
4843 hpd_irq_dpcd_data.bytes.sink_cnt.bits.SINK_COUNT
4844 != link->dpcd_sink_count)
4847 /* reasons for HPD RX:
4848 * 1. Link Loss - ie Re-train the Link
4849 * 2. MST sideband message
4850 * 3. Automated Test - ie. Internal Commit
4851 * 4. CP (copy protection) - (not interesting for DM???)
4853 * 6. Downstream Port status changed
4854 * -ie. Detect - this the only one
4855 * which is interesting for DM because
4856 * it must call dc_link_detect.
4861 /*query dpcd for version and mst cap addresses*/
4862 bool is_mst_supported(struct dc_link *link)
4865 enum dc_status st = DC_OK;
4869 if (link->preferred_training_settings.mst_enable &&
4870 *link->preferred_training_settings.mst_enable == false) {
4877 st = core_link_read_dpcd(link, DP_DPCD_REV, &rev.raw,
4880 if (st == DC_OK && rev.raw >= DPCD_REV_12) {
4882 st = core_link_read_dpcd(link, DP_MSTM_CAP,
4883 &cap.raw, sizeof(cap));
4884 if (st == DC_OK && cap.bits.MST_CAP == 1)
4891 bool is_dp_active_dongle(const struct dc_link *link)
4893 return (link->dpcd_caps.dongle_type >= DISPLAY_DONGLE_DP_VGA_CONVERTER) &&
4894 (link->dpcd_caps.dongle_type <= DISPLAY_DONGLE_DP_HDMI_CONVERTER);
4897 bool is_dp_branch_device(const struct dc_link *link)
4899 return link->dpcd_caps.is_branch_dev;
4902 static int translate_dpcd_max_bpc(enum dpcd_downstream_port_max_bpc bpc)
4905 case DOWN_STREAM_MAX_8BPC:
4907 case DOWN_STREAM_MAX_10BPC:
4909 case DOWN_STREAM_MAX_12BPC:
4911 case DOWN_STREAM_MAX_16BPC:
4920 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* Translate the raw MAX_ENCODED_LINK_BW_SUPPORT DPCD field (HDMI FRL link
 * rate code 1..6) into a bandwidth in kbps; unknown codes map to 0.
 */
uint32_t dc_link_bw_kbps_from_raw_frl_link_rate_data(uint8_t bw)
{
	switch (bw) {
	case 1: /* FRL 9 Gbps (3 lanes x 3 Gbps) */
		return 9000000;
	case 2: /* FRL 18 Gbps */
		return 18000000;
	case 3: /* FRL 24 Gbps */
		return 24000000;
	case 4: /* FRL 32 Gbps */
		return 32000000;
	case 5: /* FRL 40 Gbps */
		return 40000000;
	case 6: /* FRL 48 Gbps */
		return 48000000;
	default:
		return 0;
	}
}
4942 * Return PCON's post FRL link training supported BW if its non-zero, otherwise return max_supported_frl_bw.
4944 static uint32_t intersect_frl_link_bw_support(
4945 const uint32_t max_supported_frl_bw_in_kbps,
4946 const union hdmi_encoded_link_bw hdmi_encoded_link_bw)
4948 uint32_t supported_bw_in_kbps = max_supported_frl_bw_in_kbps;
4950 // HDMI_ENCODED_LINK_BW bits are only valid if HDMI Link Configuration bit is 1 (FRL mode)
4951 if (hdmi_encoded_link_bw.bits.FRL_MODE) {
4952 if (hdmi_encoded_link_bw.bits.BW_48Gbps)
4953 supported_bw_in_kbps = 48000000;
4954 else if (hdmi_encoded_link_bw.bits.BW_40Gbps)
4955 supported_bw_in_kbps = 40000000;
4956 else if (hdmi_encoded_link_bw.bits.BW_32Gbps)
4957 supported_bw_in_kbps = 32000000;
4958 else if (hdmi_encoded_link_bw.bits.BW_24Gbps)
4959 supported_bw_in_kbps = 24000000;
4960 else if (hdmi_encoded_link_bw.bits.BW_18Gbps)
4961 supported_bw_in_kbps = 18000000;
4962 else if (hdmi_encoded_link_bw.bits.BW_9Gbps)
4963 supported_bw_in_kbps = 9000000;
4966 return supported_bw_in_kbps;
4970 static void read_dp_device_vendor_id(struct dc_link *link)
4972 struct dp_device_vendor_id dp_id;
4974 /* read IEEE branch device id */
4975 core_link_read_dpcd(
4981 link->dpcd_caps.branch_dev_id =
4982 (dp_id.ieee_oui[0] << 16) +
4983 (dp_id.ieee_oui[1] << 8) +
4987 link->dpcd_caps.branch_dev_name,
4988 dp_id.ieee_device_id,
4989 sizeof(dp_id.ieee_device_id));
/* Decode the DPCD DOWNSTREAMPORT_PRESENT byte (passed in 'data') and, for
 * branch devices, read the detailed downstream-port capability block to
 * classify the attached dongle/converter and cache its limits (max pixel
 * clock, bpc, YCbCr pass-through/conversion, FRL bandwidth, DFP capability
 * extension) in link->dpcd_caps.
 * NOTE(review): this extract is missing many original lines (braces,
 * breaks, default cases, some call arguments, #else/#endif lines);
 * comments describe only the visible logic.
 */
4994 static void get_active_converter_info(
4995 uint8_t data, struct dc_link *link)
4997 union dp_downstream_port_present ds_port = { .byte = data };
4998 memset(&link->dpcd_caps.dongle_caps, 0, sizeof(link->dpcd_caps.dongle_caps));
5000 /* decode converter info*/
5001 if (!ds_port.fields.PORT_PRESENT) {
5002 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
5003 ddc_service_set_dongle_type(link->ddc,
5004 link->dpcd_caps.dongle_type);
5005 link->dpcd_caps.is_branch_dev = false;
5009 /* DPCD 0x5 bit 0 = 1, it indicate it's branch device */
5010 link->dpcd_caps.is_branch_dev = ds_port.fields.PORT_PRESENT;
/* Coarse classification from the (legacy) PORT_TYPE field. */
5012 switch (ds_port.fields.PORT_TYPE) {
5013 case DOWNSTREAM_VGA:
5014 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_DP_VGA_CONVERTER;
5016 case DOWNSTREAM_DVI_HDMI_DP_PLUS_PLUS:
5017 /* At this point we don't know is it DVI or HDMI or DP++,
5019 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_DP_DVI_CONVERTER;
5022 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
/* DPCD 1.1+: refine the classification from the 16-byte detailed
 * capability block at 00080h. */
5026 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_11) {
5027 uint8_t det_caps[16]; /* CTS 4.2.2.7 expects source to read Detailed Capabilities Info : 00080h-0008F.*/
5028 union dwnstream_port_caps_byte0 *port_caps =
5029 (union dwnstream_port_caps_byte0 *)det_caps;
5030 if (core_link_read_dpcd(link, DP_DOWNSTREAM_PORT_0,
5031 det_caps, sizeof(det_caps)) == DC_OK) {
5033 switch (port_caps->bits.DWN_STRM_PORTX_TYPE) {
5034 /*Handle DP case as DONGLE_NONE*/
5035 case DOWN_STREAM_DETAILED_DP:
5036 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
5038 case DOWN_STREAM_DETAILED_VGA:
5039 link->dpcd_caps.dongle_type =
5040 DISPLAY_DONGLE_DP_VGA_CONVERTER;
5042 case DOWN_STREAM_DETAILED_DVI:
5043 link->dpcd_caps.dongle_type =
5044 DISPLAY_DONGLE_DP_DVI_CONVERTER;
5046 case DOWN_STREAM_DETAILED_HDMI:
5047 case DOWN_STREAM_DETAILED_DP_PLUS_PLUS:
5048 /*Handle DP++ active converter case, process DP++ case as HDMI case according DP1.4 spec*/
5049 link->dpcd_caps.dongle_type =
5050 DISPLAY_DONGLE_DP_HDMI_CONVERTER;
5052 link->dpcd_caps.dongle_caps.dongle_type = link->dpcd_caps.dongle_type;
/* Detailed caps present: decode pixel clock, 3D, YCbCr and bpc limits. */
5053 if (ds_port.fields.DETAILED_CAPS) {
5055 union dwnstream_port_caps_byte3_hdmi
5056 hdmi_caps = {.raw = det_caps[3] };
5057 union dwnstream_port_caps_byte2
5058 hdmi_color_caps = {.raw = det_caps[2] };
5059 link->dpcd_caps.dongle_caps.dp_hdmi_max_pixel_clk_in_khz =
5062 link->dpcd_caps.dongle_caps.is_dp_hdmi_s3d_converter =
5063 hdmi_caps.bits.FRAME_SEQ_TO_FRAME_PACK;
5064 /*YCBCR capability only for HDMI case*/
5065 if (port_caps->bits.DWN_STRM_PORTX_TYPE
5066 == DOWN_STREAM_DETAILED_HDMI) {
5067 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr422_pass_through =
5068 hdmi_caps.bits.YCrCr422_PASS_THROUGH;
5069 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr420_pass_through =
5070 hdmi_caps.bits.YCrCr420_PASS_THROUGH;
5071 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr422_converter =
5072 hdmi_caps.bits.YCrCr422_CONVERSION;
5073 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr420_converter =
5074 hdmi_caps.bits.YCrCr420_CONVERSION;
5077 link->dpcd_caps.dongle_caps.dp_hdmi_max_bpc =
5078 translate_dpcd_max_bpc(
5079 hdmi_color_caps.bits.MAX_BITS_PER_COLOR_COMPONENT);
5081 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* PCON (DP->HDMI FRL) path: compute max FRL bandwidth and clamp it by
 * the link rate the PCON actually trained post-FRL-link-training. */
5082 if (link->dc->caps.hdmi_frl_pcon_support) {
5083 union hdmi_encoded_link_bw hdmi_encoded_link_bw;
5085 link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps =
5086 dc_link_bw_kbps_from_raw_frl_link_rate_data(
5087 hdmi_color_caps.bits.MAX_ENCODED_LINK_BW_SUPPORT);
5089 // Intersect reported max link bw support with the supported link rate post FRL link training
5090 if (core_link_read_dpcd(link, DP_PCON_HDMI_POST_FRL_STATUS,
5091 &hdmi_encoded_link_bw.raw, sizeof(hdmi_encoded_link_bw)) == DC_OK) {
5092 link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps = intersect_frl_link_bw_support(
5093 link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps,
5094 hdmi_encoded_link_bw);
5097 if (link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps > 0)
5098 link->dpcd_caps.dongle_caps.extendedCapValid = true;
5102 if (link->dpcd_caps.dongle_caps.dp_hdmi_max_pixel_clk_in_khz != 0)
5103 link->dpcd_caps.dongle_caps.extendedCapValid = true;
5111 ddc_service_set_dongle_type(link->ddc, link->dpcd_caps.dongle_type);
/* Cache the branch device's HW/FW revision. */
5114 struct dp_sink_hw_fw_revision dp_hw_fw_revision;
5116 core_link_read_dpcd(
5118 DP_BRANCH_REVISION_START,
5119 (uint8_t *)&dp_hw_fw_revision,
5120 sizeof(dp_hw_fw_revision));
5122 link->dpcd_caps.branch_hw_revision =
5123 dp_hw_fw_revision.ieee_hw_rev;
5126 link->dpcd_caps.branch_fw_revision,
5127 dp_hw_fw_revision.ieee_fw_rev,
5128 sizeof(dp_hw_fw_revision.ieee_fw_rev));
5130 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* DPCD 1.4+ branch devices: read and unpack the DFP capability
 * extension block (little-endian 16-bit fields). */
5131 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_14 &&
5132 link->dpcd_caps.dongle_type != DISPLAY_DONGLE_NONE) {
5133 union dp_dfp_cap_ext dfp_cap_ext;
5134 memset(&dfp_cap_ext, '\0', sizeof (dfp_cap_ext));
5135 core_link_read_dpcd(
5137 DP_DFP_CAPABILITY_EXTENSION_SUPPORT,
5139 sizeof(dfp_cap_ext.raw));
5140 link->dpcd_caps.dongle_caps.dfp_cap_ext.supported = dfp_cap_ext.fields.supported;
5141 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_pixel_rate_in_mps =
5142 dfp_cap_ext.fields.max_pixel_rate_in_mps[0] +
5143 (dfp_cap_ext.fields.max_pixel_rate_in_mps[1] << 8);
5144 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_h_active_width =
5145 dfp_cap_ext.fields.max_video_h_active_width[0] +
5146 (dfp_cap_ext.fields.max_video_h_active_width[1] << 8);
5147 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_v_active_height =
5148 dfp_cap_ext.fields.max_video_v_active_height[0] +
5149 (dfp_cap_ext.fields.max_video_v_active_height[1] << 8);
5150 link->dpcd_caps.dongle_caps.dfp_cap_ext.encoding_format_caps =
5151 dfp_cap_ext.fields.encoding_format_caps;
5152 link->dpcd_caps.dongle_caps.dfp_cap_ext.rgb_color_depth_caps =
5153 dfp_cap_ext.fields.rgb_color_depth_caps;
5154 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr444_color_depth_caps =
5155 dfp_cap_ext.fields.ycbcr444_color_depth_caps;
5156 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr422_color_depth_caps =
5157 dfp_cap_ext.fields.ycbcr422_color_depth_caps;
5158 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr420_color_depth_caps =
5159 dfp_cap_ext.fields.ycbcr420_color_depth_caps;
5160 DC_LOG_DP2("DFP capability extension is read at link %d", link->link_index);
5161 DC_LOG_DP2("\tdfp_cap_ext.supported = %s", link->dpcd_caps.dongle_caps.dfp_cap_ext.supported ? "true" : "false");
5162 DC_LOG_DP2("\tdfp_cap_ext.max_pixel_rate_in_mps = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_pixel_rate_in_mps);
5163 DC_LOG_DP2("\tdfp_cap_ext.max_video_h_active_width = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_h_active_width);
5164 DC_LOG_DP2("\tdfp_cap_ext.max_video_v_active_height = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_v_active_height);
5169 static void dp_wa_power_up_0010FA(struct dc_link *link, uint8_t *dpcd_data,
5174 if (!link->dpcd_caps.dpcd_rev.raw) {
5176 dp_receiver_power_ctrl(link, true);
5177 core_link_read_dpcd(link, DP_DPCD_REV,
5179 link->dpcd_caps.dpcd_rev.raw = dpcd_data[
5182 } while (retry++ < 4 && !link->dpcd_caps.dpcd_rev.raw);
5185 if (link->dpcd_caps.dongle_type == DISPLAY_DONGLE_DP_VGA_CONVERTER) {
5186 switch (link->dpcd_caps.branch_dev_id) {
5187 /* 0010FA active dongles (DP-VGA, DP-DLDVI converters) power down
5188 * all internal circuits including AUX communication preventing
5189 * reading DPCD table and EDID (spec violation).
5190 * Encoder will skip DP RX power down on disable_output to
5191 * keep receiver powered all the time.*/
5192 case DP_BRANCH_DEVICE_ID_0010FA:
5193 case DP_BRANCH_DEVICE_ID_0080E1:
5194 case DP_BRANCH_DEVICE_ID_00E04C:
5195 link->wa_flags.dp_keep_receiver_powered = true;
5198 /* TODO: May need work around for other dongles. */
5200 link->wa_flags.dp_keep_receiver_powered = false;
5204 link->wa_flags.dp_keep_receiver_powered = false;
5207 /* Read additional sink caps defined in source specific DPCD area
5208 * This function currently only reads from SinkCapability address (DP_SOURCE_SINK_CAP)
5210 static bool dpcd_read_sink_ext_caps(struct dc_link *link)
5217 if (core_link_read_dpcd(link, DP_SOURCE_SINK_CAP, &dpcd_data, 1) != DC_OK)
5220 link->dpcd_sink_ext_caps.raw = dpcd_data;
/* Determine the LTTPR (link-training-tunable PHY repeater) operating mode
 * for this link from VBIOS/driver policy and, when LTTPRs may be present,
 * read the LTTPR capability block from DPCD (offsets relative to
 * DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV) and validate it.
 * Returns true when a sane LTTPR chain was detected; otherwise the link
 * falls back to LTTPR_MODE_NON_LTTPR.
 * NOTE(review): some original lines (#else/#endif directives, braces,
 * returns) are missing from this extract; comments cover visible logic.
 */
5224 bool dp_retrieve_lttpr_cap(struct dc_link *link)
5226 #if defined(CONFIG_DRM_AMD_DC_DCN)
5227 uint8_t lttpr_dpcd_data[8];
5228 bool allow_lttpr_non_transparent_mode = 0;
5230 uint8_t lttpr_dpcd_data[6];
5232 bool vbios_lttpr_enable = link->dc->caps.vbios_lttpr_enable;
5233 bool vbios_lttpr_interop = link->dc->caps.vbios_lttpr_aware;
5234 enum dc_status status = DC_ERROR_UNEXPECTED;
5235 bool is_lttpr_present = false;
5237 memset(lttpr_dpcd_data, '\0', sizeof(lttpr_dpcd_data));
5239 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* Non-transparent mode is allowed only when the driver config bit that
 * matches the sink's channel coding (128b/132b vs 8b/10b) is set. */
5240 if ((link->dc->config.allow_lttpr_non_transparent_mode.bits.DP2_0 &&
5241 link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED)) {
5242 allow_lttpr_non_transparent_mode = 1;
5243 } else if (link->dc->config.allow_lttpr_non_transparent_mode.bits.DP1_4A &&
5244 !link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED) {
5245 allow_lttpr_non_transparent_mode = 1;
5250 * Logic to determine LTTPR mode
5252 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
5253 if (vbios_lttpr_enable && vbios_lttpr_interop)
5254 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
5255 else if (!vbios_lttpr_enable && vbios_lttpr_interop) {
5256 #if defined(CONFIG_DRM_AMD_DC_DCN)
5257 if (allow_lttpr_non_transparent_mode)
5259 if (link->dc->config.allow_lttpr_non_transparent_mode)
5261 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
5263 link->lttpr_mode = LTTPR_MODE_TRANSPARENT;
5264 } else if (!vbios_lttpr_enable && !vbios_lttpr_interop) {
5265 #if defined(CONFIG_DRM_AMD_DC_DCN)
5266 if (!allow_lttpr_non_transparent_mode || !link->dc->caps.extended_aux_timeout_support)
5268 if (!link->dc->config.allow_lttpr_non_transparent_mode
5269 || !link->dc->caps.extended_aux_timeout_support)
5271 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
5273 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
5275 #if defined(CONFIG_DRM_AMD_DC_DCN)
5276 /* Check DP tunnel LTTPR mode debug option. */
5277 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA &&
5278 link->dc->debug.dpia_debug.bits.force_non_lttpr)
5279 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
5282 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT || link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
5283 /* By reading LTTPR capability, RX assumes that we will enable
5284 * LTTPR extended aux timeout if LTTPR is present.
5286 status = core_link_read_dpcd(
5288 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV,
5290 sizeof(lttpr_dpcd_data));
5291 if (status != DC_OK) {
5292 DC_LOG_DP2("%s: Read LTTPR caps data failed.\n", __func__);
/* Unpack the capability bytes; indices are offsets into the block
 * relative to its base register. */
5296 link->dpcd_caps.lttpr_caps.revision.raw =
5297 lttpr_dpcd_data[DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV -
5298 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5300 link->dpcd_caps.lttpr_caps.max_link_rate =
5301 lttpr_dpcd_data[DP_MAX_LINK_RATE_PHY_REPEATER -
5302 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5304 link->dpcd_caps.lttpr_caps.phy_repeater_cnt =
5305 lttpr_dpcd_data[DP_PHY_REPEATER_CNT -
5306 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5308 link->dpcd_caps.lttpr_caps.max_lane_count =
5309 lttpr_dpcd_data[DP_MAX_LANE_COUNT_PHY_REPEATER -
5310 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5312 link->dpcd_caps.lttpr_caps.mode =
5313 lttpr_dpcd_data[DP_PHY_REPEATER_MODE -
5314 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5316 link->dpcd_caps.lttpr_caps.max_ext_timeout =
5317 lttpr_dpcd_data[DP_PHY_REPEATER_EXTENDED_WAIT_TIMEOUT -
5318 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5320 #if defined(CONFIG_DRM_AMD_DC_DCN)
5321 link->dpcd_caps.lttpr_caps.main_link_channel_coding.raw =
5322 lttpr_dpcd_data[DP_MAIN_LINK_CHANNEL_CODING_PHY_REPEATER -
5323 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5325 link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.raw =
5326 lttpr_dpcd_data[DP_PHY_REPEATER_128b_132b_RATES -
5327 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5330 /* Attempt to train in LTTPR transparent mode if repeater count exceeds 8. */
5331 is_lttpr_present = (dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt) != 0 &&
5332 link->dpcd_caps.lttpr_caps.phy_repeater_cnt < 0xff &&
5333 link->dpcd_caps.lttpr_caps.max_lane_count > 0 &&
5334 link->dpcd_caps.lttpr_caps.max_lane_count <= 4 &&
5335 link->dpcd_caps.lttpr_caps.revision.raw >= 0x14);
5336 if (is_lttpr_present) {
5337 CONN_DATA_DETECT(link, lttpr_dpcd_data, sizeof(lttpr_dpcd_data), "LTTPR Caps: ");
5338 configure_lttpr_mode_transparent(link);
5340 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
5342 return is_lttpr_present;
5345 static bool retrieve_link_cap(struct dc_link *link)
5347 /* DP_ADAPTER_CAP - DP_DPCD_REV + 1 == 16 and also DP_DSC_BITS_PER_PIXEL_INC - DP_DSC_SUPPORT + 1 == 16,
5348 * which means size 16 will be good for both of those DPCD register block reads
5350 uint8_t dpcd_data[16];
5351 /*Only need to read 1 byte starting from DP_DPRX_FEATURE_ENUMERATION_LIST.
5353 uint8_t dpcd_dprx_data = '\0';
5354 uint8_t dpcd_power_state = '\0';
5356 struct dp_device_vendor_id sink_id;
5357 union down_stream_port_count down_strm_port_count;
5358 union edp_configuration_cap edp_config_cap;
5359 union dp_downstream_port_present ds_port = { 0 };
5360 enum dc_status status = DC_ERROR_UNEXPECTED;
5361 uint32_t read_dpcd_retry_cnt = 3;
5363 struct dp_sink_hw_fw_revision dp_hw_fw_revision;
5364 const uint32_t post_oui_delay = 30; // 30ms
5365 bool is_lttpr_present = false;
5367 memset(dpcd_data, '\0', sizeof(dpcd_data));
5368 memset(&down_strm_port_count,
5369 '\0', sizeof(union down_stream_port_count));
5370 memset(&edp_config_cap, '\0',
5371 sizeof(union edp_configuration_cap));
5373 /* if extended timeout is supported in hardware,
5374 * default to LTTPR timeout (3.2ms) first as a W/A for DP link layer
5375 * CTS 4.2.1.1 regression introduced by CTS specs requirement update.
5377 dc_link_aux_try_to_configure_timeout(link->ddc,
5378 LINK_AUX_DEFAULT_LTTPR_TIMEOUT_PERIOD);
5380 is_lttpr_present = dp_retrieve_lttpr_cap(link);
5381 /* Read DP tunneling information. */
5382 status = dpcd_get_tunneling_device_data(link);
5384 status = core_link_read_dpcd(link, DP_SET_POWER,
5385 &dpcd_power_state, sizeof(dpcd_power_state));
5387 /* Delay 1 ms if AUX CH is in power down state. Based on spec
5388 * section 2.3.1.2, if AUX CH may be powered down due to
5389 * write to DPCD 600h = 2. Sink AUX CH is monitoring differential
5390 * signal and may need up to 1 ms before being able to reply.
5392 if (status != DC_OK || dpcd_power_state == DP_SET_POWER_D3)
5395 dpcd_set_source_specific_data(link);
5396 /* Sink may need to configure internals based on vendor, so allow some
5397 * time before proceeding with possibly vendor specific transactions
5399 msleep(post_oui_delay);
5401 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5402 status = core_link_read_dpcd(
5407 if (status == DC_OK)
5411 if (status != DC_OK) {
5412 dm_error("%s: Read receiver caps dpcd data failed.\n", __func__);
5416 if (!is_lttpr_present)
5417 dc_link_aux_try_to_configure_timeout(link->ddc, LINK_AUX_DEFAULT_TIMEOUT_PERIOD);
5420 union training_aux_rd_interval aux_rd_interval;
5422 aux_rd_interval.raw =
5423 dpcd_data[DP_TRAINING_AUX_RD_INTERVAL];
5425 link->dpcd_caps.ext_receiver_cap_field_present =
5426 aux_rd_interval.bits.EXT_RECEIVER_CAP_FIELD_PRESENT == 1;
5428 if (aux_rd_interval.bits.EXT_RECEIVER_CAP_FIELD_PRESENT == 1) {
5429 uint8_t ext_cap_data[16];
5431 memset(ext_cap_data, '\0', sizeof(ext_cap_data));
5432 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5433 status = core_link_read_dpcd(
5437 sizeof(ext_cap_data));
5438 if (status == DC_OK) {
5439 memcpy(dpcd_data, ext_cap_data, sizeof(dpcd_data));
5443 if (status != DC_OK)
5444 dm_error("%s: Read extend caps data failed, use cap from dpcd 0.\n", __func__);
5448 link->dpcd_caps.dpcd_rev.raw =
5449 dpcd_data[DP_DPCD_REV - DP_DPCD_REV];
5451 if (link->dpcd_caps.ext_receiver_cap_field_present) {
5452 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5453 status = core_link_read_dpcd(
5455 DP_DPRX_FEATURE_ENUMERATION_LIST,
5457 sizeof(dpcd_dprx_data));
5458 if (status == DC_OK)
5462 link->dpcd_caps.dprx_feature.raw = dpcd_dprx_data;
5464 if (status != DC_OK)
5465 dm_error("%s: Read DPRX caps data failed.\n", __func__);
5469 link->dpcd_caps.dprx_feature.raw = 0;
5473 /* Error condition checking...
5474 * It is impossible for Sink to report Max Lane Count = 0.
5475 * It is possible for Sink to report Max Link Rate = 0, if it is
5476 * an eDP device that is reporting specialized link rates in the
5477 * SUPPORTED_LINK_RATE table.
5479 if (dpcd_data[DP_MAX_LANE_COUNT - DP_DPCD_REV] == 0)
5482 ds_port.byte = dpcd_data[DP_DOWNSTREAMPORT_PRESENT -
5485 read_dp_device_vendor_id(link);
5487 get_active_converter_info(ds_port.byte, link);
5489 dp_wa_power_up_0010FA(link, dpcd_data, sizeof(dpcd_data));
5491 down_strm_port_count.raw = dpcd_data[DP_DOWN_STREAM_PORT_COUNT -
5494 link->dpcd_caps.allow_invalid_MSA_timing_param =
5495 down_strm_port_count.bits.IGNORE_MSA_TIMING_PARAM;
5497 link->dpcd_caps.max_ln_count.raw = dpcd_data[
5498 DP_MAX_LANE_COUNT - DP_DPCD_REV];
5500 link->dpcd_caps.max_down_spread.raw = dpcd_data[
5501 DP_MAX_DOWNSPREAD - DP_DPCD_REV];
5503 link->reported_link_cap.lane_count =
5504 link->dpcd_caps.max_ln_count.bits.MAX_LANE_COUNT;
5505 link->reported_link_cap.link_rate = dpcd_data[
5506 DP_MAX_LINK_RATE - DP_DPCD_REV];
5507 link->reported_link_cap.link_spread =
5508 link->dpcd_caps.max_down_spread.bits.MAX_DOWN_SPREAD ?
5509 LINK_SPREAD_05_DOWNSPREAD_30KHZ : LINK_SPREAD_DISABLED;
5511 edp_config_cap.raw = dpcd_data[
5512 DP_EDP_CONFIGURATION_CAP - DP_DPCD_REV];
5513 link->dpcd_caps.panel_mode_edp =
5514 edp_config_cap.bits.ALT_SCRAMBLER_RESET;
5515 link->dpcd_caps.dpcd_display_control_capable =
5516 edp_config_cap.bits.DPCD_DISPLAY_CONTROL_CAPABLE;
5518 link->test_pattern_enabled = false;
5519 link->compliance_test_state.raw = 0;
5521 /* read sink count */
5522 core_link_read_dpcd(link,
5524 &link->dpcd_caps.sink_count.raw,
5525 sizeof(link->dpcd_caps.sink_count.raw));
5527 /* read sink ieee oui */
5528 core_link_read_dpcd(link,
5530 (uint8_t *)(&sink_id),
5533 link->dpcd_caps.sink_dev_id =
5534 (sink_id.ieee_oui[0] << 16) +
5535 (sink_id.ieee_oui[1] << 8) +
5536 (sink_id.ieee_oui[2]);
5539 link->dpcd_caps.sink_dev_id_str,
5540 sink_id.ieee_device_id,
5541 sizeof(sink_id.ieee_device_id));
5543 /* Quirk Apple MBP 2017 15" Retina panel: Wrong DP_MAX_LINK_RATE */
5545 uint8_t str_mbp_2017[] = { 101, 68, 21, 101, 98, 97 };
5547 if ((link->dpcd_caps.sink_dev_id == 0x0010fa) &&
5548 !memcmp(link->dpcd_caps.sink_dev_id_str, str_mbp_2017,
5549 sizeof(str_mbp_2017))) {
5550 link->reported_link_cap.link_rate = 0x0c;
5554 core_link_read_dpcd(
5556 DP_SINK_HW_REVISION_START,
5557 (uint8_t *)&dp_hw_fw_revision,
5558 sizeof(dp_hw_fw_revision));
5560 link->dpcd_caps.sink_hw_revision =
5561 dp_hw_fw_revision.ieee_hw_rev;
5564 link->dpcd_caps.sink_fw_revision,
5565 dp_hw_fw_revision.ieee_fw_rev,
5566 sizeof(dp_hw_fw_revision.ieee_fw_rev));
5568 memset(&link->dpcd_caps.dsc_caps, '\0',
5569 sizeof(link->dpcd_caps.dsc_caps));
5570 memset(&link->dpcd_caps.fec_cap, '\0', sizeof(link->dpcd_caps.fec_cap));
5571 /* Read DSC and FEC sink capabilities if DP revision is 1.4 and up */
5572 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_14) {
5573 status = core_link_read_dpcd(
5576 &link->dpcd_caps.fec_cap.raw,
5577 sizeof(link->dpcd_caps.fec_cap.raw));
5578 status = core_link_read_dpcd(
5581 link->dpcd_caps.dsc_caps.dsc_basic_caps.raw,
5582 sizeof(link->dpcd_caps.dsc_caps.dsc_basic_caps.raw));
5583 #if defined(CONFIG_DRM_AMD_DC_DCN)
5584 if (link->dpcd_caps.dongle_type != DISPLAY_DONGLE_NONE) {
5585 status = core_link_read_dpcd(
5587 DP_DSC_BRANCH_OVERALL_THROUGHPUT_0,
5588 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw,
5589 sizeof(link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw));
5590 DC_LOG_DSC("DSC branch decoder capability is read at link %d", link->link_index);
5591 DC_LOG_DSC("\tBRANCH_OVERALL_THROUGHPUT_0 = 0x%02x",
5592 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_OVERALL_THROUGHPUT_0);
5593 DC_LOG_DSC("\tBRANCH_OVERALL_THROUGHPUT_1 = 0x%02x",
5594 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_OVERALL_THROUGHPUT_1);
5595 DC_LOG_DSC("\tBRANCH_MAX_LINE_WIDTH 0x%02x",
5596 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_MAX_LINE_WIDTH);
5599 status = core_link_read_dpcd(
5601 DP_DSC_BRANCH_OVERALL_THROUGHPUT_0,
5602 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw,
5603 sizeof(link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw));
5607 if (!dpcd_read_sink_ext_caps(link))
5608 link->dpcd_sink_ext_caps.raw = 0;
5610 #if defined(CONFIG_DRM_AMD_DC_DCN)
5611 link->dpcd_caps.channel_coding_cap.raw = dpcd_data[DP_MAIN_LINK_CHANNEL_CODING_CAP - DP_DPCD_REV];
5613 if (link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED) {
5614 DC_LOG_DP2("128b/132b encoding is supported at link %d", link->link_index);
5616 core_link_read_dpcd(link,
5617 DP_128b_132b_SUPPORTED_LINK_RATES,
5618 &link->dpcd_caps.dp_128b_132b_supported_link_rates.raw,
5619 sizeof(link->dpcd_caps.dp_128b_132b_supported_link_rates.raw));
5620 if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR20)
5621 link->reported_link_cap.link_rate = LINK_RATE_UHBR20;
5622 else if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR13_5)
5623 link->reported_link_cap.link_rate = LINK_RATE_UHBR13_5;
5624 else if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR10)
5625 link->reported_link_cap.link_rate = LINK_RATE_UHBR10;
5627 dm_error("%s: Invalid RX 128b_132b_supported_link_rates\n", __func__);
5628 DC_LOG_DP2("128b/132b supported link rates is read at link %d", link->link_index);
5629 DC_LOG_DP2("\tmax 128b/132b link rate support is %d.%d GHz",
5630 link->reported_link_cap.link_rate / 100,
5631 link->reported_link_cap.link_rate % 100);
5633 core_link_read_dpcd(link,
5634 DP_SINK_VIDEO_FALLBACK_FORMATS,
5635 &link->dpcd_caps.fallback_formats.raw,
5636 sizeof(link->dpcd_caps.fallback_formats.raw));
5637 DC_LOG_DP2("sink video fallback format is read at link %d", link->link_index);
5638 if (link->dpcd_caps.fallback_formats.bits.dp_1920x1080_60Hz_24bpp_support)
5639 DC_LOG_DP2("\t1920x1080@60Hz 24bpp fallback format supported");
5640 if (link->dpcd_caps.fallback_formats.bits.dp_1280x720_60Hz_24bpp_support)
5641 DC_LOG_DP2("\t1280x720@60Hz 24bpp fallback format supported");
5642 if (link->dpcd_caps.fallback_formats.bits.dp_1024x768_60Hz_24bpp_support)
5643 DC_LOG_DP2("\t1024x768@60Hz 24bpp fallback format supported");
5644 if (link->dpcd_caps.fallback_formats.raw == 0) {
5645 DC_LOG_DP2("\tno supported fallback formats, assume 1920x1080@60Hz 24bpp is supported");
5646 link->dpcd_caps.fallback_formats.bits.dp_1920x1080_60Hz_24bpp_support = 1;
5649 core_link_read_dpcd(link,
5650 DP_FEC_CAPABILITY_1,
5651 &link->dpcd_caps.fec_cap1.raw,
5652 sizeof(link->dpcd_caps.fec_cap1.raw));
5653 DC_LOG_DP2("FEC CAPABILITY 1 is read at link %d", link->link_index);
5654 if (link->dpcd_caps.fec_cap1.bits.AGGREGATED_ERROR_COUNTERS_CAPABLE)
5655 DC_LOG_DP2("\tFEC aggregated error counters are supported");
5659 /* Connectivity log: detection */
5660 CONN_DATA_DETECT(link, dpcd_data, sizeof(dpcd_data), "Rx Caps: ");
/*
 * dp_overwrite_extended_receiver_cap() - Re-read the base (non-extended)
 * DPCD receiver capability block over AUX and overwrite the cached caps
 * with it: DPCD revision, downstream-port info, max lane count, max
 * downspread, reported link rate/spread, and the eDP configuration
 * capability bits.  Used when the extended receiver cap field should not
 * be trusted.  Return value not visible in this fragment.
 */
5665 bool dp_overwrite_extended_receiver_cap(struct dc_link *link)
5667 uint8_t dpcd_data[16];
5668 uint32_t read_dpcd_retry_cnt = 3;
5669 enum dc_status status = DC_ERROR_UNEXPECTED;
5670 union dp_downstream_port_present ds_port = { 0 };
5671 union down_stream_port_count down_strm_port_count;
5672 union edp_configuration_cap edp_config_cap;
/* Retry the AUX read of the base cap block up to read_dpcd_retry_cnt times. */
5676 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5677 status = core_link_read_dpcd(
5682 if (status == DC_OK)
5686 link->dpcd_caps.dpcd_rev.raw =
5687 dpcd_data[DP_DPCD_REV - DP_DPCD_REV];
/* A sink can never legitimately report Max Lane Count = 0. */
5689 if (dpcd_data[DP_MAX_LANE_COUNT - DP_DPCD_REV] == 0)
5692 ds_port.byte = dpcd_data[DP_DOWNSTREAMPORT_PRESENT -
5695 get_active_converter_info(ds_port.byte, link);
5697 down_strm_port_count.raw = dpcd_data[DP_DOWN_STREAM_PORT_COUNT -
5700 link->dpcd_caps.allow_invalid_MSA_timing_param =
5701 down_strm_port_count.bits.IGNORE_MSA_TIMING_PARAM;
5703 link->dpcd_caps.max_ln_count.raw = dpcd_data[
5704 DP_MAX_LANE_COUNT - DP_DPCD_REV];
5706 link->dpcd_caps.max_down_spread.raw = dpcd_data[
5707 DP_MAX_DOWNSPREAD - DP_DPCD_REV];
/* Reported link cap is taken straight from the freshly read base caps. */
5709 link->reported_link_cap.lane_count =
5710 link->dpcd_caps.max_ln_count.bits.MAX_LANE_COUNT;
5711 link->reported_link_cap.link_rate = dpcd_data[
5712 DP_MAX_LINK_RATE - DP_DPCD_REV];
5713 link->reported_link_cap.link_spread =
5714 link->dpcd_caps.max_down_spread.bits.MAX_DOWN_SPREAD ?
5715 LINK_SPREAD_05_DOWNSPREAD_30KHZ : LINK_SPREAD_DISABLED;
/* eDP configuration caps: alternate scrambler reset + display control. */
5717 edp_config_cap.raw = dpcd_data[
5718 DP_EDP_CONFIGURATION_CAP - DP_DPCD_REV];
5719 link->dpcd_caps.panel_mode_edp =
5720 edp_config_cap.bits.ALT_SCRAMBLER_RESET;
5721 link->dpcd_caps.dpcd_display_control_capable =
5722 edp_config_cap.bits.DPCD_DISPLAY_CONTROL_CAPABLE;
/*
 * detect_dp_sink_caps() - Read the DP sink's capabilities over AUX.
 * Thin wrapper: all work is done by retrieve_link_cap(), whose status is
 * returned unchanged.
 */
5727 bool detect_dp_sink_caps(struct dc_link *link)
5729 return retrieve_link_cap(link);
/* Historical notes retained below; they sit after the return statement
 * and are therefore unreachable commentary, not executed code.
 */
5731 /* dc init_hw has power encoder using default
5732 * signal for connector. For native DP, no
5733 * need to power up encoder again. If not native
5734 * DP, hw_init may need check signal or power up
5737 /* TODO save sink caps in link->sink */
/*
 * linkRateInKHzToLinkRateMultiplier() - Convert a per-lane link rate in
 * kHz into the dc_link_rate encoding, which expresses the rate as a
 * multiplier of 0.27 Gbps per lane.  Each arm below handles one standard
 * DP rate (see the per-line comments); any unrecognized rate falls
 * through to LINK_RATE_UNKNOWN.
 */
5740 static enum dc_link_rate linkRateInKHzToLinkRateMultiplier(uint32_t link_rate_in_khz)
5742 enum dc_link_rate link_rate;
5743 // LinkRate is normally stored as a multiplier of 0.27 Gbps per lane. Do the translation.
5744 switch (link_rate_in_khz) {
5746 link_rate = LINK_RATE_LOW; // Rate_1 (RBR) - 1.62 Gbps/Lane
5749 link_rate = LINK_RATE_RATE_2; // Rate_2 - 2.16 Gbps/Lane
5752 link_rate = LINK_RATE_RATE_3; // Rate_3 - 2.43 Gbps/Lane
5755 link_rate = LINK_RATE_HIGH; // Rate_4 (HBR) - 2.70 Gbps/Lane
5758 link_rate = LINK_RATE_RBR2; // Rate_5 (RBR2) - 3.24 Gbps/Lane
5761 link_rate = LINK_RATE_RATE_6; // Rate_6 - 4.32 Gbps/Lane
5764 link_rate = LINK_RATE_HIGH2; // Rate_7 (HBR2) - 5.40 Gbps/Lane
5767 link_rate = LINK_RATE_HIGH3; // Rate_8 (HBR3) - 8.10 Gbps/Lane
/* default: rate not in the standard table */
5770 link_rate = LINK_RATE_UNKNOWN;
/*
 * detect_edp_sink_caps() - Read eDP-specific sink capabilities.
 *
 * Refreshes the generic DPCD caps, then, for eDP v1.4+ panels (DPCD rev
 * >= 13h) when link-rate optimization is requested or no max link rate
 * was reported, parses the SUPPORTED_LINK_RATES table (DPCD 00010h-0001Fh,
 * little-endian 16-bit entries in units of 200 kHz) into
 * link->dpcd_caps.edp_supported_link_rates[], raising the reported link
 * rate to the largest table entry.  Finally reads the backlight
 * adjustment capability and programs the default brightness over AUX.
 */
5776 void detect_edp_sink_caps(struct dc_link *link)
5778 uint8_t supported_link_rates[16];
5780 uint32_t link_rate_in_khz;
5781 enum dc_link_rate link_rate = LINK_RATE_UNKNOWN;
5782 uint8_t backlight_adj_cap;
5784 retrieve_link_cap(link);
5785 link->dpcd_caps.edp_supported_link_rates_count = 0;
5786 memset(supported_link_rates, 0, sizeof(supported_link_rates));
5789 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
5790 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
5792 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_13 &&
5793 (link->dc->debug.optimize_edp_link_rate ||
5794 link->reported_link_cap.link_rate == LINK_RATE_UNKNOWN)) {
5795 // Read DPCD 00010h - 0001Fh 16 bytes at one shot
5796 core_link_read_dpcd(link, DP_SUPPORTED_LINK_RATES,
5797 supported_link_rates, sizeof(supported_link_rates));
/* Walk the 8 two-byte table entries; a zero entry is "no rate". */
5799 for (entry = 0; entry < 16; entry += 2) {
5800 // DPCD register reports per-lane link rate = 16-bit link rate capability
5801 // value X 200 kHz. Need multiplier to find link rate in kHz.
5802 link_rate_in_khz = (supported_link_rates[entry+1] * 0x100 +
5803 supported_link_rates[entry]) * 200;
5805 if (link_rate_in_khz != 0) {
5806 link_rate = linkRateInKHzToLinkRateMultiplier(link_rate_in_khz);
5807 link->dpcd_caps.edp_supported_link_rates[link->dpcd_caps.edp_supported_link_rates_count] = link_rate;
5808 link->dpcd_caps.edp_supported_link_rates_count++;
5810 if (link->reported_link_cap.link_rate < link_rate)
5811 link->reported_link_cap.link_rate = link_rate;
/* eDP links are not trained down: verified cap is the reported cap. */
5815 link->verified_link_cap = link->reported_link_cap;
5817 core_link_read_dpcd(link, DP_EDP_BACKLIGHT_ADJUSTMENT_CAP,
5818 &backlight_adj_cap, sizeof(backlight_adj_cap));
5820 link->dpcd_caps.dynamic_backlight_capable_edp =
5821 (backlight_adj_cap & DP_EDP_DYNAMIC_BACKLIGHT_CAP) ? true:false;
5823 dc_link_set_default_brightness_aux(link);
5826 void dc_link_dp_enable_hpd(const struct dc_link *link)
5828 struct link_encoder *encoder = link->link_enc;
5830 if (encoder != NULL && encoder->funcs->enable_hpd != NULL)
5831 encoder->funcs->enable_hpd(encoder);
5834 void dc_link_dp_disable_hpd(const struct dc_link *link)
5836 struct link_encoder *encoder = link->link_enc;
5838 if (encoder != NULL && encoder->funcs->enable_hpd != NULL)
5839 encoder->funcs->disable_hpd(encoder);
5842 static bool is_dp_phy_pattern(enum dp_test_pattern test_pattern)
5844 if ((DP_TEST_PATTERN_PHY_PATTERN_BEGIN <= test_pattern &&
5845 test_pattern <= DP_TEST_PATTERN_PHY_PATTERN_END) ||
5846 test_pattern == DP_TEST_PATTERN_VIDEO_MODE)
/*
 * set_crtc_test_pattern() - Program a CRTC-generated DP test pattern.
 *
 * Maps the requested DP test pattern onto the controller's pattern enum,
 * then programs it through the timing generator's set_test_pattern hook
 * or, when that is absent, through the set_disp_pattern_generator hw
 * sequencer hook (applied to the pipe and every chained ODM pipe, with
 * the generator width split evenly across the ODM segments).  Bar/ramp
 * patterns disable bit-depth reduction first; VIDEO_MODE restores the
 * stream's bit-depth reduction and returns the pipe to live video.
 */
5852 static void set_crtc_test_pattern(struct dc_link *link,
5853 struct pipe_ctx *pipe_ctx,
5854 enum dp_test_pattern test_pattern,
5855 enum dp_test_pattern_color_space test_pattern_color_space)
5857 enum controller_dp_test_pattern controller_test_pattern;
5858 enum dc_color_depth color_depth = pipe_ctx->
5859 stream->timing.display_color_depth;
5860 struct bit_depth_reduction_params params;
5861 struct output_pixel_processor *opp = pipe_ctx->stream_res.opp;
/* Active width/height include the border pixels on both sides. */
5862 int width = pipe_ctx->stream->timing.h_addressable +
5863 pipe_ctx->stream->timing.h_border_left +
5864 pipe_ctx->stream->timing.h_border_right;
5865 int height = pipe_ctx->stream->timing.v_addressable +
5866 pipe_ctx->stream->timing.v_border_bottom +
5867 pipe_ctx->stream->timing.v_border_top;
5869 memset(¶ms, 0, sizeof(params));
/* First switch: translate DP test pattern -> controller pattern enum. */
5871 switch (test_pattern) {
5872 case DP_TEST_PATTERN_COLOR_SQUARES:
5873 controller_test_pattern =
5874 CONTROLLER_DP_TEST_PATTERN_COLORSQUARES;
5876 case DP_TEST_PATTERN_COLOR_SQUARES_CEA:
5877 controller_test_pattern =
5878 CONTROLLER_DP_TEST_PATTERN_COLORSQUARES_CEA;
5880 case DP_TEST_PATTERN_VERTICAL_BARS:
5881 controller_test_pattern =
5882 CONTROLLER_DP_TEST_PATTERN_VERTICALBARS;
5884 case DP_TEST_PATTERN_HORIZONTAL_BARS:
5885 controller_test_pattern =
5886 CONTROLLER_DP_TEST_PATTERN_HORIZONTALBARS;
5888 case DP_TEST_PATTERN_COLOR_RAMP:
5889 controller_test_pattern =
5890 CONTROLLER_DP_TEST_PATTERN_COLORRAMP;
/* default: anything unrecognized falls back to live video. */
5893 controller_test_pattern =
5894 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE;
/* Second switch: actually apply the pattern to the hardware. */
5898 switch (test_pattern) {
5899 case DP_TEST_PATTERN_COLOR_SQUARES:
5900 case DP_TEST_PATTERN_COLOR_SQUARES_CEA:
5901 case DP_TEST_PATTERN_VERTICAL_BARS:
5902 case DP_TEST_PATTERN_HORIZONTAL_BARS:
5903 case DP_TEST_PATTERN_COLOR_RAMP:
5905 /* disable bit depth reduction */
5906 pipe_ctx->stream->bit_depth_params = params;
5907 opp->funcs->opp_program_bit_depth_reduction(opp, ¶ms);
5908 if (pipe_ctx->stream_res.tg->funcs->set_test_pattern)
5909 pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg,
5910 controller_test_pattern, color_depth);
5911 else if (link->dc->hwss.set_disp_pattern_generator) {
5912 struct pipe_ctx *odm_pipe;
5913 enum controller_dp_color_space controller_color_space;
5916 int dpg_width = width;
/* Map the requested DP test color space to the controller enum. */
5918 switch (test_pattern_color_space) {
5919 case DP_TEST_PATTERN_COLOR_SPACE_RGB:
5920 controller_color_space = CONTROLLER_DP_COLOR_SPACE_RGB;
5922 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR601:
5923 controller_color_space = CONTROLLER_DP_COLOR_SPACE_YCBCR601;
5925 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR709:
5926 controller_color_space = CONTROLLER_DP_COLOR_SPACE_YCBCR709;
5928 case DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED:
5930 controller_color_space = CONTROLLER_DP_COLOR_SPACE_UDEFINED;
5931 DC_LOG_ERROR("%s: Color space must be defined for test pattern", __func__);
/* Split generator width evenly across the ODM pipe chain. */
5936 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe)
5938 dpg_width = width / opp_cnt;
5941 link->dc->hwss.set_disp_pattern_generator(link->dc,
5943 controller_test_pattern,
5944 controller_color_space,
/* Program every chained ODM pipe with the same pattern/color space. */
5951 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe) {
5952 struct output_pixel_processor *odm_opp = odm_pipe->stream_res.opp;
5954 odm_opp->funcs->opp_program_bit_depth_reduction(odm_opp, ¶ms);
5955 link->dc->hwss.set_disp_pattern_generator(link->dc,
5957 controller_test_pattern,
5958 controller_color_space,
5969 case DP_TEST_PATTERN_VIDEO_MODE:
5971 /* restore bitdepth reduction */
5972 resource_build_bit_depth_reduction_params(pipe_ctx->stream, ¶ms);
5973 pipe_ctx->stream->bit_depth_params = params;
5974 opp->funcs->opp_program_bit_depth_reduction(opp, ¶ms);
5975 if (pipe_ctx->stream_res.tg->funcs->set_test_pattern)
5976 pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg,
5977 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
5979 else if (link->dc->hwss.set_disp_pattern_generator) {
5980 struct pipe_ctx *odm_pipe;
5982 int dpg_width = width;
5984 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe)
5987 dpg_width = width / opp_cnt;
5988 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe) {
5989 struct output_pixel_processor *odm_opp = odm_pipe->stream_res.opp;
/* Restore normal video on each ODM pipe (undefined color space = live). */
5991 odm_opp->funcs->opp_program_bit_depth_reduction(odm_opp, ¶ms);
5992 link->dc->hwss.set_disp_pattern_generator(link->dc,
5994 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
5995 CONTROLLER_DP_COLOR_SPACE_UDEFINED,
6002 link->dc->hwss.set_disp_pattern_generator(link->dc,
6004 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
6005 CONTROLLER_DP_COLOR_SPACE_UDEFINED,
/*
 * dc_link_dp_set_test_pattern() - Set a DP compliance test pattern on a link.
 * @link: link under test.
 * @test_pattern: requested pattern (PHY pattern, CRTC pattern, or
 *                DP_TEST_PATTERN_VIDEO_MODE to restore normal video).
 * @test_pattern_color_space: color space for CRTC-generated patterns.
 * @p_link_settings: optional lane/link settings to program first.
 * @p_custom_pattern / @cust_pattern_size: payload for custom patterns.
 *
 * Locates the top-level (non-ODM, non-MPO) pipe driving @link, then:
 *  - if a pattern is running and VIDEO_MODE is requested, restores the
 *    CRTC pattern, unblanks the stream and clears test_pattern_enabled;
 *  - for PHY-layer patterns, programs lane settings (with the vendor
 *    specific LTTPR workaround when enabled), blanks the stream, sets the
 *    hardware pattern, and tells the receiver via DPCD - using
 *    LINK_QUAL_LANEx_SET for DPCD rev >= 1.2 or
 *    TRAINING_PATTERN_SET.LINK_QUAL_PATTERN_SET for DP 1.1a and earlier;
 *  - otherwise programs a CRTC pattern in the requested color space while
 *    holding the timing generator / double-buffer (or DMUB HW) lock,
 *    updating the MSA and VSC SDP dynamic-range bit to match.
 * Return value not visible in this fragment.
 */
6020 bool dc_link_dp_set_test_pattern(
6021 struct dc_link *link,
6022 enum dp_test_pattern test_pattern,
6023 enum dp_test_pattern_color_space test_pattern_color_space,
6024 const struct link_training_settings *p_link_settings,
6025 const unsigned char *p_custom_pattern,
6026 unsigned int cust_pattern_size)
6028 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
6029 struct pipe_ctx *pipe_ctx = NULL;
6032 unsigned char link_qual_pattern[LANE_COUNT_DP_MAX] = {0};
6033 union dpcd_training_pattern training_pattern;
6034 enum dpcd_phy_test_patterns pattern;
6036 memset(&training_pattern, 0, sizeof(training_pattern));
/* Find the top-level pipe that drives this link. */
6038 for (i = 0; i < MAX_PIPES; i++) {
6039 if (pipes[i].stream == NULL)
6042 if (pipes[i].stream->link == link && !pipes[i].top_pipe && !pipes[i].prev_odm_pipe) {
6043 pipe_ctx = &pipes[i];
6048 if (pipe_ctx == NULL)
6051 /* Reset CRTC Test Pattern if it is currently running and request is VideoMode */
6052 if (link->test_pattern_enabled && test_pattern ==
6053 DP_TEST_PATTERN_VIDEO_MODE) {
6054 /* Set CRTC Test Pattern */
6055 set_crtc_test_pattern(link, pipe_ctx, test_pattern, test_pattern_color_space);
6056 dp_set_hw_test_pattern(link, test_pattern,
6057 (uint8_t *)p_custom_pattern,
6058 (uint32_t)cust_pattern_size);
6060 /* Unblank Stream */
6061 link->dc->hwss.unblank_stream(
6063 &link->verified_link_cap);
6064 /* TODO:m_pHwss->MuteAudioEndpoint
6065 * (pPathMode->pDisplayPath, false);
6068 /* Reset Test Pattern state */
6069 link->test_pattern_enabled = false;
6074 /* Check for PHY Test Patterns */
6075 if (is_dp_phy_pattern(test_pattern)) {
6076 /* Set DPCD Lane Settings before running test pattern */
6077 if (p_link_settings != NULL) {
/* Fixed-VS retimer on transparent LTTPR needs the vendor-specific
 * lane-setting sequence instead of the normal HW+DPCD programming. */
6078 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
6079 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
6080 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
6081 dpcd_set_lane_settings(link, p_link_settings, DPRX);
6082 vendor_specific_lttpr_wa_five(
6084 p_link_settings->dpcd_lane_settings,
6085 p_link_settings->link_settings.lane_count);
6087 dp_set_hw_lane_settings(link, p_link_settings, DPRX);
6088 dpcd_set_lane_settings(link, p_link_settings, DPRX);
6092 /* Blank stream if running test pattern */
6093 if (test_pattern != DP_TEST_PATTERN_VIDEO_MODE) {
6096 * MuteAudioEndpoint(pPathMode->pDisplayPath, true);
6099 pipes->stream_res.stream_enc->funcs->dp_blank(link, pipe_ctx->stream_res.stream_enc);
6102 dp_set_hw_test_pattern(link, test_pattern,
6103 (uint8_t *)p_custom_pattern,
6104 (uint32_t)cust_pattern_size);
6106 if (test_pattern != DP_TEST_PATTERN_VIDEO_MODE) {
6107 /* Set Test Pattern state */
6108 link->test_pattern_enabled = true;
6109 if (p_link_settings != NULL)
6110 dpcd_set_link_settings(link,
/* Translate the DP test pattern into the DPCD PHY pattern code. */
6114 switch (test_pattern) {
6115 case DP_TEST_PATTERN_VIDEO_MODE:
6116 pattern = PHY_TEST_PATTERN_NONE;
6118 case DP_TEST_PATTERN_D102:
6119 pattern = PHY_TEST_PATTERN_D10_2;
6121 case DP_TEST_PATTERN_SYMBOL_ERROR:
6122 pattern = PHY_TEST_PATTERN_SYMBOL_ERROR;
6124 case DP_TEST_PATTERN_PRBS7:
6125 pattern = PHY_TEST_PATTERN_PRBS7;
6127 case DP_TEST_PATTERN_80BIT_CUSTOM:
6128 pattern = PHY_TEST_PATTERN_80BIT_CUSTOM;
6130 case DP_TEST_PATTERN_CP2520_1:
6131 pattern = PHY_TEST_PATTERN_CP2520_1;
6133 case DP_TEST_PATTERN_CP2520_2:
6134 pattern = PHY_TEST_PATTERN_CP2520_2;
6136 case DP_TEST_PATTERN_CP2520_3:
6137 pattern = PHY_TEST_PATTERN_CP2520_3;
6139 #if defined(CONFIG_DRM_AMD_DC_DCN)
6140 case DP_TEST_PATTERN_128b_132b_TPS1:
6141 pattern = PHY_TEST_PATTERN_128b_132b_TPS1;
6143 case DP_TEST_PATTERN_128b_132b_TPS2:
6144 pattern = PHY_TEST_PATTERN_128b_132b_TPS2;
6146 case DP_TEST_PATTERN_PRBS9:
6147 pattern = PHY_TEST_PATTERN_PRBS9;
6149 case DP_TEST_PATTERN_PRBS11:
6150 pattern = PHY_TEST_PATTERN_PRBS11;
6152 case DP_TEST_PATTERN_PRBS15:
6153 pattern = PHY_TEST_PATTERN_PRBS15;
6155 case DP_TEST_PATTERN_PRBS23:
6156 pattern = PHY_TEST_PATTERN_PRBS23;
6158 case DP_TEST_PATTERN_PRBS31:
6159 pattern = PHY_TEST_PATTERN_PRBS31;
6161 case DP_TEST_PATTERN_264BIT_CUSTOM:
6162 pattern = PHY_TEST_PATTERN_264BIT_CUSTOM;
6164 case DP_TEST_PATTERN_SQUARE_PULSE:
6165 pattern = PHY_TEST_PATTERN_SQUARE_PULSE;
6172 if (test_pattern == DP_TEST_PATTERN_VIDEO_MODE
6173 /*TODO:&& !pPathMode->pDisplayPath->IsTargetPoweredOn()*/)
6176 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
6177 #if defined(CONFIG_DRM_AMD_DC_DCN)
6178 if (test_pattern == DP_TEST_PATTERN_SQUARE_PULSE)
6179 core_link_write_dpcd(link,
6180 DP_LINK_SQUARE_PATTERN,
6185 /* tell receiver that we are sending qualification
6186 * pattern DP 1.2 or later - DP receiver's link quality
6187 * pattern is set using DPCD LINK_QUAL_LANEx_SET
6188 * register (0x10B~0x10E)\
6190 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++)
6191 link_qual_pattern[lane] =
6192 (unsigned char)(pattern);
6194 core_link_write_dpcd(link,
6195 DP_LINK_QUAL_LANE0_SET,
6197 sizeof(link_qual_pattern));
6198 } else if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_10 ||
6199 link->dpcd_caps.dpcd_rev.raw == 0) {
6200 /* tell receiver that we are sending qualification
6201 * pattern DP 1.1a or earlier - DP receiver's link
6202 * quality pattern is set using
6203 * DPCD TRAINING_PATTERN_SET -> LINK_QUAL_PATTERN_SET
6204 * register (0x102). We will use v_1.3 when we are
6205 * setting test pattern for DP 1.1.
6207 core_link_read_dpcd(link, DP_TRAINING_PATTERN_SET,
6208 &training_pattern.raw,
6209 sizeof(training_pattern));
6210 training_pattern.v1_3.LINK_QUAL_PATTERN_SET = pattern;
6211 core_link_write_dpcd(link, DP_TRAINING_PATTERN_SET,
6212 &training_pattern.raw,
6213 sizeof(training_pattern));
/* else: CRTC-generated pattern path. */
6216 enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
6218 switch (test_pattern_color_space) {
6219 case DP_TEST_PATTERN_COLOR_SPACE_RGB:
6220 color_space = COLOR_SPACE_SRGB;
6221 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
6222 color_space = COLOR_SPACE_SRGB_LIMITED;
6225 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR601:
6226 color_space = COLOR_SPACE_YCBCR601;
6227 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
6228 color_space = COLOR_SPACE_YCBCR601_LIMITED;
6230 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR709:
6231 color_space = COLOR_SPACE_YCBCR709;
6232 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
6233 color_space = COLOR_SPACE_YCBCR709_LIMITED;
/* Take the double-buffer lock - via DMUB HW lock manager when in use. */
6239 if (pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_enable) {
6240 if (pipe_ctx->stream && should_use_dmub_lock(pipe_ctx->stream->link)) {
6241 union dmub_hw_lock_flags hw_locks = { 0 };
6242 struct dmub_hw_lock_inst_flags inst_flags = { 0 };
6244 hw_locks.bits.lock_dig = 1;
6245 inst_flags.dig_inst = pipe_ctx->stream_res.tg->inst;
6247 dmub_hw_lock_mgr_cmd(link->ctx->dmub_srv,
6252 pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_enable(
6253 pipe_ctx->stream_res.tg);
6256 pipe_ctx->stream_res.tg->funcs->lock(pipe_ctx->stream_res.tg);
6257 /* update MSA to requested color space */
6258 pipe_ctx->stream_res.stream_enc->funcs->dp_set_stream_attribute(pipe_ctx->stream_res.stream_enc,
6259 &pipe_ctx->stream->timing,
6261 pipe_ctx->stream->use_vsc_sdp_for_colorimetry,
6262 link->dpcd_caps.dprx_feature.bits.SST_SPLIT_SDP_CAP);
6264 if (pipe_ctx->stream->use_vsc_sdp_for_colorimetry) {
6265 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
6266 pipe_ctx->stream->vsc_infopacket.sb[17] |= (1 << 7); // sb17 bit 7 Dynamic Range: 0 = VESA range, 1 = CTA range
6268 pipe_ctx->stream->vsc_infopacket.sb[17] &= ~(1 << 7);
6269 resource_build_info_frame(pipe_ctx);
6270 link->dc->hwss.update_info_frame(pipe_ctx);
6274 set_crtc_test_pattern(link, pipe_ctx, test_pattern, test_pattern_color_space);
/* Unlock and wait for VACTIVE transitions so the update fully latches. */
6275 pipe_ctx->stream_res.tg->funcs->unlock(pipe_ctx->stream_res.tg);
6276 pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
6277 CRTC_STATE_VACTIVE);
6278 pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
6280 pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
6281 CRTC_STATE_VACTIVE);
6283 if (pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_disable) {
6284 if (pipe_ctx->stream && should_use_dmub_lock(pipe_ctx->stream->link)) {
6285 union dmub_hw_lock_flags hw_locks = { 0 };
6286 struct dmub_hw_lock_inst_flags inst_flags = { 0 };
6288 hw_locks.bits.lock_dig = 1;
6289 inst_flags.dig_inst = pipe_ctx->stream_res.tg->inst;
6291 dmub_hw_lock_mgr_cmd(link->ctx->dmub_srv,
6296 pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_disable(
6297 pipe_ctx->stream_res.tg);
6300 /* Set Test Pattern state */
6301 link->test_pattern_enabled = true;
6307 void dp_enable_mst_on_sink(struct dc_link *link, bool enable)
6309 unsigned char mstmCntl;
6311 core_link_read_dpcd(link, DP_MSTM_CTRL, &mstmCntl, 1);
6313 mstmCntl |= DP_MST_EN;
6315 mstmCntl &= (~DP_MST_EN);
6317 core_link_write_dpcd(link, DP_MSTM_CTRL, &mstmCntl, 1);
/*
 * dp_set_panel_mode() - Program the eDP panel-mode bit in the receiver.
 *
 * Converts @panel_mode to the PANEL_MODE_EDP boolean (eDP and "special"
 * modes select the alternate scrambler reset), reads
 * DP_EDP_CONFIGURATION_SET over AUX and writes it back only when the bit
 * actually needs to change, asserting on AUX write failure.
 */
6320 void dp_set_panel_mode(struct dc_link *link, enum dp_panel_mode panel_mode)
6322 union dpcd_edp_config edp_config_set;
6323 bool panel_mode_edp = false;
6325 memset(&edp_config_set, '\0', sizeof(union dpcd_edp_config));
6327 if (panel_mode != DP_PANEL_MODE_DEFAULT) {
6329 switch (panel_mode) {
6330 case DP_PANEL_MODE_EDP:
6331 case DP_PANEL_MODE_SPECIAL:
6332 panel_mode_edp = true;
6339 /*set edp panel mode in receiver*/
6340 core_link_read_dpcd(
6342 DP_EDP_CONFIGURATION_SET,
6343 &edp_config_set.raw,
6344 sizeof(edp_config_set.raw));
/* Write back only on an actual change of the panel-mode bit. */
6346 if (edp_config_set.bits.PANEL_MODE_EDP
6347 != panel_mode_edp) {
6348 enum dc_status result;
6350 edp_config_set.bits.PANEL_MODE_EDP =
6352 result = core_link_write_dpcd(
6354 DP_EDP_CONFIGURATION_SET,
6355 &edp_config_set.raw,
6356 sizeof(edp_config_set.raw));
6358 ASSERT(result == DC_OK);
6361 DC_LOG_DETECTION_DP_CAPS("Link: %d eDP panel mode supported: %d "
6362 "eDP panel mode enabled: %d \n",
6364 link->dpcd_caps.panel_mode_edp,
/*
 * dp_get_panel_mode() - Decide which panel mode this link should use.
 *
 * Travis VGA/LVDS converters (branch device IDs 0x0022B9 / 0x00001A with
 * a matching branch device name) need DP_PANEL_MODE_SPECIAL, even though
 * the VBIOS may report the connector as plain DP.  Otherwise, panels
 * that advertise the alternate-scrambler (eDP) capability and are either
 * an eDP connector or an internal DP display get DP_PANEL_MODE_EDP.
 * Everything else falls back to DP_PANEL_MODE_DEFAULT.
 */
6368 enum dp_panel_mode dp_get_panel_mode(struct dc_link *link)
6370 /* We need to explicitly check that connector
6371 * is not DP. Some Travis_VGA get reported
6372 * by video bios as DP.
6374 if (link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT) {
6376 switch (link->dpcd_caps.branch_dev_id) {
6377 case DP_BRANCH_DEVICE_ID_0022B9:
6378 /* alternate scrambler reset is required for Travis
6379 * for the case when external chip does not
6380 * provide sink device id, alternate scrambler
6381 * scheme will be overriden later by querying
6385 link->dpcd_caps.branch_dev_name,
6386 DP_VGA_LVDS_CONVERTER_ID_2,
6389 branch_dev_name)) == 0) {
6390 return DP_PANEL_MODE_SPECIAL;
6393 case DP_BRANCH_DEVICE_ID_00001A:
6394 /* alternate scrambler reset is required for Travis
6395 * for the case when external chip does not provide
6396 * sink device id, alternate scrambler scheme will
6397 * be overriden later by querying Encoder feature
6399 if (strncmp(link->dpcd_caps.branch_dev_name,
6400 DP_VGA_LVDS_CONVERTER_ID_3,
6403 branch_dev_name)) == 0) {
6404 return DP_PANEL_MODE_SPECIAL;
/* eDP panel mode: capability reported and connector is eDP or an
 * internal DP display. */
6412 if (link->dpcd_caps.panel_mode_edp &&
6413 (link->connector_signal == SIGNAL_TYPE_EDP ||
6414 (link->connector_signal == SIGNAL_TYPE_DISPLAY_PORT &&
6415 link->is_internal_display))) {
6416 return DP_PANEL_MODE_EDP;
6419 return DP_PANEL_MODE_DEFAULT;
/*
 * dp_set_fec_ready() - Arm or disarm FEC on the sink and link encoder.
 *
 * FEC must be "set ready" before link training; the policy is to train
 * with FEC whenever the sink is capable and leave it enabled.  When
 * arming, DP_FEC_CONFIGURATION is written first, and the encoder is
 * armed only if that AUX write succeeds; on failure both are left
 * not-ready.  When disarming (and currently ready), both the sink
 * register and the encoder are cleared.  The link encoder is resolved
 * from either its static assignment or the dynamic DIG mapping.
 * Returns the status of the last DPCD write (DC_OK if none was needed).
 */
6422 enum dc_status dp_set_fec_ready(struct dc_link *link, bool ready)
6424 /* FEC has to be "set ready" before the link training.
6425 * The policy is to always train with FEC
6426 * if the sink supports it and leave it enabled on link.
6427 * If FEC is not supported, disable it.
6429 struct link_encoder *link_enc = NULL;
6430 enum dc_status status = DC_OK;
6431 uint8_t fec_config = 0;
6433 /* Access link encoder based on whether it is statically
6434 * or dynamically assigned to a link.
6436 if (link->is_dig_mapping_flexible &&
6437 link->dc->res_pool->funcs->link_encs_assign)
6438 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link)
6440 link_enc = link->link_enc;
/* Skip links on which FEC should not be enabled at all. */
6443 if (!dc_link_should_enable_fec(link))
6446 if (link_enc->funcs->fec_set_ready &&
6447 link->dpcd_caps.fec_cap.bits.FEC_CAPABLE) {
/* Arm: sink DPCD first, encoder only after a successful write. */
6450 status = core_link_write_dpcd(link,
6451 DP_FEC_CONFIGURATION,
6453 sizeof(fec_config));
6454 if (status == DC_OK) {
6455 link_enc->funcs->fec_set_ready(link_enc, true);
6456 link->fec_state = dc_link_fec_ready;
6458 link_enc->funcs->fec_set_ready(link_enc, false);
6459 link->fec_state = dc_link_fec_not_ready;
6460 dm_error("dpcd write failed to set fec_ready");
6462 } else if (link->fec_state == dc_link_fec_ready) {
/* Disarm: clear the sink register, then the encoder. */
6464 status = core_link_write_dpcd(link,
6465 DP_FEC_CONFIGURATION,
6467 sizeof(fec_config));
6468 link_enc->funcs->fec_set_ready(link_enc, false);
6469 link->fec_state = dc_link_fec_not_ready;
/*
 * dp_set_fec_enable() - Toggle FEC on the link encoder.
 *
 * Only acts when the encoder hook and the sink's FEC capability are both
 * present, and only on the ready->enabled (enable) or enabled->ready
 * (disable) transitions, updating link->fec_state accordingly.  Per the
 * DP spec the FEC enable sequence may start once 1000 LL codes have been
 * sent after training (at most 6.173 us at 1-lane RBR), hence the short
 * delay described in the inline comment before enabling.
 */
6476 void dp_set_fec_enable(struct dc_link *link, bool enable)
6478 struct link_encoder *link_enc = NULL;
6480 /* Access link encoder based on whether it is statically
6481 * or dynamically assigned to a link.
6483 if (link->is_dig_mapping_flexible &&
6484 link->dc->res_pool->funcs->link_encs_assign)
6485 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link)
6487 link_enc = link->link_enc;
6490 if (!dc_link_should_enable_fec(link))
6493 if (link_enc->funcs->fec_set_enable &&
6494 link->dpcd_caps.fec_cap.bits.FEC_CAPABLE) {
6495 if (link->fec_state == dc_link_fec_ready && enable) {
6496 /* Accord to DP spec, FEC enable sequence can first
6497 * be transmitted anytime after 1000 LL codes have
6498 * been transmitted on the link after link training
6499 * completion. Using 1 lane RBR should have the maximum
6500 * time for transmitting 1000 LL codes which is 6.173 us.
6501 * So use 7 microseconds delay instead.
6504 link_enc->funcs->fec_set_enable(link_enc, true);
6505 link->fec_state = dc_link_fec_enabled;
6506 } else if (link->fec_state == dc_link_fec_enabled && !enable) {
6507 link_enc->funcs->fec_set_enable(link_enc, false);
6508 link->fec_state = dc_link_fec_ready;
6513 struct link_encoder *dp_get_link_enc(struct dc_link *link)
6515 struct link_encoder *link_enc;
6517 link_enc = link->link_enc;
6518 if (link->is_dig_mapping_flexible &&
6519 link->dc->res_pool->funcs->link_encs_assign) {
6520 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc,
6522 if (!link->link_enc)
6523 link_enc = link_enc_cfg_get_next_avail_link_enc(
/*
 * dpcd_set_source_specific_data() - Write AMD source-specific DPCD data.
 *
 * When the DC has no valid vendor signature: writes the AMD IEEE OUI
 * (00-00-1A) to DP_SOURCE_OUI if the sink does not already report it,
 * follows it with the device-id/DCE-version block at DP_SOURCE_OUI+3,
 * and, on DCN 2.0+ hardware with a nonzero minimum horizontal blanking
 * period, writes that minimum to DPCD 00340h - permanently disabling the
 * field for sinks that reject the write.  When a vendor signature is
 * valid, it is written verbatim to DP_SOURCE_OUI instead.
 */
6530 void dpcd_set_source_specific_data(struct dc_link *link)
6532 if (!link->dc->vendor_signature.is_valid) {
6533 enum dc_status __maybe_unused result_write_min_hblank = DC_NOT_SUPPORTED;
6534 struct dpcd_amd_signature amd_signature = {0};
6535 struct dpcd_amd_device_id amd_device_id = {0};
/* Pack chip id (little-endian bytes) and DCE version for the sink. */
6537 amd_device_id.device_id_byte1 =
6538 (uint8_t)(link->ctx->asic_id.chip_id);
6539 amd_device_id.device_id_byte2 =
6540 (uint8_t)(link->ctx->asic_id.chip_id >> 8);
6541 amd_device_id.dce_version =
6542 (uint8_t)(link->ctx->dce_version);
6543 amd_device_id.dal_version_byte1 = 0x0; // needed? where to get?
6544 amd_device_id.dal_version_byte2 = 0x0; // needed? where to get?
6546 core_link_read_dpcd(link, DP_SOURCE_OUI,
6547 (uint8_t *)(&amd_signature),
6548 sizeof(amd_signature));
/* Only (re)write the OUI if the sink does not already carry 00-00-1A. */
6550 if (!((amd_signature.AMD_IEEE_TxSignature_byte1 == 0x0) &&
6551 (amd_signature.AMD_IEEE_TxSignature_byte2 == 0x0) &&
6552 (amd_signature.AMD_IEEE_TxSignature_byte3 == 0x1A))) {
6554 amd_signature.AMD_IEEE_TxSignature_byte1 = 0x0;
6555 amd_signature.AMD_IEEE_TxSignature_byte2 = 0x0;
6556 amd_signature.AMD_IEEE_TxSignature_byte3 = 0x1A;
6558 core_link_write_dpcd(link, DP_SOURCE_OUI,
6559 (uint8_t *)(&amd_signature),
6560 sizeof(amd_signature));
6563 core_link_write_dpcd(link, DP_SOURCE_OUI+0x03,
6564 (uint8_t *)(&amd_device_id),
6565 sizeof(amd_device_id));
6567 if (link->ctx->dce_version >= DCN_VERSION_2_0 &&
6568 link->dc->caps.min_horizontal_blanking_period != 0) {
6570 uint8_t hblank_size = (uint8_t)link->dc->caps.min_horizontal_blanking_period;
6572 if (link->preferred_link_setting.dpcd_source_device_specific_field_support) {
6573 result_write_min_hblank = core_link_write_dpcd(link,
6574 DP_SOURCE_MINIMUM_HBLANK_SUPPORTED, (uint8_t *)(&hblank_size),
6575 sizeof(hblank_size));
/* Remember sinks that reject 00340h so we never retry the write. */
6577 if (result_write_min_hblank == DC_ERROR_UNEXPECTED)
6578 link->preferred_link_setting.dpcd_source_device_specific_field_support = false;
6580 DC_LOG_DC("Sink device does not support 00340h DPCD write. Skipping on purpose.\n");
6584 DC_TRACE_LEVEL_MESSAGE(DAL_TRACE_LEVEL_INFORMATION,
6585 WPP_BIT_FLAG_DC_DETECTION_DP_CAPS,
6586 "result=%u link_index=%u enum dce_version=%d DPCD=0x%04X min_hblank=%u branch_dev_id=0x%x branch_dev_name='%c%c%c%c%c%c'",
6587 result_write_min_hblank,
6589 link->ctx->dce_version,
6590 DP_SOURCE_MINIMUM_HBLANK_SUPPORTED,
6591 link->dc->caps.min_horizontal_blanking_period,
6592 link->dpcd_caps.branch_dev_id,
6593 link->dpcd_caps.branch_dev_name[0],
6594 link->dpcd_caps.branch_dev_name[1],
6595 link->dpcd_caps.branch_dev_name[2],
6596 link->dpcd_caps.branch_dev_name[3],
6597 link->dpcd_caps.branch_dev_name[4],
6598 link->dpcd_caps.branch_dev_name[5]);
/* else: caller-provided vendor signature wins, written verbatim. */
6600 core_link_write_dpcd(link, DP_SOURCE_OUI,
6601 link->dc->vendor_signature.data.raw,
6602 sizeof(link->dc->vendor_signature.data.raw));
/*
 * Program the panel backlight level (in millinits) over the AUX channel.
 *
 * Writes the level and transition time to the AMD source-specific
 * DP_SOURCE_BACKLIGHT_LEVEL registers, then the control mode to
 * DP_SOURCE_BACKLIGHT_CONTROL. Only valid for eDP / DP connectors.
 *
 * NOTE(review): this view is line-sampled; the isHDR parameter line and
 * the return statements are elided, so code is left byte-identical.
 */
6606 bool dc_link_set_backlight_level_nits(struct dc_link *link,
6608 uint32_t backlight_millinits,
6609 uint32_t transition_time_in_ms)
6611 struct dpcd_source_backlight_set dpcd_backlight_set;
/* 1 = AUX control, 0 = PWM control (per the isHDR request). */
6612 uint8_t backlight_control = isHDR ? 1 : 0;
/* Reject NULL links and non-DP connector signals. */
6614 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6615 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6618 // OLEDs have no PWM, they can only use AUX
6619 if (link->dpcd_sink_ext_caps.bits.oled == 1)
6620 backlight_control = 1;
/* Pack the level and transition time into the DPCD write payload. */
6622 *(uint32_t *)&dpcd_backlight_set.backlight_level_millinits = backlight_millinits;
6623 *(uint16_t *)&dpcd_backlight_set.backlight_transition_time_ms = (uint16_t)transition_time_in_ms;
6626 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_LEVEL,
6627 (uint8_t *)(&dpcd_backlight_set),
6628 sizeof(dpcd_backlight_set)) != DC_OK)
6631 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_CONTROL,
6632 &backlight_control, 1) != DC_OK)
/*
 * Read back the current backlight level (average and peak, in millinits)
 * over AUX from the AMD source-specific DP_SOURCE_BACKLIGHT_CURRENT_PEAK
 * registers. Only valid for eDP / DP connectors.
 *
 * NOTE(review): return statements are elided in this line-sampled view;
 * code is kept byte-identical.
 */
6638 bool dc_link_get_backlight_level_nits(struct dc_link *link,
6639 uint32_t *backlight_millinits_avg,
6640 uint32_t *backlight_millinits_peak)
6642 union dpcd_source_backlight_get dpcd_backlight_get;
6644 memset(&dpcd_backlight_get, 0, sizeof(union dpcd_source_backlight_get));
/* Reject NULL links and non-DP connector signals. */
6646 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6647 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6650 if (core_link_read_dpcd(link, DP_SOURCE_BACKLIGHT_CURRENT_PEAK,
6651 dpcd_backlight_get.raw,
6652 sizeof(union dpcd_source_backlight_get)) != DC_OK)
6655 *backlight_millinits_avg =
6656 dpcd_backlight_get.bytes.backlight_millinits_avg;
6657 *backlight_millinits_peak =
6658 dpcd_backlight_get.bytes.backlight_millinits_peak;
6660 /* On non-supported panels dpcd_read usually succeeds with 0 returned */
/* A zero average or avg > peak indicates a bogus readback. */
6661 if (*backlight_millinits_avg == 0 ||
6662 *backlight_millinits_avg > *backlight_millinits_peak)
6668 bool dc_link_backlight_enable_aux(struct dc_link *link, bool enable)
6670 uint8_t backlight_enable = enable ? 1 : 0;
6672 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6673 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6676 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_ENABLE,
6677 &backlight_enable, 1) != DC_OK)
6683 // we read default from 0x320 because we expect BIOS wrote it there
6684 // regular get_backlight_nit reads from panel set at 0x326
/*
 * Read the BIOS-programmed default backlight level (millinits) from
 * DP_SOURCE_BACKLIGHT_LEVEL. Only valid for eDP / DP connectors.
 * NOTE(review): return statements are elided in this line-sampled view.
 */
6685 bool dc_link_read_default_bl_aux(struct dc_link *link, uint32_t *backlight_millinits)
6687 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6688 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6691 if (core_link_read_dpcd(link, DP_SOURCE_BACKLIGHT_LEVEL,
6692 (uint8_t *) backlight_millinits,
6693 sizeof(uint32_t)) != DC_OK)
/*
 * Restore the OLED panel's default brightness over AUX.
 *
 * Reads the BIOS-programmed default; if the read fails or the value is
 * outside a sane range (< 5 nits or > 5000 nits), falls back to 150 nits
 * (150000 millinits), then applies it via
 * dc_link_set_backlight_level_nits() in HDR/AUX mode with no transition.
 */
6699 bool dc_link_set_default_brightness_aux(struct dc_link *link)
6701 uint32_t default_backlight;
/* Only OLED panels (AUX-controlled backlight) take this path. */
6703 if (link && link->dpcd_sink_ext_caps.bits.oled == 1) {
6704 if (!dc_link_read_default_bl_aux(link, &default_backlight))
6705 default_backlight = 150000;
6706 // if < 5 nits or > 5000, it might be wrong readback
6707 if (default_backlight < 5000 || default_backlight > 5000000)
6708 default_backlight = 150000; //
6710 return dc_link_set_backlight_level_nits(link, true,
6711 default_backlight, 0);
/*
 * Decide whether the eDP link should be retrained using an Intermediate
 * Link Rate (ILR) instead of the settings VBIOS programmed.
 *
 * Compares the link-rate/lane-count currently set in DPCD against the
 * optimal settings DC would pick for the given timing's bandwidth.
 * Requires the panel to report supported ILRs and the
 * optimize_edp_link_rate debug option to be enabled.
 *
 * NOTE(review): several branches and return statements are elided in
 * this line-sampled view; code is kept byte-identical.
 */
6716 bool is_edp_ilr_optimization_required(struct dc_link *link, struct dc_crtc_timing *crtc_timing)
6718 struct dc_link_settings link_setting;
6719 uint8_t link_bw_set;
6720 uint8_t link_rate_set;
6722 union lane_count_set lane_count_set = {0};
6724 ASSERT(link || crtc_timing); // invalid input
/* Bail out when the panel reports no ILRs or the feature is disabled. */
6726 if (link->dpcd_caps.edp_supported_link_rates_count == 0 ||
6727 !link->dc->debug.optimize_edp_link_rate)
6731 // Read DPCD 00100h to find if standard link rates are set
6732 core_link_read_dpcd(link, DP_LINK_BW_SET,
6733 &link_bw_set, sizeof(link_bw_set));
/* A non-zero LINK_BW_SET means VBIOS used a standard rate, not an ILR. */
6736 DC_LOG_EVENT_LINK_TRAINING("eDP ILR: Optimization required, VBIOS used link_bw_set\n");
6740 // Read DPCD 00115h to find the edp link rate set used
6741 core_link_read_dpcd(link, DP_LINK_RATE_SET,
6742 &link_rate_set, sizeof(link_rate_set));
6744 // Read DPCD 00101h to find out the number of lanes currently set
6745 core_link_read_dpcd(link, DP_LANE_COUNT_SET,
6746 &lane_count_set.raw, sizeof(lane_count_set));
/* Compute the optimal settings for this timing's bandwidth demand. */
6748 req_bw = dc_bandwidth_in_kbps_from_timing(crtc_timing);
6750 if (!crtc_timing->flags.DSC)
6751 decide_edp_link_settings(link, &link_setting, req_bw);
6753 decide_edp_link_settings_with_dsc(link, &link_setting, req_bw, LINK_RATE_UNKNOWN);
/* Retrain only when DPCD's current rate/lane count differ from optimal. */
6755 if (link->dpcd_caps.edp_supported_link_rates[link_rate_set] != link_setting.link_rate ||
6756 lane_count_set.bits.LANE_COUNT_SET != link_setting.lane_count) {
6757 DC_LOG_EVENT_LINK_TRAINING("eDP ILR: Optimization required, VBIOS link_rate_set not optimal\n");
6761 DC_LOG_EVENT_LINK_TRAINING("eDP ILR: No optimization required, VBIOS set optimal link_rate_set\n");
6765 enum dp_link_encoding dp_get_link_encoding_format(const struct dc_link_settings *link_settings)
6767 if ((link_settings->link_rate >= LINK_RATE_LOW) &&
6768 (link_settings->link_rate <= LINK_RATE_HIGH3))
6769 return DP_8b_10b_ENCODING;
6770 #if defined(CONFIG_DRM_AMD_DC_DCN)
6771 else if ((link_settings->link_rate >= LINK_RATE_UHBR10) &&
6772 (link_settings->link_rate <= LINK_RATE_UHBR20))
6773 return DP_128b_132b_ENCODING;
6775 return DP_UNKNOWN_ENCODING;
6778 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * Determine the DP channel coding an MST link will use.
 *
 * Uses the caller-preferred link settings when both lane count and link
 * rate are explicitly set; otherwise lets decide_mst_link_settings()
 * pick them. Non-DP connector signals report unknown encoding.
 */
6779 enum dp_link_encoding dc_link_dp_mst_decide_link_encoding_format(const struct dc_link *link)
6781 struct dc_link_settings link_settings = {0};
6783 if (!dc_is_dp_signal(link->connector_signal))
6784 return DP_UNKNOWN_ENCODING;
/* Honor a fully-specified preferred setting; fall back to MST policy. */
6786 if (link->preferred_link_setting.lane_count !=
6787 LANE_COUNT_UNKNOWN &&
6788 link->preferred_link_setting.link_rate !=
6789 LINK_RATE_UNKNOWN) {
6790 link_settings = link->preferred_link_setting;
6792 decide_mst_link_settings(link, &link_settings);
6795 return dp_get_link_encoding_format(&link_settings);
6798 // TODO - DP2.0 Link: Fix get_lane_status to handle LTTPR offset (SST and MST)
/*
 * Read per-lane training status plus the alignment/updated byte from the
 * sink into @status (one entry per lane) and @status_updated.
 *
 * NOTE(review): the DPCD address/argument lines of the read are elided
 * in this line-sampled view; code is kept byte-identical.
 */
6799 static void get_lane_status(
6800 struct dc_link *link,
6801 uint32_t lane_count,
6802 union lane_status *status,
6803 union lane_align_status_updated *status_updated)
/* Three DPCD bytes: two packed lane-status bytes + one aligned/updated byte. */
6806 uint8_t dpcd_buf[3] = {0};
6808 if (status == NULL || status_updated == NULL) {
6812 core_link_read_dpcd(
/* Each lane's status occupies one nibble of the first two bytes. */
6818 for (lane = 0; lane < lane_count; lane++) {
6819 status[lane].raw = get_nibble_at_index(&dpcd_buf[0], lane);
6822 status_updated->raw = dpcd_buf[2];
/*
 * Program the sink's 128b/132b SST payload allocation table over DPCD.
 *
 * Computes the slot count from the stream's average time slots per MTP,
 * writes the allocation registers (DPCD 1C0h-1C2h), then polls
 * DP_PAYLOAD_TABLE_UPDATE_STATUS (2C0h) until the sink reports the table
 * updated, for up to 30 retries. On success the proposed table is filled
 * with the single SST stream allocation.
 *
 * NOTE(review): several argument lines, braces and delay/return lines
 * are elided in this line-sampled view; code is kept byte-identical.
 */
6825 bool dpcd_write_128b_132b_sst_payload_allocation_table(
6826 const struct dc_stream_state *stream,
6827 struct dc_link *link,
6828 struct link_mst_stream_allocation_table *proposed_table,
6831 const uint8_t vc_id = 1; /// VC ID always 1 for SST
6832 const uint8_t start_time_slot = 0; /// Always start at time slot 0 for SST
6833 bool result = false;
6834 uint8_t req_slot_count = 0;
6835 struct fixed31_32 avg_time_slots_per_mtp = { 0 };
6836 union payload_table_update_status update_status = { 0 };
6837 const uint32_t max_retries = 30;
6838 uint32_t retries = 0;
/* Slot count is the ceiling of the stream's average slots per MTP. */
6841 avg_time_slots_per_mtp = calculate_sst_avg_time_slots_per_mtp(stream, link);
6842 req_slot_count = dc_fixpt_ceil(avg_time_slots_per_mtp);
6844 /// Leave req_slot_count = 0 if allocate is false.
6847 /// Write DPCD 2C0 = 1 to start updating
6848 update_status.bits.VC_PAYLOAD_TABLE_UPDATED = 1;
6849 core_link_write_dpcd(
6851 DP_PAYLOAD_TABLE_UPDATE_STATUS,
6855 /// Program the changes in DPCD 1C0 - 1C2
6857 core_link_write_dpcd(
6859 DP_PAYLOAD_ALLOCATE_SET,
6863 ASSERT(start_time_slot == 0);
6864 core_link_write_dpcd(
6866 DP_PAYLOAD_ALLOCATE_START_TIME_SLOT,
6870 ASSERT(req_slot_count <= MAX_MTP_SLOT_COUNT); /// Validation should filter out modes that exceed link BW
6871 core_link_write_dpcd(
6873 DP_PAYLOAD_ALLOCATE_TIME_SLOT_COUNT,
6877 /// Poll till DPCD 2C0 read 1
6878 /// Try for at least 150ms (30 retries, with 5ms delay after each attempt)
6880 while (retries < max_retries) {
6881 if (core_link_read_dpcd(
6883 DP_PAYLOAD_TABLE_UPDATE_STATUS,
6886 if (update_status.bits.VC_PAYLOAD_TABLE_UPDATED == 1) {
6887 DC_LOG_DP2("SST Update Payload: downstream payload table updated.");
/* Fallback while polling: a cheap DPCD_REV read keeps the AUX channel
 * exercised and detects a dead sink. */
6892 union dpcd_rev dpcdRev;
6894 if (core_link_read_dpcd(
6899 DC_LOG_ERROR("SST Update Payload: Unable to read DPCD revision "
6900 "of sink while polling payload table "
6901 "updated status bit.");
/* Timing out is logged but not fatal; we continue with the local table. */
6909 if (!result && retries == max_retries) {
6910 DC_LOG_ERROR("SST Update Payload: Payload table not updated after retries, "
6911 "continue on. Something is wrong with the branch.");
6912 // TODO - DP2.0 Payload: Read and log the payload table from downstream branch
6915 proposed_table->stream_count = 1; /// Always 1 stream for SST
6916 proposed_table->stream_allocations[0].slot_count = req_slot_count;
6917 proposed_table->stream_allocations[0].vcp_id = vc_id;
/*
 * Poll the sink until it reports the Allocation Change Trigger (ACT)
 * handled, or the link drops, or 30 attempts are exhausted.
 *
 * Each iteration first re-reads lane status: if clock recovery, channel
 * equalization, symbol lock or interlane alignment is lost, polling is
 * abandoned with ACT_LINK_LOST. Otherwise the ACT_HANDLED bit of
 * DP_PAYLOAD_TABLE_UPDATE_STATUS is checked.
 *
 * Return: true only when the sink acknowledged the ACT (ACT_SUCCESS).
 *
 * NOTE(review): braces, delays and some read arguments are elided in
 * this line-sampled view; code is kept byte-identical.
 */
6922 bool dpcd_poll_for_allocation_change_trigger(struct dc_link *link)
6925 * wait for ACT handled
6928 const int act_retries = 30;
6929 enum act_return_status result = ACT_FAILED;
6930 union payload_table_update_status update_status = {0};
6931 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
6932 union lane_align_status_updated lane_status_updated;
6934 for (i = 0; i < act_retries; i++) {
6935 get_lane_status(link, link->cur_link_settings.lane_count, dpcd_lane_status, &lane_status_updated);
/* Abort the poll on any sign of link loss. */
6937 if (!dp_is_cr_done(link->cur_link_settings.lane_count, dpcd_lane_status) ||
6938 !dp_is_ch_eq_done(link->cur_link_settings.lane_count, dpcd_lane_status) ||
6939 !dp_is_symbol_locked(link->cur_link_settings.lane_count, dpcd_lane_status) ||
6940 !dp_is_interlane_aligned(lane_status_updated)) {
6941 DC_LOG_ERROR("SST Update Payload: Link loss occurred while "
6942 "polling for ACT handled.");
6943 result = ACT_LINK_LOST;
6946 core_link_read_dpcd(
6948 DP_PAYLOAD_TABLE_UPDATE_STATUS,
6952 if (update_status.bits.ACT_HANDLED == 1) {
6953 DC_LOG_DP2("SST Update Payload: ACT handled by downstream.");
6954 result = ACT_SUCCESS;
/* A timeout is logged but treated as non-fatal by the caller. */
6961 if (result == ACT_FAILED) {
6962 DC_LOG_ERROR("SST Update Payload: ACT still not handled after retries, "
6963 "continue on. Something is wrong with the branch.");
6966 return (result == ACT_SUCCESS);
6969 struct fixed31_32 calculate_sst_avg_time_slots_per_mtp(
6970 const struct dc_stream_state *stream,
6971 const struct dc_link *link)
6973 struct fixed31_32 link_bw_effective =
6975 dc_link_bandwidth_kbps(link, &link->cur_link_settings));
6976 struct fixed31_32 timeslot_bw_effective =
6977 dc_fixpt_div_int(link_bw_effective, MAX_MTP_SLOT_COUNT);
6978 struct fixed31_32 timing_bw =
6980 dc_bandwidth_in_kbps_from_timing(&stream->timing));
6981 struct fixed31_32 avg_time_slots_per_mtp =
6982 dc_fixpt_div(timing_bw, timeslot_bw_effective);
6984 return avg_time_slots_per_mtp;
6987 bool is_dp_128b_132b_signal(struct pipe_ctx *pipe_ctx)
6989 return (pipe_ctx->stream_res.hpo_dp_stream_enc &&
6990 pipe_ctx->stream->link->hpo_dp_link_enc &&
6991 dc_is_dp_signal(pipe_ctx->stream->signal));
/*
 * Power up an eDP panel and turn its backlight on: enable panel power,
 * wait for HPD to assert, then (if the hook exists) enable the
 * backlight. No-op for non-eDP connectors.
 * NOTE(review): the function's closing lines fall past the end of this
 * view; code is kept byte-identical.
 */
6995 void edp_panel_backlight_power_on(struct dc_link *link)
6997 if (link->connector_signal != SIGNAL_TYPE_EDP)
7000 link->dc->hwss.edp_power_control(link, true);
7001 link->dc->hwss.edp_wait_for_hpd_ready(link, true);
7002 if (link->dc->hwss.edp_backlight_control)
7003 link->dc->hwss.edp_backlight_control(link, true);