/*
 * Copyright 2015 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
24 #include "dm_services.h"
26 #include "dc_link_dp.h"
27 #include "dm_helpers.h"
32 #include "inc/core_types.h"
33 #include "link_hwss.h"
34 #include "dc_link_ddc.h"
35 #include "core_status.h"
36 #include "dpcd_defs.h"
37 #include "dc_dmub_srv.h"
38 #include "dce/dmub_hw_lock_mgr.h"
39 #include "inc/dc_link_dpia.h"
40 #include "inc/link_enc_cfg.h"
/* DPCD branch-device hardware revision strings stored byte-reversed as they
 * are read back over AUX ("sivarT" = "Travis", "dnomlA" = "Almond").
 * Used to identify specific DP-to-VGA/LVDS converter dongles.
 */
43 static const uint8_t DP_VGA_LVDS_CONVERTER_ID_2[] = "sivarT";
45 static const uint8_t DP_VGA_LVDS_CONVERTER_ID_3[] = "dnomlA";
49 #define DC_TRACE_LEVEL_MESSAGE(...) /* do nothing */
51 #include "link_dpcd.h"
53 /* maximum pre emphasis level allowed for each voltage swing level*/
/* Indexed by enum dc_voltage_swing; each entry is the highest pre-emphasis
 * level the spec permits at that swing.
 * NOTE(review): the middle initializers appear to be elided in this extract
 * (expected LEVEL3, LEVEL2, LEVEL1, DISABLED) — confirm against upstream.
 */
54 static const enum dc_pre_emphasis
55 voltage_swing_to_pre_emphasis[] = { PRE_EMPHASIS_LEVEL3,
58 PRE_EMPHASIS_DISABLED };
/* Post-link-training adjust request policy: at most 6 adjust iterations,
 * bounded by a 200 ms overall timeout.
 * NOTE(review): the enclosing enum declaration appears to be elided in this
 * extract — confirm against upstream.
 */
61 POST_LT_ADJ_REQ_LIMIT = 6,
62 POST_LT_ADJ_REQ_TIMEOUT = 200
65 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* One candidate (lane count, link rate) pair used when link training falls
 * back to a lower bandwidth configuration.
 */
66 struct dp_lt_fallback_entry {
67 enum dc_lane_count lane_count;
68 enum dc_link_rate link_rate;
/* Fallback table for DP link training, ordered strictly by decreasing total
 * link bandwidth (lane_count * link_rate), mixing 128b/132b UHBR rates with
 * 8b/10b HBR rates.
 */
71 static const struct dp_lt_fallback_entry dp_lt_fallbacks[] = {
72 /* This link training fallback array is ordered by
73 * link bandwidth from highest to lowest.
74 * DP specs makes it a normative policy to always
75 * choose the next highest link bandwidth during
76 * link training fallback.
78 {LANE_COUNT_FOUR, LINK_RATE_UHBR20},
79 {LANE_COUNT_FOUR, LINK_RATE_UHBR13_5},
80 {LANE_COUNT_TWO, LINK_RATE_UHBR20},
81 {LANE_COUNT_FOUR, LINK_RATE_UHBR10},
82 {LANE_COUNT_TWO, LINK_RATE_UHBR13_5},
83 {LANE_COUNT_FOUR, LINK_RATE_HIGH3},
84 {LANE_COUNT_ONE, LINK_RATE_UHBR20},
85 {LANE_COUNT_TWO, LINK_RATE_UHBR10},
86 {LANE_COUNT_FOUR, LINK_RATE_HIGH2},
87 {LANE_COUNT_ONE, LINK_RATE_UHBR13_5},
88 {LANE_COUNT_TWO, LINK_RATE_HIGH3},
89 {LANE_COUNT_ONE, LINK_RATE_UHBR10},
90 {LANE_COUNT_TWO, LINK_RATE_HIGH2},
91 {LANE_COUNT_FOUR, LINK_RATE_HIGH},
92 {LANE_COUNT_ONE, LINK_RATE_HIGH3},
93 {LANE_COUNT_FOUR, LINK_RATE_LOW},
94 {LANE_COUNT_ONE, LINK_RATE_HIGH2},
95 {LANE_COUNT_TWO, LINK_RATE_HIGH},
96 {LANE_COUNT_TWO, LINK_RATE_LOW},
97 {LANE_COUNT_ONE, LINK_RATE_HIGH},
98 {LANE_COUNT_ONE, LINK_RATE_LOW},
/* Forward declarations for file-local link-training helpers defined later
 * in this file.
 */
102 static bool decide_fallback_link_setting(
103 struct dc_link *link,
104 struct dc_link_settings initial_link_settings,
105 struct dc_link_settings *current_link_setting,
106 enum link_training_result training_result);
107 static struct dc_link_settings get_common_supported_link_settings(
108 struct dc_link_settings link_setting_a,
109 struct dc_link_settings link_setting_b);
110 static void maximize_lane_settings(const struct link_training_settings *lt_settings,
111 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX]);
112 static void override_lane_settings(const struct link_training_settings *lt_settings,
113 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX]);
/* Return the AUX read interval (in microseconds) to wait between clock
 * recovery (CR) status reads. Defaults to 100 us; for 8b/10b links with
 * DPCD rev >= 1.2, DP_TRAINING_AUX_RD_INTERVAL overrides it in units of
 * 4 ms per the DP spec.
 * NOTE(review): the core_link_read_dpcd() call headers and the #else/#endif
 * of the CONFIG_DRM_AMD_DC_DCN split appear elided in this extract — confirm
 * against upstream before relying on the exact structure below.
 */
115 static uint32_t get_cr_training_aux_rd_interval(struct dc_link *link,
116 const struct dc_link_settings *link_settings)
118 union training_aux_rd_interval training_rd_interval;
119 uint32_t wait_in_micro_secs = 100;
120 #if defined(CONFIG_DRM_AMD_DC_DCN)
121 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
122 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING &&
123 link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
126 DP_TRAINING_AUX_RD_INTERVAL,
127 (uint8_t *)&training_rd_interval,
128 sizeof(training_rd_interval));
/* non-zero field means sink requested a longer interval: value * 4 ms */
129 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
130 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
135 DP_TRAINING_AUX_RD_INTERVAL,
136 (uint8_t *)&training_rd_interval,
137 sizeof(training_rd_interval));
138 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
139 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
141 return wait_in_micro_secs;
/* Return the AUX read interval (in microseconds) for channel equalization
 * (EQ) status polling. For 128b/132b links the interval comes from
 * DP_128b_132b_TRAINING_AUX_RD_INTERVAL; for 8b/10b DPCD >= 1.2 from
 * DP_TRAINING_AUX_RD_INTERVAL; otherwise the 400 us default applies.
 * NOTE(review): the core_link_read_dpcd() call headers, several switch cases,
 * the default return, and the #else/#endif are elided in this extract —
 * confirm the full structure against upstream.
 */
144 static uint32_t get_eq_training_aux_rd_interval(
145 struct dc_link *link,
146 const struct dc_link_settings *link_settings)
148 #if defined(CONFIG_DRM_AMD_DC_DCN)
149 union training_aux_rd_interval training_rd_interval;
151 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
152 if (dp_get_link_encoding_format(link_settings) == DP_128b_132b_ENCODING) {
155 DP_128b_132b_TRAINING_AUX_RD_INTERVAL,
156 (uint8_t *)&training_rd_interval,
157 sizeof(training_rd_interval));
158 } else if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING &&
159 link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
162 DP_TRAINING_AUX_RD_INTERVAL,
163 (uint8_t *)&training_rd_interval,
164 sizeof(training_rd_interval));
/* map the DPCD-encoded interval to microseconds */
167 switch (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL) {
171 case 3: return 12000;
172 case 4: return 16000;
173 case 5: return 32000;
174 case 6: return 64000;
178 union training_aux_rd_interval training_rd_interval;
179 uint32_t wait_in_micro_secs = 400;
181 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
182 /* overwrite the delay if rev > 1.1*/
183 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
184 /* DP 1.2 or later - retrieve delay through
185 * "DPCD_ADDR_TRAINING_AUX_RD_INTERVAL" register */
188 DP_TRAINING_AUX_RD_INTERVAL,
189 (uint8_t *)&training_rd_interval,
190 sizeof(training_rd_interval));
192 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
193 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
196 return wait_in_micro_secs;
/* Busy-wait (or sleep, for long intervals) for the training AUX read
 * interval. Intervals above 16 ms use msleep to avoid long busy-waits in
 * udelay; shorter ones use udelay for precision.
 * NOTE(review): the #else/#endif pairing and the log's argument list are
 * elided in this extract — confirm against upstream.
 */
200 void dp_wait_for_training_aux_rd_interval(
201 struct dc_link *link,
202 uint32_t wait_in_micro_secs)
204 #if defined(CONFIG_DRM_AMD_DC_DCN)
205 if (wait_in_micro_secs > 16000)
206 msleep(wait_in_micro_secs/1000);
208 udelay(wait_in_micro_secs);
210 udelay(wait_in_micro_secs);
213 DC_LOG_HW_LINK_TRAINING("%s:\n wait = %d\n",
/* Translate a driver-internal training pattern enum into the value written
 * to the DPCD TRAINING_PATTERN_SET register. Unknown patterns fall through
 * to VIDEOIDLE after logging.
 * NOTE(review): the switch statement's opening, per-case break statements,
 * and default-case body are elided in this extract — confirm against
 * upstream.
 */
218 enum dpcd_training_patterns
219 dc_dp_training_pattern_to_dpcd_training_pattern(
220 struct dc_link *link,
221 enum dc_dp_training_pattern pattern)
223 enum dpcd_training_patterns dpcd_tr_pattern =
224 DPCD_TRAINING_PATTERN_VIDEOIDLE;
227 case DP_TRAINING_PATTERN_SEQUENCE_1:
228 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_1;
230 case DP_TRAINING_PATTERN_SEQUENCE_2:
231 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_2;
233 case DP_TRAINING_PATTERN_SEQUENCE_3:
234 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_3;
236 case DP_TRAINING_PATTERN_SEQUENCE_4:
237 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_4;
239 #if defined(CONFIG_DRM_AMD_DC_DCN)
240 case DP_128b_132b_TPS1:
241 dpcd_tr_pattern = DPCD_128b_132b_TPS1;
243 case DP_128b_132b_TPS2:
244 dpcd_tr_pattern = DPCD_128b_132b_TPS2;
246 case DP_128b_132b_TPS2_CDS:
247 dpcd_tr_pattern = DPCD_128b_132b_TPS2_CDS;
250 case DP_TRAINING_PATTERN_VIDEOIDLE:
251 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_VIDEOIDLE;
255 DC_LOG_HW_LINK_TRAINING("%s: Invalid HW Training pattern: %d\n",
260 return dpcd_tr_pattern;
/* Write the requested training pattern to the sink's
 * DP_TRAINING_PATTERN_SET DPCD register and log the value written.
 */
263 static void dpcd_set_training_pattern(
264 struct dc_link *link,
265 enum dc_dp_training_pattern training_pattern)
267 union dpcd_training_pattern dpcd_pattern = {0};
269 dpcd_pattern.v1_4.TRAINING_PATTERN_SET =
270 dc_dp_training_pattern_to_dpcd_training_pattern(
271 link, training_pattern);
273 core_link_write_dpcd(
275 DP_TRAINING_PATTERN_SET,
279 DC_LOG_HW_LINK_TRAINING("%s\n %x pattern = %x\n",
281 DP_TRAINING_PATTERN_SET,
282 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
/* Pick the clock-recovery training pattern based on link encoding:
 * TPS1 for 8b/10b, 128b/132b TPS1 for 128b/132b links.
 * NOTE(review): default case and closing braces are elided in this extract.
 */
285 static enum dc_dp_training_pattern decide_cr_training_pattern(
286 const struct dc_link_settings *link_settings)
288 switch (dp_get_link_encoding_format(link_settings)) {
289 case DP_8b_10b_ENCODING:
291 return DP_TRAINING_PATTERN_SEQUENCE_1;
292 #if defined(CONFIG_DRM_AMD_DC_DCN)
293 case DP_128b_132b_ENCODING:
294 return DP_128b_132b_TPS1;
/* Pick the equalization training pattern: the highest TPS supported by
 * BOTH the link encoder (TX capability flags) and the sink (DPCD caps).
 * 128b/132b links always use 128b/132b TPS2. The encoder is resolved
 * dynamically when DIG mapping is flexible, statically otherwise.
 * NOTE(review): this extract shows two variants of the same logic (the
 * CONFIG_DRM_AMD_DC_DCN branch and the legacy branch); the #else/#endif
 * separators and some braces are elided — confirm against upstream.
 */
299 static enum dc_dp_training_pattern decide_eq_training_pattern(struct dc_link *link,
300 const struct dc_link_settings *link_settings)
302 struct link_encoder *link_enc;
303 #if defined(CONFIG_DRM_AMD_DC_DCN)
304 struct encoder_feature_support *enc_caps;
305 struct dpcd_caps *rx_caps = &link->dpcd_caps;
306 enum dc_dp_training_pattern pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
308 /* Access link encoder capability based on whether it is statically
309 * or dynamically assigned to a link.
311 if (link->is_dig_mapping_flexible &&
312 link->dc->res_pool->funcs->link_encs_assign)
313 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
315 link_enc = link->link_enc;
317 enc_caps = &link_enc->features;
319 switch (dp_get_link_encoding_format(link_settings)) {
320 case DP_8b_10b_ENCODING:
/* prefer TPS4, then TPS3, only when both TX and RX support it */
321 if (enc_caps->flags.bits.IS_TPS4_CAPABLE &&
322 rx_caps->max_down_spread.bits.TPS4_SUPPORTED)
323 pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
324 else if (enc_caps->flags.bits.IS_TPS3_CAPABLE &&
325 rx_caps->max_ln_count.bits.TPS3_SUPPORTED)
326 pattern = DP_TRAINING_PATTERN_SEQUENCE_3;
328 pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
330 case DP_128b_132b_ENCODING:
331 pattern = DP_128b_132b_TPS2;
334 pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
/* legacy (non-DCN) variant of the same TX/RX capability intersection */
339 enum dc_dp_training_pattern highest_tp = DP_TRAINING_PATTERN_SEQUENCE_2;
340 struct encoder_feature_support *features;
341 struct dpcd_caps *dpcd_caps = &link->dpcd_caps;
343 /* Access link encoder capability based on whether it is statically
344 * or dynamically assigned to a link.
346 if (link->is_dig_mapping_flexible &&
347 link->dc->res_pool->funcs->link_encs_assign)
348 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
350 link_enc = link->link_enc;
352 features = &link_enc->features;
354 if (features->flags.bits.IS_TPS3_CAPABLE)
355 highest_tp = DP_TRAINING_PATTERN_SEQUENCE_3;
357 if (features->flags.bits.IS_TPS4_CAPABLE)
358 highest_tp = DP_TRAINING_PATTERN_SEQUENCE_4;
360 if (dpcd_caps->max_down_spread.bits.TPS4_SUPPORTED &&
361 highest_tp >= DP_TRAINING_PATTERN_SEQUENCE_4)
362 return DP_TRAINING_PATTERN_SEQUENCE_4;
364 if (dpcd_caps->max_ln_count.bits.TPS3_SUPPORTED &&
365 highest_tp >= DP_TRAINING_PATTERN_SEQUENCE_3)
366 return DP_TRAINING_PATTERN_SEQUENCE_3;
368 return DP_TRAINING_PATTERN_SEQUENCE_2;
372 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* Convert a driver link rate to the value written to DP_LINK_BW_SET.
 * 128b/132b UHBR rates map to dedicated DPCD codes; 8b/10b rates are
 * written through as-is.
 * NOTE(review): the UHBR case bodies (link_rate = 0x1/0x2/0x4 assignments)
 * and break statements are elided in this extract — confirm against
 * upstream.
 */
373 static uint8_t get_dpcd_link_rate(const struct dc_link_settings *link_settings)
375 uint8_t link_rate = 0;
376 enum dp_link_encoding encoding = dp_get_link_encoding_format(link_settings);
378 if (encoding == DP_128b_132b_ENCODING)
379 switch (link_settings->link_rate) {
380 case LINK_RATE_UHBR10:
383 case LINK_RATE_UHBR20:
386 case LINK_RATE_UHBR13_5:
393 else if (encoding == DP_8b_10b_ENCODING)
394 link_rate = (uint8_t) link_settings->link_rate;
/* Vendor-specific LTTPR workaround, part one of two: write an opaque
 * vendor sequence (ending 0xff) at 0xF004F (offset-adjusted per repeater)
 * to make certain LTTPRs reset their lane settings.
 * NOTE(review): the guard around the offset adjustment (likely
 * "if (offset != 0xFF)") appears elided in this extract.
 */
402 static void vendor_specific_lttpr_wa_one_start(struct dc_link *link)
404 const uint8_t vendor_lttpr_write_data[4] = {0x1, 0x50, 0x63, 0xff};
405 const uint8_t offset = dp_convert_to_count(
406 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
407 uint32_t vendor_lttpr_write_address = 0xF004F;
410 vendor_lttpr_write_address +=
411 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
413 /* W/A for certain LTTPR to reset their lane settings, part one of two */
414 core_link_write_dpcd(
416 vendor_lttpr_write_address,
417 &vendor_lttpr_write_data[0],
418 sizeof(vendor_lttpr_write_data));
/* Vendor-specific LTTPR workaround, part two of two: write the companion
 * vendor sequence (ending 0x0) at the same vendor DPCD address to complete
 * the lane-settings reset started by wa_one_start().
 * NOTE(review): the second parameter declaration and the guard around the
 * offset adjustment appear elided in this extract.
 */
421 static void vendor_specific_lttpr_wa_one_end(
422 struct dc_link *link,
425 const uint8_t vendor_lttpr_write_data[4] = {0x1, 0x50, 0x63, 0x0};
426 const uint8_t offset = dp_convert_to_count(
427 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
428 uint32_t vendor_lttpr_write_address = 0xF004F;
432 vendor_lttpr_write_address +=
433 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
435 /* W/A for certain LTTPR to reset their lane settings, part two of two */
436 core_link_write_dpcd(
438 vendor_lttpr_write_address,
439 &vendor_lttpr_write_data[0],
440 sizeof(vendor_lttpr_write_data));
/* Vendor-specific LTTPR workaround: when re-attempting the same link rate
 * as the previous attempt, toggle the rate via a DPCD write so the LTTPR
 * resets its internal training state; then remember the attempted rate.
 * NOTE(review): the "rate" parameter declaration, the toggle-rate selection
 * logic, and the core_link_write_dpcd() argument list are elided in this
 * extract — confirm against upstream.
 */
444 static void vendor_specific_lttpr_wa_one_two(
445 struct dc_link *link,
448 if (link->apply_vendor_specific_lttpr_link_rate_wa) {
449 uint8_t toggle_rate = 0x0;
456 if (link->vendor_specific_lttpr_link_rate_wa == rate) {
457 /* W/A for certain LTTPR to reset internal state for link training */
458 core_link_write_dpcd(
465 /* Store the last attempted link rate for this link */
466 link->vendor_specific_lttpr_link_rate_wa = rate;
/* Vendor-specific LTTPR workaround: read back the voltage-swing and
 * pre-emphasis values the DPRX actually requested via vendor DPCD
 * registers (0xF004F write / 0xF0053 read, offset-adjusted per repeater)
 * and unpack them into per-lane adjust fields (2 bits per lane).
 * NOTE(review): the dprx_vs/dprx_pe local declarations and the
 * core_link_read_dpcd() call headers are elided in this extract.
 */
470 static void vendor_specific_lttpr_wa_three(
471 struct dc_link *link,
472 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX])
474 const uint8_t vendor_lttpr_write_data_vs[3] = {0x0, 0x53, 0x63};
475 const uint8_t vendor_lttpr_write_data_pe[3] = {0x0, 0x54, 0x63};
476 const uint8_t offset = dp_convert_to_count(
477 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
478 uint32_t vendor_lttpr_write_address = 0xF004F;
479 uint32_t vendor_lttpr_read_address = 0xF0053;
/* 0xFF means no repeater offset adjustment applies */
484 if (offset != 0xFF) {
485 vendor_lttpr_write_address +=
486 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
487 vendor_lttpr_read_address +=
488 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
491 /* W/A to read lane settings requested by DPRX */
492 core_link_write_dpcd(
494 vendor_lttpr_write_address,
495 &vendor_lttpr_write_data_vs[0],
496 sizeof(vendor_lttpr_write_data_vs));
499 vendor_lttpr_read_address,
502 core_link_write_dpcd(
504 vendor_lttpr_write_address,
505 &vendor_lttpr_write_data_pe[0],
506 sizeof(vendor_lttpr_write_data_pe));
509 vendor_lttpr_read_address,
/* unpack 2-bit-per-lane swing/pre-emphasis values */
513 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
514 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_LANE = (dprx_vs >> (2 * lane)) & 0x3;
515 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_LANE = (dprx_pe >> (2 * lane)) & 0x3;
/* Wrapper around vendor_specific_lttpr_wa_three(): fetch the DPRX-requested
 * lane adjustments and copy them into DPCD training-lane format
 * (VOLTAGE_SWING_SET / PRE_EMPHASIS_SET fields).
 */
519 static void vendor_specific_lttpr_wa_three_dpcd(
520 struct dc_link *link,
521 union dpcd_training_lane dpcd_lane_adjust[LANE_COUNT_DP_MAX])
523 union lane_adjust lane_adjust[LANE_COUNT_DP_MAX];
526 vendor_specific_lttpr_wa_three(link, lane_adjust);
528 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
529 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_SET = lane_adjust[lane].bits.VOLTAGE_SWING_LANE;
530 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_SET = lane_adjust[lane].bits.PRE_EMPHASIS_LANE;
/* Vendor-specific LTTPR workaround: bracket the clearing of the training
 * pattern with vendor DPCD writes so the TPS=0 write passes through the
 * LTTPR to the DPRX. With DP2.0 enabled, afterwards poll DP_SINK_STATUS
 * (up to 10 tries) until the intra-hop AUX reply indication clears.
 * NOTE(review): the second parameter, the guards around the vendor writes,
 * and the poll loop's delay/exit lines are elided in this extract.
 */
534 static void vendor_specific_lttpr_wa_four(
535 struct dc_link *link,
538 const uint8_t vendor_lttpr_write_data_one[4] = {0x1, 0x55, 0x63, 0x8};
539 const uint8_t vendor_lttpr_write_data_two[4] = {0x1, 0x55, 0x63, 0x0};
540 const uint8_t offset = dp_convert_to_count(
541 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
542 uint32_t vendor_lttpr_write_address = 0xF004F;
543 #if defined(CONFIG_DRM_AMD_DC_DP2_0)
544 uint8_t sink_status = 0;
549 vendor_lttpr_write_address +=
550 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
552 /* W/A to pass through DPCD write of TPS=0 to DPRX */
554 core_link_write_dpcd(
556 vendor_lttpr_write_address,
557 &vendor_lttpr_write_data_one[0],
558 sizeof(vendor_lttpr_write_data_one));
561 /* clear training pattern set */
562 dpcd_set_training_pattern(link, DP_TRAINING_PATTERN_VIDEOIDLE);
565 core_link_write_dpcd(
567 vendor_lttpr_write_address,
568 &vendor_lttpr_write_data_two[0],
569 sizeof(vendor_lttpr_write_data_two));
572 #if defined(CONFIG_DRM_AMD_DC_DP2_0)
573 /* poll for intra-hop disable */
574 for (i = 0; i < 10; i++) {
575 if ((core_link_read_dpcd(link, DP_SINK_STATUS, &sink_status, 1) == DC_OK) &&
576 (sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION) == 0)
/* Vendor-specific LTTPR workaround: force the LTTPR to output the desired
 * voltage swing and pre-emphasis. Packs per-lane 2-bit VS/PE values into
 * the last byte of the vendor write payloads, then issues a reset write
 * followed by the VS and PE writes at vendor address 0xF004F.
 * NOTE(review): the lane_count parameter declaration and the "lane" local
 * are elided in this extract — confirm against upstream.
 */
583 static void vendor_specific_lttpr_wa_five(
584 struct dc_link *link,
585 const union dpcd_training_lane dpcd_lane_adjust[LANE_COUNT_DP_MAX],
588 const uint32_t vendor_lttpr_write_address = 0xF004F;
589 const uint8_t vendor_lttpr_write_data_reset[4] = {0x1, 0x50, 0x63, 0xFF};
590 uint8_t vendor_lttpr_write_data_vs[4] = {0x1, 0x51, 0x63, 0x0};
591 uint8_t vendor_lttpr_write_data_pe[4] = {0x1, 0x52, 0x63, 0x0};
/* pack 2 bits of swing/pre-emphasis per lane into the payload byte */
594 for (lane = 0; lane < lane_count; lane++) {
595 vendor_lttpr_write_data_vs[3] |=
596 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_SET << (2 * lane);
597 vendor_lttpr_write_data_pe[3] |=
598 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_SET << (2 * lane);
601 /* Force LTTPR to output desired VS and PE */
602 core_link_write_dpcd(
604 vendor_lttpr_write_address,
605 &vendor_lttpr_write_data_reset[0],
606 sizeof(vendor_lttpr_write_data_reset));
607 core_link_write_dpcd(
609 vendor_lttpr_write_address,
610 &vendor_lttpr_write_data_vs[0],
611 sizeof(vendor_lttpr_write_data_vs));
612 core_link_write_dpcd(
614 vendor_lttpr_write_address,
615 &vendor_lttpr_write_data_pe[0],
616 sizeof(vendor_lttpr_write_data_pe));
/* Program the sink's link configuration over DPCD before training:
 * downspread control, lane count (with enhanced framing and, for PHY
 * endpoints below TPS4, post-LT adjust grant), and link rate — either via
 * DP_LINK_RATE_SET (eDP 1.4+ link-rate-table mode, with a MUX-chip
 * re-read workaround) or via DP_LINK_BW_SET (with vendor LTTPR
 * workarounds applied when enabled). Returns the status of the last
 * DPCD write.
 * NOTE(review): several lines (the "rate" local declaration, #else/#endif
 * pairing, log argument lists, and the final return) are elided in this
 * extract — confirm against upstream.
 */
619 enum dc_status dpcd_set_link_settings(
620 struct dc_link *link,
621 const struct link_training_settings *lt_settings)
624 enum dc_status status;
626 union down_spread_ctrl downspread = {0};
627 union lane_count_set lane_count_set = {0};
629 downspread.raw = (uint8_t)
630 (lt_settings->link_settings.link_spread);
632 lane_count_set.bits.LANE_COUNT_SET =
633 lt_settings->link_settings.lane_count;
635 lane_count_set.bits.ENHANCED_FRAMING = lt_settings->enhanced_framing;
636 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
/* grant post-LT adjust only for physical endpoints not using TPS4 */
639 if (link->ep_type == DISPLAY_ENDPOINT_PHY &&
640 lt_settings->pattern_for_eq < DP_TRAINING_PATTERN_SEQUENCE_4) {
641 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED =
642 link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED;
645 status = core_link_write_dpcd(link, DP_DOWNSPREAD_CTRL,
646 &downspread.raw, sizeof(downspread));
648 status = core_link_write_dpcd(link, DP_LANE_COUNT_SET,
649 &lane_count_set.raw, 1);
651 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_13 &&
652 lt_settings->link_settings.use_link_rate_set == true) {
654 /* WA for some MUX chips that will power down with eDP and lose supported
655 * link rate set for eDP 1.4. Source reads DPCD 0x010 again to ensure
656 * MUX chip gets link rate set back before link training.
658 if (link->connector_signal == SIGNAL_TYPE_EDP) {
659 uint8_t supported_link_rates[16];
661 core_link_read_dpcd(link, DP_SUPPORTED_LINK_RATES,
662 supported_link_rates, sizeof(supported_link_rates));
664 status = core_link_write_dpcd(link, DP_LINK_BW_SET, &rate, 1);
665 status = core_link_write_dpcd(link, DP_LINK_RATE_SET,
666 &lt_settings->link_settings.link_rate_set, 1);
668 #if defined(CONFIG_DRM_AMD_DC_DCN)
669 rate = get_dpcd_link_rate(&lt_settings->link_settings);
671 rate = (uint8_t) (lt_settings->link_settings.link_rate);
/* vendor LTTPR workarounds for fixed-VS retimer chips */
673 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
674 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
675 link->lttpr_mode == LTTPR_MODE_TRANSPARENT)
676 vendor_specific_lttpr_wa_one_start(link);
678 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
679 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN))
680 vendor_specific_lttpr_wa_one_two(link, rate);
682 status = core_link_write_dpcd(link, DP_LINK_BW_SET, &rate, 1);
686 DC_LOG_HW_LINK_TRAINING("%s\n %x rate = %x\n %x lane = %x framing = %x\n %x spread = %x\n",
689 lt_settings->link_settings.link_rate,
691 lt_settings->link_settings.lane_count,
692 lt_settings->enhanced_framing,
694 lt_settings->link_settings.link_spread);
696 DC_LOG_HW_LINK_TRAINING("%s\n %x rate set = %x\n %x lane = %x framing = %x\n %x spread = %x\n",
699 lt_settings->link_settings.link_rate_set,
701 lt_settings->link_settings.lane_count,
702 lt_settings->enhanced_framing,
704 lt_settings->link_settings.link_spread);
/* Return 1 if scrambling must be disabled for the given training pattern
 * (8b/10b TPS1-3), 0 otherwise (TPS4 and 128b/132b patterns keep
 * scrambling on per the DP spec). Unknown patterns are logged.
 * NOTE(review): the switch opening, break statements, and the default-case
 * log arguments are elided in this extract.
 */
710 uint8_t dc_dp_initialize_scrambling_data_symbols(
711 struct dc_link *link,
712 enum dc_dp_training_pattern pattern)
714 uint8_t disable_scrabled_data_symbols = 0;
717 case DP_TRAINING_PATTERN_SEQUENCE_1:
718 case DP_TRAINING_PATTERN_SEQUENCE_2:
719 case DP_TRAINING_PATTERN_SEQUENCE_3:
720 disable_scrabled_data_symbols = 1;
722 case DP_TRAINING_PATTERN_SEQUENCE_4:
723 #if defined(CONFIG_DRM_AMD_DC_DCN)
724 case DP_128b_132b_TPS1:
725 case DP_128b_132b_TPS2:
727 disable_scrabled_data_symbols = 0;
731 DC_LOG_HW_LINK_TRAINING("%s: Invalid HW Training pattern: %d\n",
735 return disable_scrabled_data_symbols;
738 static inline bool is_repeater(struct dc_link *link, uint32_t offset)
740 return (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) && (offset != 0);
/* Write the training pattern and the per-lane drive settings to the DPRX
 * (or to a specific LTTPR when offset selects a repeater) in a single
 * DPCD burst, with an eDP two-part-write workaround and encoding-specific
 * logging (TX_FFE preset for 128b/132b, VS/PE for 8b/10b).
 * NOTE(review): the "offset" parameter declaration, memmove/memcpy call
 * header, several log argument lists, and some braces are elided in this
 * extract — confirm against upstream before modifying.
 */
743 static void dpcd_set_lt_pattern_and_lane_settings(
744 struct dc_link *link,
745 const struct link_training_settings *lt_settings,
746 enum dc_dp_training_pattern pattern,
749 uint32_t dpcd_base_lt_offset;
751 uint8_t dpcd_lt_buffer[5] = {0};
752 union dpcd_training_pattern dpcd_pattern = { 0 };
753 uint32_t size_in_bytes;
754 bool edp_workaround = false; /* TODO link_prop.INTERNAL */
755 dpcd_base_lt_offset = DP_TRAINING_PATTERN_SET;
/* repeaters use a per-repeater register bank offset */
757 if (is_repeater(link, offset))
758 dpcd_base_lt_offset = DP_TRAINING_PATTERN_SET_PHY_REPEATER1 +
759 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
761 /*****************************************************************
762 * DpcdAddress_TrainingPatternSet
763 *****************************************************************/
764 dpcd_pattern.v1_4.TRAINING_PATTERN_SET =
765 dc_dp_training_pattern_to_dpcd_training_pattern(link, pattern);
767 dpcd_pattern.v1_4.SCRAMBLING_DISABLE =
768 dc_dp_initialize_scrambling_data_symbols(link, pattern);
770 dpcd_lt_buffer[DP_TRAINING_PATTERN_SET - DP_TRAINING_PATTERN_SET]
773 if (is_repeater(link, offset)) {
774 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Repeater ID: %d\n 0x%X pattern = %x\n",
778 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
780 DC_LOG_HW_LINK_TRAINING("%s\n 0x%X pattern = %x\n",
783 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
786 /* concatenate everything into one buffer*/
787 size_in_bytes = lt_settings->link_settings.lane_count *
788 sizeof(lt_settings->dpcd_lane_settings[0]);
792 &dpcd_lt_buffer[DP_TRAINING_LANE0_SET - DP_TRAINING_PATTERN_SET],
793 lt_settings->dpcd_lane_settings,
796 if (is_repeater(link, offset)) {
797 #if defined(CONFIG_DRM_AMD_DC_DCN)
798 if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
799 DP_128b_132b_ENCODING)
800 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
801 " 0x%X TX_FFE_PRESET_VALUE = %x\n",
805 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
806 else if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
809 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
810 " 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
814 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
815 lt_settings->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
816 lt_settings->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
817 lt_settings->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
819 #if defined(CONFIG_DRM_AMD_DC_DCN)
820 if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
821 DP_128b_132b_ENCODING)
822 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X TX_FFE_PRESET_VALUE = %x\n",
825 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
826 else if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
829 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
832 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
833 lt_settings->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
834 lt_settings->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
835 lt_settings->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
837 if (edp_workaround) {
838 /* for eDP write in 2 parts because the 5-byte burst is
839 * causing issues on some eDP panels (EPR#366724)
841 core_link_write_dpcd(
843 DP_TRAINING_PATTERN_SET,
845 sizeof(dpcd_pattern.raw));
847 core_link_write_dpcd(
849 DP_TRAINING_LANE0_SET,
850 (uint8_t *)(lt_settings->dpcd_lane_settings),
853 #if defined(CONFIG_DRM_AMD_DC_DCN)
854 } else if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
855 DP_128b_132b_ENCODING) {
856 core_link_write_dpcd(
860 sizeof(dpcd_lt_buffer));
863 /* write it all in (1 + number-of-lanes)-byte burst*/
864 core_link_write_dpcd(
868 size_in_bytes + sizeof(dpcd_pattern.raw));
871 bool dp_is_cr_done(enum dc_lane_count ln_count,
872 union lane_status *dpcd_lane_status)
875 /*LANEx_CR_DONE bits All 1's?*/
876 for (lane = 0; lane < (uint32_t)(ln_count); lane++) {
877 if (!dpcd_lane_status[lane].bits.CR_DONE_0)
883 bool dp_is_ch_eq_done(enum dc_lane_count ln_count,
884 union lane_status *dpcd_lane_status)
888 for (lane = 0; lane < (uint32_t)(ln_count); lane++)
889 if (!dpcd_lane_status[lane].bits.CHANNEL_EQ_DONE_0)
894 bool dp_is_symbol_locked(enum dc_lane_count ln_count,
895 union lane_status *dpcd_lane_status)
899 for (lane = 0; lane < (uint32_t)(ln_count); lane++)
900 if (!dpcd_lane_status[lane].bits.SYMBOL_LOCKED_0)
905 bool dp_is_interlane_aligned(union lane_align_status_updated align_status)
907 return align_status.bits.INTERLANE_ALIGN_DONE == 1;
/* Convert hardware lane settings into the DPCD training-lane register
 * format. For 8b/10b links this copies VS/PE and sets the MAX_*_REACHED
 * flags when the level equals the spec maximum; for 128b/132b links it
 * copies the TX FFE preset level.
 * NOTE(review): the opening brace, "lane" local declaration, and closing
 * braces/#endif are elided in this extract.
 */
910 void dp_hw_to_dpcd_lane_settings(
911 const struct link_training_settings *lt_settings,
912 const struct dc_lane_settings hw_lane_settings[LANE_COUNT_DP_MAX],
913 union dpcd_training_lane dpcd_lane_settings[LANE_COUNT_DP_MAX])
917 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
918 if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
919 DP_8b_10b_ENCODING) {
920 dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET =
921 (uint8_t)(hw_lane_settings[lane].VOLTAGE_SWING);
922 dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET =
923 (uint8_t)(hw_lane_settings[lane].PRE_EMPHASIS);
924 dpcd_lane_settings[lane].bits.MAX_SWING_REACHED =
925 (hw_lane_settings[lane].VOLTAGE_SWING ==
926 VOLTAGE_SWING_MAX_LEVEL ? 1 : 0);
927 dpcd_lane_settings[lane].bits.MAX_PRE_EMPHASIS_REACHED =
928 (hw_lane_settings[lane].PRE_EMPHASIS ==
929 PRE_EMPHASIS_MAX_LEVEL ? 1 : 0);
931 #if defined(CONFIG_DRM_AMD_DC_DCN)
932 else if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
933 DP_128b_132b_ENCODING) {
934 dpcd_lane_settings[lane].tx_ffe.PRESET_VALUE =
935 hw_lane_settings[lane].FFE_PRESET.settings.level;
/* Decide the next hardware lane settings from the sink's per-lane adjust
 * requests (VS/PE for 8b/10b, FFE preset for 128b/132b), mirror them into
 * DPCD format, and apply the per-lane policy: when per-lane settings are
 * disallowed, clamp all lanes to the maximum requested (and any overrides),
 * then optionally re-sync the DPCD copy with the final HW values.
 * NOTE(review): the "lane" local, the VOLTAGE_SWING_LANE/PRE_EMPHASIS_LANE
 * field names on the wrapped lines, and closing braces are elided in this
 * extract.
 */
941 void dp_decide_lane_settings(
942 const struct link_training_settings *lt_settings,
943 const union lane_adjust ln_adjust[LANE_COUNT_DP_MAX],
944 struct dc_lane_settings hw_lane_settings[LANE_COUNT_DP_MAX],
945 union dpcd_training_lane dpcd_lane_settings[LANE_COUNT_DP_MAX])
949 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
950 if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
951 DP_8b_10b_ENCODING) {
952 hw_lane_settings[lane].VOLTAGE_SWING =
953 (enum dc_voltage_swing)(ln_adjust[lane].bits.
955 hw_lane_settings[lane].PRE_EMPHASIS =
956 (enum dc_pre_emphasis)(ln_adjust[lane].bits.
959 #if defined(CONFIG_DRM_AMD_DC_DCN)
960 else if (dp_get_link_encoding_format(&lt_settings->link_settings) ==
961 DP_128b_132b_ENCODING) {
962 hw_lane_settings[lane].FFE_PRESET.raw =
963 ln_adjust[lane].tx_ffe.PRESET_VALUE;
967 dp_hw_to_dpcd_lane_settings(lt_settings, hw_lane_settings, dpcd_lane_settings);
969 if (lt_settings->disallow_per_lane_settings) {
970 /* we find the maximum of the requested settings across all lanes*/
971 /* and set this maximum for all lanes*/
972 maximize_lane_settings(lt_settings, hw_lane_settings);
973 override_lane_settings(lt_settings, hw_lane_settings);
975 if (lt_settings->always_match_dpcd_with_hw_lane_settings)
976 dp_hw_to_dpcd_lane_settings(lt_settings, hw_lane_settings, dpcd_lane_settings);
/* Extract the 4-bit field at logical nibble position @index from a byte
 * buffer: even indices return the low nibble of buf[index/2], odd indices
 * the high nibble. Used to unpack per-lane DPCD status/adjust nibbles.
 */
static uint8_t get_nibble_at_index(const uint8_t *buf,
	uint32_t index)
{
	uint8_t nibble;
	nibble = buf[index / 2];

	if (index % 2)
		nibble >>= 4;	/* odd index: high nibble */
	else
		nibble &= 0x0F;	/* even index: low nibble */

	return nibble;
}
995 static enum dc_pre_emphasis get_max_pre_emphasis_for_voltage_swing(
996 enum dc_voltage_swing voltage)
998 enum dc_pre_emphasis pre_emphasis;
999 pre_emphasis = PRE_EMPHASIS_MAX_LEVEL;
1001 if (voltage <= VOLTAGE_SWING_MAX_LEVEL)
1002 pre_emphasis = voltage_swing_to_pre_emphasis[voltage];
1004 return pre_emphasis;
/* Collapse per-lane settings to a single common setting: find the maximum
 * requested VS/PE (and FFE preset) across active lanes, clamp each to its
 * spec maximum, constrain PE to what the chosen VS permits, then write the
 * result back to every lane slot.
 * NOTE(review): the "lane" local declaration and some closing braces/#endif
 * lines are elided in this extract.
 */
1008 static void maximize_lane_settings(const struct link_training_settings *lt_settings,
1009 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX])
1012 struct dc_lane_settings max_requested;
1014 max_requested.VOLTAGE_SWING = lane_settings[0].VOLTAGE_SWING;
1015 max_requested.PRE_EMPHASIS = lane_settings[0].PRE_EMPHASIS;
1016 #if defined(CONFIG_DRM_AMD_DC_DCN)
1017 max_requested.FFE_PRESET = lane_settings[0].FFE_PRESET;
1020 /* Determine what the maximum of the requested settings are*/
1021 for (lane = 1; lane < lt_settings->link_settings.lane_count; lane++) {
1022 if (lane_settings[lane].VOLTAGE_SWING > max_requested.VOLTAGE_SWING)
1023 max_requested.VOLTAGE_SWING = lane_settings[lane].VOLTAGE_SWING;
1025 if (lane_settings[lane].PRE_EMPHASIS > max_requested.PRE_EMPHASIS)
1026 max_requested.PRE_EMPHASIS = lane_settings[lane].PRE_EMPHASIS;
1027 #if defined(CONFIG_DRM_AMD_DC_DCN)
1028 if (lane_settings[lane].FFE_PRESET.settings.level >
1029 max_requested.FFE_PRESET.settings.level)
1030 max_requested.FFE_PRESET.settings.level =
1031 lane_settings[lane].FFE_PRESET.settings.level;
1035 /* make sure the requested settings are
1036 * not higher than maximum settings*/
1037 if (max_requested.VOLTAGE_SWING > VOLTAGE_SWING_MAX_LEVEL)
1038 max_requested.VOLTAGE_SWING = VOLTAGE_SWING_MAX_LEVEL;
1040 if (max_requested.PRE_EMPHASIS > PRE_EMPHASIS_MAX_LEVEL)
1041 max_requested.PRE_EMPHASIS = PRE_EMPHASIS_MAX_LEVEL;
1042 #if defined(CONFIG_DRM_AMD_DC_DCN)
1043 if (max_requested.FFE_PRESET.settings.level > DP_FFE_PRESET_MAX_LEVEL)
1044 max_requested.FFE_PRESET.settings.level = DP_FFE_PRESET_MAX_LEVEL;
1047 /* make sure the pre-emphasis matches the voltage swing*/
1048 if (max_requested.PRE_EMPHASIS >
1049 get_max_pre_emphasis_for_voltage_swing(
1050 max_requested.VOLTAGE_SWING))
1051 max_requested.PRE_EMPHASIS =
1052 get_max_pre_emphasis_for_voltage_swing(
1053 max_requested.VOLTAGE_SWING);
1055 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
1056 lane_settings[lane].VOLTAGE_SWING = max_requested.VOLTAGE_SWING;
1057 lane_settings[lane].PRE_EMPHASIS = max_requested.PRE_EMPHASIS;
1058 #if defined(CONFIG_DRM_AMD_DC_DCN)
1059 lane_settings[lane].FFE_PRESET = max_requested.FFE_PRESET;
/* Apply debug/override values: when any of the optional override pointers
 * in lt_settings is set, force that value onto the lanes; return early if
 * no override is configured.
 * NOTE(review): the "lane" local declaration and an early "return" appear
 * elided in this extract; the loop starting at lane = 1 (not 0) matches
 * the visible code — confirm intent against upstream.
 */
1064 static void override_lane_settings(const struct link_training_settings *lt_settings,
1065 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX])
1069 if (lt_settings->voltage_swing == NULL &&
1070 lt_settings->pre_emphasis == NULL &&
1071 #if defined(CONFIG_DRM_AMD_DC_DCN)
1072 lt_settings->ffe_preset == NULL &&
1074 lt_settings->post_cursor2 == NULL)
1078 for (lane = 1; lane < LANE_COUNT_DP_MAX; lane++) {
1079 if (lt_settings->voltage_swing)
1080 lane_settings[lane].VOLTAGE_SWING = *lt_settings->voltage_swing;
1081 if (lt_settings->pre_emphasis)
1082 lane_settings[lane].PRE_EMPHASIS = *lt_settings->pre_emphasis;
1083 if (lt_settings->post_cursor2)
1084 lane_settings[lane].POST_CURSOR2 = *lt_settings->post_cursor2;
1085 #if defined(CONFIG_DRM_AMD_DC_DCN)
1086 if (lt_settings->ffe_preset)
1087 lane_settings[lane].FFE_PRESET = *lt_settings->ffe_preset;
/* Read lane status, alignment status, and per-lane adjust requests in one
 * DPCD burst starting at DP_LANE0_1_STATUS (or the repeater's equivalent
 * bank when offset selects an LTTPR — repeaters pack the adjust bytes one
 * byte earlier, hence lane_adjust_offset 3 vs 4). Unpacks 4-bit per-lane
 * nibbles into ln_status/ln_adjust and logs both status and adjust bytes.
 * NOTE(review): the "offset" parameter, the "lane" local, the read size
 * argument, and the final "return status;" are elided in this extract.
 */
1092 enum dc_status dp_get_lane_status_and_lane_adjust(
1093 struct dc_link *link,
1094 const struct link_training_settings *link_training_setting,
1095 union lane_status ln_status[LANE_COUNT_DP_MAX],
1096 union lane_align_status_updated *ln_align,
1097 union lane_adjust ln_adjust[LANE_COUNT_DP_MAX],
1100 unsigned int lane01_status_address = DP_LANE0_1_STATUS;
1101 uint8_t lane_adjust_offset = 4;
1102 unsigned int lane01_adjust_address;
1103 uint8_t dpcd_buf[6] = {0};
1105 enum dc_status status;
1107 if (is_repeater(link, offset)) {
1108 lane01_status_address =
1109 DP_LANE0_1_STATUS_PHY_REPEATER1 +
1110 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1111 lane_adjust_offset = 3;
1114 status = core_link_read_dpcd(
1116 lane01_status_address,
1117 (uint8_t *)(dpcd_buf),
/* unpack one 4-bit status and one 4-bit adjust nibble per lane */
1120 for (lane = 0; lane <
1121 (uint32_t)(link_training_setting->link_settings.lane_count);
1124 ln_status[lane].raw =
1125 get_nibble_at_index(&dpcd_buf[0], lane);
1126 ln_adjust[lane].raw =
1127 get_nibble_at_index(&dpcd_buf[lane_adjust_offset], lane);
1130 ln_align->raw = dpcd_buf[2];
1132 if (is_repeater(link, offset)) {
1133 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
1134 " 0x%X Lane01Status = %x\n 0x%X Lane23Status = %x\n ",
1137 lane01_status_address, dpcd_buf[0],
1138 lane01_status_address + 1, dpcd_buf[1]);
1140 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X Lane01Status = %x\n 0x%X Lane23Status = %x\n ",
1142 lane01_status_address, dpcd_buf[0],
1143 lane01_status_address + 1, dpcd_buf[1]);
1145 lane01_adjust_address = DP_ADJUST_REQUEST_LANE0_1;
1147 if (is_repeater(link, offset))
1148 lane01_adjust_address = DP_ADJUST_REQUEST_LANE0_1_PHY_REPEATER1 +
1149 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1151 if (is_repeater(link, offset)) {
1152 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
1153 " 0x%X Lane01AdjustRequest = %x\n 0x%X Lane23AdjustRequest = %x\n",
1156 lane01_adjust_address,
1157 dpcd_buf[lane_adjust_offset],
1158 lane01_adjust_address + 1,
1159 dpcd_buf[lane_adjust_offset + 1]);
1161 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X Lane01AdjustRequest = %x\n 0x%X Lane23AdjustRequest = %x\n",
1163 lane01_adjust_address,
1164 dpcd_buf[lane_adjust_offset],
1165 lane01_adjust_address + 1,
1166 dpcd_buf[lane_adjust_offset + 1]);
/*
 * dpcd_set_lane_settings() - write the currently-decided per-lane drive
 * settings (dpcd_lane_settings[]) to TRAINING_LANE0_SET (or the
 * corresponding LTTPR register block when @offset names a repeater),
 * then log what was written — FFE preset for 128b/132b links, VS/PE for
 * 8b/10b links.
 *
 * NOTE(review): garbled extraction — fused original line numbers and
 * missing lines (braces, some log arguments).  Code reproduced verbatim;
 * comments only added.
 */
1172 enum dc_status dpcd_set_lane_settings(
1173 struct dc_link *link,
1174 const struct link_training_settings *link_training_setting,
1177 unsigned int lane0_set_address;
1178 enum dc_status status;
1180 lane0_set_address = DP_TRAINING_LANE0_SET;
/* Repeater hops use their own TRAINING_LANE0_SET register block. */
1182 if (is_repeater(link, offset))
1183 lane0_set_address = DP_TRAINING_LANE0_SET_PHY_REPEATER1 +
1184 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
/* One byte per active lane, written in a single AUX transaction. */
1186 status = core_link_write_dpcd(link,
1188 (uint8_t *)(link_training_setting->dpcd_lane_settings),
1189 link_training_setting->link_settings.lane_count);
1191 if (is_repeater(link, offset)) {
1192 #if defined(CONFIG_DRM_AMD_DC_DCN)
1193 if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1194 DP_128b_132b_ENCODING)
1195 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
1196 " 0x%X TX_FFE_PRESET_VALUE = %x\n",
1200 link_training_setting->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
1201 else if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1204 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Repeater ID: %d\n"
1205 " 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
1209 link_training_setting->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
1210 link_training_setting->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
1211 link_training_setting->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
1212 link_training_setting->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
/* Non-repeater (DPRX) logging, same split by link encoding. */
1215 #if defined(CONFIG_DRM_AMD_DC_DCN)
1216 if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1217 DP_128b_132b_ENCODING)
1218 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X TX_FFE_PRESET_VALUE = %x\n",
1221 link_training_setting->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
1222 else if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1225 DC_LOG_HW_LINK_TRAINING("%s\n 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
1228 link_training_setting->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
1229 link_training_setting->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
1230 link_training_setting->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
1231 link_training_setting->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
1237 bool dp_is_max_vs_reached(
1238 const struct link_training_settings *lt_settings)
1241 for (lane = 0; lane <
1242 (uint32_t)(lt_settings->link_settings.lane_count);
1244 if (lt_settings->dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET
1245 == VOLTAGE_SWING_MAX_LEVEL)
/*
 * perform_post_lt_adj_req_sequence() - after successful training, honour
 * sink POST_LT_ADJ_REQ: poll lane status, and while the sink keeps asking
 * for different VS/PE values, re-decide and re-program drive settings.
 * Gives up after POST_LT_ADJ_REQ_LIMIT adjustment rounds or when a poll
 * round times out (POST_LT_ADJ_REQ_TIMEOUT iterations) without a change.
 *
 * NOTE(review): garbled extraction — fused original line numbers, missing
 * braces/returns.  Code reproduced verbatim; comments only added.
 */
1252 static bool perform_post_lt_adj_req_sequence(
1253 struct dc_link *link,
1254 const struct link_resource *link_res,
1255 struct link_training_settings *lt_settings)
1257 enum dc_lane_count lane_count =
1258 lt_settings->link_settings.lane_count;
1260 uint32_t adj_req_count;
1261 uint32_t adj_req_timer;
1262 bool req_drv_setting_changed;
1265 req_drv_setting_changed = false;
/* Outer loop: one iteration per sink-requested adjustment round. */
1266 for (adj_req_count = 0; adj_req_count < POST_LT_ADJ_REQ_LIMIT;
1269 req_drv_setting_changed = false;
/* Inner loop: poll until the sink requests a change or times out. */
1271 for (adj_req_timer = 0;
1272 adj_req_timer < POST_LT_ADJ_REQ_TIMEOUT;
1275 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
1276 union lane_align_status_updated
1277 dpcd_lane_status_updated;
1278 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
1280 dp_get_lane_status_and_lane_adjust(
1284 &dpcd_lane_status_updated,
/* Sink cleared POST_LT_ADJ_REQ_IN_PROGRESS: sequence complete. */
1288 if (dpcd_lane_status_updated.bits.
1289 POST_LT_ADJ_REQ_IN_PROGRESS == 0)
/* Losing CR, EQ, symbol lock or alignment mid-sequence is a failure. */
1292 if (!dp_is_cr_done(lane_count, dpcd_lane_status))
1295 if (!dp_is_ch_eq_done(lane_count, dpcd_lane_status) ||
1296 !dp_is_symbol_locked(lane_count, dpcd_lane_status) ||
1297 !dp_is_interlane_aligned(dpcd_lane_status_updated))
/* Compare current DPCD lane settings to the sink's new request. */
1300 for (lane = 0; lane < (uint32_t)(lane_count); lane++) {
1303 dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET !=
1304 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_LANE ||
1305 lt_settings->dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET !=
1306 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_LANE) {
1308 req_drv_setting_changed = true;
/* Re-decide and program the new drive settings, then poll again. */
1313 if (req_drv_setting_changed) {
1314 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1315 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1317 dc_link_dp_set_drive_settings(link,
1326 if (!req_drv_setting_changed) {
1327 DC_LOG_WARNING("%s: Post Link Training Adjust Request Timed out\n",
1334 DC_LOG_WARNING("%s: Post Link Training Adjust Request limit reached\n",
1342 /* Only used for channel equalization */
1343 uint32_t dp_translate_training_aux_read_interval(uint32_t dpcd_aux_read_interval)
1345 unsigned int aux_rd_interval_us = 400;
1347 switch (dpcd_aux_read_interval) {
1349 aux_rd_interval_us = 4000;
1352 aux_rd_interval_us = 8000;
1355 aux_rd_interval_us = 12000;
1358 aux_rd_interval_us = 16000;
1360 #if defined(CONFIG_DRM_AMD_DC_DCN)
1362 aux_rd_interval_us = 32000;
1365 aux_rd_interval_us = 64000;
1372 return aux_rd_interval_us;
1375 enum link_training_result dp_get_cr_failure(enum dc_lane_count ln_count,
1376 union lane_status *dpcd_lane_status)
1378 enum link_training_result result = LINK_TRAINING_SUCCESS;
1380 if (ln_count >= LANE_COUNT_ONE && !dpcd_lane_status[0].bits.CR_DONE_0)
1381 result = LINK_TRAINING_CR_FAIL_LANE0;
1382 else if (ln_count >= LANE_COUNT_TWO && !dpcd_lane_status[1].bits.CR_DONE_0)
1383 result = LINK_TRAINING_CR_FAIL_LANE1;
1384 else if (ln_count >= LANE_COUNT_FOUR && !dpcd_lane_status[2].bits.CR_DONE_0)
1385 result = LINK_TRAINING_CR_FAIL_LANE23;
1386 else if (ln_count >= LANE_COUNT_FOUR && !dpcd_lane_status[3].bits.CR_DONE_0)
1387 result = LINK_TRAINING_CR_FAIL_LANE23;
/*
 * perform_channel_equalization_sequence() - second phase of 8b/10b link
 * training: transmit the EQ pattern, program/notify lane settings, wait
 * the AUX read interval, and poll until CR stays locked and channel EQ,
 * symbol lock and interlane alignment are all achieved (or retries run
 * out).
 *
 * NOTE(review): garbled extraction — fused original line numbers and
 * missing lines.  "<_settings" below appears to be HTML-entity-mangled
 * "&lt_settings"; left untouched here, comments only added.
 */
1391 static enum link_training_result perform_channel_equalization_sequence(
1392 struct dc_link *link,
1393 const struct link_resource *link_res,
1394 struct link_training_settings *lt_settings,
1397 enum dc_dp_training_pattern tr_pattern;
1398 uint32_t retries_ch_eq;
1399 uint32_t wait_time_microsec;
1400 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
1401 union lane_align_status_updated dpcd_lane_status_updated = {0};
1402 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
1403 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
1405 /* Note: also check that TPS4 is a supported feature*/
1406 tr_pattern = lt_settings->pattern_for_eq;
/* LTTPR hops on 8b/10b links are always trained with TPS4. */
1408 #if defined(CONFIG_DRM_AMD_DC_DCN)
1409 if (is_repeater(link, offset) && dp_get_link_encoding_format(<_settings->link_settings) == DP_8b_10b_ENCODING)
1410 tr_pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
1412 if (is_repeater(link, offset))
1413 tr_pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
1416 dp_set_hw_training_pattern(link, link_res, tr_pattern, offset);
1418 for (retries_ch_eq = 0; retries_ch_eq <= LINK_TRAINING_MAX_RETRY_COUNT;
1421 dp_set_hw_lane_settings(link, link_res, lt_settings, offset);
1425 /* EPR #361076 - write as a 5-byte burst,
1426 * but only for the 1-st iteration
1429 dpcd_set_lt_pattern_and_lane_settings(
1432 tr_pattern, offset);
1434 dpcd_set_lane_settings(link, lt_settings, offset);
1436 /* 3. wait for receiver to lock-on*/
1437 wait_time_microsec = lt_settings->eq_pattern_time;
/* Repeater hops advertise their own AUX read interval. */
1439 if (is_repeater(link, offset))
1440 wait_time_microsec =
1441 dp_translate_training_aux_read_interval(
1442 link->dpcd_caps.lttpr_caps.aux_rd_interval[offset - 1]);
/* Vendor workaround: fixed 16ms wait for certain retimer parts. */
1444 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
1445 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
1446 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1447 wait_time_microsec = 16000;
1450 dp_wait_for_training_aux_rd_interval(
1452 wait_time_microsec);
1454 /* 4. Read lane status and requested
1455 * drive settings as set by the sink*/
1457 dp_get_lane_status_and_lane_adjust(
1461 &dpcd_lane_status_updated,
1465 /* 5. check CR done*/
1466 if (!dp_is_cr_done(lane_count, dpcd_lane_status))
1467 return LINK_TRAINING_EQ_FAIL_CR;
1469 /* 6. check CHEQ done*/
1470 if (dp_is_ch_eq_done(lane_count, dpcd_lane_status) &&
1471 dp_is_symbol_locked(lane_count, dpcd_lane_status) &&
1472 dp_is_interlane_aligned(dpcd_lane_status_updated))
1473 return LINK_TRAINING_SUCCESS;
1475 /* 7. update VS/PE/PC2 in lt_settings*/
1476 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1477 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1480 return LINK_TRAINING_EQ_FAIL_EQ;
1484 static void start_clock_recovery_pattern_early(struct dc_link *link,
1485 const struct link_resource *link_res,
1486 struct link_training_settings *lt_settings,
1489 DC_LOG_HW_LINK_TRAINING("%s\n GPU sends TPS1. Wait 400us.\n",
1491 dp_set_hw_training_pattern(link, link_res, lt_settings->pattern_for_cr, offset);
1492 dp_set_hw_lane_settings(link, link_res, lt_settings, offset);
/*
 * perform_clock_recovery_sequence() - first phase of 8b/10b training:
 * transmit TPS1, program/notify drive settings, wait, poll lane status,
 * and keep escalating VS/PE per sink request until every lane reports
 * CR_DONE or a stop condition hits (max VS reached, settings repeated,
 * or retry limits exhausted).
 *
 * NOTE(review): garbled extraction — fused original line numbers and
 * missing lines.  "<_settings" appears to be mangled "&lt_settings";
 * code lines left byte-for-byte, comments only added.
 */
1496 static enum link_training_result perform_clock_recovery_sequence(
1497 struct dc_link *link,
1498 const struct link_resource *link_res,
1499 struct link_training_settings *lt_settings,
1502 uint32_t retries_cr;
1503 uint32_t retry_count;
1504 uint32_t wait_time_microsec;
1505 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
1506 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
1507 union lane_align_status_updated dpcd_lane_status_updated;
1508 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
/* Skip setting the pattern if it was already started early (see
 * start_clock_recovery_pattern_early / lt_early_cr_pattern). */
1513 if (!link->ctx->dc->work_arounds.lt_early_cr_pattern)
1514 dp_set_hw_training_pattern(link, link_res, lt_settings->pattern_for_cr, offset);
1516 /* najeeb - The synaptics MST hub can put the LT in
1517 * infinite loop by switching the VS
1519 /* between level 0 and level 1 continuously, here
1520 * we try for CR lock for LinkTrainingMaxCRRetry count*/
1521 while ((retries_cr < LINK_TRAINING_MAX_RETRY_COUNT) &&
1522 (retry_count < LINK_TRAINING_MAX_CR_RETRY)) {
1524 memset(&dpcd_lane_status, '\0', sizeof(dpcd_lane_status));
1525 memset(&dpcd_lane_status_updated, '\0',
1526 sizeof(dpcd_lane_status_updated));
1528 /* 1. call HWSS to set lane settings*/
1529 dp_set_hw_lane_settings(
1535 /* 2. update DPCD of the receiver*/
1537 /* EPR #361076 - write as a 5-byte burst,
1538 * but only for the 1-st iteration.*/
1539 dpcd_set_lt_pattern_and_lane_settings(
1542 lt_settings->pattern_for_cr,
1545 dpcd_set_lane_settings(
1550 /* 3. wait receiver to lock-on*/
1551 wait_time_microsec = lt_settings->cr_pattern_time;
/* Vendor workaround: fixed 16ms wait for certain retimer parts. */
1553 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
1554 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN)) {
1555 wait_time_microsec = 16000;
1558 dp_wait_for_training_aux_rd_interval(
1560 wait_time_microsec);
1562 /* 4. Read lane status and requested drive
1563 * settings as set by the sink
1565 dp_get_lane_status_and_lane_adjust(
1569 &dpcd_lane_status_updated,
1573 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
1574 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
1575 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1576 vendor_specific_lttpr_wa_one_end(link, retry_count);
1577 vendor_specific_lttpr_wa_three(link, dpcd_lane_adjust);
1580 /* 5. check CR done*/
1581 if (dp_is_cr_done(lane_count, dpcd_lane_status))
1582 return LINK_TRAINING_SUCCESS;
1584 /* 6. max VS reached*/
1585 #if defined(CONFIG_DRM_AMD_DC_DCN)
1586 if ((dp_get_link_encoding_format(<_settings->link_settings) ==
1587 DP_8b_10b_ENCODING) &&
1588 dp_is_max_vs_reached(lt_settings))
1591 if (dp_is_max_vs_reached(lt_settings))
1595 /* 7. same lane settings*/
1596 /* Note: settings are the same for all lanes,
1597 * so comparing first lane is sufficient*/
1598 if ((dp_get_link_encoding_format(<_settings->link_settings) == DP_8b_10b_ENCODING) &&
1599 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET ==
1600 dpcd_lane_adjust[0].bits.VOLTAGE_SWING_LANE)
1602 #if defined(CONFIG_DRM_AMD_DC_DCN)
1603 else if ((dp_get_link_encoding_format(<_settings->link_settings) == DP_128b_132b_ENCODING) &&
1604 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE ==
1605 dpcd_lane_adjust[0].tx_ffe.PRESET_VALUE)
1611 /* 8. update VS/PE/PC2 in lt_settings*/
1612 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1613 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1617 if (retry_count >= LINK_TRAINING_MAX_CR_RETRY) {
1619 DC_LOG_ERROR("%s: Link Training Error, could not get CR after %d tries. Possibly voltage swing issue",
1621 LINK_TRAINING_MAX_CR_RETRY);
1625 return dp_get_cr_failure(lane_count, dpcd_lane_status);
/*
 * dp_transition_to_video_idle() - after training, switch the main link to
 * the idle/video pattern, optionally run the POST_LT_ADJ_REQ sequence
 * (skipped when TPS4 was used, per spec), verify the link did not drop,
 * and re-write LANE_COUNT_SET with POST_LT_ADJ_REQ_GRANTED cleared.
 *
 * NOTE(review): garbled extraction — fused line numbers and missing
 * lines (braces, #else, final return).  Comments only added.
 */
1628 static inline enum link_training_result dp_transition_to_video_idle(
1629 struct dc_link *link,
1630 const struct link_resource *link_res,
1631 struct link_training_settings *lt_settings,
1632 enum link_training_result status)
1634 union lane_count_set lane_count_set = {0};
1636 /* 4. mainlink output idle pattern*/
1637 dp_set_hw_test_pattern(link, link_res, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
1640 * 5. post training adjust if required
1641 * If the upstream DPTX and downstream DPRX both support TPS4,
1642 * TPS4 must be used instead of POST_LT_ADJ_REQ.
1644 if (link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED != 1 ||
1645 #if defined(CONFIG_DRM_AMD_DC_DCN)
1646 lt_settings->pattern_for_eq >= DP_TRAINING_PATTERN_SEQUENCE_4) {
1648 lt_settings->pattern_for_eq == DP_TRAINING_PATTERN_SEQUENCE_4) {
1650 /* delay 5ms after Main Link output idle pattern and then check
1653 if (link->connector_signal != SIGNAL_TYPE_EDP && status == LINK_TRAINING_SUCCESS) {
1655 status = dp_check_link_loss_status(link, lt_settings);
/* POST_LT_ADJ_REQ path: failure here downgrades the result to LQA. */
1660 if (status == LINK_TRAINING_SUCCESS &&
1661 perform_post_lt_adj_req_sequence(link, link_res, lt_settings) == false)
1662 status = LINK_TRAINING_LQA_FAIL;
1664 lane_count_set.bits.LANE_COUNT_SET = lt_settings->link_settings.lane_count;
1665 lane_count_set.bits.ENHANCED_FRAMING = lt_settings->enhanced_framing;
1666 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
1668 core_link_write_dpcd(
1671 &lane_count_set.raw,
1672 sizeof(lane_count_set));
/*
 * dp_check_link_loss_status() - re-read sink lane status and report
 * LINK_TRAINING_LINK_LOSS if any active lane has dropped channel EQ,
 * clock recovery or symbol lock since training completed.
 *
 * NOTE(review): garbled extraction — fused line numbers and missing
 * lines (read address/length arguments, braces, final return).  Comments
 * only added.
 */
1677 enum link_training_result dp_check_link_loss_status(
1678 struct dc_link *link,
1679 const struct link_training_settings *link_training_setting)
1681 enum link_training_result status = LINK_TRAINING_SUCCESS;
1682 union lane_status lane_status;
1683 uint8_t dpcd_buf[6] = {0};
1686 core_link_read_dpcd(
1689 (uint8_t *)(dpcd_buf),
1692 /*parse lane status*/
1693 for (lane = 0; lane < link->cur_link_settings.lane_count; lane++) {
1695 * check lanes status
1697 lane_status.raw = get_nibble_at_index(&dpcd_buf[2], lane);
1699 if (!lane_status.bits.CHANNEL_EQ_DONE_0 ||
1700 !lane_status.bits.CR_DONE_0 ||
1701 !lane_status.bits.SYMBOL_LOCKED_0) {
1702 /* if one of the channel equalization, clock
1703 * recovery or symbol lock is dropped
1704 * consider it as (link has been
1705 * dropped) dp sink status has changed
1707 status = LINK_TRAINING_LINK_LOSS;
1715 static inline void decide_8b_10b_training_settings(
1716 struct dc_link *link,
1717 const struct dc_link_settings *link_setting,
1718 struct link_training_settings *lt_settings)
1720 memset(lt_settings, '\0', sizeof(struct link_training_settings));
1722 /* Initialize link settings */
1723 lt_settings->link_settings.use_link_rate_set = link_setting->use_link_rate_set;
1724 lt_settings->link_settings.link_rate_set = link_setting->link_rate_set;
1725 lt_settings->link_settings.link_rate = link_setting->link_rate;
1726 lt_settings->link_settings.lane_count = link_setting->lane_count;
1727 /* TODO hard coded to SS for now
1728 * lt_settings.link_settings.link_spread =
1729 * dal_display_path_is_ss_supported(
1730 * path_mode->display_path) ?
1731 * LINK_SPREAD_05_DOWNSPREAD_30KHZ :
1732 * LINK_SPREAD_DISABLED;
1734 lt_settings->link_settings.link_spread = link->dp_ss_off ?
1735 LINK_SPREAD_DISABLED : LINK_SPREAD_05_DOWNSPREAD_30KHZ;
1736 lt_settings->lttpr_mode = link->lttpr_mode;
1737 lt_settings->cr_pattern_time = get_cr_training_aux_rd_interval(link, link_setting);
1738 lt_settings->eq_pattern_time = get_eq_training_aux_rd_interval(link, link_setting);
1739 lt_settings->pattern_for_cr = decide_cr_training_pattern(link_setting);
1740 lt_settings->pattern_for_eq = decide_eq_training_pattern(link, link_setting);
1741 lt_settings->enhanced_framing = 1;
1742 lt_settings->should_set_fec_ready = true;
1743 lt_settings->disallow_per_lane_settings = true;
1744 lt_settings->always_match_dpcd_with_hw_lane_settings = true;
1745 dp_hw_to_dpcd_lane_settings(lt_settings, lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
#if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * Populate @lt_settings for a 128b/132b (DP2.x) link: copy the link
 * settings, pick CR/EQ patterns, and set the fixed EQ/CDS polling
 * budgets.  The CDS wait budget scales with the number of LTTPR hops;
 * LTTPR mode is non-transparent whenever any repeaters are present.
 *
 * NOTE(review): reconstructed from a garbled extraction — opening brace
 * and closing brace/#endif restored, fused line numbers stripped.
 */
static inline void decide_128b_132b_training_settings(struct dc_link *link,
		const struct dc_link_settings *link_settings,
		struct link_training_settings *lt_settings)
{
	memset(lt_settings, 0, sizeof(*lt_settings));

	lt_settings->link_settings = *link_settings;
	/* TODO: should decide link spread when populating link_settings */
	lt_settings->link_settings.link_spread = link->dp_ss_off ? LINK_SPREAD_DISABLED :
			LINK_SPREAD_05_DOWNSPREAD_30KHZ;

	lt_settings->pattern_for_cr = decide_cr_training_pattern(link_settings);
	lt_settings->pattern_for_eq = decide_eq_training_pattern(link, link_settings);
	lt_settings->eq_pattern_time = 2500;
	lt_settings->eq_wait_time_limit = 400000;
	lt_settings->eq_loop_count_limit = 20;
	lt_settings->pattern_for_cds = DP_128b_132b_TPS2_CDS;
	lt_settings->cds_pattern_time = 2500;
	lt_settings->cds_wait_time_limit = (dp_convert_to_count(
			link->dpcd_caps.lttpr_caps.phy_repeater_cnt) + 1) * 20000;
	lt_settings->lttpr_mode = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt) ?
			LTTPR_MODE_NON_TRANSPARENT : LTTPR_MODE_TRANSPARENT;
	lt_settings->disallow_per_lane_settings = true;
	dp_hw_to_dpcd_lane_settings(lt_settings,
			lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
}
#endif
1777 void dp_decide_training_settings(
1778 struct dc_link *link,
1779 const struct dc_link_settings *link_settings,
1780 struct link_training_settings *lt_settings)
1782 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING)
1783 decide_8b_10b_training_settings(link, link_settings, lt_settings);
1784 #if defined(CONFIG_DRM_AMD_DC_DCN)
1785 else if (dp_get_link_encoding_format(link_settings) == DP_128b_132b_ENCODING)
1786 decide_128b_132b_training_settings(link, link_settings, lt_settings);
/*
 * override_training_settings() - layer user/debug overrides from
 * @overrides (and BIOS-forced drive settings for certain retimer parts)
 * on top of the already-decided @lt_settings, then refresh the per-lane
 * settings and the DPCD mirror.
 *
 * NOTE(review): garbled extraction — fused original line numbers and
 * missing lines (braces, #endif).  Code reproduced byte-for-byte;
 * comments only added.
 */
1790 static void override_training_settings(
1791 struct dc_link *link,
1792 const struct dc_link_training_overrides *overrides,
1793 struct link_training_settings *lt_settings)
1797 /* Override link spread */
1798 if (!link->dp_ss_off && overrides->downspread != NULL)
1799 lt_settings->link_settings.link_spread = *overrides->downspread ?
1800 LINK_SPREAD_05_DOWNSPREAD_30KHZ
1801 : LINK_SPREAD_DISABLED;
1803 /* Override lane settings */
1804 if (overrides->voltage_swing != NULL)
1805 lt_settings->voltage_swing = overrides->voltage_swing;
1806 if (overrides->pre_emphasis != NULL)
1807 lt_settings->pre_emphasis = overrides->pre_emphasis;
1808 if (overrides->post_cursor2 != NULL)
1809 lt_settings->post_cursor2 = overrides->post_cursor2;
1810 #if defined(CONFIG_DRM_AMD_DC_DCN)
1811 if (overrides->ffe_preset != NULL)
1812 lt_settings->ffe_preset = overrides->ffe_preset;
1814 /* Override HW lane settings with BIOS forced values if present */
1815 if (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN &&
1816 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1817 lt_settings->voltage_swing = &link->bios_forced_drive_settings.VOLTAGE_SWING;
1818 lt_settings->pre_emphasis = &link->bios_forced_drive_settings.PRE_EMPHASIS;
1819 lt_settings->always_match_dpcd_with_hw_lane_settings = false;
/* Seed every lane from the override pointers (or spec defaults). */
1821 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
1822 lt_settings->lane_settings[lane].VOLTAGE_SWING =
1823 lt_settings->voltage_swing != NULL ?
1824 *lt_settings->voltage_swing :
1825 VOLTAGE_SWING_LEVEL0;
1826 lt_settings->lane_settings[lane].PRE_EMPHASIS =
1827 lt_settings->pre_emphasis != NULL ?
1828 *lt_settings->pre_emphasis
1829 : PRE_EMPHASIS_DISABLED;
1830 lt_settings->lane_settings[lane].POST_CURSOR2 =
1831 lt_settings->post_cursor2 != NULL ?
1832 *lt_settings->post_cursor2
1833 : POST_CURSOR2_DISABLED;
1836 dp_hw_to_dpcd_lane_settings(lt_settings,
1837 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1839 /* Initialize training timings */
1840 if (overrides->cr_pattern_time != NULL)
1841 lt_settings->cr_pattern_time = *overrides->cr_pattern_time;
1843 if (overrides->eq_pattern_time != NULL)
1844 lt_settings->eq_pattern_time = *overrides->eq_pattern_time;
1846 if (overrides->pattern_for_cr != NULL)
1847 lt_settings->pattern_for_cr = *overrides->pattern_for_cr;
1848 if (overrides->pattern_for_eq != NULL)
1849 lt_settings->pattern_for_eq = *overrides->pattern_for_eq;
1851 if (overrides->enhanced_framing != NULL)
1852 lt_settings->enhanced_framing = *overrides->enhanced_framing;
1854 if (link->preferred_training_settings.fec_enable != NULL)
1855 lt_settings->should_set_fec_ready = *link->preferred_training_settings.fec_enable;
1858 uint8_t dp_convert_to_count(uint8_t lttpr_repeater_count)
1860 switch (lttpr_repeater_count) {
1861 case 0x80: // 1 lttpr repeater
1863 case 0x40: // 2 lttpr repeaters
1865 case 0x20: // 3 lttpr repeaters
1867 case 0x10: // 4 lttpr repeaters
1869 case 0x08: // 5 lttpr repeaters
1871 case 0x04: // 6 lttpr repeaters
1873 case 0x02: // 7 lttpr repeaters
1875 case 0x01: // 8 lttpr repeaters
1880 return 0; // invalid value
1883 static enum dc_status configure_lttpr_mode_transparent(struct dc_link *link)
1885 uint8_t repeater_mode = DP_PHY_REPEATER_MODE_TRANSPARENT;
1887 DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Transparent Mode\n", __func__);
1888 return core_link_write_dpcd(link,
1889 DP_PHY_REPEATER_MODE,
1890 (uint8_t *)&repeater_mode,
1891 sizeof(repeater_mode));
/*
 * configure_lttpr_mode_non_transparent() - for 8b/10b links, first reset
 * the repeaters to transparent mode, then (when the link is in
 * non-transparent LTTPR mode) switch them to non-transparent and cache
 * each hop's TRAINING_AUX_RD_INTERVAL.  The DPTX-to-DPIA hop of a USB4
 * tunnel is not trained by the driver, so its cached interval is zeroed.
 *
 * NOTE(review): garbled extraction — fused original line numbers and
 * missing lines (braces, final return).  "<_settings" appears to be
 * mangled "&lt_settings".  Code reproduced verbatim; comments only added.
 */
1894 static enum dc_status configure_lttpr_mode_non_transparent(
1895 struct dc_link *link,
1896 const struct link_training_settings *lt_settings)
1898 /* aux timeout is already set to extended */
1899 /* RESET/SET lttpr mode to enable non transparent mode */
1900 uint8_t repeater_cnt;
1901 uint32_t aux_interval_address;
1902 uint8_t repeater_id;
1903 enum dc_status result = DC_ERROR_UNEXPECTED;
1904 uint8_t repeater_mode = DP_PHY_REPEATER_MODE_TRANSPARENT;
1906 enum dp_link_encoding encoding = dp_get_link_encoding_format(<_settings->link_settings);
1908 if (encoding == DP_8b_10b_ENCODING) {
1909 DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Transparent Mode\n", __func__);
1910 result = core_link_write_dpcd(link,
1911 DP_PHY_REPEATER_MODE,
1912 (uint8_t *)&repeater_mode,
1913 sizeof(repeater_mode));
1917 if (result == DC_OK) {
1918 link->dpcd_caps.lttpr_caps.mode = repeater_mode;
1921 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
1923 DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Non Transparent Mode\n", __func__);
1925 repeater_mode = DP_PHY_REPEATER_MODE_NON_TRANSPARENT;
1926 result = core_link_write_dpcd(link,
1927 DP_PHY_REPEATER_MODE,
1928 (uint8_t *)&repeater_mode,
1929 sizeof(repeater_mode));
1931 if (result == DC_OK) {
1932 link->dpcd_caps.lttpr_caps.mode = repeater_mode;
1935 if (encoding == DP_8b_10b_ENCODING) {
1936 repeater_cnt = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
1938 /* Driver does not need to train the first hop. Skip DPCD read and clear
1939 * AUX_RD_INTERVAL for DPTX-to-DPIA hop.
1941 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA)
1942 link->dpcd_caps.lttpr_caps.aux_rd_interval[--repeater_cnt] = 0;
/* Cache each remaining hop's advertised AUX read interval (7 bits). */
1944 for (repeater_id = repeater_cnt; repeater_id > 0; repeater_id--) {
1945 aux_interval_address = DP_TRAINING_AUX_RD_INTERVAL_PHY_REPEATER1 +
1946 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (repeater_id - 1));
1947 core_link_read_dpcd(
1949 aux_interval_address,
1950 (uint8_t *)&link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1],
1951 sizeof(link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1]));
1952 link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1] &= 0x7F;
1960 static void repeater_training_done(struct dc_link *link, uint32_t offset)
1962 union dpcd_training_pattern dpcd_pattern = {0};
1964 const uint32_t dpcd_base_lt_offset =
1965 DP_TRAINING_PATTERN_SET_PHY_REPEATER1 +
1966 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1967 /* Set training not in progress*/
1968 dpcd_pattern.v1_4.TRAINING_PATTERN_SET = DPCD_TRAINING_PATTERN_VIDEOIDLE;
1970 core_link_write_dpcd(
1972 dpcd_base_lt_offset,
1976 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Id: %d 0x%X pattern = %x\n",
1979 dpcd_base_lt_offset,
1980 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
/*
 * print_status_message() - emit a one-line connectivity log summarizing
 * the training outcome: link rate, lane count, result string, lane-0
 * VS/PE and downspread setting.  Purely informational.
 *
 * NOTE(review): garbled extraction — most "link_rate = ..." / "break;"
 * case bodies and some braces were dropped, and original line numbers
 * are fused in.  Code reproduced byte-for-byte; comments only added.
 */
1983 static void print_status_message(
1984 struct dc_link *link,
1985 const struct link_training_settings *lt_settings,
1986 enum link_training_result status)
1988 char *link_rate = "Unknown";
1989 char *lt_result = "Unknown";
1990 char *lt_spread = "Disabled";
/* Map the negotiated link rate to a printable name. */
1992 switch (lt_settings->link_settings.link_rate) {
1996 case LINK_RATE_RATE_2:
1999 case LINK_RATE_RATE_3:
2002 case LINK_RATE_HIGH:
2005 case LINK_RATE_RBR2:
2008 case LINK_RATE_RATE_6:
2011 case LINK_RATE_HIGH2:
2014 case LINK_RATE_HIGH3:
2017 #if defined(CONFIG_DRM_AMD_DC_DCN)
2018 case LINK_RATE_UHBR10:
2019 link_rate = "UHBR10";
2021 case LINK_RATE_UHBR13_5:
2022 link_rate = "UHBR13.5";
2024 case LINK_RATE_UHBR20:
2025 link_rate = "UHBR20";
/* Map the training result enum to a printable name. */
2033 case LINK_TRAINING_SUCCESS:
2036 case LINK_TRAINING_CR_FAIL_LANE0:
2037 lt_result = "CR failed lane0";
2039 case LINK_TRAINING_CR_FAIL_LANE1:
2040 lt_result = "CR failed lane1";
2042 case LINK_TRAINING_CR_FAIL_LANE23:
2043 lt_result = "CR failed lane23";
2045 case LINK_TRAINING_EQ_FAIL_CR:
2046 lt_result = "CR failed in EQ";
2048 case LINK_TRAINING_EQ_FAIL_EQ:
2049 lt_result = "EQ failed";
2051 case LINK_TRAINING_LQA_FAIL:
2052 lt_result = "LQA failed";
2054 case LINK_TRAINING_LINK_LOSS:
2055 lt_result = "Link loss";
2057 #if defined(CONFIG_DRM_AMD_DC_DCN)
2058 case DP_128b_132b_LT_FAILED:
2059 lt_result = "LT_FAILED received";
2061 case DP_128b_132b_MAX_LOOP_COUNT_REACHED:
2062 lt_result = "max loop count reached";
2064 case DP_128b_132b_CHANNEL_EQ_DONE_TIMEOUT:
2065 lt_result = "channel EQ timeout";
2067 case DP_128b_132b_CDS_DONE_TIMEOUT:
2068 lt_result = "CDS timeout";
/* Map the downspread setting to a printable name. */
2075 switch (lt_settings->link_settings.link_spread) {
2076 case LINK_SPREAD_DISABLED:
2077 lt_spread = "Disabled";
2079 case LINK_SPREAD_05_DOWNSPREAD_30KHZ:
2080 lt_spread = "0.5% 30KHz";
2082 case LINK_SPREAD_05_DOWNSPREAD_33KHZ:
2083 lt_spread = "0.5% 33KHz";
2089 /* Connectivity log: link training */
2090 #if defined(CONFIG_DRM_AMD_DC_DCN)
2091 /* TODO - DP2.0 Log: add connectivity log for FFE PRESET */
2093 CONN_MSG_LT(link, "%sx%d %s VS=%d, PE=%d, DS=%s",
2095 lt_settings->link_settings.lane_count,
2097 lt_settings->lane_settings[0].VOLTAGE_SWING,
2098 lt_settings->lane_settings[0].PRE_EMPHASIS,
2102 void dc_link_dp_set_drive_settings(
2103 struct dc_link *link,
2104 const struct link_resource *link_res,
2105 struct link_training_settings *lt_settings)
2107 /* program ASIC PHY settings*/
2108 dp_set_hw_lane_settings(link, link_res, lt_settings, DPRX);
2110 dp_hw_to_dpcd_lane_settings(lt_settings,
2111 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2113 /* Notify DP sink the PHY settings from source */
2114 dpcd_set_lane_settings(link, lt_settings, DPRX);
/*
 * dc_link_dp_perform_link_training_skip_aux() - "blind" link training:
 * run the CR and EQ phases with fixed wait times and no AUX status
 * reads/adjustments (for sinks whose AUX is unavailable), then drop to
 * the idle pattern and report success.
 *
 * NOTE(review): garbled extraction — fused original line numbers,
 * missing argument lines, and "<_settings" appears to be mangled
 * "&lt_settings".  Code reproduced verbatim; comments only added.
 */
2117 bool dc_link_dp_perform_link_training_skip_aux(
2118 struct dc_link *link,
2119 const struct link_resource *link_res,
2120 const struct dc_link_settings *link_setting)
2122 struct link_training_settings lt_settings = {0};
2124 dp_decide_training_settings(
2128 override_training_settings(
2130 &link->preferred_training_settings,
2133 /* 1. Perform_clock_recovery_sequence. */
2135 /* transmit training pattern for clock recovery */
2136 dp_set_hw_training_pattern(link, link_res, lt_settings.pattern_for_cr, DPRX);
2138 /* call HWSS to set lane settings*/
2139 dp_set_hw_lane_settings(link, link_res, <_settings, DPRX);
2141 /* wait receiver to lock-on*/
2142 dp_wait_for_training_aux_rd_interval(link, lt_settings.cr_pattern_time);
2144 /* 2. Perform_channel_equalization_sequence. */
2146 /* transmit training pattern for channel equalization. */
2147 dp_set_hw_training_pattern(link, link_res, lt_settings.pattern_for_eq, DPRX);
2149 /* call HWSS to set lane settings*/
2150 dp_set_hw_lane_settings(link, link_res, <_settings, DPRX);
2152 /* wait receiver to lock-on. */
2153 dp_wait_for_training_aux_rd_interval(link, lt_settings.eq_pattern_time);
2155 /* 3. Perform_link_training_int. */
2157 /* Mainlink output idle pattern. */
2158 dp_set_hw_test_pattern(link, link_res, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
2160 print_status_message(link, <_settings, LINK_TRAINING_SUCCESS);
2165 enum dc_status dpcd_configure_lttpr_mode(struct dc_link *link, struct link_training_settings *lt_settings)
2167 enum dc_status status = DC_OK;
2169 if (lt_settings->lttpr_mode == LTTPR_MODE_TRANSPARENT)
2170 status = configure_lttpr_mode_transparent(link);
2172 else if (lt_settings->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT)
2173 status = configure_lttpr_mode_non_transparent(link, lt_settings);
2178 static void dpcd_exit_training_mode(struct dc_link *link)
2180 #if defined(CONFIG_DRM_AMD_DC_DCN)
2181 uint8_t sink_status = 0;
2185 /* clear training pattern set */
2186 dpcd_set_training_pattern(link, DP_TRAINING_PATTERN_VIDEOIDLE);
2188 #if defined(CONFIG_DRM_AMD_DC_DCN)
2189 /* poll for intra-hop disable */
2190 for (i = 0; i < 10; i++) {
2191 if ((core_link_read_dpcd(link, DP_SINK_STATUS, &sink_status, 1) == DC_OK) &&
2192 (sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION) == 0)
2199 enum dc_status dpcd_configure_channel_coding(struct dc_link *link,
2200 struct link_training_settings *lt_settings)
2202 enum dp_link_encoding encoding =
2203 dp_get_link_encoding_format(
2204 <_settings->link_settings);
2205 enum dc_status status;
2207 status = core_link_write_dpcd(
2209 DP_MAIN_LINK_CHANNEL_CODING_SET,
2210 (uint8_t *) &encoding,
2212 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X MAIN_LINK_CHANNEL_CODING_SET = %x\n",
2214 DP_MAIN_LINK_CHANNEL_CODING_SET,
2220 #if defined(CONFIG_DRM_AMD_DC_DCN)
2221 static void dpcd_128b_132b_get_aux_rd_interval(struct dc_link *link,
2222 uint32_t *interval_in_us)
2224 union dp_128b_132b_training_aux_rd_interval dpcd_interval;
2225 uint32_t interval_unit = 0;
2227 dpcd_interval.raw = 0;
2228 core_link_read_dpcd(link, DP_128b_132b_TRAINING_AUX_RD_INTERVAL,
2229 &dpcd_interval.raw, sizeof(dpcd_interval.raw));
2230 interval_unit = dpcd_interval.bits.UNIT ? 1 : 2; /* 0b = 2 ms, 1b = 1 ms */
2231 /* (128b/132b_TRAINING_AUX_RD_INTERVAL value + 1) *
2232 * INTERVAL_UNIT. The maximum is 256 ms
2234 *interval_in_us = (dpcd_interval.bits.VALUE + 1) * interval_unit * 1000;
/*
 * 128b/132b channel-equalization phase (UHBR link rates): transmit TPS1,
 * read initial DPRX feedback to pick TX FFE presets, switch to TPS2, then
 * poll lane status until EQ done / interlane alignment, a loop or wait
 * limit, or an LT_FAILED indication.
 *
 * NOTE(review): this excerpt is fragmentary — 'loop_count' is referenced
 * below but its declaration/increment lines are not visible here, and
 * several braces/break statements are missing; confirm against the
 * complete file before editing logic.
 */
2237 static enum link_training_result dp_perform_128b_132b_channel_eq_done_sequence(
2238 struct dc_link *link,
2239 const struct link_resource *link_res,
2240 struct link_training_settings *lt_settings)
2243 uint32_t aux_rd_interval = 0;
2244 uint32_t wait_time = 0;
2245 union lane_align_status_updated dpcd_lane_status_updated = {0};
2246 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
2247 enum link_training_result status = LINK_TRAINING_SUCCESS;
2248 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
2250 /* Transmit 128b/132b_TPS1 over Main-Link */
2251 dp_set_hw_training_pattern(link, link_res, lt_settings->pattern_for_cr, DPRX);
2252 /* Set TRAINING_PATTERN_SET to 01h */
2253 dpcd_set_training_pattern(link, lt_settings->pattern_for_cr);
2255 /* Adjust TX_FFE_PRESET_VALUE and Transmit 128b/132b_TPS2 over Main-Link */
2256 dpcd_128b_132b_get_aux_rd_interval(link, &aux_rd_interval);
2257 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2258 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2259 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2260 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings)
2261 dp_set_hw_lane_settings(link, link_res, lt_settings, DPRX);
2262 dp_set_hw_training_pattern(link, link_res, lt_settings->pattern_for_eq, DPRX);
2264 /* Set loop counter to start from 1 */
2267 /* Set TRAINING_PATTERN_SET to 02h and TX_FFE_PRESET_VALUE in one AUX transaction */
2268 dpcd_set_lt_pattern_and_lane_settings(link, lt_settings,
2269 lt_settings->pattern_for_eq, DPRX);
2271 /* poll for channel EQ done */
2272 while (status == LINK_TRAINING_SUCCESS) {
2273 dp_wait_for_training_aux_rd_interval(link, aux_rd_interval);
2274 wait_time += aux_rd_interval;
2275 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2276 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2277 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2278 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2279 dpcd_128b_132b_get_aux_rd_interval(link, &aux_rd_interval);
2280 if (dp_is_ch_eq_done(lt_settings->link_settings.lane_count,
2281 dpcd_lane_status)) {
2284 } else if (loop_count >= lt_settings->eq_loop_count_limit) {
2285 status = DP_128b_132b_MAX_LOOP_COUNT_REACHED;
2286 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2287 status = DP_128b_132b_LT_FAILED;
/* not yet done/failed: re-drive updated FFE presets and retry */
2289 dp_set_hw_lane_settings(link, link_res, lt_settings, DPRX);
2290 dpcd_set_lane_settings(link, lt_settings, DPRX);
2295 /* poll for EQ interlane align done */
2296 while (status == LINK_TRAINING_SUCCESS) {
2297 if (dpcd_lane_status_updated.bits.EQ_INTERLANE_ALIGN_DONE_128b_132b) {
2300 } else if (wait_time >= lt_settings->eq_wait_time_limit) {
2301 status = DP_128b_132b_CHANNEL_EQ_DONE_TIMEOUT;
2302 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2303 status = DP_128b_132b_LT_FAILED;
2305 dp_wait_for_training_aux_rd_interval(link,
2306 lt_settings->eq_pattern_time);
2307 wait_time += lt_settings->eq_pattern_time;
2308 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2309 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
/*
 * 128b/132b CDS (clock data switch) phase: after channel EQ, set the CDS
 * training pattern and poll until all lanes report symbol lock plus CDS
 * interlane alignment, an LT_FAILED indication, or a wait-time limit.
 *
 * NOTE(review): fragmentary excerpt — loop 'break' statements and closing
 * braces are not visible; confirm against the complete file.
 */
2316 static enum link_training_result dp_perform_128b_132b_cds_done_sequence(
2317 struct dc_link *link,
2318 const struct link_resource *link_res,
2319 struct link_training_settings *lt_settings)
2321 /* Assumption: assume hardware has transmitted eq pattern */
2322 enum link_training_result status = LINK_TRAINING_SUCCESS;
2323 union lane_align_status_updated dpcd_lane_status_updated = {0};
2324 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
2325 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
2326 uint32_t wait_time = 0;
2328 /* initiate CDS done sequence */
2329 dpcd_set_training_pattern(link, lt_settings->pattern_for_cds);
2331 /* poll for CDS interlane align done and symbol lock */
2332 while (status == LINK_TRAINING_SUCCESS) {
2333 dp_wait_for_training_aux_rd_interval(link,
2334 lt_settings->cds_pattern_time);
2335 wait_time += lt_settings->cds_pattern_time;
2336 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2337 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2338 if (dp_is_symbol_locked(lt_settings->link_settings.lane_count, dpcd_lane_status) &&
2339 dpcd_lane_status_updated.bits.CDS_INTERLANE_ALIGN_DONE_128b_132b) {
2342 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2343 status = DP_128b_132b_LT_FAILED;
2344 } else if (wait_time >= lt_settings->cds_wait_time_limit) {
2345 status = DP_128b_132b_CDS_DONE_TIMEOUT;
/*
 * Legacy 8b/10b link training: program link settings, then run clock
 * recovery + channel equalization. In non-transparent LTTPR mode the
 * sequence is run per repeater (farthest first, DPRX last); DPCD lane
 * settings are zeroed between repeater hops.
 *
 * NOTE(review): fragmentary excerpt — 'lane' declaration, repeater-loop
 * decrement and several closing braces are not visible here.
 */
2353 static enum link_training_result dp_perform_8b_10b_link_training(
2354 struct dc_link *link,
2355 const struct link_resource *link_res,
2356 struct link_training_settings *lt_settings)
2358 enum link_training_result status = LINK_TRAINING_SUCCESS;
2360 uint8_t repeater_cnt;
2361 uint8_t repeater_id;
2364 if (link->ctx->dc->work_arounds.lt_early_cr_pattern)
2365 start_clock_recovery_pattern_early(link, link_res, lt_settings, DPRX);
2367 /* 1. set link rate, lane count and spread. */
2368 dpcd_set_link_settings(link, lt_settings);
2370 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
2372 /* 2. perform link training (set link training done
2373 * to false is done as well)
2375 repeater_cnt = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
2377 for (repeater_id = repeater_cnt; (repeater_id > 0 && status == LINK_TRAINING_SUCCESS);
2379 status = perform_clock_recovery_sequence(link, link_res, lt_settings, repeater_id);
2381 if (status != LINK_TRAINING_SUCCESS)
2384 status = perform_channel_equalization_sequence(link,
2389 if (status != LINK_TRAINING_SUCCESS)
2392 repeater_training_done(link, repeater_id);
/* clear stale DPCD lane settings before training the next hop */
2395 for (lane = 0; lane < (uint8_t)lt_settings->link_settings.lane_count; lane++)
2396 lt_settings->dpcd_lane_settings[lane].raw = 0;
2399 if (status == LINK_TRAINING_SUCCESS) {
2400 status = perform_clock_recovery_sequence(link, link_res, lt_settings, DPRX);
2401 if (status == LINK_TRAINING_SUCCESS) {
2402 status = perform_channel_equalization_sequence(link,
2412 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * Top-level 128b/132b training: optionally fall back to legacy 8b/10b
 * training (debug flag legacy_dp2_lt), otherwise program link settings
 * and run the channel-EQ then CDS phases.
 *
 * NOTE(review): "<_settings" below is a paste-mangled "&lt_settings";
 * dropped argument lines are also missing — confirm against full file.
 */
2413 static enum link_training_result dp_perform_128b_132b_link_training(
2414 struct dc_link *link,
2415 const struct link_resource *link_res,
2416 struct link_training_settings *lt_settings)
2418 enum link_training_result result = LINK_TRAINING_SUCCESS;
2420 /* TODO - DP2.0 Link: remove legacy_dp2_lt logic */
2421 if (link->dc->debug.legacy_dp2_lt) {
2422 struct link_training_settings legacy_settings;
2424 decide_8b_10b_training_settings(link,
2425 <_settings->link_settings,
2427 return dp_perform_8b_10b_link_training(link, link_res, &legacy_settings);
2430 dpcd_set_link_settings(link, lt_settings);
2432 if (result == LINK_TRAINING_SUCCESS)
2433 result = dp_perform_128b_132b_channel_eq_done_sequence(link, link_res, lt_settings);
2435 if (result == LINK_TRAINING_SUCCESS)
2436 result = dp_perform_128b_132b_cds_done_sequence(link, link_res, lt_settings);
/*
 * Vendor-specific 8b/10b training sequence for links with fixed VS/PE
 * hardware (EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN). Drives voltage swing
 * and pre-emphasis through vendor DPCD writes at 0xF004F (offset-adjusted
 * per repeater) instead of the regular lane-settings path, with an
 * intercept enable/disable handshake around clock recovery.
 *
 * NOTE(review): fragmentary excerpt — 'rate', 'lane', retry-counter
 * initialization, several write arguments and closing braces are not
 * visible, and "<_settings" is a mangled "&lt_settings".
 */
2442 static enum link_training_result dc_link_dp_perform_fixed_vs_pe_training_sequence(
2443 struct dc_link *link,
2444 const struct link_resource *link_res,
2445 struct link_training_settings *lt_settings)
2447 const uint8_t vendor_lttpr_write_data_reset[4] = {0x1, 0x50, 0x63, 0xFF};
2448 const uint8_t offset = dp_convert_to_count(
2449 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
2450 const uint8_t vendor_lttpr_write_data_intercept_en[4] = {0x1, 0x55, 0x63, 0x0};
2451 const uint8_t vendor_lttpr_write_data_intercept_dis[4] = {0x1, 0x55, 0x63, 0x68};
2452 uint8_t vendor_lttpr_write_data_vs[4] = {0x1, 0x51, 0x63, 0x0};
2453 uint8_t vendor_lttpr_write_data_pe[4] = {0x1, 0x52, 0x63, 0x0};
2454 uint32_t vendor_lttpr_write_address = 0xF004F;
2455 enum link_training_result status = LINK_TRAINING_SUCCESS;
2457 union down_spread_ctrl downspread = {0};
2458 union lane_count_set lane_count_set = {0};
2459 uint8_t toggle_rate;
2462 /* Only 8b/10b is supported */
2463 ASSERT(dp_get_link_encoding_format(<_settings->link_settings) ==
2464 DP_8b_10b_ENCODING);
2466 if (offset != 0xFF) {
2467 vendor_lttpr_write_address +=
2468 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
2471 /* Vendor specific: Reset lane settings */
2472 core_link_write_dpcd(
2474 vendor_lttpr_write_address,
2475 &vendor_lttpr_write_data_reset[0],
2476 sizeof(vendor_lttpr_write_data_reset));
2477 core_link_write_dpcd(
2479 vendor_lttpr_write_address,
2480 &vendor_lttpr_write_data_vs[0],
2481 sizeof(vendor_lttpr_write_data_vs));
2482 core_link_write_dpcd(
2484 vendor_lttpr_write_address,
2485 &vendor_lttpr_write_data_pe[0],
2486 sizeof(vendor_lttpr_write_data_pe));
2488 /* Vendor specific: Enable intercept */
2489 core_link_write_dpcd(
2491 vendor_lttpr_write_address,
2492 &vendor_lttpr_write_data_intercept_en[0],
2493 sizeof(vendor_lttpr_write_data_intercept_en));
2495 /* 1. set link rate, lane count and spread. */
2497 downspread.raw = (uint8_t)(lt_settings->link_settings.link_spread);
2499 lane_count_set.bits.LANE_COUNT_SET =
2500 lt_settings->link_settings.lane_count;
2502 lane_count_set.bits.ENHANCED_FRAMING = lt_settings->enhanced_framing;
2503 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
2506 if (lt_settings->pattern_for_eq < DP_TRAINING_PATTERN_SEQUENCE_4) {
2507 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED =
2508 link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED;
2511 core_link_write_dpcd(link, DP_DOWNSPREAD_CTRL,
2512 &downspread.raw, sizeof(downspread));
2514 core_link_write_dpcd(link, DP_LANE_COUNT_SET,
2515 &lane_count_set.raw, 1);
2517 #if defined(CONFIG_DRM_AMD_DC_DCN)
2518 rate = get_dpcd_link_rate(<_settings->link_settings);
2520 rate = (uint8_t) (lt_settings->link_settings.link_rate);
2523 /* Vendor specific: Toggle link rate */
2524 toggle_rate = (rate == 0x6) ? 0xA : 0x6;
2526 if (link->vendor_specific_lttpr_link_rate_wa == rate) {
2527 core_link_write_dpcd(
2534 link->vendor_specific_lttpr_link_rate_wa = rate;
2536 core_link_write_dpcd(link, DP_LINK_BW_SET, &rate, 1);
2538 DC_LOG_HW_LINK_TRAINING("%s\n %x rate = %x\n %x lane = %x framing = %x\n %x spread = %x\n",
2541 lt_settings->link_settings.link_rate,
2543 lt_settings->link_settings.lane_count,
2544 lt_settings->enhanced_framing,
2546 lt_settings->link_settings.link_spread);
2548 /* 2. Perform link training */
2550 /* Perform Clock Recovery Sequence */
2551 if (status == LINK_TRAINING_SUCCESS) {
2552 uint32_t retries_cr;
2553 uint32_t retry_count;
2554 uint32_t wait_time_microsec;
2555 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
2556 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
2557 union lane_align_status_updated dpcd_lane_status_updated;
2558 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
2563 while ((retries_cr < LINK_TRAINING_MAX_RETRY_COUNT) &&
2564 (retry_count < LINK_TRAINING_MAX_CR_RETRY)) {
2566 memset(&dpcd_lane_status, '\0', sizeof(dpcd_lane_status));
2567 memset(&dpcd_lane_status_updated, '\0',
2568 sizeof(dpcd_lane_status_updated));
2570 /* 1. call HWSS to set lane settings */
2571 dp_set_hw_lane_settings(
2577 /* 2. update DPCD of the receiver */
2579 /* EPR #361076 - write as a 5-byte burst,
2580 * but only for the 1-st iteration.
2582 dpcd_set_lt_pattern_and_lane_settings(
2585 lt_settings->pattern_for_cr,
2587 /* Vendor specific: Disable intercept */
2588 core_link_write_dpcd(
2590 vendor_lttpr_write_address,
2591 &vendor_lttpr_write_data_intercept_dis[0],
2592 sizeof(vendor_lttpr_write_data_intercept_dis));
2594 vendor_lttpr_write_data_vs[3] = 0;
2595 vendor_lttpr_write_data_pe[3] = 0;
/* pack per-lane 2-bit VS/PE fields into the vendor write payload byte */
2597 for (lane = 0; lane < lane_count; lane++) {
2598 vendor_lttpr_write_data_vs[3] |=
2599 lt_settings->dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET << (2 * lane);
2600 vendor_lttpr_write_data_pe[3] |=
2601 lt_settings->dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET << (2 * lane);
2604 /* Vendor specific: Update VS and PE to DPRX requested value */
2605 core_link_write_dpcd(
2607 vendor_lttpr_write_address,
2608 &vendor_lttpr_write_data_vs[0],
2609 sizeof(vendor_lttpr_write_data_vs));
2610 core_link_write_dpcd(
2612 vendor_lttpr_write_address,
2613 &vendor_lttpr_write_data_pe[0],
2614 sizeof(vendor_lttpr_write_data_pe));
2616 dpcd_set_lane_settings(
2622 /* 3. wait receiver to lock-on*/
2623 wait_time_microsec = lt_settings->cr_pattern_time;
2625 dp_wait_for_training_aux_rd_interval(
2627 wait_time_microsec);
2629 /* 4. Read lane status and requested drive
2630 * settings as set by the sink
2632 dp_get_lane_status_and_lane_adjust(
2636 &dpcd_lane_status_updated,
2640 /* 5. check CR done*/
2641 if (dp_is_cr_done(lane_count, dpcd_lane_status)) {
2642 status = LINK_TRAINING_SUCCESS;
2646 /* 6. max VS reached*/
2647 if (dp_is_max_vs_reached(lt_settings))
2650 /* 7. same lane settings */
2651 /* Note: settings are the same for all lanes,
2652 * so comparing first lane is sufficient
2654 if (lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET ==
2655 dpcd_lane_adjust[0].bits.VOLTAGE_SWING_LANE)
2660 /* 8. update VS/PE/PC2 in lt_settings*/
2661 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2662 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2666 if (retry_count >= LINK_TRAINING_MAX_CR_RETRY) {
2668 DC_LOG_ERROR("%s: Link Training Error, could not get CR after %d tries. Possibly voltage swing issue",
2670 LINK_TRAINING_MAX_CR_RETRY);
2674 status = dp_get_cr_failure(lane_count, dpcd_lane_status);
2677 /* Perform Channel EQ Sequence */
2678 if (status == LINK_TRAINING_SUCCESS) {
2679 enum dc_dp_training_pattern tr_pattern;
2680 uint32_t retries_ch_eq;
2681 uint32_t wait_time_microsec;
2682 enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
2683 union lane_align_status_updated dpcd_lane_status_updated = {0};
2684 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
2685 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
2687 /* Note: also check that TPS4 is a supported feature*/
2688 tr_pattern = lt_settings->pattern_for_eq;
2690 dp_set_hw_training_pattern(link, link_res, tr_pattern, 0);
2692 status = LINK_TRAINING_EQ_FAIL_EQ;
2694 for (retries_ch_eq = 0; retries_ch_eq <= LINK_TRAINING_MAX_RETRY_COUNT;
2697 dp_set_hw_lane_settings(link, link_res, lt_settings, 0);
2699 vendor_lttpr_write_data_vs[3] = 0;
2700 vendor_lttpr_write_data_pe[3] = 0;
2702 for (lane = 0; lane < lane_count; lane++) {
2703 vendor_lttpr_write_data_vs[3] |=
2704 lt_settings->dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET << (2 * lane);
2705 vendor_lttpr_write_data_pe[3] |=
2706 lt_settings->dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET << (2 * lane);
2709 /* Vendor specific: Update VS and PE to DPRX requested value */
2710 core_link_write_dpcd(
2712 vendor_lttpr_write_address,
2713 &vendor_lttpr_write_data_vs[0],
2714 sizeof(vendor_lttpr_write_data_vs));
2715 core_link_write_dpcd(
2717 vendor_lttpr_write_address,
2718 &vendor_lttpr_write_data_pe[0],
2719 sizeof(vendor_lttpr_write_data_pe));
2723 /* EPR #361076 - write as a 5-byte burst,
2724 * but only for the 1-st iteration
2727 dpcd_set_lt_pattern_and_lane_settings(
2732 dpcd_set_lane_settings(link, lt_settings, 0);
2734 /* 3. wait for receiver to lock-on*/
2735 wait_time_microsec = lt_settings->eq_pattern_time;
2737 dp_wait_for_training_aux_rd_interval(
2739 wait_time_microsec);
2741 /* 4. Read lane status and requested
2742 * drive settings as set by the sink
2744 dp_get_lane_status_and_lane_adjust(
2748 &dpcd_lane_status_updated,
2752 /* 5. check CR done*/
2753 if (!dp_is_cr_done(lane_count, dpcd_lane_status)) {
2754 status = LINK_TRAINING_EQ_FAIL_CR;
2758 /* 6. check CHEQ done*/
2759 if (dp_is_ch_eq_done(lane_count, dpcd_lane_status) &&
2760 dp_is_symbol_locked(lane_count, dpcd_lane_status) &&
2761 dp_is_interlane_aligned(dpcd_lane_status_updated)) {
2762 status = LINK_TRAINING_SUCCESS;
2766 /* 7. update VS/PE/PC2 in lt_settings*/
2767 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2768 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
/*
 * Main link-training entry point: decide/override training settings,
 * configure LTTPR mode, FEC readiness and channel coding, dispatch to the
 * encoding-specific sequence (fixed VS/PE workaround, 8b/10b, or
 * 128b/132b), then exit training mode and transition to video idle.
 * Increments the global LT failure counter on failure.
 *
 * NOTE(review): "<_settings" occurrences are paste-mangled "&lt_settings";
 * several argument lines and braces were dropped from this excerpt.
 */
2776 enum link_training_result dc_link_dp_perform_link_training(
2777 struct dc_link *link,
2778 const struct link_resource *link_res,
2779 const struct dc_link_settings *link_settings,
2780 bool skip_video_pattern)
2782 enum link_training_result status = LINK_TRAINING_SUCCESS;
2783 struct link_training_settings lt_settings = {0};
2784 enum dp_link_encoding encoding =
2785 dp_get_link_encoding_format(link_settings);
2787 /* decide training settings */
2788 dp_decide_training_settings(
2792 override_training_settings(
2794 &link->preferred_training_settings,
2797 /* reset previous training states */
2798 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
2799 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
2800 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
2801 link->apply_vendor_specific_lttpr_link_rate_wa = true;
2802 vendor_specific_lttpr_wa_four(link, true);
2804 dpcd_exit_training_mode(link);
2807 /* configure link prior to entering training mode */
2808 dpcd_configure_lttpr_mode(link, <_settings);
2809 dp_set_fec_ready(link, link_res, lt_settings.should_set_fec_ready);
2810 dpcd_configure_channel_coding(link, <_settings);
2812 /* enter training mode:
2813 * Per DP specs starting from here, DPTX device shall not issue
2814 * Non-LT AUX transactions inside training mode.
2816 if (!link->dc->debug.apply_vendor_specific_lttpr_wa &&
2817 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
2818 link->lttpr_mode == LTTPR_MODE_TRANSPARENT)
2819 status = dc_link_dp_perform_fixed_vs_pe_training_sequence(link, link_res, <_settings);
2820 else if (encoding == DP_8b_10b_ENCODING)
2821 status = dp_perform_8b_10b_link_training(link, link_res, <_settings);
2822 #if defined(CONFIG_DRM_AMD_DC_DCN)
2823 else if (encoding == DP_128b_132b_ENCODING)
2824 status = dp_perform_128b_132b_link_training(link, link_res, <_settings);
2829 /* exit training mode */
2830 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
2831 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
2832 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
2833 link->apply_vendor_specific_lttpr_link_rate_wa = false;
2834 vendor_specific_lttpr_wa_four(link, (status != LINK_TRAINING_SUCCESS));
2836 dpcd_exit_training_mode(link);
2839 /* switch to video idle */
2840 if ((status == LINK_TRAINING_SUCCESS) || !skip_video_pattern)
2841 status = dp_transition_to_video_idle(link,
2846 /* dump debug data */
2847 print_status_message(link, <_settings, status);
2848 if (status != LINK_TRAINING_SUCCESS)
2849 link->ctx->dc->debug_data.ltFailCount++;
/*
 * Retry wrapper around link training: attempts training up to 'attempts'
 * times, handling eDP power-up delay and ASSR, dispatching to skip-AUX,
 * USB4-DPIA or regular training, and on failure optionally falling back
 * to reduced link settings (aborting when bandwidth no longer fits the
 * stream, or when the sink was unplugged). Delay between attempts grows
 * by LINK_TRAINING_RETRY_DELAY each iteration.
 *
 * NOTE(review): fragmentary excerpt — declarations of 'j', 'req_bw',
 * 'link_bw' and several argument lines are not visible; "¤t_setting"
 * is a paste-mangled "&current_setting".
 */
2853 bool perform_link_training_with_retries(
2854 const struct dc_link_settings *link_setting,
2855 bool skip_video_pattern,
2857 struct pipe_ctx *pipe_ctx,
2858 enum signal_type signal,
2862 uint8_t delay_between_attempts = LINK_TRAINING_RETRY_DELAY;
2863 struct dc_stream_state *stream = pipe_ctx->stream;
2864 struct dc_link *link = stream->link;
2865 enum dp_panel_mode panel_mode = dp_get_panel_mode(link);
2866 struct link_encoder *link_enc;
2867 enum link_training_result status = LINK_TRAINING_CR_FAIL_LANE0;
2868 struct dc_link_settings current_setting = *link_setting;
2870 /* Dynamically assigned link encoders associated with stream rather than
2873 if (link->is_dig_mapping_flexible && link->dc->res_pool->funcs->link_encs_assign)
2874 link_enc = link_enc_cfg_get_link_enc_used_by_stream(link->ctx->dc, pipe_ctx->stream);
2876 link_enc = link->link_enc;
2878 /* We need to do this before the link training to ensure the idle pattern in SST
2879 * mode will be sent right after the link training
2881 if (dp_get_link_encoding_format(¤t_setting) == DP_8b_10b_ENCODING) {
2882 link_enc->funcs->connect_dig_be_to_fe(link_enc,
2883 pipe_ctx->stream_res.stream_enc->id, true);
2884 dp_source_sequence_trace(link, DPCD_SOURCE_SEQ_AFTER_CONNECT_DIG_FE_BE);
2887 for (j = 0; j < attempts; ++j) {
2889 DC_LOG_HW_LINK_TRAINING("%s: Beginning link training attempt %u of %d\n",
2890 __func__, (unsigned int)j + 1, attempts);
2894 &pipe_ctx->link_res,
2896 pipe_ctx->clock_source->id,
2899 if (stream->sink_patches.dppowerup_delay > 0) {
2900 int delay_dp_power_up_in_ms = stream->sink_patches.dppowerup_delay;
2902 msleep(delay_dp_power_up_in_ms);
2905 #ifdef CONFIG_DRM_AMD_DC_HDCP
2906 if (panel_mode == DP_PANEL_MODE_EDP) {
2907 struct cp_psp *cp_psp = &stream->ctx->cp_psp;
2909 if (cp_psp && cp_psp->funcs.enable_assr)
2910 /* ASSR is bound to fail with unsigned PSP
2911 * verstage used during development phase.
2912 * Report and continue with eDP panel mode to
2913 * perform eDP link training with right settings
2915 cp_psp->funcs.enable_assr(cp_psp->handle, link);
2919 dp_set_panel_mode(link, panel_mode);
2921 if (link->aux_access_disabled) {
2922 dc_link_dp_perform_link_training_skip_aux(link, &pipe_ctx->link_res, ¤t_setting);
2925 /** @todo Consolidate USB4 DP and DPx.x training. */
2926 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA) {
2927 status = dc_link_dpia_perform_link_training(link,
2928 &pipe_ctx->link_res,
2930 skip_video_pattern);
2932 /* Transmit idle pattern once training successful. */
2933 if (status == LINK_TRAINING_SUCCESS)
2934 dp_set_hw_test_pattern(link, &pipe_ctx->link_res, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
2936 status = dc_link_dp_perform_link_training(link,
2937 &pipe_ctx->link_res,
2939 skip_video_pattern);
2942 if (status == LINK_TRAINING_SUCCESS)
2946 /* latest link training still fail, skip delay and keep PHY on
2948 if (j == (attempts - 1) && link->ep_type == DISPLAY_ENDPOINT_PHY)
2951 DC_LOG_WARNING("%s: Link training attempt %u of %d failed\n",
2952 __func__, (unsigned int)j + 1, attempts);
2954 dp_disable_link_phy(link, &pipe_ctx->link_res, signal);
2956 /* Abort link training if failure due to sink being unplugged. */
2957 if (status == LINK_TRAINING_ABORT) {
2958 enum dc_connection_type type = dc_connection_none;
2960 dc_link_detect_sink(link, &type);
2961 if (type == dc_connection_none)
2963 } else if (do_fallback) {
2967 decide_fallback_link_setting(link, *link_setting, ¤t_setting, status);
2968 /* Fail link training if reduced link bandwidth no longer meets
2969 * stream requirements.
2971 req_bw = dc_bandwidth_in_kbps_from_timing(&stream->timing);
2972 link_bw = dc_link_bandwidth_kbps(link, ¤t_setting);
2973 if (req_bw > link_bw)
2977 msleep(delay_between_attempts);
2979 delay_between_attempts += LINK_TRAINING_RETRY_DELAY;
2985 static enum clock_source_id get_clock_source_id(struct dc_link *link)
2987 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_UNDEFINED;
2988 struct clock_source *dp_cs = link->dc->res_pool->dp_clock_source;
2990 if (dp_cs != NULL) {
2991 dp_cs_id = dp_cs->id;
2994 * dp clock source is not initialized for some reason.
2995 * Should not happen, CLOCK_SOURCE_ID_EXTERNAL will be used
3003 static void set_dp_mst_mode(struct dc_link *link, const struct link_resource *link_res,
3006 if (mst_enable == false &&
3007 link->type == dc_connection_mst_branch) {
3008 /* Disable MST on link. Use only local sink. */
3009 dp_disable_link_phy_mst(link, link_res, link->connector_signal);
3011 link->type = dc_connection_single;
3012 link->local_sink = link->remote_sinks[0];
3013 link->local_sink->sink_signal = SIGNAL_TYPE_DISPLAY_PORT;
3014 dc_sink_retain(link->local_sink);
3015 dm_helpers_dp_mst_stop_top_mgr(link->ctx, link);
3016 } else if (mst_enable == true &&
3017 link->type == dc_connection_single &&
3018 link->remote_sinks[0] != NULL) {
3019 /* Re-enable MST on link. */
3020 dp_disable_link_phy(link, link_res, link->connector_signal);
3021 dp_enable_mst_on_sink(link, true);
3023 link->type = dc_connection_mst_branch;
3024 link->local_sink->sink_signal = SIGNAL_TYPE_DISPLAY_PORT_MST;
3028 bool dc_link_dp_sync_lt_begin(struct dc_link *link)
3030 /* Begin Sync LT. During this time,
3031 * DPCD:600h must not be powered down.
3033 link->sync_lt_in_progress = true;
3035 /*Clear any existing preferred settings.*/
3036 memset(&link->preferred_training_settings, 0,
3037 sizeof(struct dc_link_training_overrides));
3038 memset(&link->preferred_link_setting, 0,
3039 sizeof(struct dc_link_settings));
/*
 * One synchronized link-training attempt with caller-supplied overrides:
 * optionally switch MST mode, cycle the PHY, configure FEC and panel mode
 * (eDP alternate scrambler reset), then run clock recovery + channel EQ.
 * Sync LT deliberately skips TRAINING_PATTERN_SET:0 (video pattern).
 *
 * NOTE(review): fragmentary excerpt — several argument lines, #else/#endif
 * lines and the final return are missing, and "<_settings" is a mangled
 * "&lt_settings".
 */
3044 enum link_training_result dc_link_dp_sync_lt_attempt(
3045 struct dc_link *link,
3046 const struct link_resource *link_res,
3047 struct dc_link_settings *link_settings,
3048 struct dc_link_training_overrides *lt_overrides)
3050 struct link_training_settings lt_settings = {0};
3051 enum link_training_result lt_status = LINK_TRAINING_SUCCESS;
3052 enum dp_panel_mode panel_mode = DP_PANEL_MODE_DEFAULT;
3053 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_EXTERNAL;
3054 bool fec_enable = false;
3056 dp_decide_training_settings(
3060 override_training_settings(
3064 /* Setup MST Mode */
3065 if (lt_overrides->mst_enable)
3066 set_dp_mst_mode(link, link_res, *lt_overrides->mst_enable);
3069 dp_disable_link_phy(link, link_res, link->connector_signal);
3072 dp_cs_id = get_clock_source_id(link);
3073 dp_enable_link_phy(link, link_res, link->connector_signal,
3074 dp_cs_id, link_settings);
3076 /* Set FEC enable */
3077 #if defined(CONFIG_DRM_AMD_DC_DCN)
3078 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING) {
3080 fec_enable = lt_overrides->fec_enable && *lt_overrides->fec_enable;
3081 dp_set_fec_ready(link, NULL, fec_enable);
3082 #if defined(CONFIG_DRM_AMD_DC_DCN)
3086 if (lt_overrides->alternate_scrambler_reset) {
3087 if (*lt_overrides->alternate_scrambler_reset)
3088 panel_mode = DP_PANEL_MODE_EDP;
3090 panel_mode = DP_PANEL_MODE_DEFAULT;
3092 panel_mode = dp_get_panel_mode(link);
3094 dp_set_panel_mode(link, panel_mode);
3096 /* Attempt to train with given link training settings */
3097 if (link->ctx->dc->work_arounds.lt_early_cr_pattern)
3098 start_clock_recovery_pattern_early(link, link_res, <_settings, DPRX);
3100 /* Set link rate, lane count and spread. */
3101 dpcd_set_link_settings(link, <_settings);
3103 /* 2. perform link training (set link training done
3104 * to false is done as well)
3106 lt_status = perform_clock_recovery_sequence(link, link_res, <_settings, DPRX);
3107 if (lt_status == LINK_TRAINING_SUCCESS) {
3108 lt_status = perform_channel_equalization_sequence(link,
3114 /* 3. Sync LT must skip TRAINING_PATTERN_SET:0 (video pattern)*/
3115 /* 4. print status message*/
3116 print_status_message(link, <_settings, lt_status);
3121 bool dc_link_dp_sync_lt_end(struct dc_link *link, bool link_down)
3123 /* If input parameter is set, shut down phy.
3124 * Still shouldn't turn off dp_receiver (DPCD:600h)
3126 if (link_down == true) {
3127 #if defined(CONFIG_DRM_AMD_DC_DCN)
3128 struct dc_link_settings link_settings = link->cur_link_settings;
3130 dp_disable_link_phy(link, NULL, link->connector_signal);
3131 #if defined(CONFIG_DRM_AMD_DC_DCN)
3132 if (dp_get_link_encoding_format(&link_settings) == DP_8b_10b_ENCODING)
3134 dp_set_fec_ready(link, NULL, false);
3137 link->sync_lt_in_progress = false;
3141 #if defined(CONFIG_DRM_AMD_DC_DCN)
3142 static enum dc_link_rate get_lttpr_max_link_rate(struct dc_link *link)
3144 enum dc_link_rate lttpr_max_link_rate = link->dpcd_caps.lttpr_caps.max_link_rate;
3146 if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR20)
3147 lttpr_max_link_rate = LINK_RATE_UHBR20;
3148 else if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR13_5)
3149 lttpr_max_link_rate = LINK_RATE_UHBR13_5;
3150 else if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR10)
3151 lttpr_max_link_rate = LINK_RATE_UHBR10;
3153 return lttpr_max_link_rate;
3157 bool dc_link_dp_get_max_link_enc_cap(const struct dc_link *link, struct dc_link_settings *max_link_enc_cap)
3159 struct link_encoder *link_enc = NULL;
3161 if (!max_link_enc_cap) {
3162 DC_LOG_ERROR("%s: Could not return max link encoder caps", __func__);
3166 /* Links supporting dynamically assigned link encoder will be assigned next
3167 * available encoder if one not already assigned.
3169 if (link->is_dig_mapping_flexible &&
3170 link->dc->res_pool->funcs->link_encs_assign) {
3171 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
3172 if (link_enc == NULL)
3173 link_enc = link_enc_cfg_get_next_avail_link_enc(link->ctx->dc);
3175 link_enc = link->link_enc;
3178 if (link_enc && link_enc->funcs->get_max_link_cap) {
3179 link_enc->funcs->get_max_link_cap(link_enc, max_link_enc_cap);
3183 DC_LOG_ERROR("%s: Max link encoder caps unknown", __func__);
3184 max_link_enc_cap->lane_count = 1;
3185 max_link_enc_cap->link_rate = 6;
/*
 * Compute the effective maximum link settings for a link: start from the
 * link encoder's capability, clamp UHBR rates when no HPO DP link encoder
 * is available (or UHBR is disabled via debug), then lower lane count /
 * rate / spread to the sink's reported caps and finally to the LTTPR
 * chain's caps when repeaters are present.
 *
 * NOTE(review): fragmentary excerpt — #else/#endif lines, NULL checks and
 * some braces are missing; confirm structure against the complete file.
 */
3189 static struct dc_link_settings get_max_link_cap(struct dc_link *link,
3190 const struct link_resource *link_res)
3192 struct dc_link_settings max_link_cap = {0};
3193 #if defined(CONFIG_DRM_AMD_DC_DCN)
3194 enum dc_link_rate lttpr_max_link_rate;
3196 struct link_encoder *link_enc = NULL;
3198 /* Links supporting dynamically assigned link encoder will be assigned next
3199 * available encoder if one not already assigned.
3201 if (link->is_dig_mapping_flexible &&
3202 link->dc->res_pool->funcs->link_encs_assign) {
3203 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
3204 if (link_enc == NULL)
3205 link_enc = link_enc_cfg_get_next_avail_link_enc(link->ctx->dc);
3207 link_enc = link->link_enc;
3210 /* get max link encoder capability */
3212 link_enc->funcs->get_max_link_cap(link_enc, &max_link_cap);
3213 #if defined(CONFIG_DRM_AMD_DC_DCN)
3214 if (max_link_cap.link_rate >= LINK_RATE_UHBR10) {
3215 if (!link_res->hpo_dp_link_enc ||
3216 link->dc->debug.disable_uhbr)
3217 max_link_cap.link_rate = LINK_RATE_HIGH3;
3221 /* Lower link settings based on sink's link cap */
3222 if (link->reported_link_cap.lane_count < max_link_cap.lane_count)
3223 max_link_cap.lane_count =
3224 link->reported_link_cap.lane_count;
3225 if (link->reported_link_cap.link_rate < max_link_cap.link_rate)
3226 max_link_cap.link_rate =
3227 link->reported_link_cap.link_rate;
3228 if (link->reported_link_cap.link_spread <
3229 max_link_cap.link_spread)
3230 max_link_cap.link_spread =
3231 link->reported_link_cap.link_spread;
3233 * account for lttpr repeaters cap
3234 * notes: repeaters do not snoop in the DPRX Capabilities addresses (3.6.3).
3236 if (link->lttpr_mode != LTTPR_MODE_NON_LTTPR) {
3237 if (link->dpcd_caps.lttpr_caps.max_lane_count < max_link_cap.lane_count)
3238 max_link_cap.lane_count = link->dpcd_caps.lttpr_caps.max_lane_count;
3240 #if defined(CONFIG_DRM_AMD_DC_DCN)
3241 lttpr_max_link_rate = get_lttpr_max_link_rate(link);
3243 if (lttpr_max_link_rate < max_link_cap.link_rate)
3244 max_link_cap.link_rate = lttpr_max_link_rate;
3246 if (link->dpcd_caps.lttpr_caps.max_link_rate < max_link_cap.link_rate)
3247 max_link_cap.link_rate = link->dpcd_caps.lttpr_caps.max_link_rate;
3250 DC_LOG_HW_LINK_TRAINING("%s\n Training with LTTPR, max_lane count %d max_link rate %d \n",
3252 max_link_cap.lane_count,
3253 max_link_cap.link_rate);
3255 return max_link_cap;
/*
 * Read the HPD RX IRQ status block from DPCD into *irq_data.
 *
 * Pre-DP1.4 sinks: read the classic 6-byte block at 200h directly (a
 * 16-byte read can hit AUX_DEFER and fail CTS 4.3.2.1 - 3.2.4).
 * DP1.4+ sinks: read the ESI range at 2002h in one burst and scatter the
 * bytes into the irq_data fields.
 *
 * NOTE(review): fragmentary excerpt — some read-argument lines and the
 * final return are not visible here.
 */
3258 static enum dc_status read_hpd_rx_irq_data(
3259 struct dc_link *link,
3260 union hpd_irq_data *irq_data)
3262 static enum dc_status retval;
3264 /* The HW reads 16 bytes from 200h on HPD,
3265 * but if we get an AUX_DEFER, the HW cannot retry
3266 * and this causes the CTS tests 4.3.2.1 - 3.2.4 to
3267 * fail, so we now explicitly read 6 bytes which is
3268 * the req from the above mentioned test cases.
3270 * For DP 1.4 we need to read those from 2002h range.
3272 if (link->dpcd_caps.dpcd_rev.raw < DPCD_REV_14)
3273 retval = core_link_read_dpcd(
3277 sizeof(union hpd_irq_data))
3279 /* Read 14 bytes in a single read and then copy only the required fields.
3280 * This is more efficient than doing it in two separate AUX reads. */
3282 uint8_t tmp[DP_SINK_STATUS_ESI - DP_SINK_COUNT_ESI + 1];
3284 retval = core_link_read_dpcd(
3290 if (retval != DC_OK)
3293 irq_data->bytes.sink_cnt.raw = tmp[DP_SINK_COUNT_ESI - DP_SINK_COUNT_ESI];
3294 irq_data->bytes.device_service_irq.raw = tmp[DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0 - DP_SINK_COUNT_ESI];
3295 irq_data->bytes.lane01_status.raw = tmp[DP_LANE0_1_STATUS_ESI - DP_SINK_COUNT_ESI];
3296 irq_data->bytes.lane23_status.raw = tmp[DP_LANE2_3_STATUS_ESI - DP_SINK_COUNT_ESI];
3297 irq_data->bytes.lane_status_updated.raw = tmp[DP_LANE_ALIGN_STATUS_UPDATED_ESI - DP_SINK_COUNT_ESI];
3298 irq_data->bytes.sink_status.raw = tmp[DP_SINK_STATUS_ESI - DP_SINK_COUNT_ESI];
3304 bool hpd_rx_irq_check_link_loss_status(
3305 struct dc_link *link,
3306 union hpd_irq_data *hpd_irq_dpcd_data)
3308 uint8_t irq_reg_rx_power_state = 0;
3309 enum dc_status dpcd_result = DC_ERROR_UNEXPECTED;
3310 union lane_status lane_status;
3312 bool sink_status_changed;
3315 sink_status_changed = false;
3316 return_code = false;
3318 if (link->cur_link_settings.lane_count == 0)
3321 /*1. Check that Link Status changed, before re-training.*/
3323 /*parse lane status*/
3324 for (lane = 0; lane < link->cur_link_settings.lane_count; lane++) {
3325 /* check status of lanes 0,1
3326 * changed DpcdAddress_Lane01Status (0x202)
3328 lane_status.raw = get_nibble_at_index(
3329 &hpd_irq_dpcd_data->bytes.lane01_status.raw,
3332 if (!lane_status.bits.CHANNEL_EQ_DONE_0 ||
3333 !lane_status.bits.CR_DONE_0 ||
3334 !lane_status.bits.SYMBOL_LOCKED_0) {
3335 /* if one of the channel equalization, clock
3336 * recovery or symbol lock is dropped
3337 * consider it as (link has been
3338 * dropped) dp sink status has changed
3340 sink_status_changed = true;
3345 /* Check interlane align.*/
3346 if (sink_status_changed ||
3347 !hpd_irq_dpcd_data->bytes.lane_status_updated.bits.INTERLANE_ALIGN_DONE) {
3349 DC_LOG_HW_HPD_IRQ("%s: Link Status changed.\n", __func__);
3353 /*2. Check that we can handle interrupt: Not in FS DOS,
3354 * Not in "Display Timeout" state, Link is trained.
3356 dpcd_result = core_link_read_dpcd(link,
3358 &irq_reg_rx_power_state,
3359 sizeof(irq_reg_rx_power_state));
3361 if (dpcd_result != DC_OK) {
3362 DC_LOG_HW_HPD_IRQ("%s: DPCD read failed to obtain power state.\n",
3365 if (irq_reg_rx_power_state != DP_SET_POWER_D0)
3366 return_code = false;
/*
 * Probe the maximum working DP link settings by training from high to low.
 *
 * Starts from the common subset of *known_limit_link_setting and the max
 * link cap.  When detection-time training is skipped or the link encoder
 * is already in use, the reported/initial caps are accepted directly.
 * Otherwise the link is repeatedly trained, falling back through
 * decide_fallback_link_setting() until training succeeds; the winning
 * settings are stored in link->verified_link_cap, and the 1-lane/RBR
 * fail-safe is used if every setting fails.
 *
 * NOTE(review): several interior lines (loop opening, some call
 * arguments, returns) were lost in extraction of this view; verify
 * against the original file before editing.
 */
3373 bool dp_verify_link_cap(
3374 struct dc_link *link,
3375 const struct link_resource *link_res,
3376 struct dc_link_settings *known_limit_link_setting,
3379 struct dc_link_settings max_link_cap = {0};
3380 struct dc_link_settings cur_link_setting = {0};
3381 struct dc_link_settings *cur = &cur_link_setting;
3382 struct dc_link_settings initial_link_settings = {0};
3384 bool skip_link_training;
3385 bool skip_video_pattern;
3386 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_EXTERNAL;
3387 enum link_training_result status;
3388 union hpd_irq_data irq_data;
3390 /* link training starts with the maximum common settings
3391 * supported by both sink and ASIC.
3393 max_link_cap = get_max_link_cap(link, link_res);
3394 initial_link_settings = get_common_supported_link_settings(
3395 *known_limit_link_setting,
3398 /* Accept reported capabilities if link supports flexible encoder mapping or encoder already in use. */
3399 if (link->dc->debug.skip_detection_link_training ||
3400 link->is_dig_mapping_flexible) {
3401 /* TODO - should we check link encoder's max link caps here?
3402 * How do we know which link encoder to check from?
3404 link->verified_link_cap = *known_limit_link_setting;
3406 } else if (link->link_enc && link->dc->res_pool->funcs->link_encs_assign &&
3407 !link_enc_cfg_is_link_enc_avail(link->ctx->dc, link->link_enc->preferred_engine, link)) {
3408 link->verified_link_cap = initial_link_settings;
3412 memset(&irq_data, 0, sizeof(irq_data));
3414 skip_link_training = false;
3416 /* Grant extended timeout request */
3417 if ((link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) && (link->dpcd_caps.lttpr_caps.max_ext_timeout > 0)) {
/* Bit 7 of MAX_EXT_TIMEOUT selects the extended-timeout grant written back to the repeater. */
3418 uint8_t grant = link->dpcd_caps.lttpr_caps.max_ext_timeout & 0x80;
3420 core_link_write_dpcd(link, DP_PHY_REPEATER_EXTENDED_WAIT_TIMEOUT, &grant, sizeof(grant));
3423 #if defined(CONFIG_DRM_AMD_DC_DCN)
3424 if (dp_get_link_encoding_format(&link->cur_link_settings) == DP_128b_132b_ENCODING)
3425 reset_dp_hpo_stream_encoders_for_link(link);
3427 /* TODO implement override and monitor patch later */
3429 /* try to train the link from high to low to
3430 * find the physical link capability
3432 /* disable PHY done possible by BIOS, will be done by driver itself */
3433 dp_disable_link_phy(link, link_res, link->connector_signal);
3435 dp_cs_id = get_clock_source_id(link);
3437 cur_link_setting = initial_link_settings;
3439 /* Temporary Renoir-specific workaround for SWDEV-215184;
3440 * PHY will sometimes be in bad state on hotplugging display from certain USB-C dongle,
3441 * so add extra cycle of enabling and disabling the PHY before first link training.
3443 if (link->link_enc && link->link_enc->features.flags.bits.DP_IS_USB_C &&
3444 link->dc->debug.usbc_combo_phy_reset_wa) {
3445 dp_enable_link_phy(link, link_res, link->connector_signal, dp_cs_id, cur);
3446 dp_disable_link_phy(link, link_res, link->connector_signal);
3450 skip_video_pattern = true;
/* RBR attempts drive the video pattern so the lowest setting is fully exercised. */
3452 if (cur->link_rate == LINK_RATE_LOW)
3453 skip_video_pattern = false;
3458 link->connector_signal,
3463 if (skip_link_training)
3466 status = dc_link_dp_perform_link_training(
3470 skip_video_pattern);
3471 if (status == LINK_TRAINING_SUCCESS)
3478 link->verified_link_cap = *cur;
/* Training passed: double-check via HPD RX IRQ data that the link did not immediately drop. */
3480 if (read_hpd_rx_irq_data(link, &irq_data) == DC_OK)
3481 if (hpd_rx_irq_check_link_loss_status(
3486 /* always disable the link before trying another
3487 * setting or before returning we'll enable it later
3488 * based on the actual mode we're driving
3490 dp_disable_link_phy(link, link_res, link->connector_signal);
3491 } while (!success && decide_fallback_link_setting(link,
3492 initial_link_settings, cur, status));
3494 /* Link Training failed for all Link Settings
3495 * (Lane Count is still unknown)
3498 /* If all LT fails for all settings,
3499 * set verified = failed safe (1 lane low)
3501 link->verified_link_cap.lane_count = LANE_COUNT_ONE;
3502 link->verified_link_cap.link_rate = LINK_RATE_LOW;
3504 link->verified_link_cap.link_spread =
3505 LINK_SPREAD_DISABLED;
/*
 * Retry wrapper around dp_verify_link_cap().
 *
 * Each attempt re-runs sink detection; if the sink is gone the verified
 * cap is forced to the 1-lane/RBR fail-safe, otherwise capability
 * verification is retried until an attempt reports zero failures.
 * (The attempt count parameter and the final return are elided in this
 * view — presumably 'success' is latched and returned; confirm against
 * the original file.)
 */
3512 bool dp_verify_link_cap_with_retries(
3513 struct dc_link *link,
3514 const struct link_resource *link_res,
3515 struct dc_link_settings *known_limit_link_setting,
3519 bool success = false;
3521 for (i = 0; i < attempts; i++) {
3523 enum dc_connection_type type = dc_connection_none;
/* Start each attempt from a clean slate. */
3525 memset(&link->verified_link_cap, 0,
3526 sizeof(struct dc_link_settings));
3527 if (!dc_link_detect_sink(link, &type) || type == dc_connection_none) {
/* Sink vanished: fall back to the minimal safe configuration. */
3528 link->verified_link_cap.lane_count = LANE_COUNT_ONE;
3529 link->verified_link_cap.link_rate = LINK_RATE_LOW;
3530 link->verified_link_cap.link_spread = LINK_SPREAD_DISABLED;
3532 } else if (dp_verify_link_cap(link, link_res,
3533 known_limit_link_setting,
3534 &fail_count) && fail_count == 0) {
/*
 * Verify the link capability for an MST link.
 *
 * For 8b/10b links the reported cap is simply clamped against the max
 * link cap (no training).  For 128b/132b links (DCN builds) full
 * verification with retries is performed instead.
 */
3543 bool dp_verify_mst_link_cap(
3544 struct dc_link *link, const struct link_resource *link_res)
3546 struct dc_link_settings max_link_cap = {0};
3548 if (dp_get_link_encoding_format(&link->reported_link_cap) ==
3549 DP_8b_10b_ENCODING) {
3550 max_link_cap = get_max_link_cap(link, link_res);
3551 link->verified_link_cap = get_common_supported_link_settings(
3552 link->reported_link_cap,
3555 #if defined(CONFIG_DRM_AMD_DC_DCN)
3556 else if (dp_get_link_encoding_format(&link->reported_link_cap) ==
3557 DP_128b_132b_ENCODING) {
3558 dp_verify_link_cap_with_retries(link,
3560 &link->reported_link_cap,
3561 LINK_TRAINING_MAX_VERIFY_RETRY);
/*
 * Intersect two link settings: take the smaller lane count and smaller
 * link rate of the two inputs, with SSC disabled, then snap any
 * non-standard link rate down to the nearest supported rate below it
 * (the DCN build additionally handles the UHBR rates).
 *
 * NOTE(review): the #else arm of the CONFIG_DRM_AMD_DC_DCN chain and one
 * assignment appear elided in this view; confirm against the original.
 */
3567 static struct dc_link_settings get_common_supported_link_settings(
3568 struct dc_link_settings link_setting_a,
3569 struct dc_link_settings link_setting_b)
3571 struct dc_link_settings link_settings = {0};
3573 link_settings.lane_count =
3574 (link_setting_a.lane_count <=
3575 link_setting_b.lane_count) ?
3576 link_setting_a.lane_count :
3577 link_setting_b.lane_count;
3578 link_settings.link_rate =
3579 (link_setting_a.link_rate <=
3580 link_setting_b.link_rate) ?
3581 link_setting_a.link_rate :
3582 link_setting_b.link_rate;
3583 link_settings.link_spread = LINK_SPREAD_DISABLED;
3585 /* in DP compliance test, DPR-120 may have
3586 * a random value in its MAX_LINK_BW dpcd field.
3587 * We map it to the maximum supported link rate that
3588 * is smaller than MAX_LINK_BW in this case.
3590 #if defined(CONFIG_DRM_AMD_DC_DCN)
3591 if (link_settings.link_rate > LINK_RATE_UHBR20) {
3592 link_settings.link_rate = LINK_RATE_UHBR20;
3593 } else if (link_settings.link_rate < LINK_RATE_UHBR20 &&
3594 link_settings.link_rate > LINK_RATE_UHBR13_5) {
3595 link_settings.link_rate = LINK_RATE_UHBR13_5;
3596 } else if (link_settings.link_rate < LINK_RATE_UHBR10 &&
3597 link_settings.link_rate > LINK_RATE_HIGH3) {
3599 if (link_settings.link_rate > LINK_RATE_HIGH3) {
3601 link_settings.link_rate = LINK_RATE_HIGH3;
3602 } else if (link_settings.link_rate < LINK_RATE_HIGH3
3603 && link_settings.link_rate > LINK_RATE_HIGH2) {
3604 link_settings.link_rate = LINK_RATE_HIGH2;
3605 } else if (link_settings.link_rate < LINK_RATE_HIGH2
3606 && link_settings.link_rate > LINK_RATE_HIGH) {
3607 link_settings.link_rate = LINK_RATE_HIGH;
3608 } else if (link_settings.link_rate < LINK_RATE_HIGH
3609 && link_settings.link_rate > LINK_RATE_LOW) {
3610 link_settings.link_rate = LINK_RATE_LOW;
3611 } else if (link_settings.link_rate < LINK_RATE_LOW) {
3612 link_settings.link_rate = LINK_RATE_UNKNOWN;
3615 return link_settings;
3618 static inline bool reached_minimum_lane_count(enum dc_lane_count lane_count)
3620 return lane_count <= LANE_COUNT_ONE;
3623 static inline bool reached_minimum_link_rate(enum dc_link_rate link_rate)
3625 return link_rate <= LINK_RATE_LOW;
3628 static enum dc_lane_count reduce_lane_count(enum dc_lane_count lane_count)
3630 switch (lane_count) {
3631 case LANE_COUNT_FOUR:
3632 return LANE_COUNT_TWO;
3633 case LANE_COUNT_TWO:
3634 return LANE_COUNT_ONE;
3635 case LANE_COUNT_ONE:
3636 return LANE_COUNT_UNKNOWN;
3638 return LANE_COUNT_UNKNOWN;
3642 static enum dc_link_rate reduce_link_rate(enum dc_link_rate link_rate)
3644 switch (link_rate) {
3645 #if defined(CONFIG_DRM_AMD_DC_DCN)
3646 case LINK_RATE_UHBR20:
3647 return LINK_RATE_UHBR13_5;
3648 case LINK_RATE_UHBR13_5:
3649 return LINK_RATE_UHBR10;
3650 case LINK_RATE_UHBR10:
3651 return LINK_RATE_HIGH3;
3653 case LINK_RATE_HIGH3:
3654 return LINK_RATE_HIGH2;
3655 case LINK_RATE_HIGH2:
3656 return LINK_RATE_HIGH;
3657 case LINK_RATE_HIGH:
3658 return LINK_RATE_LOW;
3660 return LINK_RATE_UNKNOWN;
3662 return LINK_RATE_UNKNOWN;
3666 static enum dc_lane_count increase_lane_count(enum dc_lane_count lane_count)
3668 switch (lane_count) {
3669 case LANE_COUNT_ONE:
3670 return LANE_COUNT_TWO;
3671 case LANE_COUNT_TWO:
3672 return LANE_COUNT_FOUR;
3674 return LANE_COUNT_UNKNOWN;
3678 static enum dc_link_rate increase_link_rate(enum dc_link_rate link_rate)
3680 switch (link_rate) {
3682 return LINK_RATE_HIGH;
3683 case LINK_RATE_HIGH:
3684 return LINK_RATE_HIGH2;
3685 case LINK_RATE_HIGH2:
3686 return LINK_RATE_HIGH3;
3687 #if defined(CONFIG_DRM_AMD_DC_DCN)
3688 case LINK_RATE_HIGH3:
3689 return LINK_RATE_UHBR10;
3690 case LINK_RATE_UHBR10:
3691 return LINK_RATE_UHBR13_5;
3692 case LINK_RATE_UHBR13_5:
3693 return LINK_RATE_UHBR20;
3696 return LINK_RATE_UNKNOWN;
3700 #if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * DP2.x-style table-driven fallback: walk dp_lt_fallbacks[] (ordered by
 * decreasing bandwidth), locate the entry matching *cur, then advance to
 * the next entry that still fits within *max, updating *cur in place.
 * Presumably returns whether a next entry was found — the tail (and the
 * loop advance/break statements) are elided in this view.
 */
3701 static bool decide_fallback_link_setting_max_bw_policy(
3702 const struct dc_link_settings *max,
3703 struct dc_link_settings *cur)
3705 uint8_t cur_idx = 0, next_idx;
3708 while (cur_idx < ARRAY_SIZE(dp_lt_fallbacks))
3709 /* find current index */
3710 if (dp_lt_fallbacks[cur_idx].lane_count == cur->lane_count &&
3711 dp_lt_fallbacks[cur_idx].link_rate == cur->link_rate)
3716 next_idx = cur_idx + 1;
3718 while (next_idx < ARRAY_SIZE(dp_lt_fallbacks))
3719 /* find next index */
3720 if (dp_lt_fallbacks[next_idx].lane_count <= max->lane_count &&
3721 dp_lt_fallbacks[next_idx].link_rate <= max->link_rate)
3726 if (next_idx < ARRAY_SIZE(dp_lt_fallbacks)) {
3727 cur->lane_count = dp_lt_fallbacks[next_idx].lane_count;
3728 cur->link_rate = dp_lt_fallbacks[next_idx].link_rate;
3737 * function: set link rate and lane count fallback based
3738 * on current link setting and last link training result
3740 * true - link setting could be set
3741 * false - has reached minimum setting
3742 * and no further fallback could be done
/*
 * Pick the next link-setting fallback after a failed link training.
 *
 * 128b/132b links (or the forced DP2 fallback method) delegate to the
 * table-driven max-bw policy.  Otherwise the fallback depends on which
 * training phase failed: CR/LQA failures reduce link rate first, then
 * shrink the lane count (restoring the initial rate); EQ failures reduce
 * lane count first, then link rate; EQ-phase CR failures only reduce the
 * link rate.  Returns true while a further fallback exists.
 */
3744 static bool decide_fallback_link_setting(
3745 struct dc_link *link,
3746 struct dc_link_settings initial_link_settings,
3747 struct dc_link_settings *current_link_setting,
3748 enum link_training_result training_result)
3750 if (!current_link_setting)
3752 #if defined(CONFIG_DRM_AMD_DC_DCN)
3753 if (dp_get_link_encoding_format(&initial_link_settings) == DP_128b_132b_ENCODING ||
3754 link->dc->debug.force_dp2_lt_fallback_method)
3755 return decide_fallback_link_setting_max_bw_policy(&initial_link_settings,
3756 current_link_setting);
3759 switch (training_result) {
3760 case LINK_TRAINING_CR_FAIL_LANE0:
3761 case LINK_TRAINING_CR_FAIL_LANE1:
3762 case LINK_TRAINING_CR_FAIL_LANE23:
3763 case LINK_TRAINING_LQA_FAIL:
/* Prefer dropping link rate; once at the floor, drop lanes based on which lane failed CR. */
3765 if (!reached_minimum_link_rate
3766 (current_link_setting->link_rate)) {
3767 current_link_setting->link_rate =
3769 current_link_setting->link_rate);
3770 } else if (!reached_minimum_lane_count
3771 (current_link_setting->lane_count)) {
3772 current_link_setting->link_rate =
3773 initial_link_settings.link_rate;
3774 if (training_result == LINK_TRAINING_CR_FAIL_LANE0)
3776 else if (training_result == LINK_TRAINING_CR_FAIL_LANE1)
3777 current_link_setting->lane_count =
3779 else if (training_result ==
3780 LINK_TRAINING_CR_FAIL_LANE23)
3781 current_link_setting->lane_count =
3784 current_link_setting->lane_count =
3786 current_link_setting->lane_count);
3792 case LINK_TRAINING_EQ_FAIL_EQ:
/* EQ failed: prefer dropping lane count before link rate. */
3794 if (!reached_minimum_lane_count
3795 (current_link_setting->lane_count)) {
3796 current_link_setting->lane_count =
3798 current_link_setting->lane_count);
3799 } else if (!reached_minimum_link_rate
3800 (current_link_setting->link_rate)) {
3801 current_link_setting->link_rate =
3803 current_link_setting->link_rate);
3809 case LINK_TRAINING_EQ_FAIL_CR:
/* CR lost during EQ phase: only reduce link rate. */
3811 if (!reached_minimum_link_rate
3812 (current_link_setting->link_rate)) {
3813 current_link_setting->link_rate =
3815 current_link_setting->link_rate);
/*
 * Check whether a CRTC timing can be driven over this DP link.
 *
 * Rejects YCbCr420 when the sink lacks VSC SDP colorimetry support
 * (virtual connectors excepted), always accepts the 640x480@25.175MHz
 * DP fail-safe mode, and otherwise compares the timing's required
 * bandwidth against the link capability returned by
 * dc_link_get_link_cap().
 */
3827 bool dp_validate_mode_timing(
3828 struct dc_link *link,
3829 const struct dc_crtc_timing *timing)
3834 const struct dc_link_settings *link_setting;
3836 /* According to spec, VSC SDP should be used if pixel format is YCbCr420 */
3837 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420 &&
3838 !link->dpcd_caps.dprx_feature.bits.VSC_SDP_COLORIMETRY_SUPPORTED &&
3839 dal_graphics_object_id_get_connector_id(link->link_id) != CONNECTOR_ID_VIRTUAL)
3842 /*always DP fail safe mode*/
3843 if ((timing->pix_clk_100hz / 10) == (uint32_t) 25175 &&
3844 timing->h_addressable == (uint32_t) 640 &&
3845 timing->v_addressable == (uint32_t) 480)
3848 link_setting = dc_link_get_link_cap(link);
3850 /* TODO: DYNAMIC_VALIDATION needs to be implemented */
3851 /*if (flags.DYNAMIC_VALIDATION == 1 &&
3852 link->verified_link_cap.lane_count != LANE_COUNT_UNKNOWN)
3853 link_setting = &link->verified_link_cap;
3856 req_bw = dc_bandwidth_in_kbps_from_timing(timing);
3857 max_bw = dc_link_bandwidth_kbps(link, link_setting);
3859 if (req_bw <= max_bw) {
3860 /* remember the biggest mode here, during
3861 * initial link training (to get
3862 * verified_link_cap), LS sends event about
3863 * cannot train at reported cap to upper
3864 * layer and upper layer will re-enumerate modes.
3865 * this is not necessary if the lower
3866 * verified_link_cap is enough to drive
3869 /* TODO: DYNAMIC_VALIDATION needs to be implemented */
3870 /* if (flags.DYNAMIC_VALIDATION == 1)
3871 dpsst->max_req_bw_for_verified_linkcap = dal_max(
3872 dpsst->max_req_bw_for_verified_linkcap, req_bw); */
3878 static bool decide_dp_link_settings(struct dc_link *link, struct dc_link_settings *link_setting, uint32_t req_bw)
3880 struct dc_link_settings initial_link_setting = {
3881 LANE_COUNT_ONE, LINK_RATE_LOW, LINK_SPREAD_DISABLED, false, 0};
3882 struct dc_link_settings current_link_setting =
3883 initial_link_setting;
3886 if (req_bw > dc_link_bandwidth_kbps(link, &link->verified_link_cap))
3889 /* search for the minimum link setting that:
3890 * 1. is supported according to the link training result
3891 * 2. could support the b/w requested by the timing
3893 while (current_link_setting.link_rate <=
3894 link->verified_link_cap.link_rate) {
3895 link_bw = dc_link_bandwidth_kbps(
3897 ¤t_link_setting);
3898 if (req_bw <= link_bw) {
3899 *link_setting = current_link_setting;
3903 if (current_link_setting.lane_count <
3904 link->verified_link_cap.lane_count) {
3905 current_link_setting.lane_count =
3906 increase_lane_count(
3907 current_link_setting.lane_count);
3909 current_link_setting.link_rate =
3911 current_link_setting.link_rate);
3912 current_link_setting.lane_count =
3913 initial_link_setting.lane_count;
3920 bool decide_edp_link_settings(struct dc_link *link, struct dc_link_settings *link_setting, uint32_t req_bw)
3922 struct dc_link_settings initial_link_setting;
3923 struct dc_link_settings current_link_setting;
3927 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
3928 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
3930 if (link->dpcd_caps.dpcd_rev.raw < DPCD_REV_13 ||
3931 link->dpcd_caps.edp_supported_link_rates_count == 0) {
3932 *link_setting = link->verified_link_cap;
3936 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
3937 initial_link_setting.lane_count = LANE_COUNT_ONE;
3938 initial_link_setting.link_rate = link->dpcd_caps.edp_supported_link_rates[0];
3939 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
3940 initial_link_setting.use_link_rate_set = true;
3941 initial_link_setting.link_rate_set = 0;
3942 current_link_setting = initial_link_setting;
3944 /* search for the minimum link setting that:
3945 * 1. is supported according to the link training result
3946 * 2. could support the b/w requested by the timing
3948 while (current_link_setting.link_rate <=
3949 link->verified_link_cap.link_rate) {
3950 link_bw = dc_link_bandwidth_kbps(
3952 ¤t_link_setting);
3953 if (req_bw <= link_bw) {
3954 *link_setting = current_link_setting;
3958 if (current_link_setting.lane_count <
3959 link->verified_link_cap.lane_count) {
3960 current_link_setting.lane_count =
3961 increase_lane_count(
3962 current_link_setting.lane_count);
3964 if (current_link_setting.link_rate_set < link->dpcd_caps.edp_supported_link_rates_count) {
3965 current_link_setting.link_rate_set++;
3966 current_link_setting.link_rate =
3967 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
3968 current_link_setting.lane_count =
3969 initial_link_setting.lane_count;
3977 static bool decide_edp_link_settings_with_dsc(struct dc_link *link,
3978 struct dc_link_settings *link_setting,
3980 enum dc_link_rate max_link_rate)
3982 struct dc_link_settings initial_link_setting;
3983 struct dc_link_settings current_link_setting;
3986 unsigned int policy = 0;
3988 policy = link->ctx->dc->debug.force_dsc_edp_policy;
3989 if (max_link_rate == LINK_RATE_UNKNOWN)
3990 max_link_rate = link->verified_link_cap.link_rate;
3992 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
3993 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
3995 if ((link->dpcd_caps.dpcd_rev.raw < DPCD_REV_13 ||
3996 link->dpcd_caps.edp_supported_link_rates_count == 0)) {
3997 /* for DSC enabled case, we search for minimum lane count */
3998 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
3999 initial_link_setting.lane_count = LANE_COUNT_ONE;
4000 initial_link_setting.link_rate = LINK_RATE_LOW;
4001 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
4002 initial_link_setting.use_link_rate_set = false;
4003 initial_link_setting.link_rate_set = 0;
4004 current_link_setting = initial_link_setting;
4005 if (req_bw > dc_link_bandwidth_kbps(link, &link->verified_link_cap))
4008 /* search for the minimum link setting that:
4009 * 1. is supported according to the link training result
4010 * 2. could support the b/w requested by the timing
4012 while (current_link_setting.link_rate <=
4014 link_bw = dc_link_bandwidth_kbps(
4016 ¤t_link_setting);
4017 if (req_bw <= link_bw) {
4018 *link_setting = current_link_setting;
4023 if (current_link_setting.link_rate < max_link_rate) {
4024 current_link_setting.link_rate =
4026 current_link_setting.link_rate);
4028 if (current_link_setting.lane_count <
4029 link->verified_link_cap.lane_count) {
4030 current_link_setting.lane_count =
4031 increase_lane_count(
4032 current_link_setting.lane_count);
4033 current_link_setting.link_rate = initial_link_setting.link_rate;
4038 /* minimize link rate */
4039 if (current_link_setting.lane_count <
4040 link->verified_link_cap.lane_count) {
4041 current_link_setting.lane_count =
4042 increase_lane_count(
4043 current_link_setting.lane_count);
4045 current_link_setting.link_rate =
4047 current_link_setting.link_rate);
4048 current_link_setting.lane_count =
4049 initial_link_setting.lane_count;
4056 /* if optimize edp link is supported */
4057 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
4058 initial_link_setting.lane_count = LANE_COUNT_ONE;
4059 initial_link_setting.link_rate = link->dpcd_caps.edp_supported_link_rates[0];
4060 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
4061 initial_link_setting.use_link_rate_set = true;
4062 initial_link_setting.link_rate_set = 0;
4063 current_link_setting = initial_link_setting;
4065 /* search for the minimum link setting that:
4066 * 1. is supported according to the link training result
4067 * 2. could support the b/w requested by the timing
4069 while (current_link_setting.link_rate <=
4071 link_bw = dc_link_bandwidth_kbps(
4073 ¤t_link_setting);
4074 if (req_bw <= link_bw) {
4075 *link_setting = current_link_setting;
4080 if (current_link_setting.link_rate_set <
4081 link->dpcd_caps.edp_supported_link_rates_count
4082 && current_link_setting.link_rate < max_link_rate) {
4083 current_link_setting.link_rate_set++;
4084 current_link_setting.link_rate =
4085 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
4087 if (current_link_setting.lane_count < link->verified_link_cap.lane_count) {
4088 current_link_setting.lane_count =
4089 increase_lane_count(
4090 current_link_setting.lane_count);
4091 current_link_setting.link_rate_set = initial_link_setting.link_rate_set;
4092 current_link_setting.link_rate =
4093 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
4098 /* minimize link rate */
4099 if (current_link_setting.lane_count <
4100 link->verified_link_cap.lane_count) {
4101 current_link_setting.lane_count =
4102 increase_lane_count(
4103 current_link_setting.lane_count);
4105 if (current_link_setting.link_rate_set < link->dpcd_caps.edp_supported_link_rates_count) {
4106 current_link_setting.link_rate_set++;
4107 current_link_setting.link_rate =
4108 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
4109 current_link_setting.lane_count =
4110 initial_link_setting.lane_count;
4119 static bool decide_mst_link_settings(const struct dc_link *link, struct dc_link_settings *link_setting)
4121 *link_setting = link->verified_link_cap;
/*
 * Select the link settings used to drive a stream.
 *
 * Priority: explicitly preferred settings (if fully specified) win;
 * MST uses the verified cap; eDP uses its optimized selection (with the
 * DSC-aware variant when the stream uses DSC, optionally capping the
 * rate at what the uncompressed timing would have needed); plain DP
 * searches for the minimum setting that carries the stream bandwidth.
 * If nothing matched, falls back to verified_link_cap with an assert.
 */
4125 void decide_link_settings(struct dc_stream_state *stream,
4126 struct dc_link_settings *link_setting)
4128 struct dc_link *link;
4131 req_bw = dc_bandwidth_in_kbps_from_timing(&stream->timing);
4133 link = stream->link;
4135 /* if preferred is specified through AMDDP, use it, if it's enough
4138 if (link->preferred_link_setting.lane_count !=
4139 LANE_COUNT_UNKNOWN &&
4140 link->preferred_link_setting.link_rate !=
4141 LINK_RATE_UNKNOWN) {
4142 *link_setting = link->preferred_link_setting;
4146 /* MST doesn't perform link training for now
4147 * TODO: add MST specific link training routine
4149 if (stream->signal == SIGNAL_TYPE_DISPLAY_PORT_MST) {
4150 if (decide_mst_link_settings(link, link_setting))
4152 } else if (link->connector_signal == SIGNAL_TYPE_EDP) {
4153 /* enable edp link optimization for DSC eDP case */
4154 if (stream->timing.flags.DSC) {
4155 enum dc_link_rate max_link_rate = LINK_RATE_UNKNOWN;
4157 if (link->ctx->dc->debug.force_dsc_edp_policy) {
4158 /* calculate link max link rate cap*/
4159 struct dc_link_settings tmp_link_setting;
4160 struct dc_crtc_timing tmp_timing = stream->timing;
4161 uint32_t orig_req_bw;
/* Probe with DSC disabled to find the rate the uncompressed timing would need. */
4163 tmp_link_setting.link_rate = LINK_RATE_UNKNOWN;
4164 tmp_timing.flags.DSC = 0;
4165 orig_req_bw = dc_bandwidth_in_kbps_from_timing(&tmp_timing);
4166 decide_edp_link_settings(link, &tmp_link_setting, orig_req_bw);
4167 max_link_rate = tmp_link_setting.link_rate;
4169 if (decide_edp_link_settings_with_dsc(link, link_setting, req_bw, max_link_rate))
4171 } else if (decide_edp_link_settings(link, link_setting, req_bw))
4173 } else if (decide_dp_link_settings(link, link_setting, req_bw))
/* No selection succeeded: flag it and fall back to the verified capability. */
4176 BREAK_TO_DEBUGGER();
4177 ASSERT(link->verified_link_cap.lane_count != LANE_COUNT_UNKNOWN);
4179 *link_setting = link->verified_link_cap;
4182 /*************************Short Pulse IRQ***************************/
4183 bool dc_link_dp_allow_hpd_rx_irq(const struct dc_link *link)
4186 * Don't handle RX IRQ unless one of following is met:
4187 * 1) The link is established (cur_link_settings != unknown)
4188 * 2) We know we're dealing with a branch device, SST or MST
4191 if ((link->cur_link_settings.lane_count != LANE_COUNT_UNKNOWN) ||
4192 is_dp_branch_device(link))
/*
 * Service an HPD IRQ for a PSR-capable sink.
 *
 * Reads the sink's PSR configuration; when PSR is enabled, checks the
 * sink's PSR error status.  CRC/RFB-storage/VSC-SDP errors are
 * acknowledged (written back) and PSR is toggled off/on to recover.
 * If the sink reports it is actively self-refreshing from its RFB, the
 * IRQ is considered handled since the main link is powered down.
 * Note: 368 and 8198 below are DECIMAL DPCD addresses (0x170
 * PSR_ENABLE_CFG and 0x2006 PSR_ERROR_STATUS respectively).
 */
4198 static bool handle_hpd_irq_psr_sink(struct dc_link *link)
4200 union dpcd_psr_configuration psr_configuration;
4202 if (!link->psr_settings.psr_feature_enabled)
4205 dm_helpers_dp_read_dpcd(
4208 368,/*DpcdAddress_PSR_Enable_Cfg*/
4209 &psr_configuration.raw,
4210 sizeof(psr_configuration.raw));
4212 if (psr_configuration.bits.ENABLE) {
4213 unsigned char dpcdbuf[3] = {0};
4214 union psr_error_status psr_error_status;
4215 union psr_sink_psr_status psr_sink_psr_status;
/* One burst read covering DPCD 2006h..2008h. */
4217 dm_helpers_dp_read_dpcd(
4220 0x2006, /*DpcdAddress_PSR_Error_Status*/
4221 (unsigned char *) dpcdbuf,
4224 /*DPCD 2006h ERROR STATUS*/
4225 psr_error_status.raw = dpcdbuf[0];
4226 /*DPCD 2008h SINK PANEL SELF REFRESH STATUS*/
4227 psr_sink_psr_status.raw = dpcdbuf[2];
4229 if (psr_error_status.bits.LINK_CRC_ERROR ||
4230 psr_error_status.bits.RFB_STORAGE_ERROR ||
4231 psr_error_status.bits.VSC_SDP_ERROR) {
4234 /* Acknowledge and clear error bits */
4235 dm_helpers_dp_write_dpcd(
4238 8198,/*DpcdAddress_PSR_Error_Status*/
4239 &psr_error_status.raw,
4240 sizeof(psr_error_status.raw));
4242 /* PSR error, disable and re-enable PSR */
4243 if (link->psr_settings.psr_allow_active) {
4244 allow_active = false;
4245 dc_link_set_psr_allow_active(link, &allow_active, true, false, NULL);
4246 allow_active = true;
4247 dc_link_set_psr_allow_active(link, &allow_active, true, false, NULL);
4251 } else if (psr_sink_psr_status.bits.SINK_SELF_REFRESH_STATUS ==
4252 PSR_SINK_STATE_ACTIVE_DISPLAY_FROM_SINK_RFB){
4253 /* No error is detect, PSR is active.
4254 * We should return with IRQ_HPD handled without
4255 * checking for loss of sync since PSR would have
4256 * powered down main link.
/*
 * DP compliance: adopt the lane count and link rate requested by the
 * test equipment (read from the test-request DPCD registers), store
 * them as the verified capability, and retrain the link with them.
 */
4264 static void dp_test_send_link_training(struct dc_link *link)
4266 struct dc_link_settings link_settings = {0};
4268 core_link_read_dpcd(
4271 (unsigned char *)(&link_settings.lane_count),
4273 core_link_read_dpcd(
4276 (unsigned char *)(&link_settings.link_rate),
4279 /* Set preferred link settings */
4280 link->verified_link_cap.lane_count = link_settings.lane_count;
4281 link->verified_link_cap.link_rate = link_settings.link_rate;
/* Retrain immediately with the requested settings (no video-pattern skip override). */
4283 dp_retrain_link_dp_test(link, &link_settings, false);
4286 /* TODO Raven hbr2 compliance eye output is unstable
4287 * (toggling on and off) with debugger break
4288 * This causes intermittent PHY automation failure
4289 * Need to look into the root cause */
4290 static void dp_test_send_phy_test_pattern(struct dc_link *link)
4292 union phy_test_pattern dpcd_test_pattern;
4293 union lane_adjust dpcd_lane_adjustment[2];
4294 unsigned char dpcd_post_cursor_2_adjustment = 0;
4295 #if defined(CONFIG_DRM_AMD_DC_DCN)
4296 unsigned char test_pattern_buffer[
4297 (DP_TEST_264BIT_CUSTOM_PATTERN_263_256 -
4298 DP_TEST_264BIT_CUSTOM_PATTERN_7_0)+1] = {0};
4300 unsigned char test_pattern_buffer[
4301 (DP_TEST_80BIT_CUSTOM_PATTERN_79_72 -
4302 DP_TEST_80BIT_CUSTOM_PATTERN_7_0)+1] = {0};
4304 unsigned int test_pattern_size = 0;
4305 enum dp_test_pattern test_pattern;
4306 union lane_adjust dpcd_lane_adjust;
4308 struct link_training_settings link_training_settings;
4310 dpcd_test_pattern.raw = 0;
4311 memset(dpcd_lane_adjustment, 0, sizeof(dpcd_lane_adjustment));
4312 memset(&link_training_settings, 0, sizeof(link_training_settings));
4314 /* get phy test pattern and pattern parameters from DP receiver */
4315 core_link_read_dpcd(
4317 DP_PHY_TEST_PATTERN,
4318 &dpcd_test_pattern.raw,
4319 sizeof(dpcd_test_pattern));
4320 core_link_read_dpcd(
4322 DP_ADJUST_REQUEST_LANE0_1,
4323 &dpcd_lane_adjustment[0].raw,
4324 sizeof(dpcd_lane_adjustment));
4326 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
4327 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
4328 link->lttpr_mode == LTTPR_MODE_TRANSPARENT)
4329 vendor_specific_lttpr_wa_three_dpcd(
4331 link_training_settings.dpcd_lane_settings);
4333 /*get post cursor 2 parameters
4334 * For DP 1.1a or eariler, this DPCD register's value is 0
4335 * For DP 1.2 or later:
4336 * Bits 1:0 = POST_CURSOR2_LANE0; Bits 3:2 = POST_CURSOR2_LANE1
4337 * Bits 5:4 = POST_CURSOR2_LANE2; Bits 7:6 = POST_CURSOR2_LANE3
4339 core_link_read_dpcd(
4341 DP_ADJUST_REQUEST_POST_CURSOR2,
4342 &dpcd_post_cursor_2_adjustment,
4343 sizeof(dpcd_post_cursor_2_adjustment));
4345 /* translate request */
4346 switch (dpcd_test_pattern.bits.PATTERN) {
4347 case PHY_TEST_PATTERN_D10_2:
4348 test_pattern = DP_TEST_PATTERN_D102;
4350 case PHY_TEST_PATTERN_SYMBOL_ERROR:
4351 test_pattern = DP_TEST_PATTERN_SYMBOL_ERROR;
4353 case PHY_TEST_PATTERN_PRBS7:
4354 test_pattern = DP_TEST_PATTERN_PRBS7;
4356 case PHY_TEST_PATTERN_80BIT_CUSTOM:
4357 test_pattern = DP_TEST_PATTERN_80BIT_CUSTOM;
4359 case PHY_TEST_PATTERN_CP2520_1:
4360 /* CP2520 pattern is unstable, temporarily use TPS4 instead */
4361 test_pattern = (link->dc->caps.force_dp_tps4_for_cp2520 == 1) ?
4362 DP_TEST_PATTERN_TRAINING_PATTERN4 :
4363 DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE;
4365 case PHY_TEST_PATTERN_CP2520_2:
4366 /* CP2520 pattern is unstable, temporarily use TPS4 instead */
4367 test_pattern = (link->dc->caps.force_dp_tps4_for_cp2520 == 1) ?
4368 DP_TEST_PATTERN_TRAINING_PATTERN4 :
4369 DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE;
4371 case PHY_TEST_PATTERN_CP2520_3:
4372 test_pattern = DP_TEST_PATTERN_TRAINING_PATTERN4;
4374 #if defined(CONFIG_DRM_AMD_DC_DCN)
4375 case PHY_TEST_PATTERN_128b_132b_TPS1:
4376 test_pattern = DP_TEST_PATTERN_128b_132b_TPS1;
4378 case PHY_TEST_PATTERN_128b_132b_TPS2:
4379 test_pattern = DP_TEST_PATTERN_128b_132b_TPS2;
4381 case PHY_TEST_PATTERN_PRBS9:
4382 test_pattern = DP_TEST_PATTERN_PRBS9;
4384 case PHY_TEST_PATTERN_PRBS11:
4385 test_pattern = DP_TEST_PATTERN_PRBS11;
4387 case PHY_TEST_PATTERN_PRBS15:
4388 test_pattern = DP_TEST_PATTERN_PRBS15;
4390 case PHY_TEST_PATTERN_PRBS23:
4391 test_pattern = DP_TEST_PATTERN_PRBS23;
4393 case PHY_TEST_PATTERN_PRBS31:
4394 test_pattern = DP_TEST_PATTERN_PRBS31;
4396 case PHY_TEST_PATTERN_264BIT_CUSTOM:
4397 test_pattern = DP_TEST_PATTERN_264BIT_CUSTOM;
4399 case PHY_TEST_PATTERN_SQUARE_PULSE:
4400 test_pattern = DP_TEST_PATTERN_SQUARE_PULSE;
4404 test_pattern = DP_TEST_PATTERN_VIDEO_MODE;
4408 if (test_pattern == DP_TEST_PATTERN_80BIT_CUSTOM) {
4409 test_pattern_size = (DP_TEST_80BIT_CUSTOM_PATTERN_79_72 -
4410 DP_TEST_80BIT_CUSTOM_PATTERN_7_0) + 1;
4411 core_link_read_dpcd(
4413 DP_TEST_80BIT_CUSTOM_PATTERN_7_0,
4414 test_pattern_buffer,
4418 #if defined(CONFIG_DRM_AMD_DC_DCN)
4419 if (test_pattern == DP_TEST_PATTERN_SQUARE_PULSE) {
4420 test_pattern_size = 1; // Square pattern data is 1 byte (DP spec)
4421 core_link_read_dpcd(
4423 DP_PHY_SQUARE_PATTERN,
4424 test_pattern_buffer,
4428 if (test_pattern == DP_TEST_PATTERN_264BIT_CUSTOM) {
4429 test_pattern_size = (DP_TEST_264BIT_CUSTOM_PATTERN_263_256-
4430 DP_TEST_264BIT_CUSTOM_PATTERN_7_0) + 1;
4431 core_link_read_dpcd(
4433 DP_TEST_264BIT_CUSTOM_PATTERN_7_0,
4434 test_pattern_buffer,
4439 /* prepare link training settings */
4440 link_training_settings.link_settings = link->cur_link_settings;
4442 for (lane = 0; lane <
4443 (unsigned int)(link->cur_link_settings.lane_count);
4445 dpcd_lane_adjust.raw =
4446 get_nibble_at_index(&dpcd_lane_adjustment[0].raw, lane);
4447 if (dp_get_link_encoding_format(&link->cur_link_settings) ==
4448 DP_8b_10b_ENCODING) {
4449 link_training_settings.hw_lane_settings[lane].VOLTAGE_SWING =
4450 (enum dc_voltage_swing)
4451 (dpcd_lane_adjust.bits.VOLTAGE_SWING_LANE);
4452 link_training_settings.hw_lane_settings[lane].PRE_EMPHASIS =
4453 (enum dc_pre_emphasis)
4454 (dpcd_lane_adjust.bits.PRE_EMPHASIS_LANE);
4455 link_training_settings.hw_lane_settings[lane].POST_CURSOR2 =
4456 (enum dc_post_cursor2)
4457 ((dpcd_post_cursor_2_adjustment >> (lane * 2)) & 0x03);
4459 #if defined(CONFIG_DRM_AMD_DC_DCN)
4460 else if (dp_get_link_encoding_format(&link->cur_link_settings) ==
4461 DP_128b_132b_ENCODING) {
4462 link_training_settings.hw_lane_settings[lane].FFE_PRESET.raw =
4463 dpcd_lane_adjust.tx_ffe.PRESET_VALUE;
4468 dp_hw_to_dpcd_lane_settings(&link_training_settings,
4469 link_training_settings.hw_lane_settings,
4470 link_training_settings.dpcd_lane_settings);
4471 /*Usage: Measure DP physical lane signal
4472 * by DP SI test equipment automatically.
4473 * PHY test pattern request is generated by equipment via HPD interrupt.
4474 * HPD needs to be active all the time. HPD should be active
4475 * all the time. Do not touch it.
4476 * forward request to DS
4478 dc_link_dp_set_test_pattern(
4481 DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED,
4482 &link_training_settings,
4483 test_pattern_buffer,
/* Service a DP compliance "link test pattern" request (TEST_REQUEST bit
 * LINK_TEST_PATTRN): read the requested pattern and its parameters from the
 * sink's DPCD, translate them to internal enums, adjust the active stream's
 * pixel encoding / color depth to match, and forward the pattern to
 * dc_link_dp_set_test_pattern().
 *
 * NOTE(review): this extraction is lossy — several lines (break statements,
 * braces, some DPCD addresses) are missing; code left byte-identical.
 */
4487 static void dp_test_send_link_test_pattern(struct dc_link *link)
4489 union link_test_pattern dpcd_test_pattern;
4490 union test_misc dpcd_test_params;
4491 enum dp_test_pattern test_pattern;
4492 enum dp_test_pattern_color_space test_pattern_color_space =
4493 DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED;
4494 enum dc_color_depth requestColorDepth = COLOR_DEPTH_UNDEFINED;
4495 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
4496 struct pipe_ctx *pipe_ctx = NULL;
4499 memset(&dpcd_test_pattern, 0, sizeof(dpcd_test_pattern));
4500 memset(&dpcd_test_params, 0, sizeof(dpcd_test_params));
/* Find the first top-level (non-split, non-ODM) pipe driving this link. */
4502 for (i = 0; i < MAX_PIPES; i++) {
4503 if (pipes[i].stream == NULL)
4506 if (pipes[i].stream->link == link && !pipes[i].top_pipe && !pipes[i].prev_odm_pipe) {
4507 pipe_ctx = &pipes[i];
4512 if (pipe_ctx == NULL)
4515 /* get link test pattern and pattern parameters */
4516 core_link_read_dpcd(
4519 &dpcd_test_pattern.raw,
4520 sizeof(dpcd_test_pattern))
4521 core_link_read_dpcd(
4524 &dpcd_test_params.raw,
4525 sizeof(dpcd_test_params));
/* Translate the DPCD-requested pattern into the driver's enum; unknown
 * values fall back to plain video mode. */
4527 switch (dpcd_test_pattern.bits.PATTERN) {
4528 case LINK_TEST_PATTERN_COLOR_RAMP:
4529 test_pattern = DP_TEST_PATTERN_COLOR_RAMP;
4531 case LINK_TEST_PATTERN_VERTICAL_BARS:
4532 test_pattern = DP_TEST_PATTERN_VERTICAL_BARS;
4533 break; /* black and white */
4534 case LINK_TEST_PATTERN_COLOR_SQUARES:
4535 test_pattern = (dpcd_test_params.bits.DYN_RANGE ==
4536 TEST_DYN_RANGE_VESA ?
4537 DP_TEST_PATTERN_COLOR_SQUARES :
4538 DP_TEST_PATTERN_COLOR_SQUARES_CEA);
4541 test_pattern = DP_TEST_PATTERN_VIDEO_MODE;
/* CLR_FORMAT == 0 means RGB; otherwise pick YCbCr 601/709 by coefficients. */
4545 if (dpcd_test_params.bits.CLR_FORMAT == 0)
4546 test_pattern_color_space = DP_TEST_PATTERN_COLOR_SPACE_RGB;
4548 test_pattern_color_space = dpcd_test_params.bits.YCBCR_COEFS ?
4549 DP_TEST_PATTERN_COLOR_SPACE_YCBCR709 :
4550 DP_TEST_PATTERN_COLOR_SPACE_YCBCR601;
4552 switch (dpcd_test_params.bits.BPC) {
4554 requestColorDepth = COLOR_DEPTH_666;
4557 requestColorDepth = COLOR_DEPTH_888;
4560 requestColorDepth = COLOR_DEPTH_101010;
4563 requestColorDepth = COLOR_DEPTH_121212;
4569 switch (dpcd_test_params.bits.CLR_FORMAT) {
4571 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_RGB;
4574 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_YCBCR422;
4577 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_YCBCR444;
4580 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_RGB;
/* If the test asks for a different bpc, retime the stream and refresh the
 * DSC configuration to stay consistent with the new depth. */
4585 if (requestColorDepth != COLOR_DEPTH_UNDEFINED
4586 && pipe_ctx->stream->timing.display_color_depth != requestColorDepth) {
4587 DC_LOG_DEBUG("%s: original bpc %d, changing to %d\n",
4589 pipe_ctx->stream->timing.display_color_depth,
4591 pipe_ctx->stream->timing.display_color_depth = requestColorDepth;
4594 dp_update_dsc_config(pipe_ctx);
4596 dc_link_dp_set_test_pattern(
4599 test_pattern_color_space,
/* Service a DP compliance audio test request: read the audio test mode and
 * pattern type from DPCD, optionally read per-channel sawtooth periods, and
 * cache the decoded parameters in link->audio_test_data for the DM/audio
 * path to act on.
 *
 * @disable_video: TEST_AUDIO_DISABLED_VIDEO bit from the TEST_REQUEST,
 *                 stored verbatim in the cached flags.
 */
4605 static void dp_test_get_audio_test_data(struct dc_link *link, bool disable_video)
4607 union audio_test_mode dpcd_test_mode = {0};
4608 struct audio_test_pattern_type dpcd_pattern_type = {0};
4609 union audio_test_pattern_period dpcd_pattern_period[AUDIO_CHANNELS_COUNT] = {0};
4610 enum dp_test_pattern test_pattern = DP_TEST_PATTERN_AUDIO_OPERATOR_DEFINED;
4612 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
4613 struct pipe_ctx *pipe_ctx = &pipes[0];
4614 unsigned int channel_count;
4615 unsigned int channel = 0;
4616 unsigned int modes = 0;
4617 unsigned int sampling_rate_in_hz = 0;
4619 // get audio test mode and test pattern parameters
4620 core_link_read_dpcd(
4623 &dpcd_test_mode.raw,
4624 sizeof(dpcd_test_mode));
4626 core_link_read_dpcd(
4628 DP_TEST_AUDIO_PATTERN_TYPE,
4629 &dpcd_pattern_type.value,
4630 sizeof(dpcd_pattern_type));
/* DPCD encodes channel count as (channels - 1). */
4632 channel_count = dpcd_test_mode.bits.channel_count + 1;
4634 // read pattern periods for requested channels when sawTooth pattern is requested
4635 if (dpcd_pattern_type.value == AUDIO_TEST_PATTERN_SAWTOOTH ||
4636 dpcd_pattern_type.value == AUDIO_TEST_PATTERN_OPERATOR_DEFINED) {
4638 test_pattern = (dpcd_pattern_type.value == AUDIO_TEST_PATTERN_SAWTOOTH) ?
4639 DP_TEST_PATTERN_AUDIO_SAWTOOTH : DP_TEST_PATTERN_AUDIO_OPERATOR_DEFINED;
4640 // read period for each channel
4641 for (channel = 0; channel < channel_count; channel++) {
4642 core_link_read_dpcd(
4644 DP_TEST_AUDIO_PERIOD_CH1 + channel,
4645 &dpcd_pattern_period[channel].raw,
4646 sizeof(dpcd_pattern_period[channel]));
4650 // translate sampling rate
4651 switch (dpcd_test_mode.bits.sampling_rate) {
4652 case AUDIO_SAMPLING_RATE_32KHZ:
4653 sampling_rate_in_hz = 32000;
4655 case AUDIO_SAMPLING_RATE_44_1KHZ:
4656 sampling_rate_in_hz = 44100;
4658 case AUDIO_SAMPLING_RATE_48KHZ:
4659 sampling_rate_in_hz = 48000;
4661 case AUDIO_SAMPLING_RATE_88_2KHZ:
4662 sampling_rate_in_hz = 88200;
4664 case AUDIO_SAMPLING_RATE_96KHZ:
4665 sampling_rate_in_hz = 96000;
4667 case AUDIO_SAMPLING_RATE_176_4KHZ:
4668 sampling_rate_in_hz = 176400;
4670 case AUDIO_SAMPLING_RATE_192KHZ:
4671 sampling_rate_in_hz = 192000;
/* Unknown encoding: leave the rate at 0 so downstream consumers can tell. */
4674 sampling_rate_in_hz = 0;
/* Publish the decoded request on the link for later processing. */
4678 link->audio_test_data.flags.test_requested = 1;
4679 link->audio_test_data.flags.disable_video = disable_video;
4680 link->audio_test_data.sampling_rate = sampling_rate_in_hz;
4681 link->audio_test_data.channel_count = channel_count;
4682 link->audio_test_data.pattern_type = test_pattern;
4684 if (test_pattern == DP_TEST_PATTERN_AUDIO_SAWTOOTH) {
4685 for (modes = 0; modes < pipe_ctx->stream->audio_info.mode_count; modes++) {
4686 link->audio_test_data.pattern_period[modes] = dpcd_pattern_period[modes].bits.pattern_period;
/* Dispatch a DP automated test request (DPCD TEST_REQUEST): run each
 * requested sub-test (link training, link test pattern, audio pattern,
 * PHY pattern) and write an ACK to TEST_RESPONSE when any was handled.
 */
4691 void dc_link_dp_handle_automated_test(struct dc_link *link)
4693 union test_request test_request;
4694 union test_response test_response;
4696 memset(&test_request, 0, sizeof(test_request));
4697 memset(&test_response, 0, sizeof(test_response));
4699 core_link_read_dpcd(
4703 sizeof(union test_request));
4704 if (test_request.bits.LINK_TRAINING) {
4705 /* ACK first to let DP RX test box monitor LT sequence */
4706 test_response.bits.ACK = 1;
4707 core_link_write_dpcd(
4711 sizeof(test_response));
4712 dp_test_send_link_training(link);
4713 /* no acknowledge request is needed again */
4714 test_response.bits.ACK = 0;
4716 if (test_request.bits.LINK_TEST_PATTRN) {
4717 dp_test_send_link_test_pattern(link);
4718 test_response.bits.ACK = 1;
4721 if (test_request.bits.AUDIO_TEST_PATTERN) {
4722 dp_test_get_audio_test_data(link, test_request.bits.TEST_AUDIO_DISABLED_VIDEO);
4723 test_response.bits.ACK = 1;
4726 if (test_request.bits.PHY_TEST_PATTERN) {
4727 dp_test_send_phy_test_pattern(link);
4728 test_response.bits.ACK = 1;
4731 /* send request acknowledgment */
4732 if (test_response.bits.ACK)
4733 core_link_write_dpcd(
4737 sizeof(test_response));
/* Recover from a detected link loss: if any pipe is driving this link,
 * disable then re-enable every active top-level stream on the link so it
 * retrains and restarts output.
 */
4740 void dc_link_dp_handle_link_loss(struct dc_link *link)
4743 struct pipe_ctx *pipe_ctx;
/* First pass: check that at least one pipe/stream belongs to this link. */
4745 for (i = 0; i < MAX_PIPES; i++) {
4746 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
4747 if (pipe_ctx && pipe_ctx->stream && pipe_ctx->stream->link == link)
4751 if (pipe_ctx == NULL || pipe_ctx->stream == NULL)
/* Disable all active (dpms on), non-ODM-secondary streams on the link... */
4754 for (i = 0; i < MAX_PIPES; i++) {
4755 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
4756 if (pipe_ctx && pipe_ctx->stream && !pipe_ctx->stream->dpms_off &&
4757 pipe_ctx->stream->link == link && !pipe_ctx->prev_odm_pipe) {
4758 core_link_disable_stream(pipe_ctx);
/* ...then re-enable them, which brings the link back up. */
4762 for (i = 0; i < MAX_PIPES; i++) {
4763 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
4764 if (pipe_ctx && pipe_ctx->stream && !pipe_ctx->stream->dpms_off &&
4765 pipe_ctx->stream->link == link && !pipe_ctx->prev_odm_pipe) {
4766 core_link_enable_stream(link->dc->current_state, pipe_ctx);
/* Handle a short-pulse HPD (HPD_RX) interrupt on a DP link.
 *
 * Reads the HPD IRQ DPCD data, then dispatches in order: automated test,
 * PSR error handling, MST up/down sideband messages, link loss (with
 * retrain via dc_link_dp_handle_link_loss), and sink count change.
 *
 * @out_hpd_irq_dpcd_data: optional; receives the raw IRQ DPCD data.
 * @out_link_loss:         set true when link loss was detected.
 * @defer_handling:        when true, heavy work is deferred — *has_left_work
 *                         is set instead of handling inline.
 * Returns the handling status (see body; extraction is lossy here).
 */
4771 bool dc_link_handle_hpd_rx_irq(struct dc_link *link, union hpd_irq_data *out_hpd_irq_dpcd_data, bool *out_link_loss,
4772 bool defer_handling, bool *has_left_work)
4774 union hpd_irq_data hpd_irq_dpcd_data = {0};
4775 union device_service_irq device_service_clear = {0};
4776 enum dc_status result;
4777 bool status = false;
4780 *out_link_loss = false;
4783 *has_left_work = false;
4784 /* For use cases related to down stream connection status change,
4785 * PSR and device auto test, refer to function handle_sst_hpd_irq
4788 DC_LOG_HW_HPD_IRQ("%s: Got short pulse HPD on link %d\n",
4789 __func__, link->link_index);
4792 /* All the "handle_hpd_irq_xxx()" methods
4793 * should be called only after
4794 * dal_dpsst_ls_read_hpd_irq_data
4795 * Order of calls is important too
4797 result = read_hpd_rx_irq_data(link, &hpd_irq_dpcd_data);
4798 if (out_hpd_irq_dpcd_data)
4799 *out_hpd_irq_dpcd_data = hpd_irq_dpcd_data;
4801 if (result != DC_OK) {
4802 DC_LOG_HW_HPD_IRQ("%s: DPCD read failed to obtain irq data\n",
/* Automated test: ACK/clear the IRQ vector bit first, then run (or defer)
 * the test dispatcher. */
4807 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.AUTOMATED_TEST) {
4808 device_service_clear.bits.AUTOMATED_TEST = 1;
4809 core_link_write_dpcd(
4811 DP_DEVICE_SERVICE_IRQ_VECTOR,
4812 &device_service_clear.raw,
4813 sizeof(device_service_clear.raw));
4814 device_service_clear.raw = 0;
4815 if (defer_handling && has_left_work)
4816 *has_left_work = true;
4818 dc_link_dp_handle_automated_test(link);
4822 if (!dc_link_dp_allow_hpd_rx_irq(link)) {
4823 DC_LOG_HW_HPD_IRQ("%s: skipping HPD handling on %d\n",
4824 __func__, link->link_index);
4828 if (handle_hpd_irq_psr_sink(link))
4829 /* PSR-related error was detected and handled */
4832 /* If PSR-related error handled, Main link may be off,
4833 * so do not handle as a normal sink status change interrupt.
4836 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.UP_REQ_MSG_RDY) {
4837 if (defer_handling && has_left_work)
4838 *has_left_work = true;
4842 /* check if we have MST msg and return since we poll for it */
4843 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.DOWN_REP_MSG_RDY) {
4844 if (defer_handling && has_left_work)
4845 *has_left_work = true;
4849 /* For now we only handle 'Downstream port status' case.
4850 * If we got sink count changed it means
4851 * Downstream port status changed,
4852 * then DM should call DC to do the detection.
4853 * NOTE: Do not handle link loss on eDP since it is internal link*/
4854 if ((link->connector_signal != SIGNAL_TYPE_EDP) &&
4855 hpd_rx_irq_check_link_loss_status(
4857 &hpd_irq_dpcd_data)) {
4858 /* Connectivity log: link loss */
4859 CONN_DATA_LINK_LOSS(link,
4860 hpd_irq_dpcd_data.raw,
4861 sizeof(hpd_irq_dpcd_data),
4864 if (defer_handling && has_left_work)
4865 *has_left_work = true;
4867 dc_link_dp_handle_link_loss(link);
4871 *out_link_loss = true;
/* SST branch device: a changed SINK_COUNT means downstream status changed;
 * DM must re-detect. */
4874 if (link->type == dc_connection_sst_branch &&
4875 hpd_irq_dpcd_data.bytes.sink_cnt.bits.SINK_COUNT
4876 != link->dpcd_sink_count)
4879 /* reasons for HPD RX:
4880 * 1. Link Loss - ie Re-train the Link
4881 * 2. MST sideband message
4882 * 3. Automated Test - ie. Internal Commit
4883 * 4. CP (copy protection) - (not interesting for DM???)
4885 * 6. Downstream Port status changed
4886 * -ie. Detect - this the only one
4887 * which is interesting for DM because
4888 * it must call dc_link_detect.
4893 /*query dpcd for version and mst cap addresses*/
/* Returns true when the sink reports DPCD rev >= 1.2 and MSTM_CAP.MST_CAP
 * set, unless MST is explicitly disabled through the preferred training
 * settings override.
 * NOTE(review): declarations of the rev/cap unions and the return paths are
 * missing from this extraction; code left byte-identical.
 */
4894 bool is_mst_supported(struct dc_link *link)
4897 enum dc_status st = DC_OK;
/* Honor an explicit "MST off" override from preferred training settings. */
4901 if (link->preferred_training_settings.mst_enable &&
4902 *link->preferred_training_settings.mst_enable == false) {
4909 st = core_link_read_dpcd(link, DP_DPCD_REV, &rev.raw,
4912 if (st == DC_OK && rev.raw >= DPCD_REV_12) {
/* MST capability register is only meaningful on DPCD 1.2+ sinks. */
4914 st = core_link_read_dpcd(link, DP_MSTM_CAP,
4915 &cap.raw, sizeof(cap));
4916 if (st == DC_OK && cap.bits.MST_CAP == 1)
4923 bool is_dp_active_dongle(const struct dc_link *link)
4925 return (link->dpcd_caps.dongle_type >= DISPLAY_DONGLE_DP_VGA_CONVERTER) &&
4926 (link->dpcd_caps.dongle_type <= DISPLAY_DONGLE_DP_HDMI_CONVERTER);
4929 bool is_dp_branch_device(const struct dc_link *link)
4931 return link->dpcd_caps.is_branch_dev;
/* Map the DPCD downstream-port max-bpc encoding to a bit-depth value used
 * by the dongle caps (consumed as dp_hdmi_max_bpc).
 * NOTE(review): the return statements for each case are missing from this
 * extraction; code left byte-identical.
 */
4934 static int translate_dpcd_max_bpc(enum dpcd_downstream_port_max_bpc bpc)
4937 case DOWN_STREAM_MAX_8BPC:
4939 case DOWN_STREAM_MAX_10BPC:
4941 case DOWN_STREAM_MAX_12BPC:
4943 case DOWN_STREAM_MAX_16BPC:
4952 #if defined(CONFIG_DRM_AMD_DC_DCN)
4953 uint32_t dc_link_bw_kbps_from_raw_frl_link_rate_data(uint8_t bw)
4974 * Return PCON's post FRL link training supported BW if its non-zero, otherwise return max_supported_frl_bw.
4976 static uint32_t intersect_frl_link_bw_support(
4977 const uint32_t max_supported_frl_bw_in_kbps,
4978 const union hdmi_encoded_link_bw hdmi_encoded_link_bw)
4980 uint32_t supported_bw_in_kbps = max_supported_frl_bw_in_kbps;
4982 // HDMI_ENCODED_LINK_BW bits are only valid if HDMI Link Configuration bit is 1 (FRL mode)
4983 if (hdmi_encoded_link_bw.bits.FRL_MODE) {
4984 if (hdmi_encoded_link_bw.bits.BW_48Gbps)
4985 supported_bw_in_kbps = 48000000;
4986 else if (hdmi_encoded_link_bw.bits.BW_40Gbps)
4987 supported_bw_in_kbps = 40000000;
4988 else if (hdmi_encoded_link_bw.bits.BW_32Gbps)
4989 supported_bw_in_kbps = 32000000;
4990 else if (hdmi_encoded_link_bw.bits.BW_24Gbps)
4991 supported_bw_in_kbps = 24000000;
4992 else if (hdmi_encoded_link_bw.bits.BW_18Gbps)
4993 supported_bw_in_kbps = 18000000;
4994 else if (hdmi_encoded_link_bw.bits.BW_9Gbps)
4995 supported_bw_in_kbps = 9000000;
4998 return supported_bw_in_kbps;
/* Read the branch device's IEEE OUI + device id from DPCD and cache the
 * packed 24-bit OUI in dpcd_caps.branch_dev_id and the device-id string in
 * dpcd_caps.branch_dev_name.
 * NOTE(review): the read-call argument lines and the copy call are missing
 * from this extraction; code left byte-identical.
 */
5002 static void read_dp_device_vendor_id(struct dc_link *link)
5004 struct dp_device_vendor_id dp_id;
5006 /* read IEEE branch device id */
5007 core_link_read_dpcd(
/* Pack the three OUI bytes big-endian into a single integer id. */
5013 link->dpcd_caps.branch_dev_id =
5014 (dp_id.ieee_oui[0] << 16) +
5015 (dp_id.ieee_oui[1] << 8) +
5019 link->dpcd_caps.branch_dev_name,
5020 dp_id.ieee_device_id,
5021 sizeof(dp_id.ieee_device_id));
/* Decode DPCD DOWNSTREAMPORT_PRESENT (@data) and the detailed downstream
 * port capability registers to classify the attached dongle/branch device,
 * filling link->dpcd_caps (dongle_type, dongle_caps, branch revision, and —
 * on DCN with DPCD 1.4+ — the DFP capability extension).
 *
 * NOTE(review): lossy extraction — break statements, braces and a few read
 * arguments are missing; code left byte-identical.
 */
5026 static void get_active_converter_info(
5027 uint8_t data, struct dc_link *link)
5029 union dp_downstream_port_present ds_port = { .byte = data };
5030 memset(&link->dpcd_caps.dongle_caps, 0, sizeof(link->dpcd_caps.dongle_caps));
5032 /* decode converter info*/
5033 if (!ds_port.fields.PORT_PRESENT) {
5034 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
5035 ddc_service_set_dongle_type(link->ddc,
5036 link->dpcd_caps.dongle_type);
5037 link->dpcd_caps.is_branch_dev = false;
5041 /* DPCD 0x5 bit 0 = 1, it indicate it's branch device */
5042 link->dpcd_caps.is_branch_dev = ds_port.fields.PORT_PRESENT;
/* Coarse classification from the port-type field alone. */
5044 switch (ds_port.fields.PORT_TYPE) {
5045 case DOWNSTREAM_VGA:
5046 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_DP_VGA_CONVERTER;
5048 case DOWNSTREAM_DVI_HDMI_DP_PLUS_PLUS:
5049 /* At this point we don't know is it DVI or HDMI or DP++,
5051 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_DP_DVI_CONVERTER;
5054 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
/* On DPCD 1.1+ refine the classification from the 16-byte detailed
 * capability block (CTS 4.2.2.7 requires reading 00080h-0008Fh). */
5058 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_11) {
5059 uint8_t det_caps[16]; /* CTS 4.2.2.7 expects source to read Detailed Capabilities Info : 00080h-0008F.*/
5060 union dwnstream_port_caps_byte0 *port_caps =
5061 (union dwnstream_port_caps_byte0 *)det_caps;
5062 if (core_link_read_dpcd(link, DP_DOWNSTREAM_PORT_0,
5063 det_caps, sizeof(det_caps)) == DC_OK) {
5065 switch (port_caps->bits.DWN_STRM_PORTX_TYPE) {
5066 /*Handle DP case as DONGLE_NONE*/
5067 case DOWN_STREAM_DETAILED_DP:
5068 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
5070 case DOWN_STREAM_DETAILED_VGA:
5071 link->dpcd_caps.dongle_type =
5072 DISPLAY_DONGLE_DP_VGA_CONVERTER;
5074 case DOWN_STREAM_DETAILED_DVI:
5075 link->dpcd_caps.dongle_type =
5076 DISPLAY_DONGLE_DP_DVI_CONVERTER;
5078 case DOWN_STREAM_DETAILED_HDMI:
5079 case DOWN_STREAM_DETAILED_DP_PLUS_PLUS:
5080 /*Handle DP++ active converter case, process DP++ case as HDMI case according DP1.4 spec*/
5081 link->dpcd_caps.dongle_type =
5082 DISPLAY_DONGLE_DP_HDMI_CONVERTER;
5084 link->dpcd_caps.dongle_caps.dongle_type = link->dpcd_caps.dongle_type;
5085 if (ds_port.fields.DETAILED_CAPS) {
/* Bytes 2/3 of the detailed caps describe HDMI color / conversion
 * capabilities of the converter. */
5087 union dwnstream_port_caps_byte3_hdmi
5088 hdmi_caps = {.raw = det_caps[3] };
5089 union dwnstream_port_caps_byte2
5090 hdmi_color_caps = {.raw = det_caps[2] };
5091 link->dpcd_caps.dongle_caps.dp_hdmi_max_pixel_clk_in_khz =
5094 link->dpcd_caps.dongle_caps.is_dp_hdmi_s3d_converter =
5095 hdmi_caps.bits.FRAME_SEQ_TO_FRAME_PACK;
5096 /*YCBCR capability only for HDMI case*/
5097 if (port_caps->bits.DWN_STRM_PORTX_TYPE
5098 == DOWN_STREAM_DETAILED_HDMI) {
5099 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr422_pass_through =
5100 hdmi_caps.bits.YCrCr422_PASS_THROUGH;
5101 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr420_pass_through =
5102 hdmi_caps.bits.YCrCr420_PASS_THROUGH;
5103 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr422_converter =
5104 hdmi_caps.bits.YCrCr422_CONVERSION;
5105 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr420_converter =
5106 hdmi_caps.bits.YCrCr420_CONVERSION;
5109 link->dpcd_caps.dongle_caps.dp_hdmi_max_bpc =
5110 translate_dpcd_max_bpc(
5111 hdmi_color_caps.bits.MAX_BITS_PER_COLOR_COMPONENT);
5113 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* PCON FRL bandwidth: take the reported max, then intersect with the
 * post-FRL-training status register when readable. */
5114 if (link->dc->caps.hdmi_frl_pcon_support) {
5115 union hdmi_encoded_link_bw hdmi_encoded_link_bw;
5117 link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps =
5118 dc_link_bw_kbps_from_raw_frl_link_rate_data(
5119 hdmi_color_caps.bits.MAX_ENCODED_LINK_BW_SUPPORT);
5121 // Intersect reported max link bw support with the supported link rate post FRL link training
5122 if (core_link_read_dpcd(link, DP_PCON_HDMI_POST_FRL_STATUS,
5123 &hdmi_encoded_link_bw.raw, sizeof(hdmi_encoded_link_bw)) == DC_OK) {
5124 link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps = intersect_frl_link_bw_support(
5125 link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps,
5126 hdmi_encoded_link_bw);
5129 if (link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps > 0)
5130 link->dpcd_caps.dongle_caps.extendedCapValid = true;
5134 if (link->dpcd_caps.dongle_caps.dp_hdmi_max_pixel_clk_in_khz != 0)
5135 link->dpcd_caps.dongle_caps.extendedCapValid = true;
5143 ddc_service_set_dongle_type(link->ddc, link->dpcd_caps.dongle_type);
/* Cache the branch device's hardware/firmware revision strings. */
5146 struct dp_sink_hw_fw_revision dp_hw_fw_revision;
5148 core_link_read_dpcd(
5150 DP_BRANCH_REVISION_START,
5151 (uint8_t *)&dp_hw_fw_revision,
5152 sizeof(dp_hw_fw_revision));
5154 link->dpcd_caps.branch_hw_revision =
5155 dp_hw_fw_revision.ieee_hw_rev;
5158 link->dpcd_caps.branch_fw_revision,
5159 dp_hw_fw_revision.ieee_fw_rev,
5160 sizeof(dp_hw_fw_revision.ieee_fw_rev));
5162 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* DPCD 1.4+ dongles: read and unpack the DFP capability extension
 * (max pixel rate, max h/v active, per-format color-depth caps). */
5163 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_14 &&
5164 link->dpcd_caps.dongle_type != DISPLAY_DONGLE_NONE) {
5165 union dp_dfp_cap_ext dfp_cap_ext;
5166 memset(&dfp_cap_ext, '\0', sizeof (dfp_cap_ext));
5167 core_link_read_dpcd(
5169 DP_DFP_CAPABILITY_EXTENSION_SUPPORT,
5171 sizeof(dfp_cap_ext.raw));
5172 link->dpcd_caps.dongle_caps.dfp_cap_ext.supported = dfp_cap_ext.fields.supported;
5173 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_pixel_rate_in_mps =
5174 dfp_cap_ext.fields.max_pixel_rate_in_mps[0] +
5175 (dfp_cap_ext.fields.max_pixel_rate_in_mps[1] << 8);
5176 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_h_active_width =
5177 dfp_cap_ext.fields.max_video_h_active_width[0] +
5178 (dfp_cap_ext.fields.max_video_h_active_width[1] << 8);
5179 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_v_active_height =
5180 dfp_cap_ext.fields.max_video_v_active_height[0] +
5181 (dfp_cap_ext.fields.max_video_v_active_height[1] << 8);
5182 link->dpcd_caps.dongle_caps.dfp_cap_ext.encoding_format_caps =
5183 dfp_cap_ext.fields.encoding_format_caps;
5184 link->dpcd_caps.dongle_caps.dfp_cap_ext.rgb_color_depth_caps =
5185 dfp_cap_ext.fields.rgb_color_depth_caps;
5186 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr444_color_depth_caps =
5187 dfp_cap_ext.fields.ycbcr444_color_depth_caps;
5188 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr422_color_depth_caps =
5189 dfp_cap_ext.fields.ycbcr422_color_depth_caps;
5190 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr420_color_depth_caps =
5191 dfp_cap_ext.fields.ycbcr420_color_depth_caps;
5192 DC_LOG_DP2("DFP capability extension is read at link %d", link->link_index);
5193 DC_LOG_DP2("\tdfp_cap_ext.supported = %s", link->dpcd_caps.dongle_caps.dfp_cap_ext.supported ? "true" : "false");
5194 DC_LOG_DP2("\tdfp_cap_ext.max_pixel_rate_in_mps = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_pixel_rate_in_mps);
5195 DC_LOG_DP2("\tdfp_cap_ext.max_video_h_active_width = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_h_active_width);
5196 DC_LOG_DP2("\tdfp_cap_ext.max_video_v_active_height = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_v_active_height);
/* Workaround for dongles (notably branch id 0010FA family) that power down
 * their AUX circuitry: if DPCD_REV read back as 0, force RX power up and
 * retry the read a few times; then flag known dongle ids so the encoder
 * keeps the receiver powered across disable_output.
 */
5201 static void dp_wa_power_up_0010FA(struct dc_link *link, uint8_t *dpcd_data,
/* Retry loop: power up the receiver and re-read DPCD_REV until non-zero
 * (up to 4 extra attempts). */
5206 if (!link->dpcd_caps.dpcd_rev.raw) {
5208 dp_receiver_power_ctrl(link, true);
5209 core_link_read_dpcd(link, DP_DPCD_REV,
5211 link->dpcd_caps.dpcd_rev.raw = dpcd_data[
5214 } while (retry++ < 4 && !link->dpcd_caps.dpcd_rev.raw);
5217 if (link->dpcd_caps.dongle_type == DISPLAY_DONGLE_DP_VGA_CONVERTER) {
5218 switch (link->dpcd_caps.branch_dev_id) {
5219 /* 0010FA active dongles (DP-VGA, DP-DLDVI converters) power down
5220 * all internal circuits including AUX communication preventing
5221 * reading DPCD table and EDID (spec violation).
5222 * Encoder will skip DP RX power down on disable_output to
5223 * keep receiver powered all the time.*/
5224 case DP_BRANCH_DEVICE_ID_0010FA:
5225 case DP_BRANCH_DEVICE_ID_0080E1:
5226 case DP_BRANCH_DEVICE_ID_00E04C:
5227 link->wa_flags.dp_keep_receiver_powered = true;
5230 /* TODO: May need work around for other dongles. */
5232 link->wa_flags.dp_keep_receiver_powered = false;
5236 link->wa_flags.dp_keep_receiver_powered = false;
5239 /* Read additional sink caps defined in source specific DPCD area
5240 * This function currently only reads from SinkCapability address (DP_SOURCE_SINK_CAP)
5242 static bool dpcd_read_sink_ext_caps(struct dc_link *link)
5249 if (core_link_read_dpcd(link, DP_SOURCE_SINK_CAP, &dpcd_data, 1) != DC_OK)
5252 link->dpcd_sink_ext_caps.raw = dpcd_data;
/* Determine the LTTPR (link-training-tunable PHY repeater) operating mode
 * for this link from VBIOS flags and driver config, read the LTTPR
 * capability block from DPCD when LTTPR mode is enabled, and decide whether
 * a valid LTTPR is actually present.
 *
 * Returns true when a valid LTTPR chain was detected (and transparent-mode
 * configuration was written); otherwise the link falls back to non-LTTPR.
 *
 * NOTE(review): lossy extraction — some braces/returns are missing; code
 * left byte-identical.
 */
5256 bool dp_retrieve_lttpr_cap(struct dc_link *link)
5258 #if defined(CONFIG_DRM_AMD_DC_DCN)
5259 uint8_t lttpr_dpcd_data[8];
5260 bool allow_lttpr_non_transparent_mode = 0;
5262 uint8_t lttpr_dpcd_data[6];
5264 bool vbios_lttpr_enable = link->dc->caps.vbios_lttpr_enable;
5265 bool vbios_lttpr_interop = link->dc->caps.vbios_lttpr_aware;
5266 enum dc_status status = DC_ERROR_UNEXPECTED;
5267 bool is_lttpr_present = false;
5269 memset(lttpr_dpcd_data, '\0', sizeof(lttpr_dpcd_data));
5271 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* DCN: non-transparent mode is allowed per encoding (DP2.0 128b/132b vs
 * DP1.4a 8b/10b) from the per-encoding config bits. */
5272 if ((link->dc->config.allow_lttpr_non_transparent_mode.bits.DP2_0 &&
5273 link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED)) {
5274 allow_lttpr_non_transparent_mode = 1;
5275 } else if (link->dc->config.allow_lttpr_non_transparent_mode.bits.DP1_4A &&
5276 !link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED) {
5277 allow_lttpr_non_transparent_mode = 1;
5282 * Logic to determine LTTPR mode
/* Mode selection from the VBIOS enable/interop flags and driver config. */
5284 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
5285 if (vbios_lttpr_enable && vbios_lttpr_interop)
5286 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
5287 else if (!vbios_lttpr_enable && vbios_lttpr_interop) {
5288 #if defined(CONFIG_DRM_AMD_DC_DCN)
5289 if (allow_lttpr_non_transparent_mode)
5291 if (link->dc->config.allow_lttpr_non_transparent_mode)
5293 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
5295 link->lttpr_mode = LTTPR_MODE_TRANSPARENT;
5296 } else if (!vbios_lttpr_enable && !vbios_lttpr_interop) {
5297 #if defined(CONFIG_DRM_AMD_DC_DCN)
5298 if (!allow_lttpr_non_transparent_mode || !link->dc->caps.extended_aux_timeout_support)
5300 if (!link->dc->config.allow_lttpr_non_transparent_mode
5301 || !link->dc->caps.extended_aux_timeout_support)
5303 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
5305 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
5307 #if defined(CONFIG_DRM_AMD_DC_DCN)
5308 /* Check DP tunnel LTTPR mode debug option. */
5309 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA &&
5310 link->dc->debug.dpia_debug.bits.force_non_lttpr)
5311 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
5314 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT || link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
5315 /* By reading LTTPR capability, RX assumes that we will enable
5316 * LTTPR extended aux timeout if LTTPR is present.
5318 status = core_link_read_dpcd(
5320 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV,
5322 sizeof(lttpr_dpcd_data));
5323 if (status != DC_OK) {
5324 DC_LOG_DP2("%s: Read LTTPR caps data failed.\n", __func__);
/* Unpack the capability block; each field is indexed relative to the
 * block's base register. */
5328 link->dpcd_caps.lttpr_caps.revision.raw =
5329 lttpr_dpcd_data[DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV -
5330 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5332 link->dpcd_caps.lttpr_caps.max_link_rate =
5333 lttpr_dpcd_data[DP_MAX_LINK_RATE_PHY_REPEATER -
5334 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5336 link->dpcd_caps.lttpr_caps.phy_repeater_cnt =
5337 lttpr_dpcd_data[DP_PHY_REPEATER_CNT -
5338 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5340 link->dpcd_caps.lttpr_caps.max_lane_count =
5341 lttpr_dpcd_data[DP_MAX_LANE_COUNT_PHY_REPEATER -
5342 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5344 link->dpcd_caps.lttpr_caps.mode =
5345 lttpr_dpcd_data[DP_PHY_REPEATER_MODE -
5346 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5348 link->dpcd_caps.lttpr_caps.max_ext_timeout =
5349 lttpr_dpcd_data[DP_PHY_REPEATER_EXTENDED_WAIT_TIMEOUT -
5350 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5352 #if defined(CONFIG_DRM_AMD_DC_DCN)
5353 link->dpcd_caps.lttpr_caps.main_link_channel_coding.raw =
5354 lttpr_dpcd_data[DP_MAIN_LINK_CHANNEL_CODING_PHY_REPEATER -
5355 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5357 link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.raw =
5358 lttpr_dpcd_data[DP_PHY_REPEATER_128b_132b_RATES -
5359 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
5362 /* Attempt to train in LTTPR transparent mode if repeater count exceeds 8. */
5363 is_lttpr_present = (dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt) != 0 &&
5364 link->dpcd_caps.lttpr_caps.phy_repeater_cnt < 0xff &&
5365 link->dpcd_caps.lttpr_caps.max_lane_count > 0 &&
5366 link->dpcd_caps.lttpr_caps.max_lane_count <= 4 &&
5367 link->dpcd_caps.lttpr_caps.revision.raw >= 0x14);
5368 if (is_lttpr_present) {
5369 CONN_DATA_DETECT(link, lttpr_dpcd_data, sizeof(lttpr_dpcd_data), "LTTPR Caps: ");
5370 configure_lttpr_mode_transparent(link);
5372 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
5374 return is_lttpr_present;
5377 static bool retrieve_link_cap(struct dc_link *link)
5379 /* DP_ADAPTER_CAP - DP_DPCD_REV + 1 == 16 and also DP_DSC_BITS_PER_PIXEL_INC - DP_DSC_SUPPORT + 1 == 16,
5380 * which means size 16 will be good for both of those DPCD register block reads
5382 uint8_t dpcd_data[16];
5383 /*Only need to read 1 byte starting from DP_DPRX_FEATURE_ENUMERATION_LIST.
5385 uint8_t dpcd_dprx_data = '\0';
5386 uint8_t dpcd_power_state = '\0';
5388 struct dp_device_vendor_id sink_id;
5389 union down_stream_port_count down_strm_port_count;
5390 union edp_configuration_cap edp_config_cap;
5391 union dp_downstream_port_present ds_port = { 0 };
5392 enum dc_status status = DC_ERROR_UNEXPECTED;
5393 uint32_t read_dpcd_retry_cnt = 3;
5395 struct dp_sink_hw_fw_revision dp_hw_fw_revision;
5396 const uint32_t post_oui_delay = 30; // 30ms
5397 bool is_lttpr_present = false;
5399 memset(dpcd_data, '\0', sizeof(dpcd_data));
5400 memset(&down_strm_port_count,
5401 '\0', sizeof(union down_stream_port_count));
5402 memset(&edp_config_cap, '\0',
5403 sizeof(union edp_configuration_cap));
5405 /* if extended timeout is supported in hardware,
5406 * default to LTTPR timeout (3.2ms) first as a W/A for DP link layer
5407 * CTS 4.2.1.1 regression introduced by CTS specs requirement update.
5409 dc_link_aux_try_to_configure_timeout(link->ddc,
5410 LINK_AUX_DEFAULT_LTTPR_TIMEOUT_PERIOD);
5412 is_lttpr_present = dp_retrieve_lttpr_cap(link);
5413 /* Read DP tunneling information. */
5414 status = dpcd_get_tunneling_device_data(link);
5416 status = core_link_read_dpcd(link, DP_SET_POWER,
5417 &dpcd_power_state, sizeof(dpcd_power_state));
5419 /* Delay 1 ms if AUX CH is in power down state. Based on spec
5420 * section 2.3.1.2, if AUX CH may be powered down due to
5421 * write to DPCD 600h = 2. Sink AUX CH is monitoring differential
5422 * signal and may need up to 1 ms before being able to reply.
5424 if (status != DC_OK || dpcd_power_state == DP_SET_POWER_D3)
5427 dpcd_set_source_specific_data(link);
5428 /* Sink may need to configure internals based on vendor, so allow some
5429 * time before proceeding with possibly vendor specific transactions
5431 msleep(post_oui_delay);
5433 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5434 status = core_link_read_dpcd(
5439 if (status == DC_OK)
5443 if (status != DC_OK) {
5444 dm_error("%s: Read receiver caps dpcd data failed.\n", __func__);
5448 if (!is_lttpr_present)
5449 dc_link_aux_try_to_configure_timeout(link->ddc, LINK_AUX_DEFAULT_TIMEOUT_PERIOD);
5452 union training_aux_rd_interval aux_rd_interval;
5454 aux_rd_interval.raw =
5455 dpcd_data[DP_TRAINING_AUX_RD_INTERVAL];
5457 link->dpcd_caps.ext_receiver_cap_field_present =
5458 aux_rd_interval.bits.EXT_RECEIVER_CAP_FIELD_PRESENT == 1;
5460 if (aux_rd_interval.bits.EXT_RECEIVER_CAP_FIELD_PRESENT == 1) {
5461 uint8_t ext_cap_data[16];
5463 memset(ext_cap_data, '\0', sizeof(ext_cap_data));
5464 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5465 status = core_link_read_dpcd(
5469 sizeof(ext_cap_data));
5470 if (status == DC_OK) {
5471 memcpy(dpcd_data, ext_cap_data, sizeof(dpcd_data));
5475 if (status != DC_OK)
5476 dm_error("%s: Read extend caps data failed, use cap from dpcd 0.\n", __func__);
5480 link->dpcd_caps.dpcd_rev.raw =
5481 dpcd_data[DP_DPCD_REV - DP_DPCD_REV];
5483 if (link->dpcd_caps.ext_receiver_cap_field_present) {
5484 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5485 status = core_link_read_dpcd(
5487 DP_DPRX_FEATURE_ENUMERATION_LIST,
5489 sizeof(dpcd_dprx_data));
5490 if (status == DC_OK)
5494 link->dpcd_caps.dprx_feature.raw = dpcd_dprx_data;
5496 if (status != DC_OK)
5497 dm_error("%s: Read DPRX caps data failed.\n", __func__);
5501 link->dpcd_caps.dprx_feature.raw = 0;
5505 /* Error condition checking...
5506 * It is impossible for Sink to report Max Lane Count = 0.
5507 * It is possible for Sink to report Max Link Rate = 0, if it is
5508 * an eDP device that is reporting specialized link rates in the
5509 * SUPPORTED_LINK_RATE table.
5511 if (dpcd_data[DP_MAX_LANE_COUNT - DP_DPCD_REV] == 0)
5514 ds_port.byte = dpcd_data[DP_DOWNSTREAMPORT_PRESENT -
5517 read_dp_device_vendor_id(link);
5519 get_active_converter_info(ds_port.byte, link);
5521 dp_wa_power_up_0010FA(link, dpcd_data, sizeof(dpcd_data));
5523 down_strm_port_count.raw = dpcd_data[DP_DOWN_STREAM_PORT_COUNT -
5526 link->dpcd_caps.allow_invalid_MSA_timing_param =
5527 down_strm_port_count.bits.IGNORE_MSA_TIMING_PARAM;
5529 link->dpcd_caps.max_ln_count.raw = dpcd_data[
5530 DP_MAX_LANE_COUNT - DP_DPCD_REV];
5532 link->dpcd_caps.max_down_spread.raw = dpcd_data[
5533 DP_MAX_DOWNSPREAD - DP_DPCD_REV];
5535 link->reported_link_cap.lane_count =
5536 link->dpcd_caps.max_ln_count.bits.MAX_LANE_COUNT;
5537 link->reported_link_cap.link_rate = dpcd_data[
5538 DP_MAX_LINK_RATE - DP_DPCD_REV];
5539 link->reported_link_cap.link_spread =
5540 link->dpcd_caps.max_down_spread.bits.MAX_DOWN_SPREAD ?
5541 LINK_SPREAD_05_DOWNSPREAD_30KHZ : LINK_SPREAD_DISABLED;
5543 edp_config_cap.raw = dpcd_data[
5544 DP_EDP_CONFIGURATION_CAP - DP_DPCD_REV];
5545 link->dpcd_caps.panel_mode_edp =
5546 edp_config_cap.bits.ALT_SCRAMBLER_RESET;
5547 link->dpcd_caps.dpcd_display_control_capable =
5548 edp_config_cap.bits.DPCD_DISPLAY_CONTROL_CAPABLE;
5550 link->test_pattern_enabled = false;
5551 link->compliance_test_state.raw = 0;
5553 /* read sink count */
5554 core_link_read_dpcd(link,
5556 &link->dpcd_caps.sink_count.raw,
5557 sizeof(link->dpcd_caps.sink_count.raw));
5559 /* read sink ieee oui */
5560 core_link_read_dpcd(link,
5562 (uint8_t *)(&sink_id),
5565 link->dpcd_caps.sink_dev_id =
5566 (sink_id.ieee_oui[0] << 16) +
5567 (sink_id.ieee_oui[1] << 8) +
5568 (sink_id.ieee_oui[2]);
5571 link->dpcd_caps.sink_dev_id_str,
5572 sink_id.ieee_device_id,
5573 sizeof(sink_id.ieee_device_id));
5575 /* Quirk Apple MBP 2017 15" Retina panel: Wrong DP_MAX_LINK_RATE */
5577 uint8_t str_mbp_2017[] = { 101, 68, 21, 101, 98, 97 };
5579 if ((link->dpcd_caps.sink_dev_id == 0x0010fa) &&
5580 !memcmp(link->dpcd_caps.sink_dev_id_str, str_mbp_2017,
5581 sizeof(str_mbp_2017))) {
5582 link->reported_link_cap.link_rate = 0x0c;
5586 core_link_read_dpcd(
5588 DP_SINK_HW_REVISION_START,
5589 (uint8_t *)&dp_hw_fw_revision,
5590 sizeof(dp_hw_fw_revision));
5592 link->dpcd_caps.sink_hw_revision =
5593 dp_hw_fw_revision.ieee_hw_rev;
5596 link->dpcd_caps.sink_fw_revision,
5597 dp_hw_fw_revision.ieee_fw_rev,
5598 sizeof(dp_hw_fw_revision.ieee_fw_rev));
5600 memset(&link->dpcd_caps.dsc_caps, '\0',
5601 sizeof(link->dpcd_caps.dsc_caps));
5602 memset(&link->dpcd_caps.fec_cap, '\0', sizeof(link->dpcd_caps.fec_cap));
5603 /* Read DSC and FEC sink capabilities if DP revision is 1.4 and up */
5604 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_14) {
5605 status = core_link_read_dpcd(
5608 &link->dpcd_caps.fec_cap.raw,
5609 sizeof(link->dpcd_caps.fec_cap.raw));
5610 status = core_link_read_dpcd(
5613 link->dpcd_caps.dsc_caps.dsc_basic_caps.raw,
5614 sizeof(link->dpcd_caps.dsc_caps.dsc_basic_caps.raw));
5615 #if defined(CONFIG_DRM_AMD_DC_DCN)
5616 if (link->dpcd_caps.dongle_type != DISPLAY_DONGLE_NONE) {
5617 status = core_link_read_dpcd(
5619 DP_DSC_BRANCH_OVERALL_THROUGHPUT_0,
5620 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw,
5621 sizeof(link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw));
5622 DC_LOG_DSC("DSC branch decoder capability is read at link %d", link->link_index);
5623 DC_LOG_DSC("\tBRANCH_OVERALL_THROUGHPUT_0 = 0x%02x",
5624 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_OVERALL_THROUGHPUT_0);
5625 DC_LOG_DSC("\tBRANCH_OVERALL_THROUGHPUT_1 = 0x%02x",
5626 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_OVERALL_THROUGHPUT_1);
5627 DC_LOG_DSC("\tBRANCH_MAX_LINE_WIDTH 0x%02x",
5628 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_MAX_LINE_WIDTH);
5631 status = core_link_read_dpcd(
5633 DP_DSC_BRANCH_OVERALL_THROUGHPUT_0,
5634 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw,
5635 sizeof(link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw));
5639 if (!dpcd_read_sink_ext_caps(link))
5640 link->dpcd_sink_ext_caps.raw = 0;
5642 #if defined(CONFIG_DRM_AMD_DC_DCN)
5643 link->dpcd_caps.channel_coding_cap.raw = dpcd_data[DP_MAIN_LINK_CHANNEL_CODING_CAP - DP_DPCD_REV];
5645 if (link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED) {
5646 DC_LOG_DP2("128b/132b encoding is supported at link %d", link->link_index);
5648 core_link_read_dpcd(link,
5649 DP_128b_132b_SUPPORTED_LINK_RATES,
5650 &link->dpcd_caps.dp_128b_132b_supported_link_rates.raw,
5651 sizeof(link->dpcd_caps.dp_128b_132b_supported_link_rates.raw));
5652 if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR20)
5653 link->reported_link_cap.link_rate = LINK_RATE_UHBR20;
5654 else if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR13_5)
5655 link->reported_link_cap.link_rate = LINK_RATE_UHBR13_5;
5656 else if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR10)
5657 link->reported_link_cap.link_rate = LINK_RATE_UHBR10;
5659 dm_error("%s: Invalid RX 128b_132b_supported_link_rates\n", __func__);
5660 DC_LOG_DP2("128b/132b supported link rates is read at link %d", link->link_index);
5661 DC_LOG_DP2("\tmax 128b/132b link rate support is %d.%d GHz",
5662 link->reported_link_cap.link_rate / 100,
5663 link->reported_link_cap.link_rate % 100);
5665 core_link_read_dpcd(link,
5666 DP_SINK_VIDEO_FALLBACK_FORMATS,
5667 &link->dpcd_caps.fallback_formats.raw,
5668 sizeof(link->dpcd_caps.fallback_formats.raw));
5669 DC_LOG_DP2("sink video fallback format is read at link %d", link->link_index);
5670 if (link->dpcd_caps.fallback_formats.bits.dp_1920x1080_60Hz_24bpp_support)
5671 DC_LOG_DP2("\t1920x1080@60Hz 24bpp fallback format supported");
5672 if (link->dpcd_caps.fallback_formats.bits.dp_1280x720_60Hz_24bpp_support)
5673 DC_LOG_DP2("\t1280x720@60Hz 24bpp fallback format supported");
5674 if (link->dpcd_caps.fallback_formats.bits.dp_1024x768_60Hz_24bpp_support)
5675 DC_LOG_DP2("\t1024x768@60Hz 24bpp fallback format supported");
5676 if (link->dpcd_caps.fallback_formats.raw == 0) {
5677 DC_LOG_DP2("\tno supported fallback formats, assume 1920x1080@60Hz 24bpp is supported");
5678 link->dpcd_caps.fallback_formats.bits.dp_1920x1080_60Hz_24bpp_support = 1;
5681 core_link_read_dpcd(link,
5682 DP_FEC_CAPABILITY_1,
5683 &link->dpcd_caps.fec_cap1.raw,
5684 sizeof(link->dpcd_caps.fec_cap1.raw));
5685 DC_LOG_DP2("FEC CAPABILITY 1 is read at link %d", link->link_index);
5686 if (link->dpcd_caps.fec_cap1.bits.AGGREGATED_ERROR_COUNTERS_CAPABLE)
5687 DC_LOG_DP2("\tFEC aggregated error counters are supported");
5691 /* Connectivity log: detection */
5692 CONN_DATA_DETECT(link, dpcd_data, sizeof(dpcd_data), "Rx Caps: ");
/*
 * Re-read the first 16 bytes of the DPCD receiver capability field over AUX
 * (with retries) and overwrite the cached capability state in
 * link->dpcd_caps / link->reported_link_cap with the fresh values.
 * Mirrors the basic-cap parsing done in retrieve_link_cap().
 */
5697 bool dp_overwrite_extended_receiver_cap(struct dc_link *link)
5699 uint8_t dpcd_data[16];
5700 uint32_t read_dpcd_retry_cnt = 3;
5701 enum dc_status status = DC_ERROR_UNEXPECTED;
5702 union dp_downstream_port_present ds_port = { 0 };
5703 union down_stream_port_count down_strm_port_count;
5704 union edp_configuration_cap edp_config_cap;
/* Retry the AUX read of the base caps up to read_dpcd_retry_cnt times. */
5708 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5709 status = core_link_read_dpcd(
5714 if (status == DC_OK)
/* DPCD revision is byte 0 of the block just read. */
5718 link->dpcd_caps.dpcd_rev.raw =
5719 dpcd_data[DP_DPCD_REV - DP_DPCD_REV];
/* A sink can never legitimately report MAX_LANE_COUNT == 0. */
5721 if (dpcd_data[DP_MAX_LANE_COUNT - DP_DPCD_REV] == 0)
5724 ds_port.byte = dpcd_data[DP_DOWNSTREAMPORT_PRESENT -
5727 get_active_converter_info(ds_port.byte, link);
5729 down_strm_port_count.raw = dpcd_data[DP_DOWN_STREAM_PORT_COUNT -
5732 link->dpcd_caps.allow_invalid_MSA_timing_param =
5733 down_strm_port_count.bits.IGNORE_MSA_TIMING_PARAM;
5735 link->dpcd_caps.max_ln_count.raw = dpcd_data[
5736 DP_MAX_LANE_COUNT - DP_DPCD_REV];
5738 link->dpcd_caps.max_down_spread.raw = dpcd_data[
5739 DP_MAX_DOWNSPREAD - DP_DPCD_REV];
/* Publish lane count / link rate / downspread as the reported caps. */
5741 link->reported_link_cap.lane_count =
5742 link->dpcd_caps.max_ln_count.bits.MAX_LANE_COUNT;
5743 link->reported_link_cap.link_rate = dpcd_data[
5744 DP_MAX_LINK_RATE - DP_DPCD_REV];
5745 link->reported_link_cap.link_spread =
5746 link->dpcd_caps.max_down_spread.bits.MAX_DOWN_SPREAD ?
5747 LINK_SPREAD_05_DOWNSPREAD_30KHZ : LINK_SPREAD_DISABLED;
5749 edp_config_cap.raw = dpcd_data[
5750 DP_EDP_CONFIGURATION_CAP - DP_DPCD_REV];
5751 link->dpcd_caps.panel_mode_edp =
5752 edp_config_cap.bits.ALT_SCRAMBLER_RESET;
5753 link->dpcd_caps.dpcd_display_control_capable =
5754 edp_config_cap.bits.DPCD_DISPLAY_CONTROL_CAPABLE;
/*
 * Thin wrapper: detect DP sink capabilities by (re)reading the full
 * DPCD capability set via retrieve_link_cap().
 */
5759 bool detect_dp_sink_caps(struct dc_link *link)
5761 return retrieve_link_cap(link);
5763 /* dc init_hw has power encoder using default
5764 * signal for connector. For native DP, no
5765 * need to power up encoder again. If not native
5766 * DP, hw_init may need check signal or power up
5769 /* TODO save sink caps in link->sink */
/*
 * Translate a per-lane link rate expressed in kHz into the
 * enum dc_link_rate multiplier value (stored in units of 0.27 Gbps per
 * lane). Rates that do not match a known entry map to LINK_RATE_UNKNOWN.
 */
5772 static enum dc_link_rate linkRateInKHzToLinkRateMultiplier(uint32_t link_rate_in_khz)
5774 enum dc_link_rate link_rate;
5775 // LinkRate is normally stored as a multiplier of 0.27 Gbps per lane. Do the translation.
5776 switch (link_rate_in_khz) {
5778 link_rate = LINK_RATE_LOW; // Rate_1 (RBR) - 1.62 Gbps/Lane
5781 link_rate = LINK_RATE_RATE_2; // Rate_2 - 2.16 Gbps/Lane
5784 link_rate = LINK_RATE_RATE_3; // Rate_3 - 2.43 Gbps/Lane
5787 link_rate = LINK_RATE_HIGH; // Rate_4 (HBR) - 2.70 Gbps/Lane
5790 link_rate = LINK_RATE_RBR2; // Rate_5 (RBR2) - 3.24 Gbps/Lane
5793 link_rate = LINK_RATE_RATE_6; // Rate_6 - 4.32 Gbps/Lane
5796 link_rate = LINK_RATE_HIGH2; // Rate_7 (HBR2) - 5.40 Gbps/Lane
5799 link_rate = LINK_RATE_HIGH3; // Rate_8 (HBR3) - 8.10 Gbps/Lane
/* Fallback for unrecognized rates. */
5802 link_rate = LINK_RATE_UNKNOWN;
/*
 * Detect eDP-specific sink capabilities on top of the regular DP caps:
 * the SUPPORTED_LINK_RATES table (eDP v1.4+, DPCD 00010h-0001Fh), the
 * backlight adjustment capability, and the default AUX brightness.
 */
5808 void detect_edp_sink_caps(struct dc_link *link)
5810 uint8_t supported_link_rates[16];
5812 uint32_t link_rate_in_khz;
5813 enum dc_link_rate link_rate = LINK_RATE_UNKNOWN;
5814 uint8_t backlight_adj_cap;
5816 retrieve_link_cap(link);
5817 link->dpcd_caps.edp_supported_link_rates_count = 0;
5818 memset(supported_link_rates, 0, sizeof(supported_link_rates));
5821 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
5822 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
5824 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_13 &&
5825 (link->dc->debug.optimize_edp_link_rate ||
5826 link->reported_link_cap.link_rate == LINK_RATE_UNKNOWN)) {
5827 // Read DPCD 00010h - 0001Fh 16 bytes at one shot
5828 core_link_read_dpcd(link, DP_SUPPORTED_LINK_RATES,
5829 supported_link_rates, sizeof(supported_link_rates));
/* Each table entry is a little-endian 16-bit value in 200 kHz units. */
5831 for (entry = 0; entry < 16; entry += 2) {
5832 // DPCD register reports per-lane link rate = 16-bit link rate capability
5833 // value X 200 kHz. Need multiplier to find link rate in kHz.
5834 link_rate_in_khz = (supported_link_rates[entry+1] * 0x100 +
5835 supported_link_rates[entry]) * 200;
/* A zero entry terminates the table; only non-zero rates are recorded. */
5837 if (link_rate_in_khz != 0) {
5838 link_rate = linkRateInKHzToLinkRateMultiplier(link_rate_in_khz);
5839 link->dpcd_caps.edp_supported_link_rates[link->dpcd_caps.edp_supported_link_rates_count] = link_rate;
5840 link->dpcd_caps.edp_supported_link_rates_count++;
/* Track the highest rate seen as the reported link rate. */
5842 if (link->reported_link_cap.link_rate < link_rate)
5843 link->reported_link_cap.link_rate = link_rate;
5847 link->verified_link_cap = link->reported_link_cap;
5849 core_link_read_dpcd(link, DP_EDP_BACKLIGHT_ADJUSTMENT_CAP,
5850 &backlight_adj_cap, sizeof(backlight_adj_cap));
5852 link->dpcd_caps.dynamic_backlight_capable_edp =
5853 (backlight_adj_cap & DP_EDP_DYNAMIC_BACKLIGHT_CAP) ? true:false;
5855 dc_link_set_default_brightness_aux(link);
5858 void dc_link_dp_enable_hpd(const struct dc_link *link)
5860 struct link_encoder *encoder = link->link_enc;
5862 if (encoder != NULL && encoder->funcs->enable_hpd != NULL)
5863 encoder->funcs->enable_hpd(encoder);
5866 void dc_link_dp_disable_hpd(const struct dc_link *link)
5868 struct link_encoder *encoder = link->link_enc;
5870 if (encoder != NULL && encoder->funcs->enable_hpd != NULL)
5871 encoder->funcs->disable_hpd(encoder);
5874 static bool is_dp_phy_pattern(enum dp_test_pattern test_pattern)
5876 if ((DP_TEST_PATTERN_PHY_PATTERN_BEGIN <= test_pattern &&
5877 test_pattern <= DP_TEST_PATTERN_PHY_PATTERN_END) ||
5878 test_pattern == DP_TEST_PATTERN_VIDEO_MODE)
/*
 * Program a CRTC-generated test pattern for the stream on pipe_ctx.
 *
 * First maps the DP test pattern to the controller's pattern enum, then
 * either uses the timing generator's legacy set_test_pattern hook or the
 * newer display-pattern-generator (DPG) hw sequencer path, programming
 * each ODM pipe when the stream is split. Bit-depth reduction is disabled
 * while a pattern runs and restored for video mode.
 */
5884 static void set_crtc_test_pattern(struct dc_link *link,
5885 struct pipe_ctx *pipe_ctx,
5886 enum dp_test_pattern test_pattern,
5887 enum dp_test_pattern_color_space test_pattern_color_space)
5889 enum controller_dp_test_pattern controller_test_pattern;
5890 enum dc_color_depth color_depth = pipe_ctx->
5891 stream->timing.display_color_depth;
5892 struct bit_depth_reduction_params params;
5893 struct output_pixel_processor *opp = pipe_ctx->stream_res.opp;
/* Active width/height include the borders on both sides. */
5894 int width = pipe_ctx->stream->timing.h_addressable +
5895 pipe_ctx->stream->timing.h_border_left +
5896 pipe_ctx->stream->timing.h_border_right;
5897 int height = pipe_ctx->stream->timing.v_addressable +
5898 pipe_ctx->stream->timing.v_border_bottom +
5899 pipe_ctx->stream->timing.v_border_top;
5901 memset(¶ms, 0, sizeof(params));
/* Map the DP test pattern to the controller pattern enum. */
5903 switch (test_pattern) {
5904 case DP_TEST_PATTERN_COLOR_SQUARES:
5905 controller_test_pattern =
5906 CONTROLLER_DP_TEST_PATTERN_COLORSQUARES;
5908 case DP_TEST_PATTERN_COLOR_SQUARES_CEA:
5909 controller_test_pattern =
5910 CONTROLLER_DP_TEST_PATTERN_COLORSQUARES_CEA;
5912 case DP_TEST_PATTERN_VERTICAL_BARS:
5913 controller_test_pattern =
5914 CONTROLLER_DP_TEST_PATTERN_VERTICALBARS;
5916 case DP_TEST_PATTERN_HORIZONTAL_BARS:
5917 controller_test_pattern =
5918 CONTROLLER_DP_TEST_PATTERN_HORIZONTALBARS;
5920 case DP_TEST_PATTERN_COLOR_RAMP:
5921 controller_test_pattern =
5922 CONTROLLER_DP_TEST_PATTERN_COLORRAMP;
5925 controller_test_pattern =
5926 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE;
/* Apply the pattern; grouped cases share the CRTC-pattern path. */
5930 switch (test_pattern) {
5931 case DP_TEST_PATTERN_COLOR_SQUARES:
5932 case DP_TEST_PATTERN_COLOR_SQUARES_CEA:
5933 case DP_TEST_PATTERN_VERTICAL_BARS:
5934 case DP_TEST_PATTERN_HORIZONTAL_BARS:
5935 case DP_TEST_PATTERN_COLOR_RAMP:
5937 /* disable bit depth reduction */
5938 pipe_ctx->stream->bit_depth_params = params;
5939 opp->funcs->opp_program_bit_depth_reduction(opp, ¶ms);
5940 if (pipe_ctx->stream_res.tg->funcs->set_test_pattern)
5941 pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg,
5942 controller_test_pattern, color_depth);
5943 else if (link->dc->hwss.set_disp_pattern_generator) {
5944 struct pipe_ctx *odm_pipe;
5945 enum controller_dp_color_space controller_color_space;
5948 int dpg_width = width;
/* DPG needs an explicit color space; undefined is an error. */
5950 switch (test_pattern_color_space) {
5951 case DP_TEST_PATTERN_COLOR_SPACE_RGB:
5952 controller_color_space = CONTROLLER_DP_COLOR_SPACE_RGB;
5954 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR601:
5955 controller_color_space = CONTROLLER_DP_COLOR_SPACE_YCBCR601;
5957 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR709:
5958 controller_color_space = CONTROLLER_DP_COLOR_SPACE_YCBCR709;
5960 case DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED:
5962 controller_color_space = CONTROLLER_DP_COLOR_SPACE_UDEFINED;
5963 DC_LOG_ERROR("%s: Color space must be defined for test pattern", __func__);
/* With ODM combine, each pipe's DPG covers width / opp_cnt. */
5968 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe)
5970 dpg_width = width / opp_cnt;
5973 link->dc->hwss.set_disp_pattern_generator(link->dc,
5975 controller_test_pattern,
5976 controller_color_space,
/* Program each chained ODM pipe the same way. */
5983 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe) {
5984 struct output_pixel_processor *odm_opp = odm_pipe->stream_res.opp;
5986 odm_opp->funcs->opp_program_bit_depth_reduction(odm_opp, ¶ms);
5987 link->dc->hwss.set_disp_pattern_generator(link->dc,
5989 controller_test_pattern,
5990 controller_color_space,
6001 case DP_TEST_PATTERN_VIDEO_MODE:
6003 /* restore bitdepth reduction */
6004 resource_build_bit_depth_reduction_params(pipe_ctx->stream, ¶ms);
6005 pipe_ctx->stream->bit_depth_params = params;
6006 opp->funcs->opp_program_bit_depth_reduction(opp, ¶ms);
6007 if (pipe_ctx->stream_res.tg->funcs->set_test_pattern)
6008 pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg,
6009 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
6011 else if (link->dc->hwss.set_disp_pattern_generator) {
6012 struct pipe_ctx *odm_pipe;
6016 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe)
6019 dpg_width = width / opp_cnt;
6020 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe) {
6021 struct output_pixel_processor *odm_opp = odm_pipe->stream_res.opp;
6023 odm_opp->funcs->opp_program_bit_depth_reduction(odm_opp, ¶ms);
6024 link->dc->hwss.set_disp_pattern_generator(link->dc,
6026 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
6027 CONTROLLER_DP_COLOR_SPACE_UDEFINED,
6034 link->dc->hwss.set_disp_pattern_generator(link->dc,
6036 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
6037 CONTROLLER_DP_COLOR_SPACE_UDEFINED,
/*
 * Set a DP compliance test pattern on the given link.
 *
 * Locates the top pipe driving this link, then either:
 *  - resets to video mode (un-blank stream, clear test state), or
 *  - programs a PHY test pattern: DPCD lane settings (with an optional
 *    vendor-specific LTTPR workaround), stream blank, HW pattern, and
 *    the sink-side DPCD pattern-request registers (DP 1.2+ uses
 *    LINK_QUAL_LANEx_SET; DP 1.1a and earlier uses
 *    TRAINING_PATTERN_SET.LINK_QUAL_PATTERN_SET), or
 *  - programs a CRTC (controller-generated) pattern, updating MSA/VSC
 *    colorimetry under a double-buffer lock.
 *
 * p_custom_pattern/cust_pattern_size carry the payload for custom
 * (80-bit / 264-bit / square-pulse) patterns.
 */
6052 bool dc_link_dp_set_test_pattern(
6053 struct dc_link *link,
6054 enum dp_test_pattern test_pattern,
6055 enum dp_test_pattern_color_space test_pattern_color_space,
6056 const struct link_training_settings *p_link_settings,
6057 const unsigned char *p_custom_pattern,
6058 unsigned int cust_pattern_size)
6060 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
6061 struct pipe_ctx *pipe_ctx = NULL;
6064 unsigned char link_qual_pattern[LANE_COUNT_DP_MAX] = {0};
6065 union dpcd_training_pattern training_pattern;
6066 enum dpcd_phy_test_patterns pattern;
6068 memset(&training_pattern, 0, sizeof(training_pattern));
/* Find the top (non-split) pipe that drives this link. */
6070 for (i = 0; i < MAX_PIPES; i++) {
6071 if (pipes[i].stream == NULL)
6074 if (pipes[i].stream->link == link && !pipes[i].top_pipe && !pipes[i].prev_odm_pipe) {
6075 pipe_ctx = &pipes[i];
6080 if (pipe_ctx == NULL)
6083 /* Reset CRTC Test Pattern if it is currently running and request is VideoMode */
6084 if (link->test_pattern_enabled && test_pattern ==
6085 DP_TEST_PATTERN_VIDEO_MODE) {
6086 /* Set CRTC Test Pattern */
6087 set_crtc_test_pattern(link, pipe_ctx, test_pattern, test_pattern_color_space);
6088 dp_set_hw_test_pattern(link, &pipe_ctx->link_res, test_pattern,
6089 (uint8_t *)p_custom_pattern,
6090 (uint32_t)cust_pattern_size);
6092 /* Unblank Stream */
6093 link->dc->hwss.unblank_stream(
6095 &link->verified_link_cap);
6096 /* TODO:m_pHwss->MuteAudioEndpoint
6097 * (pPathMode->pDisplayPath, false);
6100 /* Reset Test Pattern state */
6101 link->test_pattern_enabled = false;
6106 /* Check for PHY Test Patterns */
6107 if (is_dp_phy_pattern(test_pattern)) {
6108 /* Set DPCD Lane Settings before running test pattern */
6109 if (p_link_settings != NULL) {
/* Vendor-specific LTTPR workaround path for fixed-VS retimers. */
6110 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
6111 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
6112 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
6113 dpcd_set_lane_settings(link, p_link_settings, DPRX);
6114 vendor_specific_lttpr_wa_five(
6116 p_link_settings->dpcd_lane_settings,
6117 p_link_settings->link_settings.lane_count);
6119 dp_set_hw_lane_settings(link, &pipe_ctx->link_res, p_link_settings, DPRX);
6120 dpcd_set_lane_settings(link, p_link_settings, DPRX);
6124 /* Blank stream if running test pattern */
6125 if (test_pattern != DP_TEST_PATTERN_VIDEO_MODE) {
6128 * MuteAudioEndpoint(pPathMode->pDisplayPath, true);
6131 pipes->stream_res.stream_enc->funcs->dp_blank(link, pipe_ctx->stream_res.stream_enc);
6134 dp_set_hw_test_pattern(link, &pipe_ctx->link_res, test_pattern,
6135 (uint8_t *)p_custom_pattern,
6136 (uint32_t)cust_pattern_size);
6138 if (test_pattern != DP_TEST_PATTERN_VIDEO_MODE) {
6139 /* Set Test Pattern state */
6140 link->test_pattern_enabled = true;
6141 if (p_link_settings != NULL)
6142 dpcd_set_link_settings(link,
/* Translate the DP pattern into the DPCD PHY pattern code. */
6146 switch (test_pattern) {
6147 case DP_TEST_PATTERN_VIDEO_MODE:
6148 pattern = PHY_TEST_PATTERN_NONE;
6150 case DP_TEST_PATTERN_D102:
6151 pattern = PHY_TEST_PATTERN_D10_2;
6153 case DP_TEST_PATTERN_SYMBOL_ERROR:
6154 pattern = PHY_TEST_PATTERN_SYMBOL_ERROR;
6156 case DP_TEST_PATTERN_PRBS7:
6157 pattern = PHY_TEST_PATTERN_PRBS7;
6159 case DP_TEST_PATTERN_80BIT_CUSTOM:
6160 pattern = PHY_TEST_PATTERN_80BIT_CUSTOM;
6162 case DP_TEST_PATTERN_CP2520_1:
6163 pattern = PHY_TEST_PATTERN_CP2520_1;
6165 case DP_TEST_PATTERN_CP2520_2:
6166 pattern = PHY_TEST_PATTERN_CP2520_2;
6168 case DP_TEST_PATTERN_CP2520_3:
6169 pattern = PHY_TEST_PATTERN_CP2520_3;
6171 #if defined(CONFIG_DRM_AMD_DC_DCN)
6172 case DP_TEST_PATTERN_128b_132b_TPS1:
6173 pattern = PHY_TEST_PATTERN_128b_132b_TPS1;
6175 case DP_TEST_PATTERN_128b_132b_TPS2:
6176 pattern = PHY_TEST_PATTERN_128b_132b_TPS2;
6178 case DP_TEST_PATTERN_PRBS9:
6179 pattern = PHY_TEST_PATTERN_PRBS9;
6181 case DP_TEST_PATTERN_PRBS11:
6182 pattern = PHY_TEST_PATTERN_PRBS11;
6184 case DP_TEST_PATTERN_PRBS15:
6185 pattern = PHY_TEST_PATTERN_PRBS15;
6187 case DP_TEST_PATTERN_PRBS23:
6188 pattern = PHY_TEST_PATTERN_PRBS23;
6190 case DP_TEST_PATTERN_PRBS31:
6191 pattern = PHY_TEST_PATTERN_PRBS31;
6193 case DP_TEST_PATTERN_264BIT_CUSTOM:
6194 pattern = PHY_TEST_PATTERN_264BIT_CUSTOM;
6196 case DP_TEST_PATTERN_SQUARE_PULSE:
6197 pattern = PHY_TEST_PATTERN_SQUARE_PULSE;
6204 if (test_pattern == DP_TEST_PATTERN_VIDEO_MODE
6205 /*TODO:&& !pPathMode->pDisplayPath->IsTargetPoweredOn()*/)
/* Notify the sink of the pattern, per its DPCD revision. */
6208 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
6209 #if defined(CONFIG_DRM_AMD_DC_DCN)
6210 if (test_pattern == DP_TEST_PATTERN_SQUARE_PULSE)
6211 core_link_write_dpcd(link,
6212 DP_LINK_SQUARE_PATTERN,
6217 /* tell receiver that we are sending qualification
6218 * pattern DP 1.2 or later - DP receiver's link quality
6219 * pattern is set using DPCD LINK_QUAL_LANEx_SET
6220 * register (0x10B~0x10E)\
6222 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++)
6223 link_qual_pattern[lane] =
6224 (unsigned char)(pattern);
6226 core_link_write_dpcd(link,
6227 DP_LINK_QUAL_LANE0_SET,
6229 sizeof(link_qual_pattern));
6230 } else if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_10 ||
6231 link->dpcd_caps.dpcd_rev.raw == 0) {
6232 /* tell receiver that we are sending qualification
6233 * pattern DP 1.1a or earlier - DP receiver's link
6234 * quality pattern is set using
6235 * DPCD TRAINING_PATTERN_SET -> LINK_QUAL_PATTERN_SET
6236 * register (0x102). We will use v_1.3 when we are
6237 * setting test pattern for DP 1.1.
6239 core_link_read_dpcd(link, DP_TRAINING_PATTERN_SET,
6240 &training_pattern.raw,
6241 sizeof(training_pattern));
6242 training_pattern.v1_3.LINK_QUAL_PATTERN_SET = pattern;
6243 core_link_write_dpcd(link, DP_TRAINING_PATTERN_SET,
6244 &training_pattern.raw,
6245 sizeof(training_pattern));
/* CRTC-pattern path: derive the stream color space from the request. */
6248 enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
6250 switch (test_pattern_color_space) {
6251 case DP_TEST_PATTERN_COLOR_SPACE_RGB:
6252 color_space = COLOR_SPACE_SRGB;
6253 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
6254 color_space = COLOR_SPACE_SRGB_LIMITED;
6257 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR601:
6258 color_space = COLOR_SPACE_YCBCR601;
6259 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
6260 color_space = COLOR_SPACE_YCBCR601_LIMITED;
6262 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR709:
6263 color_space = COLOR_SPACE_YCBCR709;
6264 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
6265 color_space = COLOR_SPACE_YCBCR709_LIMITED;
/* Take the double-buffer lock (via DMUB when it owns HW locks). */
6271 if (pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_enable) {
6272 if (pipe_ctx->stream && should_use_dmub_lock(pipe_ctx->stream->link)) {
6273 union dmub_hw_lock_flags hw_locks = { 0 };
6274 struct dmub_hw_lock_inst_flags inst_flags = { 0 };
6276 hw_locks.bits.lock_dig = 1;
6277 inst_flags.dig_inst = pipe_ctx->stream_res.tg->inst;
6279 dmub_hw_lock_mgr_cmd(link->ctx->dmub_srv,
6284 pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_enable(
6285 pipe_ctx->stream_res.tg);
6288 pipe_ctx->stream_res.tg->funcs->lock(pipe_ctx->stream_res.tg);
6289 /* update MSA to requested color space */
6290 pipe_ctx->stream_res.stream_enc->funcs->dp_set_stream_attribute(pipe_ctx->stream_res.stream_enc,
6291 &pipe_ctx->stream->timing,
6293 pipe_ctx->stream->use_vsc_sdp_for_colorimetry,
6294 link->dpcd_caps.dprx_feature.bits.SST_SPLIT_SDP_CAP);
6296 if (pipe_ctx->stream->use_vsc_sdp_for_colorimetry) {
6297 if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
6298 pipe_ctx->stream->vsc_infopacket.sb[17] |= (1 << 7); // sb17 bit 7 Dynamic Range: 0 = VESA range, 1 = CTA range
6300 pipe_ctx->stream->vsc_infopacket.sb[17] &= ~(1 << 7);
6301 resource_build_info_frame(pipe_ctx);
6302 link->dc->hwss.update_info_frame(pipe_ctx);
6306 set_crtc_test_pattern(link, pipe_ctx, test_pattern, test_pattern_color_space);
6307 pipe_ctx->stream_res.tg->funcs->unlock(pipe_ctx->stream_res.tg);
/* Wait for the programming to latch on a VACTIVE boundary. */
6308 pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
6309 CRTC_STATE_VACTIVE);
6310 pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
6312 pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
6313 CRTC_STATE_VACTIVE);
6315 if (pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_disable) {
6316 if (pipe_ctx->stream && should_use_dmub_lock(pipe_ctx->stream->link)) {
6317 union dmub_hw_lock_flags hw_locks = { 0 };
6318 struct dmub_hw_lock_inst_flags inst_flags = { 0 };
6320 hw_locks.bits.lock_dig = 1;
6321 inst_flags.dig_inst = pipe_ctx->stream_res.tg->inst;
6323 dmub_hw_lock_mgr_cmd(link->ctx->dmub_srv,
6328 pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_disable(
6329 pipe_ctx->stream_res.tg);
6332 /* Set Test Pattern state */
6333 link->test_pattern_enabled = true;
6339 void dp_enable_mst_on_sink(struct dc_link *link, bool enable)
6341 unsigned char mstmCntl;
6343 core_link_read_dpcd(link, DP_MSTM_CTRL, &mstmCntl, 1);
6345 mstmCntl |= DP_MST_EN;
6347 mstmCntl &= (~DP_MST_EN);
6349 core_link_write_dpcd(link, DP_MSTM_CTRL, &mstmCntl, 1);
/*
 * Program the sink's eDP panel mode bit (EDP_CONFIGURATION_SET DPCD) to
 * match the requested panel mode. Only writes the register when the
 * current sink setting differs from the desired one.
 */
6352 void dp_set_panel_mode(struct dc_link *link, enum dp_panel_mode panel_mode)
6354 union dpcd_edp_config edp_config_set;
6355 bool panel_mode_edp = false;
6357 memset(&edp_config_set, '\0', sizeof(union dpcd_edp_config));
6359 if (panel_mode != DP_PANEL_MODE_DEFAULT) {
/* EDP and SPECIAL both use the eDP alternate-scrambler mode. */
6361 switch (panel_mode) {
6362 case DP_PANEL_MODE_EDP:
6363 case DP_PANEL_MODE_SPECIAL:
6364 panel_mode_edp = true;
6371 /*set edp panel mode in receiver*/
6372 core_link_read_dpcd(
6374 DP_EDP_CONFIGURATION_SET,
6375 &edp_config_set.raw,
6376 sizeof(edp_config_set.raw));
/* Only write back when the sink's current setting differs. */
6378 if (edp_config_set.bits.PANEL_MODE_EDP
6379 != panel_mode_edp) {
6380 enum dc_status result;
6382 edp_config_set.bits.PANEL_MODE_EDP =
6384 result = core_link_write_dpcd(
6386 DP_EDP_CONFIGURATION_SET,
6387 &edp_config_set.raw,
6388 sizeof(edp_config_set.raw));
6390 ASSERT(result == DC_OK);
6393 DC_LOG_DETECTION_DP_CAPS("Link: %d eDP panel mode supported: %d "
6394 "eDP panel mode enabled: %d \n",
6396 link->dpcd_caps.panel_mode_edp,
/*
 * Determine which panel mode to use for this link:
 *  - SPECIAL for known Travis/Almond VGA-LVDS converter branch devices
 *    (matched by branch device ID plus reversed-string device name),
 *  - EDP for panels reporting eDP alt-scrambler support on an eDP or
 *    internal-display DP connector,
 *  - DEFAULT otherwise.
 */
6400 enum dp_panel_mode dp_get_panel_mode(struct dc_link *link)
6402 /* We need to explicitly check that connector
6403 * is not DP. Some Travis_VGA get reported
6404 * by video bios as DP.
6406 if (link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT) {
6408 switch (link->dpcd_caps.branch_dev_id) {
6409 case DP_BRANCH_DEVICE_ID_0022B9:
6410 /* alternate scrambler reset is required for Travis
6411 * for the case when external chip does not
6412 * provide sink device id, alternate scrambler
6413 * scheme will be overriden later by querying
6417 link->dpcd_caps.branch_dev_name,
6418 DP_VGA_LVDS_CONVERTER_ID_2,
6421 branch_dev_name)) == 0) {
6422 return DP_PANEL_MODE_SPECIAL;
6425 case DP_BRANCH_DEVICE_ID_00001A:
6426 /* alternate scrambler reset is required for Travis
6427 * for the case when external chip does not provide
6428 * sink device id, alternate scrambler scheme will
6429 * be overriden later by querying Encoder feature
6431 if (strncmp(link->dpcd_caps.branch_dev_name,
6432 DP_VGA_LVDS_CONVERTER_ID_3,
6435 branch_dev_name)) == 0) {
6436 return DP_PANEL_MODE_SPECIAL;
/* eDP panel mode for eDP connectors or internal DP displays. */
6444 if (link->dpcd_caps.panel_mode_edp &&
6445 (link->connector_signal == SIGNAL_TYPE_EDP ||
6446 (link->connector_signal == SIGNAL_TYPE_DISPLAY_PORT &&
6447 link->is_internal_display))) {
6448 return DP_PANEL_MODE_EDP;
6451 return DP_PANEL_MODE_DEFAULT;
6454 enum dc_status dp_set_fec_ready(struct dc_link *link, const struct link_resource *link_res, bool ready)
6456 /* FEC has to be "set ready" before the link training.
6457 * The policy is to always train with FEC
6458 * if the sink supports it and leave it enabled on link.
6459 * If FEC is not supported, disable it.
6461 struct link_encoder *link_enc = NULL;
6462 enum dc_status status = DC_OK;
6463 uint8_t fec_config = 0;
6465 /* Access link encoder based on whether it is statically
6466 * or dynamically assigned to a link.
6468 if (link->is_dig_mapping_flexible &&
6469 link->dc->res_pool->funcs->link_encs_assign)
6470 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
6472 link_enc = link->link_enc;
/* Bail out early when policy says FEC must not be enabled on this link. */
6475 if (!dc_link_should_enable_fec(link))
/* Ready path: write FEC_CONFIGURATION to the sink, then arm the encoder. */
6478 if (link_enc->funcs->fec_set_ready &&
6479 link->dpcd_caps.fec_cap.bits.FEC_CAPABLE) {
6482 status = core_link_write_dpcd(link,
6483 DP_FEC_CONFIGURATION,
6485 sizeof(fec_config));
6486 if (status == DC_OK) {
6487 link_enc->funcs->fec_set_ready(link_enc, true);
6488 link->fec_state = dc_link_fec_ready;
/* DPCD write failed: leave encoder FEC disarmed and record the error. */
6490 link_enc->funcs->fec_set_ready(link_enc, false);
6491 link->fec_state = dc_link_fec_not_ready;
6492 dm_error("dpcd write failed to set fec_ready");
/* Un-ready path: clear sink config and disarm the encoder. */
6494 } else if (link->fec_state == dc_link_fec_ready) {
6496 status = core_link_write_dpcd(link,
6497 DP_FEC_CONFIGURATION,
6499 sizeof(fec_config));
6500 link_enc->funcs->fec_set_ready(link_enc, false);
6501 link->fec_state = dc_link_fec_not_ready;
/*
 * Enable or disable FEC on the link encoder. Only acts when the link's
 * FEC state machine is in the matching prior state (ready -> enabled,
 * enabled -> ready) and the sink reports FEC capability.
 */
6508 void dp_set_fec_enable(struct dc_link *link, bool enable)
6510 struct link_encoder *link_enc = NULL;
6512 /* Access link encoder based on whether it is statically
6513 * or dynamically assigned to a link.
6515 if (link->is_dig_mapping_flexible &&
6516 link->dc->res_pool->funcs->link_encs_assign)
6517 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
6519 link_enc = link->link_enc;
6522 if (!dc_link_should_enable_fec(link))
6525 if (link_enc->funcs->fec_set_enable &&
6526 link->dpcd_caps.fec_cap.bits.FEC_CAPABLE) {
6527 if (link->fec_state == dc_link_fec_ready && enable) {
6528 /* Accord to DP spec, FEC enable sequence can first
6529 * be transmitted anytime after 1000 LL codes have
6530 * been transmitted on the link after link training
6531 * completion. Using 1 lane RBR should have the maximum
6532 * time for transmitting 1000 LL codes which is 6.173 us.
6533 * So use 7 microseconds delay instead.
6536 link_enc->funcs->fec_set_enable(link_enc, true);
6537 link->fec_state = dc_link_fec_enabled;
6538 } else if (link->fec_state == dc_link_fec_enabled && !enable) {
6539 link_enc->funcs->fec_set_enable(link_enc, false);
6540 link->fec_state = dc_link_fec_ready;
/*
 * Resolve the link encoder for a link: the statically assigned one by
 * default, or, when DIG mapping is flexible, the dynamically assigned
 * encoder (falling back to the next available encoder when none is
 * currently assigned).
 */
6545 struct link_encoder *dp_get_link_enc(struct dc_link *link)
6547 struct link_encoder *link_enc;
6549 link_enc = link->link_enc;
6550 if (link->is_dig_mapping_flexible &&
6551 link->dc->res_pool->funcs->link_encs_assign) {
6552 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc,
6554 if (!link->link_enc)
6555 link_enc = link_enc_cfg_get_next_avail_link_enc(
/*
 * Write AMD source-specific data to the sink's DPCD SOURCE_OUI region:
 * the AMD IEEE OUI signature (only when not already present), the device
 * ID / DCE version block, and, on DCN 2.0+, the minimum-horizontal-blank
 * hint (DPCD 00340h) when the sink supports that write. When the DC has
 * a valid vendor signature, that raw signature is written instead.
 */
6562 void dpcd_set_source_specific_data(struct dc_link *link)
6564 if (!link->dc->vendor_signature.is_valid) {
6565 enum dc_status __maybe_unused result_write_min_hblank = DC_NOT_SUPPORTED;
6566 struct dpcd_amd_signature amd_signature = {0};
6567 struct dpcd_amd_device_id amd_device_id = {0};
6569 amd_device_id.device_id_byte1 =
6570 (uint8_t)(link->ctx->asic_id.chip_id);
6571 amd_device_id.device_id_byte2 =
6572 (uint8_t)(link->ctx->asic_id.chip_id >> 8);
6573 amd_device_id.dce_version =
6574 (uint8_t)(link->ctx->dce_version);
6575 amd_device_id.dal_version_byte1 = 0x0; // needed? where to get?
6576 amd_device_id.dal_version_byte2 = 0x0; // needed? where to get?
/* Check whether the AMD OUI (00-00-1A) is already programmed. */
6578 core_link_read_dpcd(link, DP_SOURCE_OUI,
6579 (uint8_t *)(&amd_signature),
6580 sizeof(amd_signature));
6582 if (!((amd_signature.AMD_IEEE_TxSignature_byte1 == 0x0) &&
6583 (amd_signature.AMD_IEEE_TxSignature_byte2 == 0x0) &&
6584 (amd_signature.AMD_IEEE_TxSignature_byte3 == 0x1A))) {
6586 amd_signature.AMD_IEEE_TxSignature_byte1 = 0x0;
6587 amd_signature.AMD_IEEE_TxSignature_byte2 = 0x0;
6588 amd_signature.AMD_IEEE_TxSignature_byte3 = 0x1A;
6590 core_link_write_dpcd(link, DP_SOURCE_OUI,
6591 (uint8_t *)(&amd_signature),
6592 sizeof(amd_signature));
/* Device ID block lives right after the 3-byte OUI. */
6595 core_link_write_dpcd(link, DP_SOURCE_OUI+0x03,
6596 (uint8_t *)(&amd_device_id),
6597 sizeof(amd_device_id));
6599 if (link->ctx->dce_version >= DCN_VERSION_2_0 &&
6600 link->dc->caps.min_horizontal_blanking_period != 0) {
6602 uint8_t hblank_size = (uint8_t)link->dc->caps.min_horizontal_blanking_period;
6604 if (link->preferred_link_setting.dpcd_source_device_specific_field_support) {
6605 result_write_min_hblank = core_link_write_dpcd(link,
6606 DP_SOURCE_MINIMUM_HBLANK_SUPPORTED, (uint8_t *)(&hblank_size),
6607 sizeof(hblank_size));
/* Sink rejected the write: remember so we skip it next time. */
6609 if (result_write_min_hblank == DC_ERROR_UNEXPECTED)
6610 link->preferred_link_setting.dpcd_source_device_specific_field_support = false;
6612 DC_LOG_DC("Sink device does not support 00340h DPCD write. Skipping on purpose.\n");
6616 DC_TRACE_LEVEL_MESSAGE(DAL_TRACE_LEVEL_INFORMATION,
6617 WPP_BIT_FLAG_DC_DETECTION_DP_CAPS,
6618 "result=%u link_index=%u enum dce_version=%d DPCD=0x%04X min_hblank=%u branch_dev_id=0x%x branch_dev_name='%c%c%c%c%c%c'",
6619 result_write_min_hblank,
6621 link->ctx->dce_version,
6622 DP_SOURCE_MINIMUM_HBLANK_SUPPORTED,
6623 link->dc->caps.min_horizontal_blanking_period,
6624 link->dpcd_caps.branch_dev_id,
6625 link->dpcd_caps.branch_dev_name[0],
6626 link->dpcd_caps.branch_dev_name[1],
6627 link->dpcd_caps.branch_dev_name[2],
6628 link->dpcd_caps.branch_dev_name[3],
6629 link->dpcd_caps.branch_dev_name[4],
6630 link->dpcd_caps.branch_dev_name[5]);
/* Valid vendor signature overrides the default AMD OUI programming. */
6632 core_link_write_dpcd(link, DP_SOURCE_OUI,
6633 link->dc->vendor_signature.data.raw,
6634 sizeof(link->dc->vendor_signature.data.raw));
6638 bool dc_link_set_backlight_level_nits(struct dc_link *link,
6640 uint32_t backlight_millinits,
6641 uint32_t transition_time_in_ms)
6643 struct dpcd_source_backlight_set dpcd_backlight_set;
6644 uint8_t backlight_control = isHDR ? 1 : 0;
6646 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6647 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6650 // OLEDs have no PWM, they can only use AUX
6651 if (link->dpcd_sink_ext_caps.bits.oled == 1)
6652 backlight_control = 1;
6654 *(uint32_t *)&dpcd_backlight_set.backlight_level_millinits = backlight_millinits;
6655 *(uint16_t *)&dpcd_backlight_set.backlight_transition_time_ms = (uint16_t)transition_time_in_ms;
6658 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_LEVEL,
6659 (uint8_t *)(&dpcd_backlight_set),
6660 sizeof(dpcd_backlight_set)) != DC_OK)
6663 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_CONTROL,
6664 &backlight_control, 1) != DC_OK)
6670 bool dc_link_get_backlight_level_nits(struct dc_link *link,
6671 uint32_t *backlight_millinits_avg,
6672 uint32_t *backlight_millinits_peak)
6674 union dpcd_source_backlight_get dpcd_backlight_get;
6676 memset(&dpcd_backlight_get, 0, sizeof(union dpcd_source_backlight_get));
6678 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6679 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6682 if (core_link_read_dpcd(link, DP_SOURCE_BACKLIGHT_CURRENT_PEAK,
6683 dpcd_backlight_get.raw,
6684 sizeof(union dpcd_source_backlight_get)) != DC_OK)
6687 *backlight_millinits_avg =
6688 dpcd_backlight_get.bytes.backlight_millinits_avg;
6689 *backlight_millinits_peak =
6690 dpcd_backlight_get.bytes.backlight_millinits_peak;
6692 /* On non-supported panels dpcd_read usually succeeds with 0 returned */
6693 if (*backlight_millinits_avg == 0 ||
6694 *backlight_millinits_avg > *backlight_millinits_peak)
6700 bool dc_link_backlight_enable_aux(struct dc_link *link, bool enable)
6702 uint8_t backlight_enable = enable ? 1 : 0;
6704 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6705 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6708 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_ENABLE,
6709 &backlight_enable, 1) != DC_OK)
6715 // we read default from 0x320 because we expect BIOS wrote it there
6716 // regular get_backlight_nit reads from panel set at 0x326
6717 bool dc_link_read_default_bl_aux(struct dc_link *link, uint32_t *backlight_millinits)
6719 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6720 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6723 if (core_link_read_dpcd(link, DP_SOURCE_BACKLIGHT_LEVEL,
6724 (uint8_t *) backlight_millinits,
6725 sizeof(uint32_t)) != DC_OK)
6731 bool dc_link_set_default_brightness_aux(struct dc_link *link)
6733 uint32_t default_backlight;
6735 if (link && link->dpcd_sink_ext_caps.bits.oled == 1) {
6736 if (!dc_link_read_default_bl_aux(link, &default_backlight))
6737 default_backlight = 150000;
6738 // if < 5 nits or > 5000, it might be wrong readback
6739 if (default_backlight < 5000 || default_backlight > 5000000)
6740 default_backlight = 150000; //
6742 return dc_link_set_backlight_level_nits(link, true,
6743 default_backlight, 0);
6748 bool is_edp_ilr_optimization_required(struct dc_link *link, struct dc_crtc_timing *crtc_timing)
6750 struct dc_link_settings link_setting;
6751 uint8_t link_bw_set;
6752 uint8_t link_rate_set;
6754 union lane_count_set lane_count_set = {0};
6756 ASSERT(link || crtc_timing); // invalid input
6758 if (link->dpcd_caps.edp_supported_link_rates_count == 0 ||
6759 !link->dc->debug.optimize_edp_link_rate)
6763 // Read DPCD 00100h to find if standard link rates are set
6764 core_link_read_dpcd(link, DP_LINK_BW_SET,
6765 &link_bw_set, sizeof(link_bw_set));
6768 DC_LOG_EVENT_LINK_TRAINING("eDP ILR: Optimization required, VBIOS used link_bw_set\n");
6772 // Read DPCD 00115h to find the edp link rate set used
6773 core_link_read_dpcd(link, DP_LINK_RATE_SET,
6774 &link_rate_set, sizeof(link_rate_set));
6776 // Read DPCD 00101h to find out the number of lanes currently set
6777 core_link_read_dpcd(link, DP_LANE_COUNT_SET,
6778 &lane_count_set.raw, sizeof(lane_count_set));
6780 req_bw = dc_bandwidth_in_kbps_from_timing(crtc_timing);
6782 if (!crtc_timing->flags.DSC)
6783 decide_edp_link_settings(link, &link_setting, req_bw);
6785 decide_edp_link_settings_with_dsc(link, &link_setting, req_bw, LINK_RATE_UNKNOWN);
6787 if (link->dpcd_caps.edp_supported_link_rates[link_rate_set] != link_setting.link_rate ||
6788 lane_count_set.bits.LANE_COUNT_SET != link_setting.lane_count) {
6789 DC_LOG_EVENT_LINK_TRAINING("eDP ILR: Optimization required, VBIOS link_rate_set not optimal\n");
6793 DC_LOG_EVENT_LINK_TRAINING("eDP ILR: No optimization required, VBIOS set optimal link_rate_set\n");
6797 enum dp_link_encoding dp_get_link_encoding_format(const struct dc_link_settings *link_settings)
6799 if ((link_settings->link_rate >= LINK_RATE_LOW) &&
6800 (link_settings->link_rate <= LINK_RATE_HIGH3))
6801 return DP_8b_10b_ENCODING;
6802 #if defined(CONFIG_DRM_AMD_DC_DCN)
6803 else if ((link_settings->link_rate >= LINK_RATE_UHBR10) &&
6804 (link_settings->link_rate <= LINK_RATE_UHBR20))
6805 return DP_128b_132b_ENCODING;
6807 return DP_UNKNOWN_ENCODING;
6810 #if defined(CONFIG_DRM_AMD_DC_DCN)
6811 enum dp_link_encoding dc_link_dp_mst_decide_link_encoding_format(const struct dc_link *link)
6813 struct dc_link_settings link_settings = {0};
6815 if (!dc_is_dp_signal(link->connector_signal))
6816 return DP_UNKNOWN_ENCODING;
6818 if (link->preferred_link_setting.lane_count !=
6819 LANE_COUNT_UNKNOWN &&
6820 link->preferred_link_setting.link_rate !=
6821 LINK_RATE_UNKNOWN) {
6822 link_settings = link->preferred_link_setting;
6824 decide_mst_link_settings(link, &link_settings);
6827 return dp_get_link_encoding_format(&link_settings);
6830 // TODO - DP2.0 Link: Fix get_lane_status to handle LTTPR offset (SST and MST)
6831 static void get_lane_status(
6832 struct dc_link *link,
6833 uint32_t lane_count,
6834 union lane_status *status,
6835 union lane_align_status_updated *status_updated)
6838 uint8_t dpcd_buf[3] = {0};
6840 if (status == NULL || status_updated == NULL) {
6844 core_link_read_dpcd(
6850 for (lane = 0; lane < lane_count; lane++) {
6851 status[lane].raw = get_nibble_at_index(&dpcd_buf[0], lane);
6854 status_updated->raw = dpcd_buf[2];
6857 bool dpcd_write_128b_132b_sst_payload_allocation_table(
6858 const struct dc_stream_state *stream,
6859 struct dc_link *link,
6860 struct link_mst_stream_allocation_table *proposed_table,
6863 const uint8_t vc_id = 1; /// VC ID always 1 for SST
6864 const uint8_t start_time_slot = 0; /// Always start at time slot 0 for SST
6865 bool result = false;
6866 uint8_t req_slot_count = 0;
6867 struct fixed31_32 avg_time_slots_per_mtp = { 0 };
6868 union payload_table_update_status update_status = { 0 };
6869 const uint32_t max_retries = 30;
6870 uint32_t retries = 0;
6873 avg_time_slots_per_mtp = calculate_sst_avg_time_slots_per_mtp(stream, link);
6874 req_slot_count = dc_fixpt_ceil(avg_time_slots_per_mtp);
6876 /// Leave req_slot_count = 0 if allocate is false.
6879 /// Write DPCD 2C0 = 1 to start updating
6880 update_status.bits.VC_PAYLOAD_TABLE_UPDATED = 1;
6881 core_link_write_dpcd(
6883 DP_PAYLOAD_TABLE_UPDATE_STATUS,
6887 /// Program the changes in DPCD 1C0 - 1C2
6889 core_link_write_dpcd(
6891 DP_PAYLOAD_ALLOCATE_SET,
6895 ASSERT(start_time_slot == 0);
6896 core_link_write_dpcd(
6898 DP_PAYLOAD_ALLOCATE_START_TIME_SLOT,
6902 ASSERT(req_slot_count <= MAX_MTP_SLOT_COUNT); /// Validation should filter out modes that exceed link BW
6903 core_link_write_dpcd(
6905 DP_PAYLOAD_ALLOCATE_TIME_SLOT_COUNT,
6909 /// Poll till DPCD 2C0 read 1
6910 /// Try for at least 150ms (30 retries, with 5ms delay after each attempt)
6912 while (retries < max_retries) {
6913 if (core_link_read_dpcd(
6915 DP_PAYLOAD_TABLE_UPDATE_STATUS,
6918 if (update_status.bits.VC_PAYLOAD_TABLE_UPDATED == 1) {
6919 DC_LOG_DP2("SST Update Payload: downstream payload table updated.");
6924 union dpcd_rev dpcdRev;
6926 if (core_link_read_dpcd(
6931 DC_LOG_ERROR("SST Update Payload: Unable to read DPCD revision "
6932 "of sink while polling payload table "
6933 "updated status bit.");
6941 if (!result && retries == max_retries) {
6942 DC_LOG_ERROR("SST Update Payload: Payload table not updated after retries, "
6943 "continue on. Something is wrong with the branch.");
6944 // TODO - DP2.0 Payload: Read and log the payload table from downstream branch
6947 proposed_table->stream_count = 1; /// Always 1 stream for SST
6948 proposed_table->stream_allocations[0].slot_count = req_slot_count;
6949 proposed_table->stream_allocations[0].vcp_id = vc_id;
6954 bool dpcd_poll_for_allocation_change_trigger(struct dc_link *link)
6957 * wait for ACT handled
6960 const int act_retries = 30;
6961 enum act_return_status result = ACT_FAILED;
6962 union payload_table_update_status update_status = {0};
6963 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
6964 union lane_align_status_updated lane_status_updated;
6966 for (i = 0; i < act_retries; i++) {
6967 get_lane_status(link, link->cur_link_settings.lane_count, dpcd_lane_status, &lane_status_updated);
6969 if (!dp_is_cr_done(link->cur_link_settings.lane_count, dpcd_lane_status) ||
6970 !dp_is_ch_eq_done(link->cur_link_settings.lane_count, dpcd_lane_status) ||
6971 !dp_is_symbol_locked(link->cur_link_settings.lane_count, dpcd_lane_status) ||
6972 !dp_is_interlane_aligned(lane_status_updated)) {
6973 DC_LOG_ERROR("SST Update Payload: Link loss occurred while "
6974 "polling for ACT handled.");
6975 result = ACT_LINK_LOST;
6978 core_link_read_dpcd(
6980 DP_PAYLOAD_TABLE_UPDATE_STATUS,
6984 if (update_status.bits.ACT_HANDLED == 1) {
6985 DC_LOG_DP2("SST Update Payload: ACT handled by downstream.");
6986 result = ACT_SUCCESS;
6993 if (result == ACT_FAILED) {
6994 DC_LOG_ERROR("SST Update Payload: ACT still not handled after retries, "
6995 "continue on. Something is wrong with the branch.");
6998 return (result == ACT_SUCCESS);
7001 struct fixed31_32 calculate_sst_avg_time_slots_per_mtp(
7002 const struct dc_stream_state *stream,
7003 const struct dc_link *link)
7005 struct fixed31_32 link_bw_effective =
7007 dc_link_bandwidth_kbps(link, &link->cur_link_settings));
7008 struct fixed31_32 timeslot_bw_effective =
7009 dc_fixpt_div_int(link_bw_effective, MAX_MTP_SLOT_COUNT);
7010 struct fixed31_32 timing_bw =
7012 dc_bandwidth_in_kbps_from_timing(&stream->timing));
7013 struct fixed31_32 avg_time_slots_per_mtp =
7014 dc_fixpt_div(timing_bw, timeslot_bw_effective);
7016 return avg_time_slots_per_mtp;
7019 bool is_dp_128b_132b_signal(struct pipe_ctx *pipe_ctx)
7021 /* If this assert is hit then we have a link encoder dynamic management issue */
7022 ASSERT(pipe_ctx->stream_res.hpo_dp_stream_enc ? pipe_ctx->link_res.hpo_dp_link_enc != NULL : true);
7023 return (pipe_ctx->stream_res.hpo_dp_stream_enc &&
7024 pipe_ctx->link_res.hpo_dp_link_enc &&
7025 dc_is_dp_signal(pipe_ctx->stream->signal));
7029 void edp_panel_backlight_power_on(struct dc_link *link)
7031 if (link->connector_signal != SIGNAL_TYPE_EDP)
7034 link->dc->hwss.edp_power_control(link, true);
7035 link->dc->hwss.edp_wait_for_hpd_ready(link, true);
7036 if (link->dc->hwss.edp_backlight_control)
7037 link->dc->hwss.edp_backlight_control(link, true);