2 * Copyright 2015 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
24 #include "dm_services.h"
26 #include "dc_link_dp.h"
27 #include "dm_helpers.h"
32 #include "inc/core_types.h"
33 #include "link_hwss.h"
34 #include "dc_link_ddc.h"
35 #include "core_status.h"
36 #include "dpcd_defs.h"
37 #include "dc_dmub_srv.h"
38 #include "dce/dmub_hw_lock_mgr.h"
39 #include "inc/dc_link_dpia.h"
40 #include "inc/link_enc_cfg.h"
/* Branded dongle converter ID strings stored byte-reversed:
 * "sivarT" is "Travis" backwards, "dnomlA" is "Almond" backwards. */
43 static const uint8_t DP_VGA_LVDS_CONVERTER_ID_2[] = "sivarT";
45 static const uint8_t DP_VGA_LVDS_CONVERTER_ID_3[] = "dnomlA";
/* Tracing is compiled out in this translation unit. */
49 #define DC_TRACE_LEVEL_MESSAGE(...) /* do nothing */
51 #include "link_dpcd.h"
53 /* maximum pre emphasis level allowed for each voltage swing level*/
/* NOTE(review): indexed by voltage swing level; the middle table entries
 * appear missing from this extraction (numbering jumps 55 -> 58) —
 * confirm against upstream before relying on the visible contents. */
54 static const enum dc_pre_emphasis
55 voltage_swing_to_pre_emphasis[] = { PRE_EMPHASIS_LEVEL3,
58 PRE_EMPHASIS_DISABLED };
/* Post-link-training adjust request policy: max request count and
 * timeout (presumably in ms — TODO confirm at the use site). */
61 POST_LT_ADJ_REQ_LIMIT = 6,
62 POST_LT_ADJ_REQ_TIMEOUT = 200
/* Link training fallback table: each entry is a (lane count, link rate)
 * pair; ordered by total link bandwidth from highest to lowest so that
 * fallback always selects the next highest bandwidth, per DP spec. */
65 #if defined(CONFIG_DRM_AMD_DC_DCN)
66 struct dp_lt_fallback_entry {
67 enum dc_lane_count lane_count;
68 enum dc_link_rate link_rate;
71 static const struct dp_lt_fallback_entry dp_lt_fallbacks[] = {
72 /* This link training fallback array is ordered by
73 * link bandwidth from highest to lowest.
74 * DP specs makes it a normative policy to always
75 * choose the next highest link bandwidth during
76 * link training fallback.
78 {LANE_COUNT_FOUR, LINK_RATE_UHBR20},
79 {LANE_COUNT_FOUR, LINK_RATE_UHBR13_5},
80 {LANE_COUNT_TWO, LINK_RATE_UHBR20},
81 {LANE_COUNT_FOUR, LINK_RATE_UHBR10},
82 {LANE_COUNT_TWO, LINK_RATE_UHBR13_5},
83 {LANE_COUNT_FOUR, LINK_RATE_HIGH3},
84 {LANE_COUNT_ONE, LINK_RATE_UHBR20},
85 {LANE_COUNT_TWO, LINK_RATE_UHBR10},
86 {LANE_COUNT_FOUR, LINK_RATE_HIGH2},
87 {LANE_COUNT_ONE, LINK_RATE_UHBR13_5},
88 {LANE_COUNT_TWO, LINK_RATE_HIGH3},
89 {LANE_COUNT_ONE, LINK_RATE_UHBR10},
90 {LANE_COUNT_TWO, LINK_RATE_HIGH2},
91 {LANE_COUNT_FOUR, LINK_RATE_HIGH},
92 {LANE_COUNT_ONE, LINK_RATE_HIGH3},
93 {LANE_COUNT_FOUR, LINK_RATE_LOW},
94 {LANE_COUNT_ONE, LINK_RATE_HIGH2},
95 {LANE_COUNT_TWO, LINK_RATE_HIGH},
96 {LANE_COUNT_TWO, LINK_RATE_LOW},
97 {LANE_COUNT_ONE, LINK_RATE_HIGH},
98 {LANE_COUNT_ONE, LINK_RATE_LOW},
/* Forward declarations for static helpers defined later in this file. */
102 static bool decide_fallback_link_setting(
103 struct dc_link *link,
104 struct dc_link_settings initial_link_settings,
105 struct dc_link_settings *current_link_setting,
106 enum link_training_result training_result);
107 static struct dc_link_settings get_common_supported_link_settings(
108 struct dc_link_settings link_setting_a,
109 struct dc_link_settings link_setting_b);
110 static void maximize_lane_settings(const struct link_training_settings *lt_settings,
111 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX]);
112 static void override_lane_settings(const struct link_training_settings *lt_settings,
113 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX]);
/* Microsecond delay to wait between clock-recovery AUX status reads.
 * Defaults to 100 us; for 8b/10b links with DPCD rev >= 1.2 a non-zero
 * DP_TRAINING_AUX_RD_INTERVAL value scales the wait in 4000 us units.
 * NOTE(review): braces, the core_link_read_dpcd() call headers and the
 * #else/#endif are missing from this extraction (numbering gaps). */
115 static uint32_t get_cr_training_aux_rd_interval(struct dc_link *link,
116 const struct dc_link_settings *link_settings)
118 union training_aux_rd_interval training_rd_interval;
119 uint32_t wait_in_micro_secs = 100;
120 #if defined(CONFIG_DRM_AMD_DC_DCN)
121 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
122 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING &&
123 link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
126 DP_TRAINING_AUX_RD_INTERVAL,
127 (uint8_t *)&training_rd_interval,
128 sizeof(training_rd_interval));
129 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
130 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
135 DP_TRAINING_AUX_RD_INTERVAL,
136 (uint8_t *)&training_rd_interval,
137 sizeof(training_rd_interval));
138 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
139 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
141 return wait_in_micro_secs;
/* Microsecond delay between channel-equalization AUX status reads.
 * 128b/132b links read DP_128b_132b_TRAINING_AUX_RD_INTERVAL; 8b/10b
 * rev >= 1.2 links read DP_TRAINING_AUX_RD_INTERVAL, then map the raw
 * field through the switch below.  Legacy path defaults to 400 us and
 * scales a non-zero DPCD value by 4000 us.
 * NOTE(review): switch cases 0-2, default, and #else/#endif lines are
 * missing from this extraction (numbering gaps). */
144 static uint32_t get_eq_training_aux_rd_interval(
145 struct dc_link *link,
146 const struct dc_link_settings *link_settings)
148 #if defined(CONFIG_DRM_AMD_DC_DCN)
149 union training_aux_rd_interval training_rd_interval;
151 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
152 if (dp_get_link_encoding_format(link_settings) == DP_128b_132b_ENCODING) {
155 DP_128b_132b_TRAINING_AUX_RD_INTERVAL,
156 (uint8_t *)&training_rd_interval,
157 sizeof(training_rd_interval));
158 } else if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING &&
159 link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
162 DP_TRAINING_AUX_RD_INTERVAL,
163 (uint8_t *)&training_rd_interval,
164 sizeof(training_rd_interval));
167 switch (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL) {
171 case 3: return 12000;
172 case 4: return 16000;
173 case 5: return 32000;
174 case 6: return 64000;
178 union training_aux_rd_interval training_rd_interval;
179 uint32_t wait_in_micro_secs = 400;
181 memset(&training_rd_interval, 0, sizeof(training_rd_interval));
182 /* overwrite the delay if rev > 1.1*/
183 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
184 /* DP 1.2 or later - retrieve delay through
185 * "DPCD_ADDR_TRAINING_AUX_RD_INTERVAL" register */
188 DP_TRAINING_AUX_RD_INTERVAL,
189 (uint8_t *)&training_rd_interval,
190 sizeof(training_rd_interval));
192 if (training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL)
193 wait_in_micro_secs = training_rd_interval.bits.TRAINIG_AUX_RD_INTERVAL * 4000;
196 return wait_in_micro_secs;
/* Block for the given training AUX read interval.  Long waits
 * (> 16 ms) sleep via msleep(); short waits busy-wait via udelay(),
 * then the wait is logged. */
200 void dp_wait_for_training_aux_rd_interval(
201 struct dc_link *link,
202 uint32_t wait_in_micro_secs)
204 #if defined(CONFIG_DRM_AMD_DC_DCN)
205 if (wait_in_micro_secs > 16000)
206 msleep(wait_in_micro_secs/1000);
208 udelay(wait_in_micro_secs);
210 udelay(wait_in_micro_secs);
213 DC_LOG_HW_LINK_TRAINING("%s:\n wait = %d\n",
/* Map a driver-internal dc_dp_training_pattern to the DPCD
 * TRAINING_PATTERN_SET encoding; unknown patterns are logged and
 * fall back to the video-idle (no pattern) value.
 * NOTE(review): the `switch (pattern)` header, break statements and
 * default label are missing from this extraction (numbering gaps). */
218 enum dpcd_training_patterns
219 dc_dp_training_pattern_to_dpcd_training_pattern(
220 struct dc_link *link,
221 enum dc_dp_training_pattern pattern)
223 enum dpcd_training_patterns dpcd_tr_pattern =
224 DPCD_TRAINING_PATTERN_VIDEOIDLE;
227 case DP_TRAINING_PATTERN_SEQUENCE_1:
228 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_1;
230 case DP_TRAINING_PATTERN_SEQUENCE_2:
231 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_2;
233 case DP_TRAINING_PATTERN_SEQUENCE_3:
234 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_3;
236 case DP_TRAINING_PATTERN_SEQUENCE_4:
237 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_4;
239 #if defined(CONFIG_DRM_AMD_DC_DCN)
240 case DP_128b_132b_TPS1:
241 dpcd_tr_pattern = DPCD_128b_132b_TPS1;
243 case DP_128b_132b_TPS2:
244 dpcd_tr_pattern = DPCD_128b_132b_TPS2;
246 case DP_128b_132b_TPS2_CDS:
247 dpcd_tr_pattern = DPCD_128b_132b_TPS2_CDS;
250 case DP_TRAINING_PATTERN_VIDEOIDLE:
251 dpcd_tr_pattern = DPCD_TRAINING_PATTERN_VIDEOIDLE;
255 DC_LOG_HW_LINK_TRAINING("%s: Invalid HW Training pattern: %d\n",
260 return dpcd_tr_pattern;
/* Program the sink's DP_TRAINING_PATTERN_SET register with the DPCD
 * encoding of the requested training pattern, then log the write. */
263 static void dpcd_set_training_pattern(
264 struct dc_link *link,
265 enum dc_dp_training_pattern training_pattern)
267 union dpcd_training_pattern dpcd_pattern = {0};
269 dpcd_pattern.v1_4.TRAINING_PATTERN_SET =
270 dc_dp_training_pattern_to_dpcd_training_pattern(
271 link, training_pattern);
273 core_link_write_dpcd(
275 DP_TRAINING_PATTERN_SET,
279 DC_LOG_HW_LINK_TRAINING("%s\n %x pattern = %x\n",
281 DP_TRAINING_PATTERN_SET,
282 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
/* Select the clock-recovery training pattern by link encoding:
 * TPS1 for 8b/10b, 128b/132b TPS1 for DP 2.x encoding.
 * NOTE(review): default case and closing braces missing from this
 * extraction. */
285 static enum dc_dp_training_pattern decide_cr_training_pattern(
286 const struct dc_link_settings *link_settings)
288 switch (dp_get_link_encoding_format(link_settings)) {
289 case DP_8b_10b_ENCODING:
291 return DP_TRAINING_PATTERN_SEQUENCE_1;
292 #if defined(CONFIG_DRM_AMD_DC_DCN)
293 case DP_128b_132b_ENCODING:
294 return DP_128b_132b_TPS1;
/* Select the channel-equalization training pattern: the highest TPS
 * supported by BOTH the link encoder (TX) and the sink's DPCD caps (RX).
 * The DCN build keys off encoder caps + rx caps per encoding format;
 * the non-DCN fallback path below computes the same intersection with
 * a "highest_tp" ladder.  The link encoder is looked up dynamically
 * when DIG mapping is flexible. */
299 static enum dc_dp_training_pattern decide_eq_training_pattern(struct dc_link *link,
300 const struct dc_link_settings *link_settings)
302 struct link_encoder *link_enc;
303 #if defined(CONFIG_DRM_AMD_DC_DCN)
304 struct encoder_feature_support *enc_caps;
305 struct dpcd_caps *rx_caps = &link->dpcd_caps;
306 enum dc_dp_training_pattern pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
308 /* Access link encoder capability based on whether it is statically
309 * or dynamically assigned to a link.
311 if (link->is_dig_mapping_flexible &&
312 link->dc->res_pool->funcs->link_encs_assign)
313 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
315 link_enc = link->link_enc;
317 enc_caps = &link_enc->features;
319 switch (dp_get_link_encoding_format(link_settings)) {
320 case DP_8b_10b_ENCODING:
321 if (enc_caps->flags.bits.IS_TPS4_CAPABLE &&
322 rx_caps->max_down_spread.bits.TPS4_SUPPORTED)
323 pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
324 else if (enc_caps->flags.bits.IS_TPS3_CAPABLE &&
325 rx_caps->max_ln_count.bits.TPS3_SUPPORTED)
326 pattern = DP_TRAINING_PATTERN_SEQUENCE_3;
328 pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
330 case DP_128b_132b_ENCODING:
331 pattern = DP_128b_132b_TPS2;
334 pattern = DP_TRAINING_PATTERN_SEQUENCE_2;
/* Non-DCN fallback path: same TX/RX capability intersection. */
339 enum dc_dp_training_pattern highest_tp = DP_TRAINING_PATTERN_SEQUENCE_2;
340 struct encoder_feature_support *features;
341 struct dpcd_caps *dpcd_caps = &link->dpcd_caps;
343 /* Access link encoder capability based on whether it is statically
344 * or dynamically assigned to a link.
346 if (link->is_dig_mapping_flexible &&
347 link->dc->res_pool->funcs->link_encs_assign)
348 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
350 link_enc = link->link_enc;
352 features = &link_enc->features;
354 if (features->flags.bits.IS_TPS3_CAPABLE)
355 highest_tp = DP_TRAINING_PATTERN_SEQUENCE_3;
357 if (features->flags.bits.IS_TPS4_CAPABLE)
358 highest_tp = DP_TRAINING_PATTERN_SEQUENCE_4;
360 if (dpcd_caps->max_down_spread.bits.TPS4_SUPPORTED &&
361 highest_tp >= DP_TRAINING_PATTERN_SEQUENCE_4)
362 return DP_TRAINING_PATTERN_SEQUENCE_4;
364 if (dpcd_caps->max_ln_count.bits.TPS3_SUPPORTED &&
365 highest_tp >= DP_TRAINING_PATTERN_SEQUENCE_3)
366 return DP_TRAINING_PATTERN_SEQUENCE_3;
368 return DP_TRAINING_PATTERN_SEQUENCE_2;
372 #if defined(CONFIG_DRM_AMD_DC_DCN)
373 static uint8_t get_dpcd_link_rate(const struct dc_link_settings *link_settings)
375 uint8_t link_rate = 0;
376 enum dp_link_encoding encoding = dp_get_link_encoding_format(link_settings);
378 if (encoding == DP_128b_132b_ENCODING)
379 switch (link_settings->link_rate) {
380 case LINK_RATE_UHBR10:
383 case LINK_RATE_UHBR20:
386 case LINK_RATE_UHBR13_5:
393 else if (encoding == DP_8b_10b_ENCODING)
394 link_rate = (uint8_t) link_settings->link_rate;
/* W/A for certain LTTPR to reset their lane settings, part one of two:
 * write a vendor-specific sequence (last byte 0xff = start) at the
 * vendor DPCD address, shifted to the nearest repeater's register bank
 * when LTTPRs are present. */
402 static void vendor_specific_lttpr_wa_one_start(struct dc_link *link)
404 const uint8_t vendor_lttpr_write_data[4] = {0x1, 0x50, 0x63, 0xff};
405 const uint8_t offset = dp_convert_to_count(
406 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
407 uint32_t vendor_lttpr_write_address = 0xF004F;
410 vendor_lttpr_write_address +=
411 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
413 /* W/A for certain LTTPR to reset their lane settings, part one of two */
414 core_link_write_dpcd(
416 vendor_lttpr_write_address,
417 &vendor_lttpr_write_data[0],
418 sizeof(vendor_lttpr_write_data));
/* Counterpart of wa_one_start: same vendor sequence with the last byte
 * cleared (0x0 = end), completing the LTTPR lane-settings reset W/A. */
421 static void vendor_specific_lttpr_wa_one_end(
422 struct dc_link *link,
425 const uint8_t vendor_lttpr_write_data[4] = {0x1, 0x50, 0x63, 0x0};
426 const uint8_t offset = dp_convert_to_count(
427 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
428 uint32_t vendor_lttpr_write_address = 0xF004F;
432 vendor_lttpr_write_address +=
433 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
435 /* W/A for certain LTTPR to reset their lane settings, part two of two */
436 core_link_write_dpcd(
438 vendor_lttpr_write_address,
439 &vendor_lttpr_write_data[0],
440 sizeof(vendor_lttpr_write_data));
/* W/A: when retraining at the same link rate as the previous attempt,
 * toggle the LTTPR's internal state via a DPCD write; always records
 * the attempted rate for the next comparison.
 * NOTE(review): toggle-rate computation and write arguments are
 * missing from this extraction (numbering gaps). */
444 static void vendor_specific_lttpr_wa_one_two(
445 struct dc_link *link,
448 if (link->apply_vendor_specific_lttpr_link_rate_wa) {
449 uint8_t toggle_rate = 0x0;
456 if (link->vendor_specific_lttpr_link_rate_wa == rate) {
457 /* W/A for certain LTTPR to reset internal state for link training */
458 core_link_write_dpcd(
465 /* Store the last attempted link rate for this link */
466 link->vendor_specific_lttpr_link_rate_wa = rate;
/* W/A to read the VS/PE lane settings actually requested by the DPRX
 * through vendor-specific registers: write the VS then PE select
 * sequences, read back packed 2-bit per-lane values (dprx_vs/dprx_pe),
 * and unpack them into dpcd_lane_adjust[].
 * NOTE(review): the core_link_read_dpcd() calls that fill dprx_vs and
 * dprx_pe are missing from this extraction (numbering gaps). */
470 static void vendor_specific_lttpr_wa_three(
471 struct dc_link *link,
472 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX])
474 const uint8_t vendor_lttpr_write_data_vs[3] = {0x0, 0x53, 0x63};
475 const uint8_t vendor_lttpr_write_data_pe[3] = {0x0, 0x54, 0x63};
476 const uint8_t offset = dp_convert_to_count(
477 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
478 uint32_t vendor_lttpr_write_address = 0xF004F;
479 uint32_t vendor_lttpr_read_address = 0xF0053;
484 if (offset != 0xFF) {
485 vendor_lttpr_write_address +=
486 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
487 vendor_lttpr_read_address +=
488 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
491 /* W/A to read lane settings requested by DPRX */
492 core_link_write_dpcd(
494 vendor_lttpr_write_address,
495 &vendor_lttpr_write_data_vs[0],
496 sizeof(vendor_lttpr_write_data_vs));
499 vendor_lttpr_read_address,
502 core_link_write_dpcd(
504 vendor_lttpr_write_address,
505 &vendor_lttpr_write_data_pe[0],
506 sizeof(vendor_lttpr_write_data_pe));
509 vendor_lttpr_read_address,
513 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
514 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_LANE = (dprx_vs >> (2 * lane)) & 0x3;
515 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_LANE = (dprx_pe >> (2 * lane)) & 0x3;
/* Wrapper around wa_three that converts the read-back lane_adjust
 * values into dpcd_training_lane (VS/PE "SET") form for each lane. */
519 static void vendor_specific_lttpr_wa_three_dpcd(
520 struct dc_link *link,
521 union dpcd_training_lane dpcd_lane_adjust[LANE_COUNT_DP_MAX])
523 union lane_adjust lane_adjust[LANE_COUNT_DP_MAX];
526 vendor_specific_lttpr_wa_three(link, lane_adjust);
528 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
529 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_SET = lane_adjust[lane].bits.VOLTAGE_SWING_LANE;
530 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_SET = lane_adjust[lane].bits.PRE_EMPHASIS_LANE;
/* W/A to pass a TPS=0 (end training) DPCD write through the LTTPR to
 * the DPRX: bracket the training-pattern clear with vendor enable /
 * disable writes, then (DP2.0 builds) poll DP_SINK_STATUS until the
 * intra-hop AUX reply indication clears (bounded to 10 iterations).
 * NOTE(review): loop delay and closing lines are missing from this
 * extraction (numbering gaps). */
534 static void vendor_specific_lttpr_wa_four(
535 struct dc_link *link,
538 const uint8_t vendor_lttpr_write_data_one[4] = {0x1, 0x55, 0x63, 0x8};
539 const uint8_t vendor_lttpr_write_data_two[4] = {0x1, 0x55, 0x63, 0x0};
540 const uint8_t offset = dp_convert_to_count(
541 link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
542 uint32_t vendor_lttpr_write_address = 0xF004F;
543 #if defined(CONFIG_DRM_AMD_DC_DP2_0)
544 uint8_t sink_status = 0;
549 vendor_lttpr_write_address +=
550 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
552 /* W/A to pass through DPCD write of TPS=0 to DPRX */
554 core_link_write_dpcd(
556 vendor_lttpr_write_address,
557 &vendor_lttpr_write_data_one[0],
558 sizeof(vendor_lttpr_write_data_one));
561 /* clear training pattern set */
562 dpcd_set_training_pattern(link, DP_TRAINING_PATTERN_VIDEOIDLE);
565 core_link_write_dpcd(
567 vendor_lttpr_write_address,
568 &vendor_lttpr_write_data_two[0],
569 sizeof(vendor_lttpr_write_data_two));
572 #if defined(CONFIG_DRM_AMD_DC_DP2_0)
573 /* poll for intra-hop disable */
574 for (i = 0; i < 10; i++) {
575 if ((core_link_read_dpcd(link, DP_SINK_STATUS, &sink_status, 1) == DC_OK) &&
576 (sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION) == 0)
/* W/A to force the LTTPR to output the requested VS and PE: pack the
 * per-lane 2-bit VS/PE values into the vendor write payloads, issue a
 * reset sequence, then write the packed VS and PE sequences. */
583 static void vendor_specific_lttpr_wa_five(
584 struct dc_link *link,
585 const union dpcd_training_lane dpcd_lane_adjust[LANE_COUNT_DP_MAX],
588 const uint32_t vendor_lttpr_write_address = 0xF004F;
589 const uint8_t vendor_lttpr_write_data_reset[4] = {0x1, 0x50, 0x63, 0xFF};
590 uint8_t vendor_lttpr_write_data_vs[4] = {0x1, 0x51, 0x63, 0x0};
591 uint8_t vendor_lttpr_write_data_pe[4] = {0x1, 0x52, 0x63, 0x0};
594 for (lane = 0; lane < lane_count; lane++) {
595 vendor_lttpr_write_data_vs[3] |=
596 dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_SET << (2 * lane);
597 vendor_lttpr_write_data_pe[3] |=
598 dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_SET << (2 * lane);
601 /* Force LTTPR to output desired VS and PE */
602 core_link_write_dpcd(
604 vendor_lttpr_write_address,
605 &vendor_lttpr_write_data_reset[0],
606 sizeof(vendor_lttpr_write_data_reset));
607 core_link_write_dpcd(
609 vendor_lttpr_write_address,
610 &vendor_lttpr_write_data_vs[0],
611 sizeof(vendor_lttpr_write_data_vs));
612 core_link_write_dpcd(
614 vendor_lttpr_write_address,
615 &vendor_lttpr_write_data_pe[0],
616 sizeof(vendor_lttpr_write_data_pe));
/* Program the sink's link configuration registers before training:
 * DOWNSPREAD_CTRL, LANE_COUNT_SET (granting POST_LT_ADJ_REQ only for
 * PHY endpoints training below TPS4), then the link rate — via
 * LINK_RATE_SET when DPCD >= 1.3 uses the eDP link-rate table
 * (re-reading SUPPORTED_LINK_RATES as a MUX-chip power-down W/A),
 * otherwise via LINK_BW_SET, with vendor LTTPR W/As applied around it.
 * Returns the status of the last DPCD write.
 * NOTE(review): braces, the `uint8_t rate` declaration, `} else {`
 * lines and the closing log-argument lines are missing from this
 * extraction (numbering gaps); '&' characters were mangled to '<_'
 * by the extraction on lines 666/669 and in later blocks. */
619 enum dc_status dpcd_set_link_settings(
620 struct dc_link *link,
621 const struct link_training_settings *lt_settings)
624 enum dc_status status;
626 union down_spread_ctrl downspread = {0};
627 union lane_count_set lane_count_set = {0};
629 downspread.raw = (uint8_t)
630 (lt_settings->link_settings.link_spread);
632 lane_count_set.bits.LANE_COUNT_SET =
633 lt_settings->link_settings.lane_count;
635 lane_count_set.bits.ENHANCED_FRAMING = lt_settings->enhanced_framing;
636 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
639 if (link->ep_type == DISPLAY_ENDPOINT_PHY &&
640 lt_settings->pattern_for_eq < DP_TRAINING_PATTERN_SEQUENCE_4) {
641 lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED =
642 link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED;
645 status = core_link_write_dpcd(link, DP_DOWNSPREAD_CTRL,
646 &downspread.raw, sizeof(downspread));
648 status = core_link_write_dpcd(link, DP_LANE_COUNT_SET,
649 &lane_count_set.raw, 1);
651 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_13 &&
652 lt_settings->link_settings.use_link_rate_set == true) {
654 /* WA for some MUX chips that will power down with eDP and lose supported
655 * link rate set for eDP 1.4. Source reads DPCD 0x010 again to ensure
656 * MUX chip gets link rate set back before link training.
658 if (link->connector_signal == SIGNAL_TYPE_EDP) {
659 uint8_t supported_link_rates[16];
661 core_link_read_dpcd(link, DP_SUPPORTED_LINK_RATES,
662 supported_link_rates, sizeof(supported_link_rates));
664 status = core_link_write_dpcd(link, DP_LINK_BW_SET, &rate, 1);
665 status = core_link_write_dpcd(link, DP_LINK_RATE_SET,
666 <_settings->link_settings.link_rate_set, 1);
668 #if defined(CONFIG_DRM_AMD_DC_DCN)
669 rate = get_dpcd_link_rate(<_settings->link_settings);
671 rate = (uint8_t) (lt_settings->link_settings.link_rate);
673 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
674 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
675 link->lttpr_mode == LTTPR_MODE_TRANSPARENT)
676 vendor_specific_lttpr_wa_one_start(link);
678 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
679 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN))
680 vendor_specific_lttpr_wa_one_two(link, rate);
682 status = core_link_write_dpcd(link, DP_LINK_BW_SET, &rate, 1);
686 DC_LOG_HW_LINK_TRAINING("%s\n %x rate = %x\n %x lane = %x framing = %x\n %x spread = %x\n",
689 lt_settings->link_settings.link_rate,
691 lt_settings->link_settings.lane_count,
692 lt_settings->enhanced_framing,
694 lt_settings->link_settings.link_spread);
696 DC_LOG_HW_LINK_TRAINING("%s\n %x rate set = %x\n %x lane = %x framing = %x\n %x spread = %x\n",
699 lt_settings->link_settings.link_rate_set,
701 lt_settings->link_settings.lane_count,
702 lt_settings->enhanced_framing,
704 lt_settings->link_settings.link_spread);
/* Return the SCRAMBLING_DISABLE bit value for a training pattern:
 * 1 for 8b/10b TPS1-3 (scrambling off during those patterns), 0 for
 * TPS4 and the 128b/132b patterns; invalid patterns are logged.
 * NOTE(review): switch header, breaks and default label are missing
 * from this extraction (numbering gaps). */
710 uint8_t dc_dp_initialize_scrambling_data_symbols(
711 struct dc_link *link,
712 enum dc_dp_training_pattern pattern)
714 uint8_t disable_scrabled_data_symbols = 0;
717 case DP_TRAINING_PATTERN_SEQUENCE_1:
718 case DP_TRAINING_PATTERN_SEQUENCE_2:
719 case DP_TRAINING_PATTERN_SEQUENCE_3:
720 disable_scrabled_data_symbols = 1;
722 case DP_TRAINING_PATTERN_SEQUENCE_4:
723 #if defined(CONFIG_DRM_AMD_DC_DCN)
724 case DP_128b_132b_TPS1:
725 case DP_128b_132b_TPS2:
727 disable_scrabled_data_symbols = 0;
731 DC_LOG_HW_LINK_TRAINING("%s: Invalid HW Training pattern: %d\n",
735 return disable_scrabled_data_symbols;
738 static inline bool is_repeater(struct dc_link *link, uint32_t offset)
740 return (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) && (offset != 0);
/* Write the training pattern and per-lane settings to the DPCD in one
 * pass: build a buffer of [TRAINING_PATTERN_SET | lane0..laneN], aimed
 * at the sink or at a repeater's register bank (is_repeater), log what
 * was programmed (FFE preset for 128b/132b, VS/PE for 8b/10b), then
 * write it — split into two writes for the eDP 5-byte-burst W/A
 * (EPR#366724), as one burst otherwise.
 * NOTE(review): braces, several log-argument lines and the final write
 * argument lines are missing from this extraction (numbering gaps);
 * '&' was mangled to '<_' in several dp_get_link_encoding_format
 * calls. */
743 static void dpcd_set_lt_pattern_and_lane_settings(
744 struct dc_link *link,
745 const struct link_training_settings *lt_settings,
746 enum dc_dp_training_pattern pattern,
749 uint32_t dpcd_base_lt_offset;
751 uint8_t dpcd_lt_buffer[5] = {0};
752 union dpcd_training_pattern dpcd_pattern = { 0 };
753 uint32_t size_in_bytes;
754 bool edp_workaround = false; /* TODO link_prop.INTERNAL */
755 dpcd_base_lt_offset = DP_TRAINING_PATTERN_SET;
757 if (is_repeater(link, offset))
758 dpcd_base_lt_offset = DP_TRAINING_PATTERN_SET_PHY_REPEATER1 +
759 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
761 /*****************************************************************
762 * DpcdAddress_TrainingPatternSet
763 *****************************************************************/
764 dpcd_pattern.v1_4.TRAINING_PATTERN_SET =
765 dc_dp_training_pattern_to_dpcd_training_pattern(link, pattern);
767 dpcd_pattern.v1_4.SCRAMBLING_DISABLE =
768 dc_dp_initialize_scrambling_data_symbols(link, pattern);
770 dpcd_lt_buffer[DP_TRAINING_PATTERN_SET - DP_TRAINING_PATTERN_SET]
773 if (is_repeater(link, offset)) {
774 DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Repeater ID: %d\n 0x%X pattern = %x\n",
778 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
780 DC_LOG_HW_LINK_TRAINING("%s\n 0x%X pattern = %x\n",
783 dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
786 /* concatenate everything into one buffer*/
787 size_in_bytes = lt_settings->link_settings.lane_count *
788 sizeof(lt_settings->dpcd_lane_settings[0]);
792 &dpcd_lt_buffer[DP_TRAINING_LANE0_SET - DP_TRAINING_PATTERN_SET],
793 lt_settings->dpcd_lane_settings,
796 if (is_repeater(link, offset)) {
797 #if defined(CONFIG_DRM_AMD_DC_DCN)
798 if (dp_get_link_encoding_format(<_settings->link_settings) ==
799 DP_128b_132b_ENCODING)
800 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
801 " 0x%X TX_FFE_PRESET_VALUE = %x\n",
805 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
806 else if (dp_get_link_encoding_format(<_settings->link_settings) ==
809 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
810 " 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
814 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
815 lt_settings->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
816 lt_settings->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
817 lt_settings->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
819 #if defined(CONFIG_DRM_AMD_DC_DCN)
820 if (dp_get_link_encoding_format(<_settings->link_settings) ==
821 DP_128b_132b_ENCODING)
822 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X TX_FFE_PRESET_VALUE = %x\n",
825 lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
826 else if (dp_get_link_encoding_format(<_settings->link_settings) ==
829 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
832 lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
833 lt_settings->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
834 lt_settings->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
835 lt_settings->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
837 if (edp_workaround) {
838 /* for eDP write in 2 parts because the 5-byte burst is
839 * causing issues on some eDP panels (EPR#366724)
841 core_link_write_dpcd(
843 DP_TRAINING_PATTERN_SET,
845 sizeof(dpcd_pattern.raw));
847 core_link_write_dpcd(
849 DP_TRAINING_LANE0_SET,
850 (uint8_t *)(lt_settings->dpcd_lane_settings),
853 #if defined(CONFIG_DRM_AMD_DC_DCN)
854 } else if (dp_get_link_encoding_format(<_settings->link_settings) ==
855 DP_128b_132b_ENCODING) {
856 core_link_write_dpcd(
860 sizeof(dpcd_lt_buffer));
863 /* write it all in (1 + number-of-lanes)-byte burst*/
864 core_link_write_dpcd(
868 size_in_bytes + sizeof(dpcd_pattern.raw));
871 bool dp_is_cr_done(enum dc_lane_count ln_count,
872 union lane_status *dpcd_lane_status)
875 /*LANEx_CR_DONE bits All 1's?*/
876 for (lane = 0; lane < (uint32_t)(ln_count); lane++) {
877 if (!dpcd_lane_status[lane].bits.CR_DONE_0)
883 bool dp_is_ch_eq_done(enum dc_lane_count ln_count,
884 union lane_status *dpcd_lane_status)
888 for (lane = 0; lane < (uint32_t)(ln_count); lane++)
889 if (!dpcd_lane_status[lane].bits.CHANNEL_EQ_DONE_0)
894 bool dp_is_symbol_locked(enum dc_lane_count ln_count,
895 union lane_status *dpcd_lane_status)
899 for (lane = 0; lane < (uint32_t)(ln_count); lane++)
900 if (!dpcd_lane_status[lane].bits.SYMBOL_LOCKED_0)
905 bool dp_is_interlane_aligned(union lane_align_status_updated align_status)
907 return align_status.bits.INTERLANE_ALIGN_DONE == 1;
/* Translate per-lane HW settings into DPCD TRAINING_LANEx_SET form:
 * for 8b/10b, VS/PE values plus the MAX_*_REACHED bits when at the
 * top level; for 128b/132b, the TX FFE preset level.
 * NOTE(review): braces and #endif lines are missing from this
 * extraction; '&' was mangled to '<_' in the encoding-format calls. */
910 void dp_hw_to_dpcd_lane_settings(
911 const struct link_training_settings *lt_settings,
912 const struct dc_lane_settings hw_lane_settings[LANE_COUNT_DP_MAX],
913 union dpcd_training_lane dpcd_lane_settings[LANE_COUNT_DP_MAX])
917 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
918 if (dp_get_link_encoding_format(<_settings->link_settings) ==
919 DP_8b_10b_ENCODING) {
920 dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET =
921 (uint8_t)(hw_lane_settings[lane].VOLTAGE_SWING);
922 dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET =
923 (uint8_t)(hw_lane_settings[lane].PRE_EMPHASIS);
924 dpcd_lane_settings[lane].bits.MAX_SWING_REACHED =
925 (hw_lane_settings[lane].VOLTAGE_SWING ==
926 VOLTAGE_SWING_MAX_LEVEL ? 1 : 0);
927 dpcd_lane_settings[lane].bits.MAX_PRE_EMPHASIS_REACHED =
928 (hw_lane_settings[lane].PRE_EMPHASIS ==
929 PRE_EMPHASIS_MAX_LEVEL ? 1 : 0);
931 #if defined(CONFIG_DRM_AMD_DC_DCN)
932 else if (dp_get_link_encoding_format(<_settings->link_settings) ==
933 DP_128b_132b_ENCODING) {
934 dpcd_lane_settings[lane].tx_ffe.PRESET_VALUE =
935 hw_lane_settings[lane].FFE_PRESET.settings.level;
/* Derive the next HW lane settings from the sink's adjust requests
 * (VS/PE for 8b/10b, FFE preset for 128b/132b), mirror them into DPCD
 * form, then optionally flatten to the per-link maximum and apply
 * debug overrides when per-lane settings are disallowed.
 * NOTE(review): braces and field-name continuation lines are missing
 * from this extraction; '&' was mangled to '<_' in the
 * encoding-format calls. */
941 void dp_decide_lane_settings(
942 const struct link_training_settings *lt_settings,
943 const union lane_adjust ln_adjust[LANE_COUNT_DP_MAX],
944 struct dc_lane_settings hw_lane_settings[LANE_COUNT_DP_MAX],
945 union dpcd_training_lane dpcd_lane_settings[LANE_COUNT_DP_MAX])
949 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
950 if (dp_get_link_encoding_format(<_settings->link_settings) ==
951 DP_8b_10b_ENCODING) {
952 hw_lane_settings[lane].VOLTAGE_SWING =
953 (enum dc_voltage_swing)(ln_adjust[lane].bits.
955 hw_lane_settings[lane].PRE_EMPHASIS =
956 (enum dc_pre_emphasis)(ln_adjust[lane].bits.
959 #if defined(CONFIG_DRM_AMD_DC_DCN)
960 else if (dp_get_link_encoding_format(<_settings->link_settings) ==
961 DP_128b_132b_ENCODING) {
962 hw_lane_settings[lane].FFE_PRESET.raw =
963 ln_adjust[lane].tx_ffe.PRESET_VALUE;
967 dp_hw_to_dpcd_lane_settings(lt_settings, hw_lane_settings, dpcd_lane_settings);
969 if (lt_settings->disallow_per_lane_settings) {
970 /* we find the maximum of the requested settings across all lanes*/
971 /* and set this maximum for all lanes*/
972 maximize_lane_settings(lt_settings, hw_lane_settings);
973 override_lane_settings(lt_settings, hw_lane_settings);
975 if (lt_settings->always_match_dpcd_with_hw_lane_settings)
976 dp_hw_to_dpcd_lane_settings(lt_settings, hw_lane_settings, dpcd_lane_settings);
/* Unpack the 4-bit field for lane `index` from a packed DPCD status
 * buffer: even indices take the low nibble of buf[index / 2], odd
 * indices the high nibble.  (The nibble-selection logic was lost in
 * the extraction; restored.) */
static uint8_t get_nibble_at_index(const uint8_t *buf,
	uint32_t index)
{
	uint8_t nibble;
	nibble = buf[index / 2];

	if (index % 2)
		nibble >>= 4;
	else
		nibble &= 0x0F;

	return nibble;
}
995 static enum dc_pre_emphasis get_max_pre_emphasis_for_voltage_swing(
996 enum dc_voltage_swing voltage)
998 enum dc_pre_emphasis pre_emphasis;
999 pre_emphasis = PRE_EMPHASIS_MAX_LEVEL;
1001 if (voltage <= VOLTAGE_SWING_MAX_LEVEL)
1002 pre_emphasis = voltage_swing_to_pre_emphasis[voltage];
1004 return pre_emphasis;
/* Flatten per-lane settings to a single value: take the max VS, PE and
 * (DCN) FFE preset across active lanes, clamp each to its maximum,
 * cap PE to what the chosen VS allows, then apply the result to all
 * lanes.
 * NOTE(review): braces and #endif lines are missing from this
 * extraction (numbering gaps). */
1008 static void maximize_lane_settings(const struct link_training_settings *lt_settings,
1009 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX])
1012 struct dc_lane_settings max_requested;
1014 max_requested.VOLTAGE_SWING = lane_settings[0].VOLTAGE_SWING;
1015 max_requested.PRE_EMPHASIS = lane_settings[0].PRE_EMPHASIS;
1016 #if defined(CONFIG_DRM_AMD_DC_DCN)
1017 max_requested.FFE_PRESET = lane_settings[0].FFE_PRESET;
1020 /* Determine what the maximum of the requested settings are*/
1021 for (lane = 1; lane < lt_settings->link_settings.lane_count; lane++) {
1022 if (lane_settings[lane].VOLTAGE_SWING > max_requested.VOLTAGE_SWING)
1023 max_requested.VOLTAGE_SWING = lane_settings[lane].VOLTAGE_SWING;
1025 if (lane_settings[lane].PRE_EMPHASIS > max_requested.PRE_EMPHASIS)
1026 max_requested.PRE_EMPHASIS = lane_settings[lane].PRE_EMPHASIS;
1027 #if defined(CONFIG_DRM_AMD_DC_DCN)
1028 if (lane_settings[lane].FFE_PRESET.settings.level >
1029 max_requested.FFE_PRESET.settings.level)
1030 max_requested.FFE_PRESET.settings.level =
1031 lane_settings[lane].FFE_PRESET.settings.level;
1035 /* make sure the requested settings are
1036 * not higher than maximum settings*/
1037 if (max_requested.VOLTAGE_SWING > VOLTAGE_SWING_MAX_LEVEL)
1038 max_requested.VOLTAGE_SWING = VOLTAGE_SWING_MAX_LEVEL;
1040 if (max_requested.PRE_EMPHASIS > PRE_EMPHASIS_MAX_LEVEL)
1041 max_requested.PRE_EMPHASIS = PRE_EMPHASIS_MAX_LEVEL;
1042 #if defined(CONFIG_DRM_AMD_DC_DCN)
1043 if (max_requested.FFE_PRESET.settings.level > DP_FFE_PRESET_MAX_LEVEL)
1044 max_requested.FFE_PRESET.settings.level = DP_FFE_PRESET_MAX_LEVEL;
1047 /* make sure the pre-emphasis matches the voltage swing*/
1048 if (max_requested.PRE_EMPHASIS >
1049 get_max_pre_emphasis_for_voltage_swing(
1050 max_requested.VOLTAGE_SWING))
1051 max_requested.PRE_EMPHASIS =
1052 get_max_pre_emphasis_for_voltage_swing(
1053 max_requested.VOLTAGE_SWING);
1055 for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
1056 lane_settings[lane].VOLTAGE_SWING = max_requested.VOLTAGE_SWING;
1057 lane_settings[lane].PRE_EMPHASIS = max_requested.PRE_EMPHASIS;
1058 #if defined(CONFIG_DRM_AMD_DC_DCN)
1059 lane_settings[lane].FFE_PRESET = max_requested.FFE_PRESET;
/* Apply debug overrides: if any of the VS/PE/post-cursor2/FFE override
 * pointers in lt_settings is set, force that value onto the lanes;
 * early-out when no override is configured.
 * NOTE(review): braces, the `return;` and #endif lines are missing
 * from this extraction; note the visible loop starts at lane 1 —
 * confirm lane 0 handling against upstream. */
1064 static void override_lane_settings(const struct link_training_settings *lt_settings,
1065 struct dc_lane_settings lane_settings[LANE_COUNT_DP_MAX])
1069 if (lt_settings->voltage_swing == NULL &&
1070 lt_settings->pre_emphasis == NULL &&
1071 #if defined(CONFIG_DRM_AMD_DC_DCN)
1072 lt_settings->ffe_preset == NULL &&
1074 lt_settings->post_cursor2 == NULL)
1078 for (lane = 1; lane < LANE_COUNT_DP_MAX; lane++) {
1079 if (lt_settings->voltage_swing)
1080 lane_settings[lane].VOLTAGE_SWING = *lt_settings->voltage_swing;
1081 if (lt_settings->pre_emphasis)
1082 lane_settings[lane].PRE_EMPHASIS = *lt_settings->pre_emphasis;
1083 if (lt_settings->post_cursor2)
1084 lane_settings[lane].POST_CURSOR2 = *lt_settings->post_cursor2;
1085 #if defined(CONFIG_DRM_AMD_DC_DCN)
1086 if (lt_settings->ffe_preset)
1087 lane_settings[lane].FFE_PRESET = *lt_settings->ffe_preset;
/* Read lane status, align status and per-lane adjust requests in one
 * 6-byte DPCD burst, from the sink or (is_repeater) from the given
 * repeater's register bank — repeaters pack the adjust bytes at offset
 * 3 instead of 4.  Unpacks the per-lane nibbles into ln_status[] and
 * ln_adjust[], copies the align byte, and logs both register pairs.
 * Returns the status of the DPCD read.
 * NOTE(review): braces, the lane declaration and several log-argument
 * lines are missing from this extraction (numbering gaps). */
1092 enum dc_status dp_get_lane_status_and_lane_adjust(
1093 struct dc_link *link,
1094 const struct link_training_settings *link_training_setting,
1095 union lane_status ln_status[LANE_COUNT_DP_MAX],
1096 union lane_align_status_updated *ln_align,
1097 union lane_adjust ln_adjust[LANE_COUNT_DP_MAX],
1100 unsigned int lane01_status_address = DP_LANE0_1_STATUS;
1101 uint8_t lane_adjust_offset = 4;
1102 unsigned int lane01_adjust_address;
1103 uint8_t dpcd_buf[6] = {0};
1105 enum dc_status status;
1107 if (is_repeater(link, offset)) {
1108 lane01_status_address =
1109 DP_LANE0_1_STATUS_PHY_REPEATER1 +
1110 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1111 lane_adjust_offset = 3;
1114 status = core_link_read_dpcd(
1116 lane01_status_address,
1117 (uint8_t *)(dpcd_buf),
1120 for (lane = 0; lane <
1121 (uint32_t)(link_training_setting->link_settings.lane_count);
1124 ln_status[lane].raw =
1125 get_nibble_at_index(&dpcd_buf[0], lane);
1126 ln_adjust[lane].raw =
1127 get_nibble_at_index(&dpcd_buf[lane_adjust_offset], lane);
1130 ln_align->raw = dpcd_buf[2];
1132 if (is_repeater(link, offset)) {
1133 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
1134 " 0x%X Lane01Status = %x\n 0x%X Lane23Status = %x\n ",
1137 lane01_status_address, dpcd_buf[0],
1138 lane01_status_address + 1, dpcd_buf[1]);
1140 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X Lane01Status = %x\n 0x%X Lane23Status = %x\n ",
1142 lane01_status_address, dpcd_buf[0],
1143 lane01_status_address + 1, dpcd_buf[1]);
1145 lane01_adjust_address = DP_ADJUST_REQUEST_LANE0_1;
1147 if (is_repeater(link, offset))
1148 lane01_adjust_address = DP_ADJUST_REQUEST_LANE0_1_PHY_REPEATER1 +
1149 ((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1151 if (is_repeater(link, offset)) {
1152 DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
1153 " 0x%X Lane01AdjustRequest = %x\n 0x%X Lane23AdjustRequest = %x\n",
1156 lane01_adjust_address,
1157 dpcd_buf[lane_adjust_offset],
1158 lane01_adjust_address + 1,
1159 dpcd_buf[lane_adjust_offset + 1]);
1161 DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X Lane01AdjustRequest = %x\n 0x%X Lane23AdjustRequest = %x\n",
1163 lane01_adjust_address,
1164 dpcd_buf[lane_adjust_offset],
1165 lane01_adjust_address + 1,
1166 dpcd_buf[lane_adjust_offset + 1]);
/* Write the driver's per-lane DPCD drive settings to TRAINING_LANE0_SET (or
 * the equivalent repeater register bank when offset > 0), then log either the
 * FFE preset (128b/132b) or VS/PE values (8b/10b) of lane 0.
 * NOTE(review): partially-extracted chunk; lines kept byte-identical, only
 * comments added.  Missing lines include the offset parameter, '#else' arms
 * and brace closers.
 */
1172 enum dc_status dpcd_set_lane_settings(
1173 	struct dc_link *link,
1174 	const struct link_training_settings *link_training_setting,
1177 	unsigned int lane0_set_address;
1178 	enum dc_status status;
1180 	lane0_set_address = DP_TRAINING_LANE0_SET;
/* Repeater hops have their own TRAINING_LANEx_SET bank. */
1182 	if (is_repeater(link, offset))
1183 		lane0_set_address = DP_TRAINING_LANE0_SET_PHY_REPEATER1 +
1184 			((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
/* One byte per active lane, written as a single burst. */
1186 	status = core_link_write_dpcd(link,
1188 		(uint8_t *)(link_training_setting->dpcd_lane_settings),
1189 		link_training_setting->link_settings.lane_count);
/* Logging only below this point — encoding decides which fields make sense. */
1191 	if (is_repeater(link, offset)) {
1192 #if defined(CONFIG_DRM_AMD_DC_DCN)
1193 		if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1194 				DP_128b_132b_ENCODING)
1195 			DC_LOG_HW_LINK_TRAINING("%s:\n LTTPR Repeater ID: %d\n"
1196 					" 0x%X TX_FFE_PRESET_VALUE = %x\n",
1200 					link_training_setting->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
1201 		else if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1204 			DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Repeater ID: %d\n"
1205 					" 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
1209 				link_training_setting->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
1210 				link_training_setting->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
1211 				link_training_setting->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
1212 				link_training_setting->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
1215 #if defined(CONFIG_DRM_AMD_DC_DCN)
1216 		if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1217 				DP_128b_132b_ENCODING)
1218 			DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X TX_FFE_PRESET_VALUE = %x\n",
1221 					link_training_setting->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE);
1222 		else if (dp_get_link_encoding_format(&link_training_setting->link_settings) ==
1225 			DC_LOG_HW_LINK_TRAINING("%s\n 0x%X VS set = %x PE set = %x max VS Reached = %x max PE Reached = %x\n",
1228 				link_training_setting->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET,
1229 				link_training_setting->dpcd_lane_settings[0].bits.PRE_EMPHASIS_SET,
1230 				link_training_setting->dpcd_lane_settings[0].bits.MAX_SWING_REACHED,
1231 				link_training_setting->dpcd_lane_settings[0].bits.MAX_PRE_EMPHASIS_REACHED);
/* Return true when any active lane's programmed DPCD voltage swing has hit
 * VOLTAGE_SWING_MAX_LEVEL — clock recovery cannot raise VS further.
 * NOTE(review): the 'return true;'/'return false;' lines of the original are
 * missing from this extraction; code kept byte-identical.
 */
1237 bool dp_is_max_vs_reached(
1238 	const struct link_training_settings *lt_settings)
1241 	for (lane = 0; lane <
1242 		(uint32_t)(lt_settings->link_settings.lane_count);
1244 		if (lt_settings->dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET
1245 			== VOLTAGE_SWING_MAX_LEVEL)
/* Service POST_LT_ADJ_REQ after training succeeds: while the sink keeps
 * POST_LT_ADJ_REQ_IN_PROGRESS set, re-read lane status/adjust requests and
 * re-drive new settings, bounded by POST_LT_ADJ_REQ_LIMIT request rounds and
 * POST_LT_ADJ_REQ_TIMEOUT polls per round.  Returns false on timeout/limit,
 * or if CR/EQ/symbol-lock drops (link considered lost).
 * NOTE(review): partially-extracted chunk; loop increments, 'return' lines
 * and brace closers are missing.  Lines kept byte-identical; comments added.
 */
1252 static bool perform_post_lt_adj_req_sequence(
1253 	struct dc_link *link,
1254 	struct link_training_settings *lt_settings)
1256 	enum dc_lane_count lane_count =
1257 	lt_settings->link_settings.lane_count;
1259 	uint32_t adj_req_count;
1260 	uint32_t adj_req_timer;
1261 	bool req_drv_setting_changed;
1264 	req_drv_setting_changed = false;
/* Outer loop: one iteration per adjust-request round granted to the sink. */
1265 	for (adj_req_count = 0; adj_req_count < POST_LT_ADJ_REQ_LIMIT;
1268 	req_drv_setting_changed = false;
/* Inner loop: poll until the sink deasserts POST_LT_ADJ_REQ_IN_PROGRESS or
 * asks for different drive settings. */
1270 	for (adj_req_timer = 0;
1271 	adj_req_timer < POST_LT_ADJ_REQ_TIMEOUT;
1274 	union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
1275 	union lane_align_status_updated
1276 	dpcd_lane_status_updated;
1277 	union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
1279 	dp_get_lane_status_and_lane_adjust(
1283 	&dpcd_lane_status_updated,
/* Sink is done adjusting — this round completed successfully. */
1287 	if (dpcd_lane_status_updated.bits.
1288 	POST_LT_ADJ_REQ_IN_PROGRESS == 0)
/* Any loss of CR, EQ, symbol lock or interlane alignment aborts. */
1291 	if (!dp_is_cr_done(lane_count, dpcd_lane_status))
1294 	if (!dp_is_ch_eq_done(lane_count, dpcd_lane_status) ||
1295 	!dp_is_symbol_locked(lane_count, dpcd_lane_status) ||
1296 	!dp_is_interlane_aligned(dpcd_lane_status_updated))
/* Detect whether the sink requested VS/PE values differing from what is
 * currently programmed on any lane. */
1299 	for (lane = 0; lane < (uint32_t)(lane_count); lane++) {
1302 	dpcd_lane_settings[lane].bits.VOLTAGE_SWING_SET !=
1303 	dpcd_lane_adjust[lane].bits.VOLTAGE_SWING_LANE ||
1304 	lt_settings->dpcd_lane_settings[lane].bits.PRE_EMPHASIS_SET !=
1305 	dpcd_lane_adjust[lane].bits.PRE_EMPHASIS_LANE) {
1307 	req_drv_setting_changed = true;
/* Apply the newly requested settings to HW and DPCD, then re-poll. */
1312 	if (req_drv_setting_changed) {
1313 	dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1314 	lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1316 	dc_link_dp_set_drive_settings(link,
1324 	if (!req_drv_setting_changed) {
1325 	DC_LOG_WARNING("%s: Post Link Training Adjust Request Timed out\n",
1332 	DC_LOG_WARNING("%s: Post Link Training Adjust Request limit reached\n",
1340 /* Only used for channel equalization */
1341 uint32_t dp_translate_training_aux_read_interval(uint32_t dpcd_aux_read_interval)
1343 unsigned int aux_rd_interval_us = 400;
1345 switch (dpcd_aux_read_interval) {
1347 aux_rd_interval_us = 4000;
1350 aux_rd_interval_us = 8000;
1353 aux_rd_interval_us = 12000;
1356 aux_rd_interval_us = 16000;
1358 #if defined(CONFIG_DRM_AMD_DC_DCN)
1360 aux_rd_interval_us = 32000;
1363 aux_rd_interval_us = 64000;
1370 return aux_rd_interval_us;
1373 enum link_training_result dp_get_cr_failure(enum dc_lane_count ln_count,
1374 union lane_status *dpcd_lane_status)
1376 enum link_training_result result = LINK_TRAINING_SUCCESS;
1378 if (ln_count >= LANE_COUNT_ONE && !dpcd_lane_status[0].bits.CR_DONE_0)
1379 result = LINK_TRAINING_CR_FAIL_LANE0;
1380 else if (ln_count >= LANE_COUNT_TWO && !dpcd_lane_status[1].bits.CR_DONE_0)
1381 result = LINK_TRAINING_CR_FAIL_LANE1;
1382 else if (ln_count >= LANE_COUNT_FOUR && !dpcd_lane_status[2].bits.CR_DONE_0)
1383 result = LINK_TRAINING_CR_FAIL_LANE23;
1384 else if (ln_count >= LANE_COUNT_FOUR && !dpcd_lane_status[3].bits.CR_DONE_0)
1385 result = LINK_TRAINING_CR_FAIL_LANE23;
/* Channel-equalization phase: transmit the EQ training pattern, program lane
 * settings on HW and DPCD each retry, wait the (possibly repeater-specific)
 * AUX read interval, then check CR still holds and EQ/symbol-lock/alignment
 * complete.  Bounded by LINK_TRAINING_MAX_RETRY_COUNT.
 * NOTE(review): partially-extracted chunk; '#else'/'#endif', loop closers and
 * some call arguments are missing.  '<_settings' on the line numbered 1406
 * looks like an HTML-entity-mangled '&lt_settings' — confirm against the
 * upstream file before building.  Lines kept byte-identical; comments added.
 */
1389 static enum link_training_result perform_channel_equalization_sequence(
1390 	struct dc_link *link,
1391 	struct link_training_settings *lt_settings,
1394 	enum dc_dp_training_pattern tr_pattern;
1395 	uint32_t retries_ch_eq;
1396 	uint32_t wait_time_microsec;
1397 	enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
1398 	union lane_align_status_updated dpcd_lane_status_updated = {0};
1399 	union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
1400 	union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
1402 	/* Note: also check that TPS4 is a supported feature*/
1403 	tr_pattern = lt_settings->pattern_for_eq;
/* LTTPR hops in 8b/10b mode are always trained with TPS4. */
1405 #if defined(CONFIG_DRM_AMD_DC_DCN)
1406 	if (is_repeater(link, offset) && dp_get_link_encoding_format(<_settings->link_settings) == DP_8b_10b_ENCODING)
1407 		tr_pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
1409 	if (is_repeater(link, offset))
1410 		tr_pattern = DP_TRAINING_PATTERN_SEQUENCE_4;
1413 	dp_set_hw_training_pattern(link, tr_pattern, offset);
1415 	for (retries_ch_eq = 0; retries_ch_eq <= LINK_TRAINING_MAX_RETRY_COUNT;
1418 		dp_set_hw_lane_settings(link, lt_settings, offset);
1422 		/* EPR #361076 - write as a 5-byte burst,
1423 		 * but only for the 1-st iteration
/* First iteration writes pattern + lane settings in one burst; subsequent
 * iterations only rewrite the lane settings. */
1426 		dpcd_set_lt_pattern_and_lane_settings(
1429 			tr_pattern, offset);
1431 		dpcd_set_lane_settings(link, lt_settings, offset);
1433 		/* 3. wait for receiver to lock-on*/
1434 		wait_time_microsec = lt_settings->eq_pattern_time;
/* Repeater hops publish their own AUX_RD_INTERVAL per DP spec. */
1436 		if (is_repeater(link, offset))
1437 			wait_time_microsec =
1438 				dp_translate_training_aux_read_interval(
1439 					link->dpcd_caps.lttpr_caps.aux_rd_interval[offset - 1]);
/* Vendor-specific workaround forces a fixed 16ms wait. */
1441 		if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
1442 			(link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
1443 			link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1444 			wait_time_microsec = 16000;
1447 		dp_wait_for_training_aux_rd_interval(
1449 			wait_time_microsec);
1451 		/* 4. Read lane status and requested
1452 		 * drive settings as set by the sink*/
1454 		dp_get_lane_status_and_lane_adjust(
1458 			&dpcd_lane_status_updated,
1462 		/* 5. check CR done*/
1463 		if (!dp_is_cr_done(lane_count, dpcd_lane_status))
1464 			return LINK_TRAINING_EQ_FAIL_CR;
1466 		/* 6. check CHEQ done*/
1467 		if (dp_is_ch_eq_done(lane_count, dpcd_lane_status) &&
1468 			dp_is_symbol_locked(lane_count, dpcd_lane_status) &&
1469 			dp_is_interlane_aligned(dpcd_lane_status_updated))
1470 			return LINK_TRAINING_SUCCESS;
1472 		/* 7. update VS/PE/PC2 in lt_settings*/
1473 		dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1474 			lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1477 	return LINK_TRAINING_EQ_FAIL_EQ;
/* Kick off TPS1 on the GPU side ahead of the formal CR sequence so the sink
 * sees the pattern early; programs both the HW pattern and lane settings.
 * NOTE(review): partially-extracted chunk — trailing lines (e.g. the delay
 * implied by the log text) are missing.  Lines kept byte-identical.
 */
1481 static void start_clock_recovery_pattern_early(struct dc_link *link,
1482 	struct link_training_settings *lt_settings,
1485 	DC_LOG_HW_LINK_TRAINING("%s\n GPU sends TPS1. Wait 400us.\n",
1487 	dp_set_hw_training_pattern(link, lt_settings->pattern_for_cr, offset);
1488 	dp_set_hw_lane_settings(link, lt_settings, offset);
/* Clock-recovery phase: drive the CR training pattern, program lane settings,
 * wait the AUX read interval, read lane status, and loop until CR is done or
 * retry/voltage-swing limits are hit.  Returns a per-lane CR failure code on
 * give-up.
 * NOTE(review): partially-extracted chunk.  'retries_cr'/'retry_count' appear
 * uninitialized here, but their '= 0' initializers presumably sit in the
 * missing lines (gap between numbers 1503 and 1508) — confirm upstream.
 * '<_settings' on lines numbered 1583/1595/1600 looks like an HTML-entity-
 * mangled '&lt_settings'.  Lines kept byte-identical; comments added.
 */
1492 static enum link_training_result perform_clock_recovery_sequence(
1493 	struct dc_link *link,
1494 	struct link_training_settings *lt_settings,
1497 	uint32_t retries_cr;
1498 	uint32_t retry_count;
1499 	uint32_t wait_time_microsec;
1500 	enum dc_lane_count lane_count = lt_settings->link_settings.lane_count;
1501 	union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
1502 	union lane_align_status_updated dpcd_lane_status_updated;
1503 	union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
/* Pattern may already be transmitting if the early-CR workaround ran. */
1508 	if (!link->ctx->dc->work_arounds.lt_early_cr_pattern)
1509 		dp_set_hw_training_pattern(link, lt_settings->pattern_for_cr, offset);
1511 	/* najeeb - The synaptics MST hub can put the LT in
1512 	* infinite loop by switching the VS
1514 	/* between level 0 and level 1 continuously, here
1515 	* we try for CR lock for LinkTrainingMaxCRRetry count*/
1516 	while ((retries_cr < LINK_TRAINING_MAX_RETRY_COUNT) &&
1517 	(retry_count < LINK_TRAINING_MAX_CR_RETRY)) {
1519 	memset(&dpcd_lane_status, '\0', sizeof(dpcd_lane_status));
1520 	memset(&dpcd_lane_status_updated, '\0',
1521 	sizeof(dpcd_lane_status_updated));
1523 	/* 1. call HWSS to set lane settings*/
1524 	dp_set_hw_lane_settings(
1529 	/* 2. update DPCD of the receiver*/
1531 	/* EPR #361076 - write as a 5-byte burst,
1532 	* but only for the 1-st iteration.*/
1533 	dpcd_set_lt_pattern_and_lane_settings(
1536 	lt_settings->pattern_for_cr,
1539 	dpcd_set_lane_settings(
1544 	/* 3. wait receiver to lock-on*/
1545 	wait_time_microsec = lt_settings->cr_pattern_time;
1547 	if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT)
1548 	wait_time_microsec = TRAINING_AUX_RD_INTERVAL;
/* Vendor-specific workaround forces a fixed 16ms wait. */
1550 	if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
1551 	(link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN)) {
1552 	wait_time_microsec = 16000;
1555 	dp_wait_for_training_aux_rd_interval(
1557 	wait_time_microsec);
1559 	/* 4. Read lane status and requested drive
1560 	* settings as set by the sink
1562 	dp_get_lane_status_and_lane_adjust(
1566 	&dpcd_lane_status_updated,
1570 	if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
1571 	(link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
1572 	link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1573 	vendor_specific_lttpr_wa_one_end(link, retry_count);
1574 	vendor_specific_lttpr_wa_three(link, dpcd_lane_adjust);
1577 	/* 5. check CR done*/
1578 	if (dp_is_cr_done(lane_count, dpcd_lane_status))
1579 	return LINK_TRAINING_SUCCESS;
/* Max-VS bail-out only applies to 8b/10b; 128b/132b uses FFE presets. */
1581 	/* 6. max VS reached*/
1582 #if defined(CONFIG_DRM_AMD_DC_DCN)
1583 	if ((dp_get_link_encoding_format(<_settings->link_settings) ==
1584 	DP_8b_10b_ENCODING) &&
1585 	dp_is_max_vs_reached(lt_settings))
1588 	if (dp_is_max_vs_reached(lt_settings))
1592 	/* 7. same lane settings*/
1593 	/* Note: settings are the same for all lanes,
1594 	* so comparing first lane is sufficient*/
1595 	if ((dp_get_link_encoding_format(<_settings->link_settings) == DP_8b_10b_ENCODING) &&
1596 	lt_settings->dpcd_lane_settings[0].bits.VOLTAGE_SWING_SET ==
1597 	dpcd_lane_adjust[0].bits.VOLTAGE_SWING_LANE)
1599 #if defined(CONFIG_DRM_AMD_DC_DCN)
1600 	else if ((dp_get_link_encoding_format(<_settings->link_settings) == DP_128b_132b_ENCODING) &&
1601 	lt_settings->dpcd_lane_settings[0].tx_ffe.PRESET_VALUE ==
1602 	dpcd_lane_adjust[0].tx_ffe.PRESET_VALUE)
1608 	/* 8. update VS/PE/PC2 in lt_settings*/
1609 	dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
1610 	lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1614 	if (retry_count >= LINK_TRAINING_MAX_CR_RETRY) {
1616 	DC_LOG_ERROR("%s: Link Training Error, could not get CR after %d tries. Possibly voltage swing issue",
1618 	LINK_TRAINING_MAX_CR_RETRY);
1622 	return dp_get_cr_failure(lane_count, dpcd_lane_status);
/* After training: switch the main link to the idle/video pattern, optionally
 * run the POST_LT_ADJ_REQ sequence (only when the sink supports it and TPS4
 * was not used), verify the link did not drop, and finally write back the
 * lane count with POST_LT_ADJ_REQ_GRANTED cleared.
 * NOTE(review): partially-extracted chunk ('#else', 'return status;' and brace
 * closers missing).  Lines kept byte-identical; comments added.
 */
1625 static inline enum link_training_result dp_transition_to_video_idle(
1626 	struct dc_link *link,
1627 	struct link_training_settings *lt_settings,
1628 	enum link_training_result status)
1630 	union lane_count_set lane_count_set = {0};
1632 	/* 4. mainlink output idle pattern*/
1633 	dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
1636 	* 5. post training adjust if required
1637 	* If the upstream DPTX and downstream DPRX both support TPS4,
1638 	* TPS4 must be used instead of POST_LT_ADJ_REQ.
/* DCN builds treat any pattern >= TPS4 (incl. 128b/132b patterns) as
 * superseding POST_LT_ADJ_REQ; non-DCN compares for TPS4 equality. */
1640 	if (link->dpcd_caps.max_ln_count.bits.POST_LT_ADJ_REQ_SUPPORTED != 1 ||
1641 #if defined(CONFIG_DRM_AMD_DC_DCN)
1642 		lt_settings->pattern_for_eq >= DP_TRAINING_PATTERN_SEQUENCE_4) {
1644 		lt_settings->pattern_for_eq == DP_TRAINING_PATTERN_SEQUENCE_4) {
1646 	/* delay 5ms after Main Link output idle pattern and then check
/* Skip the link-loss re-check on eDP panels. */
1649 	if (link->connector_signal != SIGNAL_TYPE_EDP && status == LINK_TRAINING_SUCCESS) {
1651 		status = dp_check_link_loss_status(link, lt_settings);
1656 	if (status == LINK_TRAINING_SUCCESS &&
1657 		perform_post_lt_adj_req_sequence(link, lt_settings) == false)
1658 		status = LINK_TRAINING_LQA_FAIL;
1660 	lane_count_set.bits.LANE_COUNT_SET = lt_settings->link_settings.lane_count;
1661 	lane_count_set.bits.ENHANCED_FRAMING = lt_settings->enhanced_framing;
1662 	lane_count_set.bits.POST_LT_ADJ_REQ_GRANTED = 0;
1664 	core_link_write_dpcd(
1667 		&lane_count_set.raw,
1668 		sizeof(lane_count_set));
/* Re-read sink status (6-byte burst) after training and report
 * LINK_TRAINING_LINK_LOSS if any active lane dropped CR, channel EQ or
 * symbol lock.
 * NOTE(review): partially-extracted chunk (read address argument, loop/brace
 * closers and 'return status;' missing).  Lines kept byte-identical.
 */
1673 enum link_training_result dp_check_link_loss_status(
1674 	struct dc_link *link,
1675 	const struct link_training_settings *link_training_setting)
1677 	enum link_training_result status = LINK_TRAINING_SUCCESS;
1678 	union lane_status lane_status;
1679 	uint8_t dpcd_buf[6] = {0};
1682 	core_link_read_dpcd(
1685 		(uint8_t *)(dpcd_buf),
1688 	/*parse lane status*/
1689 	for (lane = 0; lane < link->cur_link_settings.lane_count; lane++) {
1691 	* check lanes status
/* Lane status nibbles start at byte 2 of this particular burst. */
1693 	lane_status.raw = get_nibble_at_index(&dpcd_buf[2], lane);
1695 	if (!lane_status.bits.CHANNEL_EQ_DONE_0 ||
1696 		!lane_status.bits.CR_DONE_0 ||
1697 		!lane_status.bits.SYMBOL_LOCKED_0) {
1698 	/* if one of the channel equalization, clock
1699 	* recovery or symbol lock is dropped
1700 	* consider it as (link has been
1701 	* dropped) dp sink status has changed
1703 	status = LINK_TRAINING_LINK_LOSS;
/* Populate a link_training_settings struct with the defaults used for
 * 8b/10b link training: copies the requested link settings, picks CR/EQ
 * patterns and AUX read intervals, and derives the initial DPCD lane
 * settings from the HW lane settings.
 * NOTE(review): partially-extracted chunk; lines kept byte-identical.
 */
1711 static inline void decide_8b_10b_training_settings(
1712 	struct dc_link *link,
1713 	const struct dc_link_settings *link_setting,
1714 	struct link_training_settings *lt_settings)
1716 	memset(lt_settings, '\0', sizeof(struct link_training_settings));
1718 	/* Initialize link settings */
1719 	lt_settings->link_settings.use_link_rate_set = link_setting->use_link_rate_set;
1720 	lt_settings->link_settings.link_rate_set = link_setting->link_rate_set;
1721 	lt_settings->link_settings.link_rate = link_setting->link_rate;
1722 	lt_settings->link_settings.lane_count = link_setting->lane_count;
1723 	/* TODO hard coded to SS for now
1724 	* lt_settings.link_settings.link_spread =
1725 	* dal_display_path_is_ss_supported(
1726 	* path_mode->display_path) ?
1727 	* LINK_SPREAD_05_DOWNSPREAD_30KHZ :
1728 	* LINK_SPREAD_DISABLED;
1730 	lt_settings->link_settings.link_spread = link->dp_ss_off ?
1731 	LINK_SPREAD_DISABLED : LINK_SPREAD_05_DOWNSPREAD_30KHZ;
1732 	lt_settings->lttpr_mode = link->lttpr_mode;
1733 	lt_settings->cr_pattern_time = get_cr_training_aux_rd_interval(link, link_setting);
1734 	lt_settings->eq_pattern_time = get_eq_training_aux_rd_interval(link, link_setting);
1735 	lt_settings->pattern_for_cr = decide_cr_training_pattern(link_setting);
1736 	lt_settings->pattern_for_eq = decide_eq_training_pattern(link, link_setting);
1737 	lt_settings->enhanced_framing = 1;
1738 	lt_settings->should_set_fec_ready = true;
1739 	lt_settings->disallow_per_lane_settings = true;
1740 	lt_settings->always_match_dpcd_with_hw_lane_settings = true;
/* Seed the DPCD lane settings from the (zeroed) HW lane settings. */
1741 	dp_hw_to_dpcd_lane_settings(lt_settings, lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1744 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* Populate link_training_settings for 128b/132b (DP 2.x) training: fixed EQ
 * and CDS pattern times, EQ loop/wait limits, CDS wait limit scaled by the
 * number of LTTPR hops, and LTTPR mode derived from the repeater count.
 * NOTE(review): partially-extracted chunk; lines kept byte-identical.
 */
1745 static inline void decide_128b_132b_training_settings(struct dc_link *link,
1746 	const struct dc_link_settings *link_settings,
1747 	struct link_training_settings *lt_settings)
1749 	memset(lt_settings, 0, sizeof(*lt_settings));
1751 	lt_settings->link_settings = *link_settings;
1752 	/* TODO: should decide link spread when populating link_settings */
1753 	lt_settings->link_settings.link_spread = link->dp_ss_off ? LINK_SPREAD_DISABLED :
1754 	LINK_SPREAD_05_DOWNSPREAD_30KHZ;
1756 	lt_settings->pattern_for_cr = decide_cr_training_pattern(link_settings);
1757 	lt_settings->pattern_for_eq = decide_eq_training_pattern(link, link_settings);
1758 	lt_settings->eq_pattern_time = 2500;
1759 	lt_settings->eq_wait_time_limit = 400000;
1760 	lt_settings->eq_loop_count_limit = 20;
1761 	lt_settings->pattern_for_cds = DP_128b_132b_TPS2_CDS;
1762 	lt_settings->cds_pattern_time = 2500;
/* CDS wait limit: 20 ms per hop (repeaters + DPRX). */
1763 	lt_settings->cds_wait_time_limit = (dp_convert_to_count(
1764 	link->dpcd_caps.lttpr_caps.phy_repeater_cnt) + 1) * 20000;
1765 	lt_settings->lttpr_mode = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt) ?
1766 	LTTPR_MODE_NON_TRANSPARENT : LTTPR_MODE_TRANSPARENT;
1767 	lt_settings->disallow_per_lane_settings = true;
1768 	dp_hw_to_dpcd_lane_settings(lt_settings,
1769 	lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
/* Dispatch to the encoding-specific settings builder (8b/10b vs 128b/132b).
 * NOTE(review): partially-extracted chunk ('#endif'/closing brace missing).
 */
1773 void dp_decide_training_settings(
1774 	struct dc_link *link,
1775 	const struct dc_link_settings *link_settings,
1776 	struct link_training_settings *lt_settings)
1778 	if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING)
1779 		decide_8b_10b_training_settings(link, link_settings, lt_settings);
1780 #if defined(CONFIG_DRM_AMD_DC_DCN)
1781 	else if (dp_get_link_encoding_format(link_settings) == DP_128b_132b_ENCODING)
1782 		decide_128b_132b_training_settings(link, link_settings, lt_settings);
/* Apply caller/preference overrides on top of the decided training settings:
 * link spread, per-knob lane-setting pointers, BIOS-forced drive values (for
 * the fixed-VS vendor path), then rebuild lane_settings/dpcd_lane_settings
 * and override timing/pattern/FEC fields when requested.
 * NOTE(review): partially-extracted chunk; lines kept byte-identical,
 * comments added.
 */
1786 static void override_training_settings(
1787 	struct dc_link *link,
1788 	const struct dc_link_training_overrides *overrides,
1789 	struct link_training_settings *lt_settings)
1793 	/* Override link spread */
1794 	if (!link->dp_ss_off && overrides->downspread != NULL)
1795 		lt_settings->link_settings.link_spread = *overrides->downspread ?
1796 		LINK_SPREAD_05_DOWNSPREAD_30KHZ
1797 		: LINK_SPREAD_DISABLED;
1799 	/* Override lane settings */
1800 	if (overrides->voltage_swing != NULL)
1801 		lt_settings->voltage_swing = overrides->voltage_swing;
1802 	if (overrides->pre_emphasis != NULL)
1803 		lt_settings->pre_emphasis = overrides->pre_emphasis;
1804 	if (overrides->post_cursor2 != NULL)
1805 		lt_settings->post_cursor2 = overrides->post_cursor2;
1806 #if defined(CONFIG_DRM_AMD_DC_DCN)
1807 	if (overrides->ffe_preset != NULL)
1808 		lt_settings->ffe_preset = overrides->ffe_preset;
1810 	/* Override HW lane settings with BIOS forced values if present */
/* Fixed-VS retimer path: drive values come from VBIOS and must not be
 * back-propagated from DPCD requests. */
1811 	if (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN &&
1812 		link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
1813 		lt_settings->voltage_swing = &link->bios_forced_drive_settings.VOLTAGE_SWING;
1814 		lt_settings->pre_emphasis = &link->bios_forced_drive_settings.PRE_EMPHASIS;
1815 		lt_settings->always_match_dpcd_with_hw_lane_settings = false;
/* Materialize the override pointers into concrete per-lane values,
 * defaulting to level 0 / disabled when no override exists. */
1817 	for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++) {
1818 		lt_settings->lane_settings[lane].VOLTAGE_SWING =
1819 			lt_settings->voltage_swing != NULL ?
1820 			*lt_settings->voltage_swing :
1821 			VOLTAGE_SWING_LEVEL0;
1822 		lt_settings->lane_settings[lane].PRE_EMPHASIS =
1823 			lt_settings->pre_emphasis != NULL ?
1824 			*lt_settings->pre_emphasis
1825 			: PRE_EMPHASIS_DISABLED;
1826 		lt_settings->lane_settings[lane].POST_CURSOR2 =
1827 			lt_settings->post_cursor2 != NULL ?
1828 			*lt_settings->post_cursor2
1829 			: POST_CURSOR2_DISABLED;
1832 	dp_hw_to_dpcd_lane_settings(lt_settings,
1833 		lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
1835 	/* Initialize training timings */
1836 	if (overrides->cr_pattern_time != NULL)
1837 		lt_settings->cr_pattern_time = *overrides->cr_pattern_time;
1839 	if (overrides->eq_pattern_time != NULL)
1840 		lt_settings->eq_pattern_time = *overrides->eq_pattern_time;
1842 	if (overrides->pattern_for_cr != NULL)
1843 		lt_settings->pattern_for_cr = *overrides->pattern_for_cr;
1844 	if (overrides->pattern_for_eq != NULL)
1845 		lt_settings->pattern_for_eq = *overrides->pattern_for_eq;
1847 	if (overrides->enhanced_framing != NULL)
1848 		lt_settings->enhanced_framing = *overrides->enhanced_framing;
1850 	if (link->preferred_training_settings.fec_enable != NULL)
1851 		lt_settings->should_set_fec_ready = *link->preferred_training_settings.fec_enable;
1854 uint8_t dp_convert_to_count(uint8_t lttpr_repeater_count)
1856 switch (lttpr_repeater_count) {
1857 case 0x80: // 1 lttpr repeater
1859 case 0x40: // 2 lttpr repeaters
1861 case 0x20: // 3 lttpr repeaters
1863 case 0x10: // 4 lttpr repeaters
1865 case 0x08: // 5 lttpr repeaters
1867 case 0x04: // 6 lttpr repeaters
1869 case 0x02: // 7 lttpr repeaters
1871 case 0x01: // 8 lttpr repeaters
1876 return 0; // invalid value
/* Program the LTTPR chain into transparent pass-through mode by writing
 * DP_PHY_REPEATER_MODE_TRANSPARENT to the PHY_REPEATER_MODE DPCD register;
 * propagates the AUX write status to the caller.
 * NOTE(review): partially-extracted chunk (opening/closing braces missing);
 * lines kept byte-identical.
 */
1879 static enum dc_status configure_lttpr_mode_transparent(struct dc_link *link)
1881 	uint8_t repeater_mode = DP_PHY_REPEATER_MODE_TRANSPARENT;
1883 	DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Transparent Mode\n", __func__);
1884 	return core_link_write_dpcd(link,
1885 			DP_PHY_REPEATER_MODE,
1886 			(uint8_t *)&repeater_mode,
1887 			sizeof(repeater_mode));
/* Switch the LTTPR chain into non-transparent mode.  For 8b/10b the spec
 * requires first writing transparent mode, then non-transparent; on success
 * the cached lttpr_caps.mode is updated and each repeater's AUX_RD_INTERVAL
 * is read back (cleared for the DPTX-to-DPIA hop on USB4 tunnels).
 * NOTE(review): partially-extracted chunk.  '<_settings' on the line numbered
 * 1902 looks like an HTML-entity-mangled '&lt_settings' — confirm upstream.
 * Lines kept byte-identical; comments added.
 */
1890 static enum dc_status configure_lttpr_mode_non_transparent(
1891 	struct dc_link *link,
1892 	const struct link_training_settings *lt_settings)
1894 	/* aux timeout is already set to extended */
1895 	/* RESET/SET lttpr mode to enable non transparent mode */
1896 	uint8_t repeater_cnt;
1897 	uint32_t aux_interval_address;
1898 	uint8_t repeater_id;
1899 	enum dc_status result = DC_ERROR_UNEXPECTED;
1900 	uint8_t repeater_mode = DP_PHY_REPEATER_MODE_TRANSPARENT;
1902 	enum dp_link_encoding encoding = dp_get_link_encoding_format(<_settings->link_settings);
/* 8b/10b: write transparent mode first, per the LTTPR programming sequence. */
1904 	if (encoding == DP_8b_10b_ENCODING) {
1905 		DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Transparent Mode\n", __func__);
1906 		result = core_link_write_dpcd(link,
1907 				DP_PHY_REPEATER_MODE,
1908 				(uint8_t *)&repeater_mode,
1909 				sizeof(repeater_mode));
1913 	if (result == DC_OK) {
1914 		link->dpcd_caps.lttpr_caps.mode = repeater_mode;
1917 	if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
1919 		DC_LOG_HW_LINK_TRAINING("%s\n Set LTTPR to Non Transparent Mode\n", __func__);
1921 		repeater_mode = DP_PHY_REPEATER_MODE_NON_TRANSPARENT;
1922 		result = core_link_write_dpcd(link,
1923 				DP_PHY_REPEATER_MODE,
1924 				(uint8_t *)&repeater_mode,
1925 				sizeof(repeater_mode));
1927 		if (result == DC_OK) {
1928 			link->dpcd_caps.lttpr_caps.mode = repeater_mode;
1931 	if (encoding == DP_8b_10b_ENCODING) {
1932 		repeater_cnt = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
1934 		/* Driver does not need to train the first hop. Skip DPCD read and clear
1935 		* AUX_RD_INTERVAL for DPTX-to-DPIA hop.
1937 		if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA)
1938 			link->dpcd_caps.lttpr_caps.aux_rd_interval[--repeater_cnt] = 0;
/* Walk from the farthest repeater towards the source, caching each hop's
 * AUX_RD_INTERVAL (bit 7 masked off per DPCD field definition). */
1940 		for (repeater_id = repeater_cnt; repeater_id > 0; repeater_id--) {
1941 			aux_interval_address = DP_TRAINING_AUX_RD_INTERVAL_PHY_REPEATER1 +
1942 				((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (repeater_id - 1));
1943 			core_link_read_dpcd(
1945 				aux_interval_address,
1946 				(uint8_t *)&link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1],
1947 				sizeof(link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1]));
1948 			link->dpcd_caps.lttpr_caps.aux_rd_interval[repeater_id - 1] &= 0x7F;
/* Tell one LTTPR hop that training finished by writing the video-idle
 * (training-not-in-progress) pattern to its TRAINING_PATTERN_SET register,
 * then log the write.
 * NOTE(review): partially-extracted chunk; lines kept byte-identical.
 */
1956 static void repeater_training_done(struct dc_link *link, uint32_t offset)
1958 	union dpcd_training_pattern dpcd_pattern = {0};
1960 	const uint32_t dpcd_base_lt_offset =
1961 		DP_TRAINING_PATTERN_SET_PHY_REPEATER1 +
1962 		((DP_REPEATER_CONFIGURATION_AND_STATUS_SIZE) * (offset - 1));
1963 	/* Set training not in progress*/
1964 	dpcd_pattern.v1_4.TRAINING_PATTERN_SET = DPCD_TRAINING_PATTERN_VIDEOIDLE;
1966 	core_link_write_dpcd(
1968 		dpcd_base_lt_offset,
1972 	DC_LOG_HW_LINK_TRAINING("%s\n LTTPR Id: %d 0x%X pattern = %x\n",
1975 		dpcd_base_lt_offset,
1976 		dpcd_pattern.v1_4.TRAINING_PATTERN_SET);
/* Emit a one-line connectivity log summarizing the training outcome: link
 * rate, lane count, result string, lane-0 VS/PE and downspread setting.
 * NOTE(review): partially-extracted chunk — most 'case' bodies before the
 * numbered gaps (rate-name assignments, 'break;' lines) are missing.  Lines
 * kept byte-identical; comments added.
 */
1979 static void print_status_message(
1980 	struct dc_link *link,
1981 	const struct link_training_settings *lt_settings,
1982 	enum link_training_result status)
1984 	char *link_rate = "Unknown";
1985 	char *lt_result = "Unknown";
1986 	char *lt_spread = "Disabled";
/* Map the trained link rate to a printable name. */
1988 	switch (lt_settings->link_settings.link_rate) {
1992 	case LINK_RATE_RATE_2:
1995 	case LINK_RATE_RATE_3:
1998 	case LINK_RATE_HIGH:
2001 	case LINK_RATE_RBR2:
2004 	case LINK_RATE_RATE_6:
2007 	case LINK_RATE_HIGH2:
2010 	case LINK_RATE_HIGH3:
2013 #if defined(CONFIG_DRM_AMD_DC_DCN)
2014 	case LINK_RATE_UHBR10:
2015 		link_rate = "UHBR10";
2017 	case LINK_RATE_UHBR13_5:
2018 		link_rate = "UHBR13.5";
2020 	case LINK_RATE_UHBR20:
2021 		link_rate = "UHBR20";
/* Map the training result code to a printable string. */
2029 	case LINK_TRAINING_SUCCESS:
2032 	case LINK_TRAINING_CR_FAIL_LANE0:
2033 		lt_result = "CR failed lane0";
2035 	case LINK_TRAINING_CR_FAIL_LANE1:
2036 		lt_result = "CR failed lane1";
2038 	case LINK_TRAINING_CR_FAIL_LANE23:
2039 		lt_result = "CR failed lane23";
2041 	case LINK_TRAINING_EQ_FAIL_CR:
2042 		lt_result = "CR failed in EQ";
2044 	case LINK_TRAINING_EQ_FAIL_EQ:
2045 		lt_result = "EQ failed";
2047 	case LINK_TRAINING_LQA_FAIL:
2048 		lt_result = "LQA failed";
2050 	case LINK_TRAINING_LINK_LOSS:
2051 		lt_result = "Link loss";
2053 #if defined(CONFIG_DRM_AMD_DC_DCN)
2054 	case DP_128b_132b_LT_FAILED:
2055 		lt_result = "LT_FAILED received";
2057 	case DP_128b_132b_MAX_LOOP_COUNT_REACHED:
2058 		lt_result = "max loop count reached";
2060 	case DP_128b_132b_CHANNEL_EQ_DONE_TIMEOUT:
2061 		lt_result = "channel EQ timeout";
2063 	case DP_128b_132b_CDS_DONE_TIMEOUT:
2064 		lt_result = "CDS timeout";
/* Map the downspread setting to a printable string. */
2071 	switch (lt_settings->link_settings.link_spread) {
2072 	case LINK_SPREAD_DISABLED:
2073 		lt_spread = "Disabled";
2075 	case LINK_SPREAD_05_DOWNSPREAD_30KHZ:
2076 		lt_spread = "0.5% 30KHz";
2078 	case LINK_SPREAD_05_DOWNSPREAD_33KHZ:
2079 		lt_spread = "0.5% 33KHz";
2085 	/* Connectivity log: link training */
2086 #if defined(CONFIG_DRM_AMD_DC_DCN)
2087 	/* TODO - DP2.0 Log: add connectivity log for FFE PRESET */
2089 	CONN_MSG_LT(link, "%sx%d %s VS=%d, PE=%d, DS=%s",
2091 		lt_settings->link_settings.lane_count,
2093 		lt_settings->lane_settings[0].VOLTAGE_SWING,
2094 		lt_settings->lane_settings[0].PRE_EMPHASIS,
/* Push the current lane drive settings to hardware (PHY) first, mirror them
 * into the DPCD lane-settings representation, then notify the sink over AUX.
 * The HW-before-DPCD ordering is deliberate: the sink must be told what the
 * source is already transmitting.
 * NOTE(review): partially-extracted chunk; lines kept byte-identical.
 */
2098 void dc_link_dp_set_drive_settings(
2099 	struct dc_link *link,
2100 	struct link_training_settings *lt_settings)
2102 	/* program ASIC PHY settings*/
2103 	dp_set_hw_lane_settings(link, lt_settings, DPRX);
2105 	dp_hw_to_dpcd_lane_settings(lt_settings,
2106 		lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2108 	/* Notify DP sink the PHY settings from source */
2109 	dpcd_set_lane_settings(link, lt_settings, DPRX);
/* "Blind" link training: run the CR and EQ phases open-loop (transmit each
 * pattern and wait the configured interval) without reading any status back
 * over AUX, then emit the idle pattern.  Used when the AUX channel cannot be
 * relied on.
 * NOTE(review): partially-extracted chunk.  '<_settings' on the lines
 * numbered 2133/2144/2154 looks like an HTML-entity-mangled '&lt_settings'
 * — confirm upstream.  Lines kept byte-identical; comments added.
 */
2112 bool dc_link_dp_perform_link_training_skip_aux(
2113 	struct dc_link *link,
2114 	const struct dc_link_settings *link_setting)
2116 	struct link_training_settings lt_settings = {0};
2118 	dp_decide_training_settings(
2122 	override_training_settings(
2124 		&link->preferred_training_settings,
2127 	/* 1. Perform_clock_recovery_sequence. */
2129 	/* transmit training pattern for clock recovery */
2130 	dp_set_hw_training_pattern(link, lt_settings.pattern_for_cr, DPRX);
2132 	/* call HWSS to set lane settings*/
2133 	dp_set_hw_lane_settings(link, <_settings, DPRX);
2135 	/* wait receiver to lock-on*/
2136 	dp_wait_for_training_aux_rd_interval(link, lt_settings.cr_pattern_time);
2138 	/* 2. Perform_channel_equalization_sequence. */
2140 	/* transmit training pattern for channel equalization. */
2141 	dp_set_hw_training_pattern(link, lt_settings.pattern_for_eq, DPRX);
2143 	/* call HWSS to set lane settings*/
2144 	dp_set_hw_lane_settings(link, <_settings, DPRX);
2146 	/* wait receiver to lock-on. */
2147 	dp_wait_for_training_aux_rd_interval(link, lt_settings.eq_pattern_time);
2149 	/* 3. Perform_link_training_int. */
2151 	/* Mainlink output idle pattern. */
2152 	dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
/* No status was read, so success is assumed and logged unconditionally. */
2154 	print_status_message(link, <_settings, LINK_TRAINING_SUCCESS);
2159 enum dc_status dpcd_configure_lttpr_mode(struct dc_link *link, struct link_training_settings *lt_settings)
2161 enum dc_status status = DC_OK;
2163 if (lt_settings->lttpr_mode == LTTPR_MODE_TRANSPARENT)
2164 status = configure_lttpr_mode_transparent(link);
2166 else if (lt_settings->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT)
2167 status = configure_lttpr_mode_non_transparent(link, lt_settings);
/* Leave training mode: clear TRAINING_PATTERN_SET to video-idle, and on DCN
 * builds poll SINK_STATUS (up to 10 tries) until the 128b/132b intra-hop AUX
 * reply indication deasserts.
 * NOTE(review): partially-extracted chunk (loop delay/closing lines missing);
 * lines kept byte-identical.
 */
2172 static void dpcd_exit_training_mode(struct dc_link *link)
2174 #if defined(CONFIG_DRM_AMD_DC_DCN)
2175 	uint8_t sink_status = 0;
2179 	/* clear training pattern set */
2180 	dpcd_set_training_pattern(link, DP_TRAINING_PATTERN_VIDEOIDLE);
2182 #if defined(CONFIG_DRM_AMD_DC_DCN)
2183 	/* poll for intra-hop disable */
2184 	for (i = 0; i < 10; i++) {
2185 		if ((core_link_read_dpcd(link, DP_SINK_STATUS, &sink_status, 1) == DC_OK) &&
2186 			(sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION) == 0)
/* Write the negotiated channel-coding (8b/10b vs 128b/132b) into
 * DP_MAIN_LINK_CHANNEL_CODING_SET and log it; returns the AUX write status.
 * NOTE(review): partially-extracted chunk ('return status;' and some log
 * arguments missing).  '<_settings' on the line numbered 2198 looks like an
 * HTML-entity-mangled '&lt_settings' — confirm upstream.  Lines kept
 * byte-identical.
 */
2193 enum dc_status dpcd_configure_channel_coding(struct dc_link *link,
2194 	struct link_training_settings *lt_settings)
2196 	enum dp_link_encoding encoding =
2197 		dp_get_link_encoding_format(
2198 			<_settings->link_settings);
2199 	enum dc_status status;
/* Only the low byte of the enum is written to the one-byte DPCD field. */
2201 	status = core_link_write_dpcd(
2203 		DP_MAIN_LINK_CHANNEL_CODING_SET,
2204 		(uint8_t *) &encoding,
2206 	DC_LOG_HW_LINK_TRAINING("%s:\n 0x%X MAIN_LINK_CHANNEL_CODING_SET = %x\n",
2208 		DP_MAIN_LINK_CHANNEL_CODING_SET,
2214 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* Read the 128b/132b TRAINING_AUX_RD_INTERVAL DPCD field and convert it to
 * microseconds: interval = (VALUE + 1) * (1 or 2 ms depending on UNIT).
 * NOTE(review): partially-extracted chunk (closing brace missing); lines
 * kept byte-identical.
 */
2215 static void dpcd_128b_132b_get_aux_rd_interval(struct dc_link *link,
2216 	uint32_t *interval_in_us)
2218 	union dp_128b_132b_training_aux_rd_interval dpcd_interval;
2219 	uint32_t interval_unit = 0;
2221 	dpcd_interval.raw = 0;
2222 	core_link_read_dpcd(link, DP_128b_132b_TRAINING_AUX_RD_INTERVAL,
2223 		&dpcd_interval.raw, sizeof(dpcd_interval.raw));
2224 	interval_unit = dpcd_interval.bits.UNIT ? 1 : 2; /* 0b = 2 ms, 1b = 1 ms */
2225 	/* (128b/132b_TRAINING_AUX_RD_INTERVAL value + 1) *
2226 	* INTERVAL_UNIT. The maximum is 256 ms
2228 	*interval_in_us = (dpcd_interval.bits.VALUE + 1) * interval_unit * 1000;
/* 128b/132b channel-equalization phase: transmit TPS1, seed lane
 * settings from the first DPRX status read, switch to TPS2, then poll
 * until all lanes report CH_EQ done, and finally poll for the
 * EQ interlane-align-done bit.  Each polling loop bails out with a
 * distinct failure code (loop-count limit, wait-time limit, or the
 * LT_FAILED bit from the sink).
 * NOTE(review): loop_count declaration/initialization, increments,
 * break statements and closing braces are elided from this excerpt.
 */
2231 static enum link_training_result dp_perform_128b_132b_channel_eq_done_sequence(
2232 struct dc_link *link,
2233 struct link_training_settings *lt_settings)
2236 uint32_t aux_rd_interval = 0;
2237 uint32_t wait_time = 0;
2238 union lane_align_status_updated dpcd_lane_status_updated = {0};
2239 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
2240 enum link_training_result status = LINK_TRAINING_SUCCESS;
2241 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = {0};
2243 /* Transmit 128b/132b_TPS1 over Main-Link */
2244 dp_set_hw_training_pattern(link, lt_settings->pattern_for_cr, DPRX);
2245 /* Set TRAINING_PATTERN_SET to 01h */
2246 dpcd_set_training_pattern(link, lt_settings->pattern_for_cr);
2248 /* Adjust TX_FFE_PRESET_VALUE and Transmit 128b/132b_TPS2 over Main-Link */
2249 dpcd_128b_132b_get_aux_rd_interval(link, &aux_rd_interval);
2250 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2251 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2252 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2253 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2254 dp_set_hw_lane_settings(link, lt_settings, DPRX);
2255 dp_set_hw_training_pattern(link, lt_settings->pattern_for_eq, DPRX);
2257 /* Set loop counter to start from 1 */
2260 /* Set TRAINING_PATTERN_SET to 02h and TX_FFE_PRESET_VALUE in one AUX transaction */
2261 dpcd_set_lt_pattern_and_lane_settings(link, lt_settings,
2262 lt_settings->pattern_for_eq, DPRX);
2264 /* poll for channel EQ done */
2265 while (status == LINK_TRAINING_SUCCESS) {
2266 dp_wait_for_training_aux_rd_interval(link, aux_rd_interval);
2267 wait_time += aux_rd_interval;
2268 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2269 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2270 dp_decide_lane_settings(lt_settings, dpcd_lane_adjust,
2271 lt_settings->hw_lane_settings, lt_settings->dpcd_lane_settings);
2272 dpcd_128b_132b_get_aux_rd_interval(link, &aux_rd_interval);
2273 if (dp_is_ch_eq_done(lt_settings->link_settings.lane_count,
2274 dpcd_lane_status)) {
2277 } else if (loop_count >= lt_settings->eq_loop_count_limit) {
2278 status = DP_128b_132b_MAX_LOOP_COUNT_REACHED;
2279 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2280 status = DP_128b_132b_LT_FAILED;
2282 dp_set_hw_lane_settings(link, lt_settings, DPRX);
2283 dpcd_set_lane_settings(link, lt_settings, DPRX);
2288 /* poll for EQ interlane align done */
2289 while (status == LINK_TRAINING_SUCCESS) {
2290 if (dpcd_lane_status_updated.bits.EQ_INTERLANE_ALIGN_DONE_128b_132b) {
2293 } else if (wait_time >= lt_settings->eq_wait_time_limit) {
2294 status = DP_128b_132b_CHANNEL_EQ_DONE_TIMEOUT;
2295 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2296 status = DP_128b_132b_LT_FAILED;
2298 dp_wait_for_training_aux_rd_interval(link,
2299 lt_settings->eq_pattern_time);
2300 wait_time += lt_settings->eq_pattern_time;
2301 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2302 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
/* 128b/132b CDS (clock/data switch) phase: set the CDS training pattern
 * and poll until all lanes report symbol lock AND the CDS
 * interlane-align-done bit is set.  Fails with LT_FAILED if the sink
 * flags training failure, or CDS_DONE_TIMEOUT past the wait limit.
 * NOTE(review): the success-break lines and the trailing return are
 * elided from this excerpt.
 */
2309 static enum link_training_result dp_perform_128b_132b_cds_done_sequence(
2310 struct dc_link *link,
2311 struct link_training_settings *lt_settings)
2313 /* Assumption: assume hardware has transmitted eq pattern */
2314 enum link_training_result status = LINK_TRAINING_SUCCESS;
2315 union lane_align_status_updated dpcd_lane_status_updated = {0};
2316 union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX] = {0};
2317 union lane_adjust dpcd_lane_adjust[LANE_COUNT_DP_MAX] = { { {0} } };
2318 uint32_t wait_time = 0;
2320 /* initiate CDS done sequence */
2321 dpcd_set_training_pattern(link, lt_settings->pattern_for_cds);
2323 /* poll for CDS interlane align done and symbol lock */
2324 while (status == LINK_TRAINING_SUCCESS) {
2325 dp_wait_for_training_aux_rd_interval(link,
2326 lt_settings->cds_pattern_time);
2327 wait_time += lt_settings->cds_pattern_time;
2328 dp_get_lane_status_and_lane_adjust(link, lt_settings, dpcd_lane_status,
2329 &dpcd_lane_status_updated, dpcd_lane_adjust, DPRX);
2330 if (dp_is_symbol_locked(lt_settings->link_settings.lane_count, dpcd_lane_status) &&
2331 dpcd_lane_status_updated.bits.CDS_INTERLANE_ALIGN_DONE_128b_132b) {
2334 } else if (dpcd_lane_status_updated.bits.LT_FAILED_128b_132b) {
2335 status = DP_128b_132b_LT_FAILED;
2336 } else if (wait_time >= lt_settings->cds_wait_time_limit) {
2337 status = DP_128b_132b_CDS_DONE_TIMEOUT;
/* Legacy 8b/10b link-training sequence: program link rate/lane
 * count/spread, then run clock recovery + channel equalization —
 * first hop-by-hop for each LTTPR (non-transparent mode, highest
 * repeater id first), then once against the DPRX itself.
 * NOTE(review): the repeater loop decrement, early-break lines and the
 * trailing return are elided from this excerpt; the DPCD lane-setting
 * reset between repeater and DPRX phases is visible at 2385-2386.
 */
2345 static enum link_training_result dp_perform_8b_10b_link_training(
2346 struct dc_link *link,
2347 struct link_training_settings *lt_settings)
2349 enum link_training_result status = LINK_TRAINING_SUCCESS;
2351 uint8_t repeater_cnt;
2352 uint8_t repeater_id;
2355 if (link->ctx->dc->work_arounds.lt_early_cr_pattern)
2356 start_clock_recovery_pattern_early(link, lt_settings, DPRX);
2358 /* 1. set link rate, lane count and spread. */
2359 dpcd_set_link_settings(link, lt_settings);
2361 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
2363 /* 2. perform link training (set link training done
2364 * to false is done as well)
2366 repeater_cnt = dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt);
2368 for (repeater_id = repeater_cnt; (repeater_id > 0 && status == LINK_TRAINING_SUCCESS);
2370 status = perform_clock_recovery_sequence(link, lt_settings, repeater_id);
2372 if (status != LINK_TRAINING_SUCCESS)
2375 status = perform_channel_equalization_sequence(link,
2379 if (status != LINK_TRAINING_SUCCESS)
2382 repeater_training_done(link, repeater_id);
2385 for (lane = 0; lane < (uint8_t)lt_settings->link_settings.lane_count; lane++)
2386 lt_settings->dpcd_lane_settings[lane].raw = 0;
2389 if (status == LINK_TRAINING_SUCCESS) {
2390 status = perform_clock_recovery_sequence(link, lt_settings, DPRX);
2391 if (status == LINK_TRAINING_SUCCESS) {
2392 status = perform_channel_equalization_sequence(link,
2401 #if defined(CONFIG_DRM_AMD_DC_DCN)
2402 static enum link_training_result dp_perform_128b_132b_link_training(
2403 struct dc_link *link,
2404 struct link_training_settings *lt_settings)
2406 enum link_training_result result = LINK_TRAINING_SUCCESS;
2408 /* TODO - DP2.0 Link: remove legacy_dp2_lt logic */
2409 if (link->dc->debug.legacy_dp2_lt) {
2410 struct link_training_settings legacy_settings;
2412 decide_8b_10b_training_settings(link,
2413 <_settings->link_settings,
2415 return dp_perform_8b_10b_link_training(link, &legacy_settings);
2418 dpcd_set_link_settings(link, lt_settings);
2420 if (result == LINK_TRAINING_SUCCESS)
2421 result = dp_perform_128b_132b_channel_eq_done_sequence(link, lt_settings);
2423 if (result == LINK_TRAINING_SUCCESS)
2424 result = dp_perform_128b_132b_cds_done_sequence(link, lt_settings);
2430 enum link_training_result dc_link_dp_perform_link_training(
2431 struct dc_link *link,
2432 const struct dc_link_settings *link_settings,
2433 bool skip_video_pattern)
2435 enum link_training_result status = LINK_TRAINING_SUCCESS;
2436 struct link_training_settings lt_settings = {0};
2437 enum dp_link_encoding encoding =
2438 dp_get_link_encoding_format(link_settings);
2440 /* decide training settings */
2441 dp_decide_training_settings(
2445 override_training_settings(
2447 &link->preferred_training_settings,
2450 /* reset previous training states */
2451 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
2452 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
2453 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
2454 link->apply_vendor_specific_lttpr_link_rate_wa = true;
2455 vendor_specific_lttpr_wa_four(link, true);
2457 dpcd_exit_training_mode(link);
2460 /* configure link prior to entering training mode */
2461 dpcd_configure_lttpr_mode(link, <_settings);
2462 dp_set_fec_ready(link, lt_settings.should_set_fec_ready);
2463 dpcd_configure_channel_coding(link, <_settings);
2465 /* enter training mode:
2466 * Per DP specs starting from here, DPTX device shall not issue
2467 * Non-LT AUX transactions inside training mode.
2469 if (encoding == DP_8b_10b_ENCODING)
2470 status = dp_perform_8b_10b_link_training(link, <_settings);
2471 #if defined(CONFIG_DRM_AMD_DC_DCN)
2472 else if (encoding == DP_128b_132b_ENCODING)
2473 status = dp_perform_128b_132b_link_training(link, <_settings);
2478 /* exit training mode */
2479 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
2480 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
2481 link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
2482 link->apply_vendor_specific_lttpr_link_rate_wa = false;
2483 vendor_specific_lttpr_wa_four(link, (status != LINK_TRAINING_SUCCESS));
2485 dpcd_exit_training_mode(link);
2488 /* switch to video idle */
2489 if ((status == LINK_TRAINING_SUCCESS) || !skip_video_pattern)
2490 status = dp_transition_to_video_idle(link,
2494 /* dump debug data */
2495 print_status_message(link, <_settings, status);
2496 if (status != LINK_TRAINING_SUCCESS)
2497 link->ctx->dc->debug_data.ltFailCount++;
/* Retry wrapper around link training: attempt training up to "attempts"
 * times, optionally falling back to reduced link settings between
 * attempts, aborting early on sink unplug, and delaying (with backoff)
 * between tries.  Routes to DPIA training for USB4 endpoints and to
 * skip-AUX training when AUX access is disabled.
 * NOTE(review): several signature lines (e.g. the "attempts" and
 * "do_fallback" parameters) and flow lines (break/continue, PHY enable
 * call, returns) are elided from this excerpt.
 * NOTE(review): "¤t_setting" below is mojibake for "&current_setting"
 * (HTML-entity corruption of '&curren') — restore before compiling.
 */
2501 bool perform_link_training_with_retries(
2502 const struct dc_link_settings *link_setting,
2503 bool skip_video_pattern,
2505 struct pipe_ctx *pipe_ctx,
2506 enum signal_type signal,
2510 uint8_t delay_between_attempts = LINK_TRAINING_RETRY_DELAY;
2511 struct dc_stream_state *stream = pipe_ctx->stream;
2512 struct dc_link *link = stream->link;
2513 enum dp_panel_mode panel_mode = dp_get_panel_mode(link);
2514 struct link_encoder *link_enc;
2515 enum link_training_result status = LINK_TRAINING_CR_FAIL_LANE0;
2516 struct dc_link_settings current_setting = *link_setting;
2518 /* Dynamically assigned link encoders associated with stream rather than
2521 if (link->is_dig_mapping_flexible && link->dc->res_pool->funcs->link_encs_assign)
2522 link_enc = link_enc_cfg_get_link_enc_used_by_stream(link->ctx->dc, pipe_ctx->stream);
2524 link_enc = link->link_enc;
2526 /* We need to do this before the link training to ensure the idle pattern in SST
2527 * mode will be sent right after the link training
2529 if (dp_get_link_encoding_format(¤t_setting) == DP_8b_10b_ENCODING) {
2530 link_enc->funcs->connect_dig_be_to_fe(link_enc,
2531 pipe_ctx->stream_res.stream_enc->id, true);
2532 dp_source_sequence_trace(link, DPCD_SOURCE_SEQ_AFTER_CONNECT_DIG_FE_BE);
2535 for (j = 0; j < attempts; ++j) {
2537 DC_LOG_HW_LINK_TRAINING("%s: Beginning link training attempt %u of %d\n",
2538 __func__, (unsigned int)j + 1, attempts);
2543 pipe_ctx->clock_source->id,
2546 if (stream->sink_patches.dppowerup_delay > 0) {
2547 int delay_dp_power_up_in_ms = stream->sink_patches.dppowerup_delay;
2549 msleep(delay_dp_power_up_in_ms);
2552 #ifdef CONFIG_DRM_AMD_DC_HDCP
2553 if (panel_mode == DP_PANEL_MODE_EDP) {
2554 struct cp_psp *cp_psp = &stream->ctx->cp_psp;
2556 if (cp_psp && cp_psp->funcs.enable_assr)
2557 /* ASSR is bound to fail with unsigned PSP
2558 * verstage used during devlopment phase.
2559 * Report and continue with eDP panel mode to
2560 * perform eDP link training with right settings
2562 cp_psp->funcs.enable_assr(cp_psp->handle, link);
2566 dp_set_panel_mode(link, panel_mode);
2568 if (link->aux_access_disabled) {
2569 dc_link_dp_perform_link_training_skip_aux(link, ¤t_setting);
2572 /** @todo Consolidate USB4 DP and DPx.x training. */
2573 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA) {
2574 status = dc_link_dpia_perform_link_training(link,
2576 skip_video_pattern);
2578 /* Transmit idle pattern once training successful. */
2579 if (status == LINK_TRAINING_SUCCESS)
2580 dp_set_hw_test_pattern(link, DP_TEST_PATTERN_VIDEO_MODE,
2583 status = dc_link_dp_perform_link_training(link,
2585 skip_video_pattern);
2588 if (status == LINK_TRAINING_SUCCESS)
2592 /* latest link training still fail, skip delay and keep PHY on
2594 if (j == (attempts - 1) && link->ep_type == DISPLAY_ENDPOINT_PHY)
2597 DC_LOG_WARNING("%s: Link training attempt %u of %d failed\n",
2598 __func__, (unsigned int)j + 1, attempts);
2600 dp_disable_link_phy(link, signal);
2602 /* Abort link training if failure due to sink being unplugged. */
2603 if (status == LINK_TRAINING_ABORT) {
2604 enum dc_connection_type type = dc_connection_none;
2606 dc_link_detect_sink(link, &type);
2607 if (type == dc_connection_none)
2609 } else if (do_fallback) {
2613 decide_fallback_link_setting(link, *link_setting, ¤t_setting, status);
2614 /* Fail link training if reduced link bandwidth no longer meets
2615 * stream requirements.
2617 req_bw = dc_bandwidth_in_kbps_from_timing(&stream->timing);
2618 link_bw = dc_link_bandwidth_kbps(link, ¤t_setting);
2619 if (req_bw > link_bw)
2623 msleep(delay_between_attempts);
2625 delay_between_attempts += LINK_TRAINING_RETRY_DELAY;
/* Return the id of the resource pool's DP clock source, or
 * CLOCK_SOURCE_ID_UNDEFINED when the pool has none.
 * NOTE(review): the else-branch body, comment terminator and trailing
 * "return dp_cs_id;" are elided from this excerpt.
 */
2631 static enum clock_source_id get_clock_source_id(struct dc_link *link)
2633 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_UNDEFINED;
2634 struct clock_source *dp_cs = link->dc->res_pool->dp_clock_source;
2636 if (dp_cs != NULL) {
2637 dp_cs_id = dp_cs->id;
2640 * dp clock source is not initialized for some reason.
2641 * Should not happen, CLOCK_SOURCE_ID_EXTERNAL will be used
2649 static void set_dp_mst_mode(struct dc_link *link, bool mst_enable)
2651 if (mst_enable == false &&
2652 link->type == dc_connection_mst_branch) {
2653 /* Disable MST on link. Use only local sink. */
2654 dp_disable_link_phy_mst(link, link->connector_signal);
2656 link->type = dc_connection_single;
2657 link->local_sink = link->remote_sinks[0];
2658 link->local_sink->sink_signal = SIGNAL_TYPE_DISPLAY_PORT;
2659 dc_sink_retain(link->local_sink);
2660 dm_helpers_dp_mst_stop_top_mgr(link->ctx, link);
2661 } else if (mst_enable == true &&
2662 link->type == dc_connection_single &&
2663 link->remote_sinks[0] != NULL) {
2664 /* Re-enable MST on link. */
2665 dp_disable_link_phy(link, link->connector_signal);
2666 dp_enable_mst_on_sink(link, true);
2668 link->type = dc_connection_mst_branch;
2669 link->local_sink->sink_signal = SIGNAL_TYPE_DISPLAY_PORT_MST;
2673 bool dc_link_dp_sync_lt_begin(struct dc_link *link)
2675 /* Begin Sync LT. During this time,
2676 * DPCD:600h must not be powered down.
2678 link->sync_lt_in_progress = true;
2680 /*Clear any existing preferred settings.*/
2681 memset(&link->preferred_training_settings, 0,
2682 sizeof(struct dc_link_training_overrides));
2683 memset(&link->preferred_link_setting, 0,
2684 sizeof(struct dc_link_settings));
/* One synchronous link-training attempt with caller-supplied overrides:
 * decide/override settings, optionally reconfigure MST mode, cycle the
 * PHY, set FEC readiness (8b/10b only) and panel mode, then run clock
 * recovery + channel equalization against the DPRX.  Sync LT skips the
 * video-pattern transition (TRAINING_PATTERN_SET:0).
 * NOTE(review): argument-continuation lines, #endif placement and the
 * trailing return are elided from this excerpt.
 * NOTE(review): "<_settings" below is mojibake for "&lt_settings"
 * (HTML-entity corruption) — restore before compiling.
 */
2689 enum link_training_result dc_link_dp_sync_lt_attempt(
2690 struct dc_link *link,
2691 struct dc_link_settings *link_settings,
2692 struct dc_link_training_overrides *lt_overrides)
2694 struct link_training_settings lt_settings = {0};
2695 enum link_training_result lt_status = LINK_TRAINING_SUCCESS;
2696 enum dp_panel_mode panel_mode = DP_PANEL_MODE_DEFAULT;
2697 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_EXTERNAL;
2698 bool fec_enable = false;
2700 dp_decide_training_settings(
2704 override_training_settings(
2708 /* Setup MST Mode */
2709 if (lt_overrides->mst_enable)
2710 set_dp_mst_mode(link, *lt_overrides->mst_enable);
2713 dp_disable_link_phy(link, link->connector_signal);
2716 dp_cs_id = get_clock_source_id(link);
2717 dp_enable_link_phy(link, link->connector_signal,
2718 dp_cs_id, link_settings);
2720 /* Set FEC enable */
2721 #if defined(CONFIG_DRM_AMD_DC_DCN)
2722 if (dp_get_link_encoding_format(link_settings) == DP_8b_10b_ENCODING) {
2724 fec_enable = lt_overrides->fec_enable && *lt_overrides->fec_enable;
2725 dp_set_fec_ready(link, fec_enable);
2726 #if defined(CONFIG_DRM_AMD_DC_DCN)
2730 if (lt_overrides->alternate_scrambler_reset) {
2731 if (*lt_overrides->alternate_scrambler_reset)
2732 panel_mode = DP_PANEL_MODE_EDP;
2734 panel_mode = DP_PANEL_MODE_DEFAULT;
2736 panel_mode = dp_get_panel_mode(link);
2738 dp_set_panel_mode(link, panel_mode);
2740 /* Attempt to train with given link training settings */
2741 if (link->ctx->dc->work_arounds.lt_early_cr_pattern)
2742 start_clock_recovery_pattern_early(link, <_settings, DPRX);
2744 /* Set link rate, lane count and spread. */
2745 dpcd_set_link_settings(link, <_settings);
2747 /* 2. perform link training (set link training done
2748 * to false is done as well)
2750 lt_status = perform_clock_recovery_sequence(link, <_settings, DPRX);
2751 if (lt_status == LINK_TRAINING_SUCCESS) {
2752 lt_status = perform_channel_equalization_sequence(link,
2757 /* 3. Sync LT must skip TRAINING_PATTERN_SET:0 (video pattern)*/
2758 /* 4. print status message*/
2759 print_status_message(link, <_settings, lt_status)
/* Leave synchronous link-training mode.  If link_down is requested,
 * shut the PHY down (without powering down DPCD 600h) and, on DCN
 * builds, clear FEC readiness only for 8b/10b links.
 * NOTE(review): the #endif lines and trailing return are elided from
 * this excerpt.
 */
2764 bool dc_link_dp_sync_lt_end(struct dc_link *link, bool link_down)
2766 /* If input parameter is set, shut down phy.
2767 * Still shouldn't turn off dp_receiver (DPCD:600h)
2769 if (link_down == true) {
2770 #if defined(CONFIG_DRM_AMD_DC_DCN)
2771 struct dc_link_settings link_settings = link->cur_link_settings;
2773 dp_disable_link_phy(link, link->connector_signal);
2774 #if defined(CONFIG_DRM_AMD_DC_DCN)
2775 if (dp_get_link_encoding_format(&link_settings) == DP_8b_10b_ENCODING)
2777 dp_set_fec_ready(link, false);
2780 link->sync_lt_in_progress = false;
2784 #if defined(CONFIG_DRM_AMD_DC_DCN)
2785 static enum dc_link_rate get_lttpr_max_link_rate(struct dc_link *link)
2787 enum dc_link_rate lttpr_max_link_rate = link->dpcd_caps.lttpr_caps.max_link_rate;
2789 if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR20)
2790 lttpr_max_link_rate = LINK_RATE_UHBR20;
2791 else if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR13_5)
2792 lttpr_max_link_rate = LINK_RATE_UHBR13_5;
2793 else if (link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.bits.UHBR10)
2794 lttpr_max_link_rate = LINK_RATE_UHBR10;
2796 return lttpr_max_link_rate;
/* Query the max link capabilities of the link's encoder, resolving a
 * dynamically-assigned encoder when flexible DIG mapping is enabled.
 * On success the caps are written to *max_link_enc_cap; otherwise a
 * minimal 1-lane fallback is written.
 * NOTE(review): the early/success/failure "return" lines are elided
 * from this excerpt.
 */
2800 bool dc_link_dp_get_max_link_enc_cap(const struct dc_link *link, struct dc_link_settings *max_link_enc_cap)
2802 struct link_encoder *link_enc = NULL;
2804 if (!max_link_enc_cap) {
2805 DC_LOG_ERROR("%s: Could not return max link encoder caps", __func__);
2809 /* Links supporting dynamically assigned link encoder will be assigned next
2810 * available encoder if one not already assigned.
2812 if (link->is_dig_mapping_flexible &&
2813 link->dc->res_pool->funcs->link_encs_assign) {
2814 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
2815 if (link_enc == NULL)
2816 link_enc = link_enc_cfg_get_next_avail_link_enc(link->ctx->dc);
2818 link_enc = link->link_enc;
2821 if (link_enc && link_enc->funcs->get_max_link_cap) {
2822 link_enc->funcs->get_max_link_cap(link_enc, max_link_enc_cap);
2826 DC_LOG_ERROR("%s: Max link encoder caps unknown", __func__);
2827 max_link_enc_cap->lane_count = 1;
/* magic 6 — presumably LINK_RATE_LOW (RBR, 0x06); verify against dc enum */
2828 max_link_enc_cap->link_rate = 6;
/* Compute the effective maximum link settings for a link: start from
 * the link encoder's caps (clamping UHBR rates when no HPO DP encoder
 * is present), then lower them to the sink's reported caps, and — in
 * non-transparent LTTPR mode — further clamp to the repeater chain's
 * lane/rate limits.
 * NOTE(review): encoder-null checks, #else branches and the assignment
 * following the DC_LOG line are elided from this excerpt.
 */
2832 static struct dc_link_settings get_max_link_cap(struct dc_link *link)
2834 struct dc_link_settings max_link_cap = {0};
2835 #if defined(CONFIG_DRM_AMD_DC_DCN)
2836 enum dc_link_rate lttpr_max_link_rate;
2838 struct link_encoder *link_enc = NULL;
2840 /* Links supporting dynamically assigned link encoder will be assigned next
2841 * available encoder if one not already assigned.
2843 if (link->is_dig_mapping_flexible &&
2844 link->dc->res_pool->funcs->link_encs_assign) {
2845 link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
2846 if (link_enc == NULL)
2847 link_enc = link_enc_cfg_get_next_avail_link_enc(link->ctx->dc);
2849 link_enc = link->link_enc;
2852 /* get max link encoder capability */
2854 link_enc->funcs->get_max_link_cap(link_enc, &max_link_cap);
2855 #if defined(CONFIG_DRM_AMD_DC_DCN)
2856 if (max_link_cap.link_rate >= LINK_RATE_UHBR10 &&
2857 !link->hpo_dp_link_enc)
2858 max_link_cap.link_rate = LINK_RATE_HIGH3;
2861 /* Lower link settings based on sink's link cap */
2862 if (link->reported_link_cap.lane_count < max_link_cap.lane_count)
2863 max_link_cap.lane_count =
2864 link->reported_link_cap.lane_count;
2865 if (link->reported_link_cap.link_rate < max_link_cap.link_rate)
2866 max_link_cap.link_rate =
2867 link->reported_link_cap.link_rate;
2868 if (link->reported_link_cap.link_spread <
2869 max_link_cap.link_spread)
2870 max_link_cap.link_spread =
2871 link->reported_link_cap.link_spread;
2873 * account for lttpr repeaters cap
2874 * notes: repeaters do not snoop in the DPRX Capabilities addresses (3.6.3).
2876 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) {
2877 if (link->dpcd_caps.lttpr_caps.max_lane_count < max_link_cap.lane_count)
2878 max_link_cap.lane_count = link->dpcd_caps.lttpr_caps.max_lane_count;
2880 #if defined(CONFIG_DRM_AMD_DC_DCN)
2881 lttpr_max_link_rate = get_lttpr_max_link_rate(link);
2883 if (lttpr_max_link_rate < max_link_cap.link_rate)
2884 max_link_cap.link_rate = lttpr_max_link_rate;
2886 if (link->dpcd_caps.lttpr_caps.max_link_rate < max_link_cap.link_rate)
2887 max_link_cap.link_rate = link->dpcd_caps.lttpr_caps.max_link_rate;
2890 DC_LOG_HW_LINK_TRAINING("%s\n Training with LTTPR, max_lane count %d max_link rate %d \n",
2892 max_link_cap.lane_count,
2893 max_link_cap.link_rate);
2895 return max_link_cap;
/* Read the HPD RX IRQ data block: pre-DP1.4 sinks are read from the
 * 200h range directly into irq_data; DP1.4+ sinks are read from the
 * 2002h ESI range in one AUX transaction and the fields copied over.
 * NOTE(review): the DPCD address/size argument lines of both reads and
 * the final return are elided from this excerpt.  The "static" on the
 * local retval persists it across calls — presumably intentional
 * upstream style, but worth confirming.
 */
2898 static enum dc_status read_hpd_rx_irq_data(
2899 struct dc_link *link,
2900 union hpd_irq_data *irq_data)
2902 static enum dc_status retval;
2904 /* The HW reads 16 bytes from 200h on HPD,
2905 * but if we get an AUX_DEFER, the HW cannot retry
2906 * and this causes the CTS tests 4.3.2.1 - 3.2.4 to
2907 * fail, so we now explicitly read 6 bytes which is
2908 * the req from the above mentioned test cases.
2910 * For DP 1.4 we need to read those from 2002h range.
2912 if (link->dpcd_caps.dpcd_rev.raw < DPCD_REV_14)
2913 retval = core_link_read_dpcd(
2917 sizeof(union hpd_irq_data));
2919 /* Read 14 bytes in a single read and then copy only the required fields.
2920 * This is more efficient than doing it in two separate AUX reads. */
2922 uint8_t tmp[DP_SINK_STATUS_ESI - DP_SINK_COUNT_ESI + 1];
2924 retval = core_link_read_dpcd(
2930 if (retval != DC_OK)
2933 irq_data->bytes.sink_cnt.raw = tmp[DP_SINK_COUNT_ESI - DP_SINK_COUNT_ESI];
2934 irq_data->bytes.device_service_irq.raw = tmp[DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0 - DP_SINK_COUNT_ESI];
2935 irq_data->bytes.lane01_status.raw = tmp[DP_LANE0_1_STATUS_ESI - DP_SINK_COUNT_ESI];
2936 irq_data->bytes.lane23_status.raw = tmp[DP_LANE2_3_STATUS_ESI - DP_SINK_COUNT_ESI];
2937 irq_data->bytes.lane_status_updated.raw = tmp[DP_LANE_ALIGN_STATUS_UPDATED_ESI - DP_SINK_COUNT_ESI];
2938 irq_data->bytes.sink_status.raw = tmp[DP_SINK_STATUS_ESI - DP_SINK_COUNT_ESI];
/* Decide from HPD IRQ DPCD data whether the link has been lost: any
 * lane dropping CR/CH_EQ/symbol-lock, or interlane align lost, counts
 * as a status change; the result is then gated on the sink being in
 * power state D0 (read from DPCD SET_POWER).
 * NOTE(review): the return_code assignments inside the branches, the
 * early return for lane_count == 0 and the final return are elided
 * from this excerpt.
 */
2944 bool hpd_rx_irq_check_link_loss_status(
2945 struct dc_link *link,
2946 union hpd_irq_data *hpd_irq_dpcd_data)
2948 uint8_t irq_reg_rx_power_state = 0;
2949 enum dc_status dpcd_result = DC_ERROR_UNEXPECTED;
2950 union lane_status lane_status;
2952 bool sink_status_changed;
2955 sink_status_changed = false;
2956 return_code = false;
2958 if (link->cur_link_settings.lane_count == 0)
2961 /*1. Check that Link Status changed, before re-training.*/
2963 /*parse lane status*/
2964 for (lane = 0; lane < link->cur_link_settings.lane_count; lane++) {
2965 /* check status of lanes 0,1
2966 * changed DpcdAddress_Lane01Status (0x202)
2968 lane_status.raw = get_nibble_at_index(
2969 &hpd_irq_dpcd_data->bytes.lane01_status.raw,
2972 if (!lane_status.bits.CHANNEL_EQ_DONE_0 ||
2973 !lane_status.bits.CR_DONE_0 ||
2974 !lane_status.bits.SYMBOL_LOCKED_0) {
2975 /* if one of the channel equalization, clock
2976 * recovery or symbol lock is dropped
2977 * consider it as (link has been
2978 * dropped) dp sink status has changed
2980 sink_status_changed = true;
2985 /* Check interlane align.*/
2986 if (sink_status_changed ||
2987 !hpd_irq_dpcd_data->bytes.lane_status_updated.bits.INTERLANE_ALIGN_DONE) {
2989 DC_LOG_HW_HPD_IRQ("%s: Link Status changed.\n", __func__);
2993 /*2. Check that we can handle interrupt: Not in FS DOS,
2994 * Not in "Display Timeout" state, Link is trained.
2996 dpcd_result = core_link_read_dpcd(link,
2998 &irq_reg_rx_power_state,
2999 sizeof(irq_reg_rx_power_state));
3001 if (dpcd_result != DC_OK) {
3002 DC_LOG_HW_HPD_IRQ("%s: DPCD read failed to obtain power state.\n",
3005 if (irq_reg_rx_power_state != DP_SET_POWER_D0)
3006 return_code = false;
/* Determine the link's actually-trainable capability by attempting
 * training from the highest common (sink ∩ ASIC) settings downward,
 * using the fallback table between failures.  Early-outs accept the
 * reported caps when detection training is skipped / flexible mapping
 * is used, or when the assigned encoder is busy.  On total failure the
 * verified cap is forced to the 1-lane/RBR safe minimum.
 * NOTE(review): success flag handling, enable-PHY call, loop braces,
 * early returns and the final return are elided from this excerpt.
 */
3013 bool dp_verify_link_cap(
3014 struct dc_link *link,
3015 struct dc_link_settings *known_limit_link_setting,
3018 struct dc_link_settings max_link_cap = {0};
3019 struct dc_link_settings cur_link_setting = {0};
3020 struct dc_link_settings *cur = &cur_link_setting;
3021 struct dc_link_settings initial_link_settings = {0};
3023 bool skip_link_training;
3024 bool skip_video_pattern;
3025 enum clock_source_id dp_cs_id = CLOCK_SOURCE_ID_EXTERNAL;
3026 enum link_training_result status;
3027 union hpd_irq_data irq_data;
3029 /* link training starts with the maximum common settings
3030 * supported by both sink and ASIC.
3032 max_link_cap = get_max_link_cap(link);
3033 initial_link_settings = get_common_supported_link_settings(
3034 *known_limit_link_setting,
3037 /* Accept reported capabilities if link supports flexible encoder mapping or encoder already in use. */
3038 if (link->dc->debug.skip_detection_link_training ||
3039 link->is_dig_mapping_flexible) {
3040 /* TODO - should we check link encoder's max link caps here?
3041 * How do we know which link encoder to check from?
3043 link->verified_link_cap = *known_limit_link_setting;
3045 } else if (link->link_enc && link->dc->res_pool->funcs->link_encs_assign &&
3046 !link_enc_cfg_is_link_enc_avail(link->ctx->dc, link->link_enc->preferred_engine, link)) {
3047 link->verified_link_cap = initial_link_settings;
3051 memset(&irq_data, 0, sizeof(irq_data));
3053 skip_link_training = false;
3055 /* Grant extended timeout request */
3056 if ((link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT) && (link->dpcd_caps.lttpr_caps.max_ext_timeout > 0)) {
3057 uint8_t grant = link->dpcd_caps.lttpr_caps.max_ext_timeout & 0x80;
3059 core_link_write_dpcd(link, DP_PHY_REPEATER_EXTENDED_WAIT_TIMEOUT, &grant, sizeof(grant));
3062 #if defined(CONFIG_DRM_AMD_DC_DCN)
3063 if (dp_get_link_encoding_format(&link->cur_link_settings) == DP_128b_132b_ENCODING)
3064 reset_dp_hpo_stream_encoders_for_link(link);
3066 /* TODO implement override and monitor patch later */
3068 /* try to train the link from high to low to
3069 * find the physical link capability
3071 /* disable PHY done possible by BIOS, will be done by driver itself */
3072 dp_disable_link_phy(link, link->connector_signal);
3074 dp_cs_id = get_clock_source_id(link);
3076 cur_link_setting = initial_link_settings;
3078 /* Temporary Renoir-specific workaround for SWDEV-215184;
3079 * PHY will sometimes be in bad state on hotplugging display from certain USB-C dongle,
3080 * so add extra cycle of enabling and disabling the PHY before first link training.
3082 if (link->link_enc && link->link_enc->features.flags.bits.DP_IS_USB_C &&
3083 link->dc->debug.usbc_combo_phy_reset_wa) {
3084 dp_enable_link_phy(link, link->connector_signal, dp_cs_id, cur);
3085 dp_disable_link_phy(link, link->connector_signal);
3089 skip_video_pattern = true;
3091 if (cur->link_rate == LINK_RATE_LOW)
3092 skip_video_pattern = false;
3096 link->connector_signal,
3101 if (skip_link_training)
3104 status = dc_link_dp_perform_link_training(
3107 skip_video_pattern);
3108 if (status == LINK_TRAINING_SUCCESS)
3115 link->verified_link_cap = *cur;
3117 if (read_hpd_rx_irq_data(link, &irq_data) == DC_OK)
3118 if (hpd_rx_irq_check_link_loss_status(
3123 /* always disable the link before trying another
3124 * setting or before returning we'll enable it later
3125 * based on the actual mode we're driving
3127 dp_disable_link_phy(link, link->connector_signal);
3128 } while (!success && decide_fallback_link_setting(link,
3129 initial_link_settings, cur, status));
3131 /* Link Training failed for all Link Settings
3132 * (Lane Count is still unknown)
3135 /* If all LT fails for all settings,
3136 * set verified = failed safe (1 lane low)
3138 link->verified_link_cap.lane_count = LANE_COUNT_ONE;
3139 link->verified_link_cap.link_rate = LINK_RATE_LOW;
3141 link->verified_link_cap.link_spread =
3142 LINK_SPREAD_DISABLED;
/* Retry wrapper around dp_verify_link_cap: up to "attempts" tries,
 * each preceded by a sink-detect; if the sink is gone the verified cap
 * collapses to the 1-lane/RBR safe minimum.  Succeeds only when a
 * verify pass completes with zero internal failures.
 * NOTE(review): the "attempts"/"fail_count" lines, loop break on
 * success, inter-attempt delay and the final return are elided from
 * this excerpt.
 */
3149 bool dp_verify_link_cap_with_retries(
3150 struct dc_link *link,
3151 struct dc_link_settings *known_limit_link_setting,
3155 bool success = false;
3157 for (i = 0; i < attempts; i++) {
3159 enum dc_connection_type type = dc_connection_none;
3161 memset(&link->verified_link_cap, 0,
3162 sizeof(struct dc_link_settings));
3163 if (!dc_link_detect_sink(link, &type) || type == dc_connection_none) {
3164 link->verified_link_cap.lane_count = LANE_COUNT_ONE;
3165 link->verified_link_cap.link_rate = LINK_RATE_LOW;
3166 link->verified_link_cap.link_spread = LINK_SPREAD_DISABLED;
3168 } else if (dp_verify_link_cap(link,
3169 known_limit_link_setting,
3170 &fail_count) && fail_count == 0) {
/* Verify link cap for an MST link: 8b/10b links take the common subset
 * of reported caps and encoder max caps without training; 128b/132b
 * (DCN) links go through the retried training-based verification.
 * NOTE(review): the closing argument line of the 8b/10b call and the
 * trailing return are elided from this excerpt.
 */
3179 bool dp_verify_mst_link_cap(
3180 struct dc_link *link)
3182 struct dc_link_settings max_link_cap = {0};
3184 if (dp_get_link_encoding_format(&link->reported_link_cap) ==
3185 DP_8b_10b_ENCODING) {
3186 max_link_cap = get_max_link_cap(link);
3187 link->verified_link_cap = get_common_supported_link_settings(
3188 link->reported_link_cap,
3191 #if defined(CONFIG_DRM_AMD_DC_DCN)
3192 else if (dp_get_link_encoding_format(&link->reported_link_cap) ==
3193 DP_128b_132b_ENCODING) {
3194 dp_verify_link_cap_with_retries(link,
3195 &link->reported_link_cap,
3196 LINK_TRAINING_MAX_VERIFY_RETRY);
/* Intersect two link settings: take the smaller lane count and link
 * rate, disable spread, then snap any non-standard link rate down to
 * the nearest supported rate (handles DPR-120 CTS equipment reporting
 * arbitrary MAX_LINK_BW values).
 * NOTE(review): at least one assignment line (the UHBR10 snap) and the
 * #else/#endif lines of the DCN-conditional rate ladder are elided
 * from this excerpt — the visible if/else chain is not complete.
 */
3202 static struct dc_link_settings get_common_supported_link_settings(
3203 struct dc_link_settings link_setting_a,
3204 struct dc_link_settings link_setting_b)
3206 struct dc_link_settings link_settings = {0};
3208 link_settings.lane_count =
3209 (link_setting_a.lane_count <=
3210 link_setting_b.lane_count) ?
3211 link_setting_a.lane_count :
3212 link_setting_b.lane_count;
3213 link_settings.link_rate =
3214 (link_setting_a.link_rate <=
3215 link_setting_b.link_rate) ?
3216 link_setting_a.link_rate :
3217 link_setting_b.link_rate;
3218 link_settings.link_spread = LINK_SPREAD_DISABLED;
3220 /* in DP compliance test, DPR-120 may have
3221 * a random value in its MAX_LINK_BW dpcd field.
3222 * We map it to the maximum supported link rate that
3223 * is smaller than MAX_LINK_BW in this case.
3225 #if defined(CONFIG_DRM_AMD_DC_DCN)
3226 if (link_settings.link_rate > LINK_RATE_UHBR20) {
3227 link_settings.link_rate = LINK_RATE_UHBR20;
3228 } else if (link_settings.link_rate < LINK_RATE_UHBR20 &&
3229 link_settings.link_rate > LINK_RATE_UHBR13_5) {
3230 link_settings.link_rate = LINK_RATE_UHBR13_5;
3231 } else if (link_settings.link_rate < LINK_RATE_UHBR10 &&
3232 link_settings.link_rate > LINK_RATE_HIGH3) {
3234 if (link_settings.link_rate > LINK_RATE_HIGH3) {
3236 link_settings.link_rate = LINK_RATE_HIGH3;
3237 } else if (link_settings.link_rate < LINK_RATE_HIGH3
3238 && link_settings.link_rate > LINK_RATE_HIGH2) {
3239 link_settings.link_rate = LINK_RATE_HIGH2;
3240 } else if (link_settings.link_rate < LINK_RATE_HIGH2
3241 && link_settings.link_rate > LINK_RATE_HIGH) {
3242 link_settings.link_rate = LINK_RATE_HIGH;
3243 } else if (link_settings.link_rate < LINK_RATE_HIGH
3244 && link_settings.link_rate > LINK_RATE_LOW) {
3245 link_settings.link_rate = LINK_RATE_LOW;
3246 } else if (link_settings.link_rate < LINK_RATE_LOW) {
3247 link_settings.link_rate = LINK_RATE_UNKNOWN;
3250 return link_settings;
3253 static inline bool reached_minimum_lane_count(enum dc_lane_count lane_count)
3255 return lane_count <= LANE_COUNT_ONE;
3258 static inline bool reached_minimum_link_rate(enum dc_link_rate link_rate)
3260 return link_rate <= LINK_RATE_LOW;
3263 static enum dc_lane_count reduce_lane_count(enum dc_lane_count lane_count)
3265 switch (lane_count) {
3266 case LANE_COUNT_FOUR:
3267 return LANE_COUNT_TWO;
3268 case LANE_COUNT_TWO:
3269 return LANE_COUNT_ONE;
3270 case LANE_COUNT_ONE:
3271 return LANE_COUNT_UNKNOWN;
3273 return LANE_COUNT_UNKNOWN;
3277 static enum dc_link_rate reduce_link_rate(enum dc_link_rate link_rate)
3279 switch (link_rate) {
3280 #if defined(CONFIG_DRM_AMD_DC_DCN)
3281 case LINK_RATE_UHBR20:
3282 return LINK_RATE_UHBR13_5;
3283 case LINK_RATE_UHBR13_5:
3284 return LINK_RATE_UHBR10;
3285 case LINK_RATE_UHBR10:
3286 return LINK_RATE_HIGH3;
3288 case LINK_RATE_HIGH3:
3289 return LINK_RATE_HIGH2;
3290 case LINK_RATE_HIGH2:
3291 return LINK_RATE_HIGH;
3292 case LINK_RATE_HIGH:
3293 return LINK_RATE_LOW;
3295 return LINK_RATE_UNKNOWN;
3297 return LINK_RATE_UNKNOWN;
3301 static enum dc_lane_count increase_lane_count(enum dc_lane_count lane_count)
3303 switch (lane_count) {
3304 case LANE_COUNT_ONE:
3305 return LANE_COUNT_TWO;
3306 case LANE_COUNT_TWO:
3307 return LANE_COUNT_FOUR;
3309 return LANE_COUNT_UNKNOWN;
3313 static enum dc_link_rate increase_link_rate(enum dc_link_rate link_rate)
3315 switch (link_rate) {
3317 return LINK_RATE_HIGH;
3318 case LINK_RATE_HIGH:
3319 return LINK_RATE_HIGH2;
3320 case LINK_RATE_HIGH2:
3321 return LINK_RATE_HIGH3;
3322 #if defined(CONFIG_DRM_AMD_DC_DCN)
3323 case LINK_RATE_HIGH3:
3324 return LINK_RATE_UHBR10;
3325 case LINK_RATE_UHBR10:
3326 return LINK_RATE_UHBR13_5;
3327 case LINK_RATE_UHBR13_5:
3328 return LINK_RATE_UHBR20;
3331 return LINK_RATE_UNKNOWN;
3335 #if defined(CONFIG_DRM_AMD_DC_DCN)
3336 static bool decide_fallback_link_setting_max_bw_policy(
3337 const struct dc_link_settings *max,
3338 struct dc_link_settings *cur)
3340 uint8_t cur_idx = 0, next_idx;
3343 while (cur_idx < ARRAY_SIZE(dp_lt_fallbacks))
3344 /* find current index */
3345 if (dp_lt_fallbacks[cur_idx].lane_count == cur->lane_count &&
3346 dp_lt_fallbacks[cur_idx].link_rate == cur->link_rate)
3351 next_idx = cur_idx + 1;
3353 while (next_idx < ARRAY_SIZE(dp_lt_fallbacks))
3354 /* find next index */
3355 if (dp_lt_fallbacks[next_idx].lane_count <= max->lane_count &&
3356 dp_lt_fallbacks[next_idx].link_rate <= max->link_rate)
3361 if (next_idx < ARRAY_SIZE(dp_lt_fallbacks)) {
3362 cur->lane_count = dp_lt_fallbacks[next_idx].lane_count;
3363 cur->link_rate = dp_lt_fallbacks[next_idx].link_rate;
3372 * function: set link rate and lane count fallback based
3373 * on current link setting and last link training result
3375 * true - link setting could be set
3376 * false - has reached minimum setting
3377 * and no further fallback could be done
3379 static bool decide_fallback_link_setting(
3380 struct dc_link *link,
3381 struct dc_link_settings initial_link_settings,
3382 struct dc_link_settings *current_link_setting,
3383 enum link_training_result training_result)
3385 if (!current_link_setting)
3387 #if defined(CONFIG_DRM_AMD_DC_DCN)
3388 if (dp_get_link_encoding_format(&initial_link_settings) == DP_128b_132b_ENCODING ||
3389 link->dc->debug.force_dp2_lt_fallback_method)
3390 return decide_fallback_link_setting_max_bw_policy(&initial_link_settings,
3391 current_link_setting);
3394 switch (training_result) {
3395 case LINK_TRAINING_CR_FAIL_LANE0:
3396 case LINK_TRAINING_CR_FAIL_LANE1:
3397 case LINK_TRAINING_CR_FAIL_LANE23:
3398 case LINK_TRAINING_LQA_FAIL:
3400 if (!reached_minimum_link_rate
3401 (current_link_setting->link_rate)) {
3402 current_link_setting->link_rate =
3404 current_link_setting->link_rate);
3405 } else if (!reached_minimum_lane_count
3406 (current_link_setting->lane_count)) {
3407 current_link_setting->link_rate =
3408 initial_link_settings.link_rate;
3409 if (training_result == LINK_TRAINING_CR_FAIL_LANE0)
3411 else if (training_result == LINK_TRAINING_CR_FAIL_LANE1)
3412 current_link_setting->lane_count =
3414 else if (training_result ==
3415 LINK_TRAINING_CR_FAIL_LANE23)
3416 current_link_setting->lane_count =
3419 current_link_setting->lane_count =
3421 current_link_setting->lane_count);
3427 case LINK_TRAINING_EQ_FAIL_EQ:
3429 if (!reached_minimum_lane_count
3430 (current_link_setting->lane_count)) {
3431 current_link_setting->lane_count =
3433 current_link_setting->lane_count);
3434 } else if (!reached_minimum_link_rate
3435 (current_link_setting->link_rate)) {
3436 current_link_setting->link_rate =
3438 current_link_setting->link_rate);
3444 case LINK_TRAINING_EQ_FAIL_CR:
3446 if (!reached_minimum_link_rate
3447 (current_link_setting->link_rate)) {
3448 current_link_setting->link_rate =
3450 current_link_setting->link_rate);
3462 bool dp_validate_mode_timing(
3463 struct dc_link *link,
3464 const struct dc_crtc_timing *timing)
3469 const struct dc_link_settings *link_setting;
3471 /* According to spec, VSC SDP should be used if pixel format is YCbCr420 */
3472 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420 &&
3473 !link->dpcd_caps.dprx_feature.bits.VSC_SDP_COLORIMETRY_SUPPORTED &&
3474 dal_graphics_object_id_get_connector_id(link->link_id) != CONNECTOR_ID_VIRTUAL)
3477 /*always DP fail safe mode*/
3478 if ((timing->pix_clk_100hz / 10) == (uint32_t) 25175 &&
3479 timing->h_addressable == (uint32_t) 640 &&
3480 timing->v_addressable == (uint32_t) 480)
3483 link_setting = dc_link_get_link_cap(link);
3485 /* TODO: DYNAMIC_VALIDATION needs to be implemented */
3486 /*if (flags.DYNAMIC_VALIDATION == 1 &&
3487 link->verified_link_cap.lane_count != LANE_COUNT_UNKNOWN)
3488 link_setting = &link->verified_link_cap;
3491 req_bw = dc_bandwidth_in_kbps_from_timing(timing);
3492 max_bw = dc_link_bandwidth_kbps(link, link_setting);
3494 if (req_bw <= max_bw) {
3495 /* remember the biggest mode here, during
3496 * initial link training (to get
3497 * verified_link_cap), LS sends event about
3498 * cannot train at reported cap to upper
3499 * layer and upper layer will re-enumerate modes.
3500 * this is not necessary if the lower
3501 * verified_link_cap is enough to drive
3504 /* TODO: DYNAMIC_VALIDATION needs to be implemented */
3505 /* if (flags.DYNAMIC_VALIDATION == 1)
3506 dpsst->max_req_bw_for_verified_linkcap = dal_max(
3507 dpsst->max_req_bw_for_verified_linkcap, req_bw); */
3513 static bool decide_dp_link_settings(struct dc_link *link, struct dc_link_settings *link_setting, uint32_t req_bw)
3515 struct dc_link_settings initial_link_setting = {
3516 LANE_COUNT_ONE, LINK_RATE_LOW, LINK_SPREAD_DISABLED, false, 0};
3517 struct dc_link_settings current_link_setting =
3518 initial_link_setting;
3521 if (req_bw > dc_link_bandwidth_kbps(link, &link->verified_link_cap))
3524 /* search for the minimum link setting that:
3525 * 1. is supported according to the link training result
3526 * 2. could support the b/w requested by the timing
3528 while (current_link_setting.link_rate <=
3529 link->verified_link_cap.link_rate) {
3530 link_bw = dc_link_bandwidth_kbps(
3532 ¤t_link_setting);
3533 if (req_bw <= link_bw) {
3534 *link_setting = current_link_setting;
3538 if (current_link_setting.lane_count <
3539 link->verified_link_cap.lane_count) {
3540 current_link_setting.lane_count =
3541 increase_lane_count(
3542 current_link_setting.lane_count);
3544 current_link_setting.link_rate =
3546 current_link_setting.link_rate);
3547 current_link_setting.lane_count =
3548 initial_link_setting.lane_count;
3555 bool decide_edp_link_settings(struct dc_link *link, struct dc_link_settings *link_setting, uint32_t req_bw)
3557 struct dc_link_settings initial_link_setting;
3558 struct dc_link_settings current_link_setting;
3562 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
3563 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
3565 if (link->dpcd_caps.dpcd_rev.raw < DPCD_REV_13 ||
3566 link->dpcd_caps.edp_supported_link_rates_count == 0) {
3567 *link_setting = link->verified_link_cap;
3571 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
3572 initial_link_setting.lane_count = LANE_COUNT_ONE;
3573 initial_link_setting.link_rate = link->dpcd_caps.edp_supported_link_rates[0];
3574 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
3575 initial_link_setting.use_link_rate_set = true;
3576 initial_link_setting.link_rate_set = 0;
3577 current_link_setting = initial_link_setting;
3579 /* search for the minimum link setting that:
3580 * 1. is supported according to the link training result
3581 * 2. could support the b/w requested by the timing
3583 while (current_link_setting.link_rate <=
3584 link->verified_link_cap.link_rate) {
3585 link_bw = dc_link_bandwidth_kbps(
3587 ¤t_link_setting);
3588 if (req_bw <= link_bw) {
3589 *link_setting = current_link_setting;
3593 if (current_link_setting.lane_count <
3594 link->verified_link_cap.lane_count) {
3595 current_link_setting.lane_count =
3596 increase_lane_count(
3597 current_link_setting.lane_count);
3599 if (current_link_setting.link_rate_set < link->dpcd_caps.edp_supported_link_rates_count) {
3600 current_link_setting.link_rate_set++;
3601 current_link_setting.link_rate =
3602 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
3603 current_link_setting.lane_count =
3604 initial_link_setting.lane_count;
3612 static bool decide_edp_link_settings_with_dsc(struct dc_link *link,
3613 struct dc_link_settings *link_setting,
3615 enum dc_link_rate max_link_rate)
3617 struct dc_link_settings initial_link_setting;
3618 struct dc_link_settings current_link_setting;
3621 unsigned int policy = 0;
3623 policy = link->ctx->dc->debug.force_dsc_edp_policy;
3624 if (max_link_rate == LINK_RATE_UNKNOWN)
3625 max_link_rate = link->verified_link_cap.link_rate;
3627 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
3628 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
3630 if ((link->dpcd_caps.dpcd_rev.raw < DPCD_REV_13 ||
3631 link->dpcd_caps.edp_supported_link_rates_count == 0)) {
3632 /* for DSC enabled case, we search for minimum lane count */
3633 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
3634 initial_link_setting.lane_count = LANE_COUNT_ONE;
3635 initial_link_setting.link_rate = LINK_RATE_LOW;
3636 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
3637 initial_link_setting.use_link_rate_set = false;
3638 initial_link_setting.link_rate_set = 0;
3639 current_link_setting = initial_link_setting;
3640 if (req_bw > dc_link_bandwidth_kbps(link, &link->verified_link_cap))
3643 /* search for the minimum link setting that:
3644 * 1. is supported according to the link training result
3645 * 2. could support the b/w requested by the timing
3647 while (current_link_setting.link_rate <=
3649 link_bw = dc_link_bandwidth_kbps(
3651 ¤t_link_setting);
3652 if (req_bw <= link_bw) {
3653 *link_setting = current_link_setting;
3658 if (current_link_setting.link_rate < max_link_rate) {
3659 current_link_setting.link_rate =
3661 current_link_setting.link_rate);
3663 if (current_link_setting.lane_count <
3664 link->verified_link_cap.lane_count) {
3665 current_link_setting.lane_count =
3666 increase_lane_count(
3667 current_link_setting.lane_count);
3668 current_link_setting.link_rate = initial_link_setting.link_rate;
3673 /* minimize link rate */
3674 if (current_link_setting.lane_count <
3675 link->verified_link_cap.lane_count) {
3676 current_link_setting.lane_count =
3677 increase_lane_count(
3678 current_link_setting.lane_count);
3680 current_link_setting.link_rate =
3682 current_link_setting.link_rate);
3683 current_link_setting.lane_count =
3684 initial_link_setting.lane_count;
3691 /* if optimize edp link is supported */
3692 memset(&initial_link_setting, 0, sizeof(initial_link_setting));
3693 initial_link_setting.lane_count = LANE_COUNT_ONE;
3694 initial_link_setting.link_rate = link->dpcd_caps.edp_supported_link_rates[0];
3695 initial_link_setting.link_spread = LINK_SPREAD_DISABLED;
3696 initial_link_setting.use_link_rate_set = true;
3697 initial_link_setting.link_rate_set = 0;
3698 current_link_setting = initial_link_setting;
3700 /* search for the minimum link setting that:
3701 * 1. is supported according to the link training result
3702 * 2. could support the b/w requested by the timing
3704 while (current_link_setting.link_rate <=
3706 link_bw = dc_link_bandwidth_kbps(
3708 ¤t_link_setting);
3709 if (req_bw <= link_bw) {
3710 *link_setting = current_link_setting;
3715 if (current_link_setting.link_rate_set <
3716 link->dpcd_caps.edp_supported_link_rates_count
3717 && current_link_setting.link_rate < max_link_rate) {
3718 current_link_setting.link_rate_set++;
3719 current_link_setting.link_rate =
3720 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
3722 if (current_link_setting.lane_count < link->verified_link_cap.lane_count) {
3723 current_link_setting.lane_count =
3724 increase_lane_count(
3725 current_link_setting.lane_count);
3726 current_link_setting.link_rate_set = initial_link_setting.link_rate_set;
3727 current_link_setting.link_rate =
3728 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
3733 /* minimize link rate */
3734 if (current_link_setting.lane_count <
3735 link->verified_link_cap.lane_count) {
3736 current_link_setting.lane_count =
3737 increase_lane_count(
3738 current_link_setting.lane_count);
3740 if (current_link_setting.link_rate_set < link->dpcd_caps.edp_supported_link_rates_count) {
3741 current_link_setting.link_rate_set++;
3742 current_link_setting.link_rate =
3743 link->dpcd_caps.edp_supported_link_rates[current_link_setting.link_rate_set];
3744 current_link_setting.lane_count =
3745 initial_link_setting.lane_count;
3754 static bool decide_mst_link_settings(const struct dc_link *link, struct dc_link_settings *link_setting)
3756 *link_setting = link->verified_link_cap;
3760 void decide_link_settings(struct dc_stream_state *stream,
3761 struct dc_link_settings *link_setting)
3763 struct dc_link *link;
3766 req_bw = dc_bandwidth_in_kbps_from_timing(&stream->timing);
3768 link = stream->link;
3770 /* if preferred is specified through AMDDP, use it, if it's enough
3773 if (link->preferred_link_setting.lane_count !=
3774 LANE_COUNT_UNKNOWN &&
3775 link->preferred_link_setting.link_rate !=
3776 LINK_RATE_UNKNOWN) {
3777 *link_setting = link->preferred_link_setting;
3781 /* MST doesn't perform link training for now
3782 * TODO: add MST specific link training routine
3784 if (stream->signal == SIGNAL_TYPE_DISPLAY_PORT_MST) {
3785 if (decide_mst_link_settings(link, link_setting))
3787 } else if (link->connector_signal == SIGNAL_TYPE_EDP) {
3788 /* enable edp link optimization for DSC eDP case */
3789 if (stream->timing.flags.DSC) {
3790 enum dc_link_rate max_link_rate = LINK_RATE_UNKNOWN;
3792 if (link->ctx->dc->debug.force_dsc_edp_policy) {
3793 /* calculate link max link rate cap*/
3794 struct dc_link_settings tmp_link_setting;
3795 struct dc_crtc_timing tmp_timing = stream->timing;
3796 uint32_t orig_req_bw;
3798 tmp_link_setting.link_rate = LINK_RATE_UNKNOWN;
3799 tmp_timing.flags.DSC = 0;
3800 orig_req_bw = dc_bandwidth_in_kbps_from_timing(&tmp_timing);
3801 decide_edp_link_settings(link, &tmp_link_setting, orig_req_bw);
3802 max_link_rate = tmp_link_setting.link_rate;
3804 if (decide_edp_link_settings_with_dsc(link, link_setting, req_bw, max_link_rate))
3806 } else if (decide_edp_link_settings(link, link_setting, req_bw))
3808 } else if (decide_dp_link_settings(link, link_setting, req_bw))
3811 BREAK_TO_DEBUGGER();
3812 ASSERT(link->verified_link_cap.lane_count != LANE_COUNT_UNKNOWN);
3814 *link_setting = link->verified_link_cap;
3817 /*************************Short Pulse IRQ***************************/
3818 bool dc_link_dp_allow_hpd_rx_irq(const struct dc_link *link)
3821 * Don't handle RX IRQ unless one of following is met:
3822 * 1) The link is established (cur_link_settings != unknown)
3823 * 2) We know we're dealing with a branch device, SST or MST
3826 if ((link->cur_link_settings.lane_count != LANE_COUNT_UNKNOWN) ||
3827 is_dp_branch_device(link))
3833 static bool handle_hpd_irq_psr_sink(struct dc_link *link)
3835 union dpcd_psr_configuration psr_configuration;
3837 if (!link->psr_settings.psr_feature_enabled)
3840 dm_helpers_dp_read_dpcd(
3843 368,/*DpcdAddress_PSR_Enable_Cfg*/
3844 &psr_configuration.raw,
3845 sizeof(psr_configuration.raw));
3847 if (psr_configuration.bits.ENABLE) {
3848 unsigned char dpcdbuf[3] = {0};
3849 union psr_error_status psr_error_status;
3850 union psr_sink_psr_status psr_sink_psr_status;
3852 dm_helpers_dp_read_dpcd(
3855 0x2006, /*DpcdAddress_PSR_Error_Status*/
3856 (unsigned char *) dpcdbuf,
3859 /*DPCD 2006h ERROR STATUS*/
3860 psr_error_status.raw = dpcdbuf[0];
3861 /*DPCD 2008h SINK PANEL SELF REFRESH STATUS*/
3862 psr_sink_psr_status.raw = dpcdbuf[2];
3864 if (psr_error_status.bits.LINK_CRC_ERROR ||
3865 psr_error_status.bits.RFB_STORAGE_ERROR ||
3866 psr_error_status.bits.VSC_SDP_ERROR) {
3869 /* Acknowledge and clear error bits */
3870 dm_helpers_dp_write_dpcd(
3873 8198,/*DpcdAddress_PSR_Error_Status*/
3874 &psr_error_status.raw,
3875 sizeof(psr_error_status.raw));
3877 /* PSR error, disable and re-enable PSR */
3878 if (link->psr_settings.psr_allow_active) {
3879 allow_active = false;
3880 dc_link_set_psr_allow_active(link, &allow_active, true, false, NULL);
3881 allow_active = true;
3882 dc_link_set_psr_allow_active(link, &allow_active, true, false, NULL);
3886 } else if (psr_sink_psr_status.bits.SINK_SELF_REFRESH_STATUS ==
3887 PSR_SINK_STATE_ACTIVE_DISPLAY_FROM_SINK_RFB){
3888 /* No error is detect, PSR is active.
3889 * We should return with IRQ_HPD handled without
3890 * checking for loss of sync since PSR would have
3891 * powered down main link.
3899 static void dp_test_send_link_training(struct dc_link *link)
3901 struct dc_link_settings link_settings = {0};
3903 core_link_read_dpcd(
3906 (unsigned char *)(&link_settings.lane_count),
3908 core_link_read_dpcd(
3911 (unsigned char *)(&link_settings.link_rate),
3914 /* Set preferred link settings */
3915 link->verified_link_cap.lane_count = link_settings.lane_count;
3916 link->verified_link_cap.link_rate = link_settings.link_rate;
3918 dp_retrain_link_dp_test(link, &link_settings, false);
3921 /* TODO Raven hbr2 compliance eye output is unstable
3922 * (toggling on and off) with debugger break
3923 * This caueses intermittent PHY automation failure
3924 * Need to look into the root cause */
3925 static void dp_test_send_phy_test_pattern(struct dc_link *link)
3927 union phy_test_pattern dpcd_test_pattern;
3928 union lane_adjust dpcd_lane_adjustment[2];
3929 unsigned char dpcd_post_cursor_2_adjustment = 0;
3930 #if defined(CONFIG_DRM_AMD_DC_DCN)
3931 unsigned char test_pattern_buffer[
3932 (DP_TEST_264BIT_CUSTOM_PATTERN_263_256 -
3933 DP_TEST_264BIT_CUSTOM_PATTERN_7_0)+1] = {0};
3935 unsigned char test_pattern_buffer[
3936 (DP_TEST_80BIT_CUSTOM_PATTERN_79_72 -
3937 DP_TEST_80BIT_CUSTOM_PATTERN_7_0)+1] = {0};
3939 unsigned int test_pattern_size = 0;
3940 enum dp_test_pattern test_pattern;
3941 union lane_adjust dpcd_lane_adjust;
3943 struct link_training_settings link_training_settings;
3945 dpcd_test_pattern.raw = 0;
3946 memset(dpcd_lane_adjustment, 0, sizeof(dpcd_lane_adjustment));
3947 memset(&link_training_settings, 0, sizeof(link_training_settings));
3949 /* get phy test pattern and pattern parameters from DP receiver */
3950 core_link_read_dpcd(
3952 DP_PHY_TEST_PATTERN,
3953 &dpcd_test_pattern.raw,
3954 sizeof(dpcd_test_pattern));
3955 core_link_read_dpcd(
3957 DP_ADJUST_REQUEST_LANE0_1,
3958 &dpcd_lane_adjustment[0].raw,
3959 sizeof(dpcd_lane_adjustment));
3961 if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
3962 (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
3963 link->lttpr_mode == LTTPR_MODE_TRANSPARENT)
3964 vendor_specific_lttpr_wa_three_dpcd(
3966 link_training_settings.dpcd_lane_settings);
3968 /*get post cursor 2 parameters
3969 * For DP 1.1a or eariler, this DPCD register's value is 0
3970 * For DP 1.2 or later:
3971 * Bits 1:0 = POST_CURSOR2_LANE0; Bits 3:2 = POST_CURSOR2_LANE1
3972 * Bits 5:4 = POST_CURSOR2_LANE2; Bits 7:6 = POST_CURSOR2_LANE3
3974 core_link_read_dpcd(
3976 DP_ADJUST_REQUEST_POST_CURSOR2,
3977 &dpcd_post_cursor_2_adjustment,
3978 sizeof(dpcd_post_cursor_2_adjustment));
3980 /* translate request */
3981 switch (dpcd_test_pattern.bits.PATTERN) {
3982 case PHY_TEST_PATTERN_D10_2:
3983 test_pattern = DP_TEST_PATTERN_D102;
3985 case PHY_TEST_PATTERN_SYMBOL_ERROR:
3986 test_pattern = DP_TEST_PATTERN_SYMBOL_ERROR;
3988 case PHY_TEST_PATTERN_PRBS7:
3989 test_pattern = DP_TEST_PATTERN_PRBS7;
3991 case PHY_TEST_PATTERN_80BIT_CUSTOM:
3992 test_pattern = DP_TEST_PATTERN_80BIT_CUSTOM;
3994 case PHY_TEST_PATTERN_CP2520_1:
3995 /* CP2520 pattern is unstable, temporarily use TPS4 instead */
3996 test_pattern = (link->dc->caps.force_dp_tps4_for_cp2520 == 1) ?
3997 DP_TEST_PATTERN_TRAINING_PATTERN4 :
3998 DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE;
4000 case PHY_TEST_PATTERN_CP2520_2:
4001 /* CP2520 pattern is unstable, temporarily use TPS4 instead */
4002 test_pattern = (link->dc->caps.force_dp_tps4_for_cp2520 == 1) ?
4003 DP_TEST_PATTERN_TRAINING_PATTERN4 :
4004 DP_TEST_PATTERN_HBR2_COMPLIANCE_EYE;
4006 case PHY_TEST_PATTERN_CP2520_3:
4007 test_pattern = DP_TEST_PATTERN_TRAINING_PATTERN4;
4009 #if defined(CONFIG_DRM_AMD_DC_DCN)
4010 case PHY_TEST_PATTERN_128b_132b_TPS1:
4011 test_pattern = DP_TEST_PATTERN_128b_132b_TPS1;
4013 case PHY_TEST_PATTERN_128b_132b_TPS2:
4014 test_pattern = DP_TEST_PATTERN_128b_132b_TPS2;
4016 case PHY_TEST_PATTERN_PRBS9:
4017 test_pattern = DP_TEST_PATTERN_PRBS9;
4019 case PHY_TEST_PATTERN_PRBS11:
4020 test_pattern = DP_TEST_PATTERN_PRBS11;
4022 case PHY_TEST_PATTERN_PRBS15:
4023 test_pattern = DP_TEST_PATTERN_PRBS15;
4025 case PHY_TEST_PATTERN_PRBS23:
4026 test_pattern = DP_TEST_PATTERN_PRBS23;
4028 case PHY_TEST_PATTERN_PRBS31:
4029 test_pattern = DP_TEST_PATTERN_PRBS31;
4031 case PHY_TEST_PATTERN_264BIT_CUSTOM:
4032 test_pattern = DP_TEST_PATTERN_264BIT_CUSTOM;
4034 case PHY_TEST_PATTERN_SQUARE_PULSE:
4035 test_pattern = DP_TEST_PATTERN_SQUARE_PULSE;
4039 test_pattern = DP_TEST_PATTERN_VIDEO_MODE;
4043 if (test_pattern == DP_TEST_PATTERN_80BIT_CUSTOM) {
4044 test_pattern_size = (DP_TEST_80BIT_CUSTOM_PATTERN_79_72 -
4045 DP_TEST_80BIT_CUSTOM_PATTERN_7_0) + 1;
4046 core_link_read_dpcd(
4048 DP_TEST_80BIT_CUSTOM_PATTERN_7_0,
4049 test_pattern_buffer,
4053 #if defined(CONFIG_DRM_AMD_DC_DCN)
4054 if (test_pattern == DP_TEST_PATTERN_SQUARE_PULSE) {
4055 test_pattern_size = 1; // Square pattern data is 1 byte (DP spec)
4056 core_link_read_dpcd(
4058 DP_PHY_SQUARE_PATTERN,
4059 test_pattern_buffer,
4063 if (test_pattern == DP_TEST_PATTERN_264BIT_CUSTOM) {
4064 test_pattern_size = (DP_TEST_264BIT_CUSTOM_PATTERN_263_256-
4065 DP_TEST_264BIT_CUSTOM_PATTERN_7_0) + 1;
4066 core_link_read_dpcd(
4068 DP_TEST_264BIT_CUSTOM_PATTERN_7_0,
4069 test_pattern_buffer,
4074 /* prepare link training settings */
4075 link_training_settings.link_settings = link->cur_link_settings;
4077 for (lane = 0; lane <
4078 (unsigned int)(link->cur_link_settings.lane_count);
4080 dpcd_lane_adjust.raw =
4081 get_nibble_at_index(&dpcd_lane_adjustment[0].raw, lane);
4082 if (dp_get_link_encoding_format(&link->cur_link_settings) ==
4083 DP_8b_10b_ENCODING) {
4084 link_training_settings.hw_lane_settings[lane].VOLTAGE_SWING =
4085 (enum dc_voltage_swing)
4086 (dpcd_lane_adjust.bits.VOLTAGE_SWING_LANE);
4087 link_training_settings.hw_lane_settings[lane].PRE_EMPHASIS =
4088 (enum dc_pre_emphasis)
4089 (dpcd_lane_adjust.bits.PRE_EMPHASIS_LANE);
4090 link_training_settings.hw_lane_settings[lane].POST_CURSOR2 =
4091 (enum dc_post_cursor2)
4092 ((dpcd_post_cursor_2_adjustment >> (lane * 2)) & 0x03);
4094 #if defined(CONFIG_DRM_AMD_DC_DCN)
4095 else if (dp_get_link_encoding_format(&link->cur_link_settings) ==
4096 DP_128b_132b_ENCODING) {
4097 link_training_settings.hw_lane_settings[lane].FFE_PRESET.raw =
4098 dpcd_lane_adjust.tx_ffe.PRESET_VALUE;
4103 dp_hw_to_dpcd_lane_settings(&link_training_settings,
4104 link_training_settings.hw_lane_settings,
4105 link_training_settings.dpcd_lane_settings);
4106 /*Usage: Measure DP physical lane signal
4107 * by DP SI test equipment automatically.
4108 * PHY test pattern request is generated by equipment via HPD interrupt.
4109 * HPD needs to be active all the time. HPD should be active
4110 * all the time. Do not touch it.
4111 * forward request to DS
4113 dc_link_dp_set_test_pattern(
4116 DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED,
4117 &link_training_settings,
4118 test_pattern_buffer,
4122 static void dp_test_send_link_test_pattern(struct dc_link *link)
4124 union link_test_pattern dpcd_test_pattern;
4125 union test_misc dpcd_test_params;
4126 enum dp_test_pattern test_pattern;
4127 enum dp_test_pattern_color_space test_pattern_color_space =
4128 DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED;
4129 enum dc_color_depth requestColorDepth = COLOR_DEPTH_UNDEFINED;
4130 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
4131 struct pipe_ctx *pipe_ctx = NULL;
4134 memset(&dpcd_test_pattern, 0, sizeof(dpcd_test_pattern));
4135 memset(&dpcd_test_params, 0, sizeof(dpcd_test_params));
4137 for (i = 0; i < MAX_PIPES; i++) {
4138 if (pipes[i].stream == NULL)
4141 if (pipes[i].stream->link == link && !pipes[i].top_pipe && !pipes[i].prev_odm_pipe) {
4142 pipe_ctx = &pipes[i];
4147 if (pipe_ctx == NULL)
4150 /* get link test pattern and pattern parameters */
4151 core_link_read_dpcd(
4154 &dpcd_test_pattern.raw,
4155 sizeof(dpcd_test_pattern));
4156 core_link_read_dpcd(
4159 &dpcd_test_params.raw,
4160 sizeof(dpcd_test_params));
4162 switch (dpcd_test_pattern.bits.PATTERN) {
4163 case LINK_TEST_PATTERN_COLOR_RAMP:
4164 test_pattern = DP_TEST_PATTERN_COLOR_RAMP;
4166 case LINK_TEST_PATTERN_VERTICAL_BARS:
4167 test_pattern = DP_TEST_PATTERN_VERTICAL_BARS;
4168 break; /* black and white */
4169 case LINK_TEST_PATTERN_COLOR_SQUARES:
4170 test_pattern = (dpcd_test_params.bits.DYN_RANGE ==
4171 TEST_DYN_RANGE_VESA ?
4172 DP_TEST_PATTERN_COLOR_SQUARES :
4173 DP_TEST_PATTERN_COLOR_SQUARES_CEA);
4176 test_pattern = DP_TEST_PATTERN_VIDEO_MODE;
4180 if (dpcd_test_params.bits.CLR_FORMAT == 0)
4181 test_pattern_color_space = DP_TEST_PATTERN_COLOR_SPACE_RGB;
4183 test_pattern_color_space = dpcd_test_params.bits.YCBCR_COEFS ?
4184 DP_TEST_PATTERN_COLOR_SPACE_YCBCR709 :
4185 DP_TEST_PATTERN_COLOR_SPACE_YCBCR601;
4187 switch (dpcd_test_params.bits.BPC) {
4189 requestColorDepth = COLOR_DEPTH_666;
4192 requestColorDepth = COLOR_DEPTH_888;
4195 requestColorDepth = COLOR_DEPTH_101010;
4198 requestColorDepth = COLOR_DEPTH_121212;
4204 switch (dpcd_test_params.bits.CLR_FORMAT) {
4206 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_RGB;
4209 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_YCBCR422;
4212 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_YCBCR444;
4215 pipe_ctx->stream->timing.pixel_encoding = PIXEL_ENCODING_RGB;
4220 if (requestColorDepth != COLOR_DEPTH_UNDEFINED
4221 && pipe_ctx->stream->timing.display_color_depth != requestColorDepth) {
4222 DC_LOG_DEBUG("%s: original bpc %d, changing to %d\n",
4224 pipe_ctx->stream->timing.display_color_depth,
4226 pipe_ctx->stream->timing.display_color_depth = requestColorDepth;
4229 dp_update_dsc_config(pipe_ctx);
4231 dc_link_dp_set_test_pattern(
4234 test_pattern_color_space,
4240 static void dp_test_get_audio_test_data(struct dc_link *link, bool disable_video)
4242 union audio_test_mode dpcd_test_mode = {0};
4243 struct audio_test_pattern_type dpcd_pattern_type = {0};
4244 union audio_test_pattern_period dpcd_pattern_period[AUDIO_CHANNELS_COUNT] = {0};
4245 enum dp_test_pattern test_pattern = DP_TEST_PATTERN_AUDIO_OPERATOR_DEFINED;
4247 struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
4248 struct pipe_ctx *pipe_ctx = &pipes[0];
4249 unsigned int channel_count;
4250 unsigned int channel = 0;
4251 unsigned int modes = 0;
4252 unsigned int sampling_rate_in_hz = 0;
4254 // get audio test mode and test pattern parameters
4255 core_link_read_dpcd(
4258 &dpcd_test_mode.raw,
4259 sizeof(dpcd_test_mode));
4261 core_link_read_dpcd(
4263 DP_TEST_AUDIO_PATTERN_TYPE,
4264 &dpcd_pattern_type.value,
4265 sizeof(dpcd_pattern_type));
4267 channel_count = dpcd_test_mode.bits.channel_count + 1;
4269 // read pattern periods for requested channels when sawTooth pattern is requested
4270 if (dpcd_pattern_type.value == AUDIO_TEST_PATTERN_SAWTOOTH ||
4271 dpcd_pattern_type.value == AUDIO_TEST_PATTERN_OPERATOR_DEFINED) {
4273 test_pattern = (dpcd_pattern_type.value == AUDIO_TEST_PATTERN_SAWTOOTH) ?
4274 DP_TEST_PATTERN_AUDIO_SAWTOOTH : DP_TEST_PATTERN_AUDIO_OPERATOR_DEFINED;
4275 // read period for each channel
4276 for (channel = 0; channel < channel_count; channel++) {
4277 core_link_read_dpcd(
4279 DP_TEST_AUDIO_PERIOD_CH1 + channel,
4280 &dpcd_pattern_period[channel].raw,
4281 sizeof(dpcd_pattern_period[channel]));
4285 // translate sampling rate
4286 switch (dpcd_test_mode.bits.sampling_rate) {
4287 case AUDIO_SAMPLING_RATE_32KHZ:
4288 sampling_rate_in_hz = 32000;
4290 case AUDIO_SAMPLING_RATE_44_1KHZ:
4291 sampling_rate_in_hz = 44100;
4293 case AUDIO_SAMPLING_RATE_48KHZ:
4294 sampling_rate_in_hz = 48000;
4296 case AUDIO_SAMPLING_RATE_88_2KHZ:
4297 sampling_rate_in_hz = 88200;
4299 case AUDIO_SAMPLING_RATE_96KHZ:
4300 sampling_rate_in_hz = 96000;
4302 case AUDIO_SAMPLING_RATE_176_4KHZ:
4303 sampling_rate_in_hz = 176400;
4305 case AUDIO_SAMPLING_RATE_192KHZ:
4306 sampling_rate_in_hz = 192000;
4309 sampling_rate_in_hz = 0;
4313 link->audio_test_data.flags.test_requested = 1;
4314 link->audio_test_data.flags.disable_video = disable_video;
4315 link->audio_test_data.sampling_rate = sampling_rate_in_hz;
4316 link->audio_test_data.channel_count = channel_count;
4317 link->audio_test_data.pattern_type = test_pattern;
4319 if (test_pattern == DP_TEST_PATTERN_AUDIO_SAWTOOTH) {
4320 for (modes = 0; modes < pipe_ctx->stream->audio_info.mode_count; modes++) {
4321 link->audio_test_data.pattern_period[modes] = dpcd_pattern_period[modes].bits.pattern_period;
4326 void dc_link_dp_handle_automated_test(struct dc_link *link)
4328 union test_request test_request;
4329 union test_response test_response;
4331 memset(&test_request, 0, sizeof(test_request));
4332 memset(&test_response, 0, sizeof(test_response));
4334 core_link_read_dpcd(
4338 sizeof(union test_request));
4339 if (test_request.bits.LINK_TRAINING) {
4340 /* ACK first to let DP RX test box monitor LT sequence */
4341 test_response.bits.ACK = 1;
4342 core_link_write_dpcd(
4346 sizeof(test_response));
4347 dp_test_send_link_training(link);
4348 /* no acknowledge request is needed again */
4349 test_response.bits.ACK = 0;
4351 if (test_request.bits.LINK_TEST_PATTRN) {
4352 dp_test_send_link_test_pattern(link);
4353 test_response.bits.ACK = 1;
4356 if (test_request.bits.AUDIO_TEST_PATTERN) {
4357 dp_test_get_audio_test_data(link, test_request.bits.TEST_AUDIO_DISABLED_VIDEO);
4358 test_response.bits.ACK = 1;
4361 if (test_request.bits.PHY_TEST_PATTERN) {
4362 dp_test_send_phy_test_pattern(link);
4363 test_response.bits.ACK = 1;
4366 /* send request acknowledgment */
4367 if (test_response.bits.ACK)
4368 core_link_write_dpcd(
4372 sizeof(test_response));
4375 void dc_link_dp_handle_link_loss(struct dc_link *link)
4378 struct pipe_ctx *pipe_ctx;
4380 for (i = 0; i < MAX_PIPES; i++) {
4381 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
4382 if (pipe_ctx && pipe_ctx->stream && pipe_ctx->stream->link == link)
4386 if (pipe_ctx == NULL || pipe_ctx->stream == NULL)
4389 for (i = 0; i < MAX_PIPES; i++) {
4390 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
4391 if (pipe_ctx && pipe_ctx->stream && !pipe_ctx->stream->dpms_off &&
4392 pipe_ctx->stream->link == link && !pipe_ctx->prev_odm_pipe) {
4393 core_link_disable_stream(pipe_ctx);
4397 for (i = 0; i < MAX_PIPES; i++) {
4398 pipe_ctx = &link->dc->current_state->res_ctx.pipe_ctx[i];
4399 if (pipe_ctx && pipe_ctx->stream && !pipe_ctx->stream->dpms_off &&
4400 pipe_ctx->stream->link == link && !pipe_ctx->prev_odm_pipe) {
4401 core_link_enable_stream(link->dc->current_state, pipe_ctx);
/*
 * dc_link_handle_hpd_rx_irq() - service a short-pulse (HPD RX) interrupt.
 *
 * @link: link that raised the short pulse.
 * @out_hpd_irq_dpcd_data: optional out-param receiving the raw IRQ DPCD data.
 * @out_link_loss: set true when link loss was detected (and, unless deferred,
 *                 already handled via dc_link_dp_handle_link_loss()).
 * @defer_handling: when true, heavyweight handling (automated test, link-loss
 *                  recovery, MST message processing) is left to the caller and
 *                  signalled through @has_left_work instead.
 * @has_left_work: optional out-param; set true when deferred work remains.
 *
 * NOTE(review): several lines (returns, break statements, closing braces)
 * appear elided in this extract; documentation below reflects the visible
 * code only.
 */
4406 bool dc_link_handle_hpd_rx_irq(struct dc_link *link, union hpd_irq_data *out_hpd_irq_dpcd_data, bool *out_link_loss,
4407 bool defer_handling, bool *has_left_work)
4409 union hpd_irq_data hpd_irq_dpcd_data = {0};
4410 union device_service_irq device_service_clear = {0};
4411 enum dc_status result;
4412 bool status = false;
4415 *out_link_loss = false;
4418 *has_left_work = false;
4419 /* For use cases related to down stream connection status change,
4420 * PSR and device auto test, refer to function handle_sst_hpd_irq
4423 DC_LOG_HW_HPD_IRQ("%s: Got short pulse HPD on link %d\n",
4424 __func__, link->link_index);
4427 /* All the "handle_hpd_irq_xxx()" methods
4428 * should be called only after
4429 * dal_dpsst_ls_read_hpd_irq_data
4430 * Order of calls is important too
4432 result = read_hpd_rx_irq_data(link, &hpd_irq_dpcd_data);
4433 if (out_hpd_irq_dpcd_data)
4434 *out_hpd_irq_dpcd_data = hpd_irq_dpcd_data;
4436 if (result != DC_OK) {
4437 DC_LOG_HW_HPD_IRQ("%s: DPCD read failed to obtain irq data\n",
/* Automated-test request: ack it in the sink's IRQ vector first, then either
 * defer to the caller or run the test handler immediately. */
4442 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.AUTOMATED_TEST) {
4443 device_service_clear.bits.AUTOMATED_TEST = 1;
4444 core_link_write_dpcd(
4446 DP_DEVICE_SERVICE_IRQ_VECTOR,
4447 &device_service_clear.raw,
4448 sizeof(device_service_clear.raw));
4449 device_service_clear.raw = 0;
4450 if (defer_handling && has_left_work)
4451 *has_left_work = true;
4453 dc_link_dp_handle_automated_test(link);
4457 if (!dc_link_dp_allow_hpd_rx_irq(link)) {
4458 DC_LOG_HW_HPD_IRQ("%s: skipping HPD handling on %d\n",
4459 __func__, link->link_index);
4463 if (handle_hpd_irq_psr_sink(link))
4464 /* PSR-related error was detected and handled */
4467 /* If PSR-related error handled, Main link may be off,
4468 * so do not handle as a normal sink status change interrupt.
/* MST sideband messages (UP_REQ / DOWN_REP) are polled elsewhere; just flag
 * deferred work when asked to. */
4471 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.UP_REQ_MSG_RDY) {
4472 if (defer_handling && has_left_work)
4473 *has_left_work = true;
4477 /* check if we have MST msg and return since we poll for it */
4478 if (hpd_irq_dpcd_data.bytes.device_service_irq.bits.DOWN_REP_MSG_RDY) {
4479 if (defer_handling && has_left_work)
4480 *has_left_work = true;
4484 /* For now we only handle 'Downstream port status' case.
4485 * If we got sink count changed it means
4486 * Downstream port status changed,
4487 * then DM should call DC to do the detection.
4488 * NOTE: Do not handle link loss on eDP since it is internal link*/
4489 if ((link->connector_signal != SIGNAL_TYPE_EDP) &&
4490 hpd_rx_irq_check_link_loss_status(
4492 &hpd_irq_dpcd_data)) {
4493 /* Connectivity log: link loss */
4494 CONN_DATA_LINK_LOSS(link,
4495 hpd_irq_dpcd_data.raw,
4496 sizeof(hpd_irq_dpcd_data),
4499 if (defer_handling && has_left_work)
4500 *has_left_work = true;
4502 dc_link_dp_handle_link_loss(link);
4506 *out_link_loss = true;
/* A sink-count change on an SST branch device means the downstream port
 * status changed; presumably the caller is told to re-detect — the lines
 * acting on this condition are elided here. */
4509 if (link->type == dc_connection_sst_branch &&
4510 hpd_irq_dpcd_data.bytes.sink_cnt.bits.SINK_COUNT
4511 != link->dpcd_sink_count)
4514 /* reasons for HPD RX:
4515 * 1. Link Loss - ie Re-train the Link
4516 * 2. MST sideband message
4517 * 3. Automated Test - ie. Internal Commit
4518 * 4. CP (copy protection) - (not interesting for DM???)
4520 * 6. Downstream Port status changed
4521 * -ie. Detect - this the only one
4522 * which is interesting for DM because
4523 * it must call dc_link_detect.
4528 /*query dpcd for version and mst cap addresses*/
/*
 * is_mst_supported() - return true when the sink advertises MST capability.
 *
 * MST can be force-disabled via preferred_training_settings.mst_enable.
 * Otherwise the sink must report DPCD revision >= 1.2 and MST_CAP = 1 in
 * DP_MSTM_CAP. The lines returning the result are elided in this extract.
 */
4529 bool is_mst_supported(struct dc_link *link)
4532 enum dc_status st = DC_OK;
/* Honor an explicit "MST off" preference before touching the sink. */
4536 if (link->preferred_training_settings.mst_enable &&
4537 *link->preferred_training_settings.mst_enable == false) {
4544 st = core_link_read_dpcd(link, DP_DPCD_REV, &rev.raw,
4547 if (st == DC_OK && rev.raw >= DPCD_REV_12) {
4549 st = core_link_read_dpcd(link, DP_MSTM_CAP,
4550 &cap.raw, sizeof(cap));
4551 if (st == DC_OK && cap.bits.MST_CAP == 1)
/*
 * is_dp_active_dongle() - true when the detected dongle type is one of the
 * active converter types (the range DP-VGA .. DP-HDMI in the dongle enum).
 */
4558 bool is_dp_active_dongle(const struct dc_link *link)
4560 return (link->dpcd_caps.dongle_type >= DISPLAY_DONGLE_DP_VGA_CONVERTER) &&
4561 (link->dpcd_caps.dongle_type <= DISPLAY_DONGLE_DP_HDMI_CONVERTER);
/*
 * is_dp_branch_device() - true when DPCD reported a branch device
 * (cached flag set by get_active_converter_info()).
 */
4564 bool is_dp_branch_device(const struct dc_link *link)
4566 return link->dpcd_caps.is_branch_dev;
/*
 * translate_dpcd_max_bpc() - map the DPCD downstream-port max-bpc code to an
 * integer bit depth. The per-case return values and the default branch are
 * elided in this extract; presumably each case returns 8/10/12/16 — confirm
 * against the full file.
 */
4569 static int translate_dpcd_max_bpc(enum dpcd_downstream_port_max_bpc bpc)
4572 case DOWN_STREAM_MAX_8BPC:
4574 case DOWN_STREAM_MAX_10BPC:
4576 case DOWN_STREAM_MAX_12BPC:
4578 case DOWN_STREAM_MAX_16BPC:
4587 #if defined(CONFIG_DRM_AMD_DC_DCN)
/* NOTE(review): the body of this translation helper is elided in this
 * extract; only its signature survives. */
4588 uint32_t dc_link_bw_kbps_from_raw_frl_link_rate_data(uint8_t bw)
4609 * Return PCON's post FRL link training supported BW if its non-zero, otherwise return max_supported_frl_bw.
/*
 * intersect_frl_link_bw_support() - clamp the advertised max FRL bandwidth to
 * what the PCON actually trained to.
 *
 * @max_supported_frl_bw_in_kbps: capability-reported maximum FRL bandwidth.
 * @hdmi_encoded_link_bw: post-FRL-training status byte from the PCON.
 *
 * Returns the trained FRL bandwidth in kbps when FRL mode is active,
 * otherwise the capability value unchanged.
 */
4611 static uint32_t intersect_frl_link_bw_support(
4612 const uint32_t max_supported_frl_bw_in_kbps,
4613 const union hdmi_encoded_link_bw hdmi_encoded_link_bw)
4615 uint32_t supported_bw_in_kbps = max_supported_frl_bw_in_kbps;
4617 // HDMI_ENCODED_LINK_BW bits are only valid if HDMI Link Configuration bit is 1 (FRL mode)
4618 if (hdmi_encoded_link_bw.bits.FRL_MODE) {
/* Highest-rate bit wins; values are kbps (48 Gbps = 48000000 kbps, etc). */
4619 if (hdmi_encoded_link_bw.bits.BW_48Gbps)
4620 supported_bw_in_kbps = 48000000;
4621 else if (hdmi_encoded_link_bw.bits.BW_40Gbps)
4622 supported_bw_in_kbps = 40000000;
4623 else if (hdmi_encoded_link_bw.bits.BW_32Gbps)
4624 supported_bw_in_kbps = 32000000;
4625 else if (hdmi_encoded_link_bw.bits.BW_24Gbps)
4626 supported_bw_in_kbps = 24000000;
4627 else if (hdmi_encoded_link_bw.bits.BW_18Gbps)
4628 supported_bw_in_kbps = 18000000;
4629 else if (hdmi_encoded_link_bw.bits.BW_9Gbps)
4630 supported_bw_in_kbps = 9000000;
4633 return supported_bw_in_kbps;
/*
 * read_dp_device_vendor_id() - cache the branch device's IEEE OUI and
 * device-id string from DPCD into link->dpcd_caps.
 *
 * The OUI bytes are packed big-endian into branch_dev_id; the third OUI byte
 * and the copy of the device-id string into branch_dev_name are partially
 * elided in this extract.
 */
4637 static void read_dp_device_vendor_id(struct dc_link *link)
4639 struct dp_device_vendor_id dp_id;
4641 /* read IEEE branch device id */
4642 core_link_read_dpcd(
4648 link->dpcd_caps.branch_dev_id =
4649 (dp_id.ieee_oui[0] << 16) +
4650 (dp_id.ieee_oui[1] << 8) +
4654 link->dpcd_caps.branch_dev_name,
4655 dp_id.ieee_device_id,
4656 sizeof(dp_id.ieee_device_id));
/*
 * get_active_converter_info() - decode DP_DOWNSTREAMPORT_PRESENT and related
 * DPCD registers into link->dpcd_caps dongle/branch information.
 *
 * @data: raw DP_DOWNSTREAMPORT_PRESENT byte.
 * @link: link whose dpcd_caps are populated.
 *
 * Determines dongle type (none/VGA/DVI/HDMI converter), reads the detailed
 * downstream-port capability block for DPCD >= 1.1, fills HDMI passthrough /
 * conversion capabilities, FRL bandwidth for PCONs, branch HW/FW revision,
 * and (DPCD >= 1.4, non-trivial dongles) the DFP capability extension.
 *
 * NOTE(review): break statements, closing braces and some #else/#endif lines
 * are elided in this extract.
 */
4661 static void get_active_converter_info(
4662 uint8_t data, struct dc_link *link)
4664 union dp_downstream_port_present ds_port = { .byte = data };
4665 memset(&link->dpcd_caps.dongle_caps, 0, sizeof(link->dpcd_caps.dongle_caps));
4667 /* decode converter info*/
4668 if (!ds_port.fields.PORT_PRESENT) {
4669 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
4670 ddc_service_set_dongle_type(link->ddc,
4671 link->dpcd_caps.dongle_type);
4672 link->dpcd_caps.is_branch_dev = false;
4676 /* DPCD 0x5 bit 0 = 1, it indicate it's branch device */
4677 link->dpcd_caps.is_branch_dev = ds_port.fields.PORT_PRESENT;
/* Coarse dongle type from the legacy PORT_TYPE field. */
4679 switch (ds_port.fields.PORT_TYPE) {
4680 case DOWNSTREAM_VGA:
4681 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_DP_VGA_CONVERTER;
4683 case DOWNSTREAM_DVI_HDMI_DP_PLUS_PLUS:
4684 /* At this point we don't know is it DVI or HDMI or DP++,
4686 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_DP_DVI_CONVERTER;
4689 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
/* DPCD >= 1.1: refine using the 16-byte detailed capability block. */
4693 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_11) {
4694 uint8_t det_caps[16]; /* CTS 4.2.2.7 expects source to read Detailed Capabilities Info : 00080h-0008F.*/
4695 union dwnstream_port_caps_byte0 *port_caps =
4696 (union dwnstream_port_caps_byte0 *)det_caps;
4697 if (core_link_read_dpcd(link, DP_DOWNSTREAM_PORT_0,
4698 det_caps, sizeof(det_caps)) == DC_OK) {
4700 switch (port_caps->bits.DWN_STRM_PORTX_TYPE) {
4701 /*Handle DP case as DONGLE_NONE*/
4702 case DOWN_STREAM_DETAILED_DP:
4703 link->dpcd_caps.dongle_type = DISPLAY_DONGLE_NONE;
4705 case DOWN_STREAM_DETAILED_VGA:
4706 link->dpcd_caps.dongle_type =
4707 DISPLAY_DONGLE_DP_VGA_CONVERTER;
4709 case DOWN_STREAM_DETAILED_DVI:
4710 link->dpcd_caps.dongle_type =
4711 DISPLAY_DONGLE_DP_DVI_CONVERTER;
4713 case DOWN_STREAM_DETAILED_HDMI:
4714 case DOWN_STREAM_DETAILED_DP_PLUS_PLUS:
4715 /*Handle DP++ active converter case, process DP++ case as HDMI case according DP1.4 spec*/
4716 link->dpcd_caps.dongle_type =
4717 DISPLAY_DONGLE_DP_HDMI_CONVERTER;
4719 link->dpcd_caps.dongle_caps.dongle_type = link->dpcd_caps.dongle_type;
4720 if (ds_port.fields.DETAILED_CAPS) {
4722 union dwnstream_port_caps_byte3_hdmi
4723 hdmi_caps = {.raw = det_caps[3] };
4724 union dwnstream_port_caps_byte2
4725 hdmi_color_caps = {.raw = det_caps[2] };
4726 link->dpcd_caps.dongle_caps.dp_hdmi_max_pixel_clk_in_khz =
4729 link->dpcd_caps.dongle_caps.is_dp_hdmi_s3d_converter =
4730 hdmi_caps.bits.FRAME_SEQ_TO_FRAME_PACK;
4731 /*YCBCR capability only for HDMI case*/
4732 if (port_caps->bits.DWN_STRM_PORTX_TYPE
4733 == DOWN_STREAM_DETAILED_HDMI) {
4734 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr422_pass_through =
4735 hdmi_caps.bits.YCrCr422_PASS_THROUGH;
4736 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr420_pass_through =
4737 hdmi_caps.bits.YCrCr420_PASS_THROUGH;
4738 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr422_converter =
4739 hdmi_caps.bits.YCrCr422_CONVERSION;
4740 link->dpcd_caps.dongle_caps.is_dp_hdmi_ycbcr420_converter =
4741 hdmi_caps.bits.YCrCr420_CONVERSION;
4744 link->dpcd_caps.dongle_caps.dp_hdmi_max_bpc =
4745 translate_dpcd_max_bpc(
4746 hdmi_color_caps.bits.MAX_BITS_PER_COLOR_COMPONENT);
/* PCON FRL: clamp advertised FRL bandwidth by the post-training status. */
4748 #if defined(CONFIG_DRM_AMD_DC_DCN)
4749 if (link->dc->caps.hdmi_frl_pcon_support) {
4750 union hdmi_encoded_link_bw hdmi_encoded_link_bw;
4752 link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps =
4753 dc_link_bw_kbps_from_raw_frl_link_rate_data(
4754 hdmi_color_caps.bits.MAX_ENCODED_LINK_BW_SUPPORT);
4756 // Intersect reported max link bw support with the supported link rate post FRL link training
4757 if (core_link_read_dpcd(link, DP_PCON_HDMI_POST_FRL_STATUS,
4758 &hdmi_encoded_link_bw.raw, sizeof(hdmi_encoded_link_bw)) == DC_OK) {
4759 link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps = intersect_frl_link_bw_support(
4760 link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps,
4761 hdmi_encoded_link_bw);
4764 if (link->dpcd_caps.dongle_caps.dp_hdmi_frl_max_link_bw_in_kbps > 0)
4765 link->dpcd_caps.dongle_caps.extendedCapValid = true;
4769 if (link->dpcd_caps.dongle_caps.dp_hdmi_max_pixel_clk_in_khz != 0)
4770 link->dpcd_caps.dongle_caps.extendedCapValid = true;
4778 ddc_service_set_dongle_type(link->ddc, link->dpcd_caps.dongle_type);
/* Cache branch hardware/firmware revision for logging and quirks. */
4781 struct dp_sink_hw_fw_revision dp_hw_fw_revision;
4783 core_link_read_dpcd(
4785 DP_BRANCH_REVISION_START,
4786 (uint8_t *)&dp_hw_fw_revision,
4787 sizeof(dp_hw_fw_revision));
4789 link->dpcd_caps.branch_hw_revision =
4790 dp_hw_fw_revision.ieee_hw_rev;
4793 link->dpcd_caps.branch_fw_revision,
4794 dp_hw_fw_revision.ieee_fw_rev,
4795 sizeof(dp_hw_fw_revision.ieee_fw_rev));
/* DPCD >= 1.4 with a real dongle: read the DFP capability extension. */
4797 #if defined(CONFIG_DRM_AMD_DC_DCN)
4798 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_14 &&
4799 link->dpcd_caps.dongle_type != DISPLAY_DONGLE_NONE) {
4800 union dp_dfp_cap_ext dfp_cap_ext;
4801 memset(&dfp_cap_ext, '\0', sizeof (dfp_cap_ext));
4802 core_link_read_dpcd(
4804 DP_DFP_CAPABILITY_EXTENSION_SUPPORT,
4806 sizeof(dfp_cap_ext.raw));
4807 link->dpcd_caps.dongle_caps.dfp_cap_ext.supported = dfp_cap_ext.fields.supported;
/* Multi-byte fields are little-endian in DPCD: low byte + (high byte << 8). */
4808 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_pixel_rate_in_mps =
4809 dfp_cap_ext.fields.max_pixel_rate_in_mps[0] +
4810 (dfp_cap_ext.fields.max_pixel_rate_in_mps[1] << 8);
4811 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_h_active_width =
4812 dfp_cap_ext.fields.max_video_h_active_width[0] +
4813 (dfp_cap_ext.fields.max_video_h_active_width[1] << 8);
4814 link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_v_active_height =
4815 dfp_cap_ext.fields.max_video_v_active_height[0] +
4816 (dfp_cap_ext.fields.max_video_v_active_height[1] << 8);
4817 link->dpcd_caps.dongle_caps.dfp_cap_ext.encoding_format_caps =
4818 dfp_cap_ext.fields.encoding_format_caps;
4819 link->dpcd_caps.dongle_caps.dfp_cap_ext.rgb_color_depth_caps =
4820 dfp_cap_ext.fields.rgb_color_depth_caps;
4821 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr444_color_depth_caps =
4822 dfp_cap_ext.fields.ycbcr444_color_depth_caps;
4823 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr422_color_depth_caps =
4824 dfp_cap_ext.fields.ycbcr422_color_depth_caps;
4825 link->dpcd_caps.dongle_caps.dfp_cap_ext.ycbcr420_color_depth_caps =
4826 dfp_cap_ext.fields.ycbcr420_color_depth_caps;
4827 DC_LOG_DP2("DFP capability extension is read at link %d", link->link_index);
4828 DC_LOG_DP2("\tdfp_cap_ext.supported = %s", link->dpcd_caps.dongle_caps.dfp_cap_ext.supported ? "true" : "false");
4829 DC_LOG_DP2("\tdfp_cap_ext.max_pixel_rate_in_mps = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_pixel_rate_in_mps);
4830 DC_LOG_DP2("\tdfp_cap_ext.max_video_h_active_width = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_h_active_width);
4831 DC_LOG_DP2("\tdfp_cap_ext.max_video_v_active_height = %d", link->dpcd_caps.dongle_caps.dfp_cap_ext.max_video_v_active_height);
/*
 * dp_wa_power_up_0010FA() - workaround for dongles that power down AUX.
 *
 * If the first capability read returned DPCD_REV == 0, retry up to 4 times
 * after forcing RX power up, re-reading DP_DPCD_REV each pass. For known
 * DP-VGA converter branch IDs (0010FA, 0080E1, 00E04C) set
 * dp_keep_receiver_powered so the encoder skips RX power-down on disable;
 * these dongles power down all internal circuits including AUX, which
 * violates the spec and breaks DPCD/EDID reads.
 */
4836 static void dp_wa_power_up_0010FA(struct dc_link *link, uint8_t *dpcd_data,
4841 if (!link->dpcd_caps.dpcd_rev.raw) {
4843 dp_receiver_power_ctrl(link, true);
4844 core_link_read_dpcd(link, DP_DPCD_REV,
4846 link->dpcd_caps.dpcd_rev.raw = dpcd_data[
4849 } while (retry++ < 4 && !link->dpcd_caps.dpcd_rev.raw);
4852 if (link->dpcd_caps.dongle_type == DISPLAY_DONGLE_DP_VGA_CONVERTER) {
4853 switch (link->dpcd_caps.branch_dev_id) {
4854 /* 0010FA active dongles (DP-VGA, DP-DLDVI converters) power down
4855 * all internal circuits including AUX communication preventing
4856 * reading DPCD table and EDID (spec violation).
4857 * Encoder will skip DP RX power down on disable_output to
4858 * keep receiver powered all the time.*/
4859 case DP_BRANCH_DEVICE_ID_0010FA:
4860 case DP_BRANCH_DEVICE_ID_0080E1:
4861 case DP_BRANCH_DEVICE_ID_00E04C:
4862 link->wa_flags.dp_keep_receiver_powered = true;
4865 /* TODO: May need work around for other dongles. */
4867 link->wa_flags.dp_keep_receiver_powered = false;
/* Non-VGA-converter dongles never need the workaround. */
4871 link->wa_flags.dp_keep_receiver_powered = false;
4874 /* Read additional sink caps defined in source specific DPCD area
4875 * This function currently only reads from SinkCapability address (DP_SOURCE_SINK_CAP)
/*
 * dpcd_read_sink_ext_caps() - read one byte of source-specific sink
 * capabilities into link->dpcd_sink_ext_caps. Returns false (presumably;
 * the return lines are elided here) when the DPCD read fails.
 */
4877 static bool dpcd_read_sink_ext_caps(struct dc_link *link)
4884 if (core_link_read_dpcd(link, DP_SOURCE_SINK_CAP, &dpcd_data, 1) != DC_OK)
4887 link->dpcd_sink_ext_caps.raw = dpcd_data;
/*
 * dp_retrieve_lttpr_cap() - determine LTTPR mode and read repeater caps.
 *
 * Decides link->lttpr_mode from VBIOS flags (vbios_lttpr_enable /
 * vbios_lttpr_aware), the driver's non-transparent-mode config, and
 * extended-AUX-timeout support; a DPIA debug option can force NON_LTTPR.
 * When LTTPR mode is TRANSPARENT or NON_TRANSPARENT, reads the LTTPR
 * capability block starting at DPCD 0xF0000 and unpacks each field by
 * offset. Returns true when a valid LTTPR chain is present (repeater count
 * decodes non-zero and < 0xff, lane count 1..4, caps revision >= 1.4);
 * otherwise falls back to NON_LTTPR mode.
 *
 * NOTE(review): #else/#endif lines and some braces are elided in this
 * extract, so the two CONFIG_DRM_AMD_DC_DCN variants appear interleaved.
 */
4891 bool dp_retrieve_lttpr_cap(struct dc_link *link)
4893 #if defined(CONFIG_DRM_AMD_DC_DCN)
4894 uint8_t lttpr_dpcd_data[8];
4895 bool allow_lttpr_non_transparent_mode = 0;
4897 uint8_t lttpr_dpcd_data[6];
4899 bool vbios_lttpr_enable = link->dc->caps.vbios_lttpr_enable;
4900 bool vbios_lttpr_interop = link->dc->caps.vbios_lttpr_aware;
4901 enum dc_status status = DC_ERROR_UNEXPECTED;
4902 bool is_lttpr_present = false;
4904 memset(lttpr_dpcd_data, '\0', sizeof(lttpr_dpcd_data));
/* Non-transparent mode is allowed per channel-coding generation:
 * DP2_0 config bit applies to 128b/132b sinks, DP1_4A to 8b/10b sinks. */
4906 #if defined(CONFIG_DRM_AMD_DC_DCN)
4907 if ((link->dc->config.allow_lttpr_non_transparent_mode.bits.DP2_0 &&
4908 link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED)) {
4909 allow_lttpr_non_transparent_mode = 1;
4910 } else if (link->dc->config.allow_lttpr_non_transparent_mode.bits.DP1_4A &&
4911 !link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED) {
4912 allow_lttpr_non_transparent_mode = 1;
4917 * Logic to determine LTTPR mode
4919 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
4920 if (vbios_lttpr_enable && vbios_lttpr_interop)
4921 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
4922 else if (!vbios_lttpr_enable && vbios_lttpr_interop) {
4923 #if defined(CONFIG_DRM_AMD_DC_DCN)
4924 if (allow_lttpr_non_transparent_mode)
4926 if (link->dc->config.allow_lttpr_non_transparent_mode)
4928 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
4930 link->lttpr_mode = LTTPR_MODE_TRANSPARENT;
4931 } else if (!vbios_lttpr_enable && !vbios_lttpr_interop) {
4932 #if defined(CONFIG_DRM_AMD_DC_DCN)
4933 if (!allow_lttpr_non_transparent_mode || !link->dc->caps.extended_aux_timeout_support)
4935 if (!link->dc->config.allow_lttpr_non_transparent_mode
4936 || !link->dc->caps.extended_aux_timeout_support)
4938 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
4940 link->lttpr_mode = LTTPR_MODE_NON_TRANSPARENT;
4942 #if defined(CONFIG_DRM_AMD_DC_DCN)
4943 /* Check DP tunnel LTTPR mode debug option. */
4944 if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA &&
4945 link->dc->debug.dpia_debug.bits.force_non_lttpr)
4946 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
4949 if (link->lttpr_mode == LTTPR_MODE_NON_TRANSPARENT || link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
4950 /* By reading LTTPR capability, RX assumes that we will enable
4951 * LTTPR extended aux timeout if LTTPR is present.
4953 status = core_link_read_dpcd(
4955 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV,
4957 sizeof(lttpr_dpcd_data));
4958 if (status != DC_OK) {
4959 DC_LOG_DP2("%s: Read LTTPR caps data failed.\n", __func__);
/* Unpack each capability byte by its offset from the block base. */
4963 link->dpcd_caps.lttpr_caps.revision.raw =
4964 lttpr_dpcd_data[DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV -
4965 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4967 link->dpcd_caps.lttpr_caps.max_link_rate =
4968 lttpr_dpcd_data[DP_MAX_LINK_RATE_PHY_REPEATER -
4969 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4971 link->dpcd_caps.lttpr_caps.phy_repeater_cnt =
4972 lttpr_dpcd_data[DP_PHY_REPEATER_CNT -
4973 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4975 link->dpcd_caps.lttpr_caps.max_lane_count =
4976 lttpr_dpcd_data[DP_MAX_LANE_COUNT_PHY_REPEATER -
4977 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4979 link->dpcd_caps.lttpr_caps.mode =
4980 lttpr_dpcd_data[DP_PHY_REPEATER_MODE -
4981 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4983 link->dpcd_caps.lttpr_caps.max_ext_timeout =
4984 lttpr_dpcd_data[DP_PHY_REPEATER_EXTENDED_WAIT_TIMEOUT -
4985 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4987 #if defined(CONFIG_DRM_AMD_DC_DCN)
4988 link->dpcd_caps.lttpr_caps.main_link_channel_coding.raw =
4989 lttpr_dpcd_data[DP_MAIN_LINK_CHANNEL_CODING_PHY_REPEATER -
4990 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4992 link->dpcd_caps.lttpr_caps.supported_128b_132b_rates.raw =
4993 lttpr_dpcd_data[DP_PHY_REPEATER_128b_132b_RATES -
4994 DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
4997 /* Attempt to train in LTTPR transparent mode if repeater count exceeds 8. */
4998 is_lttpr_present = (dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt) != 0 &&
4999 link->dpcd_caps.lttpr_caps.phy_repeater_cnt < 0xff &&
5000 link->dpcd_caps.lttpr_caps.max_lane_count > 0 &&
5001 link->dpcd_caps.lttpr_caps.max_lane_count <= 4 &&
5002 link->dpcd_caps.lttpr_caps.revision.raw >= 0x14);
5003 if (is_lttpr_present) {
5004 CONN_DATA_DETECT(link, lttpr_dpcd_data, sizeof(lttpr_dpcd_data), "LTTPR Caps: ");
5005 configure_lttpr_mode_transparent(link);
5007 link->lttpr_mode = LTTPR_MODE_NON_LTTPR;
5009 return is_lttpr_present;
/*
 * retrieve_link_cap() - full DPCD capability discovery for a DP link.
 *
 * Sequence (as visible in this extract): configure a conservative AUX
 * timeout, probe LTTPRs, read DP tunneling data, wake the sink if AUX is in
 * D3, send source-specific OUI data, then retry-read the 16-byte receiver
 * capability block. If the extended receiver capability field is present,
 * re-read caps from the extended block. Decodes lane count, link rate,
 * downspread, eDP config, sink count, sink IEEE OUI/device id, HW/FW
 * revision, DSC/FEC caps (DPCD >= 1.4), sink ext caps, and 128b/132b
 * (DP 2.x) link-rate/fallback-format/FEC1 capabilities.
 *
 * NOTE(review): many lines (returns, breaks, braces, some address
 * arguments) are elided in this extract.
 */
5012 static bool retrieve_link_cap(struct dc_link *link)
5014 /* DP_ADAPTER_CAP - DP_DPCD_REV + 1 == 16 and also DP_DSC_BITS_PER_PIXEL_INC - DP_DSC_SUPPORT + 1 == 16,
5015 * which means size 16 will be good for both of those DPCD register block reads
5017 uint8_t dpcd_data[16];
5018 /*Only need to read 1 byte starting from DP_DPRX_FEATURE_ENUMERATION_LIST.
5020 uint8_t dpcd_dprx_data = '\0';
5021 uint8_t dpcd_power_state = '\0';
5023 struct dp_device_vendor_id sink_id;
5024 union down_stream_port_count down_strm_port_count;
5025 union edp_configuration_cap edp_config_cap;
5026 union dp_downstream_port_present ds_port = { 0 };
5027 enum dc_status status = DC_ERROR_UNEXPECTED;
5028 uint32_t read_dpcd_retry_cnt = 3;
5030 struct dp_sink_hw_fw_revision dp_hw_fw_revision;
5031 const uint32_t post_oui_delay = 30; // 30ms
5032 bool is_lttpr_present = false;
5034 memset(dpcd_data, '\0', sizeof(dpcd_data));
5035 memset(&down_strm_port_count,
5036 '\0', sizeof(union down_stream_port_count));
5037 memset(&edp_config_cap, '\0',
5038 sizeof(union edp_configuration_cap));
5040 /* if extended timeout is supported in hardware,
5041 * default to LTTPR timeout (3.2ms) first as a W/A for DP link layer
5042 * CTS 4.2.1.1 regression introduced by CTS specs requirement update.
5044 dc_link_aux_try_to_configure_timeout(link->ddc,
5045 LINK_AUX_DEFAULT_LTTPR_TIMEOUT_PERIOD);
5047 is_lttpr_present = dp_retrieve_lttpr_cap(link);
5048 /* Read DP tunneling information. */
5049 status = dpcd_get_tunneling_device_data(link);
5051 status = core_link_read_dpcd(link, DP_SET_POWER,
5052 &dpcd_power_state, sizeof(dpcd_power_state));
5054 /* Delay 1 ms if AUX CH is in power down state. Based on spec
5055 * section 2.3.1.2, if AUX CH may be powered down due to
5056 * write to DPCD 600h = 2. Sink AUX CH is monitoring differential
5057 * signal and may need up to 1 ms before being able to reply.
5059 if (status != DC_OK || dpcd_power_state == DP_SET_POWER_D3)
5062 dpcd_set_source_specific_data(link);
5063 /* Sink may need to configure internals based on vendor, so allow some
5064 * time before proceeding with possibly vendor specific transactions
5066 msleep(post_oui_delay);
/* Retry the base receiver-capability block read a few times. */
5068 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5069 status = core_link_read_dpcd(
5074 if (status == DC_OK)
5078 if (status != DC_OK) {
5079 dm_error("%s: Read receiver caps dpcd data failed.\n", __func__);
/* No LTTPR: drop back to the standard AUX timeout. */
5083 if (!is_lttpr_present)
5084 dc_link_aux_try_to_configure_timeout(link->ddc, LINK_AUX_DEFAULT_TIMEOUT_PERIOD);
5087 union training_aux_rd_interval aux_rd_interval;
5089 aux_rd_interval.raw =
5090 dpcd_data[DP_TRAINING_AUX_RD_INTERVAL];
5092 link->dpcd_caps.ext_receiver_cap_field_present =
5093 aux_rd_interval.bits.EXT_RECEIVER_CAP_FIELD_PRESENT == 1;
/* Extended receiver cap block (DPCD 0x2200..) overrides the base block. */
5095 if (aux_rd_interval.bits.EXT_RECEIVER_CAP_FIELD_PRESENT == 1) {
5096 uint8_t ext_cap_data[16];
5098 memset(ext_cap_data, '\0', sizeof(ext_cap_data));
5099 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5100 status = core_link_read_dpcd(
5104 sizeof(ext_cap_data));
5105 if (status == DC_OK) {
5106 memcpy(dpcd_data, ext_cap_data, sizeof(dpcd_data));
5110 if (status != DC_OK)
5111 dm_error("%s: Read extend caps data failed, use cap from dpcd 0.\n", __func__);
5115 link->dpcd_caps.dpcd_rev.raw =
5116 dpcd_data[DP_DPCD_REV - DP_DPCD_REV];
5118 if (link->dpcd_caps.ext_receiver_cap_field_present) {
5119 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5120 status = core_link_read_dpcd(
5122 DP_DPRX_FEATURE_ENUMERATION_LIST,
5124 sizeof(dpcd_dprx_data));
5125 if (status == DC_OK)
5129 link->dpcd_caps.dprx_feature.raw = dpcd_dprx_data;
5131 if (status != DC_OK)
5132 dm_error("%s: Read DPRX caps data failed.\n", __func__);
5136 link->dpcd_caps.dprx_feature.raw = 0;
5140 /* Error condition checking...
5141 * It is impossible for Sink to report Max Lane Count = 0.
5142 * It is possible for Sink to report Max Link Rate = 0, if it is
5143 * an eDP device that is reporting specialized link rates in the
5144 * SUPPORTED_LINK_RATE table.
5146 if (dpcd_data[DP_MAX_LANE_COUNT - DP_DPCD_REV] == 0)
5149 ds_port.byte = dpcd_data[DP_DOWNSTREAMPORT_PRESENT -
5152 read_dp_device_vendor_id(link);
5154 get_active_converter_info(ds_port.byte, link);
5156 dp_wa_power_up_0010FA(link, dpcd_data, sizeof(dpcd_data));
5158 down_strm_port_count.raw = dpcd_data[DP_DOWN_STREAM_PORT_COUNT -
5161 link->dpcd_caps.allow_invalid_MSA_timing_param =
5162 down_strm_port_count.bits.IGNORE_MSA_TIMING_PARAM;
5164 link->dpcd_caps.max_ln_count.raw = dpcd_data[
5165 DP_MAX_LANE_COUNT - DP_DPCD_REV];
5167 link->dpcd_caps.max_down_spread.raw = dpcd_data[
5168 DP_MAX_DOWNSPREAD - DP_DPCD_REV];
5170 link->reported_link_cap.lane_count =
5171 link->dpcd_caps.max_ln_count.bits.MAX_LANE_COUNT;
5172 link->reported_link_cap.link_rate = dpcd_data[
5173 DP_MAX_LINK_RATE - DP_DPCD_REV];
5174 link->reported_link_cap.link_spread =
5175 link->dpcd_caps.max_down_spread.bits.MAX_DOWN_SPREAD ?
5176 LINK_SPREAD_05_DOWNSPREAD_30KHZ : LINK_SPREAD_DISABLED;
5178 edp_config_cap.raw = dpcd_data[
5179 DP_EDP_CONFIGURATION_CAP - DP_DPCD_REV];
5180 link->dpcd_caps.panel_mode_edp =
5181 edp_config_cap.bits.ALT_SCRAMBLER_RESET;
5182 link->dpcd_caps.dpcd_display_control_capable =
5183 edp_config_cap.bits.DPCD_DISPLAY_CONTROL_CAPABLE;
5185 link->test_pattern_enabled = false;
5186 link->compliance_test_state.raw = 0;
5188 /* read sink count */
5189 core_link_read_dpcd(link,
5191 &link->dpcd_caps.sink_count.raw,
5192 sizeof(link->dpcd_caps.sink_count.raw));
5194 /* read sink ieee oui */
5195 core_link_read_dpcd(link,
5197 (uint8_t *)(&sink_id),
5200 link->dpcd_caps.sink_dev_id =
5201 (sink_id.ieee_oui[0] << 16) +
5202 (sink_id.ieee_oui[1] << 8) +
5203 (sink_id.ieee_oui[2]);
5206 link->dpcd_caps.sink_dev_id_str,
5207 sink_id.ieee_device_id,
5208 sizeof(sink_id.ieee_device_id));
5210 /* Quirk Apple MBP 2017 15" Retina panel: Wrong DP_MAX_LINK_RATE */
5212 uint8_t str_mbp_2017[] = { 101, 68, 21, 101, 98, 97 };
5214 if ((link->dpcd_caps.sink_dev_id == 0x0010fa) &&
5215 !memcmp(link->dpcd_caps.sink_dev_id_str, str_mbp_2017,
5216 sizeof(str_mbp_2017))) {
5217 link->reported_link_cap.link_rate = 0x0c;
5221 core_link_read_dpcd(
5223 DP_SINK_HW_REVISION_START,
5224 (uint8_t *)&dp_hw_fw_revision,
5225 sizeof(dp_hw_fw_revision));
5227 link->dpcd_caps.sink_hw_revision =
5228 dp_hw_fw_revision.ieee_hw_rev;
5231 link->dpcd_caps.sink_fw_revision,
5232 dp_hw_fw_revision.ieee_fw_rev,
5233 sizeof(dp_hw_fw_revision.ieee_fw_rev));
5235 memset(&link->dpcd_caps.dsc_caps, '\0',
5236 sizeof(link->dpcd_caps.dsc_caps));
5237 memset(&link->dpcd_caps.fec_cap, '\0', sizeof(link->dpcd_caps.fec_cap));
5238 /* Read DSC and FEC sink capabilities if DP revision is 1.4 and up */
5239 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_14) {
5240 status = core_link_read_dpcd(
5243 &link->dpcd_caps.fec_cap.raw,
5244 sizeof(link->dpcd_caps.fec_cap.raw));
5245 status = core_link_read_dpcd(
5248 link->dpcd_caps.dsc_caps.dsc_basic_caps.raw,
5249 sizeof(link->dpcd_caps.dsc_caps.dsc_basic_caps.raw));
5250 #if defined(CONFIG_DRM_AMD_DC_DCN)
5251 if (link->dpcd_caps.dongle_type != DISPLAY_DONGLE_NONE) {
5252 status = core_link_read_dpcd(
5254 DP_DSC_BRANCH_OVERALL_THROUGHPUT_0,
5255 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw,
5256 sizeof(link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw));
5257 DC_LOG_DSC("DSC branch decoder capability is read at link %d", link->link_index);
5258 DC_LOG_DSC("\tBRANCH_OVERALL_THROUGHPUT_0 = 0x%02x",
5259 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_OVERALL_THROUGHPUT_0);
5260 DC_LOG_DSC("\tBRANCH_OVERALL_THROUGHPUT_1 = 0x%02x",
5261 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_OVERALL_THROUGHPUT_1);
5262 DC_LOG_DSC("\tBRANCH_MAX_LINE_WIDTH 0x%02x",
5263 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.fields.BRANCH_MAX_LINE_WIDTH);
5266 status = core_link_read_dpcd(
5268 DP_DSC_BRANCH_OVERALL_THROUGHPUT_0,
5269 link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw,
5270 sizeof(link->dpcd_caps.dsc_caps.dsc_branch_decoder_caps.raw));
5274 if (!dpcd_read_sink_ext_caps(link))
5275 link->dpcd_sink_ext_caps.raw = 0;
/* DP 2.x: 128b/132b channel coding and UHBR link-rate discovery. */
5277 #if defined(CONFIG_DRM_AMD_DC_DCN)
5278 link->dpcd_caps.channel_coding_cap.raw = dpcd_data[DP_MAIN_LINK_CHANNEL_CODING_CAP - DP_DPCD_REV];
5280 if (link->dpcd_caps.channel_coding_cap.bits.DP_128b_132b_SUPPORTED) {
5281 DC_LOG_DP2("128b/132b encoding is supported at link %d", link->link_index);
5283 core_link_read_dpcd(link,
5284 DP_128b_132b_SUPPORTED_LINK_RATES,
5285 &link->dpcd_caps.dp_128b_132b_supported_link_rates.raw,
5286 sizeof(link->dpcd_caps.dp_128b_132b_supported_link_rates.raw));
5287 if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR20)
5288 link->reported_link_cap.link_rate = LINK_RATE_UHBR20;
5289 else if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR13_5)
5290 link->reported_link_cap.link_rate = LINK_RATE_UHBR13_5;
5291 else if (link->dpcd_caps.dp_128b_132b_supported_link_rates.bits.UHBR10)
5292 link->reported_link_cap.link_rate = LINK_RATE_UHBR10;
5294 dm_error("%s: Invalid RX 128b_132b_supported_link_rates\n", __func__);
5295 DC_LOG_DP2("128b/132b supported link rates is read at link %d", link->link_index);
5296 DC_LOG_DP2("\tmax 128b/132b link rate support is %d.%d GHz",
5297 link->reported_link_cap.link_rate / 100,
5298 link->reported_link_cap.link_rate % 100);
5300 core_link_read_dpcd(link,
5301 DP_SINK_VIDEO_FALLBACK_FORMATS,
5302 &link->dpcd_caps.fallback_formats.raw,
5303 sizeof(link->dpcd_caps.fallback_formats.raw));
5304 DC_LOG_DP2("sink video fallback format is read at link %d", link->link_index);
5305 if (link->dpcd_caps.fallback_formats.bits.dp_1920x1080_60Hz_24bpp_support)
5306 DC_LOG_DP2("\t1920x1080@60Hz 24bpp fallback format supported")
5307 if (link->dpcd_caps.fallback_formats.bits.dp_1280x720_60Hz_24bpp_support)
5308 DC_LOG_DP2("\t1280x720@60Hz 24bpp fallback format supported");
5309 if (link->dpcd_caps.fallback_formats.bits.dp_1024x768_60Hz_24bpp_support)
5310 DC_LOG_DP2("\t1024x768@60Hz 24bpp fallback format supported");
5311 if (link->dpcd_caps.fallback_formats.raw == 0) {
5312 DC_LOG_DP2("\tno supported fallback formats, assume 1920x1080@60Hz 24bpp is supported");
5313 link->dpcd_caps.fallback_formats.bits.dp_1920x1080_60Hz_24bpp_support = 1;
5316 core_link_read_dpcd(link,
5317 DP_FEC_CAPABILITY_1,
5318 &link->dpcd_caps.fec_cap1.raw,
5319 sizeof(link->dpcd_caps.fec_cap1.raw));
5320 DC_LOG_DP2("FEC CAPABILITY 1 is read at link %d", link->link_index);
5321 if (link->dpcd_caps.fec_cap1.bits.AGGREGATED_ERROR_COUNTERS_CAPABLE)
5322 DC_LOG_DP2("\tFEC aggregated error counters are supported");
5326 /* Connectivity log: detection */
5327 CONN_DATA_DETECT(link, dpcd_data, sizeof(dpcd_data), "Rx Caps: ");
/*
 * dp_overwrite_extended_receiver_cap() - re-read the base receiver cap block
 * and overwrite the cached caps with it.
 *
 * Mirrors the decode portion of retrieve_link_cap(): retry-reads 16 bytes of
 * DPCD, then refreshes dpcd_rev, downstream-port info, lane count,
 * downspread, reported link cap and eDP config cap. Returns a bool
 * (presumably success; return lines are elided in this extract).
 */
5332 bool dp_overwrite_extended_receiver_cap(struct dc_link *link)
5334 uint8_t dpcd_data[16];
5335 uint32_t read_dpcd_retry_cnt = 3;
5336 enum dc_status status = DC_ERROR_UNEXPECTED;
5337 union dp_downstream_port_present ds_port = { 0 };
5338 union down_stream_port_count down_strm_port_count;
5339 union edp_configuration_cap edp_config_cap;
5343 for (i = 0; i < read_dpcd_retry_cnt; i++) {
5344 status = core_link_read_dpcd(
5349 if (status == DC_OK)
5353 link->dpcd_caps.dpcd_rev.raw =
5354 dpcd_data[DP_DPCD_REV - DP_DPCD_REV];
/* Sanity check: a sink can never report Max Lane Count = 0. */
5356 if (dpcd_data[DP_MAX_LANE_COUNT - DP_DPCD_REV] == 0)
5359 ds_port.byte = dpcd_data[DP_DOWNSTREAMPORT_PRESENT -
5362 get_active_converter_info(ds_port.byte, link);
5364 down_strm_port_count.raw = dpcd_data[DP_DOWN_STREAM_PORT_COUNT -
5367 link->dpcd_caps.allow_invalid_MSA_timing_param =
5368 down_strm_port_count.bits.IGNORE_MSA_TIMING_PARAM;
5370 link->dpcd_caps.max_ln_count.raw = dpcd_data[
5371 DP_MAX_LANE_COUNT - DP_DPCD_REV];
5373 link->dpcd_caps.max_down_spread.raw = dpcd_data[
5374 DP_MAX_DOWNSPREAD - DP_DPCD_REV];
5376 link->reported_link_cap.lane_count =
5377 link->dpcd_caps.max_ln_count.bits.MAX_LANE_COUNT;
5378 link->reported_link_cap.link_rate = dpcd_data[
5379 DP_MAX_LINK_RATE - DP_DPCD_REV];
5380 link->reported_link_cap.link_spread =
5381 link->dpcd_caps.max_down_spread.bits.MAX_DOWN_SPREAD ?
5382 LINK_SPREAD_05_DOWNSPREAD_30KHZ : LINK_SPREAD_DISABLED;
5384 edp_config_cap.raw = dpcd_data[
5385 DP_EDP_CONFIGURATION_CAP - DP_DPCD_REV];
5386 link->dpcd_caps.panel_mode_edp =
5387 edp_config_cap.bits.ALT_SCRAMBLER_RESET;
5388 link->dpcd_caps.dpcd_display_control_capable =
5389 edp_config_cap.bits.DPCD_DISPLAY_CONTROL_CAPABLE;
/* detect_dp_sink_caps() - thin wrapper; full capability discovery lives in
 * retrieve_link_cap(). */
5394 bool detect_dp_sink_caps(struct dc_link *link)
5396 return retrieve_link_cap(link);
5398 /* dc init_hw has power encoder using default
5399 * signal for connector. For native DP, no
5400 * need to power up encoder again. If not native
5401 * DP, hw_init may need check signal or power up
5404 /* TODO save sink caps in link->sink */
/*
 * linkRateInKHzToLinkRateMultiplier() - map a per-lane link rate in kHz to
 * the dc_link_rate enum (stored as a multiplier of 0.27 Gbps/lane).
 * Unrecognized rates map to LINK_RATE_UNKNOWN. The case-label value lines
 * and the final return are elided in this extract.
 */
5407 static enum dc_link_rate linkRateInKHzToLinkRateMultiplier(uint32_t link_rate_in_khz)
5409 enum dc_link_rate link_rate;
5410 // LinkRate is normally stored as a multiplier of 0.27 Gbps per lane. Do the translation.
5411 switch (link_rate_in_khz) {
5413 link_rate = LINK_RATE_LOW; // Rate_1 (RBR) - 1.62 Gbps/Lane
5416 link_rate = LINK_RATE_RATE_2; // Rate_2 - 2.16 Gbps/Lane
5419 link_rate = LINK_RATE_RATE_3; // Rate_3 - 2.43 Gbps/Lane
5422 link_rate = LINK_RATE_HIGH; // Rate_4 (HBR) - 2.70 Gbps/Lane
5425 link_rate = LINK_RATE_RBR2; // Rate_5 (RBR2) - 3.24 Gbps/Lane
5428 link_rate = LINK_RATE_RATE_6; // Rate_6 - 4.32 Gbps/Lane
5431 link_rate = LINK_RATE_HIGH2; // Rate_7 (HBR2) - 5.40 Gbps/Lane
5434 link_rate = LINK_RATE_HIGH3; // Rate_8 (HBR3) - 8.10 Gbps/Lane
5437 link_rate = LINK_RATE_UNKNOWN;
/*
 * detect_edp_sink_caps() - eDP-specific capability discovery.
 *
 * Runs the common retrieve_link_cap(), then for eDP v1.4+ panels (DPCD rev
 * >= 0x13) reads the 16-byte SUPPORTED_LINK_RATES table (8 little-endian
 * 16-bit entries, each in units of 200 kHz), converts each non-zero entry to
 * a dc_link_rate, and records the set plus the maximum as the reported link
 * rate. Also copies reported->verified link cap, reads the backlight
 * adjustment capability, and sets default brightness over AUX.
 */
5443 void detect_edp_sink_caps(struct dc_link *link)
5445 uint8_t supported_link_rates[16];
5447 uint32_t link_rate_in_khz;
5448 enum dc_link_rate link_rate = LINK_RATE_UNKNOWN;
5449 uint8_t backlight_adj_cap;
5451 retrieve_link_cap(link);
5452 link->dpcd_caps.edp_supported_link_rates_count = 0;
5453 memset(supported_link_rates, 0, sizeof(supported_link_rates));
5456 * edp_supported_link_rates_count is only valid for eDP v1.4 or higher.
5457 * Per VESA eDP spec, "The DPCD revision for eDP v1.4 is 13h"
5459 if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_13 &&
5460 (link->dc->debug.optimize_edp_link_rate ||
5461 link->reported_link_cap.link_rate == LINK_RATE_UNKNOWN)) {
5462 // Read DPCD 00010h - 0001Fh 16 bytes at one shot
5463 core_link_read_dpcd(link, DP_SUPPORTED_LINK_RATES,
5464 supported_link_rates, sizeof(supported_link_rates));
5466 for (entry = 0; entry < 16; entry += 2) {
5467 // DPCD register reports per-lane link rate = 16-bit link rate capability
5468 // value X 200 kHz. Need multiplier to find link rate in kHz.
5469 link_rate_in_khz = (supported_link_rates[entry+1] * 0x100 +
5470 supported_link_rates[entry]) * 200;
5472 if (link_rate_in_khz != 0) {
5473 link_rate = linkRateInKHzToLinkRateMultiplier(link_rate_in_khz);
5474 link->dpcd_caps.edp_supported_link_rates[link->dpcd_caps.edp_supported_link_rates_count] = link_rate;
5475 link->dpcd_caps.edp_supported_link_rates_count++;
/* Track the highest table entry as the reported max link rate. */
5477 if (link->reported_link_cap.link_rate < link_rate)
5478 link->reported_link_cap.link_rate = link_rate;
5482 link->verified_link_cap = link->reported_link_cap;
5484 core_link_read_dpcd(link, DP_EDP_BACKLIGHT_ADJUSTMENT_CAP,
5485 &backlight_adj_cap, sizeof(backlight_adj_cap));
5487 link->dpcd_caps.dynamic_backlight_capable_edp =
5488 (backlight_adj_cap & DP_EDP_DYNAMIC_BACKLIGHT_CAP) ? true:false;
5490 dc_link_set_default_brightness_aux(link);
5493 void dc_link_dp_enable_hpd(const struct dc_link *link)
5495 struct link_encoder *encoder = link->link_enc;
5497 if (encoder != NULL && encoder->funcs->enable_hpd != NULL)
5498 encoder->funcs->enable_hpd(encoder);
5501 void dc_link_dp_disable_hpd(const struct dc_link *link)
5503 struct link_encoder *encoder = link->link_enc;
5505 if (encoder != NULL && encoder->funcs->enable_hpd != NULL)
5506 encoder->funcs->disable_hpd(encoder);
5509 static bool is_dp_phy_pattern(enum dp_test_pattern test_pattern)
5511 if ((DP_TEST_PATTERN_PHY_PATTERN_BEGIN <= test_pattern &&
5512 test_pattern <= DP_TEST_PATTERN_PHY_PATTERN_END) ||
5513 test_pattern == DP_TEST_PATTERN_VIDEO_MODE)
5519 static void set_crtc_test_pattern(struct dc_link *link,
5520 struct pipe_ctx *pipe_ctx,
5521 enum dp_test_pattern test_pattern,
5522 enum dp_test_pattern_color_space test_pattern_color_space)
5524 enum controller_dp_test_pattern controller_test_pattern;
5525 enum dc_color_depth color_depth = pipe_ctx->
5526 stream->timing.display_color_depth;
5527 struct bit_depth_reduction_params params;
5528 struct output_pixel_processor *opp = pipe_ctx->stream_res.opp;
5529 int width = pipe_ctx->stream->timing.h_addressable +
5530 pipe_ctx->stream->timing.h_border_left +
5531 pipe_ctx->stream->timing.h_border_right;
5532 int height = pipe_ctx->stream->timing.v_addressable +
5533 pipe_ctx->stream->timing.v_border_bottom +
5534 pipe_ctx->stream->timing.v_border_top;
5536 memset(¶ms, 0, sizeof(params));
5538 switch (test_pattern) {
5539 case DP_TEST_PATTERN_COLOR_SQUARES:
5540 controller_test_pattern =
5541 CONTROLLER_DP_TEST_PATTERN_COLORSQUARES;
5543 case DP_TEST_PATTERN_COLOR_SQUARES_CEA:
5544 controller_test_pattern =
5545 CONTROLLER_DP_TEST_PATTERN_COLORSQUARES_CEA;
5547 case DP_TEST_PATTERN_VERTICAL_BARS:
5548 controller_test_pattern =
5549 CONTROLLER_DP_TEST_PATTERN_VERTICALBARS;
5551 case DP_TEST_PATTERN_HORIZONTAL_BARS:
5552 controller_test_pattern =
5553 CONTROLLER_DP_TEST_PATTERN_HORIZONTALBARS;
5555 case DP_TEST_PATTERN_COLOR_RAMP:
5556 controller_test_pattern =
5557 CONTROLLER_DP_TEST_PATTERN_COLORRAMP;
5560 controller_test_pattern =
5561 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE;
5565 switch (test_pattern) {
5566 case DP_TEST_PATTERN_COLOR_SQUARES:
5567 case DP_TEST_PATTERN_COLOR_SQUARES_CEA:
5568 case DP_TEST_PATTERN_VERTICAL_BARS:
5569 case DP_TEST_PATTERN_HORIZONTAL_BARS:
5570 case DP_TEST_PATTERN_COLOR_RAMP:
5572 /* disable bit depth reduction */
5573 pipe_ctx->stream->bit_depth_params = params;
5574 opp->funcs->opp_program_bit_depth_reduction(opp, ¶ms);
5575 if (pipe_ctx->stream_res.tg->funcs->set_test_pattern)
5576 pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg,
5577 controller_test_pattern, color_depth);
5578 else if (link->dc->hwss.set_disp_pattern_generator) {
5579 struct pipe_ctx *odm_pipe;
5580 enum controller_dp_color_space controller_color_space;
5583 int dpg_width = width;
5585 switch (test_pattern_color_space) {
5586 case DP_TEST_PATTERN_COLOR_SPACE_RGB:
5587 controller_color_space = CONTROLLER_DP_COLOR_SPACE_RGB;
5589 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR601:
5590 controller_color_space = CONTROLLER_DP_COLOR_SPACE_YCBCR601;
5592 case DP_TEST_PATTERN_COLOR_SPACE_YCBCR709:
5593 controller_color_space = CONTROLLER_DP_COLOR_SPACE_YCBCR709;
5595 case DP_TEST_PATTERN_COLOR_SPACE_UNDEFINED:
5597 controller_color_space = CONTROLLER_DP_COLOR_SPACE_UDEFINED;
5598 DC_LOG_ERROR("%s: Color space must be defined for test pattern", __func__);
5603 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe)
5605 dpg_width = width / opp_cnt;
5608 link->dc->hwss.set_disp_pattern_generator(link->dc,
5610 controller_test_pattern,
5611 controller_color_space,
5618 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe) {
5619 struct output_pixel_processor *odm_opp = odm_pipe->stream_res.opp;
5621 odm_opp->funcs->opp_program_bit_depth_reduction(odm_opp, ¶ms);
5622 link->dc->hwss.set_disp_pattern_generator(link->dc,
5624 controller_test_pattern,
5625 controller_color_space,
5636 case DP_TEST_PATTERN_VIDEO_MODE:
5638 /* restore bitdepth reduction */
5639 resource_build_bit_depth_reduction_params(pipe_ctx->stream, ¶ms);
5640 pipe_ctx->stream->bit_depth_params = params;
5641 opp->funcs->opp_program_bit_depth_reduction(opp, ¶ms);
5642 if (pipe_ctx->stream_res.tg->funcs->set_test_pattern)
5643 pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg,
5644 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
5646 else if (link->dc->hwss.set_disp_pattern_generator) {
5647 struct pipe_ctx *odm_pipe;
5649 int dpg_width = width;
5651 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe)
5654 dpg_width = width / opp_cnt;
5655 for (odm_pipe = pipe_ctx->next_odm_pipe; odm_pipe; odm_pipe = odm_pipe->next_odm_pipe) {
5656 struct output_pixel_processor *odm_opp = odm_pipe->stream_res.opp;
5658 odm_opp->funcs->opp_program_bit_depth_reduction(odm_opp, ¶ms);
5659 link->dc->hwss.set_disp_pattern_generator(link->dc,
5661 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
5662 CONTROLLER_DP_COLOR_SPACE_UDEFINED,
5669 link->dc->hwss.set_disp_pattern_generator(link->dc,
5671 CONTROLLER_DP_TEST_PATTERN_VIDEOMODE,
5672 CONTROLLER_DP_COLOR_SPACE_UDEFINED,
/*
 * dc_link_dp_set_test_pattern() - program a DP compliance test pattern.
 *
 * Locates the top-level pipe driving @link, then either:
 *  - restores video mode (resetting any active test pattern),
 *  - programs a PHY-layer pattern (HW lane settings, HW pattern, and the
 *    sink-side DPCD qualification-pattern registers), or
 *  - programs a CRTC/DPG color pattern in the requested color space,
 *    updating MSA/VSC colorimetry under a double-buffer lock.
 *
 * NOTE(review): braces, break/return statements and some call arguments
 * appear to be missing from this extract; code tokens kept exactly as found.
 */
bool dc_link_dp_set_test_pattern(
	struct dc_link *link,
	enum dp_test_pattern test_pattern,
	enum dp_test_pattern_color_space test_pattern_color_space,
	const struct link_training_settings *p_link_settings,
	const unsigned char *p_custom_pattern,
	unsigned int cust_pattern_size)
	struct pipe_ctx *pipes = link->dc->current_state->res_ctx.pipe_ctx;
	struct pipe_ctx *pipe_ctx = NULL;
	unsigned char link_qual_pattern[LANE_COUNT_DP_MAX] = {0};
	union dpcd_training_pattern training_pattern;
	enum dpcd_phy_test_patterns pattern;

	memset(&training_pattern, 0, sizeof(training_pattern));

	/* Find the first top-level (non-split, non-ODM-secondary) pipe on this link. */
	for (i = 0; i < MAX_PIPES; i++) {
		if (pipes[i].stream == NULL)

		if (pipes[i].stream->link == link && !pipes[i].top_pipe && !pipes[i].prev_odm_pipe) {
			pipe_ctx = &pipes[i];

	if (pipe_ctx == NULL)

	/* Reset CRTC Test Pattern if it is currently running and request is VideoMode */
	if (link->test_pattern_enabled && test_pattern ==
			DP_TEST_PATTERN_VIDEO_MODE) {
		/* Set CRTC Test Pattern */
		set_crtc_test_pattern(link, pipe_ctx, test_pattern, test_pattern_color_space);
		dp_set_hw_test_pattern(link, test_pattern,
				(uint8_t *)p_custom_pattern,
				(uint32_t)cust_pattern_size);
		/* Unblank Stream */
		link->dc->hwss.unblank_stream(
			&link->verified_link_cap);
		/* TODO:m_pHwss->MuteAudioEndpoint
		 * (pPathMode->pDisplayPath, false);
		 */
		/* Reset Test Pattern state */
		link->test_pattern_enabled = false;

	/* Check for PHY Test Patterns */
	if (is_dp_phy_pattern(test_pattern)) {
		/* Set DPCD Lane Settings before running test pattern */
		if (p_link_settings != NULL) {
			/* Fixed-VS retimer workaround path for transparent LTTPR mode. */
			if (link->dc->debug.apply_vendor_specific_lttpr_wa &&
					(link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN) &&
					link->lttpr_mode == LTTPR_MODE_TRANSPARENT) {
				dpcd_set_lane_settings(link, p_link_settings, DPRX);
				vendor_specific_lttpr_wa_five(
					p_link_settings->dpcd_lane_settings,
					p_link_settings->link_settings.lane_count);
				dp_set_hw_lane_settings(link, p_link_settings, DPRX);
				dpcd_set_lane_settings(link, p_link_settings, DPRX);

		/* Blank stream if running test pattern */
		if (test_pattern != DP_TEST_PATTERN_VIDEO_MODE) {
			/* TODO:
			 * MuteAudioEndpoint(pPathMode->pDisplayPath, true);
			 */
			pipes->stream_res.stream_enc->funcs->dp_blank(link, pipe_ctx->stream_res.stream_enc);

		dp_set_hw_test_pattern(link, test_pattern,
				(uint8_t *)p_custom_pattern,
				(uint32_t)cust_pattern_size);

		if (test_pattern != DP_TEST_PATTERN_VIDEO_MODE) {
			/* Set Test Pattern state */
			link->test_pattern_enabled = true;
			if (p_link_settings != NULL)
				dpcd_set_link_settings(link,

		/* Translate the requested pattern to its DPCD PHY pattern code. */
		switch (test_pattern) {
		case DP_TEST_PATTERN_VIDEO_MODE:
			pattern = PHY_TEST_PATTERN_NONE;
		case DP_TEST_PATTERN_D102:
			pattern = PHY_TEST_PATTERN_D10_2;
		case DP_TEST_PATTERN_SYMBOL_ERROR:
			pattern = PHY_TEST_PATTERN_SYMBOL_ERROR;
		case DP_TEST_PATTERN_PRBS7:
			pattern = PHY_TEST_PATTERN_PRBS7;
		case DP_TEST_PATTERN_80BIT_CUSTOM:
			pattern = PHY_TEST_PATTERN_80BIT_CUSTOM;
		case DP_TEST_PATTERN_CP2520_1:
			pattern = PHY_TEST_PATTERN_CP2520_1;
		case DP_TEST_PATTERN_CP2520_2:
			pattern = PHY_TEST_PATTERN_CP2520_2;
		case DP_TEST_PATTERN_CP2520_3:
			pattern = PHY_TEST_PATTERN_CP2520_3;
#if defined(CONFIG_DRM_AMD_DC_DCN)
		/* 128b/132b (DP 2.x) PHY patterns. */
		case DP_TEST_PATTERN_128b_132b_TPS1:
			pattern = PHY_TEST_PATTERN_128b_132b_TPS1;
		case DP_TEST_PATTERN_128b_132b_TPS2:
			pattern = PHY_TEST_PATTERN_128b_132b_TPS2;
		case DP_TEST_PATTERN_PRBS9:
			pattern = PHY_TEST_PATTERN_PRBS9;
		case DP_TEST_PATTERN_PRBS11:
			pattern = PHY_TEST_PATTERN_PRBS11;
		case DP_TEST_PATTERN_PRBS15:
			pattern = PHY_TEST_PATTERN_PRBS15;
		case DP_TEST_PATTERN_PRBS23:
			pattern = PHY_TEST_PATTERN_PRBS23;
		case DP_TEST_PATTERN_PRBS31:
			pattern = PHY_TEST_PATTERN_PRBS31;
		case DP_TEST_PATTERN_264BIT_CUSTOM:
			pattern = PHY_TEST_PATTERN_264BIT_CUSTOM;
		case DP_TEST_PATTERN_SQUARE_PULSE:
			pattern = PHY_TEST_PATTERN_SQUARE_PULSE;

		if (test_pattern == DP_TEST_PATTERN_VIDEO_MODE
		/*TODO:&& !pPathMode->pDisplayPath->IsTargetPoweredOn()*/)

		if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_12) {
#if defined(CONFIG_DRM_AMD_DC_DCN)
			if (test_pattern == DP_TEST_PATTERN_SQUARE_PULSE)
				core_link_write_dpcd(link,
						DP_LINK_SQUARE_PATTERN,

			/* tell receiver that we are sending qualification
			 * pattern DP 1.2 or later - DP receiver's link quality
			 * pattern is set using DPCD LINK_QUAL_LANEx_SET
			 * register (0x10B~0x10E)
			 */
			for (lane = 0; lane < LANE_COUNT_DP_MAX; lane++)
				link_qual_pattern[lane] =
						(unsigned char)(pattern);

			core_link_write_dpcd(link,
					DP_LINK_QUAL_LANE0_SET,
					sizeof(link_qual_pattern));
		} else if (link->dpcd_caps.dpcd_rev.raw >= DPCD_REV_10 ||
				link->dpcd_caps.dpcd_rev.raw == 0) {
			/* tell receiver that we are sending qualification
			 * pattern DP 1.1a or earlier - DP receiver's link
			 * quality pattern is set using
			 * DPCD TRAINING_PATTERN_SET -> LINK_QUAL_PATTERN_SET
			 * register (0x102). We will use v_1.3 when we are
			 * setting test pattern for DP 1.1.
			 */
			core_link_read_dpcd(link, DP_TRAINING_PATTERN_SET,
					&training_pattern.raw,
					sizeof(training_pattern));
			training_pattern.v1_3.LINK_QUAL_PATTERN_SET = pattern;
			core_link_write_dpcd(link, DP_TRAINING_PATTERN_SET,
					&training_pattern.raw,
					sizeof(training_pattern));
		/* Non-PHY pattern: derive the stream color space for MSA/VSC. */
		enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;

		switch (test_pattern_color_space) {
		case DP_TEST_PATTERN_COLOR_SPACE_RGB:
			color_space = COLOR_SPACE_SRGB;
			if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
				color_space = COLOR_SPACE_SRGB_LIMITED;
		case DP_TEST_PATTERN_COLOR_SPACE_YCBCR601:
			color_space = COLOR_SPACE_YCBCR601;
			if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
				color_space = COLOR_SPACE_YCBCR601_LIMITED;
		case DP_TEST_PATTERN_COLOR_SPACE_YCBCR709:
			color_space = COLOR_SPACE_YCBCR709;
			if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
				color_space = COLOR_SPACE_YCBCR709_LIMITED;

		/* Take the double-buffer lock (through DMUB when the firmware
		 * owns HW locking) while the pattern and MSA are reprogrammed.
		 */
		if (pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_enable) {
			if (pipe_ctx->stream && should_use_dmub_lock(pipe_ctx->stream->link)) {
				union dmub_hw_lock_flags hw_locks = { 0 };
				struct dmub_hw_lock_inst_flags inst_flags = { 0 };

				hw_locks.bits.lock_dig = 1;
				inst_flags.dig_inst = pipe_ctx->stream_res.tg->inst;

				dmub_hw_lock_mgr_cmd(link->ctx->dmub_srv,
				pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_enable(
						pipe_ctx->stream_res.tg);

		pipe_ctx->stream_res.tg->funcs->lock(pipe_ctx->stream_res.tg);
		/* update MSA to requested color space */
		pipe_ctx->stream_res.stream_enc->funcs->dp_set_stream_attribute(pipe_ctx->stream_res.stream_enc,
				&pipe_ctx->stream->timing,
				pipe_ctx->stream->use_vsc_sdp_for_colorimetry,
				link->dpcd_caps.dprx_feature.bits.SST_SPLIT_SDP_CAP);

		if (pipe_ctx->stream->use_vsc_sdp_for_colorimetry) {
			if (test_pattern == DP_TEST_PATTERN_COLOR_SQUARES_CEA)
				pipe_ctx->stream->vsc_infopacket.sb[17] |= (1 << 7); // sb17 bit 7 Dynamic Range: 0 = VESA range, 1 = CTA range
				pipe_ctx->stream->vsc_infopacket.sb[17] &= ~(1 << 7);
			resource_build_info_frame(pipe_ctx);
			link->dc->hwss.update_info_frame(pipe_ctx);

		set_crtc_test_pattern(link, pipe_ctx, test_pattern, test_pattern_color_space);
		pipe_ctx->stream_res.tg->funcs->unlock(pipe_ctx->stream_res.tg);
		/* Wait for the update to latch before dropping the lock. */
		pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
				CRTC_STATE_VACTIVE);
		pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
		pipe_ctx->stream_res.tg->funcs->wait_for_state(pipe_ctx->stream_res.tg,
				CRTC_STATE_VACTIVE);

		if (pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_disable) {
			if (pipe_ctx->stream && should_use_dmub_lock(pipe_ctx->stream->link)) {
				union dmub_hw_lock_flags hw_locks = { 0 };
				struct dmub_hw_lock_inst_flags inst_flags = { 0 };

				hw_locks.bits.lock_dig = 1;
				inst_flags.dig_inst = pipe_ctx->stream_res.tg->inst;

				dmub_hw_lock_mgr_cmd(link->ctx->dmub_srv,
				pipe_ctx->stream_res.tg->funcs->lock_doublebuffer_disable(
						pipe_ctx->stream_res.tg);

	/* Set Test Pattern state */
	link->test_pattern_enabled = true;
5974 void dp_enable_mst_on_sink(struct dc_link *link, bool enable)
5976 unsigned char mstmCntl;
5978 core_link_read_dpcd(link, DP_MSTM_CTRL, &mstmCntl, 1);
5980 mstmCntl |= DP_MST_EN;
5982 mstmCntl &= (~DP_MST_EN);
5984 core_link_write_dpcd(link, DP_MSTM_CTRL, &mstmCntl, 1);
/*
 * Program the sink's eDP panel mode (DPCD DP_EDP_CONFIGURATION_SET) when
 * the requested mode differs from what the receiver currently reports.
 * NOTE(review): braces/breaks and some call arguments appear to be
 * missing from this extract; code tokens kept exactly as found.
 */
void dp_set_panel_mode(struct dc_link *link, enum dp_panel_mode panel_mode)
	union dpcd_edp_config edp_config_set;
	bool panel_mode_edp = false;

	memset(&edp_config_set, '\0', sizeof(union dpcd_edp_config));

	if (panel_mode != DP_PANEL_MODE_DEFAULT) {

		/* Both EDP and SPECIAL map to eDP panel mode on the sink. */
		switch (panel_mode) {
		case DP_PANEL_MODE_EDP:
		case DP_PANEL_MODE_SPECIAL:
			panel_mode_edp = true;

		/*set edp panel mode in receiver*/
		core_link_read_dpcd(
			DP_EDP_CONFIGURATION_SET,
			&edp_config_set.raw,
			sizeof(edp_config_set.raw));

		/* Only write when the sink's current setting differs. */
		if (edp_config_set.bits.PANEL_MODE_EDP
			!= panel_mode_edp) {
			enum dc_status result;

			edp_config_set.bits.PANEL_MODE_EDP =
			result = core_link_write_dpcd(
				DP_EDP_CONFIGURATION_SET,
				&edp_config_set.raw,
				sizeof(edp_config_set.raw));

			ASSERT(result == DC_OK);

	DC_LOG_DETECTION_DP_CAPS("Link: %d eDP panel mode supported: %d "
		 "eDP panel mode enabled: %d \n",
		 link->dpcd_caps.panel_mode_edp,
/*
 * Decide which panel mode this link should use: SPECIAL for known
 * Travis/Nutmeg VGA/LVDS converter branch devices (which need the
 * alternate scrambler reset), EDP for eDP-capable internal panels,
 * DEFAULT otherwise.
 * NOTE(review): some strncmp() wrapper lines appear truncated in this
 * extract; code tokens kept exactly as found.
 */
enum dp_panel_mode dp_get_panel_mode(struct dc_link *link)
	/* We need to explicitly check that connector
	 * is not DP. Some Travis_VGA get reported
	 * by video bios as DP.
	 */
	if (link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT) {

		switch (link->dpcd_caps.branch_dev_id) {
		case DP_BRANCH_DEVICE_ID_0022B9:
			/* alternate scrambler reset is required for Travis
			 * for the case when external chip does not
			 * provide sink device id, alternate scrambler
			 * scheme will be overriden later by querying
			 */
				link->dpcd_caps.branch_dev_name,
				DP_VGA_LVDS_CONVERTER_ID_2,
				branch_dev_name)) == 0) {
				return DP_PANEL_MODE_SPECIAL;
		case DP_BRANCH_DEVICE_ID_00001A:
			/* alternate scrambler reset is required for Travis
			 * for the case when external chip does not provide
			 * sink device id, alternate scrambler scheme will
			 * be overriden later by querying Encoder feature
			 */
			if (strncmp(link->dpcd_caps.branch_dev_name,
				DP_VGA_LVDS_CONVERTER_ID_3,
				branch_dev_name)) == 0) {
				return DP_PANEL_MODE_SPECIAL;

	/* eDP connectors, and internal DP panels, run in eDP panel mode. */
	if (link->dpcd_caps.panel_mode_edp &&
		(link->connector_signal == SIGNAL_TYPE_EDP ||
		 (link->connector_signal == SIGNAL_TYPE_DISPLAY_PORT &&
		  link->is_internal_display))) {
		return DP_PANEL_MODE_EDP;
	}

	return DP_PANEL_MODE_DEFAULT;
/*
 * Arm or disarm FEC on both the link encoder and the sink
 * (DPCD DP_FEC_CONFIGURATION) ahead of link training.
 * NOTE(review): braces and some statements (fec_config value setup,
 * early-return paths) appear to be missing from this extract; code
 * tokens kept exactly as found.
 */
enum dc_status dp_set_fec_ready(struct dc_link *link, bool ready)
	/* FEC has to be "set ready" before the link training.
	 * The policy is to always train with FEC
	 * if the sink supports it and leave it enabled on link.
	 * If FEC is not supported, disable it.
	 */
	struct link_encoder *link_enc = NULL;
	enum dc_status status = DC_OK;
	uint8_t fec_config = 0;

	/* Access link encoder based on whether it is statically
	 * or dynamically assigned to a link.
	 */
	if (link->is_dig_mapping_flexible &&
			link->dc->res_pool->funcs->link_encs_assign)
		link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
		link_enc = link->link_enc;

	if (!dc_link_should_enable_fec(link))

	if (link_enc->funcs->fec_set_ready &&
			link->dpcd_caps.fec_cap.bits.FEC_CAPABLE) {
			/* Tell the sink first; only arm the encoder if the DPCD write took. */
			status = core_link_write_dpcd(link,
					DP_FEC_CONFIGURATION,
					sizeof(fec_config));
			if (status == DC_OK) {
				link_enc->funcs->fec_set_ready(link_enc, true);
				link->fec_state = dc_link_fec_ready;
				link_enc->funcs->fec_set_ready(link_enc, false);
				link->fec_state = dc_link_fec_not_ready;
				dm_error("dpcd write failed to set fec_ready");
	} else if (link->fec_state == dc_link_fec_ready) {
		/* Disarm a previously-readied FEC on sink and encoder. */
		status = core_link_write_dpcd(link,
				DP_FEC_CONFIGURATION,
				sizeof(fec_config));
		link_enc->funcs->fec_set_ready(link_enc, false);
		link->fec_state = dc_link_fec_not_ready;
/*
 * Enable or disable FEC on the link encoder, tracking the transition
 * through link->fec_state (ready -> enabled -> ready).
 * NOTE(review): braces and some statements (e.g. the delay before enable)
 * appear to be missing from this extract; code tokens kept exactly as found.
 */
void dp_set_fec_enable(struct dc_link *link, bool enable)
	struct link_encoder *link_enc = NULL;

	/* Access link encoder based on whether it is statically
	 * or dynamically assigned to a link.
	 */
	if (link->is_dig_mapping_flexible &&
			link->dc->res_pool->funcs->link_encs_assign)
		link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc, link);
		link_enc = link->link_enc;

	if (!dc_link_should_enable_fec(link))

	if (link_enc->funcs->fec_set_enable &&
			link->dpcd_caps.fec_cap.bits.FEC_CAPABLE) {
		if (link->fec_state == dc_link_fec_ready && enable) {
			/* Accord to DP spec, FEC enable sequence can first
			 * be transmitted anytime after 1000 LL codes have
			 * been transmitted on the link after link training
			 * completion. Using 1 lane RBR should have the maximum
			 * time for transmitting 1000 LL codes which is 6.173 us.
			 * So use 7 microseconds delay instead.
			 */
			link_enc->funcs->fec_set_enable(link_enc, true);
			link->fec_state = dc_link_fec_enabled;
		} else if (link->fec_state == dc_link_fec_enabled && !enable) {
			link_enc->funcs->fec_set_enable(link_enc, false);
			link->fec_state = dc_link_fec_ready;
/*
 * Resolve the link encoder for @link: the statically assigned one, or,
 * when DIG mapping is flexible, the dynamically assigned encoder (falling
 * back to the next available one).
 * NOTE(review): the tail of this function (fallback arguments and return)
 * appears truncated in this extract; code tokens kept exactly as found.
 */
struct link_encoder *dp_get_link_enc(struct dc_link *link)
	struct link_encoder *link_enc;

	link_enc = link->link_enc;
	if (link->is_dig_mapping_flexible &&
			link->dc->res_pool->funcs->link_encs_assign) {
		link_enc = link_enc_cfg_get_link_enc_used_by_link(link->ctx->dc,
		if (!link->link_enc)
			link_enc = link_enc_cfg_get_next_avail_link_enc(
/*
 * Write AMD source-specific data to the sink: the AMD IEEE OUI signature
 * and device id at DP_SOURCE_OUI, and (DCN2.0+) the source's minimum
 * horizontal blanking period — or, if the DM supplied a vendor signature,
 * write that verbatim instead.
 * NOTE(review): braces appear to be missing from this extract; code
 * tokens kept exactly as found.
 */
void dpcd_set_source_specific_data(struct dc_link *link)
	if (!link->dc->vendor_signature.is_valid) {
		enum dc_status __maybe_unused result_write_min_hblank = DC_NOT_SUPPORTED;
		struct dpcd_amd_signature amd_signature = {0};
		struct dpcd_amd_device_id amd_device_id = {0};

		amd_device_id.device_id_byte1 =
				(uint8_t)(link->ctx->asic_id.chip_id);
		amd_device_id.device_id_byte2 =
				(uint8_t)(link->ctx->asic_id.chip_id >> 8);
		amd_device_id.dce_version =
				(uint8_t)(link->ctx->dce_version);
		amd_device_id.dal_version_byte1 = 0x0; // needed? where to get?
		amd_device_id.dal_version_byte2 = 0x0; // needed? where to get?

		core_link_read_dpcd(link, DP_SOURCE_OUI,
				(uint8_t *)(&amd_signature),
				sizeof(amd_signature));

		/* Re-write the AMD OUI (00-00-1A) only if it is not already present. */
		if (!((amd_signature.AMD_IEEE_TxSignature_byte1 == 0x0) &&
			(amd_signature.AMD_IEEE_TxSignature_byte2 == 0x0) &&
			(amd_signature.AMD_IEEE_TxSignature_byte3 == 0x1A))) {

			amd_signature.AMD_IEEE_TxSignature_byte1 = 0x0;
			amd_signature.AMD_IEEE_TxSignature_byte2 = 0x0;
			amd_signature.AMD_IEEE_TxSignature_byte3 = 0x1A;

			core_link_write_dpcd(link, DP_SOURCE_OUI,
				(uint8_t *)(&amd_signature),
				sizeof(amd_signature));

		core_link_write_dpcd(link, DP_SOURCE_OUI+0x03,
				(uint8_t *)(&amd_device_id),
				sizeof(amd_device_id));

		if (link->ctx->dce_version >= DCN_VERSION_2_0 &&
			link->dc->caps.min_horizontal_blanking_period != 0) {

			uint8_t hblank_size = (uint8_t)link->dc->caps.min_horizontal_blanking_period;

			if (link->preferred_link_setting.dpcd_source_device_specific_field_support) {
				result_write_min_hblank = core_link_write_dpcd(link,
					DP_SOURCE_MINIMUM_HBLANK_SUPPORTED, (uint8_t *)(&hblank_size),
					sizeof(hblank_size));

				/* Sink rejected the vendor-specific field: stop trying on this link. */
				if (result_write_min_hblank == DC_ERROR_UNEXPECTED)
					link->preferred_link_setting.dpcd_source_device_specific_field_support = false;
				DC_LOG_DC("Sink device does not support 00340h DPCD write. Skipping on purpose.\n");

		DC_TRACE_LEVEL_MESSAGE(DAL_TRACE_LEVEL_INFORMATION,
							WPP_BIT_FLAG_DC_DETECTION_DP_CAPS,
							"result=%u link_index=%u enum dce_version=%d DPCD=0x%04X min_hblank=%u branch_dev_id=0x%x branch_dev_name='%c%c%c%c%c%c'",
							result_write_min_hblank,
							link->ctx->dce_version,
							DP_SOURCE_MINIMUM_HBLANK_SUPPORTED,
							link->dc->caps.min_horizontal_blanking_period,
							link->dpcd_caps.branch_dev_id,
							link->dpcd_caps.branch_dev_name[0],
							link->dpcd_caps.branch_dev_name[1],
							link->dpcd_caps.branch_dev_name[2],
							link->dpcd_caps.branch_dev_name[3],
							link->dpcd_caps.branch_dev_name[4],
							link->dpcd_caps.branch_dev_name[5]);
		core_link_write_dpcd(link, DP_SOURCE_OUI,
				link->dc->vendor_signature.data.raw,
				sizeof(link->dc->vendor_signature.data.raw));
/*
 * Set the panel backlight in millinits over AUX: writes the level and
 * transition time to DP_SOURCE_BACKLIGHT_LEVEL and selects AUX control
 * via DP_SOURCE_BACKLIGHT_CONTROL.
 * NOTE(review): an "isHDR" parameter line and the return statements
 * appear to be missing from this extract (the body references isHDR);
 * code tokens kept exactly as found — confirm against the full source.
 */
bool dc_link_set_backlight_level_nits(struct dc_link *link,
		uint32_t backlight_millinits,
		uint32_t transition_time_in_ms)
	struct dpcd_source_backlight_set dpcd_backlight_set;
	uint8_t backlight_control = isHDR ? 1 : 0;

	if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
			link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))

	// OLEDs have no PWM, they can only use AUX
	if (link->dpcd_sink_ext_caps.bits.oled == 1)
		backlight_control = 1;

	*(uint32_t *)&dpcd_backlight_set.backlight_level_millinits = backlight_millinits;
	*(uint16_t *)&dpcd_backlight_set.backlight_transition_time_ms = (uint16_t)transition_time_in_ms;

	if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_LEVEL,
			(uint8_t *)(&dpcd_backlight_set),
			sizeof(dpcd_backlight_set)) != DC_OK)

	if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_CONTROL,
			&backlight_control, 1) != DC_OK)
/*
 * Read the current backlight level (average and peak, in millinits) from
 * DP_SOURCE_BACKLIGHT_CURRENT_PEAK over AUX.
 * NOTE(review): the return statements appear to be missing from this
 * extract; code tokens kept exactly as found.
 */
bool dc_link_get_backlight_level_nits(struct dc_link *link,
		uint32_t *backlight_millinits_avg,
		uint32_t *backlight_millinits_peak)
	union dpcd_source_backlight_get dpcd_backlight_get;

	memset(&dpcd_backlight_get, 0, sizeof(union dpcd_source_backlight_get));

	if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
			link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))

	if (core_link_read_dpcd(link, DP_SOURCE_BACKLIGHT_CURRENT_PEAK,
			dpcd_backlight_get.raw,
			sizeof(union dpcd_source_backlight_get)) != DC_OK)

	*backlight_millinits_avg =
		dpcd_backlight_get.bytes.backlight_millinits_avg;
	*backlight_millinits_peak =
		dpcd_backlight_get.bytes.backlight_millinits_peak;

	/* On non-supported panels dpcd_read usually succeeds with 0 returned */
	if (*backlight_millinits_avg == 0 ||
			*backlight_millinits_avg > *backlight_millinits_peak)
6335 bool dc_link_backlight_enable_aux(struct dc_link *link, bool enable)
6337 uint8_t backlight_enable = enable ? 1 : 0;
6339 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6340 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6343 if (core_link_write_dpcd(link, DP_SOURCE_BACKLIGHT_ENABLE,
6344 &backlight_enable, 1) != DC_OK)
6350 // we read default from 0x320 because we expect BIOS wrote it there
6351 // regular get_backlight_nit reads from panel set at 0x326
6352 bool dc_link_read_default_bl_aux(struct dc_link *link, uint32_t *backlight_millinits)
6354 if (!link || (link->connector_signal != SIGNAL_TYPE_EDP &&
6355 link->connector_signal != SIGNAL_TYPE_DISPLAY_PORT))
6358 if (core_link_read_dpcd(link, DP_SOURCE_BACKLIGHT_LEVEL,
6359 (uint8_t *) backlight_millinits,
6360 sizeof(uint32_t)) != DC_OK)
6366 bool dc_link_set_default_brightness_aux(struct dc_link *link)
6368 uint32_t default_backlight;
6370 if (link && link->dpcd_sink_ext_caps.bits.oled == 1) {
6371 if (!dc_link_read_default_bl_aux(link, &default_backlight))
6372 default_backlight = 150000;
6373 // if < 5 nits or > 5000, it might be wrong readback
6374 if (default_backlight < 5000 || default_backlight > 5000000)
6375 default_backlight = 150000; //
6377 return dc_link_set_backlight_level_nits(link, true,
6378 default_backlight, 0);
/*
 * Decide whether the eDP link should be retrained at an intermediate link
 * rate (ILR): compares the VBIOS-programmed rate/lane-count (DPCD 00100h,
 * 00101h, 00115h) against the optimal settings for the given timing.
 * NOTE(review): some declarations (req_bw) and return statements appear
 * to be missing from this extract; code tokens kept exactly as found.
 */
bool is_edp_ilr_optimization_required(struct dc_link *link, struct dc_crtc_timing *crtc_timing)
	struct dc_link_settings link_setting;
	uint8_t link_bw_set;
	uint8_t link_rate_set;
	union lane_count_set lane_count_set = {0};

	ASSERT(link || crtc_timing); // invalid input

	/* ILR only applies when the sink advertised a rate table and the
	 * optimization is enabled in debug options. */
	if (link->dpcd_caps.edp_supported_link_rates_count == 0 ||
			!link->dc->debug.optimize_edp_link_rate)

	// Read DPCD 00100h to find if standard link rates are set
	core_link_read_dpcd(link, DP_LINK_BW_SET,
				&link_bw_set, sizeof(link_bw_set));

		DC_LOG_EVENT_LINK_TRAINING("eDP ILR: Optimization required, VBIOS used link_bw_set\n");

	// Read DPCD 00115h to find the edp link rate set used
	core_link_read_dpcd(link, DP_LINK_RATE_SET,
			    &link_rate_set, sizeof(link_rate_set));

	// Read DPCD 00101h to find out the number of lanes currently set
	core_link_read_dpcd(link, DP_LANE_COUNT_SET,
				&lane_count_set.raw, sizeof(lane_count_set));

	req_bw = dc_bandwidth_in_kbps_from_timing(crtc_timing);

	if (!crtc_timing->flags.DSC)
		decide_edp_link_settings(link, &link_setting, req_bw);
		decide_edp_link_settings_with_dsc(link, &link_setting, req_bw, LINK_RATE_UNKNOWN);

	/* Retrain only when current DPCD settings differ from the optimum. */
	if (link->dpcd_caps.edp_supported_link_rates[link_rate_set] != link_setting.link_rate ||
			lane_count_set.bits.LANE_COUNT_SET != link_setting.lane_count) {
		DC_LOG_EVENT_LINK_TRAINING("eDP ILR: Optimization required, VBIOS link_rate_set not optimal\n");

	DC_LOG_EVENT_LINK_TRAINING("eDP ILR: No optimization required, VBIOS set optimal link_rate_set\n");
6432 enum dp_link_encoding dp_get_link_encoding_format(const struct dc_link_settings *link_settings)
6434 if ((link_settings->link_rate >= LINK_RATE_LOW) &&
6435 (link_settings->link_rate <= LINK_RATE_HIGH3))
6436 return DP_8b_10b_ENCODING;
6437 #if defined(CONFIG_DRM_AMD_DC_DCN)
6438 else if ((link_settings->link_rate >= LINK_RATE_UHBR10) &&
6439 (link_settings->link_rate <= LINK_RATE_UHBR20))
6440 return DP_128b_132b_ENCODING;
6442 return DP_UNKNOWN_ENCODING;
#if defined(CONFIG_DRM_AMD_DC_DCN)
/*
 * Determine the channel encoding an MST link will use: based on the
 * preferred link settings when fully specified, otherwise on the settings
 * decide_mst_link_settings() would pick.
 * NOTE(review): braces appear to be missing from this extract; code
 * tokens kept exactly as found.
 */
enum dp_link_encoding dc_link_dp_mst_decide_link_encoding_format(const struct dc_link *link)
	struct dc_link_settings link_settings = {0};

	if (!dc_is_dp_signal(link->connector_signal))
		return DP_UNKNOWN_ENCODING;

	if (link->preferred_link_setting.lane_count !=
			LANE_COUNT_UNKNOWN &&
			link->preferred_link_setting.link_rate !=
			LINK_RATE_UNKNOWN) {
		link_settings = link->preferred_link_setting;
		decide_mst_link_settings(link, &link_settings);

	return dp_get_link_encoding_format(&link_settings);
// TODO - DP2.0 Link: Fix get_lane_status to handle LTTPR offset (SST and MST)
/*
 * Read per-lane training status (one nibble per lane) and the
 * align-status-updated byte from the sink.
 * NOTE(review): the DPCD read arguments and the early return appear
 * truncated in this extract; code tokens kept exactly as found.
 */
static void get_lane_status(
	struct dc_link *link,
	uint32_t lane_count,
	union lane_status *status,
	union lane_align_status_updated *status_updated)
	uint8_t dpcd_buf[3] = {0};

	if (status == NULL || status_updated == NULL) {

	core_link_read_dpcd(

	/* Two lanes share each status byte; extract the lane's nibble. */
	for (lane = 0; lane < lane_count; lane++) {
		status[lane].raw = get_nibble_at_index(&dpcd_buf[0], lane);

	status_updated->raw = dpcd_buf[2];
/*
 * Program the downstream branch's payload allocation table for 128b/132b
 * SST: kick the update via DP_PAYLOAD_TABLE_UPDATE_STATUS, write VC id /
 * start slot / slot count (DPCD 1C0h-1C2h), then poll until the branch
 * acknowledges the table update. Also fills @proposed_table for the DC
 * state.
 * NOTE(review): braces and some call arguments appear to be missing from
 * this extract; code tokens kept exactly as found.
 */
bool dpcd_write_128b_132b_sst_payload_allocation_table(
		const struct dc_stream_state *stream,
		struct dc_link *link,
		struct link_mst_stream_allocation_table *proposed_table,
	const uint8_t vc_id = 1; /// VC ID always 1 for SST
	const uint8_t start_time_slot = 0; /// Always start at time slot 0 for SST
	bool result = false;
	uint8_t req_slot_count = 0;
	struct fixed31_32 avg_time_slots_per_mtp = { 0 };
	union payload_table_update_status update_status = { 0 };
	const uint32_t max_retries = 30;
	uint32_t retries = 0;

	avg_time_slots_per_mtp = calculate_sst_avg_time_slots_per_mtp(stream, link);
	req_slot_count = dc_fixpt_ceil(avg_time_slots_per_mtp);

	/// Leave req_slot_count = 0 if allocate is false.

	/// Write DPCD 2C0 = 1 to start updating
	update_status.bits.VC_PAYLOAD_TABLE_UPDATED = 1;
	core_link_write_dpcd(
			DP_PAYLOAD_TABLE_UPDATE_STATUS,

	/// Program the changes in DPCD 1C0 - 1C2
	core_link_write_dpcd(
			DP_PAYLOAD_ALLOCATE_SET,

	ASSERT(start_time_slot == 0);
	core_link_write_dpcd(
			DP_PAYLOAD_ALLOCATE_START_TIME_SLOT,

	ASSERT(req_slot_count <= MAX_MTP_SLOT_COUNT); /// Validation should filter out modes that exceed link BW
	core_link_write_dpcd(
			DP_PAYLOAD_ALLOCATE_TIME_SLOT_COUNT,

	/// Poll till DPCD 2C0 read 1
	/// Try for at least 150ms (30 retries, with 5ms delay after each attempt)
	while (retries < max_retries) {
		if (core_link_read_dpcd(
				DP_PAYLOAD_TABLE_UPDATE_STATUS,
			if (update_status.bits.VC_PAYLOAD_TABLE_UPDATED == 1) {
				DC_LOG_DP2("SST Update Payload: downstream payload table updated.");
				/* A cheap AUX health check while waiting: read DPCD rev. */
				union dpcd_rev dpcdRev;

				if (core_link_read_dpcd(
					DC_LOG_ERROR("SST Update Payload: Unable to read DPCD revision "
							"of sink while polling payload table "
							"updated status bit.");

	if (!result && retries == max_retries) {
		DC_LOG_ERROR("SST Update Payload: Payload table not updated after retries, "
				"continue on. Something is wrong with the branch.");
		// TODO - DP2.0 Payload: Read and log the payload table from downstream branch

	proposed_table->stream_count = 1; /// Always 1 stream for SST
	proposed_table->stream_allocations[0].slot_count = req_slot_count;
	proposed_table->stream_allocations[0].vcp_id = vc_id;
/*
 * Poll the branch for ACT (Allocation Change Trigger) handled: repeatedly
 * checks link training status (bailing out on link loss) and the
 * ACT_HANDLED bit in DP_PAYLOAD_TABLE_UPDATE_STATUS. Returns true only
 * when ACT was acknowledged.
 * NOTE(review): braces, delays and some call arguments appear to be
 * missing from this extract; code tokens kept exactly as found.
 */
bool dpcd_poll_for_allocation_change_trigger(struct dc_link *link)
	/*
	 * wait for ACT handled
	 */
	const int act_retries = 30;
	enum act_return_status result = ACT_FAILED;
	union payload_table_update_status update_status = {0};
	union lane_status dpcd_lane_status[LANE_COUNT_DP_MAX];
	union lane_align_status_updated lane_status_updated;

	for (i = 0; i < act_retries; i++) {
		get_lane_status(link, link->cur_link_settings.lane_count, dpcd_lane_status, &lane_status_updated);

		/* Abort if the link dropped out of the trained state. */
		if (!dp_is_cr_done(link->cur_link_settings.lane_count, dpcd_lane_status) ||
				!dp_is_ch_eq_done(link->cur_link_settings.lane_count, dpcd_lane_status) ||
				!dp_is_symbol_locked(link->cur_link_settings.lane_count, dpcd_lane_status) ||
				!dp_is_interlane_aligned(lane_status_updated)) {
			DC_LOG_ERROR("SST Update Payload: Link loss occurred while "
					"polling for ACT handled.");
			result = ACT_LINK_LOST;

		core_link_read_dpcd(
				DP_PAYLOAD_TABLE_UPDATE_STATUS,

		if (update_status.bits.ACT_HANDLED == 1) {
			DC_LOG_DP2("SST Update Payload: ACT handled by downstream.");
			result = ACT_SUCCESS;

	if (result == ACT_FAILED) {
		DC_LOG_ERROR("SST Update Payload: ACT still not handled after retries, "
				"continue on. Something is wrong with the branch.");

	return (result == ACT_SUCCESS);
/*
 * Compute the average number of payload time slots per MTP that @stream
 * requires on @link for SST payload allocation:
 *
 *	avg_time_slots_per_mtp = stream_bw / (link_bw / MAX_MTP_SLOT_COUNT)
 *
 * i.e. the stream's bandwidth expressed as a (fixed-point) fraction of the
 * link, scaled to time-slot units. The caller rounds this up to an integer
 * slot count when programming the payload table.
 */
struct fixed31_32 calculate_sst_avg_time_slots_per_mtp(
		const struct dc_stream_state *stream,
		const struct dc_link *link)
	/* Total link bandwidth (kbps) of the currently trained link. */
	struct fixed31_32 link_bw_effective =
			dc_link_bandwidth_kbps(link, &link->cur_link_settings));
	/* Bandwidth carried by a single one of the MAX_MTP_SLOT_COUNT slots. */
	struct fixed31_32 timeslot_bw_effective =
			dc_fixpt_div_int(link_bw_effective, MAX_MTP_SLOT_COUNT);
	/* Bandwidth demanded by the stream's timing. */
	struct fixed31_32 timing_bw =
			dc_bandwidth_in_kbps_from_timing(&stream->timing));
	struct fixed31_32 avg_time_slots_per_mtp =
			dc_fixpt_div(timing_bw, timeslot_bw_effective);

	return avg_time_slots_per_mtp;
6654 bool is_dp_128b_132b_signal(struct pipe_ctx *pipe_ctx)
6656 return (pipe_ctx->stream_res.hpo_dp_stream_enc &&
6657 pipe_ctx->stream->link->hpo_dp_link_enc &&
6658 dc_is_dp_signal(pipe_ctx->stream->signal));
6662 void edp_panel_backlight_power_on(struct dc_link *link)
6664 if (link->connector_signal != SIGNAL_TYPE_EDP)
6667 link->dc->hwss.edp_power_control(link, true);
6668 link->dc->hwss.edp_wait_for_hpd_ready(link, true);
6669 if (link->dc->hwss.edp_backlight_control)
6670 link->dc->hwss.edp_backlight_control(link, true);