1 // SPDX-License-Identifier: GPL-2.0-only
3 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
6 #define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__
8 #include <linux/delay.h>
9 #include <linux/iopoll.h>
10 #include <linux/phy/phy.h>
11 #include <linux/phy/phy-dp.h>
12 #include <linux/rational.h>
13 #include <drm/drm_dp_helper.h>
14 #include <drm/drm_print.h>
16 #include "dp_catalog.h"
/* AUX-transaction polling interval and deadline, microseconds */
#define POLLING_SLEEP_US			1000
#define POLLING_TIMEOUT_US			10000

#define SCRAMBLER_RESET_COUNT_VALUE		0xFC

/* raw status bits sit at bit 0; ACK/MASK bits are shifted copies */
#define DP_INTERRUPT_STATUS_ACK_SHIFT	1
#define DP_INTERRUPT_STATUS_MASK_SHIFT	2

/* sub-block offsets/sizes inside the DP controller register space */
#define MSM_DP_CONTROLLER_AHB_OFFSET	0x0000
#define MSM_DP_CONTROLLER_AHB_SIZE	0x0200
#define MSM_DP_CONTROLLER_AUX_OFFSET	0x0200
#define MSM_DP_CONTROLLER_AUX_SIZE	0x0200
#define MSM_DP_CONTROLLER_LINK_OFFSET	0x0400
#define MSM_DP_CONTROLLER_LINK_SIZE	0x0C00
#define MSM_DP_CONTROLLER_P0_OFFSET	0x1000
#define MSM_DP_CONTROLLER_P0_SIZE	0x0400

/* AUX/link interrupt sources serviced via REG_DP_INTR_STATUS */
#define DP_INTERRUPT_STATUS1 \
	(DP_INTR_AUX_I2C_DONE| \
	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)

#define DP_INTERRUPT_STATUS1_ACK \
	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
#define DP_INTERRUPT_STATUS1_MASK \
	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)

/* video/stream interrupt sources serviced via REG_DP_INTR_STATUS2 */
#define DP_INTERRUPT_STATUS2 \
	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)

#define DP_INTERRUPT_STATUS2_ACK \
	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
#define DP_INTERRUPT_STATUS2_MASK \
	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
57 struct dp_catalog_private {
60 u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
61 struct dp_catalog dp_catalog;
62 u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
65 void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state)
67 struct dp_catalog_private *catalog = container_of(dp_catalog,
68 struct dp_catalog_private, dp_catalog);
70 msm_disp_snapshot_add_block(disp_state, catalog->io->dp_controller.len,
71 catalog->io->dp_controller.base, "dp_ctrl");
74 static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
76 offset += MSM_DP_CONTROLLER_AUX_OFFSET;
77 return readl_relaxed(catalog->io->dp_controller.base + offset);
80 static inline void dp_write_aux(struct dp_catalog_private *catalog,
83 offset += MSM_DP_CONTROLLER_AUX_OFFSET;
85 * To make sure aux reg writes happens before any other operation,
86 * this function uses writel() instread of writel_relaxed()
88 writel(data, catalog->io->dp_controller.base + offset);
91 static inline u32 dp_read_ahb(struct dp_catalog_private *catalog, u32 offset)
93 offset += MSM_DP_CONTROLLER_AHB_OFFSET;
94 return readl_relaxed(catalog->io->dp_controller.base + offset);
97 static inline void dp_write_ahb(struct dp_catalog_private *catalog,
100 offset += MSM_DP_CONTROLLER_AHB_OFFSET;
102 * To make sure phy reg writes happens before any other operation,
103 * this function uses writel() instread of writel_relaxed()
105 writel(data, catalog->io->dp_controller.base + offset);
108 static inline void dp_write_p0(struct dp_catalog_private *catalog,
109 u32 offset, u32 data)
111 offset += MSM_DP_CONTROLLER_P0_OFFSET;
113 * To make sure interface reg writes happens before any other operation,
114 * this function uses writel() instread of writel_relaxed()
116 writel(data, catalog->io->dp_controller.base + offset);
119 static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
122 offset += MSM_DP_CONTROLLER_P0_OFFSET;
124 * To make sure interface reg writes happens before any other operation,
125 * this function uses writel() instread of writel_relaxed()
127 return readl_relaxed(catalog->io->dp_controller.base + offset);
130 static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
132 offset += MSM_DP_CONTROLLER_LINK_OFFSET;
133 return readl_relaxed(catalog->io->dp_controller.base + offset);
136 static inline void dp_write_link(struct dp_catalog_private *catalog,
137 u32 offset, u32 data)
139 offset += MSM_DP_CONTROLLER_LINK_OFFSET;
141 * To make sure link reg writes happens before any other operation,
142 * this function uses writel() instread of writel_relaxed()
144 writel(data, catalog->io->dp_controller.base + offset);
147 /* aux related catalog functions */
148 u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
150 struct dp_catalog_private *catalog = container_of(dp_catalog,
151 struct dp_catalog_private, dp_catalog);
153 return dp_read_aux(catalog, REG_DP_AUX_DATA);
156 int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
158 struct dp_catalog_private *catalog = container_of(dp_catalog,
159 struct dp_catalog_private, dp_catalog);
161 dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
165 int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
167 struct dp_catalog_private *catalog = container_of(dp_catalog,
168 struct dp_catalog_private, dp_catalog);
170 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
174 int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
177 struct dp_catalog_private *catalog = container_of(dp_catalog,
178 struct dp_catalog_private, dp_catalog);
181 data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
182 data &= ~DP_AUX_TRANS_CTRL_GO;
183 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
185 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
190 int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
192 struct dp_catalog_private *catalog = container_of(dp_catalog,
193 struct dp_catalog_private, dp_catalog);
195 dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
196 dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
197 dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
198 dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
203 * dp_catalog_aux_reset() - reset AUX controller
205 * @dp_catalog: DP catalog structure
209 * This function reset AUX controller
211 * NOTE: reset AUX controller will also clear any pending HPD related interrupts
214 void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
217 struct dp_catalog_private *catalog = container_of(dp_catalog,
218 struct dp_catalog_private, dp_catalog);
220 aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
222 aux_ctrl |= DP_AUX_CTRL_RESET;
223 dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
224 usleep_range(1000, 1100); /* h/w recommended delay */
226 aux_ctrl &= ~DP_AUX_CTRL_RESET;
227 dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
230 void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
233 struct dp_catalog_private *catalog = container_of(dp_catalog,
234 struct dp_catalog_private, dp_catalog);
236 aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
239 dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
240 dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
241 aux_ctrl |= DP_AUX_CTRL_ENABLE;
243 aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
246 dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
249 void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog)
251 struct dp_catalog_private *catalog = container_of(dp_catalog,
252 struct dp_catalog_private, dp_catalog);
253 struct dp_io *dp_io = catalog->io;
254 struct phy *phy = dp_io->phy;
259 static void dump_regs(void __iomem *base, int len)
265 len = DIV_ROUND_UP(len, 16);
266 for (i = 0; i < len; i++) {
267 x0 = readl_relaxed(base + addr_off);
268 x4 = readl_relaxed(base + addr_off + 0x04);
269 x8 = readl_relaxed(base + addr_off + 0x08);
270 xc = readl_relaxed(base + addr_off + 0x0c);
272 pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
277 void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
280 struct dp_catalog_private *catalog = container_of(dp_catalog,
281 struct dp_catalog_private, dp_catalog);
283 pr_info("AHB regs\n");
284 offset = MSM_DP_CONTROLLER_AHB_OFFSET;
285 len = MSM_DP_CONTROLLER_AHB_SIZE;
286 dump_regs(catalog->io->dp_controller.base + offset, len);
288 pr_info("AUXCLK regs\n");
289 offset = MSM_DP_CONTROLLER_AUX_OFFSET;
290 len = MSM_DP_CONTROLLER_AUX_SIZE;
291 dump_regs(catalog->io->dp_controller.base + offset, len);
293 pr_info("LCLK regs\n");
294 offset = MSM_DP_CONTROLLER_LINK_OFFSET;
295 len = MSM_DP_CONTROLLER_LINK_SIZE;
296 dump_regs(catalog->io->dp_controller.base + offset, len);
298 pr_info("P0CLK regs\n");
299 offset = MSM_DP_CONTROLLER_P0_OFFSET;
300 len = MSM_DP_CONTROLLER_P0_SIZE;
301 dump_regs(catalog->io->dp_controller.base + offset, len);
304 u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
306 struct dp_catalog_private *catalog = container_of(dp_catalog,
307 struct dp_catalog_private, dp_catalog);
310 intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
311 intr &= ~DP_INTERRUPT_STATUS1_MASK;
312 intr_ack = (intr & DP_INTERRUPT_STATUS1)
313 << DP_INTERRUPT_STATUS_ACK_SHIFT;
314 dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
315 DP_INTERRUPT_STATUS1_MASK);
321 /* controller related catalog functions */
322 void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
323 u32 dp_tu, u32 valid_boundary,
326 struct dp_catalog_private *catalog = container_of(dp_catalog,
327 struct dp_catalog_private, dp_catalog);
329 dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
330 dp_write_link(catalog, REG_DP_TU, dp_tu);
331 dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
334 void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
336 struct dp_catalog_private *catalog = container_of(dp_catalog,
337 struct dp_catalog_private, dp_catalog);
339 dp_write_link(catalog, REG_DP_STATE_CTRL, state);
342 void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
344 struct dp_catalog_private *catalog = container_of(dp_catalog,
345 struct dp_catalog_private, dp_catalog);
347 DRM_DEBUG_DP("DP_CONFIGURATION_CTRL=0x%x\n", cfg);
349 dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
352 void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
354 struct dp_catalog_private *catalog = container_of(dp_catalog,
355 struct dp_catalog_private, dp_catalog);
356 u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
359 ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
360 ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
361 ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
362 ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
364 dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
368 void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
372 struct dp_catalog_private *catalog = container_of(dp_catalog,
373 struct dp_catalog_private, dp_catalog);
375 DRM_DEBUG_DP("enable=%d\n", enable);
378 * To make sure link reg writes happens before other operation,
379 * dp_write_link() function uses writel()
381 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
383 mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
384 DP_MAINLINK_CTRL_ENABLE);
385 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
387 mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
388 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
390 mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
391 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
393 mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
394 DP_MAINLINK_FB_BOUNDARY_SEL);
395 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
397 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
398 mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
399 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
403 void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
408 struct dp_catalog_private *catalog = container_of(dp_catalog,
409 struct dp_catalog_private, dp_catalog);
411 misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);
414 misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
415 misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
416 misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
417 /* Configure clock to synchronous mode */
418 misc_val |= DP_MISC0_SYNCHRONOUS_CLK;
420 DRM_DEBUG_DP("misc settings = 0x%x\n", misc_val);
421 dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
424 void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
425 u32 rate, u32 stream_rate_khz,
428 u32 pixel_m, pixel_n;
429 u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
430 u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
431 u32 const link_rate_hbr2 = 540000;
432 u32 const link_rate_hbr3 = 810000;
433 unsigned long den, num;
435 struct dp_catalog_private *catalog = container_of(dp_catalog,
436 struct dp_catalog_private, dp_catalog);
438 if (rate == link_rate_hbr3)
440 else if (rate == 1620000 || rate == 270000)
442 else if (rate == link_rate_hbr2)
445 DRM_ERROR("Invalid pixel mux divider\n");
447 dispcc_input_rate = (rate * 10) / pixel_div;
449 rational_best_approximation(dispcc_input_rate, stream_rate_khz,
450 (unsigned long)(1 << 16) - 1,
451 (unsigned long)(1 << 16) - 1, &den, &num);
458 mvid = (pixel_m & 0xFFFF) * 5;
459 nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
461 if (nvid < nvid_fixed) {
464 temp = (nvid_fixed / nvid) * nvid;
465 mvid = (nvid_fixed / nvid) * mvid;
469 if (link_rate_hbr2 == rate)
472 if (link_rate_hbr3 == rate)
475 DRM_DEBUG_DP("mvid=0x%x, nvid=0x%x\n", mvid, nvid);
476 dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
477 dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
478 dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
481 int dp_catalog_ctrl_set_pattern(struct dp_catalog *dp_catalog,
486 struct dp_catalog_private *catalog = container_of(dp_catalog,
487 struct dp_catalog_private, dp_catalog);
489 bit = BIT(pattern - 1);
490 DRM_DEBUG_DP("hw: bit=%d train=%d\n", bit, pattern);
491 dp_catalog_ctrl_state_ctrl(dp_catalog, bit);
493 bit = BIT(pattern - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;
495 /* Poll for mainlink ready status */
496 ret = readx_poll_timeout(readl, catalog->io->dp_controller.base +
497 MSM_DP_CONTROLLER_LINK_OFFSET +
498 REG_DP_MAINLINK_READY,
500 POLLING_SLEEP_US, POLLING_TIMEOUT_US);
502 DRM_ERROR("set pattern for link_train=%d failed\n", pattern);
509 * dp_catalog_ctrl_reset() - reset DP controller
511 * @dp_catalog: DP catalog structure
515 * This function reset the DP controller
517 * NOTE: reset DP controller will also clear any pending HPD related interrupts
520 void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
523 struct dp_catalog_private *catalog = container_of(dp_catalog,
524 struct dp_catalog_private, dp_catalog);
526 sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);
528 sw_reset |= DP_SW_RESET;
529 dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
530 usleep_range(1000, 1100); /* h/w recommended delay */
532 sw_reset &= ~DP_SW_RESET;
533 dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
536 bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
540 struct dp_catalog_private *catalog = container_of(dp_catalog,
541 struct dp_catalog_private, dp_catalog);
543 /* Poll for mainlink ready status */
544 ret = readl_poll_timeout(catalog->io->dp_controller.base +
545 MSM_DP_CONTROLLER_LINK_OFFSET +
546 REG_DP_MAINLINK_READY,
547 data, data & DP_MAINLINK_READY_FOR_VIDEO,
548 POLLING_SLEEP_US, POLLING_TIMEOUT_US);
550 DRM_ERROR("mainlink not ready\n");
557 void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
560 struct dp_catalog_private *catalog = container_of(dp_catalog,
561 struct dp_catalog_private, dp_catalog);
564 dp_write_ahb(catalog, REG_DP_INTR_STATUS,
565 DP_INTERRUPT_STATUS1_MASK);
566 dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
567 DP_INTERRUPT_STATUS2_MASK);
569 dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
570 dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
574 void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
575 u32 intr_mask, bool en)
577 struct dp_catalog_private *catalog = container_of(dp_catalog,
578 struct dp_catalog_private, dp_catalog);
580 u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
582 config = (en ? config | intr_mask : config & ~intr_mask);
584 DRM_DEBUG_DP("intr_mask=%#x config=%#x\n", intr_mask, config);
585 dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
586 config & DP_DP_HPD_INT_MASK);
589 void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog)
591 struct dp_catalog_private *catalog = container_of(dp_catalog,
592 struct dp_catalog_private, dp_catalog);
594 u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
596 /* enable HPD plug and unplug interrupts */
597 dp_catalog_hpd_config_intr(dp_catalog,
598 DP_DP_HPD_PLUG_INT_MASK | DP_DP_HPD_UNPLUG_INT_MASK, true);
600 /* Configure REFTIMER and enable it */
601 reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
602 dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);
605 dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
608 u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
610 struct dp_catalog_private *catalog = container_of(dp_catalog,
611 struct dp_catalog_private, dp_catalog);
614 status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
615 DRM_DEBUG_DP("aux status: %#x\n", status);
616 status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
617 status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
622 u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
624 struct dp_catalog_private *catalog = container_of(dp_catalog,
625 struct dp_catalog_private, dp_catalog);
628 isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
629 dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
630 (isr & DP_DP_HPD_INT_MASK));
635 int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
637 struct dp_catalog_private *catalog = container_of(dp_catalog,
638 struct dp_catalog_private, dp_catalog);
641 intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
642 intr &= ~DP_INTERRUPT_STATUS2_MASK;
643 intr_ack = (intr & DP_INTERRUPT_STATUS2)
644 << DP_INTERRUPT_STATUS_ACK_SHIFT;
645 dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
646 intr_ack | DP_INTERRUPT_STATUS2_MASK);
651 void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
653 struct dp_catalog_private *catalog = container_of(dp_catalog,
654 struct dp_catalog_private, dp_catalog);
656 dp_write_ahb(catalog, REG_DP_PHY_CTRL,
657 DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
658 usleep_range(1000, 1100); /* h/w recommended delay */
659 dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
662 int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
663 u8 v_level, u8 p_level)
665 struct dp_catalog_private *catalog = container_of(dp_catalog,
666 struct dp_catalog_private, dp_catalog);
667 struct dp_io *dp_io = catalog->io;
668 struct phy *phy = dp_io->phy;
669 struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp;
671 /* TODO: Update for all lanes instead of just first one */
672 opts_dp->voltage[0] = v_level;
673 opts_dp->pre[0] = p_level;
674 opts_dp->set_voltages = 1;
675 phy_configure(phy, &dp_io->phy_opts);
676 opts_dp->set_voltages = 0;
681 void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
684 struct dp_catalog_private *catalog = container_of(dp_catalog,
685 struct dp_catalog_private, dp_catalog);
688 /* Make sure to clear the current pattern before starting a new one */
689 dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);
691 DRM_DEBUG_DP("pattern: %#x\n", pattern);
693 case DP_PHY_TEST_PATTERN_D10_2:
694 dp_write_link(catalog, REG_DP_STATE_CTRL,
695 DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
697 case DP_PHY_TEST_PATTERN_ERROR_COUNT:
699 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
701 value |= SCRAMBLER_RESET_COUNT_VALUE;
702 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
704 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
705 DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
706 dp_write_link(catalog, REG_DP_STATE_CTRL,
707 DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
709 case DP_PHY_TEST_PATTERN_PRBS7:
710 dp_write_link(catalog, REG_DP_STATE_CTRL,
711 DP_STATE_CTRL_LINK_PRBS7);
713 case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
714 dp_write_link(catalog, REG_DP_STATE_CTRL,
715 DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
716 /* 00111110000011111000001111100000 */
717 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
719 /* 00001111100000111110000011111000 */
720 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
722 /* 1111100000111110 */
723 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
726 case DP_PHY_TEST_PATTERN_CP2520:
727 value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
728 value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
729 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
731 value = DP_HBR2_ERM_PATTERN;
732 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
734 value |= SCRAMBLER_RESET_COUNT_VALUE;
735 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
737 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
738 DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
739 dp_write_link(catalog, REG_DP_STATE_CTRL,
740 DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
741 value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
742 value |= DP_MAINLINK_CTRL_ENABLE;
743 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
745 case DP_PHY_TEST_PATTERN_SEL_MASK:
746 dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
747 DP_MAINLINK_CTRL_ENABLE);
748 dp_write_link(catalog, REG_DP_STATE_CTRL,
749 DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
752 DRM_DEBUG_DP("No valid test pattern requested: %#x\n", pattern);
757 u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
759 struct dp_catalog_private *catalog = container_of(dp_catalog,
760 struct dp_catalog_private, dp_catalog);
762 return dp_read_link(catalog, REG_DP_MAINLINK_READY);
765 /* panel related catalog functions */
766 int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
768 struct dp_catalog_private *catalog = container_of(dp_catalog,
769 struct dp_catalog_private, dp_catalog);
771 dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
773 dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
774 dp_catalog->sync_start);
775 dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
776 dp_catalog->width_blanking);
777 dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
778 dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0);
782 void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
783 struct drm_display_mode *drm_mode)
785 struct dp_catalog_private *catalog = container_of(dp_catalog,
786 struct dp_catalog_private, dp_catalog);
787 u32 hsync_period, vsync_period;
788 u32 display_v_start, display_v_end;
789 u32 hsync_start_x, hsync_end_x;
794 /* TPG config parameters*/
795 hsync_period = drm_mode->htotal;
796 vsync_period = drm_mode->vtotal;
798 display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
800 display_v_end = ((vsync_period - (drm_mode->vsync_start -
804 display_v_start += drm_mode->htotal - drm_mode->hsync_start;
805 display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);
807 hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
808 hsync_end_x = hsync_period - (drm_mode->hsync_start -
809 drm_mode->hdisplay) - 1;
811 v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;
813 hsync_ctl = (hsync_period << 16) |
814 (drm_mode->hsync_end - drm_mode->hsync_start);
815 display_hctl = (hsync_end_x << 16) | hsync_start_x;
818 dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
819 dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
820 dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
822 dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
824 dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
825 dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
826 dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
827 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
828 dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
829 dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
830 dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
831 dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
832 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
833 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
834 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
835 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
836 dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);
838 dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
839 DP_TPG_CHECKERED_RECT_PATTERN);
840 dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
841 DP_TPG_VIDEO_CONFIG_BPP_8BIT |
842 DP_TPG_VIDEO_CONFIG_RGB);
843 dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
844 DP_BIST_ENABLE_DPBIST_EN);
845 dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
846 DP_TIMING_ENGINE_EN_EN);
847 DRM_DEBUG_DP("%s: enabled tpg\n", __func__);
850 void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
852 struct dp_catalog_private *catalog = container_of(dp_catalog,
853 struct dp_catalog_private, dp_catalog);
855 dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
856 dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
857 dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
860 struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
862 struct dp_catalog_private *catalog;
865 DRM_ERROR("invalid input\n");
866 return ERR_PTR(-EINVAL);
869 catalog = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
871 return ERR_PTR(-ENOMEM);
876 return &catalog->dp_catalog;
879 void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
881 struct dp_catalog_private *catalog;
882 u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
883 enum dp_catalog_audio_sdp_type sdp;
884 enum dp_catalog_audio_header_type header;
889 catalog = container_of(dp_catalog,
890 struct dp_catalog_private, dp_catalog);
892 sdp_map = catalog->audio_map;
893 sdp = dp_catalog->sdp_type;
894 header = dp_catalog->sdp_header;
896 dp_catalog->audio_data = dp_read_link(catalog,
897 sdp_map[sdp][header]);
900 void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
902 struct dp_catalog_private *catalog;
903 u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
904 enum dp_catalog_audio_sdp_type sdp;
905 enum dp_catalog_audio_header_type header;
911 catalog = container_of(dp_catalog,
912 struct dp_catalog_private, dp_catalog);
914 sdp_map = catalog->audio_map;
915 sdp = dp_catalog->sdp_type;
916 header = dp_catalog->sdp_header;
917 data = dp_catalog->audio_data;
919 dp_write_link(catalog, sdp_map[sdp][header], data);
922 void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
924 struct dp_catalog_private *catalog;
925 u32 acr_ctrl, select;
930 catalog = container_of(dp_catalog,
931 struct dp_catalog_private, dp_catalog);
933 select = dp_catalog->audio_data;
934 acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
936 DRM_DEBUG_DP("select: %#x, acr_ctrl: %#x\n", select, acr_ctrl);
938 dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
941 void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
943 struct dp_catalog_private *catalog;
950 catalog = container_of(dp_catalog,
951 struct dp_catalog_private, dp_catalog);
953 enable = !!dp_catalog->audio_data;
954 audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
957 audio_ctrl |= BIT(0);
959 audio_ctrl &= ~BIT(0);
961 DRM_DEBUG_DP("dp_audio_cfg = 0x%x\n", audio_ctrl);
963 dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
964 /* make sure audio engine is disabled */
968 void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
970 struct dp_catalog_private *catalog;
977 catalog = container_of(dp_catalog,
978 struct dp_catalog_private, dp_catalog);
980 sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
981 /* AUDIO_TIMESTAMP_SDP_EN */
983 /* AUDIO_STREAM_SDP_EN */
985 /* AUDIO_COPY_MANAGEMENT_SDP_EN */
987 /* AUDIO_ISRC_SDP_EN */
989 /* AUDIO_INFOFRAME_SDP_EN */
992 DRM_DEBUG_DP("sdp_cfg = 0x%x\n", sdp_cfg);
994 dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
996 sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
997 /* IFRM_REGSRC -> Do not use reg values */
999 /* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
1000 sdp_cfg2 &= ~BIT(1);
1002 DRM_DEBUG_DP("sdp_cfg2 = 0x%x\n", sdp_cfg2);
1004 dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
1007 void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
1009 struct dp_catalog_private *catalog;
1011 static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
1013 MMSS_DP_AUDIO_STREAM_0,
1014 MMSS_DP_AUDIO_STREAM_1,
1015 MMSS_DP_AUDIO_STREAM_1,
1018 MMSS_DP_AUDIO_TIMESTAMP_0,
1019 MMSS_DP_AUDIO_TIMESTAMP_1,
1020 MMSS_DP_AUDIO_TIMESTAMP_1,
1023 MMSS_DP_AUDIO_INFOFRAME_0,
1024 MMSS_DP_AUDIO_INFOFRAME_1,
1025 MMSS_DP_AUDIO_INFOFRAME_1,
1028 MMSS_DP_AUDIO_COPYMANAGEMENT_0,
1029 MMSS_DP_AUDIO_COPYMANAGEMENT_1,
1030 MMSS_DP_AUDIO_COPYMANAGEMENT_1,
1033 MMSS_DP_AUDIO_ISRC_0,
1034 MMSS_DP_AUDIO_ISRC_1,
1035 MMSS_DP_AUDIO_ISRC_1,
1042 catalog = container_of(dp_catalog,
1043 struct dp_catalog_private, dp_catalog);
1045 catalog->audio_map = sdp_map;
1048 void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
1050 struct dp_catalog_private *catalog;
1051 u32 mainlink_levels, safe_to_exit_level;
1056 catalog = container_of(dp_catalog,
1057 struct dp_catalog_private, dp_catalog);
1059 safe_to_exit_level = dp_catalog->audio_data;
1060 mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1061 mainlink_levels &= 0xFE0;
1062 mainlink_levels |= safe_to_exit_level;
1064 DRM_DEBUG_DP("mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1065 mainlink_levels, safe_to_exit_level);
1067 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);