drivers/gpu/drm/msm/dp/dp_catalog.c
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4  */
5
6 #define pr_fmt(fmt)     "[drm-dp] %s: " fmt, __func__
7
8 #include <linux/delay.h>
9 #include <linux/iopoll.h>
10 #include <linux/phy/phy.h>
11 #include <linux/phy/phy-dp.h>
12 #include <linux/rational.h>
13 #include <drm/drm_dp_helper.h>
14 #include <drm/drm_print.h>
15
16 #include "dp_catalog.h"
17 #include "dp_reg.h"
18
19 #define POLLING_SLEEP_US                        1000
20 #define POLLING_TIMEOUT_US                      10000
21
22 #define SCRAMBLER_RESET_COUNT_VALUE             0xFC
23
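/*
 * The ack and unmask bits for each interrupt source sit immediately above
 * its status bit in REG_DP_INTR_STATUS/STATUS2; the constant shifts below
 * are used to derive the *_ACK and *_MASK values from the status masks.
 */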
24 #define DP_INTERRUPT_STATUS_ACK_SHIFT   1
25 #define DP_INTERRUPT_STATUS_MASK_SHIFT  2
26
27 #define MSM_DP_CONTROLLER_AHB_OFFSET    0x0000
28 #define MSM_DP_CONTROLLER_AHB_SIZE      0x0200
29 #define MSM_DP_CONTROLLER_AUX_OFFSET    0x0200
30 #define MSM_DP_CONTROLLER_AUX_SIZE      0x0200
31 #define MSM_DP_CONTROLLER_LINK_OFFSET   0x0400
32 #define MSM_DP_CONTROLLER_LINK_SIZE     0x0C00
33 #define MSM_DP_CONTROLLER_P0_OFFSET     0x1000
34 #define MSM_DP_CONTROLLER_P0_SIZE       0x0400
35
36 #define DP_INTERRUPT_STATUS1 \
37         (DP_INTR_AUX_I2C_DONE | \
38         DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
39         DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
40         DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
41         DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
42
43 #define DP_INTERRUPT_STATUS1_ACK \
44         (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
45 #define DP_INTERRUPT_STATUS1_MASK \
46         (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
47
48 #define DP_INTERRUPT_STATUS2 \
49         (DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
50         DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
51
52 #define DP_INTERRUPT_STATUS2_ACK \
53         (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
54 #define DP_INTERRUPT_STATUS2_MASK \
55         (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
56
57 struct dp_catalog_private {
58         struct device *dev;
59         struct dp_io *io;
60         u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
61         struct dp_catalog dp_catalog;
62         u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
63 };
64
65 void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state)
66 {
67         struct dp_catalog_private *catalog = container_of(dp_catalog,
68                         struct dp_catalog_private, dp_catalog);
69
70         msm_disp_snapshot_add_block(disp_state, catalog->io->dp_controller.len,
71                         catalog->io->dp_controller.base, "dp_ctrl");
72 }
73
74 static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
75 {
76         offset += MSM_DP_CONTROLLER_AUX_OFFSET;
77         return readl_relaxed(catalog->io->dp_controller.base + offset);
78 }
79
80 static inline void dp_write_aux(struct dp_catalog_private *catalog,
81                                u32 offset, u32 data)
82 {
83         offset += MSM_DP_CONTROLLER_AUX_OFFSET;
84         /*
85          * To make sure aux reg writes happen before any other operation,
86          * this function uses writel() instead of writel_relaxed().
87          */
88         writel(data, catalog->io->dp_controller.base + offset);
89 }
90
91 static inline u32 dp_read_ahb(struct dp_catalog_private *catalog, u32 offset)
92 {
93         offset += MSM_DP_CONTROLLER_AHB_OFFSET;
94         return readl_relaxed(catalog->io->dp_controller.base + offset);
95 }
96
97 static inline void dp_write_ahb(struct dp_catalog_private *catalog,
98                                u32 offset, u32 data)
99 {
100         offset += MSM_DP_CONTROLLER_AHB_OFFSET;
101         /*
102          * To make sure AHB reg writes happen before any other operation,
103          * this function uses writel() instead of writel_relaxed().
104          */
105         writel(data, catalog->io->dp_controller.base + offset);
106 }
107
108 static inline void dp_write_p0(struct dp_catalog_private *catalog,
109                                u32 offset, u32 data)
110 {
111         offset += MSM_DP_CONTROLLER_P0_OFFSET;
112         /*
113          * To make sure interface reg writes happen before any other operation,
114          * this function uses writel() instead of writel_relaxed().
115          */
116         writel(data, catalog->io->dp_controller.base + offset);
117 }
118
119 static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
120                                u32 offset)
121 {
122         offset += MSM_DP_CONTROLLER_P0_OFFSET;
123         /*
124          * Unlike the write helpers above, this read uses readl_relaxed();
125          * no extra ordering is enforced here.
126          */
127         return readl_relaxed(catalog->io->dp_controller.base + offset);
128 }
129
130 static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
131 {
132         offset += MSM_DP_CONTROLLER_LINK_OFFSET;
133         return readl_relaxed(catalog->io->dp_controller.base + offset);
134 }
135
136 static inline void dp_write_link(struct dp_catalog_private *catalog,
137                                u32 offset, u32 data)
138 {
139         offset += MSM_DP_CONTROLLER_LINK_OFFSET;
140         /*
141          * To make sure link reg writes happen before any other operation,
142          * this function uses writel() instead of writel_relaxed().
143          */
144         writel(data, catalog->io->dp_controller.base + offset);
145 }
146
147 /* aux related catalog functions */
148 u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
149 {
150         struct dp_catalog_private *catalog = container_of(dp_catalog,
151                                 struct dp_catalog_private, dp_catalog);
152
153         return dp_read_aux(catalog, REG_DP_AUX_DATA);
154 }
155
156 int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
157 {
158         struct dp_catalog_private *catalog = container_of(dp_catalog,
159                                 struct dp_catalog_private, dp_catalog);
160
161         dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
162         return 0;
163 }
164
165 int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
166 {
167         struct dp_catalog_private *catalog = container_of(dp_catalog,
168                                 struct dp_catalog_private, dp_catalog);
169
170         dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
171         return 0;
172 }
173
174 int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
175 {
176         u32 data;
177         struct dp_catalog_private *catalog = container_of(dp_catalog,
178                                 struct dp_catalog_private, dp_catalog);
179
180         if (read) {
181                 data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
182                 data &= ~DP_AUX_TRANS_CTRL_GO;
183                 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
184         } else {
185                 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
186         }
187         return 0;
188 }
189
190 int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
191 {
192         struct dp_catalog_private *catalog = container_of(dp_catalog,
193                                 struct dp_catalog_private, dp_catalog);
194
195         dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
196         dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
197         dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
198         dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
199         return 0;
200 }
201
202 /**
203  * dp_catalog_aux_reset() - reset AUX controller
204  *
205  * @dp_catalog: DP catalog structure
206  *
207  * Return: void
208  *
209  * This function resets the AUX controller.
210  *
211  * NOTE: resetting the AUX controller will also clear any pending
212  * HPD related interrupts.
213  */
214 void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
215 {
216         u32 aux_ctrl;
217         struct dp_catalog_private *catalog = container_of(dp_catalog,
218                                 struct dp_catalog_private, dp_catalog);
219
220         aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
221
222         aux_ctrl |= DP_AUX_CTRL_RESET;
223         dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
224         usleep_range(1000, 1100); /* h/w recommended delay */
225
226         aux_ctrl &= ~DP_AUX_CTRL_RESET;
227         dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
228 }
229
230 void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
231 {
232         u32 aux_ctrl;
233         struct dp_catalog_private *catalog = container_of(dp_catalog,
234                                 struct dp_catalog_private, dp_catalog);
235
236         aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
237
238         if (enable) {
239                 dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
240                 dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
241                 aux_ctrl |= DP_AUX_CTRL_ENABLE;
242         } else {
243                 aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
244         }
245
246         dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
247 }
248
249 void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog)
250 {
251         struct dp_catalog_private *catalog = container_of(dp_catalog,
252                                 struct dp_catalog_private, dp_catalog);
253         struct dp_io *dp_io = catalog->io;
254         struct phy *phy = dp_io->phy;
255
256         phy_calibrate(phy);
257 }
258
259 static void dump_regs(void __iomem *base, int len)
260 {
261         int i;
262         u32 x0, x4, x8, xc;
263         u32 addr_off = 0;
264
265         len = DIV_ROUND_UP(len, 16);
266         for (i = 0; i < len; i++) {
267                 x0 = readl_relaxed(base + addr_off);
268                 x4 = readl_relaxed(base + addr_off + 0x04);
269                 x8 = readl_relaxed(base + addr_off + 0x08);
270                 xc = readl_relaxed(base + addr_off + 0x0c);
271
272                 pr_info("%08x: %08x %08x %08x %08x\n", addr_off, x0, x4, x8, xc);
273                 addr_off += 16;
274         }
275 }
276
277 void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
278 {
279         u32 offset, len;
280         struct dp_catalog_private *catalog = container_of(dp_catalog,
281                 struct dp_catalog_private, dp_catalog);
282
283         pr_info("AHB regs\n");
284         offset = MSM_DP_CONTROLLER_AHB_OFFSET;
285         len = MSM_DP_CONTROLLER_AHB_SIZE;
286         dump_regs(catalog->io->dp_controller.base + offset, len);
287
288         pr_info("AUXCLK regs\n");
289         offset = MSM_DP_CONTROLLER_AUX_OFFSET;
290         len = MSM_DP_CONTROLLER_AUX_SIZE;
291         dump_regs(catalog->io->dp_controller.base + offset, len);
292
293         pr_info("LCLK regs\n");
294         offset = MSM_DP_CONTROLLER_LINK_OFFSET;
295         len = MSM_DP_CONTROLLER_LINK_SIZE;
296         dump_regs(catalog->io->dp_controller.base + offset, len);
297
298         pr_info("P0CLK regs\n");
299         offset = MSM_DP_CONTROLLER_P0_OFFSET;
300         len = MSM_DP_CONTROLLER_P0_SIZE;
301         dump_regs(catalog->io->dp_controller.base + offset, len);
302 }
303
304 u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
305 {
306         struct dp_catalog_private *catalog = container_of(dp_catalog,
307                                 struct dp_catalog_private, dp_catalog);
308         u32 intr, intr_ack;
309
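            /*
             * Read the pending status, drop the unmask bits, then ack the
             * asserted sources by writing their status bits back at the ack
             * position while keeping the unmask bits set.
             */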
310         intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
311         intr &= ~DP_INTERRUPT_STATUS1_MASK;
312         intr_ack = (intr & DP_INTERRUPT_STATUS1)
313                         << DP_INTERRUPT_STATUS_ACK_SHIFT;
314         dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
315                         DP_INTERRUPT_STATUS1_MASK);
316
317         return intr;
318
319 }
320
321 /* controller related catalog functions */
322 void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
323                                 u32 dp_tu, u32 valid_boundary,
324                                 u32 valid_boundary2)
325 {
326         struct dp_catalog_private *catalog = container_of(dp_catalog,
327                                 struct dp_catalog_private, dp_catalog);
328
329         dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
330         dp_write_link(catalog, REG_DP_TU, dp_tu);
331         dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
332 }
333
334 void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
335 {
336         struct dp_catalog_private *catalog = container_of(dp_catalog,
337                                 struct dp_catalog_private, dp_catalog);
338
339         dp_write_link(catalog, REG_DP_STATE_CTRL, state);
340 }
341
342 void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
343 {
344         struct dp_catalog_private *catalog = container_of(dp_catalog,
345                                 struct dp_catalog_private, dp_catalog);
346
347         DRM_DEBUG_DP("DP_CONFIGURATION_CTRL=0x%x\n", cfg);
348
349         dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
350 }
351
352 void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
353 {
354         struct dp_catalog_private *catalog = container_of(dp_catalog,
355                                 struct dp_catalog_private, dp_catalog);
356         u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
357         u32 ln_mapping;
358
359         ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
360         ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
361         ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
362         ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
363
364         dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
365                         ln_mapping);
366 }
367
368 void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
369                                                 bool enable)
370 {
371         u32 mainlink_ctrl;
372         struct dp_catalog_private *catalog = container_of(dp_catalog,
373                                 struct dp_catalog_private, dp_catalog);
374
375         DRM_DEBUG_DP("enable=%d\n", enable);
376         if (enable) {
377                 /*
378                  * To make sure link reg writes happen before other operations,
379                  * the dp_write_link() function uses writel().
380                  */
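                    /*
                     * Pulse the mainlink reset before enabling: clear ENABLE
                     * and RESET, assert RESET, release it, then set ENABLE
                     * together with the framing boundary select.
                     */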
381                 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
382
383                 mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
384                                                 DP_MAINLINK_CTRL_ENABLE);
385                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
386
387                 mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
388                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
389
390                 mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
391                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
392
393                 mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
394                                         DP_MAINLINK_FB_BOUNDARY_SEL);
395                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
396         } else {
397                 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
398                 mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
399                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
400         }
401 }
402
403 void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
404                                         u32 colorimetry_cfg,
405                                         u32 test_bits_depth)
406 {
407         u32 misc_val;
408         struct dp_catalog_private *catalog = container_of(dp_catalog,
409                                 struct dp_catalog_private, dp_catalog);
410
411         misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);
412
413         /* clear bpp bits */
414         misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
415         misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
416         misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
417         /* Configure clock to synchronous mode */
418         misc_val |= DP_MISC0_SYNCHRONOUS_CLK;
419
420         DRM_DEBUG_DP("misc settings = 0x%x\n", misc_val);
421         dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
422 }
423
424 void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
425                                         u32 rate, u32 stream_rate_khz,
426                                         bool fixed_nvid)
427 {
428         u32 pixel_m, pixel_n;
429         u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
430         u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
431         u32 const link_rate_hbr2 = 540000;
432         u32 const link_rate_hbr3 = 810000;
433         unsigned long den, num;
434
435         struct dp_catalog_private *catalog = container_of(dp_catalog,
436                                 struct dp_catalog_private, dp_catalog);
437
438         if (rate == link_rate_hbr3)
439                 pixel_div = 6;
440         else if (rate == 162000 || rate == 270000)
441                 pixel_div = 2;
442         else if (rate == link_rate_hbr2)
443                 pixel_div = 4;
444         else
445                 DRM_ERROR("Invalid pixel mux divider\n");
446
447         dispcc_input_rate = (rate * 10) / pixel_div;
448
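            /*
             * Approximate the ratio of the dispcc input rate to the stream
             * rate with 16-bit numerator/denominator values, then fold the
             * result into the MVID/NVID encoding programmed below.
             */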
449         rational_best_approximation(dispcc_input_rate, stream_rate_khz,
450                         (unsigned long)(1 << 16) - 1,
451                         (unsigned long)(1 << 16) - 1, &den, &num);
452
453         den = ~(den - num);
454         den = den & 0xFFFF;
455         pixel_m = num;
456         pixel_n = den;
457
458         mvid = (pixel_m & 0xFFFF) * 5;
459         nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
460
461         if (nvid < nvid_fixed) {
462                 u32 temp;
463
464                 temp = (nvid_fixed / nvid) * nvid;
465                 mvid = (nvid_fixed / nvid) * mvid;
466                 nvid = temp;
467         }
468
469         if (link_rate_hbr2 == rate)
470                 nvid *= 2;
471
472         if (link_rate_hbr3 == rate)
473                 nvid *= 3;
474
475         DRM_DEBUG_DP("mvid=0x%x, nvid=0x%x\n", mvid, nvid);
476         dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
477         dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
478         dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
479 }
480
481 int dp_catalog_ctrl_set_pattern(struct dp_catalog *dp_catalog,
482                                         u32 pattern)
483 {
484         int bit, ret;
485         u32 data;
486         struct dp_catalog_private *catalog = container_of(dp_catalog,
487                                 struct dp_catalog_private, dp_catalog);
488
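            /*
             * Kick off the requested training pattern via DP_STATE_CTRL,
             * then poll DP_MAINLINK_READY for the matching link-training
             * ready bit.
             */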
489         bit = BIT(pattern - 1);
490         DRM_DEBUG_DP("hw: bit=%d train=%d\n", bit, pattern);
491         dp_catalog_ctrl_state_ctrl(dp_catalog, bit);
492
493         bit = BIT(pattern - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;
494
495         /* Poll for mainlink ready status */
496         ret = readx_poll_timeout(readl, catalog->io->dp_controller.base +
497                                         MSM_DP_CONTROLLER_LINK_OFFSET +
498                                         REG_DP_MAINLINK_READY,
499                                         data, data & bit,
500                                         POLLING_SLEEP_US, POLLING_TIMEOUT_US);
501         if (ret < 0) {
502                 DRM_ERROR("set pattern for link_train=%d failed\n", pattern);
503                 return ret;
504         }
505         return 0;
506 }
507
508 /**
509  * dp_catalog_ctrl_reset() - reset DP controller
510  *
511  * @dp_catalog: DP catalog structure
512  *
513  * Return: void
514  *
515  * This function resets the DP controller.
516  *
517  * NOTE: resetting the DP controller will also clear any pending
518  * HPD related interrupts.
519  */
520 void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
521 {
522         u32 sw_reset;
523         struct dp_catalog_private *catalog = container_of(dp_catalog,
524                                 struct dp_catalog_private, dp_catalog);
525
526         sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);
527
528         sw_reset |= DP_SW_RESET;
529         dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
530         usleep_range(1000, 1100); /* h/w recommended delay */
531
532         sw_reset &= ~DP_SW_RESET;
533         dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
534 }
535
536 bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
537 {
538         u32 data;
539         int ret;
540         struct dp_catalog_private *catalog = container_of(dp_catalog,
541                                 struct dp_catalog_private, dp_catalog);
542
543         /* Poll for mainlink ready status */
544         ret = readl_poll_timeout(catalog->io->dp_controller.base +
545                                 MSM_DP_CONTROLLER_LINK_OFFSET +
546                                 REG_DP_MAINLINK_READY,
547                                 data, data & DP_MAINLINK_READY_FOR_VIDEO,
548                                 POLLING_SLEEP_US, POLLING_TIMEOUT_US);
549         if (ret < 0) {
550                 DRM_ERROR("mainlink not ready\n");
551                 return false;
552         }
553
554         return true;
555 }
556
557 void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
558                                                 bool enable)
559 {
560         struct dp_catalog_private *catalog = container_of(dp_catalog,
561                                 struct dp_catalog_private, dp_catalog);
562
563         if (enable) {
564                 dp_write_ahb(catalog, REG_DP_INTR_STATUS,
565                                 DP_INTERRUPT_STATUS1_MASK);
566                 dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
567                                 DP_INTERRUPT_STATUS2_MASK);
568         } else {
569                 dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
570                 dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
571         }
572 }
573
574 void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
575                         u32 intr_mask, bool en)
576 {
577         struct dp_catalog_private *catalog = container_of(dp_catalog,
578                                 struct dp_catalog_private, dp_catalog);
579
580         u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
581
582         config = (en ? config | intr_mask : config & ~intr_mask);
583
584         DRM_DEBUG_DP("intr_mask=%#x config=%#x\n", intr_mask, config);
585         dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
586                                 config & DP_DP_HPD_INT_MASK);
587 }
588
589 void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog)
590 {
591         struct dp_catalog_private *catalog = container_of(dp_catalog,
592                                 struct dp_catalog_private, dp_catalog);
593
594         u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
595
596         /* enable HPD plug and unplug interrupts */
597         dp_catalog_hpd_config_intr(dp_catalog,
598                 DP_DP_HPD_PLUG_INT_MASK | DP_DP_HPD_UNPLUG_INT_MASK, true);
599
600         /* Configure REFTIMER and enable it */
601         reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
602         dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);
603
604         /* Enable HPD */
605         dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
606 }
607
608 u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
609 {
610         struct dp_catalog_private *catalog = container_of(dp_catalog,
611                                 struct dp_catalog_private, dp_catalog);
612         u32 status;
613
614         status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
615         DRM_DEBUG_DP("aux status: %#x\n", status);
616         status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
617         status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
618
619         return status;
620 }
621
622 u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
623 {
624         struct dp_catalog_private *catalog = container_of(dp_catalog,
625                                 struct dp_catalog_private, dp_catalog);
626         int isr = 0;
627
628         isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
629         dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
630                                  (isr & DP_DP_HPD_INT_MASK));
631
632         return isr;
633 }
634
635 int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
636 {
637         struct dp_catalog_private *catalog = container_of(dp_catalog,
638                                 struct dp_catalog_private, dp_catalog);
639         u32 intr, intr_ack;
640
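            /*
             * Ack the DP_INTR_STATUS2 sources the same way
             * dp_catalog_aux_get_irq() handles DP_INTR_STATUS.
             */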
641         intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
642         intr &= ~DP_INTERRUPT_STATUS2_MASK;
643         intr_ack = (intr & DP_INTERRUPT_STATUS2)
644                         << DP_INTERRUPT_STATUS_ACK_SHIFT;
645         dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
646                         intr_ack | DP_INTERRUPT_STATUS2_MASK);
647
648         return intr;
649 }
650
651 void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
652 {
653         struct dp_catalog_private *catalog = container_of(dp_catalog,
654                                 struct dp_catalog_private, dp_catalog);
655
656         dp_write_ahb(catalog, REG_DP_PHY_CTRL,
657                         DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
658         usleep_range(1000, 1100); /* h/w recommended delay */
659         dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
660 }
661
662 int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
663                 u8 v_level, u8 p_level)
664 {
665         struct dp_catalog_private *catalog = container_of(dp_catalog,
666                                 struct dp_catalog_private, dp_catalog);
667         struct dp_io *dp_io = catalog->io;
668         struct phy *phy = dp_io->phy;
669         struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp;
670
671         /* TODO: Update for all lanes instead of just first one */
672         opts_dp->voltage[0] = v_level;
673         opts_dp->pre[0] = p_level;
674         opts_dp->set_voltages = 1;
675         phy_configure(phy, &dp_io->phy_opts);
676         opts_dp->set_voltages = 0;
677
678         return 0;
679 }
680
681 void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
682                         u32 pattern)
683 {
684         struct dp_catalog_private *catalog = container_of(dp_catalog,
685                                 struct dp_catalog_private, dp_catalog);
686         u32 value = 0x0;
687
688         /* Make sure to clear the current pattern before starting a new one */
689         dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);
690
691         DRM_DEBUG_DP("pattern: %#x\n", pattern);
692         switch (pattern) {
693         case DP_PHY_TEST_PATTERN_D10_2:
694                 dp_write_link(catalog, REG_DP_STATE_CTRL,
695                                 DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
696                 break;
697         case DP_PHY_TEST_PATTERN_ERROR_COUNT:
698                 value &= ~(1 << 16);
699                 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
700                                         value);
701                 value |= SCRAMBLER_RESET_COUNT_VALUE;
702                 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
703                                         value);
704                 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
705                                         DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
706                 dp_write_link(catalog, REG_DP_STATE_CTRL,
707                                         DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
708                 break;
709         case DP_PHY_TEST_PATTERN_PRBS7:
710                 dp_write_link(catalog, REG_DP_STATE_CTRL,
711                                 DP_STATE_CTRL_LINK_PRBS7);
712                 break;
713         case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
714                 dp_write_link(catalog, REG_DP_STATE_CTRL,
715                                 DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
716                 /* 00111110000011111000001111100000 */
717                 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
718                                 0x3E0F83E0);
719                 /* 00001111100000111110000011111000 */
720                 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
721                                 0x0F83E0F8);
722                 /* 1111100000111110 */
723                 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
724                                 0x0000F83E);
725                 break;
726         case DP_PHY_TEST_PATTERN_CP2520:
727                 value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
728                 value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
729                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
730
731                 value = DP_HBR2_ERM_PATTERN;
732                 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
733                                 value);
734                 value |= SCRAMBLER_RESET_COUNT_VALUE;
735                 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
736                                         value);
737                 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
738                                         DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
739                 dp_write_link(catalog, REG_DP_STATE_CTRL,
740                                         DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
741                 value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
742                 value |= DP_MAINLINK_CTRL_ENABLE;
743                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
744                 break;
745         case DP_PHY_TEST_PATTERN_SEL_MASK:
746                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
747                                 DP_MAINLINK_CTRL_ENABLE);
748                 dp_write_link(catalog, REG_DP_STATE_CTRL,
749                                 DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
750                 break;
751         default:
752                 DRM_DEBUG_DP("No valid test pattern requested: %#x\n", pattern);
753                 break;
754         }
755 }
756
757 u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
758 {
759         struct dp_catalog_private *catalog = container_of(dp_catalog,
760                                 struct dp_catalog_private, dp_catalog);
761
762         return dp_read_link(catalog, REG_DP_MAINLINK_READY);
763 }
764
765 /* panel related catalog functions */
766 int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
767 {
768         struct dp_catalog_private *catalog = container_of(dp_catalog,
769                                 struct dp_catalog_private, dp_catalog);
770
771         dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
772                                 dp_catalog->total);
773         dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
774                                 dp_catalog->sync_start);
775         dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
776                                 dp_catalog->width_blanking);
777         dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
778         return 0;
779 }
780
781 void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
782                                 struct drm_display_mode *drm_mode)
783 {
784         struct dp_catalog_private *catalog = container_of(dp_catalog,
785                                 struct dp_catalog_private, dp_catalog);
786         u32 hsync_period, vsync_period;
787         u32 display_v_start, display_v_end;
788         u32 hsync_start_x, hsync_end_x;
789         u32 v_sync_width;
790         u32 hsync_ctl;
791         u32 display_hctl;
792
793         /* TPG config parameters */
794         hsync_period = drm_mode->htotal;
795         vsync_period = drm_mode->vtotal;
796
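            /*
             * The TPG timing engine expresses vertical start/end positions
             * in pixel-clock units (lines scaled by the hsync period), so
             * the values below are derived from the DRM mode accordingly.
             */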
797         display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
798                                         hsync_period);
799         display_v_end = ((vsync_period - (drm_mode->vsync_start -
800                                         drm_mode->vdisplay))
801                                         * hsync_period) - 1;
802
803         display_v_start += drm_mode->htotal - drm_mode->hsync_start;
804         display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);
805
806         hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
807         hsync_end_x = hsync_period - (drm_mode->hsync_start -
808                                         drm_mode->hdisplay) - 1;
809
810         v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;
811
812         hsync_ctl = (hsync_period << 16) |
813                         (drm_mode->hsync_end - drm_mode->hsync_start);
814         display_hctl = (hsync_end_x << 16) | hsync_start_x;
815
816
817         dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
818         dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
819         dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
820                         hsync_period);
821         dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
822                         hsync_period);
823         dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
824         dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
825         dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
826         dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
827         dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
828         dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
829         dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
830         dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
831         dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
832         dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
833         dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
834         dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
835         dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);
836
837         dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
838                                 DP_TPG_CHECKERED_RECT_PATTERN);
839         dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
840                                 DP_TPG_VIDEO_CONFIG_BPP_8BIT |
841                                 DP_TPG_VIDEO_CONFIG_RGB);
842         dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
843                                 DP_BIST_ENABLE_DPBIST_EN);
844         dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
845                                 DP_TIMING_ENGINE_EN_EN);
846         DRM_DEBUG_DP("%s: enabled tpg\n", __func__);
847 }
848
849 void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
850 {
851         struct dp_catalog_private *catalog = container_of(dp_catalog,
852                                 struct dp_catalog_private, dp_catalog);
853
854         dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
855         dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
856         dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
857 }
858
859 struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
860 {
861         struct dp_catalog_private *catalog;
862
863         if (!io) {
864                 DRM_ERROR("invalid input\n");
865                 return ERR_PTR(-EINVAL);
866         }
867
868         catalog = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
869         if (!catalog)
870                 return ERR_PTR(-ENOMEM);
871
872         catalog->dev = dev;
873         catalog->io = io;
874
875         return &catalog->dp_catalog;
876 }
877
878 void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
879 {
880         struct dp_catalog_private *catalog;
881         u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
882         enum dp_catalog_audio_sdp_type sdp;
883         enum dp_catalog_audio_header_type header;
884
885         if (!dp_catalog)
886                 return;
887
888         catalog = container_of(dp_catalog,
889                 struct dp_catalog_private, dp_catalog);
890
891         sdp_map = catalog->audio_map;
892         sdp     = dp_catalog->sdp_type;
893         header  = dp_catalog->sdp_header;
894
895         dp_catalog->audio_data = dp_read_link(catalog,
896                         sdp_map[sdp][header]);
897 }
898
899 void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
900 {
901         struct dp_catalog_private *catalog;
902         u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
903         enum dp_catalog_audio_sdp_type sdp;
904         enum dp_catalog_audio_header_type header;
905         u32 data;
906
907         if (!dp_catalog)
908                 return;
909
910         catalog = container_of(dp_catalog,
911                 struct dp_catalog_private, dp_catalog);
912
913         sdp_map = catalog->audio_map;
914         sdp     = dp_catalog->sdp_type;
915         header  = dp_catalog->sdp_header;
916         data    = dp_catalog->audio_data;
917
918         dp_write_link(catalog, sdp_map[sdp][header], data);
919 }
920
921 void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
922 {
923         struct dp_catalog_private *catalog;
924         u32 acr_ctrl, select;
925
926         if (!dp_catalog)
927                 return;
928
929         catalog = container_of(dp_catalog,
930                 struct dp_catalog_private, dp_catalog);
931
932         select = dp_catalog->audio_data;
933         acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
934
935         DRM_DEBUG_DP("select: %#x, acr_ctrl: %#x\n", select, acr_ctrl);
936
937         dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
938 }
939
940 void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
941 {
942         struct dp_catalog_private *catalog;
943         bool enable;
944         u32 audio_ctrl;
945
946         if (!dp_catalog)
947                 return;
948
949         catalog = container_of(dp_catalog,
950                 struct dp_catalog_private, dp_catalog);
951
952         enable = !!dp_catalog->audio_data;
953         audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
954
955         if (enable)
956                 audio_ctrl |= BIT(0);
957         else
958                 audio_ctrl &= ~BIT(0);
959
960         DRM_DEBUG_DP("dp_audio_cfg = 0x%x\n", audio_ctrl);
961
962         dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
963         /* make sure audio engine is disabled */
964         wmb();
965 }
966
967 void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
968 {
969         struct dp_catalog_private *catalog;
970         u32 sdp_cfg = 0;
971         u32 sdp_cfg2 = 0;
972
973         if (!dp_catalog)
974                 return;
975
976         catalog = container_of(dp_catalog,
977                 struct dp_catalog_private, dp_catalog);
978
979         sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
980         /* AUDIO_TIMESTAMP_SDP_EN */
981         sdp_cfg |= BIT(1);
982         /* AUDIO_STREAM_SDP_EN */
983         sdp_cfg |= BIT(2);
984         /* AUDIO_COPY_MANAGEMENT_SDP_EN */
985         sdp_cfg |= BIT(5);
986         /* AUDIO_ISRC_SDP_EN  */
987         sdp_cfg |= BIT(6);
988         /* AUDIO_INFOFRAME_SDP_EN  */
989         sdp_cfg |= BIT(20);
990
991         DRM_DEBUG_DP("sdp_cfg = 0x%x\n", sdp_cfg);
992
993         dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
994
995         sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
996         /* IFRM_REGSRC -> Do not use reg values */
997         sdp_cfg2 &= ~BIT(0);
998         /* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
999         sdp_cfg2 &= ~BIT(1);
1000
1001         DRM_DEBUG_DP("sdp_cfg2 = 0x%x\n", sdp_cfg2);
1002
1003         dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
1004 }
1005
1006 void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
1007 {
1008         struct dp_catalog_private *catalog;
1009
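             /*
              * Register map for the audio SDP headers, indexed as
              * [sdp_type][header]; headers 2 and 3 of each SDP share the
              * *_1 register.
              */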
1010         static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
1011                 {
1012                         MMSS_DP_AUDIO_STREAM_0,
1013                         MMSS_DP_AUDIO_STREAM_1,
1014                         MMSS_DP_AUDIO_STREAM_1,
1015                 },
1016                 {
1017                         MMSS_DP_AUDIO_TIMESTAMP_0,
1018                         MMSS_DP_AUDIO_TIMESTAMP_1,
1019                         MMSS_DP_AUDIO_TIMESTAMP_1,
1020                 },
1021                 {
1022                         MMSS_DP_AUDIO_INFOFRAME_0,
1023                         MMSS_DP_AUDIO_INFOFRAME_1,
1024                         MMSS_DP_AUDIO_INFOFRAME_1,
1025                 },
1026                 {
1027                         MMSS_DP_AUDIO_COPYMANAGEMENT_0,
1028                         MMSS_DP_AUDIO_COPYMANAGEMENT_1,
1029                         MMSS_DP_AUDIO_COPYMANAGEMENT_1,
1030                 },
1031                 {
1032                         MMSS_DP_AUDIO_ISRC_0,
1033                         MMSS_DP_AUDIO_ISRC_1,
1034                         MMSS_DP_AUDIO_ISRC_1,
1035                 },
1036         };
1037
1038         if (!dp_catalog)
1039                 return;
1040
1041         catalog = container_of(dp_catalog,
1042                 struct dp_catalog_private, dp_catalog);
1043
1044         catalog->audio_map = sdp_map;
1045 }
1046
1047 void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
1048 {
1049         struct dp_catalog_private *catalog;
1050         u32 mainlink_levels, safe_to_exit_level;
1051
1052         if (!dp_catalog)
1053                 return;
1054
1055         catalog = container_of(dp_catalog,
1056                 struct dp_catalog_private, dp_catalog);
1057
1058         safe_to_exit_level = dp_catalog->audio_data;
1059         mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1060         mainlink_levels &= 0xFE0;
1061         mainlink_levels |= safe_to_exit_level;
1062
1063         DRM_DEBUG_DP("mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1064                          mainlink_levels, safe_to_exit_level);
1065
1066         dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
1067 }