1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4 */
5
6 #define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__
7
8 #include <linux/delay.h>
9 #include <linux/iopoll.h>
10 #include <linux/phy/phy.h>
11 #include <linux/phy/phy-dp.h>
12 #include <linux/rational.h>
13 #include <drm/drm_dp_helper.h>
14 #include <drm/drm_print.h>
15
16 #include "dp_catalog.h"
17 #include "dp_reg.h"
18
/* AUX/mainlink readiness polling interval and timeout, in microseconds */
#define POLLING_SLEEP_US 1000
#define POLLING_TIMEOUT_US 10000

/* Count value programmed into the HBR2 compliance scrambler-reset register */
#define SCRAMBLER_RESET_COUNT_VALUE 0xFC

/*
 * In REG_DP_INTR_STATUS/STATUS2 every interrupt source owns a group of
 * adjacent bits: the raw status bit, its write-1-to-ack bit one position
 * above (<< 1), and its enable/mask bit two positions above (<< 2).
 */
#define DP_INTERRUPT_STATUS_ACK_SHIFT 1
#define DP_INTERRUPT_STATUS_MASK_SHIFT 2

/* All AUX-channel related interrupt sources (REG_DP_INTR_STATUS) */
#define DP_INTERRUPT_STATUS1 \
	(DP_INTR_AUX_I2C_DONE| \
	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)

/* Ack and enable views of the STATUS1 sources */
#define DP_INTERRUPT_STATUS1_ACK \
	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
#define DP_INTERRUPT_STATUS1_MASK \
	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)

/* Video/stream related interrupt sources (REG_DP_INTR_STATUS2) */
#define DP_INTERRUPT_STATUS2 \
	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)

/* Ack and enable views of the STATUS2 sources */
#define DP_INTERRUPT_STATUS2_ACK \
	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
#define DP_INTERRUPT_STATUS2_MASK \
	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
47
/*
 * Private catalog state; embeds the public struct dp_catalog so that the
 * exported functions can recover it with container_of().
 */
struct dp_catalog_private {
	struct device *dev;	/* owning device (used for devm allocation) */
	struct dp_io *io;	/* mapped register blocks and PHY handle */
	/* per-SDP-type lookup table of audio SDP header register offsets */
	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
	struct dp_catalog dp_catalog;	/* public interface, embedded */
	/* AUX tuning LUT indices; not referenced in this file */
	u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
};
55
/*
 * dp_catalog_snapshot() - capture all four DP controller register blocks
 * (AHB, AUX, link, pixel/P0) into a display snapshot for debugging.
 */
void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
			struct dp_catalog_private, dp_catalog);
	struct dss_io_data *dss = &catalog->io->dp_controller;

	msm_disp_snapshot_add_block(disp_state, dss->ahb.len, dss->ahb.base, "dp_ahb");
	msm_disp_snapshot_add_block(disp_state, dss->aux.len, dss->aux.base, "dp_aux");
	msm_disp_snapshot_add_block(disp_state, dss->link.len, dss->link.base, "dp_link");
	msm_disp_snapshot_add_block(disp_state, dss->p0.len, dss->p0.base, "dp_p0");
}
67
/* Read a register in the AUX block (no ordering guarantee needed) */
static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io->dp_controller.aux.base + offset);
}
72
/* Write a register in the AUX block */
static inline void dp_write_aux(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure aux reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.aux.base + offset);
}
82
/* Read a register in the AHB block (no ordering guarantee needed) */
static inline u32 dp_read_ahb(struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io->dp_controller.ahb.base + offset);
}
87
/* Write a register in the AHB block */
static inline void dp_write_ahb(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure phy reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.ahb.base + offset);
}
97
/* Write a register in the P0 (pixel/interface) block */
static inline void dp_write_p0(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure interface reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.p0.base + offset);
}
107
/*
 * Read a register in the P0 (pixel/interface) block.
 *
 * NOTE(review): the original comment here claimed writel() was used, which
 * was copy-pasted from the write helper; this is a relaxed read.
 */
static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
			       u32 offset)
{
	return readl_relaxed(catalog->io->dp_controller.p0.base + offset);
}
117
/* Read a register in the link block (no ordering guarantee needed) */
static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io->dp_controller.link.base + offset);
}
122
/* Write a register in the link block */
static inline void dp_write_link(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure link reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.link.base + offset);
}
132
133 /* aux related catalog functions */
dp_catalog_aux_read_data(struct dp_catalog * dp_catalog)134 u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
135 {
136 struct dp_catalog_private *catalog = container_of(dp_catalog,
137 struct dp_catalog_private, dp_catalog);
138
139 return dp_read_aux(catalog, REG_DP_AUX_DATA);
140 }
141
dp_catalog_aux_write_data(struct dp_catalog * dp_catalog)142 int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
143 {
144 struct dp_catalog_private *catalog = container_of(dp_catalog,
145 struct dp_catalog_private, dp_catalog);
146
147 dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
148 return 0;
149 }
150
/*
 * Write dp_catalog->aux_data into the AUX transaction control register
 * (caller pre-loads aux_data with the desired control bits). Always
 * returns 0.
 */
int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
	return 0;
}
159
/*
 * Stop the current AUX transaction. For a read, only the GO bit is
 * cleared (read-modify-write); for a write, the whole transaction
 * control register is zeroed. Always returns 0.
 */
int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
{
	struct dp_catalog_private *priv =
		container_of(dp_catalog, struct dp_catalog_private, dp_catalog);
	u32 trans_ctrl = 0;

	if (read) {
		trans_ctrl = dp_read_aux(priv, REG_DP_AUX_TRANS_CTRL);
		trans_ctrl &= ~DP_AUX_TRANS_CTRL_GO;
	}

	dp_write_aux(priv, REG_DP_AUX_TRANS_CTRL, trans_ctrl);
	return 0;
}
175
/*
 * Clear latched PHY AUX interrupts: read the status register, then pulse
 * the clear register (0x1f -> 0x9f -> 0). The three-step write sequence
 * is hardware-mandated ordering; do not reorder. Always returns 0.
 */
int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
	return 0;
}
187
188 /**
189 * dp_catalog_aux_reset() - reset AUX controller
190 *
191 * @dp_catalog: DP catalog structure
192 *
193 * return: void
194 *
195 * This function reset AUX controller
196 *
197 * NOTE: reset AUX controller will also clear any pending HPD related interrupts
198 *
199 */
dp_catalog_aux_reset(struct dp_catalog * dp_catalog)200 void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
201 {
202 u32 aux_ctrl;
203 struct dp_catalog_private *catalog = container_of(dp_catalog,
204 struct dp_catalog_private, dp_catalog);
205
206 aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
207
208 aux_ctrl |= DP_AUX_CTRL_RESET;
209 dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
210 usleep_range(1000, 1100); /* h/w recommended delay */
211
212 aux_ctrl &= ~DP_AUX_CTRL_RESET;
213 dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
214 }
215
/*
 * Enable or disable the AUX controller. On enable, the AUX timeout
 * count and limits are programmed to their maximum (0xffff) before the
 * enable bit is set; on disable only the enable bit is cleared.
 */
void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
{
	struct dp_catalog_private *priv =
		container_of(dp_catalog, struct dp_catalog_private, dp_catalog);
	u32 ctrl = dp_read_aux(priv, REG_DP_AUX_CTRL);

	if (!enable) {
		dp_write_aux(priv, REG_DP_AUX_CTRL, ctrl & ~DP_AUX_CTRL_ENABLE);
		return;
	}

	dp_write_aux(priv, REG_DP_TIMEOUT_COUNT, 0xffff);
	dp_write_aux(priv, REG_DP_AUX_LIMITS, 0xffff);
	dp_write_aux(priv, REG_DP_AUX_CTRL, ctrl | DP_AUX_CTRL_ENABLE);
}
234
dp_catalog_aux_update_cfg(struct dp_catalog * dp_catalog)235 void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog)
236 {
237 struct dp_catalog_private *catalog = container_of(dp_catalog,
238 struct dp_catalog_private, dp_catalog);
239 struct dp_io *dp_io = catalog->io;
240 struct phy *phy = dp_io->phy;
241
242 phy_calibrate(phy);
243 }
244
/*
 * Dump @len bytes of registers starting at @base, 16 bytes (four words)
 * per log line, prefixed with the offset from @base.
 */
static void dump_regs(void __iomem *base, int len)
{
	int i;
	u32 x0, x4, x8, xc;
	u32 addr_off = 0;

	len = DIV_ROUND_UP(len, 16);
	for (i = 0; i < len; i++) {
		x0 = readl_relaxed(base + addr_off);
		x4 = readl_relaxed(base + addr_off + 0x04);
		x8 = readl_relaxed(base + addr_off + 0x08);
		xc = readl_relaxed(base + addr_off + 0x0c);

		/*
		 * Fix: the format string was missing the trailing newline,
		 * which lets printk merge/defer consecutive dump lines.
		 */
		pr_info("%08x: %08x %08x %08x %08x\n", addr_off, x0, x4, x8, xc);
		addr_off += 16;
	}
}
262
/* Dump all four DP controller register blocks to the kernel log */
void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);
	struct dss_io_data *io = &catalog->io->dp_controller;

	pr_info("AHB regs\n");
	dump_regs(io->ahb.base, io->ahb.len);

	pr_info("AUXCLK regs\n");
	dump_regs(io->aux.base, io->aux.len);

	pr_info("LCLK regs\n");
	dump_regs(io->link.base, io->link.len);

	pr_info("P0CLK regs\n");
	dump_regs(io->p0.base, io->p0.len);
}
281
/*
 * Read, filter and acknowledge pending STATUS1 (AUX) interrupts.
 *
 * The mask bits share the register with the status bits, so they are
 * stripped from the returned value; the ack bits (status << 1) are then
 * written back together with the enable mask to clear the sources while
 * keeping them enabled. Returns the raw pending status bits.
 */
u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
	intr &= ~DP_INTERRUPT_STATUS1_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS1)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
			DP_INTERRUPT_STATUS1_MASK);

	return intr;

}
298
299 /* controller related catalog functions */
/* controller related catalog functions */

/*
 * Program the transfer-unit size and the two valid-boundary registers
 * computed by the TU algorithm in the link layer.
 */
void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
				u32 dp_tu, u32 valid_boundary,
				u32 valid_boundary2)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
	dp_write_link(catalog, REG_DP_TU, dp_tu);
	dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
}
311
/* Write @state (a DP_STATE_CTRL_* value) into the link state machine */
void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_STATE_CTRL, state);
}
319
/* Program the link configuration register with caller-built @cfg bits */
void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	DRM_DEBUG_DP("DP_CONFIGURATION_CTRL=0x%x\n", cfg);

	dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
}
329
dp_catalog_ctrl_lane_mapping(struct dp_catalog * dp_catalog)330 void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
331 {
332 struct dp_catalog_private *catalog = container_of(dp_catalog,
333 struct dp_catalog_private, dp_catalog);
334 u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
335 u32 ln_mapping;
336
337 ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
338 ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
339 ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
340 ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
341
342 dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
343 ln_mapping);
344 }
345
/*
 * Enable or disable the mainlink. The enable path performs a full
 * reset pulse first: clear RESET+ENABLE, assert RESET, deassert RESET,
 * then set ENABLE together with FB_BOUNDARY_SEL. This write sequence is
 * order-sensitive; do not reorder or coalesce the writes.
 */
void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
						bool enable)
{
	u32 mainlink_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	DRM_DEBUG_DP("enable=%d\n", enable);
	if (enable) {
		/*
		 * To make sure link reg writes happens before other operation,
		 * dp_write_link() function uses writel()
		 */
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
						DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
					DP_MAINLINK_FB_BOUNDARY_SEL);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	} else {
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	}
}
380
/*
 * Program the MSA MISC0/MISC1 register with colorimetry and test bit
 * depth, forcing synchronous-clock mode.
 *
 * NOTE(review): only the 3-bit test-bit-depth field is cleared before
 * OR-ing; the colorimetry bits are OR-ed into whatever was there —
 * presumably callers only ever widen/refresh the same value. Verify if
 * colorimetry can change at runtime.
 */
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	DRM_DEBUG_DP("misc settings = 0x%x\n", misc_val);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}
401
/*
 * Compute and program the software MVID/NVID timestamp values for the
 * Main Stream Attributes from the link rate and pixel stream rate.
 *
 * @rate:            link rate in kHz (162000/270000/540000/810000;
 *                   1620000/270000 accepted for the pixel_div=2 case)
 * @stream_rate_khz: pixel clock in kHz
 * @fixed_nvid:      currently unused; NVID is always raised to at least
 *                   DP_LINK_CONSTANT_N_VALUE below
 *
 * Fix: previously an unrecognized @rate only logged an error and then
 * fell through to "/ pixel_div" with pixel_div == 0 — a division by
 * zero. Now the function returns early without touching the hardware.
 */
void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
					u32 rate, u32 stream_rate_khz,
					bool fixed_nvid)
{
	u32 pixel_m, pixel_n;
	u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
	u32 const link_rate_hbr2 = 540000;
	u32 const link_rate_hbr3 = 810000;
	unsigned long den, num;

	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (rate == link_rate_hbr3) {
		pixel_div = 6;
	} else if (rate == 1620000 || rate == 270000) {
		pixel_div = 2;
	} else if (rate == link_rate_hbr2) {
		pixel_div = 4;
	} else {
		/* avoid the division by zero below */
		DRM_ERROR("Invalid pixel mux divider\n");
		return;
	}

	dispcc_input_rate = (rate * 10) / pixel_div;

	/* best 16-bit rational approximation of link/stream clock ratio */
	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
			(unsigned long)(1 << 16) - 1,
			(unsigned long)(1 << 16) - 1, &den, &num);

	den = ~(den - num);
	den = den & 0xFFFF;
	pixel_m = num;
	pixel_n = den;

	mvid = (pixel_m & 0xFFFF) * 5;
	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);

	/* scale both up so NVID is at least the DP constant N value */
	if (nvid < nvid_fixed) {
		u32 temp;

		temp = (nvid_fixed / nvid) * nvid;
		mvid = (nvid_fixed / nvid) * mvid;
		nvid = temp;
	}

	if (link_rate_hbr2 == rate)
		nvid *= 2;

	if (link_rate_hbr3 == rate)
		nvid *= 3;

	DRM_DEBUG_DP("mvid=0x%x, nvid=0x%x\n", mvid, nvid);
	dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
	dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
	dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
}
458
/*
 * Kick off link training pattern @pattern (1-based) via the state
 * control register, then poll REG_DP_MAINLINK_READY until the hardware
 * reports the corresponding training-ready bit.
 *
 * Returns 0 on success or the (negative) poll-timeout error.
 */
int dp_catalog_ctrl_set_pattern(struct dp_catalog *dp_catalog,
					u32 pattern)
{
	int bit, ret;
	u32 data;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	bit = BIT(pattern - 1);
	DRM_DEBUG_DP("hw: bit=%d train=%d\n", bit, pattern);
	dp_catalog_ctrl_state_ctrl(dp_catalog, bit);

	/* ready bits live above the training-pattern bits */
	bit = BIT(pattern - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;

	/* Poll for mainlink ready status */
	ret = readx_poll_timeout(readl, catalog->io->dp_controller.link.base +
					REG_DP_MAINLINK_READY,
					data, data & bit,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("set pattern for link_train=%d failed\n", pattern);
		return ret;
	}
	return 0;
}
484
485 /**
486 * dp_catalog_ctrl_reset() - reset DP controller
487 *
488 * @dp_catalog: DP catalog structure
489 *
490 * return: void
491 *
492 * This function reset the DP controller
493 *
494 * NOTE: reset DP controller will also clear any pending HPD related interrupts
495 *
496 */
dp_catalog_ctrl_reset(struct dp_catalog * dp_catalog)497 void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
498 {
499 u32 sw_reset;
500 struct dp_catalog_private *catalog = container_of(dp_catalog,
501 struct dp_catalog_private, dp_catalog);
502
503 sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);
504
505 sw_reset |= DP_SW_RESET;
506 dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
507 usleep_range(1000, 1100); /* h/w recommended delay */
508
509 sw_reset &= ~DP_SW_RESET;
510 dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
511 }
512
/*
 * Poll REG_DP_MAINLINK_READY until the READY_FOR_VIDEO bit is set.
 * Returns true when the mainlink is ready, false on poll timeout.
 */
bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
{
	u32 data;
	int ret;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Poll for mainlink ready status */
	ret = readl_poll_timeout(catalog->io->dp_controller.link.base +
					REG_DP_MAINLINK_READY,
					data, data & DP_MAINLINK_READY_FOR_VIDEO,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("mainlink not ready\n");
		return false;
	}

	return true;
}
532
/*
 * Enable or disable both controller interrupt groups by programming the
 * mask bits of REG_DP_INTR_STATUS and REG_DP_INTR_STATUS2 (all sources
 * on, or all off).
 */
void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
						bool enable)
{
	struct dp_catalog_private *priv =
		container_of(dp_catalog, struct dp_catalog_private, dp_catalog);
	u32 mask1 = enable ? DP_INTERRUPT_STATUS1_MASK : 0x00;
	u32 mask2 = enable ? DP_INTERRUPT_STATUS2_MASK : 0x00;

	dp_write_ahb(priv, REG_DP_INTR_STATUS, mask1);
	dp_write_ahb(priv, REG_DP_INTR_STATUS2, mask2);
}
549
/*
 * Set or clear bits of @intr_mask in the HPD interrupt mask register,
 * preserving the other mask bits. The final value is clamped to the
 * valid DP_DP_HPD_INT_MASK bits before being written.
 */
void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
			u32 intr_mask, bool en)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);

	config = (en ? config | intr_mask : config & ~intr_mask);

	DRM_DEBUG_DP("intr_mask=%#x config=%#x\n", intr_mask, config);
	dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
				config & DP_DP_HPD_INT_MASK);
}
564
/*
 * One-time HPD setup: unmask plug/unplug interrupts, enable the HPD
 * reference timer (current timer value preserved), then enable HPD
 * itself.
 */
void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	/* enable HPD plug and unplug interrupts */
	dp_catalog_hpd_config_intr(dp_catalog,
		DP_DP_HPD_PLUG_INT_MASK | DP_DP_HPD_UNPLUG_INT_MASK, true);

	/* Configure REFTIMER and enable it */
	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	/* Enable HPD */
	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
}
583
dp_catalog_link_is_connected(struct dp_catalog * dp_catalog)584 u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
585 {
586 struct dp_catalog_private *catalog = container_of(dp_catalog,
587 struct dp_catalog_private, dp_catalog);
588 u32 status;
589
590 status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
591 DRM_DEBUG_DP("aux status: %#x\n", status);
592 status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
593 status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
594
595 return status;
596 }
597
/*
 * Read the pending HPD interrupt status and acknowledge the maskable
 * bits. Returns the full raw status value (including the state bits).
 *
 * NOTE(review): isr is declared int but holds a register value and is
 * returned as u32 — harmless here, but u32 would be cleaner.
 */
u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	int isr = 0;

	isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
	dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
				 (isr & DP_DP_HPD_INT_MASK));

	return isr;
}
610
/*
 * Read, filter and acknowledge pending STATUS2 (video/stream)
 * interrupts; same ack scheme as dp_catalog_aux_get_irq() but for
 * REG_DP_INTR_STATUS2. Returns the raw pending status bits.
 */
int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
	intr &= ~DP_INTERRUPT_STATUS2_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS2)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
			intr_ack | DP_INTERRUPT_STATUS2_MASK);

	return intr;
}
626
/*
 * Pulse the PHY software reset (both PHY and PLL reset bits) with the
 * hardware-recommended ~1 ms hold time, then release.
 */
void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_ahb(catalog, REG_DP_PHY_CTRL,
			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
	usleep_range(1000, 1100); /* h/w recommended delay */
	dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
}
637
/*
 * Push new voltage swing (@v_level) and pre-emphasis (@p_level) values
 * to the PHY via phy_configure(). set_voltages is raised only for the
 * duration of the call so later phy_configure() calls don't re-apply
 * stale levels. Always returns 0.
 */
int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
		u8 v_level, u8 p_level)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	struct dp_io *dp_io = catalog->io;
	struct phy *phy = dp_io->phy;
	struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp;

	/* TODO: Update for all lanes instead of just first one */
	opts_dp->voltage[0] = v_level;
	opts_dp->pre[0] = p_level;
	opts_dp->set_voltages = 1;
	phy_configure(phy, &dp_io->phy_opts);
	opts_dp->set_voltages = 0;

	return 0;
}
656
/*
 * Start transmitting the requested DP PHY compliance test pattern.
 * Each case programs an order-sensitive register sequence; unknown
 * patterns are logged and ignored.
 */
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
			u32 pattern)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	DRM_DEBUG_DP("pattern: %#x\n", pattern);
	switch (pattern) {
	case DP_PHY_TEST_PATTERN_D10_2:
		/* D10.2 is generated by training pattern 1 without scrambling */
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		/*
		 * NOTE(review): value is still 0 here, so the &= is a no-op;
		 * the first write clears the register before the count is set.
		 */
		value &= ~(1 << 16);
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		/* scrambler must not be bypassed for CP2520 */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);

		value = DP_HBR2_ERM_PATTERN;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		/* re-enable the mainlink last */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value |= DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
		break;
	case DP_PHY_TEST_PATTERN_SEL_MASK:
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
		break;
	default:
		DRM_DEBUG_DP("No valid test pattern requested: %#x\n", pattern);
		break;
	}
}
732
dp_catalog_ctrl_read_phy_pattern(struct dp_catalog * dp_catalog)733 u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
734 {
735 struct dp_catalog_private *catalog = container_of(dp_catalog,
736 struct dp_catalog_private, dp_catalog);
737
738 return dp_read_link(catalog, REG_DP_MAINLINK_READY);
739 }
740
741 /* panel related catalog functions */
/* panel related catalog functions */

/*
 * Program the panel timing registers from the pre-computed values the
 * caller stored in dp_catalog (total, sync_start, width_blanking,
 * dp_active), and clear the P0 interface config. Always returns 0.
 */
int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
				dp_catalog->total);
	dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
				dp_catalog->sync_start);
	dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
				dp_catalog->width_blanking);
	dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0);
	return 0;
}
757
/*
 * Enable the test pattern generator: derive the INTF timing values from
 * @drm_mode, program the P0 timing engine, select the checkered-rect
 * pattern (8bpp RGB) and start BIST plus the timing engine.
 */
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters*/
	hsync_period = drm_mode->htotal;
	vsync_period = drm_mode->vtotal;

	/* vertical active window, expressed in hsync-period units */
	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	/* shift start/end by the horizontal back/front porch offsets */
	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	/* period in the high half-word, pulse width in the low */
	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	display_hctl = (hsync_end_x << 16) | hsync_start_x;


	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	/* field 1 (interlace) timing unused: zeroed */
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	DRM_DEBUG_DP("%s: enabled tpg\n", __func__);
}
825
/* Stop the test pattern generator: clear TPG, BIST and timing engine */
void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
}
835
dp_catalog_get(struct device * dev,struct dp_io * io)836 struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
837 {
838 struct dp_catalog_private *catalog;
839
840 if (!io) {
841 DRM_ERROR("invalid input\n");
842 return ERR_PTR(-EINVAL);
843 }
844
845 catalog = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
846 if (!catalog)
847 return ERR_PTR(-ENOMEM);
848
849 catalog->dev = dev;
850 catalog->io = io;
851
852 return &catalog->dp_catalog;
853 }
854
/*
 * Read one audio SDP header word. The caller selects which via
 * dp_catalog->sdp_type / sdp_header; the register offset comes from the
 * audio_map lookup table and the result lands in dp_catalog->audio_data.
 */
void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
	enum dp_catalog_audio_sdp_type sdp;
	enum dp_catalog_audio_header_type header;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	sdp_map = catalog->audio_map;
	sdp = dp_catalog->sdp_type;
	header = dp_catalog->sdp_header;

	dp_catalog->audio_data = dp_read_link(catalog,
			sdp_map[sdp][header]);
}
875
/*
 * Write one audio SDP header word: the counterpart of
 * dp_catalog_audio_get_header(); dp_catalog->audio_data is written to
 * the register selected by sdp_type / sdp_header via audio_map.
 */
void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
	enum dp_catalog_audio_sdp_type sdp;
	enum dp_catalog_audio_header_type header;
	u32 data;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	sdp_map = catalog->audio_map;
	sdp = dp_catalog->sdp_type;
	header = dp_catalog->sdp_header;
	data = dp_catalog->audio_data;

	dp_write_link(catalog, sdp_map[sdp][header], data);
}
897
dp_catalog_audio_config_acr(struct dp_catalog * dp_catalog)898 void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
899 {
900 struct dp_catalog_private *catalog;
901 u32 acr_ctrl, select;
902
903 if (!dp_catalog)
904 return;
905
906 catalog = container_of(dp_catalog,
907 struct dp_catalog_private, dp_catalog);
908
909 select = dp_catalog->audio_data;
910 acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
911
912 DRM_DEBUG_DP("select: %#x, acr_ctrl: %#x\n", select, acr_ctrl);
913
914 dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
915 }
916
dp_catalog_audio_enable(struct dp_catalog * dp_catalog)917 void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
918 {
919 struct dp_catalog_private *catalog;
920 bool enable;
921 u32 audio_ctrl;
922
923 if (!dp_catalog)
924 return;
925
926 catalog = container_of(dp_catalog,
927 struct dp_catalog_private, dp_catalog);
928
929 enable = !!dp_catalog->audio_data;
930 audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
931
932 if (enable)
933 audio_ctrl |= BIT(0);
934 else
935 audio_ctrl &= ~BIT(0);
936
937 DRM_DEBUG_DP("dp_audio_cfg = 0x%x\n", audio_ctrl);
938
939 dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
940 /* make sure audio engine is disabled */
941 wmb();
942 }
943
dp_catalog_audio_config_sdp(struct dp_catalog * dp_catalog)944 void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
945 {
946 struct dp_catalog_private *catalog;
947 u32 sdp_cfg = 0;
948 u32 sdp_cfg2 = 0;
949
950 if (!dp_catalog)
951 return;
952
953 catalog = container_of(dp_catalog,
954 struct dp_catalog_private, dp_catalog);
955
956 sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
957 /* AUDIO_TIMESTAMP_SDP_EN */
958 sdp_cfg |= BIT(1);
959 /* AUDIO_STREAM_SDP_EN */
960 sdp_cfg |= BIT(2);
961 /* AUDIO_COPY_MANAGEMENT_SDP_EN */
962 sdp_cfg |= BIT(5);
963 /* AUDIO_ISRC_SDP_EN */
964 sdp_cfg |= BIT(6);
965 /* AUDIO_INFOFRAME_SDP_EN */
966 sdp_cfg |= BIT(20);
967
968 DRM_DEBUG_DP("sdp_cfg = 0x%x\n", sdp_cfg);
969
970 dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
971
972 sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
973 /* IFRM_REGSRC -> Do not use reg values */
974 sdp_cfg2 &= ~BIT(0);
975 /* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
976 sdp_cfg2 &= ~BIT(1);
977
978 DRM_DEBUG_DP("sdp_cfg2 = 0x%x\n", sdp_cfg2);
979
980 dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
981 }
982
/*
 * Populate catalog->audio_map with the link-register offsets used by the
 * audio SDP get/set helpers (dp_catalog_audio_get_header/set_header).
 */
void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;

	/*
	 * Rows are indexed by enum dp_catalog_audio_sdp_type, columns by
	 * enum dp_catalog_audio_header_type (DP_AUDIO_SDP_HEADER_MAX wide).
	 * The second and third columns name the same _1 register in every
	 * row — presumably two header bytes share one register at different
	 * byte lanes; NOTE(review): confirm against the HW register spec.
	 */
	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
		{
			MMSS_DP_AUDIO_STREAM_0,
			MMSS_DP_AUDIO_STREAM_1,
			MMSS_DP_AUDIO_STREAM_1,
		},
		{
			MMSS_DP_AUDIO_TIMESTAMP_0,
			MMSS_DP_AUDIO_TIMESTAMP_1,
			MMSS_DP_AUDIO_TIMESTAMP_1,
		},
		{
			MMSS_DP_AUDIO_INFOFRAME_0,
			MMSS_DP_AUDIO_INFOFRAME_1,
			MMSS_DP_AUDIO_INFOFRAME_1,
		},
		{
			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
		},
		{
			MMSS_DP_AUDIO_ISRC_0,
			MMSS_DP_AUDIO_ISRC_1,
			MMSS_DP_AUDIO_ISRC_1,
		},
	};

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	/* Table is static, so the pointer stays valid for the device's life. */
	catalog->audio_map = sdp_map;
}
1023
dp_catalog_audio_sfe_level(struct dp_catalog * dp_catalog)1024 void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
1025 {
1026 struct dp_catalog_private *catalog;
1027 u32 mainlink_levels, safe_to_exit_level;
1028
1029 if (!dp_catalog)
1030 return;
1031
1032 catalog = container_of(dp_catalog,
1033 struct dp_catalog_private, dp_catalog);
1034
1035 safe_to_exit_level = dp_catalog->audio_data;
1036 mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1037 mainlink_levels &= 0xFE0;
1038 mainlink_levels |= safe_to_exit_level;
1039
1040 DRM_DEBUG_DP("mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1041 mainlink_levels, safe_to_exit_level);
1042
1043 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
1044 }
1045