Loading...
Note: File does not exist in v3.1.
1// SPDX-License-Identifier: GPL-2.0-only
2/*
3 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4 */
5
6#define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__
7
8#include <linux/delay.h>
9#include <linux/iopoll.h>
10#include <linux/phy/phy.h>
11#include <linux/phy/phy-dp.h>
12#include <linux/rational.h>
13#include <drm/display/drm_dp_helper.h>
14#include <drm/drm_print.h>
15
16#include "dp_catalog.h"
17#include "dp_reg.h"
18
19#define POLLING_SLEEP_US 1000
20#define POLLING_TIMEOUT_US 10000
21
22#define SCRAMBLER_RESET_COUNT_VALUE 0xFC
23
24#define DP_INTERRUPT_STATUS_ACK_SHIFT 1
25#define DP_INTERRUPT_STATUS_MASK_SHIFT 2
26
27#define DP_INTF_CONFIG_DATABUS_WIDEN BIT(4)
28
29#define DP_INTERRUPT_STATUS1 \
30 (DP_INTR_AUX_I2C_DONE| \
31 DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
32 DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
33 DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
34 DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
35
36#define DP_INTERRUPT_STATUS1_ACK \
37 (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
38#define DP_INTERRUPT_STATUS1_MASK \
39 (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
40
41#define DP_INTERRUPT_STATUS2 \
42 (DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
43 DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
44
45#define DP_INTERRUPT_STATUS2_ACK \
46 (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
47#define DP_INTERRUPT_STATUS2_MASK \
48 (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
49
50struct dp_catalog_private {
51 struct device *dev;
52 struct drm_device *drm_dev;
53 struct dp_io *io;
54 u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
55 struct dp_catalog dp_catalog;
56 u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
57};
58
/* Capture all four DP register regions (AHB, AUX, link, P0) into the
 * display snapshot used for debugging.
 */
void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
			struct dp_catalog_private, dp_catalog);
	struct dss_io_data *dss = &catalog->io->dp_controller;

	msm_disp_snapshot_add_block(disp_state, dss->ahb.len, dss->ahb.base, "dp_ahb");
	msm_disp_snapshot_add_block(disp_state, dss->aux.len, dss->aux.base, "dp_aux");
	msm_disp_snapshot_add_block(disp_state, dss->link.len, dss->link.base, "dp_link");
	msm_disp_snapshot_add_block(disp_state, dss->p0.len, dss->p0.base, "dp_p0");
}
70
/* Read a register in the AUX region (relaxed: no memory barrier). */
static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io->dp_controller.aux.base + offset);
}
75
/* Write a register in the AUX region. */
static inline void dp_write_aux(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure aux reg writes happens before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.aux.base + offset);
}
85
/* Read a register in the AHB region (relaxed: no memory barrier). */
static inline u32 dp_read_ahb(const struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io->dp_controller.ahb.base + offset);
}
90
/* Write a register in the AHB region. */
static inline void dp_write_ahb(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure ahb reg writes happens before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.ahb.base + offset);
}
100
/* Write a register in the P0 (stream interface) region. */
static inline void dp_write_p0(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure interface reg writes happens before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.p0.base + offset);
}
110
/* Read a register in the P0 (stream interface) region. */
static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
			       u32 offset)
{
	/* Relaxed read: no ordering guarantee against other accesses. */
	return readl_relaxed(catalog->io->dp_controller.p0.base + offset);
}
120
/* Read a register in the link region (relaxed: no memory barrier). */
static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io->dp_controller.link.base + offset);
}
125
/* Write a register in the link region. */
static inline void dp_write_link(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure link reg writes happens before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.link.base + offset);
}
135
136/* aux related catalog functions */
137u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
138{
139 struct dp_catalog_private *catalog = container_of(dp_catalog,
140 struct dp_catalog_private, dp_catalog);
141
142 return dp_read_aux(catalog, REG_DP_AUX_DATA);
143}
144
/* Write the cached dp_catalog->aux_data value into the AUX data register.
 * Always returns 0.
 */
int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
	return 0;
}
153
/* Write the cached dp_catalog->aux_data value into the AUX transfer
 * control register (kicks off/configures an AUX transaction).
 * Always returns 0.
 */
int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
	return 0;
}
162
/* Clear the AUX transaction control register.
 *
 * @read: when true, only clears the GO bit (read-modify-write), leaving
 *        the rest of the transaction configuration in place; when false,
 *        zeroes the whole register.
 * Always returns 0.
 */
int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
{
	u32 data;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (read) {
		data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
		data &= ~DP_AUX_TRANS_CTRL_GO;
		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
	} else {
		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
	}
	return 0;
}
178
/* Clear pending PHY AUX interrupts.
 *
 * Reads the status register first, then issues the 0x1f / 0x9f / 0
 * write sequence to the clear register. NOTE(review): the specific
 * values appear to be a hardware-mandated clear sequence — not
 * derivable from this file; confirm against the PHY programming guide.
 * Always returns 0.
 */
int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
	return 0;
}
190
191/**
192 * dp_catalog_aux_reset() - reset AUX controller
193 *
194 * @dp_catalog: DP catalog structure
195 *
196 * return: void
197 *
 * This function resets the AUX controller
199 *
200 * NOTE: reset AUX controller will also clear any pending HPD related interrupts
201 *
202 */
void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	/* assert reset, hold it for the h/w recommended time, then release */
	aux_ctrl |= DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
	usleep_range(1000, 1100); /* h/w recommended delay */

	aux_ctrl &= ~DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
218
/* Enable or disable the AUX controller.
 *
 * On enable, the timeout count and transfer limits are programmed to
 * their maximum (0xffff) before setting the enable bit.
 */
void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	if (enable) {
		dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
		dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
		aux_ctrl |= DP_AUX_CTRL_ENABLE;
	} else {
		aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
	}

	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
237
238void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog)
239{
240 struct dp_catalog_private *catalog = container_of(dp_catalog,
241 struct dp_catalog_private, dp_catalog);
242 struct dp_io *dp_io = catalog->io;
243 struct phy *phy = dp_io->phy;
244
245 phy_calibrate(phy);
246}
247
/* Block until the HPD status register reports "connected".
 *
 * Returns 0 on success or -ETIMEDOUT (from readl_poll_timeout) if the
 * sink does not report connected within 500ms.
 */
int dp_catalog_aux_wait_for_hpd_connect_state(struct dp_catalog *dp_catalog)
{
	u32 state;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* poll for hpd connected status every 2ms and timeout after 500ms */
	return readl_poll_timeout(catalog->io->dp_controller.aux.base +
				REG_DP_DP_HPD_INT_STATUS,
				state, state & DP_DP_HPD_STATE_STATUS_CONNECTED,
				2000, 500000);
}
260
261static void dump_regs(void __iomem *base, int len)
262{
263 int i;
264 u32 x0, x4, x8, xc;
265 u32 addr_off = 0;
266
267 len = DIV_ROUND_UP(len, 16);
268 for (i = 0; i < len; i++) {
269 x0 = readl_relaxed(base + addr_off);
270 x4 = readl_relaxed(base + addr_off + 0x04);
271 x8 = readl_relaxed(base + addr_off + 0x08);
272 xc = readl_relaxed(base + addr_off + 0x0c);
273
274 pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
275 addr_off += 16;
276 }
277}
278
/* Dump every DP controller register region to the kernel log. */
void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);
	struct dss_io_data *io = &catalog->io->dp_controller;

	pr_info("AHB regs\n");
	dump_regs(io->ahb.base, io->ahb.len);

	pr_info("AUXCLK regs\n");
	dump_regs(io->aux.base, io->aux.len);

	pr_info("LCLK regs\n");
	dump_regs(io->link.base, io->link.len);

	pr_info("P0CLK regs\n");
	dump_regs(io->p0.base, io->p0.len);
}
297
/* Read, acknowledge and return the pending STATUS1 (AUX) interrupts.
 *
 * The ack bits live one position left of the status bits
 * (DP_INTERRUPT_STATUS_ACK_SHIFT) and the mask-enable bits two
 * positions left; the write below acks the asserted interrupts while
 * re-arming the mask.
 */
u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
	/* strip the mask-enable bits, keep only raw status */
	intr &= ~DP_INTERRUPT_STATUS1_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS1)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
			DP_INTERRUPT_STATUS1_MASK);

	return intr;

}
314
315/* controller related catalog functions */
/* Program the transfer unit: valid boundary registers and TU size. */
void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
				u32 dp_tu, u32 valid_boundary,
				u32 valid_boundary2)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
	dp_write_link(catalog, REG_DP_TU, dp_tu);
	dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
}
327
328void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
329{
330 struct dp_catalog_private *catalog = container_of(dp_catalog,
331 struct dp_catalog_private, dp_catalog);
332
333 dp_write_link(catalog, REG_DP_STATE_CTRL, state);
334}
335
/* Write a pre-assembled value into the DP configuration control register. */
void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	drm_dbg_dp(catalog->drm_dev, "DP_CONFIGURATION_CTRL=0x%x\n", cfg);

	dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
}
345
346void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
347{
348 struct dp_catalog_private *catalog = container_of(dp_catalog,
349 struct dp_catalog_private, dp_catalog);
350 u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
351 u32 ln_mapping;
352
353 ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
354 ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
355 ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
356 ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
357
358 dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
359 ln_mapping);
360}
361
/* Enable (with a full reset pulse) or disable the DP mainlink.
 *
 * The enable path performs an ordered sequence: clear enable+reset,
 * assert reset, deassert reset, then enable with FB boundary select.
 * The write order matters; dp_write_link() uses writel() to keep it.
 */
void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
						bool enable)
{
	u32 mainlink_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	drm_dbg_dp(catalog->drm_dev, "enable=%d\n", enable);
	if (enable) {
		/*
		 * To make sure link reg writes happens before other operation,
		 * dp_write_link() function uses writel()
		 */
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

		/* step 1: drop enable and any stale reset */
		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
						DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 2: assert reset */
		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 3: deassert reset */
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 4: enable the mainlink with FB boundary select */
		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
					DP_MAINLINK_FB_BOUNDARY_SEL);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	} else {
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	}
}
396
/* Program MISC0/MISC1: colorimetry, test bit depth and synchronous
 * clock mode. Only the bpp field is cleared before the OR-in; the
 * colorimetry field is OR-ed over whatever was there.
 */
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	drm_dbg_dp(catalog->drm_dev, "misc settings = 0x%x\n", misc_val);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}
417
418void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
419 u32 rate, u32 stream_rate_khz,
420 bool fixed_nvid)
421{
422 u32 pixel_m, pixel_n;
423 u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
424 u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
425 u32 const link_rate_hbr2 = 540000;
426 u32 const link_rate_hbr3 = 810000;
427 unsigned long den, num;
428
429 struct dp_catalog_private *catalog = container_of(dp_catalog,
430 struct dp_catalog_private, dp_catalog);
431
432 if (rate == link_rate_hbr3)
433 pixel_div = 6;
434 else if (rate == 162000 || rate == 270000)
435 pixel_div = 2;
436 else if (rate == link_rate_hbr2)
437 pixel_div = 4;
438 else
439 DRM_ERROR("Invalid pixel mux divider\n");
440
441 dispcc_input_rate = (rate * 10) / pixel_div;
442
443 rational_best_approximation(dispcc_input_rate, stream_rate_khz,
444 (unsigned long)(1 << 16) - 1,
445 (unsigned long)(1 << 16) - 1, &den, &num);
446
447 den = ~(den - num);
448 den = den & 0xFFFF;
449 pixel_m = num;
450 pixel_n = den;
451
452 mvid = (pixel_m & 0xFFFF) * 5;
453 nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
454
455 if (nvid < nvid_fixed) {
456 u32 temp;
457
458 temp = (nvid_fixed / nvid) * nvid;
459 mvid = (nvid_fixed / nvid) * mvid;
460 nvid = temp;
461 }
462
463 if (link_rate_hbr2 == rate)
464 nvid *= 2;
465
466 if (link_rate_hbr3 == rate)
467 nvid *= 3;
468
469 drm_dbg_dp(catalog->drm_dev, "mvid=0x%x, nvid=0x%x\n", mvid, nvid);
470 dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
471 dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
472 dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
473}
474
/* Kick off the link-training pattern selected by @state_bit (1-based)
 * and wait for the corresponding mainlink-ready bit.
 *
 * Returns 0 on success, or the negative error from readx_poll_timeout
 * (e.g. -ETIMEDOUT) if the hardware never reports ready.
 */
int dp_catalog_ctrl_set_pattern_state_bit(struct dp_catalog *dp_catalog,
					u32 state_bit)
{
	int bit, ret;
	u32 data;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	bit = BIT(state_bit - 1);
	drm_dbg_dp(catalog->drm_dev, "hw: bit=%d train=%d\n", bit, state_bit);
	dp_catalog_ctrl_state_ctrl(dp_catalog, bit);

	/* the ready bits sit at a fixed offset from the state bits */
	bit = BIT(state_bit - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;

	/* Poll for mainlink ready status */
	ret = readx_poll_timeout(readl, catalog->io->dp_controller.link.base +
					REG_DP_MAINLINK_READY,
					data, data & bit,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("set state_bit for link_train=%d failed\n", state_bit);
		return ret;
	}
	return 0;
}
500
501/**
502 * dp_catalog_hw_revision() - retrieve DP hw revision
503 *
504 * @dp_catalog: DP catalog structure
505 *
506 * Return: DP controller hw revision
507 *
508 */
509u32 dp_catalog_hw_revision(const struct dp_catalog *dp_catalog)
510{
511 const struct dp_catalog_private *catalog = container_of(dp_catalog,
512 struct dp_catalog_private, dp_catalog);
513
514 return dp_read_ahb(catalog, REG_DP_HW_VERSION);
515}
516
517/**
518 * dp_catalog_ctrl_reset() - reset DP controller
519 *
520 * @dp_catalog: DP catalog structure
521 *
522 * return: void
523 *
 * This function resets the DP controller
525 *
526 * NOTE: reset DP controller will also clear any pending HPD related interrupts
527 *
528 */
void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
{
	u32 sw_reset;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);

	/* assert reset, hold it for the h/w recommended time, then release */
	sw_reset |= DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
	usleep_range(1000, 1100); /* h/w recommended delay */

	sw_reset &= ~DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
}
544
/* Poll until the mainlink reports ready-for-video.
 *
 * Returns true on success, false if the ready bit is not set within
 * POLLING_TIMEOUT_US.
 */
bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
{
	u32 data;
	int ret;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Poll for mainlink ready status */
	ret = readl_poll_timeout(catalog->io->dp_controller.link.base +
				REG_DP_MAINLINK_READY,
				data, data & DP_MAINLINK_READY_FOR_VIDEO,
				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("mainlink not ready\n");
		return false;
	}

	return true;
}
564
/* Unmask (enable) or mask (disable) both DP controller interrupt groups. */
void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
						bool enable)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (enable) {
		dp_write_ahb(catalog, REG_DP_INTR_STATUS,
				DP_INTERRUPT_STATUS1_MASK);
		dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
				DP_INTERRUPT_STATUS2_MASK);
	} else {
		/* writing 0 clears every mask-enable bit */
		dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
		dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
	}
}
581
582void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
583 u32 intr_mask, bool en)
584{
585 struct dp_catalog_private *catalog = container_of(dp_catalog,
586 struct dp_catalog_private, dp_catalog);
587
588 u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
589
590 config = (en ? config | intr_mask : config & ~intr_mask);
591
592 drm_dbg_dp(catalog->drm_dev, "intr_mask=%#x config=%#x\n",
593 intr_mask, config);
594 dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
595 config & DP_DP_HPD_INT_MASK);
596}
597
/* Enable HPD detection: turn on the reference timer, then the HPD block. */
void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	/* Configure REFTIMER and enable it */
	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	/* Enable HPD */
	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
}
612
613u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
614{
615 struct dp_catalog_private *catalog = container_of(dp_catalog,
616 struct dp_catalog_private, dp_catalog);
617 u32 status;
618
619 status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
620 drm_dbg_dp(catalog->drm_dev, "aux status: %#x\n", status);
621 status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
622 status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
623
624 return status;
625}
626
/* Read and acknowledge the HPD interrupt status, returning only the
 * unmasked interrupt bits plus the informational state bits.
 */
u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	int isr, mask;

	isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
	/* ack only the actual interrupt bits, not the state-status bits */
	dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
				 (isr & DP_DP_HPD_INT_MASK));
	mask = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);

	/*
	 * We only want to return interrupts that are unmasked to the caller.
	 * However, the interrupt status field also contains other
	 * informational bits about the HPD state status, so we only mask
	 * out the part of the register that tells us about which interrupts
	 * are pending.
	 */
	return isr & (mask | ~DP_DP_HPD_INT_MASK);
}
647
/* Read, acknowledge and return the pending STATUS2 (controller)
 * interrupts — same ack/mask scheme as dp_catalog_aux_get_irq().
 */
int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
	/* strip the mask-enable bits, keep only raw status */
	intr &= ~DP_INTERRUPT_STATUS2_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS2)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
			intr_ack | DP_INTERRUPT_STATUS2_MASK);

	return intr;
}
663
/* Pulse the PHY and PLL software resets for the h/w recommended time. */
void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_ahb(catalog, REG_DP_PHY_CTRL,
			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
	usleep_range(1000, 1100); /* h/w recommended delay */
	dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
}
674
/* Push new voltage swing / pre-emphasis levels to the PHY.
 *
 * The set_voltages flag is raised only around the phy_configure() call
 * so later reconfigurations don't re-apply voltages unintentionally.
 * Always returns 0.
 */
int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
		u8 v_level, u8 p_level)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	struct dp_io *dp_io = catalog->io;
	struct phy *phy = dp_io->phy;
	struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp;

	/* TODO: Update for all lanes instead of just first one */
	opts_dp->voltage[0] = v_level;
	opts_dp->pre[0] = p_level;
	opts_dp->set_voltages = 1;
	phy_configure(phy, &dp_io->phy_opts);
	opts_dp->set_voltages = 0;

	return 0;
}
693
/* Start transmitting the requested PHY compliance test pattern.
 *
 * Each case programs the hardware-specific register sequence for one
 * DP PHY test pattern; the write order within a case is significant.
 */
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
			u32 pattern)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	drm_dbg_dp(catalog->drm_dev, "pattern: %#x\n", pattern);
	switch (pattern) {
	case DP_PHY_TEST_PATTERN_D10_2:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		/* value is already 0 here, so this clear is a no-op kept
		 * for symmetry with the CP2520 case below
		 */
		value &= ~(1 << 16);
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		/* temporarily route the scrambler around SW bypass */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);

		value = DP_HBR2_ERM_PATTERN;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		/* re-enable the mainlink after reprogramming */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value |= DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
		break;
	case DP_PHY_TEST_PATTERN_SEL_MASK:
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
		break;
	default:
		drm_dbg_dp(catalog->drm_dev,
				"No valid test pattern requested: %#x\n", pattern);
		break;
	}
}
770
771u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
772{
773 struct dp_catalog_private *catalog = container_of(dp_catalog,
774 struct dp_catalog_private, dp_catalog);
775
776 return dp_read_link(catalog, REG_DP_MAINLINK_READY);
777}
778
779/* panel related catalog functions */
780int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
781{
782 struct dp_catalog_private *catalog = container_of(dp_catalog,
783 struct dp_catalog_private, dp_catalog);
784 u32 reg;
785
786 dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
787 dp_catalog->total);
788 dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
789 dp_catalog->sync_start);
790 dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
791 dp_catalog->width_blanking);
792 dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
793
794 reg = dp_read_p0(catalog, MMSS_DP_INTF_CONFIG);
795
796 if (dp_catalog->wide_bus_en)
797 reg |= DP_INTF_CONFIG_DATABUS_WIDEN;
798 else
799 reg &= ~DP_INTF_CONFIG_DATABUS_WIDEN;
800
801
802 DRM_DEBUG_DP("wide_bus_en=%d reg=%#x\n", dp_catalog->wide_bus_en, reg);
803
804 dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, reg);
805 return 0;
806}
807
/* Enable the internal test pattern generator (checkered rectangle,
 * 8bpp RGB), deriving the interface timing from @drm_mode.
 *
 * Vertical positions are expressed in units of pixels since frame
 * start (line count * hsync_period, adjusted by the horizontal sync
 * offsets), matching what the INTF timing registers expect.
 */
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters*/
	hsync_period = drm_mode->htotal;
	vsync_period = drm_mode->vtotal;

	/* first/last active line, converted to pixel offsets */
	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	/* shift by the horizontal back/front porch boundaries */
	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	/* pack period into the high half, pulse width into the low half */
	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	display_hctl = (hsync_end_x << 16) | hsync_start_x;


	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	/* second field (F1) unused: program zeros */
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	drm_dbg_dp(catalog->drm_dev, "%s: enabled tpg\n", __func__);
}
875
/* Disable the test pattern generator: clear the pattern control, BIST
 * enable and timing engine enable registers, in that order.
 */
void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
}
885
/* Allocate (device-managed) and initialize a DP catalog instance.
 *
 * Returns a pointer to the embedded public struct dp_catalog, or an
 * ERR_PTR on invalid input / allocation failure. Freed automatically
 * with @dev (devm allocation).
 */
struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
{
	struct dp_catalog_private *catalog;

	if (!io) {
		DRM_ERROR("invalid input\n");
		return ERR_PTR(-EINVAL);
	}

	catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
	if (!catalog)
		return ERR_PTR(-ENOMEM);

	catalog->dev = dev;
	catalog->io = io;

	return &catalog->dp_catalog;
}
904
/* Read one audio SDP header word into dp_catalog->audio_data.
 *
 * The register to read is looked up in the audio_map table using the
 * sdp_type / sdp_header values the caller staged in the catalog.
 */
void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
	enum dp_catalog_audio_sdp_type sdp;
	enum dp_catalog_audio_header_type header;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	sdp_map = catalog->audio_map;
	sdp     = dp_catalog->sdp_type;
	header  = dp_catalog->sdp_header;

	dp_catalog->audio_data = dp_read_link(catalog,
			sdp_map[sdp][header]);
}
925
/* Write dp_catalog->audio_data to one audio SDP header register,
 * selected via the audio_map table by sdp_type / sdp_header.
 */
void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
	enum dp_catalog_audio_sdp_type sdp;
	enum dp_catalog_audio_header_type header;
	u32 data;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	sdp_map = catalog->audio_map;
	sdp     = dp_catalog->sdp_type;
	header  = dp_catalog->sdp_header;
	data    = dp_catalog->audio_data;

	dp_write_link(catalog, sdp_map[sdp][header], data);
}
947
/* Program the audio clock recovery (ACR) control register.
 *
 * The ACR source select comes from dp_catalog->audio_data (shifted
 * into bits 4+). NOTE(review): BIT(31)|BIT(8)|BIT(14) are fixed
 * enable/config bits whose meaning is not derivable from this file —
 * confirm against the controller register documentation.
 */
void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 acr_ctrl, select;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	select = dp_catalog->audio_data;
	acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);

	drm_dbg_dp(catalog->drm_dev, "select: %#x, acr_ctrl: %#x\n",
					select, acr_ctrl);

	dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
}
967
/* Enable or disable the audio engine.
 *
 * The on/off request arrives via dp_catalog->audio_data (nonzero =
 * enable); only bit 0 of MMSS_DP_AUDIO_CFG is toggled.
 */
void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	bool enable;
	u32 audio_ctrl;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	enable = !!dp_catalog->audio_data;
	audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);

	if (enable)
		audio_ctrl |= BIT(0);
	else
		audio_ctrl &= ~BIT(0);

	drm_dbg_dp(catalog->drm_dev, "dp_audio_cfg = 0x%x\n", audio_ctrl);

	dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
	/* make sure the audio enable/disable write has been posted */
	wmb();
}
994
/* Enable all audio secondary data packet types and select non-register
 * header sources in SDP_CFG2.
 */
void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 sdp_cfg = 0;
	u32 sdp_cfg2 = 0;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
	/* AUDIO_TIMESTAMP_SDP_EN */
	sdp_cfg |= BIT(1);
	/* AUDIO_STREAM_SDP_EN */
	sdp_cfg |= BIT(2);
	/* AUDIO_COPY_MANAGEMENT_SDP_EN */
	sdp_cfg |= BIT(5);
	/* AUDIO_ISRC_SDP_EN  */
	sdp_cfg |= BIT(6);
	/* AUDIO_INFOFRAME_SDP_EN  */
	sdp_cfg |= BIT(20);

	drm_dbg_dp(catalog->drm_dev, "sdp_cfg = 0x%x\n", sdp_cfg);

	dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);

	sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
	/* IFRM_REGSRC -> Do not use reg values */
	sdp_cfg2 &= ~BIT(0);
	/* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
	sdp_cfg2 &= ~BIT(1);

	drm_dbg_dp(catalog->drm_dev, "sdp_cfg2 = 0x%x\n", sdp_cfg2);

	dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
}
1033
/* Attach the static SDP-type -> header-register lookup table to the
 * catalog. Rows are indexed by dp_catalog_audio_sdp_type, columns by
 * dp_catalog_audio_header_type.
 *
 * NOTE(review): the second and third columns of every row point at the
 * same _1 register — presumably headers 2 and 3 share one register;
 * confirm against the register map.
 */
void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;

	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
		{
			MMSS_DP_AUDIO_STREAM_0,
			MMSS_DP_AUDIO_STREAM_1,
			MMSS_DP_AUDIO_STREAM_1,
		},
		{
			MMSS_DP_AUDIO_TIMESTAMP_0,
			MMSS_DP_AUDIO_TIMESTAMP_1,
			MMSS_DP_AUDIO_TIMESTAMP_1,
		},
		{
			MMSS_DP_AUDIO_INFOFRAME_0,
			MMSS_DP_AUDIO_INFOFRAME_1,
			MMSS_DP_AUDIO_INFOFRAME_1,
		},
		{
			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
		},
		{
			MMSS_DP_AUDIO_ISRC_0,
			MMSS_DP_AUDIO_ISRC_1,
			MMSS_DP_AUDIO_ISRC_1,
		},
	};

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	catalog->audio_map = sdp_map;
}
1074
/* Program the audio safe-to-exit level into the mainlink levels
 * register. The 0xFE0 mask preserves the upper bits while clearing the
 * low 5-bit field that the new level is OR-ed into.
 */
void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 mainlink_levels, safe_to_exit_level;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	safe_to_exit_level = dp_catalog->audio_data;
	mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
	mainlink_levels &= 0xFE0;
	mainlink_levels |= safe_to_exit_level;

	drm_dbg_dp(catalog->drm_dev,
			"mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
			 mainlink_levels, safe_to_exit_level);

	dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
}