Linux Audio

Check our new training course

Linux debugging, profiling, tracing and performance analysis training

Apr 14-17, 2025
Register
Loading...
Note: File does not exist in v3.1.
   1// SPDX-License-Identifier: GPL-2.0-only
   2/*
   3 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
   4 */
   5
   6#define pr_fmt(fmt)	"[drm-dp] %s: " fmt, __func__
   7
   8#include <linux/delay.h>
   9#include <linux/iopoll.h>
  10#include <linux/phy/phy.h>
  11#include <linux/phy/phy-dp.h>
  12#include <linux/rational.h>
  13#include <drm/drm_dp_helper.h>
  14#include <drm/drm_print.h>
  15
  16#include "dp_catalog.h"
  17#include "dp_reg.h"
  18
  19#define POLLING_SLEEP_US			1000
  20#define POLLING_TIMEOUT_US			10000
  21
  22#define SCRAMBLER_RESET_COUNT_VALUE		0xFC
  23
  24#define DP_INTERRUPT_STATUS_ACK_SHIFT	1
  25#define DP_INTERRUPT_STATUS_MASK_SHIFT	2
  26
  27#define MSM_DP_CONTROLLER_AHB_OFFSET	0x0000
  28#define MSM_DP_CONTROLLER_AHB_SIZE	0x0200
  29#define MSM_DP_CONTROLLER_AUX_OFFSET	0x0200
  30#define MSM_DP_CONTROLLER_AUX_SIZE	0x0200
  31#define MSM_DP_CONTROLLER_LINK_OFFSET	0x0400
  32#define MSM_DP_CONTROLLER_LINK_SIZE	0x0C00
  33#define MSM_DP_CONTROLLER_P0_OFFSET	0x1000
  34#define MSM_DP_CONTROLLER_P0_SIZE	0x0400
  35
  36#define DP_INTERRUPT_STATUS1 \
  37	(DP_INTR_AUX_I2C_DONE| \
  38	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
  39	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
  40	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
  41	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
  42
  43#define DP_INTERRUPT_STATUS1_ACK \
  44	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
  45#define DP_INTERRUPT_STATUS1_MASK \
  46	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
  47
  48#define DP_INTERRUPT_STATUS2 \
  49	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
  50	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
  51
  52#define DP_INTERRUPT_STATUS2_ACK \
  53	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
  54#define DP_INTERRUPT_STATUS2_MASK \
  55	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
  56
/*
 * struct dp_catalog_private - driver-private state behind the public catalog
 * @dev: device used for devm allocations
 * @io: MMIO/PHY handles; dp_controller.base is the register base used by
 *      all dp_read_*/dp_write_* helpers below
 * @audio_map: (sdp type, header index) -> link register offset table,
 *             installed by dp_catalog_audio_init()
 * @dp_catalog: embedded public struct; container_of() recovers this wrapper
 * @aux_lut_cfg_index: per-config index into the AUX PHY calibration LUT
 *                     (not referenced in this file)
 */
struct dp_catalog_private {
	struct device *dev;
	struct dp_io *io;
	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
	struct dp_catalog dp_catalog;
	u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
};
  64
  65void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state)
  66{
  67	struct dp_catalog_private *catalog = container_of(dp_catalog,
  68			struct dp_catalog_private, dp_catalog);
  69
  70	msm_disp_snapshot_add_block(disp_state, catalog->io->dp_controller.len,
  71			catalog->io->dp_controller.base, "dp_ctrl");
  72}
  73
  74static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
  75{
  76	offset += MSM_DP_CONTROLLER_AUX_OFFSET;
  77	return readl_relaxed(catalog->io->dp_controller.base + offset);
  78}
  79
/* Write a 32-bit register in the AUX block; @offset is relative to AUX base. */
static inline void dp_write_aux(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	offset += MSM_DP_CONTROLLER_AUX_OFFSET;
	/*
	 * To make sure aux reg writes happens before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.base + offset);
}
  90
  91static inline u32 dp_read_ahb(struct dp_catalog_private *catalog, u32 offset)
  92{
  93	offset += MSM_DP_CONTROLLER_AHB_OFFSET;
  94	return readl_relaxed(catalog->io->dp_controller.base + offset);
  95}
  96
/* Write a 32-bit register in the AHB block; @offset is relative to AHB base. */
static inline void dp_write_ahb(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	offset += MSM_DP_CONTROLLER_AHB_OFFSET;
	/*
	 * To make sure phy reg writes happens before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.base + offset);
}
 107
/* Write a 32-bit register in the P0 (stream interface) block. */
static inline void dp_write_p0(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	offset += MSM_DP_CONTROLLER_P0_OFFSET;
	/*
	 * To make sure interface reg writes happens before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.base + offset);
}
 118
/*
 * Read a 32-bit register in the P0 (stream interface) block.
 *
 * Unlike the dp_write_* helpers this is a plain relaxed read with no
 * ordering guarantee (the original comment about writel() was stale).
 */
static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
			       u32 offset)
{
	offset += MSM_DP_CONTROLLER_P0_OFFSET;
	return readl_relaxed(catalog->io->dp_controller.base + offset);
}
 129
 130static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
 131{
 132	offset += MSM_DP_CONTROLLER_LINK_OFFSET;
 133	return readl_relaxed(catalog->io->dp_controller.base + offset);
 134}
 135
/* Write a 32-bit register in the link block; @offset is relative to link base. */
static inline void dp_write_link(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	offset += MSM_DP_CONTROLLER_LINK_OFFSET;
	/*
	 * To make sure link reg writes happens before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.base + offset);
}
 146
 147/* aux related catalog functions */
 148u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
 149{
 150	struct dp_catalog_private *catalog = container_of(dp_catalog,
 151				struct dp_catalog_private, dp_catalog);
 152
 153	return dp_read_aux(catalog, REG_DP_AUX_DATA);
 154}
 155
 156int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
 157{
 158	struct dp_catalog_private *catalog = container_of(dp_catalog,
 159				struct dp_catalog_private, dp_catalog);
 160
 161	dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
 162	return 0;
 163}
 164
 165int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
 166{
 167	struct dp_catalog_private *catalog = container_of(dp_catalog,
 168				struct dp_catalog_private, dp_catalog);
 169
 170	dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
 171	return 0;
 172}
 173
 174int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
 175{
 176	u32 data;
 177	struct dp_catalog_private *catalog = container_of(dp_catalog,
 178				struct dp_catalog_private, dp_catalog);
 179
 180	if (read) {
 181		data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
 182		data &= ~DP_AUX_TRANS_CTRL_GO;
 183		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
 184	} else {
 185		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
 186	}
 187	return 0;
 188}
 189
/*
 * Clear latched PHY AUX interrupts.
 *
 * The sequence (read status, then write 0x1f / 0x9f / 0 to the clear
 * register) mirrors the hardware ack protocol; the exact values come from
 * the h/w programming guide — do not reorder. Always returns 0.
 */
int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
	return 0;
}
 201
/**
 * dp_catalog_aux_reset() - reset AUX controller
 *
 * @dp_catalog: DP catalog structure
 *
 * return: void
 *
 * This function resets the AUX controller by pulsing DP_AUX_CTRL_RESET
 * with the h/w recommended 1ms delay between assert and de-assert.
 *
 * NOTE: resetting the AUX controller will also clear any pending HPD
 * related interrupts
 */
void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	aux_ctrl |= DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
	usleep_range(1000, 1100); /* h/w recommended delay */

	aux_ctrl &= ~DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
 229
 230void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
 231{
 232	u32 aux_ctrl;
 233	struct dp_catalog_private *catalog = container_of(dp_catalog,
 234				struct dp_catalog_private, dp_catalog);
 235
 236	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
 237
 238	if (enable) {
 239		dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
 240		dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
 241		aux_ctrl |= DP_AUX_CTRL_ENABLE;
 242	} else {
 243		aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
 244	}
 245
 246	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
 247}
 248
 249void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog)
 250{
 251	struct dp_catalog_private *catalog = container_of(dp_catalog,
 252				struct dp_catalog_private, dp_catalog);
 253	struct dp_io *dp_io = catalog->io;
 254	struct phy *phy = dp_io->phy;
 255
 256	phy_calibrate(phy);
 257}
 258
 259static void dump_regs(void __iomem *base, int len)
 260{
 261	int i;
 262	u32 x0, x4, x8, xc;
 263	u32 addr_off = 0;
 264
 265	len = DIV_ROUND_UP(len, 16);
 266	for (i = 0; i < len; i++) {
 267		x0 = readl_relaxed(base + addr_off);
 268		x4 = readl_relaxed(base + addr_off + 0x04);
 269		x8 = readl_relaxed(base + addr_off + 0x08);
 270		xc = readl_relaxed(base + addr_off + 0x0c);
 271
 272		pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
 273		addr_off += 16;
 274	}
 275}
 276
 277void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
 278{
 279	u32 offset, len;
 280	struct dp_catalog_private *catalog = container_of(dp_catalog,
 281		struct dp_catalog_private, dp_catalog);
 282
 283	pr_info("AHB regs\n");
 284	offset = MSM_DP_CONTROLLER_AHB_OFFSET;
 285	len = MSM_DP_CONTROLLER_AHB_SIZE;
 286	dump_regs(catalog->io->dp_controller.base + offset, len);
 287
 288	pr_info("AUXCLK regs\n");
 289	offset = MSM_DP_CONTROLLER_AUX_OFFSET;
 290	len = MSM_DP_CONTROLLER_AUX_SIZE;
 291	dump_regs(catalog->io->dp_controller.base + offset, len);
 292
 293	pr_info("LCLK regs\n");
 294	offset = MSM_DP_CONTROLLER_LINK_OFFSET;
 295	len = MSM_DP_CONTROLLER_LINK_SIZE;
 296	dump_regs(catalog->io->dp_controller.base + offset, len);
 297
 298	pr_info("P0CLK regs\n");
 299	offset = MSM_DP_CONTROLLER_P0_OFFSET;
 300	len = MSM_DP_CONTROLLER_P0_SIZE;
 301	dump_regs(catalog->io->dp_controller.base + offset, len);
 302}
 303
/*
 * Read and acknowledge the STATUS1 group of AUX/link interrupts.
 *
 * Returns the raw status with the mask bits stripped. Asserted bits are
 * acked by writing them back shifted into the ACK position while keeping
 * DP_INTERRUPT_STATUS1_MASK set so the sources stay enabled.
 */
u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
	intr &= ~DP_INTERRUPT_STATUS1_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS1)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
			DP_INTERRUPT_STATUS1_MASK);

	return intr;

}
 320
 321/* controller related catalog functions */
 322void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
 323				u32 dp_tu, u32 valid_boundary,
 324				u32 valid_boundary2)
 325{
 326	struct dp_catalog_private *catalog = container_of(dp_catalog,
 327				struct dp_catalog_private, dp_catalog);
 328
 329	dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
 330	dp_write_link(catalog, REG_DP_TU, dp_tu);
 331	dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
 332}
 333
/* Write @state straight into DP_STATE_CTRL (training pattern, video send, …). */
void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_STATE_CTRL, state);
}
 341
/* Write the fully pre-computed @cfg word into DP_CONFIGURATION_CTRL. */
void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	DRM_DEBUG_DP("DP_CONFIGURATION_CTRL=0x%x\n", cfg);

	dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
}
 351
 352void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
 353{
 354	struct dp_catalog_private *catalog = container_of(dp_catalog,
 355				struct dp_catalog_private, dp_catalog);
 356	u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
 357	u32 ln_mapping;
 358
 359	ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
 360	ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
 361	ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
 362	ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
 363
 364	dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
 365			ln_mapping);
 366}
 367
/*
 * Enable or disable the mainlink.
 *
 * Enable performs the h/w bring-up sequence: clear RESET+ENABLE, pulse
 * RESET high then low, and finally set ENABLE together with
 * FB_BOUNDARY_SEL. The four writes must stay in this exact order.
 * Disable only clears the ENABLE bit.
 */
void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
						bool enable)
{
	u32 mainlink_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (enable) {
		/*
		 * To make sure link reg writes happens before other operation,
		 * dp_write_link() function uses writel()
		 */
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
						DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
					DP_MAINLINK_FB_BOUNDARY_SEL);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	} else {
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	}
}
 401
/*
 * Program the MISC1/MISC0 register: colorimetry, test-bits-depth and
 * synchronous-clock mode.
 */
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	/*
	 * NOTE(review): the colorimetry field is OR'ed in without clearing
	 * the previous value first — stale bits could survive a mode
	 * change. Confirm callers always start from a reset register.
	 */
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	DRM_DEBUG_DP("misc settings = 0x%x\n", misc_val);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}
 422
 423void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
 424					u32 rate, u32 stream_rate_khz,
 425					bool fixed_nvid)
 426{
 427	u32 pixel_m, pixel_n;
 428	u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
 429	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
 430	u32 const link_rate_hbr2 = 540000;
 431	u32 const link_rate_hbr3 = 810000;
 432	unsigned long den, num;
 433
 434	struct dp_catalog_private *catalog = container_of(dp_catalog,
 435				struct dp_catalog_private, dp_catalog);
 436
 437	if (rate == link_rate_hbr3)
 438		pixel_div = 6;
 439	else if (rate == 1620000 || rate == 270000)
 440		pixel_div = 2;
 441	else if (rate == link_rate_hbr2)
 442		pixel_div = 4;
 443	else
 444		DRM_ERROR("Invalid pixel mux divider\n");
 445
 446	dispcc_input_rate = (rate * 10) / pixel_div;
 447
 448	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
 449			(unsigned long)(1 << 16) - 1,
 450			(unsigned long)(1 << 16) - 1, &den, &num);
 451
 452	den = ~(den - num);
 453	den = den & 0xFFFF;
 454	pixel_m = num;
 455	pixel_n = den;
 456
 457	mvid = (pixel_m & 0xFFFF) * 5;
 458	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
 459
 460	if (nvid < nvid_fixed) {
 461		u32 temp;
 462
 463		temp = (nvid_fixed / nvid) * nvid;
 464		mvid = (nvid_fixed / nvid) * mvid;
 465		nvid = temp;
 466	}
 467
 468	if (link_rate_hbr2 == rate)
 469		nvid *= 2;
 470
 471	if (link_rate_hbr3 == rate)
 472		nvid *= 3;
 473
 474	DRM_DEBUG_DP("mvid=0x%x, nvid=0x%x\n", mvid, nvid);
 475	dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
 476	dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
 477	dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
 478}
 479
/*
 * Kick link-training pattern @pattern (1-based) via DP_STATE_CTRL and
 * poll DP_MAINLINK_READY until the corresponding ready bit asserts.
 *
 * Returns 0 on success, negative error if the poll times out.
 */
int dp_catalog_ctrl_set_pattern(struct dp_catalog *dp_catalog,
					u32 pattern)
{
	int bit, ret;
	u32 data;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	bit = BIT(pattern - 1);
	DRM_DEBUG_DP("hw: bit=%d train=%d\n", bit, pattern);
	dp_catalog_ctrl_state_ctrl(dp_catalog, bit);

	/* ready bit for pattern N sits above the training-ready shift */
	bit = BIT(pattern - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;

	/* Poll for mainlink ready status */
	ret = readx_poll_timeout(readl, catalog->io->dp_controller.base +
					MSM_DP_CONTROLLER_LINK_OFFSET +
					REG_DP_MAINLINK_READY,
					data, data & bit,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("set pattern for link_train=%d failed\n", pattern);
		return ret;
	}
	return 0;
}
 506
/**
 * dp_catalog_ctrl_reset() - reset DP controller
 *
 * @dp_catalog: DP catalog structure
 *
 * return: void
 *
 * This function resets the DP controller by pulsing DP_SW_RESET with the
 * h/w recommended 1ms delay between assert and de-assert.
 *
 * NOTE: resetting the DP controller will also clear any pending HPD
 * related interrupts
 */
void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
{
	u32 sw_reset;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);

	sw_reset |= DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
	usleep_range(1000, 1100); /* h/w recommended delay */

	sw_reset &= ~DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
}
 534
/*
 * Poll DP_MAINLINK_READY for the ready-for-video bit.
 *
 * Returns true when the bit asserts within the polling window, false on
 * timeout (error is logged).
 */
bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
{
	u32 data;
	int ret;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Poll for mainlink ready status */
	ret = readl_poll_timeout(catalog->io->dp_controller.base +
				MSM_DP_CONTROLLER_LINK_OFFSET +
				REG_DP_MAINLINK_READY,
				data, data & DP_MAINLINK_READY_FOR_VIDEO,
				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("mainlink not ready\n");
		return false;
	}

	return true;
}
 555
 556void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
 557						bool enable)
 558{
 559	struct dp_catalog_private *catalog = container_of(dp_catalog,
 560				struct dp_catalog_private, dp_catalog);
 561
 562	if (enable) {
 563		dp_write_ahb(catalog, REG_DP_INTR_STATUS,
 564				DP_INTERRUPT_STATUS1_MASK);
 565		dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
 566				DP_INTERRUPT_STATUS2_MASK);
 567	} else {
 568		dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
 569		dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
 570	}
 571}
 572
 573void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
 574			u32 intr_mask, bool en)
 575{
 576	struct dp_catalog_private *catalog = container_of(dp_catalog,
 577				struct dp_catalog_private, dp_catalog);
 578
 579	u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
 580
 581	config = (en ? config | intr_mask : config & ~intr_mask);
 582
 583	dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
 584				config & DP_DP_HPD_INT_MASK);
 585}
 586
/*
 * One-time HPD bring-up: unmask plug/unplug interrupts, enable the HPD
 * reference timer, then enable the HPD block itself.
 */
void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	/* enable HPD plug and unplug interrupts */
	dp_catalog_hpd_config_intr(dp_catalog,
		DP_DP_HPD_PLUG_INT_MASK | DP_DP_HPD_UNPLUG_INT_MASK, true);

	/* Configure REFTIMER and enable it */
	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	/* Enable HPD */
	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
}
 605
 606u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
 607{
 608	struct dp_catalog_private *catalog = container_of(dp_catalog,
 609				struct dp_catalog_private, dp_catalog);
 610	u32 status;
 611
 612	status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
 613	status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
 614	status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
 615
 616	return status;
 617}
 618
 619u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
 620{
 621	struct dp_catalog_private *catalog = container_of(dp_catalog,
 622				struct dp_catalog_private, dp_catalog);
 623	int isr = 0;
 624
 625	isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
 626	dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
 627				 (isr & DP_DP_HPD_INT_MASK));
 628
 629	return isr;
 630}
 631
/*
 * Read and acknowledge the STATUS2 group of controller interrupts.
 *
 * Same scheme as dp_catalog_aux_get_irq(): strip the mask bits, write the
 * asserted bits back in the ACK position with the mask kept enabled, and
 * return the remaining status bits.
 */
int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
	intr &= ~DP_INTERRUPT_STATUS2_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS2)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
			intr_ack | DP_INTERRUPT_STATUS2_MASK);

	return intr;
}
 647
 648void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
 649{
 650	struct dp_catalog_private *catalog = container_of(dp_catalog,
 651				struct dp_catalog_private, dp_catalog);
 652
 653	dp_write_ahb(catalog, REG_DP_PHY_CTRL,
 654			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
 655	usleep_range(1000, 1100); /* h/w recommended delay */
 656	dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
 657}
 658
/*
 * Push voltage-swing (@v_level) and pre-emphasis (@p_level) to the PHY
 * via phy_configure(). Always returns 0.
 */
int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
		u8 v_level, u8 p_level)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	struct dp_io *dp_io = catalog->io;
	struct phy *phy = dp_io->phy;
	struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp;

	/* TODO: Update for all lanes instead of just first one */
	opts_dp->voltage[0] = v_level;
	opts_dp->pre[0] = p_level;
	/* set_voltages flags this configure call as a voltage update only */
	opts_dp->set_voltages = 1;
	phy_configure(phy, &dp_io->phy_opts);
	opts_dp->set_voltages = 0;

	return 0;
}
 677
/*
 * Start transmitting a PHY compliance test pattern.
 *
 * @pattern is one of the DP_PHY_TEST_PATTERN_* values from the DPCD test
 * request. Each case programs the h/w-prescribed register sequence; the
 * statement order within a case matters and must not be changed.
 */
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
			u32 pattern)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	switch (pattern) {
	case DP_PHY_TEST_PATTERN_D10_2:
		/* D10.2 is generated by training pattern 1 */
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		/* pulse the scrambler-reset count, then start err measure */
		value &= ~(1 << 16);
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		/* drop the scrambler bypass, arm ERM, then re-enable link */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);

		value = DP_HBR2_ERM_PATTERN;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value |= DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
		break;
	case DP_PHY_TEST_PATTERN_SEL_MASK:
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
		break;
	default:
		DRM_DEBUG_DP("No valid test pattern requested:0x%x\n", pattern);
		break;
	}
}
 752
 753u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
 754{
 755	struct dp_catalog_private *catalog = container_of(dp_catalog,
 756				struct dp_catalog_private, dp_catalog);
 757
 758	return dp_read_link(catalog, REG_DP_MAINLINK_READY);
 759}
 760
 761/* panel related catalog functions */
/*
 * Program the panel timing registers from the values pre-packed into the
 * public catalog (total, sync_start, width_blanking, dp_active), then
 * clear the P0 interface config. Always returns 0.
 */
int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
				dp_catalog->total);
	dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
				dp_catalog->sync_start);
	dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
				dp_catalog->width_blanking);
	dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0);
	return 0;
}
 777
/*
 * Enable the test pattern generator (TPG) for @drm_mode.
 *
 * Converts the DRM mode into the interface timing parameters (all
 * vertical positions are expressed in units of pixels, i.e. lines
 * multiplied by hsync_period), programs the interface timing engine,
 * then turns on the checkered-rect pattern, BIST and the timing engine.
 */
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters*/
	hsync_period = drm_mode->htotal;
	vsync_period = drm_mode->vtotal;

	/* active region start/end, in pixels from the start of vsync */
	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	/* shift by the horizontal back/front porch within the line */
	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	/* hsync period in the high half, pulse width in the low half */
	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	display_hctl = (hsync_end_x << 16) | hsync_start_x;


	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	/* F1 (second field) timings are unused: progressive only */
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	DRM_DEBUG_DP("%s: enabled tpg\n", __func__);
}
 845
 846void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
 847{
 848	struct dp_catalog_private *catalog = container_of(dp_catalog,
 849				struct dp_catalog_private, dp_catalog);
 850
 851	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
 852	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
 853	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
 854}
 855
 856struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
 857{
 858	struct dp_catalog_private *catalog;
 859
 860	if (!io) {
 861		DRM_ERROR("invalid input\n");
 862		return ERR_PTR(-EINVAL);
 863	}
 864
 865	catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
 866	if (!catalog)
 867		return ERR_PTR(-ENOMEM);
 868
 869	catalog->dev = dev;
 870	catalog->io = io;
 871
 872	return &catalog->dp_catalog;
 873}
 874
/*
 * Read one audio SDP header word.
 *
 * The register offset is looked up in audio_map using the sdp_type /
 * sdp_header pair stored in the public catalog; the result is returned
 * through dp_catalog->audio_data.
 */
void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
	enum dp_catalog_audio_sdp_type sdp;
	enum dp_catalog_audio_header_type header;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	sdp_map = catalog->audio_map;
	sdp     = dp_catalog->sdp_type;
	header  = dp_catalog->sdp_header;

	dp_catalog->audio_data = dp_read_link(catalog,
			sdp_map[sdp][header]);
}
 895
/*
 * Write one audio SDP header word.
 *
 * Counterpart of dp_catalog_audio_get_header(): the value in
 * dp_catalog->audio_data is written to the register selected by the
 * sdp_type / sdp_header pair via audio_map.
 */
void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
	enum dp_catalog_audio_sdp_type sdp;
	enum dp_catalog_audio_header_type header;
	u32 data;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	sdp_map = catalog->audio_map;
	sdp     = dp_catalog->sdp_type;
	header  = dp_catalog->sdp_header;
	data    = dp_catalog->audio_data;

	dp_write_link(catalog, sdp_map[sdp][header], data);
}
 917
/*
 * Program the audio clock-recovery (ACR) control register.
 *
 * The select value comes in through dp_catalog->audio_data; bits 31/14/8
 * are additional h/w control flags — presumably enable/source-select
 * bits per the programming guide (not named in dp_reg.h, verify there).
 */
void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 acr_ctrl, select;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	select = dp_catalog->audio_data;
	acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);

	DRM_DEBUG_DP("select = 0x%x, acr_ctrl = 0x%x\n", select, acr_ctrl);

	dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
}
 936
/*
 * Enable or disable the audio engine.
 *
 * The public catalog's audio_data acts as the boolean: non-zero enables
 * bit 0 of MMSS_DP_AUDIO_CFG, zero clears it.
 */
void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	bool enable;
	u32 audio_ctrl;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	enable = !!dp_catalog->audio_data;
	audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);

	if (enable)
		audio_ctrl |= BIT(0);
	else
		audio_ctrl &= ~BIT(0);

	DRM_DEBUG_DP("dp_audio_cfg = 0x%x\n", audio_ctrl);

	dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
	/* make sure the enable/disable write has reached the h/w */
	wmb();
}
 963
/*
 * Enable the audio secondary-data-packet (SDP) types in SDP_CFG and make
 * the infoframe/HB3 contents come from the SDP stream rather than
 * register values (SDP_CFG2).
 */
void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 sdp_cfg = 0;
	u32 sdp_cfg2 = 0;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
	/* AUDIO_TIMESTAMP_SDP_EN */
	sdp_cfg |= BIT(1);
	/* AUDIO_STREAM_SDP_EN */
	sdp_cfg |= BIT(2);
	/* AUDIO_COPY_MANAGEMENT_SDP_EN */
	sdp_cfg |= BIT(5);
	/* AUDIO_ISRC_SDP_EN  */
	sdp_cfg |= BIT(6);
	/* AUDIO_INFOFRAME_SDP_EN  */
	sdp_cfg |= BIT(20);

	DRM_DEBUG_DP("sdp_cfg = 0x%x\n", sdp_cfg);

	dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);

	sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
	/* IFRM_REGSRC -> Do not use reg values */
	sdp_cfg2 &= ~BIT(0);
	/* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
	sdp_cfg2 &= ~BIT(1);

	DRM_DEBUG_DP("sdp_cfg2 = 0x%x\n", sdp_cfg2);

	dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
}
1002
/*
 * Install the audio SDP register map.
 *
 * sdp_map is indexed [sdp type][header index]; header indices 1 and 2
 * deliberately point at the same _1 register for every SDP type (that
 * register holds both remaining header bytes).
 */
void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;

	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
		{
			MMSS_DP_AUDIO_STREAM_0,
			MMSS_DP_AUDIO_STREAM_1,
			MMSS_DP_AUDIO_STREAM_1,
		},
		{
			MMSS_DP_AUDIO_TIMESTAMP_0,
			MMSS_DP_AUDIO_TIMESTAMP_1,
			MMSS_DP_AUDIO_TIMESTAMP_1,
		},
		{
			MMSS_DP_AUDIO_INFOFRAME_0,
			MMSS_DP_AUDIO_INFOFRAME_1,
			MMSS_DP_AUDIO_INFOFRAME_1,
		},
		{
			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
		},
		{
			MMSS_DP_AUDIO_ISRC_0,
			MMSS_DP_AUDIO_ISRC_1,
			MMSS_DP_AUDIO_ISRC_1,
		},
	};

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	catalog->audio_map = sdp_map;
}
1043
/*
 * Program the safe-to-exit level into DP_MAINLINK_LEVELS.
 *
 * audio_data carries the level; the existing register value is kept for
 * the bits outside the low 5-bit level field (mask 0xFE0).
 */
void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 mainlink_levels, safe_to_exit_level;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	safe_to_exit_level = dp_catalog->audio_data;
	mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
	mainlink_levels &= 0xFE0;
	mainlink_levels |= safe_to_exit_level;

	DRM_DEBUG_DP("mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
			 mainlink_levels, safe_to_exit_level);

	dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
}