Linux Audio

Check our new training course

Loading...
Note: File does not exist in v3.1.
   1// SPDX-License-Identifier: GPL-2.0-only
   2/*
   3 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
   4 */
   5
   6#define pr_fmt(fmt)	"[drm-dp] %s: " fmt, __func__
   7
   8#include <linux/delay.h>
   9#include <linux/iopoll.h>
  10#include <linux/phy/phy.h>
  11#include <linux/phy/phy-dp.h>
  12#include <linux/rational.h>
  13#include <drm/display/drm_dp_helper.h>
  14#include <drm/drm_print.h>
  15
  16#include "dp_catalog.h"
  17#include "dp_reg.h"
  18
  19#define POLLING_SLEEP_US			1000
  20#define POLLING_TIMEOUT_US			10000
  21
  22#define SCRAMBLER_RESET_COUNT_VALUE		0xFC
  23
  24#define DP_INTERRUPT_STATUS_ACK_SHIFT	1
  25#define DP_INTERRUPT_STATUS_MASK_SHIFT	2
  26
  27#define DP_INTF_CONFIG_DATABUS_WIDEN     BIT(4)
  28
  29#define DP_INTERRUPT_STATUS1 \
  30	(DP_INTR_AUX_XFER_DONE| \
  31	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
  32	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
  33	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
  34	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
  35
  36#define DP_INTERRUPT_STATUS1_ACK \
  37	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
  38#define DP_INTERRUPT_STATUS1_MASK \
  39	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
  40
  41#define DP_INTERRUPT_STATUS2 \
  42	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
  43	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
  44
  45#define DP_INTERRUPT_STATUS2_ACK \
  46	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
  47#define DP_INTERRUPT_STATUS2_MASK \
  48	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
  49
  50#define DP_INTERRUPT_STATUS4 \
  51	(PSR_UPDATE_INT | PSR_CAPTURE_INT | PSR_EXIT_INT | \
  52	PSR_UPDATE_ERROR_INT | PSR_WAKE_ERROR_INT)
  53
  54#define DP_INTERRUPT_MASK4 \
  55	(PSR_UPDATE_MASK | PSR_CAPTURE_MASK | PSR_EXIT_MASK | \
  56	PSR_UPDATE_ERROR_MASK | PSR_WAKE_ERROR_MASK)
  57
/* Private catalog state wrapping the public struct dp_catalog. */
struct dp_catalog_private {
	struct device *dev;
	struct drm_device *drm_dev;
	struct dp_io *io;		/* register blocks + PHY handle */
	/* presumably audio SDP header register map, per SDP type — TODO confirm against dp_audio users */
	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
	struct dp_catalog dp_catalog;	/* public part handed to callers */
	u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
};
  66
/* Capture all four DP register spaces (AHB, AUX, link, P0) into a debug snapshot. */
void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
			struct dp_catalog_private, dp_catalog);
	struct dss_io_data *dss = &catalog->io->dp_controller;

	msm_disp_snapshot_add_block(disp_state, dss->ahb.len, dss->ahb.base, "dp_ahb");
	msm_disp_snapshot_add_block(disp_state, dss->aux.len, dss->aux.base, "dp_aux");
	msm_disp_snapshot_add_block(disp_state, dss->link.len, dss->link.base, "dp_link");
	msm_disp_snapshot_add_block(disp_state, dss->p0.len, dss->p0.base, "dp_p0");
}
  78
/* Relaxed (unordered) read of an AUX block register. */
static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io->dp_controller.aux.base + offset);
}
  83
static inline void dp_write_aux(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure aux reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.aux.base + offset);
}
  93
/* Relaxed (unordered) read of an AHB block register. */
static inline u32 dp_read_ahb(const struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io->dp_controller.ahb.base + offset);
}
  98
static inline void dp_write_ahb(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure phy reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.ahb.base + offset);
}
 108
static inline void dp_write_p0(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure interface reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.p0.base + offset);
}
 118
static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
			       u32 offset)
{
	/*
	 * Relaxed (unordered) read of a P0 stream-interface register.
	 * (The original comment here was a stale copy-paste claiming writel().)
	 */
	return readl_relaxed(catalog->io->dp_controller.p0.base + offset);
}
 128
/* Relaxed (unordered) read of a link block register. */
static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io->dp_controller.link.base + offset);
}
 133
static inline void dp_write_link(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure link reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io->dp_controller.link.base + offset);
}
 143
/* aux related catalog functions */

/* Return the current contents of the AUX data FIFO register. */
u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	return dp_read_aux(catalog, REG_DP_AUX_DATA);
}
 152
/* Write the caller-staged dp_catalog->aux_data word into the AUX data register. Always returns 0. */
int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
	return 0;
}
 161
/* Write the caller-staged dp_catalog->aux_data word into the AUX transfer-control register. Always returns 0. */
int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
	return 0;
}
 170
/*
 * Stop the current AUX transaction.
 * For reads, only the GO bit is cleared (preserving the other control bits);
 * for writes, the whole transfer-control register is zeroed. Always returns 0.
 */
int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
{
	u32 data;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (read) {
		data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
		data &= ~DP_AUX_TRANS_CTRL_GO;
		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
	} else {
		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
	}
	return 0;
}
 186
/*
 * Clear latched PHY AUX interrupts. The read flushes current status, then
 * the 0x1f/0x9f/0 write sequence clears the latch — presumably a h/w
 * programming-guide sequence; TODO confirm against the PHY documentation.
 * Always returns 0.
 */
int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
	return 0;
}
 198
/**
 * dp_catalog_aux_reset() - reset AUX controller
 *
 * @dp_catalog: DP catalog structure
 *
 * return: void
 *
 * This function resets the AUX controller by pulsing the RESET bit in
 * REG_DP_AUX_CTRL with a h/w recommended ~1 ms hold time.
 *
 * NOTE: reset AUX controller will also clear any pending HPD related interrupts
 */
void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	aux_ctrl |= DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
	usleep_range(1000, 1100); /* h/w recommended delay */

	aux_ctrl &= ~DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
 226
/*
 * Enable or disable the AUX channel. On enable, the timeout count and
 * AUX limits registers are programmed (0xffff each) before setting the
 * enable bit.
 */
void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	if (enable) {
		dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
		dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
		aux_ctrl |= DP_AUX_CTRL_ENABLE;
	} else {
		aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
	}

	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
 245
 246void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog)
 247{
 248	struct dp_catalog_private *catalog = container_of(dp_catalog,
 249				struct dp_catalog_private, dp_catalog);
 250	struct dp_io *dp_io = catalog->io;
 251	struct phy *phy = dp_io->phy;
 252
 253	phy_calibrate(phy);
 254}
 255
/*
 * Block until the HPD status register reports "connected".
 * Returns 0 on success or the readl_poll_timeout() error (-ETIMEDOUT).
 */
int dp_catalog_aux_wait_for_hpd_connect_state(struct dp_catalog *dp_catalog)
{
	u32 state;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* poll for hpd connected status every 2ms and timeout after 500ms */
	return readl_poll_timeout(catalog->io->dp_controller.aux.base +
				REG_DP_DP_HPD_INT_STATUS,
				state, state & DP_DP_HPD_STATE_STATUS_CONNECTED,
				2000, 500000);
}
 268
 269static void dump_regs(void __iomem *base, int len)
 270{
 271	int i;
 272	u32 x0, x4, x8, xc;
 273	u32 addr_off = 0;
 274
 275	len = DIV_ROUND_UP(len, 16);
 276	for (i = 0; i < len; i++) {
 277		x0 = readl_relaxed(base + addr_off);
 278		x4 = readl_relaxed(base + addr_off + 0x04);
 279		x8 = readl_relaxed(base + addr_off + 0x08);
 280		xc = readl_relaxed(base + addr_off + 0x0c);
 281
 282		pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
 283		addr_off += 16;
 284	}
 285}
 286
/* Debug helper: dump all four DP controller register blocks to the log. */
void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);
	struct dss_io_data *io = &catalog->io->dp_controller;

	pr_info("AHB regs\n");
	dump_regs(io->ahb.base, io->ahb.len);

	pr_info("AUXCLK regs\n");
	dump_regs(io->aux.base, io->aux.len);

	pr_info("LCLK regs\n");
	dump_regs(io->link.base, io->link.len);

	pr_info("P0CLK regs\n");
	dump_regs(io->p0.base, io->p0.len);
}
 305
/*
 * Read, acknowledge and return pending AUX/status1 interrupts.
 * The mask bits share REG_DP_INTR_STATUS with the status bits, so the mask
 * portion is stripped from the returned value, and the ack write re-asserts
 * the mask bits while acking only the pending status bits.
 */
u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
	intr &= ~DP_INTERRUPT_STATUS1_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS1)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
			DP_INTERRUPT_STATUS1_MASK);

	return intr;

}
 322
 323/* controller related catalog functions */
/* controller related catalog functions */

/* Program the transfer-unit size and both valid-boundary registers. */
void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
				u32 dp_tu, u32 valid_boundary,
				u32 valid_boundary2)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
	dp_write_link(catalog, REG_DP_TU, dp_tu);
	dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
}
 335
/* Write the mainlink state-control register (training pattern / video / off). */
void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_STATE_CTRL, state);
}
 343
/* Program the DP configuration-control register with the caller-built value. */
void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	drm_dbg_dp(catalog->drm_dev, "DP_CONFIGURATION_CTRL=0x%x\n", cfg);

	dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
}
 353
 354void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
 355{
 356	struct dp_catalog_private *catalog = container_of(dp_catalog,
 357				struct dp_catalog_private, dp_catalog);
 358	u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
 359	u32 ln_mapping;
 360
 361	ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
 362	ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
 363	ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
 364	ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
 365
 366	dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
 367			ln_mapping);
 368}
 369
/* Set or clear only the mainlink ENABLE bit (used around PSR entry/exit). */
void dp_catalog_ctrl_psr_mainlink_enable(struct dp_catalog *dp_catalog,
						bool enable)
{
	u32 val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	val = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

	if (enable)
		val |= DP_MAINLINK_CTRL_ENABLE;
	else
		val &= ~DP_MAINLINK_CTRL_ENABLE;

	dp_write_link(catalog, REG_DP_MAINLINK_CTRL, val);
}
 386
/*
 * Enable or disable the mainlink. The enable path performs the full
 * clear -> reset-pulse -> enable sequence; the write order is part of the
 * h/w bring-up sequence and must not be reordered.
 */
void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
						bool enable)
{
	u32 mainlink_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	drm_dbg_dp(catalog->drm_dev, "enable=%d\n", enable);
	if (enable) {
		/*
		 * To make sure link reg writes happens before other operation,
		 * dp_write_link() function uses writel()
		 */
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

		/* step 1: drop RESET and ENABLE */
		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
						DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 2: pulse RESET high then low */
		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 3: enable the link with framing-boundary select */
		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
					DP_MAINLINK_FB_BOUNDARY_SEL);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	} else {
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	}
}
 421
/*
 * Program the MISC0/MISC1 register: colorimetry, test bit depth, and force
 * synchronous-clock mode. Only the bit-depth field is cleared first; the
 * colorimetry bits are OR'd over the existing value.
 */
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	drm_dbg_dp(catalog->drm_dev, "misc settings = 0x%x\n", misc_val);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}
 442
/*
 * Program the software MVID/NVID (main stream attribute) registers from the
 * link rate and stream pixel rate.
 *
 * @rate: link rate in kHz (162000/270000/540000/810000)
 * @stream_rate_khz: pixel stream rate in kHz
 * @fixed_nvid: NOTE(review): accepted but never used in this body — the
 *              nvid_fixed floor below is applied unconditionally; confirm
 *              against callers whether that is intended.
 */
void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
					u32 rate, u32 stream_rate_khz,
					bool fixed_nvid)
{
	u32 pixel_m, pixel_n;
	u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
	u32 const link_rate_hbr2 = 540000;
	u32 const link_rate_hbr3 = 810000;
	unsigned long den, num;

	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* pixel mux divider depends on the link rate class */
	if (rate == link_rate_hbr3)
		pixel_div = 6;
	else if (rate == 162000 || rate == 270000)
		pixel_div = 2;
	else if (rate == link_rate_hbr2)
		pixel_div = 4;
	else
		DRM_ERROR("Invalid pixel mux divider\n");

	dispcc_input_rate = (rate * 10) / pixel_div;

	/* best 16-bit rational approximation of dispcc_input_rate/stream_rate */
	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
			(unsigned long)(1 << 16) - 1,
			(unsigned long)(1 << 16) - 1, &den, &num);

	/*
	 * Transform num/den into the register encoding. The complement
	 * arithmetic follows the h/w programming sequence — TODO confirm
	 * against the MSA programming guide.
	 */
	den = ~(den - num);
	den = den & 0xFFFF;
	pixel_m = num;
	pixel_n = den;

	mvid = (pixel_m & 0xFFFF) * 5;
	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);

	/* scale both up so nvid is at least the DP constant N value */
	if (nvid < nvid_fixed) {
		u32 temp;

		temp = (nvid_fixed / nvid) * nvid;
		mvid = (nvid_fixed / nvid) * mvid;
		nvid = temp;
	}

	if (link_rate_hbr2 == rate)
		nvid *= 2;

	if (link_rate_hbr3 == rate)
		nvid *= 3;

	drm_dbg_dp(catalog->drm_dev, "mvid=0x%x, nvid=0x%x\n", mvid, nvid);
	dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
	dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
	dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
}
 499
/*
 * Kick off link-training pattern @state_bit (1-based) and poll until the
 * mainlink-ready register reflects it.
 * Returns 0 on success or a negative poll-timeout error.
 */
int dp_catalog_ctrl_set_pattern_state_bit(struct dp_catalog *dp_catalog,
					u32 state_bit)
{
	int bit, ret;
	u32 data;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	bit = BIT(state_bit - 1);
	drm_dbg_dp(catalog->drm_dev, "hw: bit=%d train=%d\n", bit, state_bit);
	dp_catalog_ctrl_state_ctrl(dp_catalog, bit);

	/* ready status lives at the same bit position, shifted */
	bit = BIT(state_bit - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;

	/* Poll for mainlink ready status */
	ret = readx_poll_timeout(readl, catalog->io->dp_controller.link.base +
					REG_DP_MAINLINK_READY,
					data, data & bit,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("set state_bit for link_train=%d failed\n", state_bit);
		return ret;
	}
	return 0;
}
 525
/**
 * dp_catalog_hw_revision() - retrieve DP hw revision
 *
 * @dp_catalog: DP catalog structure
 *
 * Return: DP controller hw revision
 */
u32 dp_catalog_hw_revision(const struct dp_catalog *dp_catalog)
{
	const struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	return dp_read_ahb(catalog, REG_DP_HW_VERSION);
}
 541
/**
 * dp_catalog_ctrl_reset() - reset DP controller
 *
 * @dp_catalog: DP catalog structure
 *
 * return: void
 *
 * This function resets the DP controller by pulsing the SW_RESET bit in
 * REG_DP_SW_RESET with a h/w recommended ~1 ms hold time.
 *
 * NOTE: reset DP controller will also clear any pending HPD related interrupts
 */
void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
{
	u32 sw_reset;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);

	sw_reset |= DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
	usleep_range(1000, 1100); /* h/w recommended delay */

	sw_reset &= ~DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
}
 569
/* Poll until the mainlink reports ready-for-video; returns false on timeout. */
bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
{
	u32 data;
	int ret;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Poll for mainlink ready status */
	ret = readl_poll_timeout(catalog->io->dp_controller.link.base +
				REG_DP_MAINLINK_READY,
				data, data & DP_MAINLINK_READY_FOR_VIDEO,
				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("mainlink not ready\n");
		return false;
	}

	return true;
}
 589
/* Unmask (enable) or mask all status1/status2 controller interrupts. */
void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
						bool enable)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (enable) {
		dp_write_ahb(catalog, REG_DP_INTR_STATUS,
				DP_INTERRUPT_STATUS1_MASK);
		dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
				DP_INTERRUPT_STATUS2_MASK);
	} else {
		dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
		dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
	}
}
 606
/* Enable (@en) or disable the HPD interrupts selected by @intr_mask. */
void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
			u32 intr_mask, bool en)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);

	config = (en ? config | intr_mask : config & ~intr_mask);

	drm_dbg_dp(catalog->drm_dev, "intr_mask=%#x config=%#x\n",
					intr_mask, config);
	/* never write bits outside the valid HPD interrupt mask */
	dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
				config & DP_DP_HPD_INT_MASK);
}
 622
/* Enable HPD sensing: turn on the reference timer, then the HPD block. */
void dp_catalog_ctrl_hpd_enable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	/* Configure REFTIMER and enable it */
	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	/* Enable HPD */
	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
}
 637
/* Disable HPD sensing: stop the reference timer and clear the HPD control. */
void dp_catalog_ctrl_hpd_disable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	reftimer &= ~DP_DP_HPD_REFTIMER_ENABLE;
	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, 0);
}
 650
/* Pulse UPDATE_SDP to latch pending SDP (secondary data packet) config. */
static void dp_catalog_enable_sdp(struct dp_catalog_private *catalog)
{
	/* trigger sdp */
	dp_write_link(catalog, MMSS_DP_SDP_CFG3, UPDATE_SDP);
	dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x0);
}
 657
/* One-time PSR setup: advertise PSR1, unmask PSR interrupts, latch SDP config. */
void dp_catalog_ctrl_config_psr(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 config;

	/* enable PSR1 function */
	config = dp_read_link(catalog, REG_PSR_CONFIG);
	config |= PSR1_SUPPORTED;
	dp_write_link(catalog, REG_PSR_CONFIG, config);

	dp_write_ahb(catalog, REG_DP_INTR_MASK4, DP_INTERRUPT_MASK4);
	dp_catalog_enable_sdp(catalog);
}
 672
/* Command the controller to enter (@enter true) or exit PSR. */
void dp_catalog_ctrl_set_psr(struct dp_catalog *dp_catalog, bool enter)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
			struct dp_catalog_private, dp_catalog);
	u32 cmd;

	cmd = dp_read_link(catalog, REG_PSR_CMD);

	/* ENTER and EXIT are mutually exclusive; clear both first */
	cmd &= ~(PSR_ENTER | PSR_EXIT);

	if (enter)
		cmd |= PSR_ENTER;
	else
		cmd |= PSR_EXIT;

	dp_catalog_enable_sdp(catalog);
	dp_write_link(catalog, REG_PSR_CMD, cmd);
}
 691
/* Return the HPD state-status field extracted from the HPD interrupt register. */
u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 status;

	status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
	drm_dbg_dp(catalog->drm_dev, "aux status: %#x\n", status);
	status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
	status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;

	return status;
}
 705
 706u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
 707{
 708	struct dp_catalog_private *catalog = container_of(dp_catalog,
 709				struct dp_catalog_private, dp_catalog);
 710	int isr, mask;
 711
 712	isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
 713	dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
 714				 (isr & DP_DP_HPD_INT_MASK));
 715	mask = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
 716
 717	/*
 718	 * We only want to return interrupts that are unmasked to the caller.
 719	 * However, the interrupt status field also contains other
 720	 * informational bits about the HPD state status, so we only mask
 721	 * out the part of the register that tells us about which interrupts
 722	 * are pending.
 723	 */
 724	return isr & (mask | ~DP_DP_HPD_INT_MASK);
 725}
 726
/* Read and acknowledge pending PSR (status4) interrupts; returns the raw status. */
u32 dp_catalog_ctrl_read_psr_interrupt_status(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS4);
	/* ack bits sit one position left of their status bits */
	intr_ack = (intr & DP_INTERRUPT_STATUS4)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS4, intr_ack);

	return intr;
}
 740
/*
 * Read, acknowledge and return pending status2 interrupts. Mirrors
 * dp_catalog_aux_get_irq(): mask bits share the register with status bits,
 * so they are stripped from the return and re-asserted on the ack write.
 */
int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
	intr &= ~DP_INTERRUPT_STATUS2_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS2)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
			intr_ack | DP_INTERRUPT_STATUS2_MASK);

	return intr;
}
 756
/* Pulse the PHY and PLL software-reset bits with the h/w recommended hold time. */
void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_ahb(catalog, REG_DP_PHY_CTRL,
			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
	usleep_range(1000, 1100); /* h/w recommended delay */
	dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
}
 767
/*
 * Push voltage-swing (@v_level) and pre-emphasis (@p_level) settings to the
 * PHY via phy_configure(). Always returns 0.
 */
int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
		u8 v_level, u8 p_level)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	struct dp_io *dp_io = catalog->io;
	struct phy *phy = dp_io->phy;
	struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp;

	/* TODO: Update for all lanes instead of just first one */
	opts_dp->voltage[0] = v_level;
	opts_dp->pre[0] = p_level;
	/* set_voltages flags this configure call as a voltage update only */
	opts_dp->set_voltages = 1;
	phy_configure(phy, &dp_io->phy_opts);
	opts_dp->set_voltages = 0;

	return 0;
}
 786
/*
 * Start transmitting the requested PHY compliance test pattern.
 * Each case follows a h/w programming sequence; the register write order
 * within a case must be preserved.
 */
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
			u32 pattern)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	drm_dbg_dp(catalog->drm_dev, "pattern: %#x\n", pattern);
	switch (pattern) {
	case DP_PHY_TEST_PATTERN_D10_2:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		/* program scrambler reset count, then start error measurement */
		value &= ~(1 << 16);
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		/* scrambler must be active for CP2520: clear SW bypass first */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);

		value = DP_HBR2_ERM_PATTERN;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value |= DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
		break;
	case DP_PHY_TEST_PATTERN_SEL_MASK:
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
		break;
	default:
		drm_dbg_dp(catalog->drm_dev,
				"No valid test pattern requested: %#x\n", pattern);
		break;
	}
}
 863
/* Return the raw mainlink-ready register (used to verify the active pattern). */
u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	return dp_read_link(catalog, REG_DP_MAINLINK_READY);
}
 871
 872/* panel related catalog functions */
 873int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
 874{
 875	struct dp_catalog_private *catalog = container_of(dp_catalog,
 876				struct dp_catalog_private, dp_catalog);
 877	u32 reg;
 878
 879	dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
 880				dp_catalog->total);
 881	dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
 882				dp_catalog->sync_start);
 883	dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
 884				dp_catalog->width_blanking);
 885	dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
 886
 887	reg = dp_read_p0(catalog, MMSS_DP_INTF_CONFIG);
 888
 889	if (dp_catalog->wide_bus_en)
 890		reg |= DP_INTF_CONFIG_DATABUS_WIDEN;
 891	else
 892		reg &= ~DP_INTF_CONFIG_DATABUS_WIDEN;
 893
 894
 895	DRM_DEBUG_DP("wide_bus_en=%d reg=%#x\n", dp_catalog->wide_bus_en, reg);
 896
 897	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, reg);
 898	return 0;
 899}
 900
/*
 * Enable the test pattern generator (TPG) with timings derived from
 * @drm_mode, then start the timing engine. The TPG outputs a checkered
 * rectangle in 8-bit RGB.
 */
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters*/
	hsync_period = drm_mode->htotal;
	vsync_period = drm_mode->vtotal;

	/* vertical active window expressed in units of hsync periods (pixels) */
	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	/* shift start/end by the horizontal back/front porch */
	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	/* pack period into the high half, pulse width into the low half */
	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	display_hctl = (hsync_end_x << 16) | hsync_start_x;


	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	/* F1 (second field) timings unused: zeroed */
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	drm_dbg_dp(catalog->drm_dev, "%s: enabled tpg\n", __func__);
}
 968
/*
 * dp_catalog_panel_tpg_disable() - stop the internal test pattern
 * generator.
 * @dp_catalog: DP catalog handle
 *
 * Clears the TPG pattern selection, the BIST enable bit, and the
 * timing engine enable, undoing dp_catalog_panel_tpg_enable().  The
 * three MMIO writes are intentionally ordered: pattern first, timing
 * engine last.
 */
void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
}
 978
 979struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
 980{
 981	struct dp_catalog_private *catalog;
 982
 983	if (!io) {
 984		DRM_ERROR("invalid input\n");
 985		return ERR_PTR(-EINVAL);
 986	}
 987
 988	catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
 989	if (!catalog)
 990		return ERR_PTR(-ENOMEM);
 991
 992	catalog->dev = dev;
 993	catalog->io = io;
 994
 995	return &catalog->dp_catalog;
 996}
 997
 998void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
 999{
1000	struct dp_catalog_private *catalog;
1001	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1002	enum dp_catalog_audio_sdp_type sdp;
1003	enum dp_catalog_audio_header_type header;
1004
1005	if (!dp_catalog)
1006		return;
1007
1008	catalog = container_of(dp_catalog,
1009		struct dp_catalog_private, dp_catalog);
1010
1011	sdp_map = catalog->audio_map;
1012	sdp     = dp_catalog->sdp_type;
1013	header  = dp_catalog->sdp_header;
1014
1015	dp_catalog->audio_data = dp_read_link(catalog,
1016			sdp_map[sdp][header]);
1017}
1018
1019void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
1020{
1021	struct dp_catalog_private *catalog;
1022	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1023	enum dp_catalog_audio_sdp_type sdp;
1024	enum dp_catalog_audio_header_type header;
1025	u32 data;
1026
1027	if (!dp_catalog)
1028		return;
1029
1030	catalog = container_of(dp_catalog,
1031		struct dp_catalog_private, dp_catalog);
1032
1033	sdp_map = catalog->audio_map;
1034	sdp     = dp_catalog->sdp_type;
1035	header  = dp_catalog->sdp_header;
1036	data    = dp_catalog->audio_data;
1037
1038	dp_write_link(catalog, sdp_map[sdp][header], data);
1039}
1040
1041void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
1042{
1043	struct dp_catalog_private *catalog;
1044	u32 acr_ctrl, select;
1045
1046	if (!dp_catalog)
1047		return;
1048
1049	catalog = container_of(dp_catalog,
1050		struct dp_catalog_private, dp_catalog);
1051
1052	select = dp_catalog->audio_data;
1053	acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
1054
1055	drm_dbg_dp(catalog->drm_dev, "select: %#x, acr_ctrl: %#x\n",
1056					select, acr_ctrl);
1057
1058	dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
1059}
1060
1061void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
1062{
1063	struct dp_catalog_private *catalog;
1064	bool enable;
1065	u32 audio_ctrl;
1066
1067	if (!dp_catalog)
1068		return;
1069
1070	catalog = container_of(dp_catalog,
1071		struct dp_catalog_private, dp_catalog);
1072
1073	enable = !!dp_catalog->audio_data;
1074	audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
1075
1076	if (enable)
1077		audio_ctrl |= BIT(0);
1078	else
1079		audio_ctrl &= ~BIT(0);
1080
1081	drm_dbg_dp(catalog->drm_dev, "dp_audio_cfg = 0x%x\n", audio_ctrl);
1082
1083	dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
1084	/* make sure audio engine is disabled */
1085	wmb();
1086}
1087
1088void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
1089{
1090	struct dp_catalog_private *catalog;
1091	u32 sdp_cfg = 0;
1092	u32 sdp_cfg2 = 0;
1093
1094	if (!dp_catalog)
1095		return;
1096
1097	catalog = container_of(dp_catalog,
1098		struct dp_catalog_private, dp_catalog);
1099
1100	sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
1101	/* AUDIO_TIMESTAMP_SDP_EN */
1102	sdp_cfg |= BIT(1);
1103	/* AUDIO_STREAM_SDP_EN */
1104	sdp_cfg |= BIT(2);
1105	/* AUDIO_COPY_MANAGEMENT_SDP_EN */
1106	sdp_cfg |= BIT(5);
1107	/* AUDIO_ISRC_SDP_EN  */
1108	sdp_cfg |= BIT(6);
1109	/* AUDIO_INFOFRAME_SDP_EN  */
1110	sdp_cfg |= BIT(20);
1111
1112	drm_dbg_dp(catalog->drm_dev, "sdp_cfg = 0x%x\n", sdp_cfg);
1113
1114	dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
1115
1116	sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
1117	/* IFRM_REGSRC -> Do not use reg values */
1118	sdp_cfg2 &= ~BIT(0);
1119	/* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
1120	sdp_cfg2 &= ~BIT(1);
1121
1122	drm_dbg_dp(catalog->drm_dev, "sdp_cfg2 = 0x%x\n", sdp_cfg2);
1123
1124	dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
1125}
1126
/*
 * dp_catalog_audio_init() - install the audio SDP register lookup map.
 * @dp_catalog: DP catalog handle
 *
 * Points catalog->audio_map at a static table indexed as
 * [dp_catalog_audio_sdp_type][dp_catalog_audio_header_type], used by
 * dp_catalog_audio_get_header()/dp_catalog_audio_set_header() to
 * resolve the link register for a given SDP header field.
 */
void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;

	/*
	 * Per SDP type, the second and third header entries reference the
	 * same *_1 register — presumably both header fields are packed into
	 * that one register; verify against the register layout in dp_reg.h.
	 */
	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
		{
			MMSS_DP_AUDIO_STREAM_0,
			MMSS_DP_AUDIO_STREAM_1,
			MMSS_DP_AUDIO_STREAM_1,
		},
		{
			MMSS_DP_AUDIO_TIMESTAMP_0,
			MMSS_DP_AUDIO_TIMESTAMP_1,
			MMSS_DP_AUDIO_TIMESTAMP_1,
		},
		{
			MMSS_DP_AUDIO_INFOFRAME_0,
			MMSS_DP_AUDIO_INFOFRAME_1,
			MMSS_DP_AUDIO_INFOFRAME_1,
		},
		{
			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
		},
		{
			MMSS_DP_AUDIO_ISRC_0,
			MMSS_DP_AUDIO_ISRC_1,
			MMSS_DP_AUDIO_ISRC_1,
		},
	};

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	catalog->audio_map = sdp_map;
}
1167
1168void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
1169{
1170	struct dp_catalog_private *catalog;
1171	u32 mainlink_levels, safe_to_exit_level;
1172
1173	if (!dp_catalog)
1174		return;
1175
1176	catalog = container_of(dp_catalog,
1177		struct dp_catalog_private, dp_catalog);
1178
1179	safe_to_exit_level = dp_catalog->audio_data;
1180	mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1181	mainlink_levels &= 0xFE0;
1182	mainlink_levels |= safe_to_exit_level;
1183
1184	drm_dbg_dp(catalog->drm_dev,
1185			"mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1186			 mainlink_levels, safe_to_exit_level);
1187
1188	dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
1189}