Linux Audio

Check our new training course

Loading...
Note: File does not exist in v4.6.
   1// SPDX-License-Identifier: GPL-2.0-only
   2/*
   3 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
   4 */
   5
   6#define pr_fmt(fmt)	"[drm-dp] %s: " fmt, __func__
   7
   8#include <linux/delay.h>
   9#include <linux/iopoll.h>
  10#include <linux/platform_device.h>
  11#include <linux/rational.h>
  12#include <drm/display/drm_dp_helper.h>
  13#include <drm/drm_print.h>
  14
  15#include "dp_catalog.h"
  16#include "dp_reg.h"
  17
/* Poll interval/timeout (us) used when waiting on mainlink-ready bits */
#define POLLING_SLEEP_US			1000
#define POLLING_TIMEOUT_US			10000

/* Scrambler reset frequency for HBR2 compliance / error-measure patterns */
#define SCRAMBLER_RESET_COUNT_VALUE		0xFC

/*
 * In REG_DP_INTR_STATUS* each interrupt occupies a group of bits:
 * status bit, ack bit (status << 1) and mask bit (status << 2).
 */
#define DP_INTERRUPT_STATUS_ACK_SHIFT	1
#define DP_INTERRUPT_STATUS_MASK_SHIFT	2

/* MMSS_DP_INTF_CONFIG: enable the widened (2-pixel) data bus */
#define DP_INTF_CONFIG_DATABUS_WIDEN     BIT(4)

/* All AUX-channel and link interrupts handled via REG_DP_INTR_STATUS */
#define DP_INTERRUPT_STATUS1 \
	(DP_INTR_AUX_XFER_DONE| \
	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)

#define DP_INTERRUPT_STATUS1_ACK \
	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
#define DP_INTERRUPT_STATUS1_MASK \
	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)

/* Video/stream state interrupts handled via REG_DP_INTR_STATUS2 */
#define DP_INTERRUPT_STATUS2 \
	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)

#define DP_INTERRUPT_STATUS2_ACK \
	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
#define DP_INTERRUPT_STATUS2_MASK \
	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)

/* PSR interrupts handled via REG_DP_INTR_STATUS4 / REG_DP_INTR_MASK4 */
#define DP_INTERRUPT_STATUS4 \
	(PSR_UPDATE_INT | PSR_CAPTURE_INT | PSR_EXIT_INT | \
	PSR_UPDATE_ERROR_INT | PSR_WAKE_ERROR_INT)

#define DP_INTERRUPT_MASK4 \
	(PSR_UPDATE_MASK | PSR_CAPTURE_MASK | PSR_EXIT_MASK | \
	PSR_UPDATE_ERROR_MASK | PSR_WAKE_ERROR_MASK)

/* Default offsets/sizes of the four DP register sub-regions */
#define DP_DEFAULT_AHB_OFFSET	0x0000
#define DP_DEFAULT_AHB_SIZE	0x0200
#define DP_DEFAULT_AUX_OFFSET	0x0200
#define DP_DEFAULT_AUX_SIZE	0x0200
#define DP_DEFAULT_LINK_OFFSET	0x0400
#define DP_DEFAULT_LINK_SIZE	0x0C00
#define DP_DEFAULT_P0_OFFSET	0x1000
#define DP_DEFAULT_P0_SIZE	0x0400
/* One mapped register sub-region: base iomem pointer plus its length */
struct dss_io_region {
	size_t len;
	void __iomem *base;
};

/* The four DP controller register sub-regions (AHB, AUX, link, pixel 0) */
struct dss_io_data {
	struct dss_io_region ahb;
	struct dss_io_region aux;
	struct dss_io_region link;
	struct dss_io_region p0;
};

/* Private catalog state wrapping the public struct dp_catalog */
struct dp_catalog_private {
	struct device *dev;
	struct drm_device *drm_dev;
	struct dss_io_data io;
	/* per-stream audio SDP header programming table */
	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
	struct dp_catalog dp_catalog;
	/* currently-selected AUX PHY config LUT index per config register */
	u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
};
  86
/* Record all four DP register regions into a display snapshot for debugging */
void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
			struct dp_catalog_private, dp_catalog);
	struct dss_io_data *dss = &catalog->io;

	msm_disp_snapshot_add_block(disp_state, dss->ahb.len, dss->ahb.base, "dp_ahb");
	msm_disp_snapshot_add_block(disp_state, dss->aux.len, dss->aux.base, "dp_aux");
	msm_disp_snapshot_add_block(disp_state, dss->link.len, dss->link.base, "dp_link");
	msm_disp_snapshot_add_block(disp_state, dss->p0.len, dss->p0.base, "dp_p0");
}
  98
/* Read a register in the AUX sub-region (no ordering barrier needed) */
static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io.aux.base + offset);
}
 103
/* Write a register in the AUX sub-region */
static inline void dp_write_aux(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure aux reg writes happens before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io.aux.base + offset);
}
 113
/* Read a register in the AHB sub-region (no ordering barrier needed) */
static inline u32 dp_read_ahb(const struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io.ahb.base + offset);
}
 118
/* Write a register in the AHB sub-region */
static inline void dp_write_ahb(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure phy reg writes happens before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io.ahb.base + offset);
}
 128
/* Write a register in the pixel-stream 0 (interface) sub-region */
static inline void dp_write_p0(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure interface reg writes happens before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io.p0.base + offset);
}
 138
/* Read a register in the pixel-stream 0 (interface) sub-region */
static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
			       u32 offset)
{
	/* plain relaxed read; no ordering barrier needed on the read path */
	return readl_relaxed(catalog->io.p0.base + offset);
}
 148
/* Read a register in the link sub-region (no ordering barrier needed) */
static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io.link.base + offset);
}
 153
/* Write a register in the link sub-region */
static inline void dp_write_link(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure link reg writes happens before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io.link.base + offset);
}
 163
 164/* aux related catalog functions */
/* aux related catalog functions */

/* Return the next received byte/word from the AUX data FIFO */
u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	return dp_read_aux(catalog, REG_DP_AUX_DATA);
}
 172
/* Push dp_catalog->aux_data into the AUX data FIFO; always returns 0 */
int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
	return 0;
}
 181
/* Write dp_catalog->aux_data to the AUX transfer-control register; returns 0 */
int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
	return 0;
}
 190
 191int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
 192{
 193	u32 data;
 194	struct dp_catalog_private *catalog = container_of(dp_catalog,
 195				struct dp_catalog_private, dp_catalog);
 196
 197	if (read) {
 198		data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
 199		data &= ~DP_AUX_TRANS_CTRL_GO;
 200		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
 201	} else {
 202		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
 203	}
 204	return 0;
 205}
 206
/*
 * Clear latched AUX PHY interrupts: read current status, then run the
 * 0x1f -> 0x9f -> 0 clear sequence on the interrupt-clear register.
 * Always returns 0.
 */
int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
	return 0;
}
 218
/**
 * dp_catalog_aux_reset() - reset AUX controller
 *
 * @dp_catalog: DP catalog structure
 *
 * return: void
 *
 * This function reset AUX controller
 *
 * NOTE: reset AUX controller will also clear any pending HPD related interrupts
 */
void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	/* pulse the reset bit: assert, hold for ~1 ms, then deassert */
	aux_ctrl |= DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
	usleep_range(1000, 1100); /* h/w recommended delay */

	aux_ctrl &= ~DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
 246
 247void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
 248{
 249	u32 aux_ctrl;
 250	struct dp_catalog_private *catalog = container_of(dp_catalog,
 251				struct dp_catalog_private, dp_catalog);
 252
 253	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
 254
 255	if (enable) {
 256		dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
 257		dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
 258		aux_ctrl |= DP_AUX_CTRL_ENABLE;
 259	} else {
 260		aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
 261	}
 262
 263	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
 264}
 265
/*
 * Poll the HPD interrupt-status register until the CONNECTED state bit is
 * set, or until @wait_us elapses. Returns 0 on success or -ETIMEDOUT from
 * readl_poll_timeout() on timeout.
 */
int dp_catalog_aux_wait_for_hpd_connect_state(struct dp_catalog *dp_catalog,
					      unsigned long wait_us)
{
	u32 state;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* poll for hpd connected status every 2ms and timeout after wait_us */
	return readl_poll_timeout(catalog->io.aux.base +
				REG_DP_DP_HPD_INT_STATUS,
				state, state & DP_DP_HPD_STATE_STATUS_CONNECTED,
				min(wait_us, 2000), wait_us);
}
 279
 280static void dump_regs(void __iomem *base, int len)
 281{
 282	int i;
 283	u32 x0, x4, x8, xc;
 284	u32 addr_off = 0;
 285
 286	len = DIV_ROUND_UP(len, 16);
 287	for (i = 0; i < len; i++) {
 288		x0 = readl_relaxed(base + addr_off);
 289		x4 = readl_relaxed(base + addr_off + 0x04);
 290		x8 = readl_relaxed(base + addr_off + 0x08);
 291		xc = readl_relaxed(base + addr_off + 0x0c);
 292
 293		pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
 294		addr_off += 16;
 295	}
 296}
 297
/* Dump all four DP register regions (AHB, AUX, link, P0) to the kernel log */
void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);
	struct dss_io_data *io = &catalog->io;

	pr_info("AHB regs\n");
	dump_regs(io->ahb.base, io->ahb.len);

	pr_info("AUXCLK regs\n");
	dump_regs(io->aux.base, io->aux.len);

	pr_info("LCLK regs\n");
	dump_regs(io->link.base, io->link.len);

	pr_info("P0CLK regs\n");
	dump_regs(io->p0.base, io->p0.len);
}
 316
/*
 * Read, acknowledge and return the pending STATUS1 (AUX/link) interrupts.
 *
 * The mask bits live in the same register as the status bits (shifted by
 * DP_INTERRUPT_STATUS_MASK_SHIFT), so they are stripped from the returned
 * value, and the write-back sets the ack bits (status << ACK_SHIFT) while
 * re-asserting the mask bits to keep the interrupts enabled.
 */
u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
	intr &= ~DP_INTERRUPT_STATUS1_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS1)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
			DP_INTERRUPT_STATUS1_MASK);

	return intr;

}
 333
 334/* controller related catalog functions */
/* controller related catalog functions */

/* Program the transfer-unit size and the two valid-boundary registers */
void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
				u32 dp_tu, u32 valid_boundary,
				u32 valid_boundary2)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
	dp_write_link(catalog, REG_DP_TU, dp_tu);
	dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
}
 346
/* Write the mainlink state-control register (training pattern / video state) */
void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_STATE_CTRL, state);
}
 354
/* Write the mainlink configuration-control register with the given value */
void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	drm_dbg_dp(catalog->drm_dev, "DP_CONFIGURATION_CTRL=0x%x\n", cfg);

	dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
}
 364
 365void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
 366{
 367	struct dp_catalog_private *catalog = container_of(dp_catalog,
 368				struct dp_catalog_private, dp_catalog);
 369	u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
 370	u32 ln_mapping;
 371
 372	ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
 373	ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
 374	ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
 375	ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
 376
 377	dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
 378			ln_mapping);
 379}
 380
 381void dp_catalog_ctrl_psr_mainlink_enable(struct dp_catalog *dp_catalog,
 382						bool enable)
 383{
 384	u32 val;
 385	struct dp_catalog_private *catalog = container_of(dp_catalog,
 386				struct dp_catalog_private, dp_catalog);
 387
 388	val = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
 389
 390	if (enable)
 391		val |= DP_MAINLINK_CTRL_ENABLE;
 392	else
 393		val &= ~DP_MAINLINK_CTRL_ENABLE;
 394
 395	dp_write_link(catalog, REG_DP_MAINLINK_CTRL, val);
 396}
 397
/*
 * Enable or disable the mainlink.
 *
 * Enabling performs the h/w-required sequence in strict order: clear both
 * RESET and ENABLE, pulse RESET high then low, and finally set ENABLE plus
 * the framing-boundary select bit. Disabling only drops the ENABLE bit.
 * Each step must reach the hardware before the next, hence dp_write_link()
 * (writel) rather than relaxed writes.
 */
void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
						bool enable)
{
	u32 mainlink_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	drm_dbg_dp(catalog->drm_dev, "enable=%d\n", enable);
	if (enable) {
		/*
		 * To make sure link reg writes happens before other operation,
		 * dp_write_link() function uses writel()
		 */
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
						DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
					DP_MAINLINK_FB_BOUNDARY_SEL);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	} else {
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	}
}
 432
/*
 * Program the MISC0/MISC1 register: colorimetry and test-bit-depth fields,
 * with the clock forced to synchronous mode. Only the bit-depth field is
 * cleared before OR-ing; colorimetry bits are OR-ed onto the current value.
 */
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	drm_dbg_dp(catalog->drm_dev, "misc settings = 0x%x\n", misc_val);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}
 453
/*
 * Select the SDP flush mode in MAINLINK_CTRL based on controller revision:
 * v1.2+ uses the SDE-peripheral update mode, older parts the legacy
 * update-SDP mode.
 */
void dp_catalog_setup_peripheral_flush(struct dp_catalog *dp_catalog)
{
	u32 mainlink_ctrl, hw_revision;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

	hw_revision = dp_catalog_hw_revision(dp_catalog);
	if (hw_revision >= DP_HW_VERSION_1_2)
		mainlink_ctrl |= DP_MAINLINK_FLUSH_MODE_SDE_PERIPH_UPDATE;
	else
		mainlink_ctrl |= DP_MAINLINK_FLUSH_MODE_UPDATE_SDP;

	dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
}
 470
 471void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
 472					u32 rate, u32 stream_rate_khz,
 473					bool fixed_nvid, bool is_ycbcr_420)
 474{
 475	u32 pixel_m, pixel_n;
 476	u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
 477	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
 478	u32 const link_rate_hbr2 = 540000;
 479	u32 const link_rate_hbr3 = 810000;
 480	unsigned long den, num;
 481
 482	struct dp_catalog_private *catalog = container_of(dp_catalog,
 483				struct dp_catalog_private, dp_catalog);
 484
 485	if (rate == link_rate_hbr3)
 486		pixel_div = 6;
 487	else if (rate == 162000 || rate == 270000)
 488		pixel_div = 2;
 489	else if (rate == link_rate_hbr2)
 490		pixel_div = 4;
 491	else
 492		DRM_ERROR("Invalid pixel mux divider\n");
 493
 494	dispcc_input_rate = (rate * 10) / pixel_div;
 495
 496	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
 497			(unsigned long)(1 << 16) - 1,
 498			(unsigned long)(1 << 16) - 1, &den, &num);
 499
 500	den = ~(den - num);
 501	den = den & 0xFFFF;
 502	pixel_m = num;
 503	pixel_n = den;
 504
 505	mvid = (pixel_m & 0xFFFF) * 5;
 506	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
 507
 508	if (nvid < nvid_fixed) {
 509		u32 temp;
 510
 511		temp = (nvid_fixed / nvid) * nvid;
 512		mvid = (nvid_fixed / nvid) * mvid;
 513		nvid = temp;
 514	}
 515
 516	if (is_ycbcr_420)
 517		mvid /= 2;
 518
 519	if (link_rate_hbr2 == rate)
 520		nvid *= 2;
 521
 522	if (link_rate_hbr3 == rate)
 523		nvid *= 3;
 524
 525	drm_dbg_dp(catalog->drm_dev, "mvid=0x%x, nvid=0x%x\n", mvid, nvid);
 526	dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
 527	dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
 528	dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
 529}
 530
/*
 * Request link-training pattern @state_bit (1-based) via STATE_CTRL and
 * poll MAINLINK_READY for the corresponding ready bit.
 *
 * Returns 0 on success, negative errno if the ready bit never asserts
 * within POLLING_TIMEOUT_US.
 */
int dp_catalog_ctrl_set_pattern_state_bit(struct dp_catalog *dp_catalog,
					u32 state_bit)
{
	int bit, ret;
	u32 data;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	bit = BIT(state_bit - 1);
	drm_dbg_dp(catalog->drm_dev, "hw: bit=%d train=%d\n", bit, state_bit);
	dp_catalog_ctrl_state_ctrl(dp_catalog, bit);

	/* ready bit for the pattern sits at (pattern bit << shift) */
	bit = BIT(state_bit - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;

	/* Poll for mainlink ready status */
	ret = readx_poll_timeout(readl, catalog->io.link.base +
					REG_DP_MAINLINK_READY,
					data, data & bit,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("set state_bit for link_train=%d failed\n", state_bit);
		return ret;
	}
	return 0;
}
 556
/**
 * dp_catalog_hw_revision() - retrieve DP hw revision
 *
 * @dp_catalog: DP catalog structure
 *
 * Return: DP controller hw revision
 *
 */
u32 dp_catalog_hw_revision(const struct dp_catalog *dp_catalog)
{
	const struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	return dp_read_ahb(catalog, REG_DP_HW_VERSION);
}
 572
/**
 * dp_catalog_ctrl_reset() - reset DP controller
 *
 * @dp_catalog: DP catalog structure
 *
 * return: void
 *
 * This function reset the DP controller
 *
 * NOTE: reset DP controller will also clear any pending HPD related interrupts
 */
void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
{
	u32 sw_reset;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);

	/* pulse the soft-reset bit: assert, hold ~1 ms, deassert */
	sw_reset |= DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
	usleep_range(1000, 1100); /* h/w recommended delay */

	sw_reset &= ~DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
}
 600
/*
 * Poll MAINLINK_READY for the ready-for-video bit.
 * Returns true once set, false if POLLING_TIMEOUT_US expires.
 */
bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
{
	u32 data;
	int ret;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Poll for mainlink ready status */
	ret = readl_poll_timeout(catalog->io.link.base +
				REG_DP_MAINLINK_READY,
				data, data & DP_MAINLINK_READY_FOR_VIDEO,
				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("mainlink not ready\n");
		return false;
	}

	return true;
}
 620
 621void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
 622						bool enable)
 623{
 624	struct dp_catalog_private *catalog = container_of(dp_catalog,
 625				struct dp_catalog_private, dp_catalog);
 626
 627	if (enable) {
 628		dp_write_ahb(catalog, REG_DP_INTR_STATUS,
 629				DP_INTERRUPT_STATUS1_MASK);
 630		dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
 631				DP_INTERRUPT_STATUS2_MASK);
 632	} else {
 633		dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
 634		dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
 635	}
 636}
 637
 638void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
 639			u32 intr_mask, bool en)
 640{
 641	struct dp_catalog_private *catalog = container_of(dp_catalog,
 642				struct dp_catalog_private, dp_catalog);
 643
 644	u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
 645
 646	config = (en ? config | intr_mask : config & ~intr_mask);
 647
 648	drm_dbg_dp(catalog->drm_dev, "intr_mask=%#x config=%#x\n",
 649					intr_mask, config);
 650	dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
 651				config & DP_DP_HPD_INT_MASK);
 652}
 653
/* Enable the HPD block: turn on the reference timer, then HPD itself */
void dp_catalog_ctrl_hpd_enable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	/* Configure REFTIMER and enable it */
	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	/* Enable HPD */
	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
}
 668
/* Disable the HPD block: stop the reference timer and clear HPD control */
void dp_catalog_ctrl_hpd_disable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	reftimer &= ~DP_DP_HPD_REFTIMER_ENABLE;
	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, 0);
}
 681
/* Pulse UPDATE_SDP in SDP_CFG3 to latch new SDP contents into the stream */
static void dp_catalog_enable_sdp(struct dp_catalog_private *catalog)
{
	/* trigger sdp */
	dp_write_link(catalog, MMSS_DP_SDP_CFG3, UPDATE_SDP);
	dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x0);
}
 688
/* Advertise PSR1 support, unmask PSR interrupts and latch the SDP update */
void dp_catalog_ctrl_config_psr(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 config;

	/* enable PSR1 function */
	config = dp_read_link(catalog, REG_PSR_CONFIG);
	config |= PSR1_SUPPORTED;
	dp_write_link(catalog, REG_PSR_CONFIG, config);

	dp_write_ahb(catalog, REG_DP_INTR_MASK4, DP_INTERRUPT_MASK4);
	dp_catalog_enable_sdp(catalog);
}
 703
 704void dp_catalog_ctrl_set_psr(struct dp_catalog *dp_catalog, bool enter)
 705{
 706	struct dp_catalog_private *catalog = container_of(dp_catalog,
 707			struct dp_catalog_private, dp_catalog);
 708	u32 cmd;
 709
 710	cmd = dp_read_link(catalog, REG_PSR_CMD);
 711
 712	cmd &= ~(PSR_ENTER | PSR_EXIT);
 713
 714	if (enter)
 715		cmd |= PSR_ENTER;
 716	else
 717		cmd |= PSR_EXIT;
 718
 719	dp_catalog_enable_sdp(catalog);
 720	dp_write_link(catalog, REG_PSR_CMD, cmd);
 721}
 722
/* Return the HPD state-status field extracted from the HPD status register */
u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 status;

	status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
	drm_dbg_dp(catalog->drm_dev, "aux status: %#x\n", status);
	status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
	status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;

	return status;
}
 736
 737u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
 738{
 739	struct dp_catalog_private *catalog = container_of(dp_catalog,
 740				struct dp_catalog_private, dp_catalog);
 741	int isr, mask;
 742
 743	isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
 744	dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
 745				 (isr & DP_DP_HPD_INT_MASK));
 746	mask = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
 747
 748	/*
 749	 * We only want to return interrupts that are unmasked to the caller.
 750	 * However, the interrupt status field also contains other
 751	 * informational bits about the HPD state status, so we only mask
 752	 * out the part of the register that tells us about which interrupts
 753	 * are pending.
 754	 */
 755	return isr & (mask | ~DP_DP_HPD_INT_MASK);
 756}
 757
/* Read pending PSR interrupts from STATUS4, ack them, and return them */
u32 dp_catalog_ctrl_read_psr_interrupt_status(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS4);
	/* ack bits sit one position above their status bits */
	intr_ack = (intr & DP_INTERRUPT_STATUS4)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS4, intr_ack);

	return intr;
}
 771
/*
 * Read pending STATUS2 (video) interrupts, ack them while keeping the
 * mask bits asserted, and return the pending status (mask bits stripped).
 */
int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
	intr &= ~DP_INTERRUPT_STATUS2_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS2)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
			intr_ack | DP_INTERRUPT_STATUS2_MASK);

	return intr;
}
 787
/* Pulse the PHY and PLL soft-reset bits with the h/w recommended hold time */
void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_ahb(catalog, REG_DP_PHY_CTRL,
			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
	usleep_range(1000, 1100); /* h/w recommended delay */
	dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
}
 798
/*
 * Program the link to emit the requested PHY compliance test pattern.
 *
 * @pattern: one of the DP_PHY_TEST_PATTERN_* values from the DP spec.
 *
 * Each case performs the register sequence the pattern requires; the
 * current pattern is always cleared first via STATE_CTRL. Unknown values
 * are logged and ignored.
 */
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
			u32 pattern)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	drm_dbg_dp(catalog->drm_dev, "pattern: %#x\n", pattern);
	switch (pattern) {
	case DP_PHY_TEST_PATTERN_D10_2:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		/* write scrambler reset with count cleared, then with count */
		value &= ~(1 << 16);
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		/* disable scrambler bypass before starting the ERM pattern */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);

		value = DP_HBR2_ERM_PATTERN;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value |= DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
		break;
	case DP_PHY_TEST_PATTERN_SEL_MASK:
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
		break;
	default:
		drm_dbg_dp(catalog->drm_dev,
				"No valid test pattern requested: %#x\n", pattern);
		break;
	}
}
 875
/* Return MAINLINK_READY, which reflects the currently active PHY pattern */
u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	return dp_read_link(catalog, REG_DP_MAINLINK_READY);
}
 883
 884/* panel related catalog functions */
 885int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
 886{
 887	struct dp_catalog_private *catalog = container_of(dp_catalog,
 888				struct dp_catalog_private, dp_catalog);
 889	u32 reg;
 890
 891	dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
 892				dp_catalog->total);
 893	dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
 894				dp_catalog->sync_start);
 895	dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
 896				dp_catalog->width_blanking);
 897	dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
 898
 899	reg = dp_read_p0(catalog, MMSS_DP_INTF_CONFIG);
 900
 901	if (dp_catalog->wide_bus_en)
 902		reg |= DP_INTF_CONFIG_DATABUS_WIDEN;
 903	else
 904		reg &= ~DP_INTF_CONFIG_DATABUS_WIDEN;
 905
 906
 907	DRM_DEBUG_DP("wide_bus_en=%d reg=%#x\n", dp_catalog->wide_bus_en, reg);
 908
 909	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, reg);
 910	return 0;
 911}
 912
/*
 * Write a VSC SDP into the GENERIC0 packet registers: two header words
 * followed by the data bytes packed little-endian, four at a time.
 */
static void dp_catalog_panel_send_vsc_sdp(struct dp_catalog *dp_catalog, struct dp_sdp *vsc_sdp)
{
	struct dp_catalog_private *catalog;
	u32 header[2];
	u32 val;
	int i;

	catalog = container_of(dp_catalog, struct dp_catalog_private, dp_catalog);

	dp_utils_pack_sdp_header(&vsc_sdp->sdp_header, header);

	dp_write_link(catalog, MMSS_DP_GENERIC0_0, header[0]);
	dp_write_link(catalog, MMSS_DP_GENERIC0_1, header[1]);

	for (i = 0; i < sizeof(vsc_sdp->db); i += 4) {
		/* pack four payload bytes into one little-endian register word */
		val = ((vsc_sdp->db[i]) | (vsc_sdp->db[i + 1] << 8) | (vsc_sdp->db[i + 2] << 16) |
		       (vsc_sdp->db[i + 3] << 24));
		dp_write_link(catalog, MMSS_DP_GENERIC0_2 + i, val);
	}
}
 933
/*
 * Latch SDP contents on controllers in the [v1.0, v1.2) range, which need
 * an explicit SDP_CFG3 pulse; newer parts use the peripheral-flush path.
 */
static void dp_catalog_panel_update_sdp(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 hw_revision;

	catalog = container_of(dp_catalog, struct dp_catalog_private, dp_catalog);

	hw_revision = dp_catalog_hw_revision(dp_catalog);
	if (hw_revision < DP_HW_VERSION_1_2 && hw_revision >= DP_HW_VERSION_1_0) {
		dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x01);
		dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x00);
	}
}
 947
/*
 * Enable VSC SDP transmission: turn on GENERIC0 SDP, mark its size valid,
 * send the packet contents, flag VSC presence in MISC1 and latch the update.
 */
void dp_catalog_panel_enable_vsc_sdp(struct dp_catalog *dp_catalog, struct dp_sdp *vsc_sdp)
{
	struct dp_catalog_private *catalog;
	u32 cfg, cfg2, misc;

	catalog = container_of(dp_catalog, struct dp_catalog_private, dp_catalog);

	cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
	cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
	misc = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	cfg |= GEN0_SDP_EN;
	dp_write_link(catalog, MMSS_DP_SDP_CFG, cfg);

	cfg2 |= GENERIC0_SDPSIZE_VALID;
	dp_write_link(catalog, MMSS_DP_SDP_CFG2, cfg2);

	dp_catalog_panel_send_vsc_sdp(dp_catalog, vsc_sdp);

	/* indicates presence of VSC (BIT(6) of MISC1) */
	misc |= DP_MISC1_VSC_SDP;

	drm_dbg_dp(catalog->drm_dev, "vsc sdp enable=1\n");

	pr_debug("misc settings = 0x%x\n", misc);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc);

	dp_catalog_panel_update_sdp(dp_catalog);
}
 977
 978void dp_catalog_panel_disable_vsc_sdp(struct dp_catalog *dp_catalog)
 979{
 980	struct dp_catalog_private *catalog;
 981	u32 cfg, cfg2, misc;
 982
 983	catalog = container_of(dp_catalog, struct dp_catalog_private, dp_catalog);
 984
 985	cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
 986	cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
 987	misc = dp_read_link(catalog, REG_DP_MISC1_MISC0);
 988
 989	cfg &= ~GEN0_SDP_EN;
 990	dp_write_link(catalog, MMSS_DP_SDP_CFG, cfg);
 991
 992	cfg2 &= ~GENERIC0_SDPSIZE_VALID;
 993	dp_write_link(catalog, MMSS_DP_SDP_CFG2, cfg2);
 994
 995	/* switch back to MSA */
 996	misc &= ~DP_MISC1_VSC_SDP;
 997
 998	drm_dbg_dp(catalog->drm_dev, "vsc sdp enable=0\n");
 999
1000	pr_debug("misc settings = 0x%x\n", misc);
1001	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc);
1002
1003	dp_catalog_panel_update_sdp(dp_catalog);
1004}
1005
/*
 * dp_catalog_panel_tpg_enable() - enable the built-in test pattern generator.
 * @dp_catalog: catalog handle
 * @drm_mode: display mode whose timings the generated pattern must match
 *
 * Derives the interface timing parameters from @drm_mode, programs the P0
 * timing-engine registers, then enables a checkered-rectangle test pattern
 * (8bpp RGB) via the BIST engine.  The register write order below is the
 * hardware programming sequence — do not reorder.
 */
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters*/
	hsync_period = drm_mode->htotal;
	vsync_period = drm_mode->vtotal;

	/*
	 * Vertical active start/end expressed in pixels from frame start:
	 * (lines before active) * hsync_period, then adjusted below by the
	 * horizontal back-porch offset.
	 */
	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	/* horizontal active window within a line, in pixels */
	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	/* HSYNC_CTL: period in the high half-word, pulse width in the low */
	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	display_hctl = (hsync_end_x << 16) | hsync_start_x;


	/* Program the interface timing; F1 (second field) registers are
	 * zeroed — only progressive field 0 timings are used here. */
	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	/* Select the pattern, enable the BIST engine, then start the
	 * timing engine last. */
	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	drm_dbg_dp(catalog->drm_dev, "%s: enabled tpg\n", __func__);
}
1073
1074void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
1075{
1076	struct dp_catalog_private *catalog = container_of(dp_catalog,
1077				struct dp_catalog_private, dp_catalog);
1078
1079	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
1080	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
1081	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
1082}
1083
1084static void __iomem *dp_ioremap(struct platform_device *pdev, int idx, size_t *len)
1085{
1086	struct resource *res;
1087	void __iomem *base;
1088
1089	base = devm_platform_get_and_ioremap_resource(pdev, idx, &res);
1090	if (!IS_ERR(base))
1091		*len = resource_size(res);
1092
1093	return base;
1094}
1095
/*
 * dp_catalog_get_io() - map the DP controller register sub-regions.
 * @catalog: private catalog state; fills in catalog->io (ahb/aux/link/p0).
 *
 * Supports two DT bindings: the current one with four separate reg entries
 * (ahb, aux, link, p0), and the legacy one with a single reg covering all
 * sub-regions, which is carved up using the DP_DEFAULT_* offsets/sizes.
 *
 * Return: 0 on success, negative errno on mapping failure or if the legacy
 * region is too small to contain all sub-regions.
 */
static int dp_catalog_get_io(struct dp_catalog_private *catalog)
{
	struct platform_device *pdev = to_platform_device(catalog->dev);
	struct dss_io_data *dss = &catalog->io;

	dss->ahb.base = dp_ioremap(pdev, 0, &dss->ahb.len);
	if (IS_ERR(dss->ahb.base))
		return PTR_ERR(dss->ahb.base);

	dss->aux.base = dp_ioremap(pdev, 1, &dss->aux.len);
	if (IS_ERR(dss->aux.base)) {
		/*
		 * The initial binding had a single reg, but in order to
		 * support variation in the sub-region sizes this was split.
		 * dp_ioremap() will fail with -EINVAL here if only a single
		 * reg is specified, so fill in the sub-region offsets and
		 * lengths based on this single region.
		 */
		if (PTR_ERR(dss->aux.base) == -EINVAL) {
			/* p0 is the last sub-region, so its end bounds the
			 * minimum size the single legacy region must have. */
			if (dss->ahb.len < DP_DEFAULT_P0_OFFSET + DP_DEFAULT_P0_SIZE) {
				DRM_ERROR("legacy memory region not large enough\n");
				return -EINVAL;
			}

			/* shrink ahb to its own window; derive the rest from
			 * fixed offsets into the single mapping */
			dss->ahb.len = DP_DEFAULT_AHB_SIZE;
			dss->aux.base = dss->ahb.base + DP_DEFAULT_AUX_OFFSET;
			dss->aux.len = DP_DEFAULT_AUX_SIZE;
			dss->link.base = dss->ahb.base + DP_DEFAULT_LINK_OFFSET;
			dss->link.len = DP_DEFAULT_LINK_SIZE;
			dss->p0.base = dss->ahb.base + DP_DEFAULT_P0_OFFSET;
			dss->p0.len = DP_DEFAULT_P0_SIZE;
		} else {
			DRM_ERROR("unable to remap aux region: %pe\n", dss->aux.base);
			return PTR_ERR(dss->aux.base);
		}
	} else {
		/* modern binding: link and p0 have their own reg entries */
		dss->link.base = dp_ioremap(pdev, 2, &dss->link.len);
		if (IS_ERR(dss->link.base)) {
			DRM_ERROR("unable to remap link region: %pe\n", dss->link.base);
			return PTR_ERR(dss->link.base);
		}

		dss->p0.base = dp_ioremap(pdev, 3, &dss->p0.len);
		if (IS_ERR(dss->p0.base)) {
			DRM_ERROR("unable to remap p0 region: %pe\n", dss->p0.base);
			return PTR_ERR(dss->p0.base);
		}
	}

	return 0;
}
1147
1148struct dp_catalog *dp_catalog_get(struct device *dev)
1149{
1150	struct dp_catalog_private *catalog;
1151	int ret;
1152
1153	catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
1154	if (!catalog)
1155		return ERR_PTR(-ENOMEM);
1156
1157	catalog->dev = dev;
1158
1159	ret = dp_catalog_get_io(catalog);
1160	if (ret)
1161		return ERR_PTR(ret);
1162
1163	return &catalog->dp_catalog;
1164}
1165
1166void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
1167{
1168	struct dp_catalog_private *catalog;
1169	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1170	enum dp_catalog_audio_sdp_type sdp;
1171	enum dp_catalog_audio_header_type header;
1172
1173	if (!dp_catalog)
1174		return;
1175
1176	catalog = container_of(dp_catalog,
1177		struct dp_catalog_private, dp_catalog);
1178
1179	sdp_map = catalog->audio_map;
1180	sdp     = dp_catalog->sdp_type;
1181	header  = dp_catalog->sdp_header;
1182
1183	dp_catalog->audio_data = dp_read_link(catalog,
1184			sdp_map[sdp][header]);
1185}
1186
1187void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
1188{
1189	struct dp_catalog_private *catalog;
1190	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1191	enum dp_catalog_audio_sdp_type sdp;
1192	enum dp_catalog_audio_header_type header;
1193	u32 data;
1194
1195	if (!dp_catalog)
1196		return;
1197
1198	catalog = container_of(dp_catalog,
1199		struct dp_catalog_private, dp_catalog);
1200
1201	sdp_map = catalog->audio_map;
1202	sdp     = dp_catalog->sdp_type;
1203	header  = dp_catalog->sdp_header;
1204	data    = dp_catalog->audio_data;
1205
1206	dp_write_link(catalog, sdp_map[sdp][header], data);
1207}
1208
1209void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
1210{
1211	struct dp_catalog_private *catalog;
1212	u32 acr_ctrl, select;
1213
1214	if (!dp_catalog)
1215		return;
1216
1217	catalog = container_of(dp_catalog,
1218		struct dp_catalog_private, dp_catalog);
1219
1220	select = dp_catalog->audio_data;
1221	acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
1222
1223	drm_dbg_dp(catalog->drm_dev, "select: %#x, acr_ctrl: %#x\n",
1224					select, acr_ctrl);
1225
1226	dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
1227}
1228
/*
 * dp_catalog_audio_enable() - enable or disable the audio engine.
 *
 * dp_catalog->audio_data carries the requested state (nonzero = enable);
 * only bit 0 of MMSS_DP_AUDIO_CFG is toggled, other bits are preserved.
 */
void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	bool enable;
	u32 audio_ctrl;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	enable = !!dp_catalog->audio_data;
	audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);

	if (enable)
		audio_ctrl |= BIT(0);
	else
		audio_ctrl &= ~BIT(0);

	drm_dbg_dp(catalog->drm_dev, "dp_audio_cfg = 0x%x\n", audio_ctrl);

	dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
	/* make sure the AUDIO_CFG write is posted before continuing */
	wmb();
}
1255
1256void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
1257{
1258	struct dp_catalog_private *catalog;
1259	u32 sdp_cfg = 0;
1260	u32 sdp_cfg2 = 0;
1261
1262	if (!dp_catalog)
1263		return;
1264
1265	catalog = container_of(dp_catalog,
1266		struct dp_catalog_private, dp_catalog);
1267
1268	sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
1269	/* AUDIO_TIMESTAMP_SDP_EN */
1270	sdp_cfg |= BIT(1);
1271	/* AUDIO_STREAM_SDP_EN */
1272	sdp_cfg |= BIT(2);
1273	/* AUDIO_COPY_MANAGEMENT_SDP_EN */
1274	sdp_cfg |= BIT(5);
1275	/* AUDIO_ISRC_SDP_EN  */
1276	sdp_cfg |= BIT(6);
1277	/* AUDIO_INFOFRAME_SDP_EN  */
1278	sdp_cfg |= BIT(20);
1279
1280	drm_dbg_dp(catalog->drm_dev, "sdp_cfg = 0x%x\n", sdp_cfg);
1281
1282	dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
1283
1284	sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
1285	/* IFRM_REGSRC -> Do not use reg values */
1286	sdp_cfg2 &= ~BIT(0);
1287	/* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
1288	sdp_cfg2 &= ~BIT(1);
1289
1290	drm_dbg_dp(catalog->drm_dev, "sdp_cfg2 = 0x%x\n", sdp_cfg2);
1291
1292	dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
1293}
1294
/*
 * dp_catalog_audio_init() - install the audio SDP header register map.
 *
 * The table is indexed first by SDP type (stream, timestamp, infoframe,
 * copy-management, ISRC) and then by header index.  Header indices 1 and 2
 * both map to the same *_1 register — NOTE(review): presumably two header
 * bytes share one register; confirm against the register layout.
 */
void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;

	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
		{
			MMSS_DP_AUDIO_STREAM_0,
			MMSS_DP_AUDIO_STREAM_1,
			MMSS_DP_AUDIO_STREAM_1,
		},
		{
			MMSS_DP_AUDIO_TIMESTAMP_0,
			MMSS_DP_AUDIO_TIMESTAMP_1,
			MMSS_DP_AUDIO_TIMESTAMP_1,
		},
		{
			MMSS_DP_AUDIO_INFOFRAME_0,
			MMSS_DP_AUDIO_INFOFRAME_1,
			MMSS_DP_AUDIO_INFOFRAME_1,
		},
		{
			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
		},
		{
			MMSS_DP_AUDIO_ISRC_0,
			MMSS_DP_AUDIO_ISRC_1,
			MMSS_DP_AUDIO_ISRC_1,
		},
	};

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	catalog->audio_map = sdp_map;
}
1335
1336void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
1337{
1338	struct dp_catalog_private *catalog;
1339	u32 mainlink_levels, safe_to_exit_level;
1340
1341	if (!dp_catalog)
1342		return;
1343
1344	catalog = container_of(dp_catalog,
1345		struct dp_catalog_private, dp_catalog);
1346
1347	safe_to_exit_level = dp_catalog->audio_data;
1348	mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1349	mainlink_levels &= 0xFE0;
1350	mainlink_levels |= safe_to_exit_level;
1351
1352	drm_dbg_dp(catalog->drm_dev,
1353			"mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1354			 mainlink_levels, safe_to_exit_level);
1355
1356	dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
1357}