   1// SPDX-License-Identifier: GPL-2.0-only
   2/*
   3 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
   4 */
   5
   6#define pr_fmt(fmt)	"[drm-dp] %s: " fmt, __func__
   7
   8#include <linux/delay.h>
   9#include <linux/iopoll.h>
  10#include <linux/platform_device.h>
  11#include <linux/rational.h>
  12#include <drm/display/drm_dp_helper.h>
  13#include <drm/drm_print.h>
  14
  15#include "dp_catalog.h"
  16#include "dp_reg.h"
  17
  18#define POLLING_SLEEP_US			1000
  19#define POLLING_TIMEOUT_US			10000
  20
  21#define SCRAMBLER_RESET_COUNT_VALUE		0xFC
  22
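/*
 * In REG_DP_INTR_STATUS and REG_DP_INTR_STATUS2 each interrupt source
 * occupies a small group of bits: the raw status bit, an ack bit one
 * position above it and an enable/mask bit two positions above it, which
 * is why the *_ACK and *_MASK values below are simple shifts of the
 * status masks (see msm_dp_catalog_aux_get_irq()).
 */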
  23#define DP_INTERRUPT_STATUS_ACK_SHIFT	1
  24#define DP_INTERRUPT_STATUS_MASK_SHIFT	2
  25
  26#define DP_INTF_CONFIG_DATABUS_WIDEN     BIT(4)
  27
  28#define DP_INTERRUPT_STATUS1 \
   29	(DP_INTR_AUX_XFER_DONE | \
  30	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
  31	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
  32	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
  33	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
  34
  35#define DP_INTERRUPT_STATUS1_ACK \
  36	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
  37#define DP_INTERRUPT_STATUS1_MASK \
  38	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
  39
  40#define DP_INTERRUPT_STATUS2 \
  41	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
  42	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
  43
  44#define DP_INTERRUPT_STATUS2_ACK \
  45	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
  46#define DP_INTERRUPT_STATUS2_MASK \
  47	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
  48
  49#define DP_INTERRUPT_STATUS4 \
  50	(PSR_UPDATE_INT | PSR_CAPTURE_INT | PSR_EXIT_INT | \
  51	PSR_UPDATE_ERROR_INT | PSR_WAKE_ERROR_INT)
  52
  53#define DP_INTERRUPT_MASK4 \
  54	(PSR_UPDATE_MASK | PSR_CAPTURE_MASK | PSR_EXIT_MASK | \
  55	PSR_UPDATE_ERROR_MASK | PSR_WAKE_ERROR_MASK)
  56
  57#define DP_DEFAULT_AHB_OFFSET	0x0000
  58#define DP_DEFAULT_AHB_SIZE	0x0200
  59#define DP_DEFAULT_AUX_OFFSET	0x0200
  60#define DP_DEFAULT_AUX_SIZE	0x0200
  61#define DP_DEFAULT_LINK_OFFSET	0x0400
  62#define DP_DEFAULT_LINK_SIZE	0x0C00
  63#define DP_DEFAULT_P0_OFFSET	0x1000
  64#define DP_DEFAULT_P0_SIZE	0x0400
  65
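/*
 * The DP controller register space is split into four MMIO sub-regions:
 * AHB (top-level control/interrupts), AUX, link and P0 (pixel/interface).
 * They are normally described as separate "reg" entries, but can also be
 * carved out of a single legacy region using the DP_DEFAULT_* offsets
 * above (see msm_dp_catalog_get_io()).
 */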
  66struct dss_io_region {
  67	size_t len;
  68	void __iomem *base;
  69};
  70
  71struct dss_io_data {
  72	struct dss_io_region ahb;
  73	struct dss_io_region aux;
  74	struct dss_io_region link;
  75	struct dss_io_region p0;
  76};
  77
  78struct msm_dp_catalog_private {
  79	struct device *dev;
  80	struct drm_device *drm_dev;
  81	struct dss_io_data io;
  82	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
  83	struct msm_dp_catalog msm_dp_catalog;
  84};
  85
  86void msm_dp_catalog_snapshot(struct msm_dp_catalog *msm_dp_catalog, struct msm_disp_state *disp_state)
  87{
  88	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
  89			struct msm_dp_catalog_private, msm_dp_catalog);
  90	struct dss_io_data *dss = &catalog->io;
  91
  92	msm_disp_snapshot_add_block(disp_state, dss->ahb.len, dss->ahb.base, "dp_ahb");
  93	msm_disp_snapshot_add_block(disp_state, dss->aux.len, dss->aux.base, "dp_aux");
  94	msm_disp_snapshot_add_block(disp_state, dss->link.len, dss->link.base, "dp_link");
  95	msm_disp_snapshot_add_block(disp_state, dss->p0.len, dss->p0.base, "dp_p0");
  96}
  97
  98static inline u32 msm_dp_read_aux(struct msm_dp_catalog_private *catalog, u32 offset)
  99{
 100	return readl_relaxed(catalog->io.aux.base + offset);
 101}
 102
 103static inline void msm_dp_write_aux(struct msm_dp_catalog_private *catalog,
 104			       u32 offset, u32 data)
 105{
 106	/*
  107	 * To make sure AUX register writes happen before any other operation,
  108	 * this function uses writel() instead of writel_relaxed().
 109	 */
 110	writel(data, catalog->io.aux.base + offset);
 111}
 112
 113static inline u32 msm_dp_read_ahb(const struct msm_dp_catalog_private *catalog, u32 offset)
 114{
 115	return readl_relaxed(catalog->io.ahb.base + offset);
 116}
 117
 118static inline void msm_dp_write_ahb(struct msm_dp_catalog_private *catalog,
 119			       u32 offset, u32 data)
 120{
 121	/*
  122	 * To make sure AHB register writes happen before any other operation,
  123	 * this function uses writel() instead of writel_relaxed().
 124	 */
 125	writel(data, catalog->io.ahb.base + offset);
 126}
 127
 128static inline void msm_dp_write_p0(struct msm_dp_catalog_private *catalog,
 129			       u32 offset, u32 data)
 130{
 131	/*
  132	 * To make sure interface register writes happen before any other operation,
  133	 * this function uses writel() instead of writel_relaxed().
 134	 */
 135	writel(data, catalog->io.p0.base + offset);
 136}
 137
 138static inline u32 msm_dp_read_p0(struct msm_dp_catalog_private *catalog,
 139			       u32 offset)
 140{
  141	/*
  142	 * Plain read of the interface (p0) region; unlike the write helper
  143	 * above, no ordering barrier is needed, so readl_relaxed() is used.
  144	 */
 145	return readl_relaxed(catalog->io.p0.base + offset);
 146}
 147
 148static inline u32 msm_dp_read_link(struct msm_dp_catalog_private *catalog, u32 offset)
 149{
 150	return readl_relaxed(catalog->io.link.base + offset);
 151}
 152
 153static inline void msm_dp_write_link(struct msm_dp_catalog_private *catalog,
 154			       u32 offset, u32 data)
 155{
 156	/*
  157	 * To make sure link register writes happen before any other operation,
  158	 * this function uses writel() instead of writel_relaxed().
 159	 */
 160	writel(data, catalog->io.link.base + offset);
 161}
 162
 163/* aux related catalog functions */
 164u32 msm_dp_catalog_aux_read_data(struct msm_dp_catalog *msm_dp_catalog)
 165{
 166	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 167				struct msm_dp_catalog_private, msm_dp_catalog);
 168
 169	return msm_dp_read_aux(catalog, REG_DP_AUX_DATA);
 170}
 171
 172int msm_dp_catalog_aux_write_data(struct msm_dp_catalog *msm_dp_catalog, u32 data)
 173{
 174	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 175				struct msm_dp_catalog_private, msm_dp_catalog);
 176
 177	msm_dp_write_aux(catalog, REG_DP_AUX_DATA, data);
 178	return 0;
 179}
 180
 181int msm_dp_catalog_aux_write_trans(struct msm_dp_catalog *msm_dp_catalog, u32 data)
 182{
 183	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 184				struct msm_dp_catalog_private, msm_dp_catalog);
 185
 186	msm_dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
 187	return 0;
 188}
 189
 190int msm_dp_catalog_aux_clear_trans(struct msm_dp_catalog *msm_dp_catalog, bool read)
 191{
 192	u32 data;
 193	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 194				struct msm_dp_catalog_private, msm_dp_catalog);
 195
 196	if (read) {
 197		data = msm_dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
 198		data &= ~DP_AUX_TRANS_CTRL_GO;
 199		msm_dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
 200	} else {
 201		msm_dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
 202	}
 203	return 0;
 204}
 205
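/*
 * Clear latched AUX PHY error interrupts: read the status register to
 * sample it, then pulse the clear bits (the 0x1f / 0x9f / 0 write sequence
 * is presumably what the PHY programming sequence for this block requires).
 */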
 206int msm_dp_catalog_aux_clear_hw_interrupts(struct msm_dp_catalog *msm_dp_catalog)
 207{
 208	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 209				struct msm_dp_catalog_private, msm_dp_catalog);
 210
 211	msm_dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
 212	msm_dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
 213	msm_dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
 214	msm_dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
 215	return 0;
 216}
 217
  218/**
  219 * msm_dp_catalog_aux_reset() - reset the AUX controller
  220 *
  221 * @msm_dp_catalog: DP catalog structure
  222 *
  223 * Return: void
  224 *
  225 * This function resets the AUX controller.
  226 *
  227 * NOTE: resetting the AUX controller will also clear any pending HPD
  228 * related interrupts.
  229 */
 230void msm_dp_catalog_aux_reset(struct msm_dp_catalog *msm_dp_catalog)
 231{
 232	u32 aux_ctrl;
 233	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 234				struct msm_dp_catalog_private, msm_dp_catalog);
 235
 236	aux_ctrl = msm_dp_read_aux(catalog, REG_DP_AUX_CTRL);
 237
 238	aux_ctrl |= DP_AUX_CTRL_RESET;
 239	msm_dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
 240	usleep_range(1000, 1100); /* h/w recommended delay */
 241
 242	aux_ctrl &= ~DP_AUX_CTRL_RESET;
 243	msm_dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
 244}
 245
 246void msm_dp_catalog_aux_enable(struct msm_dp_catalog *msm_dp_catalog, bool enable)
 247{
 248	u32 aux_ctrl;
 249	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 250				struct msm_dp_catalog_private, msm_dp_catalog);
 251
 252	aux_ctrl = msm_dp_read_aux(catalog, REG_DP_AUX_CTRL);
 253
 254	if (enable) {
 255		msm_dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
 256		msm_dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
 257		aux_ctrl |= DP_AUX_CTRL_ENABLE;
 258	} else {
 259		aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
 260	}
 261
 262	msm_dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
 263}
 264
 265int msm_dp_catalog_aux_wait_for_hpd_connect_state(struct msm_dp_catalog *msm_dp_catalog,
 266					      unsigned long wait_us)
 267{
 268	u32 state;
 269	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 270				struct msm_dp_catalog_private, msm_dp_catalog);
 271
 272	/* poll for hpd connected status every 2ms and timeout after wait_us */
 273	return readl_poll_timeout(catalog->io.aux.base +
 274				REG_DP_DP_HPD_INT_STATUS,
 275				state, state & DP_DP_HPD_STATE_STATUS_CONNECTED,
 276				min(wait_us, 2000), wait_us);
 277}
 278
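/* Hex-dump a register region, four 32-bit words (16 bytes) per line. */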
 279static void dump_regs(void __iomem *base, int len)
 280{
 281	int i;
 282	u32 x0, x4, x8, xc;
 283	u32 addr_off = 0;
 284
 285	len = DIV_ROUND_UP(len, 16);
 286	for (i = 0; i < len; i++) {
 287		x0 = readl_relaxed(base + addr_off);
 288		x4 = readl_relaxed(base + addr_off + 0x04);
 289		x8 = readl_relaxed(base + addr_off + 0x08);
 290		xc = readl_relaxed(base + addr_off + 0x0c);
 291
  292		pr_info("%08x: %08x %08x %08x %08x\n", addr_off, x0, x4, x8, xc);
 293		addr_off += 16;
 294	}
 295}
 296
 297void msm_dp_catalog_dump_regs(struct msm_dp_catalog *msm_dp_catalog)
 298{
 299	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 300		struct msm_dp_catalog_private, msm_dp_catalog);
 301	struct dss_io_data *io = &catalog->io;
 302
 303	pr_info("AHB regs\n");
 304	dump_regs(io->ahb.base, io->ahb.len);
 305
 306	pr_info("AUXCLK regs\n");
 307	dump_regs(io->aux.base, io->aux.len);
 308
 309	pr_info("LCLK regs\n");
 310	dump_regs(io->link.base, io->link.len);
 311
 312	pr_info("P0CLK regs\n");
 313	dump_regs(io->p0.base, io->p0.len);
 314}
 315
 316u32 msm_dp_catalog_aux_get_irq(struct msm_dp_catalog *msm_dp_catalog)
 317{
 318	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 319				struct msm_dp_catalog_private, msm_dp_catalog);
 320	u32 intr, intr_ack;
 321
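	/*
	 * Strip the enable bits from the raw status, ack the pending
	 * interrupts (status << DP_INTERRUPT_STATUS_ACK_SHIFT) while keeping
	 * DP_INTERRUPT_STATUS1_MASK set so they stay enabled, and return the
	 * pending bits to the caller.
	 */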
 322	intr = msm_dp_read_ahb(catalog, REG_DP_INTR_STATUS);
 323	intr &= ~DP_INTERRUPT_STATUS1_MASK;
 324	intr_ack = (intr & DP_INTERRUPT_STATUS1)
 325			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
 326	msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
 327			DP_INTERRUPT_STATUS1_MASK);
 328
 329	return intr;
 330
 331}
 332
 333/* controller related catalog functions */
 334void msm_dp_catalog_ctrl_update_transfer_unit(struct msm_dp_catalog *msm_dp_catalog,
 335				u32 msm_dp_tu, u32 valid_boundary,
 336				u32 valid_boundary2)
 337{
 338	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 339				struct msm_dp_catalog_private, msm_dp_catalog);
 340
 341	msm_dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
 342	msm_dp_write_link(catalog, REG_DP_TU, msm_dp_tu);
 343	msm_dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
 344}
 345
 346void msm_dp_catalog_ctrl_state_ctrl(struct msm_dp_catalog *msm_dp_catalog, u32 state)
 347{
 348	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 349				struct msm_dp_catalog_private, msm_dp_catalog);
 350
 351	msm_dp_write_link(catalog, REG_DP_STATE_CTRL, state);
 352}
 353
 354void msm_dp_catalog_ctrl_config_ctrl(struct msm_dp_catalog *msm_dp_catalog, u32 cfg)
 355{
 356	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 357				struct msm_dp_catalog_private, msm_dp_catalog);
 358
 359	drm_dbg_dp(catalog->drm_dev, "DP_CONFIGURATION_CTRL=0x%x\n", cfg);
 360
 361	msm_dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
 362}
 363
 364void msm_dp_catalog_ctrl_lane_mapping(struct msm_dp_catalog *msm_dp_catalog)
 365{
 366	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 367				struct msm_dp_catalog_private, msm_dp_catalog);
 368	u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
 369	u32 ln_mapping;
 370
 371	ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
 372	ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
 373	ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
 374	ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
 375
 376	msm_dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
 377			ln_mapping);
 378}
 379
 380void msm_dp_catalog_ctrl_psr_mainlink_enable(struct msm_dp_catalog *msm_dp_catalog,
 381						bool enable)
 382{
 383	u32 val;
 384	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 385				struct msm_dp_catalog_private, msm_dp_catalog);
 386
 387	val = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
 388
 389	if (enable)
 390		val |= DP_MAINLINK_CTRL_ENABLE;
 391	else
 392		val &= ~DP_MAINLINK_CTRL_ENABLE;
 393
 394	msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, val);
 395}
 396
 397void msm_dp_catalog_ctrl_mainlink_ctrl(struct msm_dp_catalog *msm_dp_catalog,
 398						bool enable)
 399{
 400	u32 mainlink_ctrl;
 401	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 402				struct msm_dp_catalog_private, msm_dp_catalog);
 403
 404	drm_dbg_dp(catalog->drm_dev, "enable=%d\n", enable);
 405	if (enable) {
 406		/*
 407		 * To make sure link reg writes happens before other operation,
 408		 * msm_dp_write_link() function uses writel()
 409		 */
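		/*
		 * Pulse DP_MAINLINK_CTRL_RESET (assert, then de-assert) with
		 * the mainlink disabled before enabling it again below.
		 */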
 410		mainlink_ctrl = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
 411
 412		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
 413						DP_MAINLINK_CTRL_ENABLE);
 414		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
 415
 416		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
 417		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
 418
 419		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
 420		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
 421
 422		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
 423					DP_MAINLINK_FB_BOUNDARY_SEL);
 424		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
 425	} else {
 426		mainlink_ctrl = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
 427		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
 428		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
 429	}
 430}
 431
 432void msm_dp_catalog_ctrl_config_misc(struct msm_dp_catalog *msm_dp_catalog,
 433					u32 colorimetry_cfg,
 434					u32 test_bits_depth)
 435{
 436	u32 misc_val;
 437	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 438				struct msm_dp_catalog_private, msm_dp_catalog);
 439
 440	misc_val = msm_dp_read_link(catalog, REG_DP_MISC1_MISC0);
 441
 442	/* clear bpp bits */
 443	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
 444	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
 445	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
 446	/* Configure clock to synchronous mode */
 447	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;
 448
 449	drm_dbg_dp(catalog->drm_dev, "misc settings = 0x%x\n", misc_val);
 450	msm_dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
 451}
 452
 453void msm_dp_catalog_setup_peripheral_flush(struct msm_dp_catalog *msm_dp_catalog)
 454{
 455	u32 mainlink_ctrl, hw_revision;
 456	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 457				struct msm_dp_catalog_private, msm_dp_catalog);
 458
 459	mainlink_ctrl = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
 460
 461	hw_revision = msm_dp_catalog_hw_revision(msm_dp_catalog);
 462	if (hw_revision >= DP_HW_VERSION_1_2)
 463		mainlink_ctrl |= DP_MAINLINK_FLUSH_MODE_SDE_PERIPH_UPDATE;
 464	else
 465		mainlink_ctrl |= DP_MAINLINK_FLUSH_MODE_UPDATE_SDP;
 466
 467	msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
 468}
 469
 470void msm_dp_catalog_ctrl_config_msa(struct msm_dp_catalog *msm_dp_catalog,
 471					u32 rate, u32 stream_rate_khz,
 472					bool is_ycbcr_420)
 473{
 474	u32 pixel_m, pixel_n;
 475	u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
 476	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
 477	u32 const link_rate_hbr2 = 540000;
 478	u32 const link_rate_hbr3 = 810000;
 479	unsigned long den, num;
 480
 481	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 482				struct msm_dp_catalog_private, msm_dp_catalog);
 483
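	/*
	 * MVID/NVID give the sink the pixel-clock to link-clock ratio used
	 * to regenerate the stream clock.  The ratio is approximated from
	 * the link rate (scaled by the pixel mux divider) versus the stream
	 * rate, then scaled up so NVID is at least DP_LINK_CONSTANT_N_VALUE
	 * and adjusted for YCbCr 4:2:0 and the HBR2/HBR3 rates below.
	 */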
 484	if (rate == link_rate_hbr3)
 485		pixel_div = 6;
 486	else if (rate == 162000 || rate == 270000)
 487		pixel_div = 2;
 488	else if (rate == link_rate_hbr2)
 489		pixel_div = 4;
 490	else
 491		DRM_ERROR("Invalid pixel mux divider\n");
 492
 493	dispcc_input_rate = (rate * 10) / pixel_div;
 494
 495	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
 496			(unsigned long)(1 << 16) - 1,
 497			(unsigned long)(1 << 16) - 1, &den, &num);
 498
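	/*
	 * pixel_n is kept in an inverted intermediate form; the nvid
	 * computation below undoes it, so effectively mvid = 5 * num and
	 * nvid = den before the DP_LINK_CONSTANT_N_VALUE scaling.
	 */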
 499	den = ~(den - num);
 500	den = den & 0xFFFF;
 501	pixel_m = num;
 502	pixel_n = den;
 503
 504	mvid = (pixel_m & 0xFFFF) * 5;
 505	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
 506
 507	if (nvid < nvid_fixed) {
 508		u32 temp;
 509
 510		temp = (nvid_fixed / nvid) * nvid;
 511		mvid = (nvid_fixed / nvid) * mvid;
 512		nvid = temp;
 513	}
 514
 515	if (is_ycbcr_420)
 516		mvid /= 2;
 517
 518	if (link_rate_hbr2 == rate)
 519		nvid *= 2;
 520
 521	if (link_rate_hbr3 == rate)
 522		nvid *= 3;
 523
 524	drm_dbg_dp(catalog->drm_dev, "mvid=0x%x, nvid=0x%x\n", mvid, nvid);
 525	msm_dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
 526	msm_dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
 527	msm_dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
 528}
 529
 530int msm_dp_catalog_ctrl_set_pattern_state_bit(struct msm_dp_catalog *msm_dp_catalog,
 531					u32 state_bit)
 532{
 533	int bit, ret;
 534	u32 data;
 535	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 536				struct msm_dp_catalog_private, msm_dp_catalog);
 537
 538	bit = BIT(state_bit - 1);
 539	drm_dbg_dp(catalog->drm_dev, "hw: bit=%d train=%d\n", bit, state_bit);
 540	msm_dp_catalog_ctrl_state_ctrl(msm_dp_catalog, bit);
 541
 542	bit = BIT(state_bit - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;
 543
 544	/* Poll for mainlink ready status */
 545	ret = readx_poll_timeout(readl, catalog->io.link.base +
 546					REG_DP_MAINLINK_READY,
 547					data, data & bit,
 548					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
 549	if (ret < 0) {
 550		DRM_ERROR("set state_bit for link_train=%d failed\n", state_bit);
 551		return ret;
 552	}
 553	return 0;
 554}
 555
 556/**
 557 * msm_dp_catalog_hw_revision() - retrieve DP hw revision
 558 *
 559 * @msm_dp_catalog: DP catalog structure
 560 *
 561 * Return: DP controller hw revision
 562 *
 563 */
 564u32 msm_dp_catalog_hw_revision(const struct msm_dp_catalog *msm_dp_catalog)
 565{
 566	const struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 567				struct msm_dp_catalog_private, msm_dp_catalog);
 568
 569	return msm_dp_read_ahb(catalog, REG_DP_HW_VERSION);
 570}
 571
  572/**
  573 * msm_dp_catalog_ctrl_reset() - reset the DP controller
  574 *
  575 * @msm_dp_catalog: DP catalog structure
  576 *
  577 * Return: void
  578 *
  579 * This function resets the DP controller.
  580 *
  581 * NOTE: resetting the DP controller will also clear any pending HPD
  582 * related interrupts.
  583 */
 584void msm_dp_catalog_ctrl_reset(struct msm_dp_catalog *msm_dp_catalog)
 585{
 586	u32 sw_reset;
 587	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 588				struct msm_dp_catalog_private, msm_dp_catalog);
 589
 590	sw_reset = msm_dp_read_ahb(catalog, REG_DP_SW_RESET);
 591
 592	sw_reset |= DP_SW_RESET;
 593	msm_dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
 594	usleep_range(1000, 1100); /* h/w recommended delay */
 595
 596	sw_reset &= ~DP_SW_RESET;
 597	msm_dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
 598}
 599
 600bool msm_dp_catalog_ctrl_mainlink_ready(struct msm_dp_catalog *msm_dp_catalog)
 601{
 602	u32 data;
 603	int ret;
 604	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 605				struct msm_dp_catalog_private, msm_dp_catalog);
 606
 607	/* Poll for mainlink ready status */
 608	ret = readl_poll_timeout(catalog->io.link.base +
 609				REG_DP_MAINLINK_READY,
 610				data, data & DP_MAINLINK_READY_FOR_VIDEO,
 611				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
 612	if (ret < 0) {
 613		DRM_ERROR("mainlink not ready\n");
 614		return false;
 615	}
 616
 617	return true;
 618}
 619
 620void msm_dp_catalog_ctrl_enable_irq(struct msm_dp_catalog *msm_dp_catalog,
 621						bool enable)
 622{
 623	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 624				struct msm_dp_catalog_private, msm_dp_catalog);
 625
 626	if (enable) {
 627		msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS,
 628				DP_INTERRUPT_STATUS1_MASK);
 629		msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
 630				DP_INTERRUPT_STATUS2_MASK);
 631	} else {
 632		msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
 633		msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
 634	}
 635}
 636
 637void msm_dp_catalog_hpd_config_intr(struct msm_dp_catalog *msm_dp_catalog,
 638			u32 intr_mask, bool en)
 639{
 640	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 641				struct msm_dp_catalog_private, msm_dp_catalog);
 642
 643	u32 config = msm_dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
 644
 645	config = (en ? config | intr_mask : config & ~intr_mask);
 646
 647	drm_dbg_dp(catalog->drm_dev, "intr_mask=%#x config=%#x\n",
 648					intr_mask, config);
 649	msm_dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
 650				config & DP_DP_HPD_INT_MASK);
 651}
 652
 653void msm_dp_catalog_ctrl_hpd_enable(struct msm_dp_catalog *msm_dp_catalog)
 654{
 655	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 656				struct msm_dp_catalog_private, msm_dp_catalog);
 657
 658	u32 reftimer = msm_dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
 659
 660	/* Configure REFTIMER and enable it */
 661	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
 662	msm_dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);
 663
 664	/* Enable HPD */
 665	msm_dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
 666}
 667
 668void msm_dp_catalog_ctrl_hpd_disable(struct msm_dp_catalog *msm_dp_catalog)
 669{
 670	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 671				struct msm_dp_catalog_private, msm_dp_catalog);
 672
 673	u32 reftimer = msm_dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
 674
 675	reftimer &= ~DP_DP_HPD_REFTIMER_ENABLE;
 676	msm_dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);
 677
 678	msm_dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, 0);
 679}
 680
 681static void msm_dp_catalog_enable_sdp(struct msm_dp_catalog_private *catalog)
 682{
 683	/* trigger sdp */
 684	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG3, UPDATE_SDP);
 685	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x0);
 686}
 687
 688void msm_dp_catalog_ctrl_config_psr(struct msm_dp_catalog *msm_dp_catalog)
 689{
 690	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 691				struct msm_dp_catalog_private, msm_dp_catalog);
 692	u32 config;
 693
 694	/* enable PSR1 function */
 695	config = msm_dp_read_link(catalog, REG_PSR_CONFIG);
 696	config |= PSR1_SUPPORTED;
 697	msm_dp_write_link(catalog, REG_PSR_CONFIG, config);
 698
 699	msm_dp_write_ahb(catalog, REG_DP_INTR_MASK4, DP_INTERRUPT_MASK4);
 700	msm_dp_catalog_enable_sdp(catalog);
 701}
 702
 703void msm_dp_catalog_ctrl_set_psr(struct msm_dp_catalog *msm_dp_catalog, bool enter)
 704{
 705	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 706			struct msm_dp_catalog_private, msm_dp_catalog);
 707	u32 cmd;
 708
 709	cmd = msm_dp_read_link(catalog, REG_PSR_CMD);
 710
 711	cmd &= ~(PSR_ENTER | PSR_EXIT);
 712
 713	if (enter)
 714		cmd |= PSR_ENTER;
 715	else
 716		cmd |= PSR_EXIT;
 717
 718	msm_dp_catalog_enable_sdp(catalog);
 719	msm_dp_write_link(catalog, REG_PSR_CMD, cmd);
 720}
 721
 722u32 msm_dp_catalog_link_is_connected(struct msm_dp_catalog *msm_dp_catalog)
 723{
 724	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 725				struct msm_dp_catalog_private, msm_dp_catalog);
 726	u32 status;
 727
 728	status = msm_dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
 729	drm_dbg_dp(catalog->drm_dev, "aux status: %#x\n", status);
 730	status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
 731	status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
 732
 733	return status;
 734}
 735
 736u32 msm_dp_catalog_hpd_get_intr_status(struct msm_dp_catalog *msm_dp_catalog)
 737{
 738	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 739				struct msm_dp_catalog_private, msm_dp_catalog);
 740	int isr, mask;
 741
 742	isr = msm_dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
 743	msm_dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
 744				 (isr & DP_DP_HPD_INT_MASK));
 745	mask = msm_dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
 746
 747	/*
 748	 * We only want to return interrupts that are unmasked to the caller.
 749	 * However, the interrupt status field also contains other
 750	 * informational bits about the HPD state status, so we only mask
 751	 * out the part of the register that tells us about which interrupts
 752	 * are pending.
 753	 */
 754	return isr & (mask | ~DP_DP_HPD_INT_MASK);
 755}
 756
 757u32 msm_dp_catalog_ctrl_read_psr_interrupt_status(struct msm_dp_catalog *msm_dp_catalog)
 758{
 759	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 760				struct msm_dp_catalog_private, msm_dp_catalog);
 761	u32 intr, intr_ack;
 762
 763	intr = msm_dp_read_ahb(catalog, REG_DP_INTR_STATUS4);
 764	intr_ack = (intr & DP_INTERRUPT_STATUS4)
 765			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
 766	msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS4, intr_ack);
 767
 768	return intr;
 769}
 770
 771int msm_dp_catalog_ctrl_get_interrupt(struct msm_dp_catalog *msm_dp_catalog)
 772{
 773	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 774				struct msm_dp_catalog_private, msm_dp_catalog);
 775	u32 intr, intr_ack;
 776
 777	intr = msm_dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
 778	intr &= ~DP_INTERRUPT_STATUS2_MASK;
 779	intr_ack = (intr & DP_INTERRUPT_STATUS2)
 780			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
 781	msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
 782			intr_ack | DP_INTERRUPT_STATUS2_MASK);
 783
 784	return intr;
 785}
 786
 787void msm_dp_catalog_ctrl_phy_reset(struct msm_dp_catalog *msm_dp_catalog)
 788{
 789	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 790				struct msm_dp_catalog_private, msm_dp_catalog);
 791
 792	msm_dp_write_ahb(catalog, REG_DP_PHY_CTRL,
 793			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
 794	usleep_range(1000, 1100); /* h/w recommended delay */
 795	msm_dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
 796}
 797
 798void msm_dp_catalog_ctrl_send_phy_pattern(struct msm_dp_catalog *msm_dp_catalog,
 799			u32 pattern)
 800{
 801	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 802				struct msm_dp_catalog_private, msm_dp_catalog);
 803	u32 value = 0x0;
 804
 805	/* Make sure to clear the current pattern before starting a new one */
 806	msm_dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);
 807
 808	drm_dbg_dp(catalog->drm_dev, "pattern: %#x\n", pattern);
 809	switch (pattern) {
 810	case DP_PHY_TEST_PATTERN_D10_2:
 811		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
 812				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
 813		break;
 814	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
 815		value &= ~(1 << 16);
 816		msm_dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
 817					value);
 818		value |= SCRAMBLER_RESET_COUNT_VALUE;
 819		msm_dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
 820					value);
 821		msm_dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
 822					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
 823		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
 824					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
 825		break;
 826	case DP_PHY_TEST_PATTERN_PRBS7:
 827		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
 828				DP_STATE_CTRL_LINK_PRBS7);
 829		break;
 830	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
 831		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
 832				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
 833		/* 00111110000011111000001111100000 */
 834		msm_dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
 835				0x3E0F83E0);
 836		/* 00001111100000111110000011111000 */
 837		msm_dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
 838				0x0F83E0F8);
 839		/* 1111100000111110 */
 840		msm_dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
 841				0x0000F83E);
 842		break;
 843	case DP_PHY_TEST_PATTERN_CP2520:
 844		value = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
 845		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
 846		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
 847
 848		value = DP_HBR2_ERM_PATTERN;
 849		msm_dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
 850				value);
 851		value |= SCRAMBLER_RESET_COUNT_VALUE;
 852		msm_dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
 853					value);
 854		msm_dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
 855					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
 856		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
 857					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
 858		value = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
 859		value |= DP_MAINLINK_CTRL_ENABLE;
 860		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
 861		break;
 862	case DP_PHY_TEST_PATTERN_SEL_MASK:
 863		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
 864				DP_MAINLINK_CTRL_ENABLE);
 865		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
 866				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
 867		break;
 868	default:
 869		drm_dbg_dp(catalog->drm_dev,
 870				"No valid test pattern requested: %#x\n", pattern);
 871		break;
 872	}
 873}
 874
 875u32 msm_dp_catalog_ctrl_read_phy_pattern(struct msm_dp_catalog *msm_dp_catalog)
 876{
 877	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 878				struct msm_dp_catalog_private, msm_dp_catalog);
 879
 880	return msm_dp_read_link(catalog, REG_DP_MAINLINK_READY);
 881}
 882
 883/* panel related catalog functions */
 884int msm_dp_catalog_panel_timing_cfg(struct msm_dp_catalog *msm_dp_catalog, u32 total,
 885				u32 sync_start, u32 width_blanking, u32 msm_dp_active)
 886{
 887	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
 888				struct msm_dp_catalog_private, msm_dp_catalog);
 889	u32 reg;
 890
 891	msm_dp_write_link(catalog, REG_DP_TOTAL_HOR_VER, total);
 892	msm_dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC, sync_start);
 893	msm_dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY, width_blanking);
 894	msm_dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, msm_dp_active);
 895
 896	reg = msm_dp_read_p0(catalog, MMSS_DP_INTF_CONFIG);
 897
 898	if (msm_dp_catalog->wide_bus_en)
 899		reg |= DP_INTF_CONFIG_DATABUS_WIDEN;
 900	else
 901		reg &= ~DP_INTF_CONFIG_DATABUS_WIDEN;
 902
 903
 904	DRM_DEBUG_DP("wide_bus_en=%d reg=%#x\n", msm_dp_catalog->wide_bus_en, reg);
 905
 906	msm_dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, reg);
 907	return 0;
 908}
 909
 910static void msm_dp_catalog_panel_send_vsc_sdp(struct msm_dp_catalog *msm_dp_catalog, struct dp_sdp *vsc_sdp)
 911{
 912	struct msm_dp_catalog_private *catalog;
 913	u32 header[2];
 914	u32 val;
 915	int i;
 916
 917	catalog = container_of(msm_dp_catalog, struct msm_dp_catalog_private, msm_dp_catalog);
 918
 919	msm_dp_utils_pack_sdp_header(&vsc_sdp->sdp_header, header);
 920
 921	msm_dp_write_link(catalog, MMSS_DP_GENERIC0_0, header[0]);
 922	msm_dp_write_link(catalog, MMSS_DP_GENERIC0_1, header[1]);
 923
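	/* Pack the SDP payload bytes little-endian, four bytes per register write. */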
 924	for (i = 0; i < sizeof(vsc_sdp->db); i += 4) {
 925		val = ((vsc_sdp->db[i]) | (vsc_sdp->db[i + 1] << 8) | (vsc_sdp->db[i + 2] << 16) |
 926		       (vsc_sdp->db[i + 3] << 24));
 927		msm_dp_write_link(catalog, MMSS_DP_GENERIC0_2 + i, val);
 928	}
 929}
 930
 931static void msm_dp_catalog_panel_update_sdp(struct msm_dp_catalog *msm_dp_catalog)
 932{
 933	struct msm_dp_catalog_private *catalog;
 934	u32 hw_revision;
 935
 936	catalog = container_of(msm_dp_catalog, struct msm_dp_catalog_private, msm_dp_catalog);
 937
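	/*
	 * Controller versions 1.0 and 1.1 need the new SDP contents latched
	 * explicitly by toggling MMSS_DP_SDP_CFG3 (presumably the UPDATE_SDP
	 * bit); v1.2 and later latch them via the peripheral flush configured
	 * in msm_dp_catalog_setup_peripheral_flush().
	 */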
 938	hw_revision = msm_dp_catalog_hw_revision(msm_dp_catalog);
 939	if (hw_revision < DP_HW_VERSION_1_2 && hw_revision >= DP_HW_VERSION_1_0) {
 940		msm_dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x01);
 941		msm_dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x00);
 942	}
 943}
 944
 945void msm_dp_catalog_panel_enable_vsc_sdp(struct msm_dp_catalog *msm_dp_catalog, struct dp_sdp *vsc_sdp)
 946{
 947	struct msm_dp_catalog_private *catalog;
 948	u32 cfg, cfg2, misc;
 949
 950	catalog = container_of(msm_dp_catalog, struct msm_dp_catalog_private, msm_dp_catalog);
 951
 952	cfg = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG);
 953	cfg2 = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG2);
 954	misc = msm_dp_read_link(catalog, REG_DP_MISC1_MISC0);
 955
 956	cfg |= GEN0_SDP_EN;
 957	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG, cfg);
 958
 959	cfg2 |= GENERIC0_SDPSIZE_VALID;
 960	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG2, cfg2);
 961
 962	msm_dp_catalog_panel_send_vsc_sdp(msm_dp_catalog, vsc_sdp);
 963
 964	/* indicates presence of VSC (BIT(6) of MISC1) */
 965	misc |= DP_MISC1_VSC_SDP;
 966
 967	drm_dbg_dp(catalog->drm_dev, "vsc sdp enable=1\n");
 968
 969	pr_debug("misc settings = 0x%x\n", misc);
 970	msm_dp_write_link(catalog, REG_DP_MISC1_MISC0, misc);
 971
 972	msm_dp_catalog_panel_update_sdp(msm_dp_catalog);
 973}
 974
 975void msm_dp_catalog_panel_disable_vsc_sdp(struct msm_dp_catalog *msm_dp_catalog)
 976{
 977	struct msm_dp_catalog_private *catalog;
 978	u32 cfg, cfg2, misc;
 979
 980	catalog = container_of(msm_dp_catalog, struct msm_dp_catalog_private, msm_dp_catalog);
 981
 982	cfg = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG);
 983	cfg2 = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG2);
 984	misc = msm_dp_read_link(catalog, REG_DP_MISC1_MISC0);
 985
 986	cfg &= ~GEN0_SDP_EN;
 987	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG, cfg);
 988
 989	cfg2 &= ~GENERIC0_SDPSIZE_VALID;
 990	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG2, cfg2);
 991
 992	/* switch back to MSA */
 993	misc &= ~DP_MISC1_VSC_SDP;
 994
 995	drm_dbg_dp(catalog->drm_dev, "vsc sdp enable=0\n");
 996
 997	pr_debug("misc settings = 0x%x\n", misc);
 998	msm_dp_write_link(catalog, REG_DP_MISC1_MISC0, misc);
 999
1000	msm_dp_catalog_panel_update_sdp(msm_dp_catalog);
1001}
1002
1003void msm_dp_catalog_panel_tpg_enable(struct msm_dp_catalog *msm_dp_catalog,
1004				struct drm_display_mode *drm_mode)
1005{
1006	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
1007				struct msm_dp_catalog_private, msm_dp_catalog);
1008	u32 hsync_period, vsync_period;
1009	u32 display_v_start, display_v_end;
1010	u32 hsync_start_x, hsync_end_x;
1011	u32 v_sync_width;
1012	u32 hsync_ctl;
1013	u32 display_hctl;
1014
 1015	/* TPG config parameters */
1016	hsync_period = drm_mode->htotal;
1017	vsync_period = drm_mode->vtotal;
1018
1019	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
1020					hsync_period);
1021	display_v_end = ((vsync_period - (drm_mode->vsync_start -
1022					drm_mode->vdisplay))
1023					* hsync_period) - 1;
1024
1025	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
1026	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);
1027
1028	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
1029	hsync_end_x = hsync_period - (drm_mode->hsync_start -
1030					drm_mode->hdisplay) - 1;
1031
1032	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;
1033
1034	hsync_ctl = (hsync_period << 16) |
1035			(drm_mode->hsync_end - drm_mode->hsync_start);
1036	display_hctl = (hsync_end_x << 16) | hsync_start_x;
1037
1038
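	/*
	 * Program the internal timing engine with the mode-derived values
	 * above, then enable the checkered-rectangle test pattern (8 bpc,
	 * RGB) and the timing engine itself.
	 */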
1039	msm_dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
1040	msm_dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
1041			hsync_period);
1042	msm_dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
1043			hsync_period);
1044	msm_dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
1045	msm_dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
1046	msm_dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
1047	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
1048	msm_dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
1049	msm_dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
1050	msm_dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
1051	msm_dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
1052	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
1053	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
1054	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
1055	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
1056	msm_dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);
1057
1058	msm_dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
1059				DP_TPG_CHECKERED_RECT_PATTERN);
1060	msm_dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
1061				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
1062				DP_TPG_VIDEO_CONFIG_RGB);
1063	msm_dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
1064				DP_BIST_ENABLE_DPBIST_EN);
1065	msm_dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
1066				DP_TIMING_ENGINE_EN_EN);
1067	drm_dbg_dp(catalog->drm_dev, "%s: enabled tpg\n", __func__);
1068}
1069
1070void msm_dp_catalog_panel_tpg_disable(struct msm_dp_catalog *msm_dp_catalog)
1071{
1072	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
1073				struct msm_dp_catalog_private, msm_dp_catalog);
1074
1075	msm_dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
1076	msm_dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
1077	msm_dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
1078}
1079
1080static void __iomem *msm_dp_ioremap(struct platform_device *pdev, int idx, size_t *len)
1081{
1082	struct resource *res;
1083	void __iomem *base;
1084
1085	base = devm_platform_get_and_ioremap_resource(pdev, idx, &res);
1086	if (!IS_ERR(base))
1087		*len = resource_size(res);
1088
1089	return base;
1090}
1091
1092static int msm_dp_catalog_get_io(struct msm_dp_catalog_private *catalog)
1093{
1094	struct platform_device *pdev = to_platform_device(catalog->dev);
1095	struct dss_io_data *dss = &catalog->io;
1096
1097	dss->ahb.base = msm_dp_ioremap(pdev, 0, &dss->ahb.len);
1098	if (IS_ERR(dss->ahb.base))
1099		return PTR_ERR(dss->ahb.base);
1100
1101	dss->aux.base = msm_dp_ioremap(pdev, 1, &dss->aux.len);
1102	if (IS_ERR(dss->aux.base)) {
1103		/*
1104		 * The initial binding had a single reg, but in order to
1105		 * support variation in the sub-region sizes this was split.
1106		 * msm_dp_ioremap() will fail with -EINVAL here if only a single
1107		 * reg is specified, so fill in the sub-region offsets and
1108		 * lengths based on this single region.
1109		 */
1110		if (PTR_ERR(dss->aux.base) == -EINVAL) {
1111			if (dss->ahb.len < DP_DEFAULT_P0_OFFSET + DP_DEFAULT_P0_SIZE) {
1112				DRM_ERROR("legacy memory region not large enough\n");
1113				return -EINVAL;
1114			}
1115
1116			dss->ahb.len = DP_DEFAULT_AHB_SIZE;
1117			dss->aux.base = dss->ahb.base + DP_DEFAULT_AUX_OFFSET;
1118			dss->aux.len = DP_DEFAULT_AUX_SIZE;
1119			dss->link.base = dss->ahb.base + DP_DEFAULT_LINK_OFFSET;
1120			dss->link.len = DP_DEFAULT_LINK_SIZE;
1121			dss->p0.base = dss->ahb.base + DP_DEFAULT_P0_OFFSET;
1122			dss->p0.len = DP_DEFAULT_P0_SIZE;
1123		} else {
1124			DRM_ERROR("unable to remap aux region: %pe\n", dss->aux.base);
1125			return PTR_ERR(dss->aux.base);
1126		}
1127	} else {
1128		dss->link.base = msm_dp_ioremap(pdev, 2, &dss->link.len);
1129		if (IS_ERR(dss->link.base)) {
1130			DRM_ERROR("unable to remap link region: %pe\n", dss->link.base);
1131			return PTR_ERR(dss->link.base);
1132		}
1133
1134		dss->p0.base = msm_dp_ioremap(pdev, 3, &dss->p0.len);
1135		if (IS_ERR(dss->p0.base)) {
1136			DRM_ERROR("unable to remap p0 region: %pe\n", dss->p0.base);
1137			return PTR_ERR(dss->p0.base);
1138		}
1139	}
1140
1141	return 0;
1142}
1143
1144struct msm_dp_catalog *msm_dp_catalog_get(struct device *dev)
1145{
1146	struct msm_dp_catalog_private *catalog;
1147	int ret;
1148
1149	catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
1150	if (!catalog)
1151		return ERR_PTR(-ENOMEM);
1152
1153	catalog->dev = dev;
1154
1155	ret = msm_dp_catalog_get_io(catalog);
1156	if (ret)
1157		return ERR_PTR(ret);
1158
1159	return &catalog->msm_dp_catalog;
1160}
1161
1162u32 msm_dp_catalog_audio_get_header(struct msm_dp_catalog *msm_dp_catalog,
1163				enum msm_dp_catalog_audio_sdp_type sdp,
1164				enum msm_dp_catalog_audio_header_type header)
1165{
1166	struct msm_dp_catalog_private *catalog;
1167	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1168
1169	catalog = container_of(msm_dp_catalog,
1170		struct msm_dp_catalog_private, msm_dp_catalog);
1171
1172	sdp_map = catalog->audio_map;
1173
1174	return msm_dp_read_link(catalog, sdp_map[sdp][header]);
1175}
1176
1177void msm_dp_catalog_audio_set_header(struct msm_dp_catalog *msm_dp_catalog,
1178				 enum msm_dp_catalog_audio_sdp_type sdp,
1179				 enum msm_dp_catalog_audio_header_type header,
1180				 u32 data)
1181{
1182	struct msm_dp_catalog_private *catalog;
1183	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1184
1185	if (!msm_dp_catalog)
1186		return;
1187
1188	catalog = container_of(msm_dp_catalog,
1189		struct msm_dp_catalog_private, msm_dp_catalog);
1190
1191	sdp_map = catalog->audio_map;
1192
1193	msm_dp_write_link(catalog, sdp_map[sdp][header], data);
1194}
1195
1196void msm_dp_catalog_audio_config_acr(struct msm_dp_catalog *msm_dp_catalog, u32 select)
1197{
1198	struct msm_dp_catalog_private *catalog;
1199	u32 acr_ctrl;
1200
1201	if (!msm_dp_catalog)
1202		return;
1203
1204	catalog = container_of(msm_dp_catalog,
1205		struct msm_dp_catalog_private, msm_dp_catalog);
1206
1207	acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
1208
1209	drm_dbg_dp(catalog->drm_dev, "select: %#x, acr_ctrl: %#x\n",
1210					select, acr_ctrl);
1211
1212	msm_dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
1213}
1214
1215void msm_dp_catalog_audio_enable(struct msm_dp_catalog *msm_dp_catalog, bool enable)
1216{
1217	struct msm_dp_catalog_private *catalog;
1218	u32 audio_ctrl;
1219
1220	if (!msm_dp_catalog)
1221		return;
1222
1223	catalog = container_of(msm_dp_catalog,
1224		struct msm_dp_catalog_private, msm_dp_catalog);
1225
1226	audio_ctrl = msm_dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
1227
1228	if (enable)
1229		audio_ctrl |= BIT(0);
1230	else
1231		audio_ctrl &= ~BIT(0);
1232
1233	drm_dbg_dp(catalog->drm_dev, "dp_audio_cfg = 0x%x\n", audio_ctrl);
1234
1235	msm_dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
1236	/* make sure audio engine is disabled */
1237	wmb();
1238}
1239
1240void msm_dp_catalog_audio_config_sdp(struct msm_dp_catalog *msm_dp_catalog)
1241{
1242	struct msm_dp_catalog_private *catalog;
1243	u32 sdp_cfg = 0;
1244	u32 sdp_cfg2 = 0;
1245
1246	if (!msm_dp_catalog)
1247		return;
1248
1249	catalog = container_of(msm_dp_catalog,
1250		struct msm_dp_catalog_private, msm_dp_catalog);
1251
1252	sdp_cfg = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG);
1253	/* AUDIO_TIMESTAMP_SDP_EN */
1254	sdp_cfg |= BIT(1);
1255	/* AUDIO_STREAM_SDP_EN */
1256	sdp_cfg |= BIT(2);
1257	/* AUDIO_COPY_MANAGEMENT_SDP_EN */
1258	sdp_cfg |= BIT(5);
1259	/* AUDIO_ISRC_SDP_EN  */
1260	sdp_cfg |= BIT(6);
1261	/* AUDIO_INFOFRAME_SDP_EN  */
1262	sdp_cfg |= BIT(20);
1263
1264	drm_dbg_dp(catalog->drm_dev, "sdp_cfg = 0x%x\n", sdp_cfg);
1265
1266	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
1267
1268	sdp_cfg2 = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG2);
1269	/* IFRM_REGSRC -> Do not use reg values */
1270	sdp_cfg2 &= ~BIT(0);
1271	/* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
1272	sdp_cfg2 &= ~BIT(1);
1273
1274	drm_dbg_dp(catalog->drm_dev, "sdp_cfg2 = 0x%x\n", sdp_cfg2);
1275
1276	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
1277}
1278
1279void msm_dp_catalog_audio_init(struct msm_dp_catalog *msm_dp_catalog)
1280{
1281	struct msm_dp_catalog_private *catalog;
1282
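	/*
	 * Map each audio SDP type to the registers holding its header words;
	 * the second and third headers share one register, hence the
	 * repeated *_1 entries.
	 */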
1283	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
1284		{
1285			MMSS_DP_AUDIO_STREAM_0,
1286			MMSS_DP_AUDIO_STREAM_1,
1287			MMSS_DP_AUDIO_STREAM_1,
1288		},
1289		{
1290			MMSS_DP_AUDIO_TIMESTAMP_0,
1291			MMSS_DP_AUDIO_TIMESTAMP_1,
1292			MMSS_DP_AUDIO_TIMESTAMP_1,
1293		},
1294		{
1295			MMSS_DP_AUDIO_INFOFRAME_0,
1296			MMSS_DP_AUDIO_INFOFRAME_1,
1297			MMSS_DP_AUDIO_INFOFRAME_1,
1298		},
1299		{
1300			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
1301			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
1302			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
1303		},
1304		{
1305			MMSS_DP_AUDIO_ISRC_0,
1306			MMSS_DP_AUDIO_ISRC_1,
1307			MMSS_DP_AUDIO_ISRC_1,
1308		},
1309	};
1310
1311	if (!msm_dp_catalog)
1312		return;
1313
1314	catalog = container_of(msm_dp_catalog,
1315		struct msm_dp_catalog_private, msm_dp_catalog);
1316
1317	catalog->audio_map = sdp_map;
1318}
1319
1320void msm_dp_catalog_audio_sfe_level(struct msm_dp_catalog *msm_dp_catalog, u32 safe_to_exit_level)
1321{
1322	struct msm_dp_catalog_private *catalog;
1323	u32 mainlink_levels;
1324
1325	if (!msm_dp_catalog)
1326		return;
1327
1328	catalog = container_of(msm_dp_catalog,
1329		struct msm_dp_catalog_private, msm_dp_catalog);
1330
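	/* The safe-to-exit level occupies the low five bits of REG_DP_MAINLINK_LEVELS. */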
1331	mainlink_levels = msm_dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1332	mainlink_levels &= 0xFE0;
1333	mainlink_levels |= safe_to_exit_level;
1334
1335	drm_dbg_dp(catalog->drm_dev,
1336			"mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1337			 mainlink_levels, safe_to_exit_level);
1338
1339	msm_dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
1340}