v5.14.15
   1// SPDX-License-Identifier: GPL-2.0-only
   2/*
   3 * Copyright (C) 2012 Texas Instruments
   4 * Author: Rob Clark <robdclark@gmail.com>
   5 */
   6
   7#include <linux/delay.h>
   8#include <linux/dma-mapping.h>
   9#include <linux/of_graph.h>
  10#include <linux/pm_runtime.h>
  11
  12#include <drm/drm_atomic.h>
  13#include <drm/drm_atomic_helper.h>
  14#include <drm/drm_crtc.h>
  15#include <drm/drm_fb_cma_helper.h>
  16#include <drm/drm_fourcc.h>
   17#include <drm/drm_gem_cma_helper.h>
   18#include <drm/drm_modeset_helper_vtables.h>
  19#include <drm/drm_print.h>
  20#include <drm/drm_vblank.h>
  21
  22#include "tilcdc_drv.h"
  23#include "tilcdc_regs.h"
  24
  25#define TILCDC_VBLANK_SAFETY_THRESHOLD_US	1000
  26#define TILCDC_PALETTE_SIZE			32
  27#define TILCDC_PALETTE_FIRST_ENTRY		0x4000
  28
  29struct tilcdc_crtc {
  30	struct drm_crtc base;
  31
  32	struct drm_plane primary;
  33	const struct tilcdc_panel_info *info;
  34	struct drm_pending_vblank_event *event;
  35	struct mutex enable_lock;
  36	bool enabled;
  37	bool shutdown;
  38	wait_queue_head_t frame_done_wq;
  39	bool frame_done;
  40	spinlock_t irq_lock;
  41
  42	unsigned int lcd_fck_rate;
  43
  44	ktime_t last_vblank;
  45	unsigned int hvtotal_us;
  46
  47	struct drm_framebuffer *next_fb;
  48
  49	/* Only set if an external encoder is connected */
  50	bool simulate_vesa_sync;
  51
  52	int sync_lost_count;
  53	bool frame_intact;
  54	struct work_struct recover_work;
  55
  56	dma_addr_t palette_dma_handle;
  57	u16 *palette_base;
  58	struct completion palette_loaded;
  59};
  60#define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)
  61
  62static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
  63{
  64	struct drm_device *dev = crtc->dev;
  65	struct tilcdc_drm_private *priv = dev->dev_private;
  66	struct drm_gem_cma_object *gem;
  67	dma_addr_t start, end;
  68	u64 dma_base_and_ceiling;
  69
  70	gem = drm_fb_cma_get_gem_obj(fb, 0);
  71
  72	start = gem->paddr + fb->offsets[0] +
  73		crtc->y * fb->pitches[0] +
  74		crtc->x * fb->format->cpp[0];
  75
  76	end = start + (crtc->mode.vdisplay * fb->pitches[0]);
  77
  78	/* Write LCDC_DMA_FB_BASE_ADDR_0_REG and LCDC_DMA_FB_CEILING_ADDR_0_REG
   79	 * with a single instruction, if available. This should make it more
  80	 * unlikely that LCDC would fetch the DMA addresses in the middle of
  81	 * an update.
  82	 */
  83	if (priv->rev == 1)
  84		end -= 1;
  85
  86	dma_base_and_ceiling = (u64)end << 32 | start;
  87	tilcdc_write64(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, dma_base_and_ceiling);
  88}
  89
  90/*
   91 * The driver currently supports only true color formats. For
  92 * true color the palette block is bypassed, but a 32 byte palette
  93 * should still be loaded. The first 16-bit entry must be 0x4000 while
  94 * all other entries must be zeroed.
  95 */
  96static void tilcdc_crtc_load_palette(struct drm_crtc *crtc)
  97{
  98	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
  99	struct drm_device *dev = crtc->dev;
 100	struct tilcdc_drm_private *priv = dev->dev_private;
 101	int ret;
 102
 103	reinit_completion(&tilcdc_crtc->palette_loaded);
 104
 105	/* Tell the LCDC where the palette is located. */
 106	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG,
 107		     tilcdc_crtc->palette_dma_handle);
 108	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG,
 109		     (u32) tilcdc_crtc->palette_dma_handle +
 110		     TILCDC_PALETTE_SIZE - 1);
 111
 112	/* Set dma load mode for palette loading only. */
 113	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
 114			  LCDC_PALETTE_LOAD_MODE(PALETTE_ONLY),
 115			  LCDC_PALETTE_LOAD_MODE_MASK);
 116
 117	/* Enable DMA Palette Loaded Interrupt */
 118	if (priv->rev == 1)
 119		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
 120	else
 121		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_V2_PL_INT_ENA);
 122
 123	/* Enable LCDC DMA and wait for palette to be loaded. */
 124	tilcdc_clear_irqstatus(dev, 0xffffffff);
 125	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 126
 127	ret = wait_for_completion_timeout(&tilcdc_crtc->palette_loaded,
 128					  msecs_to_jiffies(50));
 129	if (ret == 0)
 130		dev_err(dev->dev, "%s: Palette loading timeout", __func__);
 131
 132	/* Disable LCDC DMA and DMA Palette Loaded Interrupt. */
 133	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 134	if (priv->rev == 1)
 135		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
 136	else
 137		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG, LCDC_V2_PL_INT_ENA);
 138}
 139
 140static void tilcdc_crtc_enable_irqs(struct drm_device *dev)
 141{
 142	struct tilcdc_drm_private *priv = dev->dev_private;
 143
 144	tilcdc_clear_irqstatus(dev, 0xffffffff);
 145
 146	if (priv->rev == 1) {
 147		tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
 148			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
 149			LCDC_V1_UNDERFLOW_INT_ENA);
 150	} else {
 151		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG,
 152			LCDC_V2_UNDERFLOW_INT_ENA |
 153			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
 154	}
 155}
 156
 157static void tilcdc_crtc_disable_irqs(struct drm_device *dev)
 158{
 159	struct tilcdc_drm_private *priv = dev->dev_private;
 160
 161	/* disable irqs that we might have enabled: */
 162	if (priv->rev == 1) {
 163		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 164			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
 165			LCDC_V1_UNDERFLOW_INT_ENA | LCDC_V1_PL_INT_ENA);
 166		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
 167			LCDC_V1_END_OF_FRAME_INT_ENA);
 168	} else {
 169		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
 170			LCDC_V2_UNDERFLOW_INT_ENA | LCDC_V2_PL_INT_ENA |
 171			LCDC_V2_END_OF_FRAME0_INT_ENA |
 172			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
 173	}
 174}
 175
 176static void reset(struct drm_crtc *crtc)
 177{
 178	struct drm_device *dev = crtc->dev;
 179	struct tilcdc_drm_private *priv = dev->dev_private;
 180
 181	if (priv->rev != 2)
 182		return;
 183
 184	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
 185	usleep_range(250, 1000);
 186	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
 187}
 188
 189/*
 190 * Calculate the percentage difference between the requested pixel clock rate
 191 * and the effective rate resulting from calculating the clock divider value.
 192 */
 193static unsigned int tilcdc_pclk_diff(unsigned long rate,
 194				     unsigned long real_rate)
 195{
 196	int r = rate / 100, rr = real_rate / 100;
 197
 198	return (unsigned int)(abs(((rr - r) * 100) / r));
 199}
 200
 201static void tilcdc_crtc_set_clk(struct drm_crtc *crtc)
 202{
 203	struct drm_device *dev = crtc->dev;
 204	struct tilcdc_drm_private *priv = dev->dev_private;
 205	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 206	unsigned long clk_rate, real_pclk_rate, pclk_rate;
 207	unsigned int clkdiv;
 208	int ret;
 209
 210	clkdiv = 2; /* first try using a standard divider of 2 */
 211
 212	/* mode.clock is in KHz, set_rate wants parameter in Hz */
 213	pclk_rate = crtc->mode.clock * 1000;
 214
 215	ret = clk_set_rate(priv->clk, pclk_rate * clkdiv);
 216	clk_rate = clk_get_rate(priv->clk);
 217	real_pclk_rate = clk_rate / clkdiv;
 218	if (ret < 0 || tilcdc_pclk_diff(pclk_rate, real_pclk_rate) > 5) {
 219		/*
 220		 * If we fail to set the clock rate (some architectures don't
 221		 * use the common clock framework yet and may not implement
 222		 * all the clk API calls for every clock), try the next best
 223		 * thing: adjusting the clock divider, unless clk_get_rate()
 224		 * failed as well.
 225		 */
 226		if (!clk_rate) {
 227			/* Nothing more we can do. Just bail out. */
 228			dev_err(dev->dev,
 229				"failed to set the pixel clock - unable to read current lcdc clock rate\n");
 230			return;
 231		}
 232
 233		clkdiv = DIV_ROUND_CLOSEST(clk_rate, pclk_rate);
 234
 235		/*
 236		 * Emit a warning if the real clock rate resulting from the
 237		 * calculated divider differs much from the requested rate.
 238		 *
 239		 * 5% is an arbitrary value - LCDs are usually quite tolerant
 240		 * about pixel clock rates.
 241		 */
 242		real_pclk_rate = clk_rate / clkdiv;
 243
 244		if (tilcdc_pclk_diff(pclk_rate, real_pclk_rate) > 5) {
 245			dev_warn(dev->dev,
 246				 "effective pixel clock rate (%luHz) differs from the requested rate (%luHz)\n",
 247				 real_pclk_rate, pclk_rate);
 248		}
 249	}
 250
 251	tilcdc_crtc->lcd_fck_rate = clk_rate;
 252
 253	DBG("lcd_clk=%u, mode clock=%d, div=%u",
 254	    tilcdc_crtc->lcd_fck_rate, crtc->mode.clock, clkdiv);
 255
 256	/* Configure the LCD clock divisor. */
 257	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
 258		     LCDC_RASTER_MODE);
 259
 260	if (priv->rev == 2)
 261		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
 262				LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
 263				LCDC_V2_CORE_CLK_EN);
 264}
 265
 266static uint tilcdc_mode_hvtotal(const struct drm_display_mode *mode)
 267{
 268	return (uint) div_u64(1000llu * mode->htotal * mode->vtotal,
 269			      mode->clock);
 270}
 271
 272static void tilcdc_crtc_set_mode(struct drm_crtc *crtc)
 273{
 274	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 275	struct drm_device *dev = crtc->dev;
 276	struct tilcdc_drm_private *priv = dev->dev_private;
 277	const struct tilcdc_panel_info *info = tilcdc_crtc->info;
 278	uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
 279	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
 280	struct drm_framebuffer *fb = crtc->primary->state->fb;
 281
 282	if (WARN_ON(!info))
 283		return;
 284
 285	if (WARN_ON(!fb))
 286		return;
 287
 288	/* Configure the Burst Size and fifo threshold of DMA: */
 289	reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
 290	switch (info->dma_burst_sz) {
 291	case 1:
 292		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
 293		break;
 294	case 2:
 295		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
 296		break;
 297	case 4:
 298		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
 299		break;
 300	case 8:
 301		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
 302		break;
 303	case 16:
 304		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
 305		break;
 306	default:
 307		dev_err(dev->dev, "invalid burst size\n");
 308		return;
 309	}
 310	reg |= (info->fifo_th << 8);
 311	tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);
 312
 313	/* Configure timings: */
 314	hbp = mode->htotal - mode->hsync_end;
 315	hfp = mode->hsync_start - mode->hdisplay;
 316	hsw = mode->hsync_end - mode->hsync_start;
 317	vbp = mode->vtotal - mode->vsync_end;
 318	vfp = mode->vsync_start - mode->vdisplay;
 319	vsw = mode->vsync_end - mode->vsync_start;
 320
 321	DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
 322	    mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);
 323
 324	/* Set AC Bias Period and Number of Transitions per Interrupt: */
 325	reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
 326	reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
 327		LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);
 328
 329	/*
  330	 * subtract one from hfp, hbp, hsw because the hardware
  331	 * treats a value of 0 as 1
 332	 */
 333	if (priv->rev == 2) {
 334		/* clear bits we're going to set */
 335		reg &= ~0x78000033;
 336		reg |= ((hfp-1) & 0x300) >> 8;
 337		reg |= ((hbp-1) & 0x300) >> 4;
 338		reg |= ((hsw-1) & 0x3c0) << 21;
 339	}
 340	tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);
 341
 342	reg = (((mode->hdisplay >> 4) - 1) << 4) |
 343		(((hbp-1) & 0xff) << 24) |
 344		(((hfp-1) & 0xff) << 16) |
 345		(((hsw-1) & 0x3f) << 10);
 346	if (priv->rev == 2)
 347		reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
 348	tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);
 349
 350	reg = ((mode->vdisplay - 1) & 0x3ff) |
 351		((vbp & 0xff) << 24) |
 352		((vfp & 0xff) << 16) |
 353		(((vsw-1) & 0x3f) << 10);
 354	tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);
 355
 356	/*
  357	 * be sure to set Bit 10 for the V2 LCDC controller,
  358	 * otherwise the width is limited to 1024 pixels,
  359	 * preventing 1920x1080 from being supported.
 360	 */
 361	if (priv->rev == 2) {
 362		if ((mode->vdisplay - 1) & 0x400) {
 363			tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
 364				LCDC_LPP_B10);
 365		} else {
 366			tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
 367				LCDC_LPP_B10);
 368		}
 369	}
 370
 371	/* Configure display type: */
 372	reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
 373		~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
 374		  LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK |
 375		  0x000ff000 /* Palette Loading Delay bits */);
 376	reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
 377	if (info->tft_alt_mode)
 378		reg |= LCDC_TFT_ALT_ENABLE;
 379	if (priv->rev == 2) {
 380		switch (fb->format->format) {
 381		case DRM_FORMAT_BGR565:
 382		case DRM_FORMAT_RGB565:
 383			break;
 384		case DRM_FORMAT_XBGR8888:
 385		case DRM_FORMAT_XRGB8888:
 386			reg |= LCDC_V2_TFT_24BPP_UNPACK;
 387			fallthrough;
 388		case DRM_FORMAT_BGR888:
 389		case DRM_FORMAT_RGB888:
 390			reg |= LCDC_V2_TFT_24BPP_MODE;
 391			break;
 392		default:
 393			dev_err(dev->dev, "invalid pixel format\n");
 394			return;
 395		}
 396	}
 397	reg |= info->fdd << 12;
 398	tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);
 399
 400	if (info->invert_pxl_clk)
 401		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
 402	else
 403		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
 404
 405	if (info->sync_ctrl)
 406		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
 407	else
 408		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
 409
 410	if (info->sync_edge)
 411		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
 412	else
 413		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
 414
 415	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
 416		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
 417	else
 418		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
 419
 420	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
 421		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
 422	else
 423		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
 424
 425	if (info->raster_order)
 426		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
 427	else
 428		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
 429
 430	tilcdc_crtc_set_clk(crtc);
 431
 432	tilcdc_crtc_load_palette(crtc);
 433
 434	set_scanout(crtc, fb);
 435
 436	crtc->hwmode = crtc->state->adjusted_mode;
 437
 438	tilcdc_crtc->hvtotal_us =
 439		tilcdc_mode_hvtotal(&crtc->hwmode);
 440}
 441
 442static void tilcdc_crtc_enable(struct drm_crtc *crtc)
 443{
 444	struct drm_device *dev = crtc->dev;
 445	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 446	unsigned long flags;
 447
 448	mutex_lock(&tilcdc_crtc->enable_lock);
 449	if (tilcdc_crtc->enabled || tilcdc_crtc->shutdown) {
 450		mutex_unlock(&tilcdc_crtc->enable_lock);
 451		return;
 452	}
 453
 454	pm_runtime_get_sync(dev->dev);
 455
 456	reset(crtc);
 457
 458	tilcdc_crtc_set_mode(crtc);
 459
 460	tilcdc_crtc_enable_irqs(dev);
 461
 462	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
 463	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
 464			  LCDC_PALETTE_LOAD_MODE(DATA_ONLY),
 465			  LCDC_PALETTE_LOAD_MODE_MASK);
 466
 467	/* There is no real chance for a race here as the time stamp
 468	 * is taken before the raster DMA is started. The spin-lock is
 469	 * taken to have a memory barrier after taking the time-stamp
 470	 * and to avoid a context switch between taking the stamp and
 471	 * enabling the raster.
 472	 */
 473	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 474	tilcdc_crtc->last_vblank = ktime_get();
 475	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 476	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 477
 478	drm_crtc_vblank_on(crtc);
 479
 480	tilcdc_crtc->enabled = true;
 481	mutex_unlock(&tilcdc_crtc->enable_lock);
 482}
 483
 484static void tilcdc_crtc_atomic_enable(struct drm_crtc *crtc,
 485				      struct drm_atomic_state *state)
 486{
 487	tilcdc_crtc_enable(crtc);
 488}
 489
 490static void tilcdc_crtc_off(struct drm_crtc *crtc, bool shutdown)
 491{
 492	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 493	struct drm_device *dev = crtc->dev;
 494	int ret;
 495
 496	mutex_lock(&tilcdc_crtc->enable_lock);
 497	if (shutdown)
 498		tilcdc_crtc->shutdown = true;
 499	if (!tilcdc_crtc->enabled) {
 500		mutex_unlock(&tilcdc_crtc->enable_lock);
 501		return;
 502	}
 503	tilcdc_crtc->frame_done = false;
 504	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 505
 506	/*
  507	 * Wait for the framedone irq, which will still come before putting
  508	 * things to sleep.
 509	 */
 510	ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
 511				 tilcdc_crtc->frame_done,
 512				 msecs_to_jiffies(500));
 513	if (ret == 0)
 514		dev_err(dev->dev, "%s: timeout waiting for framedone\n",
 515			__func__);
 516
 517	drm_crtc_vblank_off(crtc);
 518
 519	spin_lock_irq(&crtc->dev->event_lock);
 520
 521	if (crtc->state->event) {
 522		drm_crtc_send_vblank_event(crtc, crtc->state->event);
 523		crtc->state->event = NULL;
 524	}
 525
 526	spin_unlock_irq(&crtc->dev->event_lock);
 527
 528	tilcdc_crtc_disable_irqs(dev);
 529
 530	pm_runtime_put_sync(dev->dev);
 531
 532	tilcdc_crtc->enabled = false;
 533	mutex_unlock(&tilcdc_crtc->enable_lock);
 534}
 535
 536static void tilcdc_crtc_disable(struct drm_crtc *crtc)
 537{
 538	tilcdc_crtc_off(crtc, false);
 539}
 540
 541static void tilcdc_crtc_atomic_disable(struct drm_crtc *crtc,
 542				       struct drm_atomic_state *state)
 543{
 544	tilcdc_crtc_disable(crtc);
 545}
 546
 547static void tilcdc_crtc_atomic_flush(struct drm_crtc *crtc,
 548				     struct drm_atomic_state *state)
 549{
 550	if (!crtc->state->event)
 551		return;
 552
 553	spin_lock_irq(&crtc->dev->event_lock);
 554	drm_crtc_send_vblank_event(crtc, crtc->state->event);
 555	crtc->state->event = NULL;
 556	spin_unlock_irq(&crtc->dev->event_lock);
 557}
 558
 559void tilcdc_crtc_shutdown(struct drm_crtc *crtc)
 560{
 561	tilcdc_crtc_off(crtc, true);
 562}
 563
 564static bool tilcdc_crtc_is_on(struct drm_crtc *crtc)
 565{
 566	return crtc->state && crtc->state->enable && crtc->state->active;
 567}
 568
 569static void tilcdc_crtc_recover_work(struct work_struct *work)
 570{
 571	struct tilcdc_crtc *tilcdc_crtc =
 572		container_of(work, struct tilcdc_crtc, recover_work);
 573	struct drm_crtc *crtc = &tilcdc_crtc->base;
 574
 575	dev_info(crtc->dev->dev, "%s: Reset CRTC", __func__);
 576
 577	drm_modeset_lock(&crtc->mutex, NULL);
 578
 579	if (!tilcdc_crtc_is_on(crtc))
 580		goto out;
 581
 582	tilcdc_crtc_disable(crtc);
 583	tilcdc_crtc_enable(crtc);
 584out:
 585	drm_modeset_unlock(&crtc->mutex);
 586}
 587
 588static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
 589{
 590	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
 591
 592	tilcdc_crtc_shutdown(crtc);
 593
 594	flush_workqueue(priv->wq);
 595
 596	of_node_put(crtc->port);
 597	drm_crtc_cleanup(crtc);
 598}
 599
 600int tilcdc_crtc_update_fb(struct drm_crtc *crtc,
 601		struct drm_framebuffer *fb,
 602		struct drm_pending_vblank_event *event)
 603{
 604	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 605	struct drm_device *dev = crtc->dev;
 606
 607	if (tilcdc_crtc->event) {
 608		dev_err(dev->dev, "already pending page flip!\n");
 609		return -EBUSY;
 610	}
 611
 612	tilcdc_crtc->event = event;
 613
 614	mutex_lock(&tilcdc_crtc->enable_lock);
 615
 616	if (tilcdc_crtc->enabled) {
 617		unsigned long flags;
 618		ktime_t next_vblank;
 619		s64 tdiff;
 620
 621		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 622
 623		next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
 624					   tilcdc_crtc->hvtotal_us);
 625		tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));
 626
 627		if (tdiff < TILCDC_VBLANK_SAFETY_THRESHOLD_US)
 628			tilcdc_crtc->next_fb = fb;
 629		else
 630			set_scanout(crtc, fb);
 631
 632		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 633	}
 634
 635	mutex_unlock(&tilcdc_crtc->enable_lock);
 636
 637	return 0;
 638}
 639
 640static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
 641		const struct drm_display_mode *mode,
 642		struct drm_display_mode *adjusted_mode)
 643{
 644	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 645
 646	if (!tilcdc_crtc->simulate_vesa_sync)
 647		return true;
 648
 649	/*
  650	 * tilcdc does not generate VESA-compliant sync; it aligns
  651	 * VS on the second edge of HS instead of the first edge.
  652	 * We use adjusted_mode to fix up the sync by aligning both
  653	 * rising edges and adding an HSKEW offset.
 654	 */
 655	adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
 656	adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;
 657
 658	if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
 659		adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
 660		adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
 661	} else {
 662		adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
 663		adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
 664	}
 665
 666	return true;
 667}
 668
 669static int tilcdc_crtc_atomic_check(struct drm_crtc *crtc,
 670				    struct drm_atomic_state *state)
 671{
 672	struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state,
 673									  crtc);
 674	/* If we are not active we don't care */
 675	if (!crtc_state->active)
 676		return 0;
 677
 678	if (state->planes[0].ptr != crtc->primary ||
 679	    state->planes[0].state == NULL ||
 680	    state->planes[0].state->crtc != crtc) {
 681		dev_dbg(crtc->dev->dev, "CRTC primary plane must be present");
 682		return -EINVAL;
 683	}
 684
 685	return 0;
 686}
 687
 688static int tilcdc_crtc_enable_vblank(struct drm_crtc *crtc)
 689{
 690	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 691	struct drm_device *dev = crtc->dev;
 692	struct tilcdc_drm_private *priv = dev->dev_private;
 693	unsigned long flags;
 694
 695	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 696
 697	tilcdc_clear_irqstatus(dev, LCDC_END_OF_FRAME0);
 698
 699	if (priv->rev == 1)
 700		tilcdc_set(dev, LCDC_DMA_CTRL_REG,
 701			   LCDC_V1_END_OF_FRAME_INT_ENA);
 702	else
 703		tilcdc_set(dev, LCDC_INT_ENABLE_SET_REG,
 704			   LCDC_V2_END_OF_FRAME0_INT_ENA);
 705
 706	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 707
 708	return 0;
 709}
 710
 711static void tilcdc_crtc_disable_vblank(struct drm_crtc *crtc)
 712{
 713	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 714	struct drm_device *dev = crtc->dev;
 715	struct tilcdc_drm_private *priv = dev->dev_private;
 716	unsigned long flags;
 717
 718	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 719
 720	if (priv->rev == 1)
 721		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
 722			     LCDC_V1_END_OF_FRAME_INT_ENA);
 723	else
 724		tilcdc_clear(dev, LCDC_INT_ENABLE_SET_REG,
 725			     LCDC_V2_END_OF_FRAME0_INT_ENA);
 726
 727	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 728}
 729
 730static void tilcdc_crtc_reset(struct drm_crtc *crtc)
 731{
 732	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 733	struct drm_device *dev = crtc->dev;
 734	int ret;
 735
 736	drm_atomic_helper_crtc_reset(crtc);
 737
  738	/* Turn the raster off if it is on for some reason. */
 739	pm_runtime_get_sync(dev->dev);
 740	if (tilcdc_read(dev, LCDC_RASTER_CTRL_REG) & LCDC_RASTER_ENABLE) {
 741		/* Enable DMA Frame Done Interrupt */
 742		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_FRAME_DONE);
 743		tilcdc_clear_irqstatus(dev, 0xffffffff);
 744
 745		tilcdc_crtc->frame_done = false;
 746		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 747
 748		ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
 749					 tilcdc_crtc->frame_done,
 750					 msecs_to_jiffies(500));
 751		if (ret == 0)
 752			dev_err(dev->dev, "%s: timeout waiting for framedone\n",
 753				__func__);
 754	}
 755	pm_runtime_put_sync(dev->dev);
 756}
 757
 758static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
 759	.destroy        = tilcdc_crtc_destroy,
 760	.set_config     = drm_atomic_helper_set_config,
 761	.page_flip      = drm_atomic_helper_page_flip,
 762	.reset		= tilcdc_crtc_reset,
 763	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
 764	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
 765	.enable_vblank	= tilcdc_crtc_enable_vblank,
 766	.disable_vblank	= tilcdc_crtc_disable_vblank,
 767};
 768
 769static enum drm_mode_status
 770tilcdc_crtc_mode_valid(struct drm_crtc *crtc,
 771		       const struct drm_display_mode *mode)
 772{
 773	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
 774	unsigned int bandwidth;
 775	uint32_t hbp, hfp, hsw, vbp, vfp, vsw;
 776
 777	/*
 778	 * check to see if the width is within the range that
 779	 * the LCD Controller physically supports
 780	 */
 781	if (mode->hdisplay > priv->max_width)
 782		return MODE_VIRTUAL_X;
 783
 784	/* width must be multiple of 16 */
 785	if (mode->hdisplay & 0xf)
 786		return MODE_VIRTUAL_X;
 787
 788	if (mode->vdisplay > 2048)
 789		return MODE_VIRTUAL_Y;
 790
 791	DBG("Processing mode %dx%d@%d with pixel clock %d",
 792		mode->hdisplay, mode->vdisplay,
 793		drm_mode_vrefresh(mode), mode->clock);
 794
 795	hbp = mode->htotal - mode->hsync_end;
 796	hfp = mode->hsync_start - mode->hdisplay;
 797	hsw = mode->hsync_end - mode->hsync_start;
 798	vbp = mode->vtotal - mode->vsync_end;
 799	vfp = mode->vsync_start - mode->vdisplay;
 800	vsw = mode->vsync_end - mode->vsync_start;
 801
 802	if ((hbp-1) & ~0x3ff) {
 803		DBG("Pruning mode: Horizontal Back Porch out of range");
 804		return MODE_HBLANK_WIDE;
 805	}
 806
 807	if ((hfp-1) & ~0x3ff) {
 808		DBG("Pruning mode: Horizontal Front Porch out of range");
 809		return MODE_HBLANK_WIDE;
 810	}
 811
 812	if ((hsw-1) & ~0x3ff) {
 813		DBG("Pruning mode: Horizontal Sync Width out of range");
 814		return MODE_HSYNC_WIDE;
 815	}
 816
 817	if (vbp & ~0xff) {
 818		DBG("Pruning mode: Vertical Back Porch out of range");
 819		return MODE_VBLANK_WIDE;
 820	}
 821
 822	if (vfp & ~0xff) {
 823		DBG("Pruning mode: Vertical Front Porch out of range");
 824		return MODE_VBLANK_WIDE;
 825	}
 826
 827	if ((vsw-1) & ~0x3f) {
 828		DBG("Pruning mode: Vertical Sync Width out of range");
 829		return MODE_VSYNC_WIDE;
 830	}
 831
 832	/*
 833	 * some devices have a maximum allowed pixel clock
 834	 * configured from the DT
 835	 */
 836	if (mode->clock > priv->max_pixelclock) {
 837		DBG("Pruning mode: pixel clock too high");
 838		return MODE_CLOCK_HIGH;
 839	}
 840
 841	/*
 842	 * some devices further limit the max horizontal resolution
 843	 * configured from the DT
 844	 */
 845	if (mode->hdisplay > priv->max_width)
 846		return MODE_BAD_WIDTH;
 847
 848	/* filter out modes that would require too much memory bandwidth: */
 849	bandwidth = mode->hdisplay * mode->vdisplay *
 850		drm_mode_vrefresh(mode);
 851	if (bandwidth > priv->max_bandwidth) {
 852		DBG("Pruning mode: exceeds defined bandwidth limit");
 853		return MODE_BAD;
 854	}
 855
 856	return MODE_OK;
 857}
 858
 859static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
 860	.mode_valid	= tilcdc_crtc_mode_valid,
 861	.mode_fixup	= tilcdc_crtc_mode_fixup,
 862	.atomic_check	= tilcdc_crtc_atomic_check,
 863	.atomic_enable	= tilcdc_crtc_atomic_enable,
 864	.atomic_disable	= tilcdc_crtc_atomic_disable,
 865	.atomic_flush	= tilcdc_crtc_atomic_flush,
 866};
 867
 868void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
 869		const struct tilcdc_panel_info *info)
 870{
 871	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 872	tilcdc_crtc->info = info;
 873}
 874
 875void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
 876					bool simulate_vesa_sync)
 877{
 878	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 879
 880	tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
 881}
 882
 883void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
 884{
 885	struct drm_device *dev = crtc->dev;
 886	struct tilcdc_drm_private *priv = dev->dev_private;
 887	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 888
 889	drm_modeset_lock(&crtc->mutex, NULL);
 890	if (tilcdc_crtc->lcd_fck_rate != clk_get_rate(priv->clk)) {
 891		if (tilcdc_crtc_is_on(crtc)) {
 892			pm_runtime_get_sync(dev->dev);
 893			tilcdc_crtc_disable(crtc);
 894
 895			tilcdc_crtc_set_clk(crtc);
 896
 897			tilcdc_crtc_enable(crtc);
 898			pm_runtime_put_sync(dev->dev);
 899		}
 900	}
 901	drm_modeset_unlock(&crtc->mutex);
 902}
 903
 904#define SYNC_LOST_COUNT_LIMIT 50
 905
 906irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
 907{
 908	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 909	struct drm_device *dev = crtc->dev;
 910	struct tilcdc_drm_private *priv = dev->dev_private;
 911	uint32_t stat, reg;
 912
 913	stat = tilcdc_read_irqstatus(dev);
 914	tilcdc_clear_irqstatus(dev, stat);
 915
 916	if (stat & LCDC_END_OF_FRAME0) {
 917		bool skip_event = false;
 918		ktime_t now;
 919
 920		now = ktime_get();
 921
 922		spin_lock(&tilcdc_crtc->irq_lock);
 923
 924		tilcdc_crtc->last_vblank = now;
 925
 926		if (tilcdc_crtc->next_fb) {
 927			set_scanout(crtc, tilcdc_crtc->next_fb);
 928			tilcdc_crtc->next_fb = NULL;
 929			skip_event = true;
 930		}
 931
 932		spin_unlock(&tilcdc_crtc->irq_lock);
 933
 934		drm_crtc_handle_vblank(crtc);
 935
 936		if (!skip_event) {
 937			struct drm_pending_vblank_event *event;
 938
 939			spin_lock(&dev->event_lock);
 940
 941			event = tilcdc_crtc->event;
 942			tilcdc_crtc->event = NULL;
 943			if (event)
 944				drm_crtc_send_vblank_event(crtc, event);
 945
 946			spin_unlock(&dev->event_lock);
 947		}
 948
 949		if (tilcdc_crtc->frame_intact)
 950			tilcdc_crtc->sync_lost_count = 0;
 951		else
 952			tilcdc_crtc->frame_intact = true;
 953	}
 954
 955	if (stat & LCDC_FIFO_UNDERFLOW)
 956		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow",
 957				    __func__, stat);
 958
 959	if (stat & LCDC_PL_LOAD_DONE) {
 960		complete(&tilcdc_crtc->palette_loaded);
 961		if (priv->rev == 1)
 962			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 963				     LCDC_V1_PL_INT_ENA);
 964		else
 965			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
 966				     LCDC_V2_PL_INT_ENA);
 967	}
 968
 969	if (stat & LCDC_SYNC_LOST) {
 970		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost",
 971				    __func__, stat);
 972		tilcdc_crtc->frame_intact = false;
 973		if (priv->rev == 1) {
 974			reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG);
 975			if (reg & LCDC_RASTER_ENABLE) {
 976				tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 977					     LCDC_RASTER_ENABLE);
 978				tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
 979					   LCDC_RASTER_ENABLE);
 980			}
 981		} else {
 982			if (tilcdc_crtc->sync_lost_count++ >
 983			    SYNC_LOST_COUNT_LIMIT) {
 984				dev_err(dev->dev,
 985					"%s(0x%08x): Sync lost flood detected, recovering",
 986					__func__, stat);
 987				queue_work(system_wq,
 988					   &tilcdc_crtc->recover_work);
 989				tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
 990					     LCDC_SYNC_LOST);
 991				tilcdc_crtc->sync_lost_count = 0;
 992			}
 993		}
 994	}
 995
 996	if (stat & LCDC_FRAME_DONE) {
 997		tilcdc_crtc->frame_done = true;
 998		wake_up(&tilcdc_crtc->frame_done_wq);
  999		/* rev 1 lcdc appears to hang if irq is not disabled here */
1000		if (priv->rev == 1)
1001			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
1002				     LCDC_V1_FRAME_DONE_INT_ENA);
1003	}
1004
1005	/* For revision 2 only */
1006	if (priv->rev == 2) {
1007		/* Indicate to LCDC that the interrupt service routine has
1008		 * completed, see 13.3.6.1.6 in AM335x TRM.
1009		 */
1010		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
1011	}
1012
1013	return IRQ_HANDLED;
1014}
1015
1016int tilcdc_crtc_create(struct drm_device *dev)
1017{
1018	struct tilcdc_drm_private *priv = dev->dev_private;
1019	struct tilcdc_crtc *tilcdc_crtc;
1020	struct drm_crtc *crtc;
1021	int ret;
1022
1023	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
1024	if (!tilcdc_crtc)
1025		return -ENOMEM;
1026
1027	init_completion(&tilcdc_crtc->palette_loaded);
1028	tilcdc_crtc->palette_base = dmam_alloc_coherent(dev->dev,
1029					TILCDC_PALETTE_SIZE,
1030					&tilcdc_crtc->palette_dma_handle,
1031					GFP_KERNEL | __GFP_ZERO);
1032	if (!tilcdc_crtc->palette_base)
1033		return -ENOMEM;
1034	*tilcdc_crtc->palette_base = TILCDC_PALETTE_FIRST_ENTRY;
1035
1036	crtc = &tilcdc_crtc->base;
1037
1038	ret = tilcdc_plane_init(dev, &tilcdc_crtc->primary);
1039	if (ret < 0)
1040		goto fail;
1041
1042	mutex_init(&tilcdc_crtc->enable_lock);
1043
1044	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);
1045
1046	spin_lock_init(&tilcdc_crtc->irq_lock);
1047	INIT_WORK(&tilcdc_crtc->recover_work, tilcdc_crtc_recover_work);
1048
1049	ret = drm_crtc_init_with_planes(dev, crtc,
1050					&tilcdc_crtc->primary,
1051					NULL,
1052					&tilcdc_crtc_funcs,
1053					"tilcdc crtc");
1054	if (ret < 0)
1055		goto fail;
1056
1057	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);
1058
1059	if (priv->is_componentized) {
1060		crtc->port = of_graph_get_port_by_id(dev->dev->of_node, 0);
1061		if (!crtc->port) { /* This should never happen */
1062			dev_err(dev->dev, "Port node not found in %pOF\n",
1063				dev->dev->of_node);
1064			ret = -EINVAL;
1065			goto fail;
1066		}
1067	}
1068
1069	priv->crtc = crtc;
1070	return 0;
1071
1072fail:
1073	tilcdc_crtc_destroy(crtc);
1074	return ret;
1075}
v6.9.4
   1// SPDX-License-Identifier: GPL-2.0-only
   2/*
   3 * Copyright (C) 2012 Texas Instruments
   4 * Author: Rob Clark <robdclark@gmail.com>
   5 */
   6
   7#include <linux/delay.h>
   8#include <linux/dma-mapping.h>
   9#include <linux/of_graph.h>
  10#include <linux/pm_runtime.h>
  11
  12#include <drm/drm_atomic.h>
  13#include <drm/drm_atomic_helper.h>
  14#include <drm/drm_crtc.h>
  15#include <drm/drm_fb_dma_helper.h>
  16#include <drm/drm_fourcc.h>
  17#include <drm/drm_framebuffer.h>
  18#include <drm/drm_gem_dma_helper.h>
  19#include <drm/drm_modeset_helper_vtables.h>
  20#include <drm/drm_print.h>
  21#include <drm/drm_vblank.h>
  22
  23#include "tilcdc_drv.h"
  24#include "tilcdc_regs.h"
  25
  26#define TILCDC_VBLANK_SAFETY_THRESHOLD_US	1000
  27#define TILCDC_PALETTE_SIZE			32
  28#define TILCDC_PALETTE_FIRST_ENTRY		0x4000
  29
  30struct tilcdc_crtc {
  31	struct drm_crtc base;
  32
  33	struct drm_plane primary;
  34	const struct tilcdc_panel_info *info;
  35	struct drm_pending_vblank_event *event;
  36	struct mutex enable_lock;
  37	bool enabled;
  38	bool shutdown;
  39	wait_queue_head_t frame_done_wq;
  40	bool frame_done;
  41	spinlock_t irq_lock;
  42
  43	unsigned int lcd_fck_rate;
  44
  45	ktime_t last_vblank;
  46	unsigned int hvtotal_us;
  47
  48	struct drm_framebuffer *next_fb;
  49
  50	/* Only set if an external encoder is connected */
  51	bool simulate_vesa_sync;
  52
  53	int sync_lost_count;
  54	bool frame_intact;
  55	struct work_struct recover_work;
  56
  57	dma_addr_t palette_dma_handle;
  58	u16 *palette_base;
  59	struct completion palette_loaded;
  60};
  61#define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)
  62
  63static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
  64{
  65	struct drm_device *dev = crtc->dev;
  66	struct tilcdc_drm_private *priv = dev->dev_private;
  67	struct drm_gem_dma_object *gem;
  68	dma_addr_t start, end;
  69	u64 dma_base_and_ceiling;
  70
  71	gem = drm_fb_dma_get_gem_obj(fb, 0);
  72
  73	start = gem->dma_addr + fb->offsets[0] +
  74		crtc->y * fb->pitches[0] +
  75		crtc->x * fb->format->cpp[0];
  76
  77	end = start + (crtc->mode.vdisplay * fb->pitches[0]);
  78
  79	/* Write LCDC_DMA_FB_BASE_ADDR_0_REG and LCDC_DMA_FB_CEILING_ADDR_0_REG
   80	 * with a single instruction, if available. This should make it more
  81	 * unlikely that LCDC would fetch the DMA addresses in the middle of
  82	 * an update.
  83	 */
  84	if (priv->rev == 1)
  85		end -= 1;
  86
  87	dma_base_and_ceiling = (u64)end << 32 | start;
  88	tilcdc_write64(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, dma_base_and_ceiling);
  89}
  90
  91/*
   92 * The driver currently supports only true color formats. For
  93 * true color the palette block is bypassed, but a 32 byte palette
  94 * should still be loaded. The first 16-bit entry must be 0x4000 while
  95 * all other entries must be zeroed.
  96 */
  97static void tilcdc_crtc_load_palette(struct drm_crtc *crtc)
  98{
  99	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 100	struct drm_device *dev = crtc->dev;
 101	struct tilcdc_drm_private *priv = dev->dev_private;
 102	int ret;
 103
 104	reinit_completion(&tilcdc_crtc->palette_loaded);
 105
 106	/* Tell the LCDC where the palette is located. */
 107	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG,
 108		     tilcdc_crtc->palette_dma_handle);
 109	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG,
 110		     (u32) tilcdc_crtc->palette_dma_handle +
 111		     TILCDC_PALETTE_SIZE - 1);
 112
 113	/* Set dma load mode for palette loading only. */
 114	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
 115			  LCDC_PALETTE_LOAD_MODE(PALETTE_ONLY),
 116			  LCDC_PALETTE_LOAD_MODE_MASK);
 117
 118	/* Enable DMA Palette Loaded Interrupt */
 119	if (priv->rev == 1)
 120		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
 121	else
 122		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_V2_PL_INT_ENA);
 123
 124	/* Enable LCDC DMA and wait for palette to be loaded. */
 125	tilcdc_clear_irqstatus(dev, 0xffffffff);
 126	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 127
 128	ret = wait_for_completion_timeout(&tilcdc_crtc->palette_loaded,
 129					  msecs_to_jiffies(50));
 130	if (ret == 0)
 131		dev_err(dev->dev, "%s: Palette loading timeout", __func__);
 132
 133	/* Disable LCDC DMA and DMA Palette Loaded Interrupt. */
 134	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 135	if (priv->rev == 1)
 136		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
 137	else
 138		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG, LCDC_V2_PL_INT_ENA);
 139}
 140
 141static void tilcdc_crtc_enable_irqs(struct drm_device *dev)
 142{
 143	struct tilcdc_drm_private *priv = dev->dev_private;
 144
 145	tilcdc_clear_irqstatus(dev, 0xffffffff);
 146
 147	if (priv->rev == 1) {
 148		tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
 149			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
 150			LCDC_V1_UNDERFLOW_INT_ENA);
 151	} else {
 152		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG,
 153			LCDC_V2_UNDERFLOW_INT_ENA |
 154			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
 155	}
 156}
 157
 158static void tilcdc_crtc_disable_irqs(struct drm_device *dev)
 159{
 160	struct tilcdc_drm_private *priv = dev->dev_private;
 161
 162	/* disable irqs that we might have enabled: */
 163	if (priv->rev == 1) {
 164		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 165			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
 166			LCDC_V1_UNDERFLOW_INT_ENA | LCDC_V1_PL_INT_ENA);
 167		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
 168			LCDC_V1_END_OF_FRAME_INT_ENA);
 169	} else {
 170		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
 171			LCDC_V2_UNDERFLOW_INT_ENA | LCDC_V2_PL_INT_ENA |
 172			LCDC_V2_END_OF_FRAME0_INT_ENA |
 173			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
 174	}
 175}
 176
 177static void reset(struct drm_crtc *crtc)
 178{
 179	struct drm_device *dev = crtc->dev;
 180	struct tilcdc_drm_private *priv = dev->dev_private;
 181
 182	if (priv->rev != 2)
 183		return;
 184
 185	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
 186	usleep_range(250, 1000);
 187	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
 188}
 189
 190/*
 191 * Calculate the percentage difference between the requested pixel clock rate
 192 * and the effective rate resulting from calculating the clock divider value.
 193 */
 194static unsigned int tilcdc_pclk_diff(unsigned long rate,
 195				     unsigned long real_rate)
 196{
 197	int r = rate / 100, rr = real_rate / 100;
 198
 199	return (unsigned int)(abs(((rr - r) * 100) / r));
 200}
 201
 202static void tilcdc_crtc_set_clk(struct drm_crtc *crtc)
 203{
 204	struct drm_device *dev = crtc->dev;
 205	struct tilcdc_drm_private *priv = dev->dev_private;
 206	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 207	unsigned long clk_rate, real_pclk_rate, pclk_rate;
 208	unsigned int clkdiv;
 209	int ret;
 210
 211	clkdiv = 2; /* first try using a standard divider of 2 */
 212
 213	/* mode.clock is in KHz, set_rate wants parameter in Hz */
 214	pclk_rate = crtc->mode.clock * 1000;
 215
 216	ret = clk_set_rate(priv->clk, pclk_rate * clkdiv);
 217	clk_rate = clk_get_rate(priv->clk);
 218	real_pclk_rate = clk_rate / clkdiv;
 219	if (ret < 0 || tilcdc_pclk_diff(pclk_rate, real_pclk_rate) > 5) {
 220		/*
 221		 * If we fail to set the clock rate (some architectures don't
 222		 * use the common clock framework yet and may not implement
 223		 * all the clk API calls for every clock), try the next best
 224		 * thing: adjusting the clock divider, unless clk_get_rate()
 225		 * failed as well.
 226		 */
 227		if (!clk_rate) {
 228			/* Nothing more we can do. Just bail out. */
 229			dev_err(dev->dev,
 230				"failed to set the pixel clock - unable to read current lcdc clock rate\n");
 231			return;
 232		}
 233
 234		clkdiv = DIV_ROUND_CLOSEST(clk_rate, pclk_rate);
 235
 236		/*
 237		 * Emit a warning if the real clock rate resulting from the
 238		 * calculated divider differs much from the requested rate.
 239		 *
 240		 * 5% is an arbitrary value - LCDs are usually quite tolerant
 241		 * about pixel clock rates.
 242		 */
 243		real_pclk_rate = clk_rate / clkdiv;
 244
 245		if (tilcdc_pclk_diff(pclk_rate, real_pclk_rate) > 5) {
 246			dev_warn(dev->dev,
 247				 "effective pixel clock rate (%luHz) differs from the requested rate (%luHz)\n",
 248				 real_pclk_rate, pclk_rate);
 249		}
 250	}
 251
 252	tilcdc_crtc->lcd_fck_rate = clk_rate;
 253
 254	DBG("lcd_clk=%u, mode clock=%d, div=%u",
 255	    tilcdc_crtc->lcd_fck_rate, crtc->mode.clock, clkdiv);
 256
 257	/* Configure the LCD clock divisor. */
 258	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
 259		     LCDC_RASTER_MODE);
 260
 261	if (priv->rev == 2)
 262		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
 263				LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
 264				LCDC_V2_CORE_CLK_EN);
 265}
 266
 267static uint tilcdc_mode_hvtotal(const struct drm_display_mode *mode)
 268{
 269	return (uint) div_u64(1000llu * mode->htotal * mode->vtotal,
 270			      mode->clock);
 271}
 272
 273static void tilcdc_crtc_set_mode(struct drm_crtc *crtc)
 274{
 275	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 276	struct drm_device *dev = crtc->dev;
 277	struct tilcdc_drm_private *priv = dev->dev_private;
 278	const struct tilcdc_panel_info *info = tilcdc_crtc->info;
 279	uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
 280	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
 281	struct drm_framebuffer *fb = crtc->primary->state->fb;
 282
 283	if (WARN_ON(!info))
 284		return;
 285
 286	if (WARN_ON(!fb))
 287		return;
 288
 289	/* Configure the Burst Size and fifo threshold of DMA: */
 290	reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
 291	switch (info->dma_burst_sz) {
 292	case 1:
 293		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
 294		break;
 295	case 2:
 296		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
 297		break;
 298	case 4:
 299		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
 300		break;
 301	case 8:
 302		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
 303		break;
 304	case 16:
 305		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
 306		break;
 307	default:
 308		dev_err(dev->dev, "invalid burst size\n");
 309		return;
 310	}
 311	reg |= (info->fifo_th << 8);
 312	tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);
 313
 314	/* Configure timings: */
 315	hbp = mode->htotal - mode->hsync_end;
 316	hfp = mode->hsync_start - mode->hdisplay;
 317	hsw = mode->hsync_end - mode->hsync_start;
 318	vbp = mode->vtotal - mode->vsync_end;
 319	vfp = mode->vsync_start - mode->vdisplay;
 320	vsw = mode->vsync_end - mode->vsync_start;
 321
 322	DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
 323	    mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);
 324
 325	/* Set AC Bias Period and Number of Transitions per Interrupt: */
 326	reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
 327	reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
 328		LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);
 329
 330	/*
  331	 * subtract one from hfp, hbp, hsw because the hardware
  332	 * treats a value of 0 as 1
 333	 */
 334	if (priv->rev == 2) {
 335		/* clear bits we're going to set */
 336		reg &= ~0x78000033;
 337		reg |= ((hfp-1) & 0x300) >> 8;
 338		reg |= ((hbp-1) & 0x300) >> 4;
 339		reg |= ((hsw-1) & 0x3c0) << 21;
 340	}
 341	tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);
 342
 343	reg = (((mode->hdisplay >> 4) - 1) << 4) |
 344		(((hbp-1) & 0xff) << 24) |
 345		(((hfp-1) & 0xff) << 16) |
 346		(((hsw-1) & 0x3f) << 10);
 347	if (priv->rev == 2)
 348		reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
 349	tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);
 350
 351	reg = ((mode->vdisplay - 1) & 0x3ff) |
 352		((vbp & 0xff) << 24) |
 353		((vfp & 0xff) << 16) |
 354		(((vsw-1) & 0x3f) << 10);
 355	tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);
 356
 357	/*
  358	 * be sure to set Bit 10 for the V2 LCDC controller,
  359	 * otherwise the width is limited to 1024 pixels,
  360	 * preventing 1920x1080 from being supported.
 361	 */
 362	if (priv->rev == 2) {
 363		if ((mode->vdisplay - 1) & 0x400) {
 364			tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
 365				LCDC_LPP_B10);
 366		} else {
 367			tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
 368				LCDC_LPP_B10);
 369		}
 370	}
 371
 372	/* Configure display type: */
 373	reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
 374		~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
 375		  LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK |
 376		  0x000ff000 /* Palette Loading Delay bits */);
 377	reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
 378	if (info->tft_alt_mode)
 379		reg |= LCDC_TFT_ALT_ENABLE;
 380	if (priv->rev == 2) {
 381		switch (fb->format->format) {
 382		case DRM_FORMAT_BGR565:
 383		case DRM_FORMAT_RGB565:
 384			break;
 385		case DRM_FORMAT_XBGR8888:
 386		case DRM_FORMAT_XRGB8888:
 387			reg |= LCDC_V2_TFT_24BPP_UNPACK;
 388			fallthrough;
 389		case DRM_FORMAT_BGR888:
 390		case DRM_FORMAT_RGB888:
 391			reg |= LCDC_V2_TFT_24BPP_MODE;
 392			break;
 393		default:
 394			dev_err(dev->dev, "invalid pixel format\n");
 395			return;
 396		}
 397	}
 398	reg |= info->fdd << 12;
 399	tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);
 400
 401	if (info->invert_pxl_clk)
 402		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
 403	else
 404		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
 405
 406	if (info->sync_ctrl)
 407		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
 408	else
 409		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
 410
 411	if (info->sync_edge)
 412		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
 413	else
 414		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
 415
 416	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
 417		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
 418	else
 419		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
 420
 421	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
 422		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
 423	else
 424		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
 425
 426	if (info->raster_order)
 427		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
 428	else
 429		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
 430
 431	tilcdc_crtc_set_clk(crtc);
 432
 433	tilcdc_crtc_load_palette(crtc);
 434
 435	set_scanout(crtc, fb);
 436
 437	drm_mode_copy(&crtc->hwmode, &crtc->state->adjusted_mode);
 438
 439	tilcdc_crtc->hvtotal_us =
 440		tilcdc_mode_hvtotal(&crtc->hwmode);
 441}
 442
 443static void tilcdc_crtc_enable(struct drm_crtc *crtc)
 444{
 445	struct drm_device *dev = crtc->dev;
 446	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 447	unsigned long flags;
 448
 449	mutex_lock(&tilcdc_crtc->enable_lock);
 450	if (tilcdc_crtc->enabled || tilcdc_crtc->shutdown) {
 451		mutex_unlock(&tilcdc_crtc->enable_lock);
 452		return;
 453	}
 454
 455	pm_runtime_get_sync(dev->dev);
 456
 457	reset(crtc);
 458
 459	tilcdc_crtc_set_mode(crtc);
 460
 461	tilcdc_crtc_enable_irqs(dev);
 462
 463	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
 464	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
 465			  LCDC_PALETTE_LOAD_MODE(DATA_ONLY),
 466			  LCDC_PALETTE_LOAD_MODE_MASK);
 467
 468	/* There is no real chance for a race here as the time stamp
 469	 * is taken before the raster DMA is started. The spin-lock is
 470	 * taken to have a memory barrier after taking the time-stamp
 471	 * and to avoid a context switch between taking the stamp and
 472	 * enabling the raster.
 473	 */
 474	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 475	tilcdc_crtc->last_vblank = ktime_get();
 476	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 477	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 478
 479	drm_crtc_vblank_on(crtc);
 480
 481	tilcdc_crtc->enabled = true;
 482	mutex_unlock(&tilcdc_crtc->enable_lock);
 483}
 484
 485static void tilcdc_crtc_atomic_enable(struct drm_crtc *crtc,
 486				      struct drm_atomic_state *state)
 487{
 488	tilcdc_crtc_enable(crtc);
 489}
 490
 491static void tilcdc_crtc_off(struct drm_crtc *crtc, bool shutdown)
 492{
 493	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 494	struct drm_device *dev = crtc->dev;
 495	int ret;
 496
 497	mutex_lock(&tilcdc_crtc->enable_lock);
 498	if (shutdown)
 499		tilcdc_crtc->shutdown = true;
 500	if (!tilcdc_crtc->enabled) {
 501		mutex_unlock(&tilcdc_crtc->enable_lock);
 502		return;
 503	}
 504	tilcdc_crtc->frame_done = false;
 505	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 506
 507	/*
  508	 * Wait for the framedone irq, which will still come before putting
  509	 * things to sleep.
 510	 */
 511	ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
 512				 tilcdc_crtc->frame_done,
 513				 msecs_to_jiffies(500));
 514	if (ret == 0)
 515		dev_err(dev->dev, "%s: timeout waiting for framedone\n",
 516			__func__);
 517
 518	drm_crtc_vblank_off(crtc);
 519
 520	spin_lock_irq(&crtc->dev->event_lock);
 521
 522	if (crtc->state->event) {
 523		drm_crtc_send_vblank_event(crtc, crtc->state->event);
 524		crtc->state->event = NULL;
 525	}
 526
 527	spin_unlock_irq(&crtc->dev->event_lock);
 528
 529	tilcdc_crtc_disable_irqs(dev);
 530
 531	pm_runtime_put_sync(dev->dev);
 532
 533	tilcdc_crtc->enabled = false;
 534	mutex_unlock(&tilcdc_crtc->enable_lock);
 535}
 536
 537static void tilcdc_crtc_disable(struct drm_crtc *crtc)
 538{
 539	tilcdc_crtc_off(crtc, false);
 540}
 541
 542static void tilcdc_crtc_atomic_disable(struct drm_crtc *crtc,
 543				       struct drm_atomic_state *state)
 544{
 545	tilcdc_crtc_disable(crtc);
 546}
 547
 548static void tilcdc_crtc_atomic_flush(struct drm_crtc *crtc,
 549				     struct drm_atomic_state *state)
 550{
 551	if (!crtc->state->event)
 552		return;
 553
 554	spin_lock_irq(&crtc->dev->event_lock);
 555	drm_crtc_send_vblank_event(crtc, crtc->state->event);
 556	crtc->state->event = NULL;
 557	spin_unlock_irq(&crtc->dev->event_lock);
 558}
 559
 560void tilcdc_crtc_shutdown(struct drm_crtc *crtc)
 561{
 562	tilcdc_crtc_off(crtc, true);
 563}
 564
 565static bool tilcdc_crtc_is_on(struct drm_crtc *crtc)
 566{
 567	return crtc->state && crtc->state->enable && crtc->state->active;
 568}
 569
 570static void tilcdc_crtc_recover_work(struct work_struct *work)
 571{
 572	struct tilcdc_crtc *tilcdc_crtc =
 573		container_of(work, struct tilcdc_crtc, recover_work);
 574	struct drm_crtc *crtc = &tilcdc_crtc->base;
 575
 576	dev_info(crtc->dev->dev, "%s: Reset CRTC", __func__);
 577
 578	drm_modeset_lock(&crtc->mutex, NULL);
 579
 580	if (!tilcdc_crtc_is_on(crtc))
 581		goto out;
 582
 583	tilcdc_crtc_disable(crtc);
 584	tilcdc_crtc_enable(crtc);
 585out:
 586	drm_modeset_unlock(&crtc->mutex);
 587}
 588
 589static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
 590{
 591	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
 592
 593	tilcdc_crtc_shutdown(crtc);
 594
 595	flush_workqueue(priv->wq);
 596
 597	of_node_put(crtc->port);
 598	drm_crtc_cleanup(crtc);
 599}
 600
 601int tilcdc_crtc_update_fb(struct drm_crtc *crtc,
 602		struct drm_framebuffer *fb,
 603		struct drm_pending_vblank_event *event)
 604{
 605	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 606	struct drm_device *dev = crtc->dev;
 607
 608	if (tilcdc_crtc->event) {
 609		dev_err(dev->dev, "already pending page flip!\n");
 610		return -EBUSY;
 611	}
 612
 613	tilcdc_crtc->event = event;
 614
 615	mutex_lock(&tilcdc_crtc->enable_lock);
 616
 617	if (tilcdc_crtc->enabled) {
 618		unsigned long flags;
 619		ktime_t next_vblank;
 620		s64 tdiff;
 621
 622		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 623
 624		next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
 625					   tilcdc_crtc->hvtotal_us);
 626		tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));
 627
 628		if (tdiff < TILCDC_VBLANK_SAFETY_THRESHOLD_US)
 629			tilcdc_crtc->next_fb = fb;
 630		else
 631			set_scanout(crtc, fb);
 632
 633		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 634	}
 635
 636	mutex_unlock(&tilcdc_crtc->enable_lock);
 637
 638	return 0;
 639}
 640
 641static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
 642		const struct drm_display_mode *mode,
 643		struct drm_display_mode *adjusted_mode)
 644{
 645	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 646
 647	if (!tilcdc_crtc->simulate_vesa_sync)
 648		return true;
 649
 650	/*
  651	 * tilcdc does not generate VESA-compliant sync; it aligns
  652	 * VS on the second edge of HS instead of the first edge.
  653	 * We use adjusted_mode to fix up the sync by aligning both
  654	 * rising edges and adding an HSKEW offset.
 655	 */
 656	adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
 657	adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;
 658
 659	if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
 660		adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
 661		adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
 662	} else {
 663		adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
 664		adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
 665	}
 666
 667	return true;
 668}
 669
 670static int tilcdc_crtc_atomic_check(struct drm_crtc *crtc,
 671				    struct drm_atomic_state *state)
 672{
 673	struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state,
 674									  crtc);
 675	/* If we are not active we don't care */
 676	if (!crtc_state->active)
 677		return 0;
 678
 679	if (state->planes[0].ptr != crtc->primary ||
 680	    state->planes[0].state == NULL ||
 681	    state->planes[0].state->crtc != crtc) {
 682		dev_dbg(crtc->dev->dev, "CRTC primary plane must be present");
 683		return -EINVAL;
 684	}
 685
 686	return 0;
 687}
 688
 689static int tilcdc_crtc_enable_vblank(struct drm_crtc *crtc)
 690{
 691	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 692	struct drm_device *dev = crtc->dev;
 693	struct tilcdc_drm_private *priv = dev->dev_private;
 694	unsigned long flags;
 695
 696	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 697
 698	tilcdc_clear_irqstatus(dev, LCDC_END_OF_FRAME0);
 699
 700	if (priv->rev == 1)
 701		tilcdc_set(dev, LCDC_DMA_CTRL_REG,
 702			   LCDC_V1_END_OF_FRAME_INT_ENA);
 703	else
 704		tilcdc_set(dev, LCDC_INT_ENABLE_SET_REG,
 705			   LCDC_V2_END_OF_FRAME0_INT_ENA);
 706
 707	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 708
 709	return 0;
 710}
 711
 712static void tilcdc_crtc_disable_vblank(struct drm_crtc *crtc)
 713{
 714	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 715	struct drm_device *dev = crtc->dev;
 716	struct tilcdc_drm_private *priv = dev->dev_private;
 717	unsigned long flags;
 718
 719	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 720
 721	if (priv->rev == 1)
 722		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
 723			     LCDC_V1_END_OF_FRAME_INT_ENA);
 724	else
 725		tilcdc_clear(dev, LCDC_INT_ENABLE_SET_REG,
 726			     LCDC_V2_END_OF_FRAME0_INT_ENA);
 727
 728	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 729}
 730
 731static void tilcdc_crtc_reset(struct drm_crtc *crtc)
 732{
 733	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 734	struct drm_device *dev = crtc->dev;
 735	int ret;
 736
 737	drm_atomic_helper_crtc_reset(crtc);
 738
  739	/* Turn the raster off if it is on for some reason. */
 740	pm_runtime_get_sync(dev->dev);
 741	if (tilcdc_read(dev, LCDC_RASTER_CTRL_REG) & LCDC_RASTER_ENABLE) {
 742		/* Enable DMA Frame Done Interrupt */
 743		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_FRAME_DONE);
 744		tilcdc_clear_irqstatus(dev, 0xffffffff);
 745
 746		tilcdc_crtc->frame_done = false;
 747		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 748
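     		/*
     		 * The raster only stops at a frame boundary; wait for the
     		 * FRAME_DONE interrupt before dropping the runtime PM
     		 * reference.
     		 */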
 749		ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
 750					 tilcdc_crtc->frame_done,
 751					 msecs_to_jiffies(500));
 752		if (ret == 0)
 753			dev_err(dev->dev, "%s: timeout waiting for framedone\n",
 754				__func__);
 755	}
 756	pm_runtime_put_sync(dev->dev);
 757}
 758
 759static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
 760	.destroy        = tilcdc_crtc_destroy,
 761	.set_config     = drm_atomic_helper_set_config,
 762	.page_flip      = drm_atomic_helper_page_flip,
 763	.reset		= tilcdc_crtc_reset,
 764	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
 765	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
 766	.enable_vblank	= tilcdc_crtc_enable_vblank,
 767	.disable_vblank	= tilcdc_crtc_disable_vblank,
 768};
 769
 770static enum drm_mode_status
 771tilcdc_crtc_mode_valid(struct drm_crtc *crtc,
 772		       const struct drm_display_mode *mode)
 773{
 774	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
 775	unsigned int bandwidth;
 776	uint32_t hbp, hfp, hsw, vbp, vfp, vsw;
 777
  778	/*
  779	 * Check whether the width is within the range that the LCD
  780	 * controller physically supports.
  781	 */
 782	if (mode->hdisplay > priv->max_width)
 783		return MODE_VIRTUAL_X;
 784
 785	/* width must be multiple of 16 */
 786	if (mode->hdisplay & 0xf)
 787		return MODE_VIRTUAL_X;
 788
 789	if (mode->vdisplay > 2048)
 790		return MODE_VIRTUAL_Y;
 791
 792	DBG("Processing mode %dx%d@%d with pixel clock %d",
 793		mode->hdisplay, mode->vdisplay,
 794		drm_mode_vrefresh(mode), mode->clock);
 795
 796	hbp = mode->htotal - mode->hsync_end;
 797	hfp = mode->hsync_start - mode->hdisplay;
 798	hsw = mode->hsync_end - mode->hsync_start;
 799	vbp = mode->vtotal - mode->vsync_end;
 800	vfp = mode->vsync_start - mode->vdisplay;
 801	vsw = mode->vsync_end - mode->vsync_start;
 802
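     	/*
     	 * Prune modes whose blanking intervals do not fit the raster
     	 * timing fields: the checks below allow 10 bits for the
     	 * horizontal porches and sync width (minus one), 8 bits for the
     	 * vertical porches and 6 bits for the vertical sync width
     	 * (minus one).
     	 */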
 803	if ((hbp-1) & ~0x3ff) {
 804		DBG("Pruning mode: Horizontal Back Porch out of range");
 805		return MODE_HBLANK_WIDE;
 806	}
 807
 808	if ((hfp-1) & ~0x3ff) {
 809		DBG("Pruning mode: Horizontal Front Porch out of range");
 810		return MODE_HBLANK_WIDE;
 811	}
 812
 813	if ((hsw-1) & ~0x3ff) {
 814		DBG("Pruning mode: Horizontal Sync Width out of range");
 815		return MODE_HSYNC_WIDE;
 816	}
 817
 818	if (vbp & ~0xff) {
 819		DBG("Pruning mode: Vertical Back Porch out of range");
 820		return MODE_VBLANK_WIDE;
 821	}
 822
 823	if (vfp & ~0xff) {
 824		DBG("Pruning mode: Vertical Front Porch out of range");
 825		return MODE_VBLANK_WIDE;
 826	}
 827
 828	if ((vsw-1) & ~0x3f) {
 829		DBG("Pruning mode: Vertical Sync Width out of range");
 830		return MODE_VSYNC_WIDE;
 831	}
 832
 833	/*
 834	 * some devices have a maximum allowed pixel clock
 835	 * configured from the DT
 836	 */
 837	if (mode->clock > priv->max_pixelclock) {
 838		DBG("Pruning mode: pixel clock too high");
 839		return MODE_CLOCK_HIGH;
 840	}
 841
 842	/*
 843	 * some devices further limit the max horizontal resolution
 844	 * configured from the DT
 845	 */
 846	if (mode->hdisplay > priv->max_width)
 847		return MODE_BAD_WIDTH;
 848
 849	/* filter out modes that would require too much memory bandwidth: */
 850	bandwidth = mode->hdisplay * mode->vdisplay *
 851		drm_mode_vrefresh(mode);
 852	if (bandwidth > priv->max_bandwidth) {
 853		DBG("Pruning mode: exceeds defined bandwidth limit");
 854		return MODE_BAD;
 855	}
 856
 857	return MODE_OK;
 858}
 859
 860static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
 861	.mode_valid	= tilcdc_crtc_mode_valid,
 862	.mode_fixup	= tilcdc_crtc_mode_fixup,
 863	.atomic_check	= tilcdc_crtc_atomic_check,
 864	.atomic_enable	= tilcdc_crtc_atomic_enable,
 865	.atomic_disable	= tilcdc_crtc_atomic_disable,
 866	.atomic_flush	= tilcdc_crtc_atomic_flush,
 867};
 868
 869void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
 870		const struct tilcdc_panel_info *info)
 871{
 872	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 873	tilcdc_crtc->info = info;
 874}
 875
 876void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
 877					bool simulate_vesa_sync)
 878{
 879	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 880
 881	tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
 882}
 883
 884void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
 885{
 886	struct drm_device *dev = crtc->dev;
 887	struct tilcdc_drm_private *priv = dev->dev_private;
 888	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 889
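     	/*
     	 * If the functional clock rate changed behind our back, stop the
     	 * raster, reprogram the pixel clock via tilcdc_crtc_set_clk() and
     	 * restart the raster.
     	 */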
 890	drm_modeset_lock(&crtc->mutex, NULL);
 891	if (tilcdc_crtc->lcd_fck_rate != clk_get_rate(priv->clk)) {
 892		if (tilcdc_crtc_is_on(crtc)) {
 893			pm_runtime_get_sync(dev->dev);
 894			tilcdc_crtc_disable(crtc);
 895
 896			tilcdc_crtc_set_clk(crtc);
 897
 898			tilcdc_crtc_enable(crtc);
 899			pm_runtime_put_sync(dev->dev);
 900		}
 901	}
 902	drm_modeset_unlock(&crtc->mutex);
 903}
 904
 905#define SYNC_LOST_COUNT_LIMIT 50
 906
 907irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
 908{
 909	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 910	struct drm_device *dev = crtc->dev;
 911	struct tilcdc_drm_private *priv = dev->dev_private;
 912	uint32_t stat, reg;
 913
 914	stat = tilcdc_read_irqstatus(dev);
 915	tilcdc_clear_irqstatus(dev, stat);
 916
 917	if (stat & LCDC_END_OF_FRAME0) {
 918		bool skip_event = false;
 919		ktime_t now;
 920
 921		now = ktime_get();
 922
 923		spin_lock(&tilcdc_crtc->irq_lock);
 924
 925		tilcdc_crtc->last_vblank = now;
 926
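     		/*
     		 * Complete a page flip that was deferred because it raced
     		 * with the previous vblank; its event is then sent on the
     		 * next end-of-frame interrupt instead of this one.
     		 */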
 927		if (tilcdc_crtc->next_fb) {
 928			set_scanout(crtc, tilcdc_crtc->next_fb);
 929			tilcdc_crtc->next_fb = NULL;
 930			skip_event = true;
 931		}
 932
 933		spin_unlock(&tilcdc_crtc->irq_lock);
 934
 935		drm_crtc_handle_vblank(crtc);
 936
 937		if (!skip_event) {
 938			struct drm_pending_vblank_event *event;
 939
 940			spin_lock(&dev->event_lock);
 941
 942			event = tilcdc_crtc->event;
 943			tilcdc_crtc->event = NULL;
 944			if (event)
 945				drm_crtc_send_vblank_event(crtc, event);
 946
 947			spin_unlock(&dev->event_lock);
 948		}
 949
 950		if (tilcdc_crtc->frame_intact)
 951			tilcdc_crtc->sync_lost_count = 0;
 952		else
 953			tilcdc_crtc->frame_intact = true;
 954	}
 955
 956	if (stat & LCDC_FIFO_UNDERFLOW)
  957		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow\n",
 958				    __func__, stat);
 959
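     	/* Palette DMA load finished; mask the palette-loaded interrupt. */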
 960	if (stat & LCDC_PL_LOAD_DONE) {
 961		complete(&tilcdc_crtc->palette_loaded);
 962		if (priv->rev == 1)
 963			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 964				     LCDC_V1_PL_INT_ENA);
 965		else
 966			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
 967				     LCDC_V2_PL_INT_ENA);
 968	}
 969
 970	if (stat & LCDC_SYNC_LOST) {
  971		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost\n",
 972				    __func__, stat);
 973		tilcdc_crtc->frame_intact = false;
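     		/*
     		 * Rev 1 recovers by restarting the raster; rev 2 counts the
     		 * sync losses and schedules the recover worker once the
     		 * flood limit is exceeded.
     		 */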
 974		if (priv->rev == 1) {
 975			reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG);
 976			if (reg & LCDC_RASTER_ENABLE) {
 977				tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 978					     LCDC_RASTER_ENABLE);
 979				tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
 980					   LCDC_RASTER_ENABLE);
 981			}
 982		} else {
 983			if (tilcdc_crtc->sync_lost_count++ >
 984			    SYNC_LOST_COUNT_LIMIT) {
 985				dev_err(dev->dev,
  986					"%s(0x%08x): Sync lost flood detected, recovering\n",
 987					__func__, stat);
 988				queue_work(system_wq,
 989					   &tilcdc_crtc->recover_work);
 990				tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
 991					     LCDC_SYNC_LOST);
 992				tilcdc_crtc->sync_lost_count = 0;
 993			}
 994		}
 995	}
 996
 997	if (stat & LCDC_FRAME_DONE) {
 998		tilcdc_crtc->frame_done = true;
 999		wake_up(&tilcdc_crtc->frame_done_wq);
1000		/* rev 1 lcdc appears to hang if irq is not disabled here */
1001		if (priv->rev == 1)
1002			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
1003				     LCDC_V1_FRAME_DONE_INT_ENA);
1004	}
1005
1006	/* For revision 2 only */
1007	if (priv->rev == 2) {
1008		/* Indicate to LCDC that the interrupt service routine has
1009		 * completed, see 13.3.6.1.6 in AM335x TRM.
1010		 */
1011		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
1012	}
1013
1014	return IRQ_HANDLED;
1015}
1016
1017int tilcdc_crtc_create(struct drm_device *dev)
1018{
1019	struct tilcdc_drm_private *priv = dev->dev_private;
1020	struct tilcdc_crtc *tilcdc_crtc;
1021	struct drm_crtc *crtc;
1022	int ret;
1023
1024	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
1025	if (!tilcdc_crtc)
1026		return -ENOMEM;
1027
1028	init_completion(&tilcdc_crtc->palette_loaded);
1029	tilcdc_crtc->palette_base = dmam_alloc_coherent(dev->dev,
1030					TILCDC_PALETTE_SIZE,
1031					&tilcdc_crtc->palette_dma_handle,
1032					GFP_KERNEL | __GFP_ZERO);
1033	if (!tilcdc_crtc->palette_base)
1034		return -ENOMEM;
1035	*tilcdc_crtc->palette_base = TILCDC_PALETTE_FIRST_ENTRY;
1036
1037	crtc = &tilcdc_crtc->base;
1038
1039	ret = tilcdc_plane_init(dev, &tilcdc_crtc->primary);
1040	if (ret < 0)
1041		goto fail;
1042
1043	mutex_init(&tilcdc_crtc->enable_lock);
1044
1045	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);
1046
1047	spin_lock_init(&tilcdc_crtc->irq_lock);
1048	INIT_WORK(&tilcdc_crtc->recover_work, tilcdc_crtc_recover_work);
1049
1050	ret = drm_crtc_init_with_planes(dev, crtc,
1051					&tilcdc_crtc->primary,
1052					NULL,
1053					&tilcdc_crtc_funcs,
1054					"tilcdc crtc");
1055	if (ret < 0)
1056		goto fail;
1057
1058	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);
1059
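     	/*
     	 * For componentized setups, remember the crtc's OF graph port so
     	 * that encoders described in the graph can be matched to this
     	 * crtc (see drm_of_find_possible_crtcs()).
     	 */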
1060	if (priv->is_componentized) {
1061		crtc->port = of_graph_get_port_by_id(dev->dev->of_node, 0);
1062		if (!crtc->port) { /* This should never happen */
1063			dev_err(dev->dev, "Port node not found in %pOF\n",
1064				dev->dev->of_node);
1065			ret = -EINVAL;
1066			goto fail;
1067		}
1068	}
1069
1070	priv->crtc = crtc;
1071	return 0;
1072
1073fail:
1074	tilcdc_crtc_destroy(crtc);
1075	return ret;
1076}