v5.9
   1// SPDX-License-Identifier: GPL-2.0-only
   2/*
   3 * Copyright (C) 2012 Texas Instruments
   4 * Author: Rob Clark <robdclark@gmail.com>
   5 */
   6
   7#include <linux/delay.h>
   8#include <linux/dma-mapping.h>
   9#include <linux/of_graph.h>
  10#include <linux/pm_runtime.h>
  11
  12#include <drm/drm_atomic.h>
  13#include <drm/drm_atomic_helper.h>
  14#include <drm/drm_crtc.h>
  15#include <drm/drm_fb_cma_helper.h>
  16#include <drm/drm_fourcc.h>
  17#include <drm/drm_gem_cma_helper.h>
  18#include <drm/drm_modeset_helper_vtables.h>
  19#include <drm/drm_print.h>
  20#include <drm/drm_vblank.h>
  21
  22#include "tilcdc_drv.h"
  23#include "tilcdc_regs.h"
  24
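/* TILCDC_VBLANK_SAFETY_THRESHOLD_US: if the next vblank is expected sooner
 * than this many microseconds, defer scanout address updates instead of
 * applying them immediately (see tilcdc_crtc_update_fb()).
 */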
  25#define TILCDC_VBLANK_SAFETY_THRESHOLD_US	1000
  26#define TILCDC_PALETTE_SIZE			32
  27#define TILCDC_PALETTE_FIRST_ENTRY		0x4000
  28
  29struct tilcdc_crtc {
  30	struct drm_crtc base;
  31
  32	struct drm_plane primary;
  33	const struct tilcdc_panel_info *info;
  34	struct drm_pending_vblank_event *event;
  35	struct mutex enable_lock;
  36	bool enabled;
  37	bool shutdown;
  38	wait_queue_head_t frame_done_wq;
  39	bool frame_done;
  40	spinlock_t irq_lock;
  41
  42	unsigned int lcd_fck_rate;
  43
  44	ktime_t last_vblank;
  45	unsigned int hvtotal_us;
  46
  47	struct drm_framebuffer *next_fb;
  48
  49	/* Only set if an external encoder is connected */
  50	bool simulate_vesa_sync;
  51
  52	int sync_lost_count;
  53	bool frame_intact;
  54	struct work_struct recover_work;
  55
  56	dma_addr_t palette_dma_handle;
  57	u16 *palette_base;
  58	struct completion palette_loaded;
  59};
  60#define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)
  61
  62static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
  63{
  64	struct drm_device *dev = crtc->dev;
  65	struct tilcdc_drm_private *priv = dev->dev_private;
  66	struct drm_gem_cma_object *gem;
  67	dma_addr_t start, end;
  68	u64 dma_base_and_ceiling;
  69
  70	gem = drm_fb_cma_get_gem_obj(fb, 0);
  71
  72	start = gem->paddr + fb->offsets[0] +
  73		crtc->y * fb->pitches[0] +
  74		crtc->x * fb->format->cpp[0];
  75
  76	end = start + (crtc->mode.vdisplay * fb->pitches[0]);
  77
  78	/* Write LCDC_DMA_FB_BASE_ADDR_0_REG and LCDC_DMA_FB_CEILING_ADDR_0_REG
  79	 * with a single instruction, if available. This should make it more
  80	 * unlikely that LCDC would fetch the DMA addresses in the middle of
  81	 * an update.
  82	 */
  83	if (priv->rev == 1)
  84		end -= 1;
  85
  86	dma_base_and_ceiling = (u64)end << 32 | start;
  87	tilcdc_write64(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, dma_base_and_ceiling);
  88}
  89
  90/*
  91 * The driver currently only supports true color formats. For
  92 * true color the palette block is bypassed, but a 32 byte palette
  93 * should still be loaded. The first 16-bit entry must be 0x4000 while
  94 * all other entries must be zeroed.
  95 */
  96static void tilcdc_crtc_load_palette(struct drm_crtc *crtc)
  97{
  98	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
  99	struct drm_device *dev = crtc->dev;
 100	struct tilcdc_drm_private *priv = dev->dev_private;
 101	int ret;
 102
 103	reinit_completion(&tilcdc_crtc->palette_loaded);
 104
 105	/* Tell the LCDC where the palette is located. */
 106	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG,
 107		     tilcdc_crtc->palette_dma_handle);
 108	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG,
 109		     (u32) tilcdc_crtc->palette_dma_handle +
 110		     TILCDC_PALETTE_SIZE - 1);
 111
 112	/* Set dma load mode for palette loading only. */
 113	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
 114			  LCDC_PALETTE_LOAD_MODE(PALETTE_ONLY),
 115			  LCDC_PALETTE_LOAD_MODE_MASK);
 116
 117	/* Enable DMA Palette Loaded Interrupt */
 118	if (priv->rev == 1)
 119		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
 120	else
 121		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_V2_PL_INT_ENA);
 122
 123	/* Enable LCDC DMA and wait for palette to be loaded. */
 124	tilcdc_clear_irqstatus(dev, 0xffffffff);
 125	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 126
 127	ret = wait_for_completion_timeout(&tilcdc_crtc->palette_loaded,
 128					  msecs_to_jiffies(50));
 129	if (ret == 0)
 130		dev_err(dev->dev, "%s: Palette loading timeout", __func__);
 131
 132	/* Disable LCDC DMA and DMA Palette Loaded Interrupt. */
 133	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 134	if (priv->rev == 1)
 135		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
 136	else
 137		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG, LCDC_V2_PL_INT_ENA);
 138}
 139
 140static void tilcdc_crtc_enable_irqs(struct drm_device *dev)
 141{
 142	struct tilcdc_drm_private *priv = dev->dev_private;
 143
 144	tilcdc_clear_irqstatus(dev, 0xffffffff);
 145
 146	if (priv->rev == 1) {
 147		tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
 148			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
 149			LCDC_V1_UNDERFLOW_INT_ENA);
 150		tilcdc_set(dev, LCDC_DMA_CTRL_REG,
 151			LCDC_V1_END_OF_FRAME_INT_ENA);
 152	} else {
 153		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG,
 154			LCDC_V2_UNDERFLOW_INT_ENA |
 155			LCDC_V2_END_OF_FRAME0_INT_ENA |
 156			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
 157	}
 158}
 159
 160static void tilcdc_crtc_disable_irqs(struct drm_device *dev)
 161{
 162	struct tilcdc_drm_private *priv = dev->dev_private;
 163
 164	/* disable irqs that we might have enabled: */
 165	if (priv->rev == 1) {
 166		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 167			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
 168			LCDC_V1_UNDERFLOW_INT_ENA | LCDC_V1_PL_INT_ENA);
 169		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
 170			LCDC_V1_END_OF_FRAME_INT_ENA);
 171	} else {
 172		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
 173			LCDC_V2_UNDERFLOW_INT_ENA | LCDC_V2_PL_INT_ENA |
 174			LCDC_V2_END_OF_FRAME0_INT_ENA |
 175			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
 176	}
 177}
 178
 179static void reset(struct drm_crtc *crtc)
 180{
 181	struct drm_device *dev = crtc->dev;
 182	struct tilcdc_drm_private *priv = dev->dev_private;
 183
 184	if (priv->rev != 2)
 185		return;
 186
 187	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
 188	usleep_range(250, 1000);
 189	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
 190}
 191
 192/*
 193 * Calculate the percentage difference between the requested pixel clock rate
 194 * and the effective rate resulting from calculating the clock divider value.
 195 */
 196static unsigned int tilcdc_pclk_diff(unsigned long rate,
 197				     unsigned long real_rate)
 198{
 199	int r = rate / 100, rr = real_rate / 100;
 200
 201	return (unsigned int)(abs(((rr - r) * 100) / r));
 202}
 203
 204static void tilcdc_crtc_set_clk(struct drm_crtc *crtc)
 205{
 206	struct drm_device *dev = crtc->dev;
 207	struct tilcdc_drm_private *priv = dev->dev_private;
 208	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 209	unsigned long clk_rate, real_rate, req_rate;
 210	unsigned int clkdiv;
 211	int ret;
 212
 213	clkdiv = 2; /* first try using a standard divider of 2 */
 214
 215	/* mode.clock is in KHz, set_rate wants parameter in Hz */
 216	req_rate = crtc->mode.clock * 1000;
 217
 218	ret = clk_set_rate(priv->clk, req_rate * clkdiv);
 219	clk_rate = clk_get_rate(priv->clk);
 220	if (ret < 0 || tilcdc_pclk_diff(req_rate, clk_rate) > 5) {
 221		/*
 222		 * If we fail to set the clock rate (some architectures don't
 223		 * use the common clock framework yet and may not implement
 224		 * all the clk API calls for every clock), try the next best
 225		 * thing: adjusting the clock divider, unless clk_get_rate()
 226		 * failed as well.
 227		 */
 228		if (!clk_rate) {
 229			/* Nothing more we can do. Just bail out. */
 230			dev_err(dev->dev,
 231				"failed to set the pixel clock - unable to read current lcdc clock rate\n");
 232			return;
 233		}
 234
 235		clkdiv = DIV_ROUND_CLOSEST(clk_rate, req_rate);
 236
 237		/*
 238		 * Emit a warning if the real clock rate resulting from the
 239		 * calculated divider differs much from the requested rate.
 240		 *
 241		 * 5% is an arbitrary value - LCDs are usually quite tolerant
 242		 * about pixel clock rates.
 243		 */
 244		real_rate = clkdiv * req_rate;
 245
 246		if (tilcdc_pclk_diff(clk_rate, real_rate) > 5) {
 247			dev_warn(dev->dev,
 248				 "effective pixel clock rate (%luHz) differs from the calculated rate (%luHz)\n",
 249				 clk_rate, real_rate);
 250		}
 251	}
 252
 253	tilcdc_crtc->lcd_fck_rate = clk_rate;
 254
 255	DBG("lcd_clk=%u, mode clock=%d, div=%u",
 256	    tilcdc_crtc->lcd_fck_rate, crtc->mode.clock, clkdiv);
 257
 258	/* Configure the LCD clock divisor. */
 259	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
 260		     LCDC_RASTER_MODE);
 261
 262	if (priv->rev == 2)
 263		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
 264				LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
 265				LCDC_V2_CORE_CLK_EN);
 266}
 267
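/* Frame refresh period of the mode in microseconds (mode->clock is in kHz). */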
 268static uint tilcdc_mode_hvtotal(const struct drm_display_mode *mode)
 269{
 270	return (uint) div_u64(1000llu * mode->htotal * mode->vtotal,
 271			      mode->clock);
 272}
 273
 274static void tilcdc_crtc_set_mode(struct drm_crtc *crtc)
 275{
 276	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 277	struct drm_device *dev = crtc->dev;
 278	struct tilcdc_drm_private *priv = dev->dev_private;
 279	const struct tilcdc_panel_info *info = tilcdc_crtc->info;
 280	uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
 281	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
 282	struct drm_framebuffer *fb = crtc->primary->state->fb;
 283
 284	if (WARN_ON(!info))
 285		return;
 286
 287	if (WARN_ON(!fb))
 288		return;
 289
 290	/* Configure the Burst Size and fifo threshold of DMA: */
 291	reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
 292	switch (info->dma_burst_sz) {
 293	case 1:
 294		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
 295		break;
 296	case 2:
 297		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
 298		break;
 299	case 4:
 300		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
 301		break;
 302	case 8:
 303		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
 304		break;
 305	case 16:
 306		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
 307		break;
 308	default:
 309		dev_err(dev->dev, "invalid burst size\n");
 310		return;
 311	}
 312	reg |= (info->fifo_th << 8);
 313	tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);
 314
 315	/* Configure timings: */
 316	hbp = mode->htotal - mode->hsync_end;
 317	hfp = mode->hsync_start - mode->hdisplay;
 318	hsw = mode->hsync_end - mode->hsync_start;
 319	vbp = mode->vtotal - mode->vsync_end;
 320	vfp = mode->vsync_start - mode->vdisplay;
 321	vsw = mode->vsync_end - mode->vsync_start;
 322
 323	DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
 324	    mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);
 325
 326	/* Set AC Bias Period and Number of Transitions per Interrupt: */
 327	reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
 328	reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
 329		LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);
 330
 331	/*
 332	 * subtract one from hfp, hbp, hsw because the hardware uses
 333	 * a value of 0 as 1
 334	 */
 335	if (priv->rev == 2) {
 336		/* clear bits we're going to set */
 337		reg &= ~0x78000033;
 338		reg |= ((hfp-1) & 0x300) >> 8;
 339		reg |= ((hbp-1) & 0x300) >> 4;
 340		reg |= ((hsw-1) & 0x3c0) << 21;
 341	}
 342	tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);
 343
 344	reg = (((mode->hdisplay >> 4) - 1) << 4) |
 345		(((hbp-1) & 0xff) << 24) |
 346		(((hfp-1) & 0xff) << 16) |
 347		(((hsw-1) & 0x3f) << 10);
 348	if (priv->rev == 2)
 349		reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
 350	tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);
 351
 352	reg = ((mode->vdisplay - 1) & 0x3ff) |
 353		((vbp & 0xff) << 24) |
 354		((vfp & 0xff) << 16) |
 355		(((vsw-1) & 0x3f) << 10);
 356	tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);
 357
 358	/*
 359	 * be sure to set Bit 10 for the V2 LCDC controller,
 360	 * otherwise the vertical resolution is limited to 1024
 361	 * lines, preventing 1920x1080 from being supported.
 362	 */
 363	if (priv->rev == 2) {
 364		if ((mode->vdisplay - 1) & 0x400) {
 365			tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
 366				LCDC_LPP_B10);
 367		} else {
 368			tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
 369				LCDC_LPP_B10);
 370		}
 371	}
 372
 373	/* Configure display type: */
 374	reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
 375		~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
 376		  LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK |
 377		  0x000ff000 /* Palette Loading Delay bits */);
 378	reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
 379	if (info->tft_alt_mode)
 380		reg |= LCDC_TFT_ALT_ENABLE;
 381	if (priv->rev == 2) {
 382		switch (fb->format->format) {
 383		case DRM_FORMAT_BGR565:
 384		case DRM_FORMAT_RGB565:
 385			break;
 386		case DRM_FORMAT_XBGR8888:
 387		case DRM_FORMAT_XRGB8888:
 388			reg |= LCDC_V2_TFT_24BPP_UNPACK;
 389			fallthrough;
 390		case DRM_FORMAT_BGR888:
 391		case DRM_FORMAT_RGB888:
 392			reg |= LCDC_V2_TFT_24BPP_MODE;
 393			break;
 394		default:
 395			dev_err(dev->dev, "invalid pixel format\n");
 396			return;
 397		}
 398	}
 399	reg |= info->fdd < 12;
 400	tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);
 401
 402	if (info->invert_pxl_clk)
 403		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
 404	else
 405		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
 406
 407	if (info->sync_ctrl)
 408		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
 409	else
 410		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
 411
 412	if (info->sync_edge)
 413		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
 414	else
 415		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
 416
 417	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
 418		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
 419	else
 420		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
 421
 422	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
 423		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
 424	else
 425		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
 426
 427	if (info->raster_order)
 428		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
 429	else
 430		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
 431
 432	tilcdc_crtc_set_clk(crtc);
 433
 434	tilcdc_crtc_load_palette(crtc);
 435
 436	set_scanout(crtc, fb);
 437
 438	crtc->hwmode = crtc->state->adjusted_mode;
 439
 440	tilcdc_crtc->hvtotal_us =
 441		tilcdc_mode_hvtotal(&crtc->hwmode);
 442}
 443
 444static void tilcdc_crtc_enable(struct drm_crtc *crtc)
 445{
 446	struct drm_device *dev = crtc->dev;
 447	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 448	unsigned long flags;
 449
 450	mutex_lock(&tilcdc_crtc->enable_lock);
 451	if (tilcdc_crtc->enabled || tilcdc_crtc->shutdown) {
 452		mutex_unlock(&tilcdc_crtc->enable_lock);
 453		return;
 454	}
 455
 456	pm_runtime_get_sync(dev->dev);
 457
 458	reset(crtc);
 459
 460	tilcdc_crtc_set_mode(crtc);
 461
 462	tilcdc_crtc_enable_irqs(dev);
 463
 464	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
 465	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
 466			  LCDC_PALETTE_LOAD_MODE(DATA_ONLY),
 467			  LCDC_PALETTE_LOAD_MODE_MASK);
 468
 469	/* There is no real chance for a race here as the time stamp
 470	 * is taken before the raster DMA is started. The spin-lock is
 471	 * taken to have a memory barrier after taking the time-stamp
 472	 * and to avoid a context switch between taking the stamp and
 473	 * enabling the raster.
 474	 */
 475	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 476	tilcdc_crtc->last_vblank = ktime_get();
 477	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 478	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 479
 480	drm_crtc_vblank_on(crtc);
 481
 482	tilcdc_crtc->enabled = true;
 483	mutex_unlock(&tilcdc_crtc->enable_lock);
 484}
 485
 486static void tilcdc_crtc_atomic_enable(struct drm_crtc *crtc,
 487				      struct drm_crtc_state *old_state)
 488{
 489	tilcdc_crtc_enable(crtc);
 490}
 491
 492static void tilcdc_crtc_off(struct drm_crtc *crtc, bool shutdown)
 493{
 494	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 495	struct drm_device *dev = crtc->dev;
 496	int ret;
 497
 498	mutex_lock(&tilcdc_crtc->enable_lock);
 499	if (shutdown)
 500		tilcdc_crtc->shutdown = true;
 501	if (!tilcdc_crtc->enabled) {
 502		mutex_unlock(&tilcdc_crtc->enable_lock);
 503		return;
 504	}
 505	tilcdc_crtc->frame_done = false;
 506	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 507
 508	/*
 509	 * Wait for framedone irq which will still come before putting
 510	 * things to sleep..
 511	 */
 512	ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
 513				 tilcdc_crtc->frame_done,
 514				 msecs_to_jiffies(500));
 515	if (ret == 0)
 516		dev_err(dev->dev, "%s: timeout waiting for framedone\n",
 517			__func__);
 518
 519	drm_crtc_vblank_off(crtc);
 520
 521	tilcdc_crtc_disable_irqs(dev);
 522
 523	pm_runtime_put_sync(dev->dev);
 524
 525	tilcdc_crtc->enabled = false;
 526	mutex_unlock(&tilcdc_crtc->enable_lock);
 527}
 528
 529static void tilcdc_crtc_disable(struct drm_crtc *crtc)
 530{
 531	tilcdc_crtc_off(crtc, false);
 532}
 533
 534static void tilcdc_crtc_atomic_disable(struct drm_crtc *crtc,
 535				       struct drm_crtc_state *old_state)
 536{
 537	tilcdc_crtc_disable(crtc);
 538}
 539
 540static void tilcdc_crtc_atomic_flush(struct drm_crtc *crtc,
 541				     struct drm_crtc_state *old_state)
 542{
 543	if (!crtc->state->event)
 544		return;
 545
 546	spin_lock_irq(&crtc->dev->event_lock);
 547	drm_crtc_send_vblank_event(crtc, crtc->state->event);
 548	crtc->state->event = NULL;
 549	spin_unlock_irq(&crtc->dev->event_lock);
 550}
 551
 552void tilcdc_crtc_shutdown(struct drm_crtc *crtc)
 553{
 554	tilcdc_crtc_off(crtc, true);
 555}
 556
 557static bool tilcdc_crtc_is_on(struct drm_crtc *crtc)
 558{
 559	return crtc->state && crtc->state->enable && crtc->state->active;
 560}
 561
 562static void tilcdc_crtc_recover_work(struct work_struct *work)
 563{
 564	struct tilcdc_crtc *tilcdc_crtc =
 565		container_of(work, struct tilcdc_crtc, recover_work);
 566	struct drm_crtc *crtc = &tilcdc_crtc->base;
 567
 568	dev_info(crtc->dev->dev, "%s: Reset CRTC", __func__);
 569
 570	drm_modeset_lock(&crtc->mutex, NULL);
 571
 572	if (!tilcdc_crtc_is_on(crtc))
 573		goto out;
 574
 575	tilcdc_crtc_disable(crtc);
 576	tilcdc_crtc_enable(crtc);
 577out:
 578	drm_modeset_unlock(&crtc->mutex);
 579}
 580
 581static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
 582{
 583	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
 584
 585	tilcdc_crtc_shutdown(crtc);
 586
 587	flush_workqueue(priv->wq);
 588
 589	of_node_put(crtc->port);
 590	drm_crtc_cleanup(crtc);
 591}
 592
 593int tilcdc_crtc_update_fb(struct drm_crtc *crtc,
 594		struct drm_framebuffer *fb,
 595		struct drm_pending_vblank_event *event)
 596{
 597	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 598	struct drm_device *dev = crtc->dev;
 599
 600	if (tilcdc_crtc->event) {
 601		dev_err(dev->dev, "already pending page flip!\n");
 602		return -EBUSY;
 603	}
 604
 605	tilcdc_crtc->event = event;
 606
 607	mutex_lock(&tilcdc_crtc->enable_lock);
 608
 609	if (tilcdc_crtc->enabled) {
 610		unsigned long flags;
 611		ktime_t next_vblank;
 612		s64 tdiff;
 613
 614		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 615
 616		next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
 617					   tilcdc_crtc->hvtotal_us);
 618		tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));
 619
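		/*
		 * If the next vblank is imminent, defer the scanout update to
		 * the end-of-frame interrupt handler; otherwise apply it now.
		 */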
 620		if (tdiff < TILCDC_VBLANK_SAFETY_THRESHOLD_US)
 621			tilcdc_crtc->next_fb = fb;
 622		else
 623			set_scanout(crtc, fb);
 624
 625		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 626	}
 627
 628	mutex_unlock(&tilcdc_crtc->enable_lock);
 629
 630	return 0;
 631}
 632
 633static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
 634		const struct drm_display_mode *mode,
 635		struct drm_display_mode *adjusted_mode)
 636{
 637	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 638
 639	if (!tilcdc_crtc->simulate_vesa_sync)
 640		return true;
 641
 642	/*
 643	 * tilcdc does not generate VESA-compliant sync but aligns
 644	 * VS on the second edge of HS instead of first edge.
 645	 * We use adjusted_mode to fix up the sync by aligning both rising
 646	 * edges and adding an HSKEW offset.
 647	 */
 648	adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
 649	adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;
 650
 651	if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
 652		adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
 653		adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
 654	} else {
 655		adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
 656		adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
 657	}
 658
 659	return true;
 660}
 661
 662static int tilcdc_crtc_atomic_check(struct drm_crtc *crtc,
 663				    struct drm_crtc_state *state)
 664{
 665	/* If we are not active we don't care */
 666	if (!state->active)
 667		return 0;
 668
 669	if (state->state->planes[0].ptr != crtc->primary ||
 670	    state->state->planes[0].state == NULL ||
 671	    state->state->planes[0].state->crtc != crtc) {
 672		dev_dbg(crtc->dev->dev, "CRTC primary plane must be present");
 673		return -EINVAL;
 674	}
 675
 676	return 0;
 677}
 678
 679static int tilcdc_crtc_enable_vblank(struct drm_crtc *crtc)
 680{
 681	return 0;
 682}
 683
 684static void tilcdc_crtc_disable_vblank(struct drm_crtc *crtc)
 685{
 686}
 687
 688static void tilcdc_crtc_reset(struct drm_crtc *crtc)
 689{
 690	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 691	struct drm_device *dev = crtc->dev;
 692	int ret;
 693
 694	drm_atomic_helper_crtc_reset(crtc);
 695
 696	/* Turn the raster off if it is on for some reason. */
 697	pm_runtime_get_sync(dev->dev);
 698	if (tilcdc_read(dev, LCDC_RASTER_CTRL_REG) & LCDC_RASTER_ENABLE) {
 699		/* Enable DMA Frame Done Interrupt */
 700		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_FRAME_DONE);
 701		tilcdc_clear_irqstatus(dev, 0xffffffff);
 702
 703		tilcdc_crtc->frame_done = false;
 704		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 705
 706		ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
 707					 tilcdc_crtc->frame_done,
 708					 msecs_to_jiffies(500));
 709		if (ret == 0)
 710			dev_err(dev->dev, "%s: timeout waiting for framedone\n",
 711				__func__);
 712	}
 713	pm_runtime_put_sync(dev->dev);
 714}
 715
 716static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
 717	.destroy        = tilcdc_crtc_destroy,
 718	.set_config     = drm_atomic_helper_set_config,
 719	.page_flip      = drm_atomic_helper_page_flip,
 720	.reset		= tilcdc_crtc_reset,
 721	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
 722	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
 723	.enable_vblank	= tilcdc_crtc_enable_vblank,
 724	.disable_vblank	= tilcdc_crtc_disable_vblank,
 725};
 726
 727int tilcdc_crtc_max_width(struct drm_crtc *crtc)
 728{
 729	struct drm_device *dev = crtc->dev;
 730	struct tilcdc_drm_private *priv = dev->dev_private;
 731	int max_width = 0;
 732
 733	if (priv->rev == 1)
 734		max_width = 1024;
 735	else if (priv->rev == 2)
 736		max_width = 2048;
 737
 738	return max_width;
 739}
 740
 741static enum drm_mode_status
 742tilcdc_crtc_mode_valid(struct drm_crtc *crtc,
 743		       const struct drm_display_mode *mode)
 744{
 745	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
 746	unsigned int bandwidth;
 747	uint32_t hbp, hfp, hsw, vbp, vfp, vsw;
 748
 749	/*
 750	 * check to see if the width is within the range that
 751	 * the LCD Controller physically supports
 752	 */
 753	if (mode->hdisplay > tilcdc_crtc_max_width(crtc))
 754		return MODE_VIRTUAL_X;
 755
 756	/* width must be multiple of 16 */
 757	if (mode->hdisplay & 0xf)
 758		return MODE_VIRTUAL_X;
 759
 760	if (mode->vdisplay > 2048)
 761		return MODE_VIRTUAL_Y;
 762
 763	DBG("Processing mode %dx%d@%d with pixel clock %d",
 764		mode->hdisplay, mode->vdisplay,
 765		drm_mode_vrefresh(mode), mode->clock);
 766
 767	hbp = mode->htotal - mode->hsync_end;
 768	hfp = mode->hsync_start - mode->hdisplay;
 769	hsw = mode->hsync_end - mode->hsync_start;
 770	vbp = mode->vtotal - mode->vsync_end;
 771	vfp = mode->vsync_start - mode->vdisplay;
 772	vsw = mode->vsync_end - mode->vsync_start;
 773
 774	if ((hbp-1) & ~0x3ff) {
 775		DBG("Pruning mode: Horizontal Back Porch out of range");
 776		return MODE_HBLANK_WIDE;
 777	}
 778
 779	if ((hfp-1) & ~0x3ff) {
 780		DBG("Pruning mode: Horizontal Front Porch out of range");
 781		return MODE_HBLANK_WIDE;
 782	}
 783
 784	if ((hsw-1) & ~0x3ff) {
 785		DBG("Pruning mode: Horizontal Sync Width out of range");
 786		return MODE_HSYNC_WIDE;
 787	}
 788
 789	if (vbp & ~0xff) {
 790		DBG("Pruning mode: Vertical Back Porch out of range");
 791		return MODE_VBLANK_WIDE;
 792	}
 793
 794	if (vfp & ~0xff) {
 795		DBG("Pruning mode: Vertical Front Porch out of range");
 796		return MODE_VBLANK_WIDE;
 797	}
 798
 799	if ((vsw-1) & ~0x3f) {
 800		DBG("Pruning mode: Vertical Sync Width out of range");
 801		return MODE_VSYNC_WIDE;
 802	}
 803
 804	/*
 805	 * some devices have a maximum allowed pixel clock
 806	 * configured from the DT
 807	 */
 808	if (mode->clock > priv->max_pixelclock) {
 809		DBG("Pruning mode: pixel clock too high");
 810		return MODE_CLOCK_HIGH;
 811	}
 812
 813	/*
 814	 * some devices further limit the max horizontal resolution
 815	 * configured from the DT
 816	 */
 817	if (mode->hdisplay > priv->max_width)
 818		return MODE_BAD_WIDTH;
 819
 820	/* filter out modes that would require too much memory bandwidth: */
 821	bandwidth = mode->hdisplay * mode->vdisplay *
 822		drm_mode_vrefresh(mode);
 823	if (bandwidth > priv->max_bandwidth) {
 824		DBG("Pruning mode: exceeds defined bandwidth limit");
 825		return MODE_BAD;
 826	}
 827
 828	return MODE_OK;
 829}
 830
 831static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
 832	.mode_valid	= tilcdc_crtc_mode_valid,
 833	.mode_fixup	= tilcdc_crtc_mode_fixup,
 834	.atomic_check	= tilcdc_crtc_atomic_check,
 835	.atomic_enable	= tilcdc_crtc_atomic_enable,
 836	.atomic_disable	= tilcdc_crtc_atomic_disable,
 837	.atomic_flush	= tilcdc_crtc_atomic_flush,
 838};
 839
 840void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
 841		const struct tilcdc_panel_info *info)
 842{
 843	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 844	tilcdc_crtc->info = info;
 845}
 846
 847void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
 848					bool simulate_vesa_sync)
 849{
 850	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 851
 852	tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
 853}
 854
 855void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
 856{
 857	struct drm_device *dev = crtc->dev;
 858	struct tilcdc_drm_private *priv = dev->dev_private;
 859	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 860
 861	drm_modeset_lock(&crtc->mutex, NULL);
 862	if (tilcdc_crtc->lcd_fck_rate != clk_get_rate(priv->clk)) {
 863		if (tilcdc_crtc_is_on(crtc)) {
 864			pm_runtime_get_sync(dev->dev);
 865			tilcdc_crtc_disable(crtc);
 866
 867			tilcdc_crtc_set_clk(crtc);
 868
 869			tilcdc_crtc_enable(crtc);
 870			pm_runtime_put_sync(dev->dev);
 871		}
 872	}
 873	drm_modeset_unlock(&crtc->mutex);
 874}
 875
 876#define SYNC_LOST_COUNT_LIMIT 50
 877
 878irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
 879{
 880	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 881	struct drm_device *dev = crtc->dev;
 882	struct tilcdc_drm_private *priv = dev->dev_private;
 883	uint32_t stat, reg;
 884
 885	stat = tilcdc_read_irqstatus(dev);
 886	tilcdc_clear_irqstatus(dev, stat);
 887
 888	if (stat & LCDC_END_OF_FRAME0) {
 889		unsigned long flags;
 890		bool skip_event = false;
 891		ktime_t now;
 892
 893		now = ktime_get();
 894
 895		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 896
 897		tilcdc_crtc->last_vblank = now;
 898
 899		if (tilcdc_crtc->next_fb) {
 900			set_scanout(crtc, tilcdc_crtc->next_fb);
 901			tilcdc_crtc->next_fb = NULL;
 902			skip_event = true;
 903		}
 904
 905		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 906
 907		drm_crtc_handle_vblank(crtc);
 908
 909		if (!skip_event) {
 910			struct drm_pending_vblank_event *event;
 911
 912			spin_lock_irqsave(&dev->event_lock, flags);
 913
 914			event = tilcdc_crtc->event;
 915			tilcdc_crtc->event = NULL;
 916			if (event)
 917				drm_crtc_send_vblank_event(crtc, event);
 918
 919			spin_unlock_irqrestore(&dev->event_lock, flags);
 920		}
 921
 922		if (tilcdc_crtc->frame_intact)
 923			tilcdc_crtc->sync_lost_count = 0;
 924		else
 925			tilcdc_crtc->frame_intact = true;
 926	}
 927
 928	if (stat & LCDC_FIFO_UNDERFLOW)
 929		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow",
 930				    __func__, stat);
 931
 932	if (stat & LCDC_PL_LOAD_DONE) {
 933		complete(&tilcdc_crtc->palette_loaded);
 934		if (priv->rev == 1)
 935			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 936				     LCDC_V1_PL_INT_ENA);
 937		else
 938			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
 939				     LCDC_V2_PL_INT_ENA);
 940	}
 941
 942	if (stat & LCDC_SYNC_LOST) {
 943		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost",
 944				    __func__, stat);
 945		tilcdc_crtc->frame_intact = false;
 946		if (priv->rev == 1) {
 947			reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG);
 948			if (reg & LCDC_RASTER_ENABLE) {
 949				tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 950					     LCDC_RASTER_ENABLE);
 951				tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
 952					   LCDC_RASTER_ENABLE);
 953			}
 954		} else {
 955			if (tilcdc_crtc->sync_lost_count++ >
 956			    SYNC_LOST_COUNT_LIMIT) {
 957				dev_err(dev->dev,
 958					"%s(0x%08x): Sync lost flood detected, recovering",
 959					__func__, stat);
 960				queue_work(system_wq,
 961					   &tilcdc_crtc->recover_work);
 962				tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
 963					     LCDC_SYNC_LOST);
 964				tilcdc_crtc->sync_lost_count = 0;
 965			}
 966		}
 967	}
 968
 969	if (stat & LCDC_FRAME_DONE) {
 970		tilcdc_crtc->frame_done = true;
 971		wake_up(&tilcdc_crtc->frame_done_wq);
 972		/* rev 1 lcdc appears to hang if irq is not disabled here */
 973		if (priv->rev == 1)
 974			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 975				     LCDC_V1_FRAME_DONE_INT_ENA);
 976	}
 977
 978	/* For revision 2 only */
 979	if (priv->rev == 2) {
 980		/* Indicate to LCDC that the interrupt service routine has
 981		 * completed, see 13.3.6.1.6 in AM335x TRM.
 982		 */
 983		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
 984	}
 985
 986	return IRQ_HANDLED;
 987}
 988
 989int tilcdc_crtc_create(struct drm_device *dev)
 990{
 991	struct tilcdc_drm_private *priv = dev->dev_private;
 992	struct tilcdc_crtc *tilcdc_crtc;
 993	struct drm_crtc *crtc;
 994	int ret;
 995
 996	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
 997	if (!tilcdc_crtc)
 998		return -ENOMEM;
 999
1000	init_completion(&tilcdc_crtc->palette_loaded);
1001	tilcdc_crtc->palette_base = dmam_alloc_coherent(dev->dev,
1002					TILCDC_PALETTE_SIZE,
1003					&tilcdc_crtc->palette_dma_handle,
1004					GFP_KERNEL | __GFP_ZERO);
1005	if (!tilcdc_crtc->palette_base)
1006		return -ENOMEM;
1007	*tilcdc_crtc->palette_base = TILCDC_PALETTE_FIRST_ENTRY;
1008
1009	crtc = &tilcdc_crtc->base;
1010
1011	ret = tilcdc_plane_init(dev, &tilcdc_crtc->primary);
1012	if (ret < 0)
1013		goto fail;
1014
1015	mutex_init(&tilcdc_crtc->enable_lock);
1016
1017	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);
1018
1019	spin_lock_init(&tilcdc_crtc->irq_lock);
1020	INIT_WORK(&tilcdc_crtc->recover_work, tilcdc_crtc_recover_work);
1021
1022	ret = drm_crtc_init_with_planes(dev, crtc,
1023					&tilcdc_crtc->primary,
1024					NULL,
1025					&tilcdc_crtc_funcs,
1026					"tilcdc crtc");
1027	if (ret < 0)
1028		goto fail;
1029
1030	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);
1031
1032	if (priv->is_componentized) {
1033		crtc->port = of_graph_get_port_by_id(dev->dev->of_node, 0);
1034		if (!crtc->port) { /* This should never happen */
1035			dev_err(dev->dev, "Port node not found in %pOF\n",
1036				dev->dev->of_node);
1037			ret = -EINVAL;
1038			goto fail;
1039		}
1040	}
1041
1042	priv->crtc = crtc;
1043	return 0;
1044
1045fail:
1046	tilcdc_crtc_destroy(crtc);
1047	return ret;
1048}
v4.10.11
 
   1/*
   2 * Copyright (C) 2012 Texas Instruments
   3 * Author: Rob Clark <robdclark@gmail.com>
   4 *
   5 * This program is free software; you can redistribute it and/or modify it
   6 * under the terms of the GNU General Public License version 2 as published by
   7 * the Free Software Foundation.
   8 *
   9 * This program is distributed in the hope that it will be useful, but WITHOUT
  10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
  12 * more details.
  13 *
  14 * You should have received a copy of the GNU General Public License along with
  15 * this program.  If not, see <http://www.gnu.org/licenses/>.
  16 */
  17
  18#include <drm/drm_atomic.h>
  19#include <drm/drm_atomic_helper.h>
  20#include <drm/drm_crtc.h>
  21#include <drm/drm_flip_work.h>
  22#include <drm/drm_plane_helper.h>
  23#include <linux/workqueue.h>
  24#include <linux/completion.h>
  25#include <linux/dma-mapping.h>
  26
  27#include "tilcdc_drv.h"
  28#include "tilcdc_regs.h"
  29
  30#define TILCDC_VBLANK_SAFETY_THRESHOLD_US	1000
  31#define TILCDC_PALETTE_SIZE			32
  32#define TILCDC_PALETTE_FIRST_ENTRY		0x4000
  33
  34struct tilcdc_crtc {
  35	struct drm_crtc base;
  36
  37	struct drm_plane primary;
  38	const struct tilcdc_panel_info *info;
  39	struct drm_pending_vblank_event *event;
  40	struct mutex enable_lock;
  41	bool enabled;
  42	bool shutdown;
  43	wait_queue_head_t frame_done_wq;
  44	bool frame_done;
  45	spinlock_t irq_lock;
  46
  47	unsigned int lcd_fck_rate;
  48
  49	ktime_t last_vblank;
  50
  51	struct drm_framebuffer *curr_fb;
  52	struct drm_framebuffer *next_fb;
  53
  54	/* for deferred fb unref's: */
  55	struct drm_flip_work unref_work;
  56
  57	/* Only set if an external encoder is connected */
  58	bool simulate_vesa_sync;
  59
  60	int sync_lost_count;
  61	bool frame_intact;
  62	struct work_struct recover_work;
  63
  64	dma_addr_t palette_dma_handle;
  65	u16 *palette_base;
  66	struct completion palette_loaded;
  67};
  68#define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)
  69
  70static void unref_worker(struct drm_flip_work *work, void *val)
  71{
  72	struct tilcdc_crtc *tilcdc_crtc =
  73		container_of(work, struct tilcdc_crtc, unref_work);
  74	struct drm_device *dev = tilcdc_crtc->base.dev;
  75
  76	mutex_lock(&dev->mode_config.mutex);
  77	drm_framebuffer_unreference(val);
  78	mutex_unlock(&dev->mode_config.mutex);
  79}
  80
  81static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
  82{
  83	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
  84	struct drm_device *dev = crtc->dev;
  85	struct tilcdc_drm_private *priv = dev->dev_private;
  86	struct drm_gem_cma_object *gem;
  87	dma_addr_t start, end;
  88	u64 dma_base_and_ceiling;
  89
  90	gem = drm_fb_cma_get_gem_obj(fb, 0);
  91
  92	start = gem->paddr + fb->offsets[0] +
  93		crtc->y * fb->pitches[0] +
  94		crtc->x * drm_format_plane_cpp(fb->pixel_format, 0);
  95
  96	end = start + (crtc->mode.vdisplay * fb->pitches[0]);
  97
  98	/* Write LCDC_DMA_FB_BASE_ADDR_0_REG and LCDC_DMA_FB_CEILING_ADDR_0_REG
  99	 * with a single instruction, if available. This should make it more
 100	 * unlikely that LCDC would fetch the DMA addresses in the middle of
 101	 * an update.
 102	 */
 103	if (priv->rev == 1)
 104		end -= 1;
 105
 106	dma_base_and_ceiling = (u64)end << 32 | start;
 107	tilcdc_write64(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, dma_base_and_ceiling);
 108
 109	if (tilcdc_crtc->curr_fb)
 110		drm_flip_work_queue(&tilcdc_crtc->unref_work,
 111			tilcdc_crtc->curr_fb);
 112
 113	tilcdc_crtc->curr_fb = fb;
 114}
 115
 116/*
 117 * The driver currently only supports true color formats. For
 118 * true color the palette block is bypassed, but a 32 byte palette
 119 * should still be loaded. The first 16-bit entry must be 0x4000 while
 120 * all other entries must be zeroed.
 121 */
 122static void tilcdc_crtc_load_palette(struct drm_crtc *crtc)
 123{
 124	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 125	struct drm_device *dev = crtc->dev;
 126	struct tilcdc_drm_private *priv = dev->dev_private;
 127	int ret;
 128
 129	reinit_completion(&tilcdc_crtc->palette_loaded);
 130
 131	/* Tell the LCDC where the palette is located. */
 132	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG,
 133		     tilcdc_crtc->palette_dma_handle);
 134	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG,
 135		     (u32) tilcdc_crtc->palette_dma_handle +
 136		     TILCDC_PALETTE_SIZE - 1);
 137
 138	/* Set dma load mode for palette loading only. */
 139	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
 140			  LCDC_PALETTE_LOAD_MODE(PALETTE_ONLY),
 141			  LCDC_PALETTE_LOAD_MODE_MASK);
 142
 143	/* Enable DMA Palette Loaded Interrupt */
 144	if (priv->rev == 1)
 145		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
 146	else
 147		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_V2_PL_INT_ENA);
 148
 149	/* Enable LCDC DMA and wait for palette to be loaded. */
 150	tilcdc_clear_irqstatus(dev, 0xffffffff);
 151	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 152
 153	ret = wait_for_completion_timeout(&tilcdc_crtc->palette_loaded,
 154					  msecs_to_jiffies(50));
 155	if (ret == 0)
 156		dev_err(dev->dev, "%s: Palette loading timeout", __func__);
 157
 158	/* Disable LCDC DMA and DMA Palette Loaded Interrupt. */
 159	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 160	if (priv->rev == 1)
 161		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
 162	else
 163		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG, LCDC_V2_PL_INT_ENA);
 164}
 165
 166static void tilcdc_crtc_enable_irqs(struct drm_device *dev)
 167{
 168	struct tilcdc_drm_private *priv = dev->dev_private;
 169
 170	tilcdc_clear_irqstatus(dev, 0xffffffff);
 171
 172	if (priv->rev == 1) {
 173		tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
 174			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
 175			LCDC_V1_UNDERFLOW_INT_ENA);
 176		tilcdc_set(dev, LCDC_DMA_CTRL_REG,
 177			LCDC_V1_END_OF_FRAME_INT_ENA);
 178	} else {
 179		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG,
 180			LCDC_V2_UNDERFLOW_INT_ENA |
 181			LCDC_V2_END_OF_FRAME0_INT_ENA |
 182			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
 183	}
 184}
 185
 186static void tilcdc_crtc_disable_irqs(struct drm_device *dev)
 187{
 188	struct tilcdc_drm_private *priv = dev->dev_private;
 189
 190	/* disable irqs that we might have enabled: */
 191	if (priv->rev == 1) {
 192		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 193			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
 194			LCDC_V1_UNDERFLOW_INT_ENA | LCDC_V1_PL_INT_ENA);
 195		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
 196			LCDC_V1_END_OF_FRAME_INT_ENA);
 197	} else {
 198		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
 199			LCDC_V2_UNDERFLOW_INT_ENA | LCDC_V2_PL_INT_ENA |
 200			LCDC_V2_END_OF_FRAME0_INT_ENA |
 201			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
 202	}
 203}
 204
 205static void reset(struct drm_crtc *crtc)
 206{
 207	struct drm_device *dev = crtc->dev;
 208	struct tilcdc_drm_private *priv = dev->dev_private;
 209
 210	if (priv->rev != 2)
 211		return;
 212
 213	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
 214	usleep_range(250, 1000);
 215	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
 216}
 217
 218/*
 219 * Calculate the percentage difference between the requested pixel clock rate
 220 * and the effective rate resulting from calculating the clock divider value.
 221 */
 222static unsigned int tilcdc_pclk_diff(unsigned long rate,
 223				     unsigned long real_rate)
 224{
 225	int r = rate / 100, rr = real_rate / 100;
 226
 227	return (unsigned int)(abs(((rr - r) * 100) / r));
 228}
 229
 230static void tilcdc_crtc_set_clk(struct drm_crtc *crtc)
 231{
 232	struct drm_device *dev = crtc->dev;
 233	struct tilcdc_drm_private *priv = dev->dev_private;
 234	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 235	unsigned long clk_rate, real_rate, req_rate;
 236	unsigned int clkdiv;
 237	int ret;
 238
 239	clkdiv = 2; /* first try using a standard divider of 2 */
 240
 241	/* mode.clock is in KHz, set_rate wants parameter in Hz */
 242	req_rate = crtc->mode.clock * 1000;
 243
 244	ret = clk_set_rate(priv->clk, req_rate * clkdiv);
 245	clk_rate = clk_get_rate(priv->clk);
 246	if (ret < 0) {
 247		/*
 248		 * If we fail to set the clock rate (some architectures don't
 249		 * use the common clock framework yet and may not implement
 250		 * all the clk API calls for every clock), try the next best
 251		 * thing: adjusting the clock divider, unless clk_get_rate()
 252		 * failed as well.
 253		 */
 254		if (!clk_rate) {
 255			/* Nothing more we can do. Just bail out. */
 256			dev_err(dev->dev,
 257				"failed to set the pixel clock - unable to read current lcdc clock rate\n");
 258			return;
 259		}
 260
 261		clkdiv = DIV_ROUND_CLOSEST(clk_rate, req_rate);
 262
 263		/*
 264		 * Emit a warning if the real clock rate resulting from the
 265		 * calculated divider differs much from the requested rate.
 266		 *
 267		 * 5% is an arbitrary value - LCDs are usually quite tolerant
 268		 * about pixel clock rates.
 269		 */
 270		real_rate = clkdiv * req_rate;
 271
 272		if (tilcdc_pclk_diff(clk_rate, real_rate) > 5) {
 273			dev_warn(dev->dev,
 274				 "effective pixel clock rate (%luHz) differs from the calculated rate (%luHz)\n",
 275				 clk_rate, real_rate);
 276		}
 277	}
 278
 279	tilcdc_crtc->lcd_fck_rate = clk_rate;
 280
 281	DBG("lcd_clk=%u, mode clock=%d, div=%u",
 282	    tilcdc_crtc->lcd_fck_rate, crtc->mode.clock, clkdiv);
 283
 284	/* Configure the LCD clock divisor. */
 285	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
 286		     LCDC_RASTER_MODE);
 287
 288	if (priv->rev == 2)
 289		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
 290				LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
 291				LCDC_V2_CORE_CLK_EN);
 292}
 293
 294static void tilcdc_crtc_set_mode(struct drm_crtc *crtc)
 295{
 296	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 297	struct drm_device *dev = crtc->dev;
 298	struct tilcdc_drm_private *priv = dev->dev_private;
 299	const struct tilcdc_panel_info *info = tilcdc_crtc->info;
 300	uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
 301	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
 302	struct drm_framebuffer *fb = crtc->primary->state->fb;
 303
 304	if (WARN_ON(!info))
 305		return;
 306
 307	if (WARN_ON(!fb))
 308		return;
 309
 310	/* Configure the Burst Size and fifo threshold of DMA: */
 311	reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
 312	switch (info->dma_burst_sz) {
 313	case 1:
 314		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
 315		break;
 316	case 2:
 317		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
 318		break;
 319	case 4:
 320		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
 321		break;
 322	case 8:
 323		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
 324		break;
 325	case 16:
 326		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
 327		break;
 328	default:
 329		dev_err(dev->dev, "invalid burst size\n");
 330		return;
 331	}
 332	reg |= (info->fifo_th << 8);
 333	tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);
 334
 335	/* Configure timings: */
 336	hbp = mode->htotal - mode->hsync_end;
 337	hfp = mode->hsync_start - mode->hdisplay;
 338	hsw = mode->hsync_end - mode->hsync_start;
 339	vbp = mode->vtotal - mode->vsync_end;
 340	vfp = mode->vsync_start - mode->vdisplay;
 341	vsw = mode->vsync_end - mode->vsync_start;
 342
 343	DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
 344	    mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);
 345
 346	/* Set AC Bias Period and Number of Transitions per Interrupt: */
 347	reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
 348	reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
 349		LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);
 350
 351	/*
 352	 * subtract one from hfp, hbp, hsw because the hardware uses
 353	 * a value of 0 as 1
 354	 */
 355	if (priv->rev == 2) {
 356		/* clear bits we're going to set */
 357		reg &= ~0x78000033;
 358		reg |= ((hfp-1) & 0x300) >> 8;
 359		reg |= ((hbp-1) & 0x300) >> 4;
 360		reg |= ((hsw-1) & 0x3c0) << 21;
 361	}
 362	tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);
 363
 364	reg = (((mode->hdisplay >> 4) - 1) << 4) |
 365		(((hbp-1) & 0xff) << 24) |
 366		(((hfp-1) & 0xff) << 16) |
 367		(((hsw-1) & 0x3f) << 10);
 368	if (priv->rev == 2)
 369		reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
 370	tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);
 371
 372	reg = ((mode->vdisplay - 1) & 0x3ff) |
 373		((vbp & 0xff) << 24) |
 374		((vfp & 0xff) << 16) |
 375		(((vsw-1) & 0x3f) << 10);
 376	tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);
 377
 378	/*
 379	 * be sure to set Bit 10 for the V2 LCDC controller,
 380	 * otherwise the vertical resolution is limited to 1024
 381	 * lines, preventing 1920x1080 from being supported.
 382	 */
 383	if (priv->rev == 2) {
 384		if ((mode->vdisplay - 1) & 0x400) {
 385			tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
 386				LCDC_LPP_B10);
 387		} else {
 388			tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
 389				LCDC_LPP_B10);
 390		}
 391	}
 392
 393	/* Configure display type: */
 394	reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
 395		~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
 396		  LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK |
 397		  0x000ff000 /* Palette Loading Delay bits */);
 398	reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
 399	if (info->tft_alt_mode)
 400		reg |= LCDC_TFT_ALT_ENABLE;
 401	if (priv->rev == 2) {
 402		switch (fb->pixel_format) {
 403		case DRM_FORMAT_BGR565:
 404		case DRM_FORMAT_RGB565:
 405			break;
 406		case DRM_FORMAT_XBGR8888:
 407		case DRM_FORMAT_XRGB8888:
 408			reg |= LCDC_V2_TFT_24BPP_UNPACK;
 409			/* fallthrough */
 410		case DRM_FORMAT_BGR888:
 411		case DRM_FORMAT_RGB888:
 412			reg |= LCDC_V2_TFT_24BPP_MODE;
 413			break;
 414		default:
 415			dev_err(dev->dev, "invalid pixel format\n");
 416			return;
 417		}
 418	}
 419	reg |= info->fdd < 12;
 420	tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);
 421
 422	if (info->invert_pxl_clk)
 423		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
 424	else
 425		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
 426
 427	if (info->sync_ctrl)
 428		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
 429	else
 430		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
 431
 432	if (info->sync_edge)
 433		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
 434	else
 435		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
 436
 437	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
 438		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
 439	else
 440		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
 441
 442	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
 443		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
 444	else
 445		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
 446
 447	if (info->raster_order)
 448		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
 449	else
 450		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
 451
 452	tilcdc_crtc_set_clk(crtc);
 453
 454	tilcdc_crtc_load_palette(crtc);
 455
 456	set_scanout(crtc, fb);
 457
 458	drm_framebuffer_reference(fb);
 459
 460	crtc->hwmode = crtc->state->adjusted_mode;
 461}
 462
 463static void tilcdc_crtc_enable(struct drm_crtc *crtc)
 464{
 465	struct drm_device *dev = crtc->dev;
 466	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 467
 468	WARN_ON(!drm_modeset_is_locked(&crtc->mutex));
 469	mutex_lock(&tilcdc_crtc->enable_lock);
 470	if (tilcdc_crtc->enabled || tilcdc_crtc->shutdown) {
 471		mutex_unlock(&tilcdc_crtc->enable_lock);
 472		return;
 473	}
 474
 475	pm_runtime_get_sync(dev->dev);
 476
 477	reset(crtc);
 478
 479	tilcdc_crtc_set_mode(crtc);
 480
 481	tilcdc_crtc_enable_irqs(dev);
 482
 483	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
 484	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
 485			  LCDC_PALETTE_LOAD_MODE(DATA_ONLY),
 486			  LCDC_PALETTE_LOAD_MODE_MASK);
 487	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 488
 489	drm_crtc_vblank_on(crtc);
 490
 491	tilcdc_crtc->enabled = true;
 492	mutex_unlock(&tilcdc_crtc->enable_lock);
 493}
 494
 495static void tilcdc_crtc_off(struct drm_crtc *crtc, bool shutdown)
 496{
 497	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 498	struct drm_device *dev = crtc->dev;
 499	struct tilcdc_drm_private *priv = dev->dev_private;
 500	int ret;
 501
 502	mutex_lock(&tilcdc_crtc->enable_lock);
 503	if (shutdown)
 504		tilcdc_crtc->shutdown = true;
 505	if (!tilcdc_crtc->enabled) {
 506		mutex_unlock(&tilcdc_crtc->enable_lock);
 507		return;
 508	}
 509	tilcdc_crtc->frame_done = false;
 510	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
 511
 512	/*
 513	 * Wait for framedone irq which will still come before putting
 514	 * things to sleep..
 515	 */
 516	ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
 517				 tilcdc_crtc->frame_done,
 518				 msecs_to_jiffies(500));
 519	if (ret == 0)
 520		dev_err(dev->dev, "%s: timeout waiting for framedone\n",
 521			__func__);
 522
 523	drm_crtc_vblank_off(crtc);
 524
 525	tilcdc_crtc_disable_irqs(dev);
 526
 527	pm_runtime_put_sync(dev->dev);
 528
 529	if (tilcdc_crtc->next_fb) {
 530		drm_flip_work_queue(&tilcdc_crtc->unref_work,
 531				    tilcdc_crtc->next_fb);
 532		tilcdc_crtc->next_fb = NULL;
 533	}
 534
 535	if (tilcdc_crtc->curr_fb) {
 536		drm_flip_work_queue(&tilcdc_crtc->unref_work,
 537				    tilcdc_crtc->curr_fb);
 538		tilcdc_crtc->curr_fb = NULL;
 539	}
 540
 541	drm_flip_work_commit(&tilcdc_crtc->unref_work, priv->wq);
 542	tilcdc_crtc->last_vblank = 0;
 543
 544	tilcdc_crtc->enabled = false;
 545	mutex_unlock(&tilcdc_crtc->enable_lock);
 546}
 547
 548static void tilcdc_crtc_disable(struct drm_crtc *crtc)
 549{
 550	WARN_ON(!drm_modeset_is_locked(&crtc->mutex));
 551	tilcdc_crtc_off(crtc, false);
 552}
 553
 554void tilcdc_crtc_shutdown(struct drm_crtc *crtc)
 555{
 556	tilcdc_crtc_off(crtc, true);
 557}
 558
 559static bool tilcdc_crtc_is_on(struct drm_crtc *crtc)
 560{
 561	return crtc->state && crtc->state->enable && crtc->state->active;
 562}
 563
 564static void tilcdc_crtc_recover_work(struct work_struct *work)
 565{
 566	struct tilcdc_crtc *tilcdc_crtc =
 567		container_of(work, struct tilcdc_crtc, recover_work);
 568	struct drm_crtc *crtc = &tilcdc_crtc->base;
 569
 570	dev_info(crtc->dev->dev, "%s: Reset CRTC", __func__);
 571
 572	drm_modeset_lock_crtc(crtc, NULL);
 573
 574	if (!tilcdc_crtc_is_on(crtc))
 575		goto out;
 576
 577	tilcdc_crtc_disable(crtc);
 578	tilcdc_crtc_enable(crtc);
 579out:
 580	drm_modeset_unlock_crtc(crtc);
 581}
 582
 583static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
 584{
 585	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 586	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
 587
 588	drm_modeset_lock_crtc(crtc, NULL);
 589	tilcdc_crtc_disable(crtc);
 590	drm_modeset_unlock_crtc(crtc);
 591
 592	flush_workqueue(priv->wq);
 593
 594	of_node_put(crtc->port);
 595	drm_crtc_cleanup(crtc);
 596	drm_flip_work_cleanup(&tilcdc_crtc->unref_work);
 597}
 598
 599int tilcdc_crtc_update_fb(struct drm_crtc *crtc,
 600		struct drm_framebuffer *fb,
 601		struct drm_pending_vblank_event *event)
 602{
 603	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 604	struct drm_device *dev = crtc->dev;
 605	unsigned long flags;
 606
 607	WARN_ON(!drm_modeset_is_locked(&crtc->mutex));
 608
 609	if (tilcdc_crtc->event) {
 610		dev_err(dev->dev, "already pending page flip!\n");
 611		return -EBUSY;
 612	}
 613
 614	drm_framebuffer_reference(fb);
 615
 616	crtc->primary->fb = fb;
 617
 618	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 619
 620	if (crtc->hwmode.vrefresh && ktime_to_ns(tilcdc_crtc->last_vblank)) {
 621		ktime_t next_vblank;
 622		s64 tdiff;
 623
 624		next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
 625			1000000 / crtc->hwmode.vrefresh);
 626
 627		tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));
 628
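		/*
		 * If the next vblank is imminent, only record the framebuffer
		 * in next_fb here; the immediate scanout update below is then
		 * skipped.
		 */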
 629		if (tdiff < TILCDC_VBLANK_SAFETY_THRESHOLD_US)
 630			tilcdc_crtc->next_fb = fb;
 631	}
 632
 633	if (tilcdc_crtc->next_fb != fb)
 634		set_scanout(crtc, fb);
 635
 636	tilcdc_crtc->event = event;
 637
 638	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 639
 640	return 0;
 641}
 642
 643static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
 644		const struct drm_display_mode *mode,
 645		struct drm_display_mode *adjusted_mode)
 646{
 647	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 648
 649	if (!tilcdc_crtc->simulate_vesa_sync)
 650		return true;
 651
 652	/*
 653	 * tilcdc does not generate VESA-compliant sync but aligns
 654	 * VS on the second edge of HS instead of first edge.
 655	 * We use adjusted_mode to fix up the sync by aligning both rising
 656	 * edges and adding an HSKEW offset.
 657	 */
 658	adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
 659	adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;
 660
 661	if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
 662		adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
 663		adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
 664	} else {
 665		adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
 666		adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
 667	}
 668
 669	return true;
 670}
 671
 672static int tilcdc_crtc_atomic_check(struct drm_crtc *crtc,
 673				    struct drm_crtc_state *state)
 674{
 675	struct drm_display_mode *mode = &state->mode;
 676	int ret;
 677
 678	/* If we are not active we don't care */
 679	if (!state->active)
 680		return 0;
 681
 682	if (state->state->planes[0].ptr != crtc->primary ||
 683	    state->state->planes[0].state == NULL ||
 684	    state->state->planes[0].state->crtc != crtc) {
 685		dev_dbg(crtc->dev->dev, "CRTC primary plane must be present");
 686		return -EINVAL;
 687	}
 688
 689	ret = tilcdc_crtc_mode_valid(crtc, mode);
 690	if (ret) {
 691		dev_dbg(crtc->dev->dev, "Mode \"%s\" not valid", mode->name);
 692		return -EINVAL;
 693	}
 694
 695	return 0;
 696}
 697
 698static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
 699	.destroy        = tilcdc_crtc_destroy,
 700	.set_config     = drm_atomic_helper_set_config,
 701	.page_flip      = drm_atomic_helper_page_flip,
 702	.reset		= drm_atomic_helper_crtc_reset,
 703	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
 704	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
 705};
 706
 707static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
 708		.mode_fixup     = tilcdc_crtc_mode_fixup,
 709		.enable		= tilcdc_crtc_enable,
 710		.disable	= tilcdc_crtc_disable,
 711		.atomic_check	= tilcdc_crtc_atomic_check,
 712};
 713
 714int tilcdc_crtc_max_width(struct drm_crtc *crtc)
 715{
 716	struct drm_device *dev = crtc->dev;
 717	struct tilcdc_drm_private *priv = dev->dev_private;
 718	int max_width = 0;
 719
 720	if (priv->rev == 1)
 721		max_width = 1024;
 722	else if (priv->rev == 2)
 723		max_width = 2048;
 724
 725	return max_width;
 726}
 727
 728int tilcdc_crtc_mode_valid(struct drm_crtc *crtc, struct drm_display_mode *mode)
 729{
 730	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
 731	unsigned int bandwidth;
 732	uint32_t hbp, hfp, hsw, vbp, vfp, vsw;
 733
 734	/*
 735	 * check to see if the width is within the range that
 736	 * the LCD Controller physically supports
 737	 */
 738	if (mode->hdisplay > tilcdc_crtc_max_width(crtc))
 739		return MODE_VIRTUAL_X;
 740
 741	/* width must be multiple of 16 */
 742	if (mode->hdisplay & 0xf)
 743		return MODE_VIRTUAL_X;
 744
 745	if (mode->vdisplay > 2048)
 746		return MODE_VIRTUAL_Y;
 747
 748	DBG("Processing mode %dx%d@%d with pixel clock %d",
 749		mode->hdisplay, mode->vdisplay,
 750		drm_mode_vrefresh(mode), mode->clock);
 751
 752	hbp = mode->htotal - mode->hsync_end;
 753	hfp = mode->hsync_start - mode->hdisplay;
 754	hsw = mode->hsync_end - mode->hsync_start;
 755	vbp = mode->vtotal - mode->vsync_end;
 756	vfp = mode->vsync_start - mode->vdisplay;
 757	vsw = mode->vsync_end - mode->vsync_start;
 758
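	/*
	 * The raster timing registers encode HBP, HFP and HSW as (value - 1)
	 * in 10-bit fields and VSW as (value - 1) in a 6-bit field, while VBP
	 * and VFP are programmed as-is in 8-bit fields; the masks below reject
	 * timings that would overflow those fields (field widths as implied by
	 * these checks rather than re-verified against the TRM).
	 */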
 759	if ((hbp-1) & ~0x3ff) {
 760		DBG("Pruning mode: Horizontal Back Porch out of range");
 761		return MODE_HBLANK_WIDE;
 762	}
 763
 764	if ((hfp-1) & ~0x3ff) {
 765		DBG("Pruning mode: Horizontal Front Porch out of range");
 766		return MODE_HBLANK_WIDE;
 767	}
 768
 769	if ((hsw-1) & ~0x3ff) {
 770		DBG("Pruning mode: Horizontal Sync Width out of range");
 771		return MODE_HSYNC_WIDE;
 772	}
 773
 774	if (vbp & ~0xff) {
 775		DBG("Pruning mode: Vertical Back Porch out of range");
 776		return MODE_VBLANK_WIDE;
 777	}
 778
 779	if (vfp & ~0xff) {
 780		DBG("Pruning mode: Vertical Front Porch out of range");
 781		return MODE_VBLANK_WIDE;
 782	}
 783
 784	if ((vsw-1) & ~0x3f) {
 785		DBG("Pruning mode: Vertical Sync Width out of range");
 786		return MODE_VSYNC_WIDE;
 787	}
 788
 789	/*
 790	 * some devices have a maximum allowed pixel clock
 791	 * configured from the DT
 792	 */
 793	if (mode->clock > priv->max_pixelclock) {
 794		DBG("Pruning mode: pixel clock too high");
 795		return MODE_CLOCK_HIGH;
 796	}
 797
 798	/*
 799	 * some devices further limit the max horizontal resolution
 800	 * configured from the DT
 801	 */
 802	if (mode->hdisplay > priv->max_width)
 803		return MODE_BAD_WIDTH;
 804
 805	/* filter out modes that would require too much memory bandwidth: */
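	/*
	 * For example, a hypothetical 1920x1080@60 mode evaluates to
	 * 1920 * 1080 * 60 ~= 124 Mpixel/s, which is compared against the
	 * limit from the device tree (or a driver default).
	 */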
 806	bandwidth = mode->hdisplay * mode->vdisplay *
 807		drm_mode_vrefresh(mode);
 808	if (bandwidth > priv->max_bandwidth) {
 809		DBG("Pruning mode: exceeds defined bandwidth limit");
 810		return MODE_BAD;
 811	}
 812
 813	return MODE_OK;
 814}
 815
 816void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
 817		const struct tilcdc_panel_info *info)
 818{
 819	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 820	tilcdc_crtc->info = info;
 821}
 822
 823void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
 824					bool simulate_vesa_sync)
 825{
 826	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 827
 828	tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
 829}
 830
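/*
 * Called when the LCDC functional clock rate may have changed (e.g. from the
 * driver's cpufreq transition notifier, where that is compiled in). The pixel
 * clock divider is derived from lcd_fck_rate, so if the cached rate no longer
 * matches the clock, the raster is stopped, the divider reprogrammed via
 * tilcdc_crtc_set_clk() and the CRTC re-enabled.
 */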
 831void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
 832{
 833	struct drm_device *dev = crtc->dev;
 834	struct tilcdc_drm_private *priv = dev->dev_private;
 835	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 836
 837	drm_modeset_lock_crtc(crtc, NULL);
 838	if (tilcdc_crtc->lcd_fck_rate != clk_get_rate(priv->clk)) {
 839		if (tilcdc_crtc_is_on(crtc)) {
 840			pm_runtime_get_sync(dev->dev);
 841			tilcdc_crtc_disable(crtc);
 842
 843			tilcdc_crtc_set_clk(crtc);
 844
 845			tilcdc_crtc_enable(crtc);
 846			pm_runtime_put_sync(dev->dev);
 847		}
 848	}
 849	drm_modeset_unlock_crtc(crtc);
 850}
 851
 852#define SYNC_LOST_COUNT_LIMIT 50
 853
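/*
 * Top-level LCDC interrupt handler. Depending on the status bits it:
 *  - on end of frame: commits deferred fb unrefs, latches a pending page
 *    flip into the scanout registers, and handles/sends the vblank event,
 *  - logs FIFO underflows (rate limited),
 *  - completes the palette-load wait and masks the palette interrupt,
 *  - on sync lost: restarts the raster (rev 1), or on rev 2 schedules the
 *    recovery worker once the interrupt floods past a threshold,
 *  - wakes anyone waiting on frame_done_wq (the disable path waits there
 *    for the final frame to drain),
 *  - and on rev 2 acknowledges end-of-interrupt as required by the TRM.
 */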
 854irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
 855{
 856	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
 857	struct drm_device *dev = crtc->dev;
 858	struct tilcdc_drm_private *priv = dev->dev_private;
 859	uint32_t stat, reg;
 860
 861	stat = tilcdc_read_irqstatus(dev);
 862	tilcdc_clear_irqstatus(dev, stat);
 863
 864	if (stat & LCDC_END_OF_FRAME0) {
 865		unsigned long flags;
 866		bool skip_event = false;
 867		ktime_t now;
 868
 869		now = ktime_get();
 870
 871		drm_flip_work_commit(&tilcdc_crtc->unref_work, priv->wq);
 872
 873		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
 874
 875		tilcdc_crtc->last_vblank = now;
 876
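		/*
		 * A page flip queued earlier is programmed here, at end of
		 * frame, so the new DMA base/ceiling take effect for the next
		 * full frame; its vblank event is then sent from the following
		 * end-of-frame interrupt (skip_event below).
		 */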
 877		if (tilcdc_crtc->next_fb) {
 878			set_scanout(crtc, tilcdc_crtc->next_fb);
 879			tilcdc_crtc->next_fb = NULL;
 880			skip_event = true;
 881		}
 882
 883		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
 884
 885		drm_crtc_handle_vblank(crtc);
 886
 887		if (!skip_event) {
 888			struct drm_pending_vblank_event *event;
 889
 890			spin_lock_irqsave(&dev->event_lock, flags);
 891
 892			event = tilcdc_crtc->event;
 893			tilcdc_crtc->event = NULL;
 894			if (event)
 895				drm_crtc_send_vblank_event(crtc, event);
 896
 897			spin_unlock_irqrestore(&dev->event_lock, flags);
 898		}
 899
 900		if (tilcdc_crtc->frame_intact)
 901			tilcdc_crtc->sync_lost_count = 0;
 902		else
 903			tilcdc_crtc->frame_intact = true;
 904	}
 905
 906	if (stat & LCDC_FIFO_UNDERFLOW)
 907		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow",
 908				    __func__, stat);
 909
 910	if (stat & LCDC_PL_LOAD_DONE) {
 911		complete(&tilcdc_crtc->palette_loaded);
 912		if (priv->rev == 1)
 913			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 914				     LCDC_V1_PL_INT_ENA);
 915		else
 916			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
 917				     LCDC_V2_PL_INT_ENA);
 918	}
 919
 920	if (stat & LCDC_SYNC_LOST) {
 921		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost",
 922				    __func__, stat);
 923		tilcdc_crtc->frame_intact = false;
 924		if (priv->rev == 1) {
 925			reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG);
 926			if (reg & LCDC_RASTER_ENABLE) {
 927				tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 928					     LCDC_RASTER_ENABLE);
 929				tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
 930					   LCDC_RASTER_ENABLE);
 931			}
 932		} else {
 933			if (tilcdc_crtc->sync_lost_count++ >
 934			    SYNC_LOST_COUNT_LIMIT) {
 935				dev_err(dev->dev,
 936					"%s(0x%08x): Sync lost flood detected, recovering",
 937					__func__, stat);
 938				queue_work(system_wq,
 939					   &tilcdc_crtc->recover_work);
 940				tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
 941					     LCDC_SYNC_LOST);
 942				tilcdc_crtc->sync_lost_count = 0;
 943			}
 944		}
 945	}
 946
 947	if (stat & LCDC_FRAME_DONE) {
 948		tilcdc_crtc->frame_done = true;
 949		wake_up(&tilcdc_crtc->frame_done_wq);
  950		/* rev 1 lcdc appears to hang if the irq is not disabled here */
 951		if (priv->rev == 1)
 952			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
 953				     LCDC_V1_FRAME_DONE_INT_ENA);
 954	}
 955
 956	/* For revision 2 only */
 957	if (priv->rev == 2) {
 958		/* Indicate to LCDC that the interrupt service routine has
 959		 * completed, see 13.3.6.1.6 in AM335x TRM.
 960		 */
 961		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
 962	}
 963
 964	return IRQ_HANDLED;
 965}
 966
 967int tilcdc_crtc_create(struct drm_device *dev)
 968{
 969	struct tilcdc_drm_private *priv = dev->dev_private;
 970	struct tilcdc_crtc *tilcdc_crtc;
 971	struct drm_crtc *crtc;
 972	int ret;
 973
 974	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
 975	if (!tilcdc_crtc) {
 976		dev_err(dev->dev, "allocation failed\n");
 977		return -ENOMEM;
 978	}
 979
 980	init_completion(&tilcdc_crtc->palette_loaded);
 981	tilcdc_crtc->palette_base = dmam_alloc_coherent(dev->dev,
 982					TILCDC_PALETTE_SIZE,
 983					&tilcdc_crtc->palette_dma_handle,
 984					GFP_KERNEL | __GFP_ZERO);
 985	if (!tilcdc_crtc->palette_base)
 986		return -ENOMEM;
 987	*tilcdc_crtc->palette_base = TILCDC_PALETTE_FIRST_ENTRY;
 988
 989	crtc = &tilcdc_crtc->base;
 990
 991	ret = tilcdc_plane_init(dev, &tilcdc_crtc->primary);
 992	if (ret < 0)
 993		goto fail;
 994
 995	mutex_init(&tilcdc_crtc->enable_lock);
 996
 997	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);
 998
 999	drm_flip_work_init(&tilcdc_crtc->unref_work,
1000			"unref", unref_worker);
1001
1002	spin_lock_init(&tilcdc_crtc->irq_lock);
1003	INIT_WORK(&tilcdc_crtc->recover_work, tilcdc_crtc_recover_work);
1004
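	/*
	 * Register the CRTC with its dedicated primary plane; the LCDC has no
	 * hardware cursor, hence the NULL cursor plane argument.
	 */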
1005	ret = drm_crtc_init_with_planes(dev, crtc,
1006					&tilcdc_crtc->primary,
1007					NULL,
1008					&tilcdc_crtc_funcs,
1009					"tilcdc crtc");
1010	if (ret < 0)
1011		goto fail;
1012
1013	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);
1014
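	/*
	 * In the componentized (external encoder) case the connection is
	 * described by an OF graph. Both of these (hypothetical) DT layouts
	 * are accepted when looking up the port node:
	 *
	 *   lcdc { ports { port { endpoint { ... }; }; }; };
	 *   lcdc { port { endpoint { ... }; }; };
	 */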
1015	if (priv->is_componentized) {
1016		struct device_node *ports =
1017			of_get_child_by_name(dev->dev->of_node, "ports");
1018
1019		if (ports) {
1020			crtc->port = of_get_child_by_name(ports, "port");
1021			of_node_put(ports);
1022		} else {
1023			crtc->port =
1024				of_get_child_by_name(dev->dev->of_node, "port");
1025		}
1026		if (!crtc->port) { /* This should never happen */
1027			dev_err(dev->dev, "Port node not found in %s\n",
1028				dev->dev->of_node->full_name);
1029			ret = -EINVAL;
1030			goto fail;
1031		}
1032	}
1033
1034	priv->crtc = crtc;
1035	return 0;
1036
1037fail:
1038	tilcdc_crtc_destroy(crtc);
 1039	return ret;
1040}