Linux Audio

Check our new training course

Loading...
v4.6
   1/*
   2 * Copyright 2011 Red Hat Inc.
   3 *
   4 * Permission is hereby granted, free of charge, to any person obtaining a
   5 * copy of this software and associated documentation files (the "Software"),
   6 * to deal in the Software without restriction, including without limitation
   7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   8 * and/or sell copies of the Software, and to permit persons to whom the
   9 * Software is furnished to do so, subject to the following conditions:
  10 *
  11 * The above copyright notice and this permission notice shall be included in
  12 * all copies or substantial portions of the Software.
  13 *
  14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  20 * OTHER DEALINGS IN THE SOFTWARE.
  21 *
  22 * Authors: Ben Skeggs
  23 */
  24
  25#include <linux/dma-mapping.h>
 
  26
  27#include <drm/drmP.h>
 
 
  28#include <drm/drm_crtc_helper.h>
  29#include <drm/drm_plane_helper.h>
  30#include <drm/drm_dp_helper.h>
  31#include <drm/drm_fb_helper.h>
 
 
  32
  33#include <nvif/class.h>
  34#include <nvif/cl0002.h>
  35#include <nvif/cl5070.h>
  36#include <nvif/cl507a.h>
  37#include <nvif/cl507b.h>
  38#include <nvif/cl507c.h>
  39#include <nvif/cl507d.h>
  40#include <nvif/cl507e.h>
 
  41
  42#include "nouveau_drm.h"
  43#include "nouveau_dma.h"
  44#include "nouveau_gem.h"
  45#include "nouveau_connector.h"
  46#include "nouveau_encoder.h"
  47#include "nouveau_crtc.h"
  48#include "nouveau_fence.h"
 
  49#include "nv50_display.h"
  50
  51#define EVO_DMA_NR 9
  52
  53#define EVO_MASTER  (0x00)
  54#define EVO_FLIP(c) (0x01 + (c))
  55#define EVO_OVLY(c) (0x05 + (c))
  56#define EVO_OIMM(c) (0x09 + (c))
  57#define EVO_CURS(c) (0x0d + (c))
  58
  59/* offsets in shared sync bo of various structures */
  60#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
  61#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
  62#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
  63#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
  64
  65/******************************************************************************
  66 * EVO channel
  67 *****************************************************************************/
  68
/* Common state for a single EVO display channel: the nvif object that
 * represents the channel on the GPU, plus the device it was created on. */
struct nv50_chan {
	struct nvif_object user;	/* channel object, mapped at creation */
	struct nvif_device *device;	/* owning device, kept for teardown */
};
  73
  74static int
  75nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
  76		 const s32 *oclass, u8 head, void *data, u32 size,
  77		 struct nv50_chan *chan)
  78{
  79	struct nvif_sclass *sclass;
  80	int ret, i, n;
  81
  82	chan->device = device;
  83
  84	ret = n = nvif_object_sclass_get(disp, &sclass);
  85	if (ret < 0)
  86		return ret;
  87
  88	while (oclass[0]) {
  89		for (i = 0; i < n; i++) {
  90			if (sclass[i].oclass == oclass[0]) {
  91				ret = nvif_object_init(disp, 0, oclass[0],
  92						       data, size, &chan->user);
  93				if (ret == 0)
  94					nvif_object_map(&chan->user);
  95				nvif_object_sclass_put(&sclass);
  96				return ret;
  97			}
  98		}
  99		oclass++;
 100	}
 101
 102	nvif_object_sclass_put(&sclass);
 103	return -ENOSYS;
 104}
 105
/* Tear down an EVO channel created by nv50_chan_create(). */
static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}
 111
 112/******************************************************************************
 113 * PIO EVO channel
 114 *****************************************************************************/
 115
/* A PIO (non-DMA) EVO channel; thin wrapper around nv50_chan. */
struct nv50_pioc {
	struct nv50_chan base;
};

/* Destroy a PIO channel; forwards to the common channel teardown. */
static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}

/* Create a PIO channel; forwards to the common channel constructor. */
static int
nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_pioc *pioc)
{
	return nv50_chan_create(device, disp, oclass, head, data, size,
				&pioc->base);
}
 134
 135/******************************************************************************
 136 * Cursor Immediate
 137 *****************************************************************************/
 138
/* Cursor-immediate PIO channel for one head. */
struct nv50_curs {
	struct nv50_pioc base;
};

/* Create the cursor channel for "head".  The class list is ordered
 * newest-hardware-first and terminated by 0. */
static int
nv50_curs_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_curs *curs)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_CURSOR,
		GF110_DISP_CURSOR,
		GT214_DISP_CURSOR,
		G82_DISP_CURSOR,
		NV50_DISP_CURSOR,
		0
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&curs->base);
}
 162
 163/******************************************************************************
 164 * Overlay Immediate
 165 *****************************************************************************/
 166
/* Overlay-immediate PIO channel for one head. */
struct nv50_oimm {
	struct nv50_pioc base;
};

/* Create the overlay-immediate channel for "head".  Class list is
 * newest-hardware-first, zero-terminated. */
static int
nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_oimm *oimm)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY,
		GF110_DISP_OVERLAY,
		GT214_DISP_OVERLAY,
		G82_DISP_OVERLAY,
		NV50_DISP_OVERLAY,
		0
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}
 190
 191/******************************************************************************
 192 * DMA EVO channel
 193 *****************************************************************************/
 194
 
 
 
 
 
/* A DMA EVO channel: the channel itself plus its CPU-visible pushbuf
 * page and the DMA objects it needs for sync/VRAM access. */
struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;	/* bus address of the pushbuf page */
	u32 *ptr;		/* CPU mapping of the pushbuf page */

	struct nvif_object sync;	/* DMA object over the shared sync bo */
	struct nvif_object vram;	/* DMA object spanning user VRAM */

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};
 208
/* Tear down a DMA EVO channel: release the sync/vram DMA objects, destroy
 * the channel, then free the coherent pushbuf page (if allocated).  Safe
 * to call on a partially-constructed dmac, since nv50_dmac_create() may
 * fail midway.  The "disp" parameter is currently unused. */
static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
	struct nvif_device *device = dmac->base.device;

	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	/* Pushbuf page was allocated with dma_alloc_coherent() in
	 * nv50_dmac_create(); NULL means allocation never happened. */
	if (dmac->ptr) {
		struct device *dev = nvxx_device(device)->dev;
		dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}
}
 224
 225static int
 226nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
 227		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
 228		 struct nv50_dmac *dmac)
 229{
 230	struct nv50_disp_core_channel_dma_v0 *args = data;
 231	struct nvif_object pushbuf;
 232	int ret;
 233
 234	mutex_init(&dmac->lock);
 
 235
 236	dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
 237				       &dmac->handle, GFP_KERNEL);
 238	if (!dmac->ptr)
 239		return -ENOMEM;
 240
 241	ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
 242			       &(struct nv_dma_v0) {
 243					.target = NV_DMA_V0_TARGET_PCI_US,
 244					.access = NV_DMA_V0_ACCESS_RD,
 245					.start = dmac->handle + 0x0000,
 246					.limit = dmac->handle + 0x0fff,
 247			       }, sizeof(struct nv_dma_v0), &pushbuf);
 248	if (ret)
 249		return ret;
 250
 251	args->pushbuf = nvif_handle(&pushbuf);
 252
 253	ret = nv50_chan_create(device, disp, oclass, head, data, size,
 254			       &dmac->base);
 255	nvif_object_fini(&pushbuf);
 256	if (ret)
 257		return ret;
 258
 259	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
 260			       &(struct nv_dma_v0) {
 261					.target = NV_DMA_V0_TARGET_VRAM,
 262					.access = NV_DMA_V0_ACCESS_RDWR,
 263					.start = syncbuf + 0x0000,
 264					.limit = syncbuf + 0x0fff,
 265			       }, sizeof(struct nv_dma_v0),
 266			       &dmac->sync);
 267	if (ret)
 268		return ret;
 269
 270	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
 271			       &(struct nv_dma_v0) {
 272					.target = NV_DMA_V0_TARGET_VRAM,
 273					.access = NV_DMA_V0_ACCESS_RDWR,
 274					.start = 0,
 275					.limit = device->info.ram_user - 1,
 276			       }, sizeof(struct nv_dma_v0),
 277			       &dmac->vram);
 278	if (ret)
 279		return ret;
 280
 281	return ret;
 282}
 283
 284/******************************************************************************
 285 * Core
 286 *****************************************************************************/
 287
/* The core (master) EVO channel. */
struct nv50_mast {
	struct nv50_dmac base;
};

/* Create the core channel.  Class list is newest-hardware-first and
 * zero-terminated; head is fixed at 0 for the core channel. */
static int
nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
		 u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,
	};
	static const s32 oclass[] = {
		GM200_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
				syncbuf, &core->base);
}
 316
 317/******************************************************************************
 318 * Base
 319 *****************************************************************************/
 320
/* Base (primary plane) channel plus the flip-semaphore state used by the
 * page-flip code (addr/data track the next semaphore slot and value). */
struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;	/* current semaphore offset within the sync bo */
	u32 data;	/* next semaphore value to release/acquire */
};

/* Create the base channel for "head".  Class list is newest-first,
 * zero-terminated. */
static int
nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}
 349
 350/******************************************************************************
 351 * Overlay
 352 *****************************************************************************/
 353
/* Overlay control DMA channel for one head. */
struct nv50_ovly {
	struct nv50_dmac base;
};

/* Create the overlay channel for "head".  Class list is newest-first,
 * zero-terminated. */
static int
nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}
 379
/* Per-head state: the CRTC plus all of that head's EVO channels and the
 * currently-displayed buffer ("image", refcounted by flip_next). */
struct nv50_head {
	struct nouveau_crtc base;
	struct nouveau_bo *image;	/* bo currently scanned out */
	struct nv50_curs curs;
	struct nv50_sync sync;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

/* Accessors from a drm_crtc to the head and its channels; nv50_vers()
 * yields the channel's object class, used for per-generation method
 * offsets throughout this file. */
#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_curs(c) (&nv50_head(c)->curs)
#define nv50_sync(c) (&nv50_head(c)->sync)
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass

/* Per-framebuffer DMA objects (core + one per base channel). */
struct nv50_fbdma {
	struct list_head head;
	struct nvif_object core;
	struct nvif_object base[4];
};

/* Top-level nv50 display state. */
struct nv50_disp {
	struct nvif_object *disp;
	struct nv50_mast mast;	/* core channel */

	struct list_head fbdma;	/* list of nv50_fbdma */

	struct nouveau_bo *sync;	/* shared sync/semaphore buffer */
};
 411
/* Fetch the nv50 display state stashed in the generic display's priv. */
static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

/* Shorthand for the core (master) channel of a device. */
#define nv50_mast(d) (&nv50_disp(d)->mast)

/* Return the CRTC an encoder is currently attached to (may be NULL). */
static struct drm_crtc *
nv50_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}
 425
 426/******************************************************************************
 427 * EVO channel helpers
 428 *****************************************************************************/
 429static u32 *
 430evo_wait(void *evoc, int nr)
 431{
 432	struct nv50_dmac *dmac = evoc;
 433	struct nvif_device *device = dmac->base.device;
 434	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
 435
 436	mutex_lock(&dmac->lock);
 437	if (put + nr >= (PAGE_SIZE / 4) - 8) {
 438		dmac->ptr[put] = 0x20000000;
 439
 440		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
 441		if (nvif_msec(device, 2000,
 442			if (!nvif_rd32(&dmac->base.user, 0x0004))
 443				break;
 444		) < 0) {
 445			mutex_unlock(&dmac->lock);
 446			printk(KERN_ERR "nouveau: evo channel stalled\n");
 447			return NULL;
 448		}
 449
 450		put = 0;
 451	}
 452
 453	return dmac->ptr + put;
 454}
 455
/* Submit methods written since evo_wait(): advance PUT to the current
 * write position and drop the channel mutex taken by evo_wait(). */
static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}
 463
/* Pushbuf emission helpers: evo_mthd() writes a method header (count in
 * bits 18+, method offset in the low bits), evo_data() writes a payload
 * dword.  The #else variants additionally printk-trace every write for
 * debugging; flip the #if to enable them. */
#if 1
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)
#else
#define evo_mthd(p,m,s) do {                                                   \
	const u32 _m = (m), _s = (s);                                          \
	printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__);                     \
	*((p)++) = ((_s << 18) | _m);                                          \
} while(0)
#define evo_data(p,d) do {                                                     \
	const u32 _d = (d);                                                    \
	printk(KERN_ERR "\t%08x\n", _d);                                       \
	*((p)++) = _d;                                                         \
} while(0)
#endif
 479
 480static bool
 481evo_sync_wait(void *data)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 482{
 483	if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
 484		return true;
 485	usleep_range(1, 2);
 486	return false;
 487}
 488
/* Synchronise with the core channel: clear the master notifier word,
 * submit a notify request plus update, then poll (up to 2s) for the
 * notifier to be written back.  Returns 0 on success, -EBUSY if pushbuf
 * space could not be reserved or the notifier never arrived. */
static int
evo_sync(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_mast *mast = nv50_mast(dev);
	u32 *push = evo_wait(mast, 8);
	if (push) {
		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_kick(push, mast);
		if (nvif_msec(device, 2000,
			if (evo_sync_wait(disp->sync))
				break;
		) >= 0)
			return 0;
	}

	return -EBUSY;
}
 513
 514/******************************************************************************
 515 * Page flipping channel
 516 *****************************************************************************/
/* Return the shared sync bo used for flip semaphores.  The same bo is
 * shared by all CRTCs, so the "crtc" argument is unused here. */
struct nouveau_bo *
nv50_display_crtc_sema(struct drm_device *dev, int crtc)
{
	return nv50_disp(dev)->sync;
}
 522
/* Bundle passed to nv50_display_flip_wait(): the display (for its sync
 * bo) and the base channel whose semaphore slot is being watched. */
struct nv50_display_flip {
	struct nv50_disp *disp;
	struct nv50_sync *chan;
};
 
 
 
 
 
 
 
 527
 528static bool
 529nv50_display_flip_wait(void *data)
 
 
 530{
 531	struct nv50_display_flip *flip = data;
 532	if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) ==
 533					      flip->chan->data)
 534		return true;
 535	usleep_range(1, 2);
 536	return false;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 537}
 538
/* Cancel any pending page flip on a CRTC's base channel and wait (up to
 * 2s) for the channel's flip semaphore to reach its expected value. */
void
nv50_display_flip_stop(struct drm_crtc *crtc)
{
	struct nvif_device *device = &nouveau_drm(crtc->dev)->device;
	struct nv50_display_flip flip = {
		.disp = nv50_disp(crtc->dev),
		.chan = nv50_sync(crtc),
	};
	u32 *push;

	push = evo_wait(flip.chan, 8);
	if (push) {
		/* Clear notify/semaphore/buffer state, then update. */
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0080, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, flip.chan);
	}

	/* Best-effort wait; timeout is ignored. */
	nvif_msec(device, 2000,
		if (nv50_display_flip_wait(&flip))
			break;
	);
}
 567
 568int
 569nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
 570		       struct nouveau_channel *chan, u32 swap_interval)
 571{
 572	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
 573	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
 574	struct nv50_head *head = nv50_head(crtc);
 575	struct nv50_sync *sync = nv50_sync(crtc);
 576	u32 *push;
 577	int ret;
 578
 579	if (crtc->primary->fb->width != fb->width ||
 580	    crtc->primary->fb->height != fb->height)
 581		return -EINVAL;
 582
 583	swap_interval <<= 4;
 584	if (swap_interval == 0)
 585		swap_interval |= 0x100;
 586	if (chan == NULL)
 587		evo_sync(crtc->dev);
 588
 589	push = evo_wait(sync, 128);
 590	if (unlikely(push == NULL))
 591		return -EBUSY;
 
 
 
 
 
 
 
 592
 593	if (chan && chan->user.oclass < G82_CHANNEL_GPFIFO) {
 594		ret = RING_SPACE(chan, 8);
 595		if (ret)
 596			return ret;
 597
 598		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
 599		OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
 600		OUT_RING  (chan, sync->addr ^ 0x10);
 601		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
 602		OUT_RING  (chan, sync->data + 1);
 603		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
 604		OUT_RING  (chan, sync->addr);
 605		OUT_RING  (chan, sync->data);
 606	} else
 607	if (chan && chan->user.oclass < FERMI_CHANNEL_GPFIFO) {
 608		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
 609		ret = RING_SPACE(chan, 12);
 610		if (ret)
 611			return ret;
 612
 613		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
 614		OUT_RING  (chan, chan->vram.handle);
 615		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
 616		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
 617		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
 618		OUT_RING  (chan, sync->data + 1);
 619		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
 620		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
 621		OUT_RING  (chan, upper_32_bits(addr));
 622		OUT_RING  (chan, lower_32_bits(addr));
 623		OUT_RING  (chan, sync->data);
 624		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL);
 625	} else
 626	if (chan) {
 627		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
 628		ret = RING_SPACE(chan, 10);
 629		if (ret)
 630			return ret;
 631
 632		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
 633		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
 634		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
 635		OUT_RING  (chan, sync->data + 1);
 636		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG |
 637				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
 638		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
 639		OUT_RING  (chan, upper_32_bits(addr));
 640		OUT_RING  (chan, lower_32_bits(addr));
 641		OUT_RING  (chan, sync->data);
 642		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL |
 643				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
 644	}
 645
 646	if (chan) {
 647		sync->addr ^= 0x10;
 648		sync->data++;
 649		FIRE_RING (chan);
 650	}
 651
 652	/* queue the flip */
 653	evo_mthd(push, 0x0100, 1);
 654	evo_data(push, 0xfffe0000);
 655	evo_mthd(push, 0x0084, 1);
 656	evo_data(push, swap_interval);
 657	if (!(swap_interval & 0x00000100)) {
 658		evo_mthd(push, 0x00e0, 1);
 659		evo_data(push, 0x40000000);
 660	}
 661	evo_mthd(push, 0x0088, 4);
 662	evo_data(push, sync->addr);
 663	evo_data(push, sync->data++);
 664	evo_data(push, sync->data);
 665	evo_data(push, sync->base.sync.handle);
 666	evo_mthd(push, 0x00a0, 2);
 667	evo_data(push, 0x00000000);
 668	evo_data(push, 0x00000000);
 669	evo_mthd(push, 0x00c0, 1);
 670	evo_data(push, nv_fb->r_handle);
 671	evo_mthd(push, 0x0110, 2);
 672	evo_data(push, 0x00000000);
 673	evo_data(push, 0x00000000);
 674	if (nv50_vers(sync) < GF110_DISP_BASE_CHANNEL_DMA) {
 675		evo_mthd(push, 0x0800, 5);
 676		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
 677		evo_data(push, 0);
 678		evo_data(push, (fb->height << 16) | fb->width);
 679		evo_data(push, nv_fb->r_pitch);
 680		evo_data(push, nv_fb->r_format);
 681	} else {
 682		evo_mthd(push, 0x0400, 5);
 683		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
 684		evo_data(push, 0);
 685		evo_data(push, (fb->height << 16) | fb->width);
 686		evo_data(push, nv_fb->r_pitch);
 687		evo_data(push, nv_fb->r_format);
 688	}
 689	evo_mthd(push, 0x0080, 1);
 690	evo_data(push, 0x00000000);
 691	evo_kick(push, sync);
 692
 693	nouveau_bo_ref(nv_fb->nvbo, &head->image);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 694	return 0;
 695}
 696
 697/******************************************************************************
 698 * CRTC
 699 *****************************************************************************/
/* Program the head's dithering mode/depth from the connector properties,
 * resolving the AUTO settings against the monitor's bpc.  If "update" is
 * set, follow with an update method to latch the change.  Returns 0. */
static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	u32 *push, mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	/* AUTO: dither only when the fb has more depth than the sink. */
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	push = evo_wait(mast, 4);
	if (push) {
		/* Method offset/stride differs per display generation. */
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
			evo_data(push, mode);
		} else
		if (nv50_vers(mast) < GK104_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		} else {
			evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}
 747
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
/* Program the head's scaler: derive the output size from the scaling
 * mode (NONE/FULL/CENTER/ASPECT), subtract underscan borders, then emit
 * the viewport methods.  When "update" is set, restart flipping on the
 * current framebuffer so the change takes effect.  Returns 0. */
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode) {
		mode = nv_connector->scaling_mode;
		if (nv_connector->scaling_full) /* non-EDID LVDS/eDP mode */
			mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;	/* 19-bit fixed point */

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			/* No explicit border: shrink width ~6% + 32px. */
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		/* Viewport method offsets differ pre/post GF110. */
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			/*XXX: SCALE_CTRL_ACTIVE??? */
			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		} else {
			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		}

		evo_kick(push, mast);

		if (update) {
			nv50_display_flip_stop(crtc);
			nv50_display_flip_next(crtc, crtc->primary->fb,
					       NULL, 1);
		}
	}

	return 0;
}
 854
 855static int
 856nv50_crtc_set_raster_vblank_dmi(struct nouveau_crtc *nv_crtc, u32 usec)
 857{
 858	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
 859	u32 *push;
 
 
 
 
 
 
 
 
 
 860
 861	push = evo_wait(mast, 8);
 862	if (!push)
 863		return -ENOMEM;
 
 
 864
 865	evo_mthd(push, 0x0828 + (nv_crtc->index * 0x400), 1);
 866	evo_data(push, usec);
 867	evo_kick(push, mast);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 868	return 0;
 869}
 870
 
 
 
 
 
 
 
/* Program the head's colour vibrance and vibrant hue from the CRTC's
 * properties, scaling the -100..100 / 0..100 values into the hardware's
 * 12-bit fields.  Latch with an update method if "update".  Returns 0. */
static int
nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push, hue, vib;
	int adj;

	/* Round positive vibrance values up when scaling to 0..2047. */
	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;

	push = evo_wait(mast, 16);
	if (push) {
		/* Method offset differs pre/post GF110. */
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		} else {
			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}
 901
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
/* Point the head's core-channel scanout at framebuffer "fb" with panning
 * offset (x, y), emitting the per-generation surface methods.  Latch
 * with an update method if "update".  Returns 0. */
static int
nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (y << 16) | x);
			/* DMA handle method only exists on G82 and later. */
			if (nv50_vers(mast) > NV50_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
				evo_data(push, nvfb->r_handle);
			}
		} else {
			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_data(push, nvfb->r_handle);
			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (y << 16) | x);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	/* Remember the fb's DMA handle for later comparisons. */
	nv_crtc->fb.handle = nvfb->r_handle;
	return 0;
}
 947
/* Enable the hardware cursor on a head, pointing it at the cursor bo.
 * Emits per-generation methods; marks the cursor visible.  Does not emit
 * an update on its own — see nv50_crtc_cursor_show_hide(). */
static void
nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
		}
		evo_kick(push, mast);
	}
	nv_crtc->cursor.visible = true;
}
 976
/* Disable the hardware cursor on this head (mirror of
 * nv50_crtc_cursor_show()): writes the cursor-off control word and, on
 * G82+ classes, clears the previously-programmed object handle. */
static void
nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}
	nv_crtc->cursor.visible = false;
}
1002
1003static void
1004nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
1005{
1006	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
1007
1008	if (show && nv_crtc->cursor.nvbo && nv_crtc->base.enabled)
1009		nv50_crtc_cursor_show(nv_crtc);
1010	else
1011		nv50_crtc_cursor_hide(nv_crtc);
1012
1013	if (update) {
1014		u32 *push = evo_wait(mast, 2);
1015		if (push) {
1016			evo_mthd(push, 0x0080, 1);
1017			evo_data(push, 0x00000000);
1018			evo_kick(push, mast);
 
 
 
 
 
 
 
 
 
 
1019		}
 
1020	}
1021}
1022
/* Legacy CRTC DPMS hook: intentionally a no-op for this driver. */
static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
1027
/* Legacy helper prepare(): quiesce this head before a modeset.
 *
 * Stops any in-flight page flipping, then pushes per-class core channel
 * methods that detach the framebuffer object and blank the head, and
 * finally hides the cursor (without an immediate update; commit() will
 * kick one later). */
static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	nv50_display_flip_stop(crtc);

	push = evo_wait(mast, 6);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			/* clear the fb handle, then write the head control word */
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (nv50_vers(mast) <  GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
			/* G82..GF110 also clear the 0x085c object handle */
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			/* GF110+ uses 0x300-strided methods and a different
			 * control value (0x03000000) */
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
}
1066
/* Legacy helper commit(): re-enable this head after a modeset.
 *
 * Re-attaches the framebuffer handle saved by nv50_crtc_set_image(),
 * programs the gamma LUT buffer address, re-shows the cursor (with an
 * immediate update kick) and restarts page flipping.  Mirrors the
 * per-class method layout used by nv50_crtc_prepare(). */
static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	push = evo_wait(mast, 32);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.handle);
			/* head control + LUT buffer address (256-byte units) */
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0xffffff00);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
}
1111
/* Legacy CRTC mode_fixup: fill in the hardware (crtc_*) timing fields of
 * @adjusted_mode, halving vertical timings for interlaced modes.  Never
 * rejects a mode. */
static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
	return true;
}
1119
1120static int
1121nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1122{
1123	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb);
1124	struct nv50_head *head = nv50_head(crtc);
1125	int ret;
1126
1127	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM, true);
1128	if (ret == 0) {
1129		if (head->image)
1130			nouveau_bo_unpin(head->image);
1131		nouveau_bo_ref(nvfb->nvbo, &head->image);
1132	}
1133
1134	return ret;
 
 
 
 
 
 
 
 
 
 
1135}
1136
/* Legacy CRTC mode_set: program this head's raster timings and pixel
 * clock from the adjusted @mode, then re-apply dither/scale/vibrance
 * state and set the scanout image.
 *
 * The h*/v* locals translate the DRM mode description into the packed
 * (end << 16 | start)-style values the core channel methods take, with
 * vertical values scaled by vscan (doublescan) and divided by ilace
 * (interlace).  @umode is the user-supplied mode and is unused here.
 */
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1, vblankus = 0;
	u32 *push;
	int ret;

	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	/* XXX: Safe underestimate, even "0" works */
	/* vblank duration in microseconds, consumed by the raster vblank
	 * DMI method for pre-GF110 below */
	vblankus = (vactive - mode->vdisplay - 2) * hactive;
	vblankus *= 1000;
	vblankus /= mode->clock;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		/* second-field blanking window for interlaced modes */
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(mast, 64);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00800000 | mode->clock);
			evo_data(push, (ilace == 2) ? 2 : 0);
			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		} else {
			/* GF110+: same packed timings at 0x300-strided
			 * offsets, clock programmed in Hz at 0x0450 */
			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000); /* ??? */
			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
			evo_data(push, mode->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, mode->clock * 1000);
			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		}

		evo_kick(push, mast);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nv50_crtc_set_dither(nv_crtc, false);
	nv50_crtc_set_scale(nv_crtc, false);

	/* G94 only accepts this after setting scale */
	if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA)
		nv50_crtc_set_raster_vblank_dmi(nv_crtc, vblankus);

	nv50_crtc_set_color_vibrance(nv_crtc, false);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
	return 0;
}
1233
1234static int
1235nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
1236			struct drm_framebuffer *old_fb)
1237{
1238	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
1239	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1240	int ret;
 
 
 
 
 
 
 
 
 
1241
1242	if (!crtc->primary->fb) {
1243		NV_DEBUG(drm, "No FB bound\n");
1244		return 0;
1245	}
 
 
 
 
 
 
 
 
 
 
 
1246
1247	ret = nv50_crtc_swap_fbs(crtc, old_fb);
1248	if (ret)
1249		return ret;
 
 
 
 
 
 
 
 
1250
1251	nv50_display_flip_stop(crtc);
1252	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
1253	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
1254	return 0;
1255}
1256
1257static int
1258nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
1259			       struct drm_framebuffer *fb, int x, int y,
1260			       enum mode_set_atomic state)
1261{
1262	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1263	nv50_display_flip_stop(crtc);
1264	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
1265	return 0;
1266}
1267
1268static void
1269nv50_crtc_lut_load(struct drm_crtc *crtc)
1270{
 
1271	struct nv50_disp *disp = nv50_disp(crtc->dev);
1272	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1273	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
 
 
 
 
1274	int i;
1275
1276	for (i = 0; i < 256; i++) {
1277		u16 r = nv_crtc->lut.r[i] >> 2;
1278		u16 g = nv_crtc->lut.g[i] >> 2;
1279		u16 b = nv_crtc->lut.b[i] >> 2;
1280
1281		if (disp->disp->oclass < GF110_DISP) {
1282			writew(r + 0x0000, lut + (i * 0x08) + 0);
1283			writew(g + 0x0000, lut + (i * 0x08) + 2);
1284			writew(b + 0x0000, lut + (i * 0x08) + 4);
1285		} else {
1286			writew(r + 0x6000, lut + (i * 0x20) + 0);
1287			writew(g + 0x6000, lut + (i * 0x20) + 2);
1288			writew(b + 0x6000, lut + (i * 0x20) + 4);
1289		}
1290	}
1291}
1292
/* Helper disable(): synchronise with the evo channel, then unpin and
 * drop the reference on the framebuffer this head was scanning out. */
static void
nv50_crtc_disable(struct drm_crtc *crtc)
{
	struct nv50_head *head = nv50_head(crtc);
	evo_sync(crtc->dev);
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);
}
1302
/* Legacy cursor_set ioctl hook: attach the GEM object named by @handle
 * as this head's cursor image, or detach the cursor when @handle is 0.
 *
 * Only 64x64 cursors are accepted.  The new buffer is pinned into VRAM
 * before the old one is unpinned/released, so on failure the previous
 * cursor state is left intact.  The cursor is re-shown (or hidden, if
 * now NULL) with an immediate update at the end. */
static int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem = NULL;
	struct nouveau_bo *nvbo = NULL;
	int ret = 0;

	if (handle) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM, true);
	}

	if (ret == 0) {
		/* swap in the new (possibly NULL) cursor bo */
		if (nv_crtc->cursor.nvbo)
			nouveau_bo_unpin(nv_crtc->cursor.nvbo);
		nouveau_bo_ref(nvbo, &nv_crtc->cursor.nvbo);
	}
	/* drop the lookup reference; nouveau_bo_ref() holds its own */
	drm_gem_object_unreference_unlocked(gem);

	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
	return ret;
}
1335
1336static int
1337nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
1338{
1339	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1340	struct nv50_curs *curs = nv50_curs(crtc);
1341	struct nv50_chan *chan = nv50_chan(curs);
1342	nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff));
1343	nvif_wr32(&chan->user, 0x0080, 0x00000000);
1344
1345	nv_crtc->cursor_saved_x = x;
1346	nv_crtc->cursor_saved_y = y;
1347	return 0;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1348}
1349
1350static void
1351nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1352		    uint32_t start, uint32_t size)
1353{
1354	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1355	u32 end = min_t(u32, start + size, 256);
1356	u32 i;
1357
1358	for (i = start; i < end; i++) {
1359		nv_crtc->lut.r[i] = r[i];
1360		nv_crtc->lut.g[i] = g[i];
1361		nv_crtc->lut.b[i] = b[i];
1362	}
1363
1364	nv50_crtc_lut_load(crtc);
1365}
1366
/* Re-apply a saved cursor position and re-show the cursor; installed as
 * the nouveau_crtc cursor.set_pos hook in nv50_crtc_create(). */
static void
nv50_crtc_cursor_restore(struct nouveau_crtc *nv_crtc, int x, int y)
{
	nv50_crtc_cursor_move(&nv_crtc->base, x, y);

	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
}
1374
/* Tear down one head: release its per-head fbdma objects, destroy its
 * overlay/sync/cursor channels, unpin any buffers still held (scanout
 * image, cursor, LUT) and free the containing nv50_head.  Also used as
 * the unwind path of nv50_crtc_create(). */
static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_fbdma *fbdma;

	/* drop this head's slot in every framebuffer dma object */
	list_for_each_entry(fbdma, &disp->fbdma, head) {
		nvif_object_fini(&fbdma->base[nv_crtc->index]);
	}

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);
	nv50_dmac_destroy(&head->sync.base, disp->disp);
	nv50_pioc_destroy(&head->curs.base);

	/*XXX: this shouldn't be necessary, but the core doesn't call
	 *     disconnect() during the cleanup paths
	 */
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);

	/*XXX: ditto */
	if (nv_crtc->cursor.nvbo)
		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
1412
/* Legacy (non-atomic) CRTC helper operations for NV50+ heads. */
static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
	.disable = nv50_crtc_disable,
};
1424
/* CRTC ioctl entry points; set_config/page_flip are the shared nouveau
 * implementations. */
static const struct drm_crtc_funcs nv50_crtc_func = {
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = nouveau_crtc_set_config,
	.destroy = nv50_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};
1433
/* Create and register one display head (@index): initialise the DRM
 * CRTC object with an identity gamma ramp, allocate+map an 8KiB VRAM
 * buffer for the hardware LUT, and create the cursor, base (sync/flip)
 * and overlay channels.  Any failure unwinds everything already set up
 * via nv50_crtc_destroy(). */
static int
nv50_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	head->base.set_dither = nv50_crtc_set_dither;
	head->base.set_scale = nv50_crtc_set_scale;
	head->base.set_color_vibrance = nv50_crtc_set_color_vibrance;
	head->base.color_vibrance = 50;
	head->base.vibrant_hue = 0;
	head->base.cursor.set_pos = nv50_crtc_cursor_restore;
	/* identity (linear) gamma ramp as the initial LUT contents */
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	crtc = &head->base.base;
	/* NOTE(review): drm_crtc_init() return value is not checked */
	drm_crtc_init(dev, crtc, &nv50_crtc_func);
	drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* hardware LUT buffer: allocate, pin and map; unwind on error */
	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	/* allocate cursor resources */
	ret = nv50_curs_create(device, disp->disp, index, &head->curs);
	if (ret)
		goto out;

	/* allocate page flip / sync resources */
	ret = nv50_base_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->sync);
	if (ret)
		goto out;

	head->sync.addr = EVO_FLIP_SEM0(index);
	head->sync.data = 0x00000000;

	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_crtc_destroy(crtc);
	return ret;
}
1511
1512/******************************************************************************
1513 * Encoder helpers
1514 *****************************************************************************/
/* Shared encoder mode_fixup: when the attached connector has a native
 * (panel) mode, substitute it for the requested mode so the hardware
 * scaler can produce the user's resolution.  With scaling disabled
 * (DRM_MODE_SCALE_NONE), LVDS/eDP panels still force the scaler for
 * non-EDID (driver-generated) modes; all other connector types pass the
 * requested mode through unchanged. */
static bool
nv50_encoder_mode_fixup(struct drm_encoder *encoder,
			const struct drm_display_mode *mode,
			struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		nv_connector->scaling_full = false;
		if (nv_connector->scaling_mode == DRM_MODE_SCALE_NONE) {
			switch (nv_connector->type) {
			case DCB_CONNECTOR_LVDS:
			case DCB_CONNECTOR_LVDS_SPWG:
			case DCB_CONNECTOR_eDP:
				/* force use of scaler for non-edid modes */
				if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
					return true;
				nv_connector->scaling_full = true;
				break;
			default:
				return true;
			}
		}

		drm_mode_copy(adjusted_mode, nv_connector->native_mode);
	}

	return true;
}
1546
1547/******************************************************************************
1548 * DAC
1549 *****************************************************************************/
/* Set DAC power state through the NV50_DISP_MTHD_V1_DAC_PWR firmware
 * method.  hsync/vsync are kept running or stopped individually so
 * DPMS_STANDBY drops hsync only and DPMS_SUSPEND drops vsync only. */
static void
nv50_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = 1,
		.pwr.data  = 1,
		.pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
			      mode != DRM_MODE_DPMS_OFF),
		.pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
			      mode != DRM_MODE_DPMS_OFF),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
1573
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
/* Encoder helper commit(): intentionally empty — the DAC is fully
 * programmed in nv50_dac_mode_set(). */
static void
nv50_dac_commit(struct drm_encoder *encoder)
{
}
1578
/* Route this DAC to its CRTC and program sync polarity via the core
 * channel.  Pre-GF110 uses per-OR methods (0x0400 + or*0x80); GF110+
 * programs per-head sync/"magic" words plus a per-OR owner mask.
 * Powers the DAC on first and records the bound crtc for disconnect. */
static void
nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
1627
/* Detach this DAC from its CRTC: quiesce the head (prepare) and clear
 * the OR's routing in the core channel.  No-op if not currently bound.
 * Used for both the helper's prepare() and disable() hooks. */
static void
nv50_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
1654
/* Analog load detection via the NV50_DISP_MTHD_V1_DAC_LOAD method.
 * The test level comes from the VBIOS (dactestval), falling back to 340
 * when the VBIOS provides none; the firmware reports back whether a
 * load (monitor) was sensed. */
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}
1681
 
 
 
 
 
 
 
 
/* Free a DAC encoder allocated by nv50_dac_create(). */
static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
1688
/* DAC encoder helper operations; prepare and disable share the same
 * disconnect implementation. */
static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
	.dpms = nv50_dac_dpms,
	.mode_fixup = nv50_encoder_mode_fixup,
	.prepare = nv50_dac_disconnect,
	.commit = nv50_dac_commit,
	.mode_set = nv50_dac_mode_set,
	.disable = nv50_dac_disconnect,
	.get_crtc = nv50_display_crtc_get,
	.detect = nv50_dac_detect
};
1699
/* DAC encoder core operations. */
static const struct drm_encoder_funcs nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};
1703
/* Create a DRM DAC encoder for DCB entry @dcbe and attach it to
 * @connector.  The OR index is taken from the DCB "or" mask, and the
 * DDC bus (if any) is looked up from the DCB i2c index. */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type, NULL);
	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
1733
1734/******************************************************************************
1735 * Audio
1736 *****************************************************************************/
/* Enable HDA audio on this SOR/head: if the connector's EDID reports
 * audio support, build an ELD from the EDID and hand it to the firmware
 * via NV50_DISP_MTHD_V1_SOR_HDA_ELD.  Returns silently for monitors
 * without audio.  Only the valid portion of the ELD is sent. */
static void
nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		/* hashm encodes the head in bits 8..11 */
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}
1768
 
 
 
/* Tear down HDA audio on this SOR/head by sending the SOR_HDA_ELD
 * method with no ELD payload. */
static void
nv50_audio_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
1787
1788/******************************************************************************
1789 * HDMI
1790 *****************************************************************************/
/* Enable HDMI infoframe/audio support on this SOR/head when the
 * connector's EDID identifies an HDMI sink; DVI monitors return early.
 *
 * max_ac_packet is derived from the horizontal blanking period minus
 * the HDCP rekey window and a constant 18 (both values matching the
 * binary driver and tegra), expressed in 32-pixel units. */
static void
nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv50_audio_mode_set(encoder, mode);
}
1824
/* Disable HDMI support on this SOR/head by sending SOR_HDMI_PWR with
 * pwr.state left zero. */
static void
nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
1843
1844/******************************************************************************
1845 * SOR
1846 *****************************************************************************/
/* Set SOR power state via NV50_DISP_MTHD_V1_SOR_PWR (plus SOR_DP_PWR
 * for DisplayPort outputs).
 *
 * Two TMDS encoders may share one OR, so before powering down, scan the
 * other encoders: if a different TMDS encoder on the same OR was last
 * set to DPMS_ON, leave the OR alone.  For DP the SOR itself is always
 * kept powered (args.pwr.state forced to 1) and only the DP link power
 * follows @mode. */
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_pwr_v0 pwr;
	} link = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		/* another encoder on the same OR that's still on wins */
		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		args.pwr.state = 1;
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
		nvif_mthd(disp->disp, 0, &link, sizeof(link));
	} else {
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
	}
}
1899
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
/* Update the SOR control method in the core EVO channel, but only push
 * when the masked bits actually change (avoids redundant methods).
 */
static void
nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data)
{
	struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
	u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;
	if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			/* pre-GF110: SOR control at 0x0600, 0x40 stride */
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		} else {
			/* GF110+: SOR control at 0x0200, 0x20 stride */
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		}
		evo_kick(push, mast);
	}
}
1916
/* Detach the SOR from its CRTC: clear the owner bit in the SOR control
 * word and tear down audio/HDMI state tied to the head.
 */
static void
nv50_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		nv50_crtc_prepare(&nv_crtc->base);
		nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
		nv50_audio_disconnect(encoder, nv_crtc);
		nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
	}
}
1933
/* No-op: all SOR programming happens in nv50_sor_mode_set(), but the
 * helper framework requires a .commit hook.
 */
static void
nv50_sor_commit(struct drm_encoder *encoder)
{
}
1938
/* Program the SOR for a new mode: select protocol (TMDS/LVDS/DP),
 * depth, and sync polarity, then latch it via nv50_sor_ctrl().
 */
static void
nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 mask, ctrl;
	u8 owner = 1 << nv_crtc->index;	/* head ownership bit */
	u8 proto = 0xf;			/* protocol field, set per DCB type below */
	u8 depth = 0x0;			/* colour depth field (DP only) */

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 *  - Need to (i.e. rate > 165MHz)
			 *  - DCB says we can
			 *  - Not an HDMI monitor, since there's no dual-link
			 *    on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		/* Build the LVDS script flags (0x0100 = dual-link,
		 * 0x0200 = 24bpp) from VBIOS/EDID information.
		 */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* SPWG EDID byte 121 encodes link count */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		/* datarate = pixel clock * bytes-per-pixel (bpp / 8) */
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			depth = 0x2;
		} else
		if (nv_connector->base.display_info.bpc == 8) {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			depth = 0x5;
		} else {
			nv_encoder->dp.datarate = mode->clock * 30 / 8;
			depth = 0x6;
		}

		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;
		nv50_audio_mode_set(encoder, mode);
		break;
	default:
		BUG_ON(1);
		break;
	}

	nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);

	if (nv50_vers(mast) >= GF110_DISP) {
		/* GF110+: sync polarity/depth live in a per-head method,
		 * not in the SOR control word.
		 */
		u32 *push = evo_wait(mast, 3);
		if (push) {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs | (depth << 6));
			evo_data(push, magic);
			evo_kick(push, mast);
		}

		ctrl = proto << 8;
		mask = 0x00000f00;
	} else {
		/* pre-GF110: everything packed into the SOR control word */
		ctrl = (depth << 16) | (proto << 8);
		if (mode->flags & DRM_MODE_FLAG_NHSYNC)
			ctrl |= 0x00001000;
		if (mode->flags & DRM_MODE_FLAG_NVSYNC)
			ctrl |= 0x00002000;
		mask = 0x000f3f00;
	}

	nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
}
2078
 
 
 
 
 
 
 
/* Encoder .destroy hook: unregister from DRM and free the allocation
 * made in nv50_sor_create().
 */
static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
2085
/* Helper vtable for SOR encoders; .prepare and .disable share the
 * disconnect path.
 */
static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
	.dpms = nv50_sor_dpms,
	.mode_fixup = nv50_encoder_mode_fixup,
	.prepare = nv50_sor_disconnect,
	.commit = nv50_sor_commit,
	.mode_set = nv50_sor_mode_set,
	.disable = nv50_sor_disconnect,
	.get_crtc = nv50_display_crtc_get,
};
2095
/* Base encoder vtable for SOR encoders. */
static const struct drm_encoder_funcs nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
2099
/* Create a DRM encoder for an on-chip SOR (TMDS/LVDS/DP) DCB entry and
 * attach it to @connector.
 *
 * Returns 0 on success or -ENOMEM if the encoder can't be allocated.
 */
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;	/* DCB stores OR as a bitmask */
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	/* DP uses an AUX channel for DDC; everything else a plain i2c bus. */
	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			nv_encoder->i2c = &aux->i2c;
			nv_encoder->aux = aux;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type, NULL);
	drm_encoder_helper_add(encoder, &nv50_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
2148
2149/******************************************************************************
2150 * PIOR
2151 *****************************************************************************/
 
 
 
 
 
 
 
 
 
 
 
2152
/* Set the power state of an off-chip (PIOR) output via the display
 * class PIOR_PWR method.
 */
static void
nv50_pior_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_pior_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
		.pwr.type = nv_encoder->dcb->type,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
2172
2173static bool
2174nv50_pior_mode_fixup(struct drm_encoder *encoder,
2175		     const struct drm_display_mode *mode,
2176		     struct drm_display_mode *adjusted_mode)
2177{
2178	if (!nv50_encoder_mode_fixup(encoder, mode, adjusted_mode))
2179		return false;
2180	adjusted_mode->clock *= 2;
2181	return true;
2182}
2183
/* No-op: PIOR programming happens in nv50_pior_mode_set(); hook is
 * required by the encoder helper framework.
 */
static void
nv50_pior_commit(struct drm_encoder *encoder)
{
}
2188
2189static void
2190nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
2191		   struct drm_display_mode *adjusted_mode)
2192{
2193	struct nv50_mast *mast = nv50_mast(encoder->dev);
2194	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2195	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2196	struct nouveau_connector *nv_connector;
 
2197	u8 owner = 1 << nv_crtc->index;
2198	u8 proto, depth;
2199	u32 *push;
2200
 
 
2201	nv_connector = nouveau_encoder_connector_get(nv_encoder);
2202	switch (nv_connector->base.display_info.bpc) {
2203	case 10: depth = 0x6; break;
2204	case  8: depth = 0x5; break;
2205	case  6: depth = 0x2; break;
2206	default: depth = 0x0; break;
2207	}
2208
2209	switch (nv_encoder->dcb->type) {
2210	case DCB_OUTPUT_TMDS:
2211	case DCB_OUTPUT_DP:
2212		proto = 0x0;
2213		break;
2214	default:
2215		BUG_ON(1);
2216		break;
2217	}
2218
2219	nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON);
2220
2221	push = evo_wait(mast, 8);
2222	if (push) {
2223		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2224			u32 ctrl = (depth << 16) | (proto << 8) | owner;
2225			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2226				ctrl |= 0x00001000;
2227			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2228				ctrl |= 0x00002000;
2229			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
2230			evo_data(push, ctrl);
2231		}
2232
2233		evo_kick(push, mast);
2234	}
2235
2236	nv_encoder->crtc = encoder->crtc;
2237}
2238
/* Detach a PIOR from its CRTC by zeroing its control method in the
 * core channel.
 */
static void
nv50_pior_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
2262
/* Encoder .destroy hook: unregister from DRM and free the allocation
 * made in nv50_pior_create().
 */
static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
2269
/* Helper vtable for PIOR encoders; .prepare and .disable share the
 * disconnect path.
 */
static const struct drm_encoder_helper_funcs nv50_pior_hfunc = {
	.dpms = nv50_pior_dpms,
	.mode_fixup = nv50_pior_mode_fixup,
	.prepare = nv50_pior_disconnect,
	.commit = nv50_pior_commit,
	.mode_set = nv50_pior_mode_set,
	.disable = nv50_pior_disconnect,
	.get_crtc = nv50_display_crtc_get,
};
2279
/* Base encoder vtable for PIOR encoders. */
static const struct drm_encoder_funcs nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
2283
/* Create a DRM encoder for an off-chip (PIOR) DCB entry and attach it
 * to @connector.
 *
 * Returns 0 on success, -ENODEV for unsupported DCB types, or -ENOMEM.
 */
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	/* External encoders hang off an "ext" i2c bus/aux channel keyed
	 * by the DCB extdev index.
	 */
	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc  = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc  = aux ? &aux->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;	/* DCB stores OR as a bitmask */
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type, NULL);
	drm_encoder_helper_add(encoder, &nv50_pior_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
2328
2329/******************************************************************************
2330 * Framebuffer
2331 *****************************************************************************/
2332
/* Tear down a framebuffer DMA object set: destroy the per-head and
 * core nvif objects, unlink from the disp->fbdma list, and free it.
 */
static void
nv50_fbdma_fini(struct nv50_fbdma *fbdma)
{
	int i;
	for (i = 0; i < ARRAY_SIZE(fbdma->base); i++)
		nvif_object_fini(&fbdma->base[i]);
	nvif_object_fini(&fbdma->core);
	list_del(&fbdma->head);
	kfree(fbdma);
}
2343
2344static int
2345nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind)
2346{
 
 
 
 
 
2347	struct nouveau_drm *drm = nouveau_drm(dev);
2348	struct nv50_disp *disp = nv50_disp(dev);
2349	struct nv50_mast *mast = nv50_mast(dev);
2350	struct __attribute__ ((packed)) {
2351		struct nv_dma_v0 base;
2352		union {
2353			struct nv50_dma_v0 nv50;
2354			struct gf100_dma_v0 gf100;
2355			struct gf119_dma_v0 gf119;
2356		};
2357	} args = {};
2358	struct nv50_fbdma *fbdma;
2359	struct drm_crtc *crtc;
2360	u32 size = sizeof(args.base);
2361	int ret;
 
 
 
 
 
2362
2363	list_for_each_entry(fbdma, &disp->fbdma, head) {
2364		if (fbdma->core.handle == name)
2365			return 0;
 
 
 
 
 
 
2366	}
2367
2368	fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL);
2369	if (!fbdma)
2370		return -ENOMEM;
2371	list_add(&fbdma->head, &disp->fbdma);
 
 
 
 
 
2372
2373	args.base.target = NV_DMA_V0_TARGET_VRAM;
2374	args.base.access = NV_DMA_V0_ACCESS_RDWR;
2375	args.base.start = offset;
2376	args.base.limit = offset + length - 1;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2377
2378	if (drm->device.info.chipset < 0x80) {
2379		args.nv50.part = NV50_DMA_V0_PART_256;
2380		size += sizeof(args.nv50);
2381	} else
2382	if (drm->device.info.chipset < 0xc0) {
2383		args.nv50.part = NV50_DMA_V0_PART_256;
2384		args.nv50.kind = kind;
2385		size += sizeof(args.nv50);
2386	} else
2387	if (drm->device.info.chipset < 0xd0) {
2388		args.gf100.kind = kind;
2389		size += sizeof(args.gf100);
2390	} else {
2391		args.gf119.page = GF119_DMA_V0_PAGE_LP;
2392		args.gf119.kind = kind;
2393		size += sizeof(args.gf119);
2394	}
2395
2396	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2397		struct nv50_head *head = nv50_head(crtc);
2398		int ret = nvif_object_init(&head->sync.base.base.user, name,
2399					   NV_DMA_IN_MEMORY, &args, size,
2400					   &fbdma->base[head->base.index]);
2401		if (ret) {
2402			nv50_fbdma_fini(fbdma);
2403			return ret;
 
 
 
 
 
 
 
 
2404		}
2405	}
2406
2407	ret = nvif_object_init(&mast->base.base.user, name, NV_DMA_IN_MEMORY,
2408			       &args, size, &fbdma->core);
2409	if (ret) {
2410		nv50_fbdma_fini(fbdma);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2411		return ret;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2412	}
2413
2414	return 0;
2415}
2416
/* Framebuffer destructor hook: nothing to do here; the shared fbdma
 * objects are torn down in nv50_display_destroy().
 */
static void
nv50_fb_dtor(struct drm_framebuffer *fb)
{
}
2421
/* Framebuffer constructor hook: translate the DRM framebuffer's depth,
 * pitch and tiling into the hardware format/pitch words, then ensure a
 * matching VRAM DMA object exists.
 *
 * Returns 0 on success or -EINVAL for an unsupported depth.
 */
static int
nv50_fb_ctor(struct drm_framebuffer *fb)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nouveau_drm *drm = nouveau_drm(fb->dev);
	struct nouveau_bo *nvbo = nv_fb->nvbo;
	struct nv50_disp *disp = nv50_disp(fb->dev);
	u8 kind = nouveau_bo_tile_layout(nvbo) >> 8;
	u8 tile = nvbo->tile_mode;

	/* Fermi+ encodes the tile mode in the upper nibble. */
	if (drm->device.info.chipset >= 0xc0)
		tile >>= 4; /* yep.. */

	switch (fb->depth) {
	case  8: nv_fb->r_format = 0x1e00; break;
	case 15: nv_fb->r_format = 0xe900; break;
	case 16: nv_fb->r_format = 0xe800; break;
	case 24:	/* fallthrough: 24 and 32bpp share a format */
	case 32: nv_fb->r_format = 0xcf00; break;
	case 30: nv_fb->r_format = 0xd100; break;
	default:
		 NV_ERROR(drm, "unknown depth %d\n", fb->depth);
		 return -EINVAL;
	}

	/* Pitch encoding differs per display class generation; tiled
	 * surfaces store pitch in units of 16 bytes plus the tile mode.
	 */
	if (disp->disp->oclass < G82_DISP) {
		nv_fb->r_pitch   = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
					    (fb->pitches[0] | 0x00100000);
		nv_fb->r_format |= kind << 16;
	} else
	if (disp->disp->oclass < GF110_DISP) {
		nv_fb->r_pitch  = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
					   (fb->pitches[0] | 0x00100000);
	} else {
		nv_fb->r_pitch  = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
					   (fb->pitches[0] | 0x01000000);
	}
	/* Handle encodes the tiling kind so DMA objects can be shared. */
	nv_fb->r_handle = 0xffff0000 | kind;

	return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0,
			       drm->device.info.ram_user, kind);
}
2464
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2465/******************************************************************************
2466 * Init
2467 *****************************************************************************/
2468
/* Display shutdown hook: nothing to do for this generation; teardown
 * happens in nv50_display_destroy().
 */
void
nv50_display_fini(struct drm_device *dev)
{
}
2473
/* (Re)initialise display state after load/resume: reload each CRTC's
 * LUT, restore the flip semaphore values in the shared sync buffer,
 * and point the core channel at its sync DMA object.
 *
 * Returns 0 on success or -EBUSY if the core pushbuf can't be acquired.
 */
int
nv50_display_init(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_crtc *crtc;
	u32 *push;

	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nv50_sync *sync = nv50_sync(crtc);

		nv50_crtc_lut_load(crtc);
		/* Re-seed the flip semaphore so pending waits complete. */
		nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data);
	}

	/* 0x0088: core channel notifier/sync context DMA */
	evo_mthd(push, 0x0088, 1);
	evo_data(push, nv50_mast(dev)->base.sync.handle);
	evo_kick(push, nv50_mast(dev));
	return 0;
}
2497
2498void
2499nv50_display_destroy(struct drm_device *dev)
2500{
2501	struct nv50_disp *disp = nv50_disp(dev);
2502	struct nv50_fbdma *fbdma, *fbtmp;
2503
2504	list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) {
2505		nv50_fbdma_fini(fbdma);
2506	}
2507
2508	nv50_dmac_destroy(&disp->mast.base, disp->disp);
2509
2510	nouveau_bo_unmap(disp->sync);
2511	if (disp->sync)
2512		nouveau_bo_unpin(disp->sync);
2513	nouveau_bo_ref(NULL, &disp->sync);
2514
2515	nouveau_display(dev)->priv = NULL;
2516	kfree(disp);
2517}
2518
 
 
 
 
/* Top-level constructor for the nv50+ display engine: allocates the
 * disp state, a small shared sync/notifier buffer, the core EVO
 * channel, one CRTC per hardware head, and encoders/connectors from
 * the VBIOS DCB table.
 *
 * Returns 0 on success or a negative error code; on failure all
 * partially-created state is released via nv50_display_destroy().
 */
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	INIT_LIST_HEAD(&disp->fbdma);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	nouveau_display(dev)->fb_ctor = nv50_fb_ctor;
	nouveau_display(dev)->fb_dtor = nv50_fb_dtor;
	disp->disp = &nouveau_display(dev)->disp;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
			      &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x022448);
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		/* A bad DCB entry shouldn't be fatal for the whole GPU. */
		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				     dcbe->location, dcbe->type,
				     ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}
v4.17
   1/*
   2 * Copyright 2011 Red Hat Inc.
   3 *
   4 * Permission is hereby granted, free of charge, to any person obtaining a
   5 * copy of this software and associated documentation files (the "Software"),
   6 * to deal in the Software without restriction, including without limitation
   7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   8 * and/or sell copies of the Software, and to permit persons to whom the
   9 * Software is furnished to do so, subject to the following conditions:
  10 *
  11 * The above copyright notice and this permission notice shall be included in
  12 * all copies or substantial portions of the Software.
  13 *
  14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  20 * OTHER DEALINGS IN THE SOFTWARE.
  21 *
  22 * Authors: Ben Skeggs
  23 */
  24
  25#include <linux/dma-mapping.h>
  26#include <linux/hdmi.h>
  27
  28#include <drm/drmP.h>
  29#include <drm/drm_atomic.h>
  30#include <drm/drm_atomic_helper.h>
  31#include <drm/drm_crtc_helper.h>
 
  32#include <drm/drm_dp_helper.h>
  33#include <drm/drm_fb_helper.h>
  34#include <drm/drm_plane_helper.h>
  35#include <drm/drm_edid.h>
  36
  37#include <nvif/class.h>
  38#include <nvif/cl0002.h>
  39#include <nvif/cl5070.h>
  40#include <nvif/cl507a.h>
  41#include <nvif/cl507b.h>
  42#include <nvif/cl507c.h>
  43#include <nvif/cl507d.h>
  44#include <nvif/cl507e.h>
  45#include <nvif/event.h>
  46
  47#include "nouveau_drv.h"
  48#include "nouveau_dma.h"
  49#include "nouveau_gem.h"
  50#include "nouveau_connector.h"
  51#include "nouveau_encoder.h"
  52#include "nouveau_crtc.h"
  53#include "nouveau_fence.h"
  54#include "nouveau_fbcon.h"
  55#include "nv50_display.h"
  56
#define EVO_DMA_NR 9

/* EVO channel indices within the shared sync buffer / channel array. */
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
#define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20)
#define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30)
  72
  73/******************************************************************************
  74 * Atomic state
  75 *****************************************************************************/
#define nv50_atom(p) container_of((p), struct nv50_atom, state)

/* Driver-private atomic state wrapping drm_atomic_state. */
struct nv50_atom {
	struct drm_atomic_state state;

	struct list_head outp;	/* nv50_outp_atom list for this commit */
	bool lock_core;		/* commit needs the core channel locked */
	bool flush_disable;	/* disables must be flushed before enables */
};

/* Per-output (encoder) state tracked across an atomic commit. */
struct nv50_outp_atom {
	struct list_head head;

	struct drm_encoder *encoder;
	bool flush_disable;

	/* bits to clear / set in the output's control method; the
	 * anonymous struct and mask alias the same byte.
	 */
	union {
		struct {
			bool ctrl:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool ctrl:1;
		};
		u8 mask;
	} set;
};
 106
#define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)

/* Driver-private CRTC (head) atomic state: the hardware-ready image of
 * everything a head needs programmed, plus clr/set dirty masks.
 */
struct nv50_head_atom {
	struct drm_crtc_state state;

	/* viewport: input (i) and output (o) dimensions for scaling */
	struct {
		u16 iW;
		u16 iH;
		u16 oW;
		u16 oH;
	} view;

	/* raster timings decomposed into the fields the hw methods take */
	struct nv50_head_mode {
		bool interlace;
		u32 clock;
		struct {
			u16 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
		} h;
		struct {
			u32 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
			u16 blank2s;
			u16 blank2e;
			u16 blankus;
		} v;
	} mode;

	/* input LUT surface */
	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  mode:4;
	} lut;

	/* core channel surface (the "base" image shown by the head) */
	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  format;
		u8  kind:7;
		u8  layout:1;
		u8  block:4;
		u32 pitch:20;
		u16 x;
		u16 y;
		u16 w;
		u16 h;
	} core;

	/* hardware cursor surface */
	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  layout:1;
		u8  format:1;
	} curs;

	/* base (primary plane) geometry as seen by the core channel */
	struct {
		u8  depth;
		u8  cpp;
		u16 x;
		u16 y;
		u16 w;
		u16 h;
	} base;

	/* overlay plane bytes-per-pixel */
	struct {
		u8 cpp;
	} ovly;

	/* dithering configuration */
	struct {
		bool enable:1;
		u8 bits:2;
		u8 mode:4;
	} dither;

	/* procamp (saturation) as 12-bit cos/sin pair */
	struct {
		struct {
			u16 cos:12;
			u16 sin:12;
		} sat;
	} procamp;

	/* dirty masks: which of the above must be cleared/programmed
	 * this commit; anonymous struct bits alias the mask.
	 */
	union {
		struct {
			bool ilut:1;
			bool core:1;
			bool curs:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool ilut:1;
			bool core:1;
			bool curs:1;
			bool view:1;
			bool mode:1;
			bool base:1;
			bool ovly:1;
			bool dither:1;
			bool procamp:1;
		};
		u16 mask;
	} set;
};
 219
/* Fetch (or create) the nv50_head_atom for @crtc in @state.
 * Returns an ERR_PTR passed through from drm_atomic_get_crtc_state()
 * on failure.
 */
static inline struct nv50_head_atom *
nv50_head_atom_get(struct drm_atomic_state *state, struct drm_crtc *crtc)
{
	struct drm_crtc_state *statec = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(statec))
		return (void *)statec;
	return nv50_head_atom(statec);
}
 228
#define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state)

/* Driver-private plane (window) atomic state: hardware-ready image of
 * a window's notifier, semaphore, LUT, framebuffer and position, plus
 * clr/set dirty masks.
 */
struct nv50_wndw_atom {
	struct drm_plane_state state;
	u8 interval;	/* flip interval (vblanks per flip) */

	/* completion notifier within the shared sync bo */
	struct {
		u32  handle;
		u16  offset:12;
		bool awaken:1;
	} ntfy;

	/* flip semaphore within the shared sync bo */
	struct {
		u32 handle;
		u16 offset:12;
		u32 acquire;
		u32 release;
	} sema;

	struct {
		u8 enable:2;
	} lut;

	/* framebuffer: scanout mode, format and surface layout */
	struct {
		u8  mode:2;
		u8  interval:4;

		u8  format;
		u8  kind:7;
		u8  layout:1;
		u8  block:4;
		u32 pitch:20;
		u16 w;
		u16 h;

		u32 handle;
		u64 offset;
	} image;

	/* window position on the head */
	struct {
		u16 x;
		u16 y;
	} point;

	/* dirty masks; anonymous struct bits alias the mask */
	union {
		struct {
			bool ntfy:1;
			bool sema:1;
			bool image:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool ntfy:1;
			bool sema:1;
			bool image:1;
			bool lut:1;
			bool point:1;
		};
		u8 mask;
	} set;
};
 293
 294/******************************************************************************
 295 * EVO channel
 296 *****************************************************************************/
 297
/* Base state for any EVO display channel: its nvif object and device. */
struct nv50_chan {
	struct nvif_object user;
	struct nvif_device *device;
};
 302
/* Create an EVO channel object, trying each class in the
 * zero-terminated @oclass list (ordered newest-first) until one is
 * supported by @disp.
 *
 * Returns 0 on success, -ENOSYS if no class matched, or a negative
 * error from object creation.
 */
static int
nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_chan *chan)
{
	struct nvif_sclass *sclass;
	int ret, i, n;

	chan->device = device;

	ret = n = nvif_object_sclass_get(disp, &sclass);
	if (ret < 0)
		return ret;

	while (oclass[0]) {
		for (i = 0; i < n; i++) {
			if (sclass[i].oclass == oclass[0]) {
				ret = nvif_object_init(disp, 0, oclass[0],
						       data, size, &chan->user);
				if (ret == 0)
					nvif_object_map(&chan->user, NULL, 0);
				nvif_object_sclass_put(&sclass);
				return ret;
			}
		}
		oclass++;
	}

	nvif_object_sclass_put(&sclass);
	return -ENOSYS;
}
 334
/* Destroy the channel's nvif object (also drops any mapping). */
static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}
 340
 341/******************************************************************************
 342 * PIO EVO channel
 343 *****************************************************************************/
 344
/* PIO-mode EVO channel (cursor/overlay-immediate); no pushbuf. */
struct nv50_pioc {
	struct nv50_chan base;
};
 348
/* Tear down a PIO channel; thin wrapper over nv50_chan_destroy(). */
static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}
 354
/* Create a PIO channel; thin wrapper over nv50_chan_create(). */
static int
nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_pioc *pioc)
{
	return nv50_chan_create(device, disp, oclass, head, data, size,
				&pioc->base);
}
 363
 364/******************************************************************************
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 365 * Overlay Immediate
 366 *****************************************************************************/
 367
/* Overlay-immediate channel (PIO); one per head. */
struct nv50_oimm {
	struct nv50_pioc base;
};
 371
/* Create the overlay-immediate channel for @head, using the newest
 * overlay class the display supports.
 */
static int
nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_oimm *oimm)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	/* Newest-first preference list, zero-terminated. */
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY,
		GF110_DISP_OVERLAY,
		GT214_DISP_OVERLAY,
		G82_DISP_OVERLAY,
		NV50_DISP_OVERLAY,
		0
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}
 391
 392/******************************************************************************
 393 * DMA EVO channel
 394 *****************************************************************************/
 395
/* A cached DMA context object, kept on the owning channel's ctxdma list
 * and keyed by the object handle (see nv50_dmac_ctxdma_new()).
 */
struct nv50_dmac_ctxdma {
	struct list_head head;		/* entry in nv50_dmac.ctxdma */
	struct nvif_object object;
};
 400
/* A DMA EVO channel: a channel fed from a CPU-visible push buffer. */
struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;	/* DMA address of the push-buffer page */
	u32 *ptr;		/* CPU mapping of the push-buffer page */

	struct nvif_object sync;	/* DMA object over the sync buffer */
	struct nvif_object vram;	/* DMA object spanning VRAM */
	struct list_head ctxdma;	/* cached nv50_dmac_ctxdma objects */

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};
 415
/* Destroy a cached ctxdma and unlink it from its channel's list. */
static void
nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma *ctxdma)
{
	nvif_object_fini(&ctxdma->object);
	list_del(&ctxdma->head);
	kfree(ctxdma);
}
 423
/* Look up (or create) the DMA context object describing VRAM with the
 * framebuffer's memory kind, which EVO needs to scan out the buffer.
 * Objects are cached on dmac->ctxdma, keyed by handle 0xfb000000 | kind.
 *
 * Returns the ctxdma, or an ERR_PTR on allocation/object failure.
 */
static struct nv50_dmac_ctxdma *
nv50_dmac_ctxdma_new(struct nv50_dmac *dmac, struct nouveau_framebuffer *fb)
{
	struct nouveau_drm *drm = nouveau_drm(fb->base.dev);
	struct nv50_dmac_ctxdma *ctxdma;
	const u8    kind = fb->nvbo->kind;
	const u32 handle = 0xfb000000 | kind;
	struct {
		struct nv_dma_v0 base;
		union {
			struct nv50_dma_v0 nv50;
			struct gf100_dma_v0 gf100;
			struct gf119_dma_v0 gf119;
		};
	} args = {};
	u32 argc = sizeof(args.base);
	int ret;

	/* Reuse an existing ctxdma for this memory kind, if any. */
	list_for_each_entry(ctxdma, &dmac->ctxdma, head) {
		if (ctxdma->object.handle == handle)
			return ctxdma;
	}

	if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
		return ERR_PTR(-ENOMEM);
	/* Linked before init so the error path can use ctxdma_del(). */
	list_add(&ctxdma->head, &dmac->ctxdma);

	args.base.target = NV_DMA_V0_TARGET_VRAM;
	args.base.access = NV_DMA_V0_ACCESS_RDWR;
	args.base.start  = 0;
	args.base.limit  = drm->client.device.info.ram_user - 1;

	/* Chipset-generation-specific tail of the DMA object arguments. */
	if (drm->client.device.info.chipset < 0x80) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xc0) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		args.nv50.kind = kind;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xd0) {
		args.gf100.kind = kind;
		argc += sizeof(args.gf100);
	} else {
		args.gf119.page = GF119_DMA_V0_PAGE_LP;
		args.gf119.kind = kind;
		argc += sizeof(args.gf119);
	}

	ret = nvif_object_init(&dmac->base.user, handle, NV_DMA_IN_MEMORY,
			       &args, argc, &ctxdma->object);
	if (ret) {
		nv50_dmac_ctxdma_del(ctxdma);
		return ERR_PTR(ret);
	}

	return ctxdma;
}
 483
/* Tear down a DMA EVO channel: cached ctxdmas, the sync/vram DMA objects,
 * the channel itself, and finally the push-buffer page.
 */
static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
	struct nvif_device *device = dmac->base.device;
	struct nv50_dmac_ctxdma *ctxdma, *ctxtmp;

	list_for_each_entry_safe(ctxdma, ctxtmp, &dmac->ctxdma, head) {
		nv50_dmac_ctxdma_del(ctxdma);
	}

	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	/* ptr may be NULL if nv50_dmac_create() failed early. */
	if (dmac->ptr) {
		struct device *dev = nvxx_device(device)->dev;
		dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}
}
 504
 505static int
 506nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
 507		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
 508		 struct nv50_dmac *dmac)
 509{
 510	struct nv50_disp_core_channel_dma_v0 *args = data;
 511	struct nvif_object pushbuf;
 512	int ret;
 513
 514	mutex_init(&dmac->lock);
 515	INIT_LIST_HEAD(&dmac->ctxdma);
 516
 517	dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
 518				       &dmac->handle, GFP_KERNEL);
 519	if (!dmac->ptr)
 520		return -ENOMEM;
 521
 522	ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
 523			       &(struct nv_dma_v0) {
 524					.target = NV_DMA_V0_TARGET_PCI_US,
 525					.access = NV_DMA_V0_ACCESS_RD,
 526					.start = dmac->handle + 0x0000,
 527					.limit = dmac->handle + 0x0fff,
 528			       }, sizeof(struct nv_dma_v0), &pushbuf);
 529	if (ret)
 530		return ret;
 531
 532	args->pushbuf = nvif_handle(&pushbuf);
 533
 534	ret = nv50_chan_create(device, disp, oclass, head, data, size,
 535			       &dmac->base);
 536	nvif_object_fini(&pushbuf);
 537	if (ret)
 538		return ret;
 539
 540	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
 541			       &(struct nv_dma_v0) {
 542					.target = NV_DMA_V0_TARGET_VRAM,
 543					.access = NV_DMA_V0_ACCESS_RDWR,
 544					.start = syncbuf + 0x0000,
 545					.limit = syncbuf + 0x0fff,
 546			       }, sizeof(struct nv_dma_v0),
 547			       &dmac->sync);
 548	if (ret)
 549		return ret;
 550
 551	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
 552			       &(struct nv_dma_v0) {
 553					.target = NV_DMA_V0_TARGET_VRAM,
 554					.access = NV_DMA_V0_ACCESS_RDWR,
 555					.start = 0,
 556					.limit = device->info.ram_user - 1,
 557			       }, sizeof(struct nv_dma_v0),
 558			       &dmac->vram);
 559	if (ret)
 560		return ret;
 561
 562	return ret;
 563}
 564
 565/******************************************************************************
 566 * Core
 567 *****************************************************************************/
 568
/* The core ("master") EVO channel. */
struct nv50_mast {
	struct nv50_dmac base;
};
 572
/* Create the core EVO channel, using the newest core class supported. */
static int
nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
		 u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,
	};
	/* Newest-first preference list, zero-terminated. */
	static const s32 oclass[] = {
		GP102_DISP_CORE_CHANNEL_DMA,
		GP100_DISP_CORE_CHANNEL_DMA,
		GM200_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
				syncbuf, &core->base);
}
 599
 600/******************************************************************************
 601 * Base
 602 *****************************************************************************/
 603
/* A base channel plus the sync-buffer location used for flip completion. */
struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;	/* offset into the sync buffer */
	u32 data;	/* value used for synchronisation */
};
 609
/* Create the base (primary plane) EVO channel for @head. */
static int
nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,
		.head = head,
	};
	/* Newest-first preference list, zero-terminated. */
	static const s32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}
 632
 633/******************************************************************************
 634 * Overlay
 635 *****************************************************************************/
 636
/* Overlay control (DMA) channel; one per head. */
struct nv50_ovly {
	struct nv50_dmac base;
};
 640
/* Create the overlay EVO channel for @head. */
static int
nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,
		.head = head,
	};
	/* Newest-first preference list, zero-terminated. */
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}
 662
/* Per-head state: the CRTC, its double-buffered LUTs, and the overlay
 * channels belonging to the head.
 */
struct nv50_head {
	struct nouveau_crtc base;
	struct {
		struct nouveau_bo *nvbo[2];	/* double-buffered LUTs */
		int next;			/* index of LUT to use next */
	} lut;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

/* Accessors from a drm_crtc / channel wrapper to the embedded types. */
#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass
 678
 
 
 
 
 
 
/* Top-level display state stored in nouveau_display()->priv. */
struct nv50_disp {
	struct nvif_object *disp;	/* display control object */
	struct nv50_mast mast;		/* core channel */

	struct nouveau_bo *sync;	/* shared sync/notifier buffer */

	struct mutex mutex;
};
 687
/* Fetch the nv50_disp hanging off the generic nouveau display. */
static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

/* Shorthand for the device's core channel. */
#define nv50_mast(d) (&nv50_disp(d)->mast)
 695
 
 
 
 
 
 
 696/******************************************************************************
 697 * EVO channel helpers
 698 *****************************************************************************/
 699static u32 *
 700evo_wait(void *evoc, int nr)
 701{
 702	struct nv50_dmac *dmac = evoc;
 703	struct nvif_device *device = dmac->base.device;
 704	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
 705
 706	mutex_lock(&dmac->lock);
 707	if (put + nr >= (PAGE_SIZE / 4) - 8) {
 708		dmac->ptr[put] = 0x20000000;
 709
 710		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
 711		if (nvif_msec(device, 2000,
 712			if (!nvif_rd32(&dmac->base.user, 0x0004))
 713				break;
 714		) < 0) {
 715			mutex_unlock(&dmac->lock);
 716			pr_err("nouveau: evo channel stalled\n");
 717			return NULL;
 718		}
 719
 720		put = 0;
 721	}
 722
 723	return dmac->ptr + put;
 724}
 725
/* Publish the methods written since evo_wait() by advancing PUT, and
 * release the channel mutex taken by evo_wait().
 */
static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}
 733
/* Emit an EVO method header (method @m, @s data words) at pushbuf
 * cursor @p; logs it when KMS debugging is enabled.
 */
#define evo_mthd(p, m, s) do {						\
	const u32 _m = (m), _s = (s);					\
	if (drm_debug & DRM_UT_KMS)					\
		pr_err("%04x %d %s\n", _m, _s, __func__);		\
	*((p)++) = ((_s << 18) | _m);					\
} while(0)
 740
/* Emit one data word for the current EVO method at pushbuf cursor @p. */
#define evo_data(p, d) do {						\
	const u32 _d = (d);						\
	if (drm_debug & DRM_UT_KMS)					\
		pr_err("\t%08x\n", _d);					\
	*((p)++) = _d;							\
} while(0)
 
 747
 748/******************************************************************************
 749 * Plane
 750 *****************************************************************************/
#define nv50_wndw(p) container_of((p), struct nv50_wndw, plane)

/* A display plane (base/cursor) and the channel it is programmed via. */
struct nv50_wndw {
	const struct nv50_wndw_func *func;	/* hw-specific plane ops */
	struct nv50_dmac *dmac;			/* channel for ctxdma objects */

	struct drm_plane plane;

	struct nvif_notify notify;
	u16 ntfy;	/* notifier offset within the sync buffer */
	u16 sema;	/* semaphore offset within the sync buffer */
	u32 data;
};
 764
/* Per-plane-type operations; sema/ntfy/image/lut/point program hardware
 * state, update flushes with an optional interlock.
 */
struct nv50_wndw_func {
	void *(*dtor)(struct nv50_wndw *);
	int (*acquire)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
		       struct nv50_head_atom *asyh);
	void (*release)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
			struct nv50_head_atom *asyh);
	void (*prepare)(struct nv50_wndw *, struct nv50_head_atom *asyh,
			struct nv50_wndw_atom *asyw);

	void (*sema_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*sema_clr)(struct nv50_wndw *);
	void (*ntfy_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*ntfy_clr)(struct nv50_wndw *);
	int (*ntfy_wait_begun)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_clr)(struct nv50_wndw *);
	void (*lut)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*point)(struct nv50_wndw *, struct nv50_wndw_atom *);

	u32 (*update)(struct nv50_wndw *, u32 interlock);
};
 786
 787static int
 788nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
 789{
 790	if (asyw->set.ntfy)
 791		return wndw->func->ntfy_wait_begun(wndw, asyw);
 792	return 0;
 
 793}
 794
 795static u32
 796nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 interlock, bool flush,
 797		    struct nv50_wndw_atom *asyw)
 798{
 799	if (asyw->clr.sema && (!asyw->set.sema || flush))
 800		wndw->func->sema_clr(wndw);
 801	if (asyw->clr.ntfy && (!asyw->set.ntfy || flush))
 802		wndw->func->ntfy_clr(wndw);
 803	if (asyw->clr.image && (!asyw->set.image || flush))
 804		wndw->func->image_clr(wndw);
 
 
 
 
 
 
 
 
 
 
 
 
 805
 806	return flush ? wndw->func->update(wndw, interlock) : 0;
 807}
 808
 809static u32
 810nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 interlock,
 811		    struct nv50_wndw_atom *asyw)
 
 
 812{
 813	if (interlock) {
 814		asyw->image.mode = 0;
 815		asyw->image.interval = 1;
 816	}
 817
 818	if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
 819	if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
 820	if (asyw->set.image) wndw->func->image_set(wndw, asyw);
 821	if (asyw->set.lut  ) wndw->func->lut      (wndw, asyw);
 822	if (asyw->set.point) wndw->func->point    (wndw, asyw);
 823
 824	return wndw->func->update(wndw, interlock);
 825}
 826
/* Release a plane that is being disabled; forget armed sema/ntfy handles
 * so the commit path knows nothing remains programmed.
 */
static void
nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
	wndw->func->release(wndw, asyw, asyh);
	asyw->ntfy.handle = 0;
	asyw->sema.handle = 0;
}
 838
/* Validate and encode the new framebuffer state for a visible plane:
 * dimensions, tiling layout/block, pitch and flip interval.
 */
static int
nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(asyw->state.fb);
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	int ret;

	NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);

	asyw->image.w = fb->base.width;
	asyw->image.h = fb->base.height;
	asyw->image.kind = fb->nvbo->kind;

	/* Async flips don't wait for vblank. */
	if (asyh->state.pageflip_flags & DRM_MODE_PAGE_FLIP_ASYNC)
		asyw->interval = 0;
	else
		asyw->interval = 1;

	if (asyw->image.kind) {
		/* Tiled buffer: block size encoding differs per generation. */
		asyw->image.layout = 0;
		if (drm->client.device.info.chipset >= 0xc0)
			asyw->image.block = fb->nvbo->mode >> 4;
		else
			asyw->image.block = fb->nvbo->mode;
		asyw->image.pitch = (fb->base.pitches[0] / 4) << 4;
	} else {
		/* Linear buffer. */
		asyw->image.layout = 1;
		asyw->image.block  = 0;
		asyw->image.pitch  = fb->base.pitches[0];
	}

	ret = wndw->func->acquire(wndw, asyw, asyh);
	if (ret)
		return ret;

	if (asyw->set.image) {
		if (!(asyw->image.mode = asyw->interval ? 0 : 1))
			asyw->image.interval = asyw->interval;
		else
			asyw->image.interval = 0;
	}

	return 0;
}
 885
/* Plane atomic_check: validate the new state and compute which hardware
 * resources must be set or cleared when the state is committed.
 */
static int
nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *harm = NULL, *asyh = NULL;
	bool varm = false, asyv = false, asym = false;
	int ret;

	NV_ATOMIC(drm, "%s atomic_check\n", plane->name);
	/* Head the plane is moving to (if any): visibility + modeset need. */
	if (asyw->state.crtc) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);
		asym = drm_atomic_crtc_needs_modeset(&asyh->state);
		asyv = asyh->state.active;
	}

	/* Head the plane is currently on (if any). */
	if (armw->state.crtc) {
		harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
		if (IS_ERR(harm))
			return PTR_ERR(harm);
		varm = harm->state.crtc->state->active;
	}

	if (asyv) {
		asyw->point.x = asyw->state.crtc_x;
		asyw->point.y = asyw->state.crtc_y;
		if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
			asyw->set.point = true;

		ret = nv50_wndw_atomic_check_acquire(wndw, asyw, asyh);
		if (ret)
			return ret;
	} else
	if (varm) {
		/* Was visible, no longer is: release its resources. */
		nv50_wndw_atomic_check_release(wndw, asyw, harm);
	} else {
		/* Not visible before or after: nothing to do. */
		return 0;
	}

	/* On disable or full modeset, clear whatever was armed before. */
	if (!asyv || asym) {
		asyw->clr.ntfy = armw->ntfy.handle != 0;
		asyw->clr.sema = armw->sema.handle != 0;
		if (wndw->func->image_clr)
			asyw->clr.image = armw->image.handle != 0;
		asyw->set.lut = wndw->func->lut && asyv;
	}

	return 0;
}
 939
 940static void
 941nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
 
 942{
 943	struct nouveau_framebuffer *fb = nouveau_framebuffer(old_state->fb);
 944	struct nouveau_drm *drm = nouveau_drm(plane->dev);
 
 
 
 
 945
 946	NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
 947	if (!old_state->fb)
 948		return;
 949
 950	nouveau_bo_unpin(fb->nvbo);
 951}
 
 
 
 952
 953static int
 954nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
 955{
 956	struct nouveau_framebuffer *fb = nouveau_framebuffer(state->fb);
 957	struct nouveau_drm *drm = nouveau_drm(plane->dev);
 958	struct nv50_wndw *wndw = nv50_wndw(plane);
 959	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
 960	struct nv50_head_atom *asyh;
 961	struct nv50_dmac_ctxdma *ctxdma;
 962	int ret;
 963
 964	NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, state->fb);
 965	if (!asyw->state.fb)
 966		return 0;
 
 967
 968	ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM, true);
 969	if (ret)
 970		return ret;
 
 
 
 
 
 
 
 
 
 
 
 971
 972	ctxdma = nv50_dmac_ctxdma_new(wndw->dmac, fb);
 973	if (IS_ERR(ctxdma)) {
 974		nouveau_bo_unpin(fb->nvbo);
 975		return PTR_ERR(ctxdma);
 976	}
 977
 978	asyw->state.fence = reservation_object_get_excl_rcu(fb->nvbo->bo.resv);
 979	asyw->image.handle = ctxdma->object.handle;
 980	asyw->image.offset = fb->nvbo->bo.offset;
 981
 982	if (wndw->func->prepare) {
 983		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
 984		if (IS_ERR(asyh))
 985			return PTR_ERR(asyh);
 
 
 
 
 986
 987		wndw->func->prepare(wndw, asyh, asyw);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 988	}
 
 
 
 989
 990	return 0;
 991}
 992
/* Atomic plane helper hooks shared by all nv50 plane types. */
static const struct drm_plane_helper_funcs
nv50_wndw_helper = {
	.prepare_fb = nv50_wndw_prepare_fb,
	.cleanup_fb = nv50_wndw_cleanup_fb,
	.atomic_check = nv50_wndw_atomic_check,
};
 999
/* Free a duplicated plane state (counterpart of duplicate_state). */
static void
nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
			       struct drm_plane_state *state)
{
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	__drm_atomic_helper_plane_destroy_state(&asyw->state);
	kfree(asyw);
}
1008
1009static struct drm_plane_state *
1010nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
1011{
1012	struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
1013	struct nv50_wndw_atom *asyw;
1014	if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
1015		return NULL;
1016	__drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
1017	asyw->interval = 1;
1018	asyw->sema = armw->sema;
1019	asyw->ntfy = armw->ntfy;
1020	asyw->image = armw->image;
1021	asyw->point = armw->point;
1022	asyw->lut = armw->lut;
1023	asyw->clr.mask = 0;
1024	asyw->set.mask = 0;
1025	return &asyw->state;
1026}
1027
/* Reset the plane to a zeroed software state (atomic .reset hook). */
static void
nv50_wndw_reset(struct drm_plane *plane)
{
	struct nv50_wndw_atom *asyw;

	if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
		return;

	if (plane->state)
		plane->funcs->atomic_destroy_state(plane, plane->state);
	plane->state = &asyw->state;
	plane->state->plane = plane;
	plane->state->rotation = DRM_MODE_ROTATE_0;
}
1042
/* Destroy a plane: the type-specific dtor returns the containing
 * allocation, which is freed only after drm_plane_cleanup().
 */
static void
nv50_wndw_destroy(struct drm_plane *plane)
{
	struct nv50_wndw *wndw = nv50_wndw(plane);
	void *data;
	nvif_notify_fini(&wndw->notify);
	data = wndw->func->dtor(wndw);
	drm_plane_cleanup(&wndw->plane);
	kfree(data);
}
1053
/* drm_plane_funcs shared by all nv50 plane types. */
static const struct drm_plane_funcs
nv50_wndw = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = nv50_wndw_destroy,
	.reset = nv50_wndw_reset,
	.atomic_duplicate_state = nv50_wndw_atomic_duplicate_state,
	.atomic_destroy_state = nv50_wndw_atomic_destroy_state,
};
1063
/* Suspend the plane's notifier. */
static void
nv50_wndw_fini(struct nv50_wndw *wndw)
{
	nvif_notify_put(&wndw->notify);
}
1069
/* (Re-)enable the plane's notifier. */
static void
nv50_wndw_init(struct nv50_wndw *wndw)
{
	nvif_notify_get(&wndw->notify);
}
1075
1076static int
1077nv50_wndw_ctor(const struct nv50_wndw_func *func, struct drm_device *dev,
1078	       enum drm_plane_type type, const char *name, int index,
1079	       struct nv50_dmac *dmac, const u32 *format, int nformat,
1080	       struct nv50_wndw *wndw)
1081{
1082	int ret;
1083
1084	wndw->func = func;
1085	wndw->dmac = dmac;
1086
1087	ret = drm_universal_plane_init(dev, &wndw->plane, 0, &nv50_wndw,
1088				       format, nformat, NULL,
1089				       type, "%s-%d", name, index);
1090	if (ret)
1091		return ret;
1092
1093	drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);
1094	return 0;
1095}
1096
1097/******************************************************************************
1098 * Cursor plane
1099 *****************************************************************************/
#define nv50_curs(p) container_of((p), struct nv50_curs, wndw)

/* Cursor plane: a wndw plus the cursor-immediate channel object. */
struct nv50_curs {
	struct nv50_wndw wndw;
	struct nvif_object chan;
};
1106
/* Latch pending cursor state (immediate channel; no interlock mask). */
static u32
nv50_curs_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0080, 0x00000000);
	return 0;
}
1114
/* Program the cursor position (y in the high half, x in the low). */
static void
nv50_curs_point(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0084, (asyw->point.y << 16) | asyw->point.x);
}
1121
/* Propagate the cursor image (VRAM ctxdma handle + offset) into the head
 * atom; flags a curs update only when something actually changed.
 */
static void
nv50_curs_prepare(struct nv50_wndw *wndw, struct nv50_head_atom *asyh,
		  struct nv50_wndw_atom *asyw)
{
	u32 handle = nv50_disp(wndw->plane.dev)->mast.base.vram.handle;
	u32 offset = asyw->image.offset;
	if (asyh->curs.handle != handle || asyh->curs.offset != offset) {
		asyh->curs.handle = handle;
		asyh->curs.offset = offset;
		asyh->set.curs = asyh->curs.visible;
	}
}
1134
/* Hide the cursor on the head the plane is leaving. */
static void
nv50_curs_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->curs.visible = false;
}
1141
/* Validate cursor plane state: no scaling, square 32x32 or 64x64
 * ARGB8888 image only; encodes the layout/format for the head.
 */
static int
nv50_curs_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	int ret;

	ret = drm_atomic_helper_check_plane_state(&asyw->state, &asyh->state,
						  DRM_PLANE_HELPER_NO_SCALING,
						  DRM_PLANE_HELPER_NO_SCALING,
						  true, true);
	asyh->curs.visible = asyw->state.visible;
	if (ret || !asyh->curs.visible)
		return ret;

	/* Hardware supports only 32x32 and 64x64 cursor images. */
	switch (asyw->state.fb->width) {
	case 32: asyh->curs.layout = 0; break;
	case 64: asyh->curs.layout = 1; break;
	default:
		return -EINVAL;
	}

	if (asyw->state.fb->width != asyw->state.fb->height)
		return -EINVAL;

	switch (asyw->state.fb->format->format) {
	case DRM_FORMAT_ARGB8888: asyh->curs.format = 1; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	return 0;
}
1175
/* Destroy the cursor channel; returns the allocation for the caller
 * (nv50_wndw_destroy) to free.
 */
static void *
nv50_curs_dtor(struct nv50_wndw *wndw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_object_fini(&curs->chan);
	return curs;
}
1183
/* Pixel formats accepted by the cursor plane. */
static const u32
nv50_curs_format[] = {
	DRM_FORMAT_ARGB8888,
};
1188
/* Cursor plane ops (no sema/ntfy/image channels — state lives on the head). */
static const struct nv50_wndw_func
nv50_curs = {
	.dtor = nv50_curs_dtor,
	.acquire = nv50_curs_acquire,
	.release = nv50_curs_release,
	.prepare = nv50_curs_prepare,
	.point = nv50_curs_point,
	.update = nv50_curs_update,
};
1198
/* Create the cursor plane for @head: picks the newest supported cursor
 * immediate class, constructs the DRM plane, then the channel object.
 */
static int
nv50_curs_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_curs **pcurs)
{
	/* Newest-first class preference list. */
	static const struct nvif_mclass curses[] = {
		{ GK104_DISP_CURSOR, 0 },
		{ GF110_DISP_CURSOR, 0 },
		{ GT214_DISP_CURSOR, 0 },
		{   G82_DISP_CURSOR, 0 },
		{  NV50_DISP_CURSOR, 0 },
		{}
	};
	struct nv50_disp_cursor_v0 args = {
		.head = head->base.index,
	};
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_curs *curs;
	int cid, ret;

	cid = nvif_mclass(disp->disp, curses);
	if (cid < 0) {
		NV_ERROR(drm, "No supported cursor immediate class\n");
		return cid;
	}

	if (!(curs = *pcurs = kzalloc(sizeof(*curs), GFP_KERNEL)))
		return -ENOMEM;

	ret = nv50_wndw_ctor(&nv50_curs, drm->dev, DRM_PLANE_TYPE_CURSOR,
			     "curs", head->base.index, &disp->mast.base,
			     nv50_curs_format, ARRAY_SIZE(nv50_curs_format),
			     &curs->wndw);
	if (ret) {
		kfree(curs);
		return ret;
	}

	ret = nvif_object_init(disp->disp, 0, curses[cid].oclass, &args,
			       sizeof(args), &curs->chan);
	if (ret) {
		/* NOTE(review): curs/*pcurs and the registered plane are not
		 * torn down here — presumably the caller's error path
		 * destroys the plane (which frees curs via dtor); verify.
		 */
		NV_ERROR(drm, "curs%04x allocation failed: %d\n",
			 curses[cid].oclass, ret);
		return ret;
	}

	return 0;
}
1246
1247/******************************************************************************
1248 * Primary plane
1249 *****************************************************************************/
1250#define nv50_base(p) container_of((p), struct nv50_base, wndw)
1251
1252struct nv50_base {
1253	struct nv50_wndw wndw;
1254	struct nv50_sync chan;
1255	int id;
1256};
1257
/* Notifier callback: keep the notify active after each event. */
static int
nv50_base_notify(struct nvif_notify *notify)
{
	return NVIF_NOTIFY_KEEP;
}
1263
/* Enable/disable LUT usage for the base image (method 0x00e0). */
static void
nv50_base_lut(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, asyw->lut.enable << 30);
		evo_kick(push, &base->chan);
	}
}
1275
/* Clear the scanout image: zero the presentation mode (0x0084) and the
 * image ctxdma handle (0x00c0).
 */
static void
nv50_base_image_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 4))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}
1289
/* Program the scanout image: presentation mode/interval, ctxdma handle,
 * then the surface description.  The surface method layout differs per
 * channel class generation (pre-G82 / pre-GF110 / GF110+).
 */
static void
nv50_base_image_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	const s32 oclass = base->chan.base.base.user.oclass;
	u32 *push;
	if ((push = evo_wait(&base->chan, 10))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, (asyw->image.mode << 8) |
			       (asyw->image.interval << 4));
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, asyw->image.handle);
		if (oclass < G82_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, (asyw->image.kind << 16) |
				       (asyw->image.format << 8));
		} else
		if (oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, asyw->image.format << 8);
		} else {
			/* GF110+: surface methods start at 0x0400 and the
			 * layout bit moved to bit 24. */
			evo_mthd(push, 0x0400, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 24) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, asyw->image.format << 8);
		}
		evo_kick(push, &base->chan);
	}
}
1335
/* Disable the completion notifier (method 0x00a4). */
static void
nv50_base_ntfy_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x00a4, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}
1347
/* Program the completion notifier: awaken flag + offset, then ctxdma
 * handle (methods 0x00a0/0x00a4).
 */
static void
nv50_base_ntfy_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 3))) {
		evo_mthd(push, 0x00a0, 2);
		evo_data(push, (asyw->ntfy.awaken << 30) | asyw->ntfy.offset);
		evo_data(push, asyw->ntfy.handle);
		evo_kick(push, &base->chan);
	}
}
1360
/* Disable the flip semaphore (method 0x0094). */
static void
nv50_base_sema_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}
1372
/* Program the flip semaphore: offset, acquire/release values and ctxdma
 * handle (methods 0x0088..0x0094).
 */
static void
nv50_base_sema_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 5))) {
		evo_mthd(push, 0x0088, 4);
		evo_data(push, asyw->sema.offset);
		evo_data(push, asyw->sema.acquire);
		evo_data(push, asyw->sema.release);
		evo_data(push, asyw->sema.handle);
		evo_kick(push, &base->chan);
	}
}
1387
/* Flush pending base-channel state (UPDATE method 0x0080) and return the
 * core-channel interlock bit for this base channel; the bit position
 * differs between pre-GF110 (8 bits per channel) and GF110+ (4 bits).
 */
static u32
nv50_base_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;

	if (!(push = evo_wait(&base->chan, 2)))
		return 0;
	evo_mthd(push, 0x0080, 1);
	evo_data(push, interlock);
	evo_kick(push, &base->chan);

	if (base->chan.base.base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA)
		return interlock ? 2 << (base->id * 8) : 0;
	return interlock ? 2 << (base->id * 4) : 0;
}
1404
/* Poll the notifier in the sync buffer (up to 2s) until the hardware
 * marks the flip as begun (status bits 0x40000000 of 0xc0000000).
 */
static int
nv50_base_ntfy_wait_begun(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	if (nvif_msec(&drm->client.device, 2000ULL,
		u32 data = nouveau_bo_rd32(disp->sync, asyw->ntfy.offset / 4);
		if ((data & 0xc0000000) == 0x40000000)
			break;
		usleep_range(1, 2);
	) < 0)
		return -ETIMEDOUT;
	return 0;
}
1419
/* Release the base plane from its head (cpp 0 marks it inactive). */
static void
nv50_base_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->base.cpp = 0;
}
1426
/* Atomic acquire hook for the base plane: validate the new plane state
 * (no scaling supported), record the framebuffer geometry on the head
 * state, and translate the DRM fourcc into the hardware surface format.
 *
 * Returns 0 on success, -EINVAL for unusable framebuffers, or an error
 * from the plane-state helper.
 */
static int
nv50_base_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	const struct drm_framebuffer *fb = asyw->state.fb;
	int ret;

	if (!fb->format->depth)
		return -EINVAL;

	ret = drm_atomic_helper_check_plane_state(&asyw->state, &asyh->state,
						  DRM_PLANE_HELPER_NO_SCALING,
						  DRM_PLANE_HELPER_NO_SCALING,
						  false, true);
	if (ret)
		return ret;

	asyh->base.depth = fb->format->depth;
	asyh->base.cpp = fb->format->cpp[0];
	asyh->base.x = asyw->state.src.x1 >> 16;
	asyh->base.y = asyw->state.src.y1 >> 16;
	asyh->base.w = asyw->state.fb->width;
	asyh->base.h = asyw->state.fb->height;

	/* Map DRM fourcc -> hardware format code; must stay in sync with
	 * the nv50_base_format[] table below.
	 */
	switch (fb->format->format) {
	case DRM_FORMAT_C8         : asyw->image.format = 0x1e; break;
	case DRM_FORMAT_RGB565     : asyw->image.format = 0xe8; break;
	case DRM_FORMAT_XRGB1555   :
	case DRM_FORMAT_ARGB1555   : asyw->image.format = 0xe9; break;
	case DRM_FORMAT_XRGB8888   :
	case DRM_FORMAT_ARGB8888   : asyw->image.format = 0xcf; break;
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010: asyw->image.format = 0xd1; break;
	case DRM_FORMAT_XBGR8888   :
	case DRM_FORMAT_ABGR8888   : asyw->image.format = 0xd5; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	asyw->lut.enable = 1;
	asyw->set.image = true;
	return 0;
}
1471
/* Destructor for a base window: tear down its EVO DMA channel and hand
 * the container back to the caller (nv50_wndw) for freeing.
 */
static void *
nv50_base_dtor(struct nv50_wndw *wndw)
{
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	struct nv50_base *base = nv50_base(wndw);
	nv50_dmac_destroy(&base->chan.base, disp->disp);
	return base;
}
1480
/* Framebuffer formats advertised for the base (primary) plane; must stay
 * in sync with the fourcc switch in nv50_base_acquire().
 */
static const u32
nv50_base_format[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ABGR8888,
};
1494
/* Window function table binding the base-channel implementations above
 * into the common nv50_wndw plane framework.
 */
static const struct nv50_wndw_func
nv50_base = {
	.dtor = nv50_base_dtor,
	.acquire = nv50_base_acquire,
	.release = nv50_base_release,
	.sema_set = nv50_base_sema_set,
	.sema_clr = nv50_base_sema_clr,
	.ntfy_set = nv50_base_ntfy_set,
	.ntfy_clr = nv50_base_ntfy_clr,
	.ntfy_wait_begun = nv50_base_ntfy_wait_begun,
	.image_set = nv50_base_image_set,
	.image_clr = nv50_base_image_clr,
	.lut = nv50_base_lut,
	.update = nv50_base_update,
};
1510
/* Create the base (primary) plane for a head: allocate the container,
 * register it as a DRM plane via nv50_wndw_ctor(), create the EVO base
 * channel, and hook up the flip-completion uevent notify.
 *
 * Returns 0 on success or a negative errno.
 *
 * NOTE(review): if nv50_base_create() or nvif_notify_init() fails, the
 * already-registered plane is not unwound here — presumably cleanup
 * happens via the DRM plane destroy path on driver teardown; confirm
 * these error paths do not leak.
 */
static int
nv50_base_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_base **pbase)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_base *base;
	int ret;

	if (!(base = *pbase = kzalloc(sizeof(*base), GFP_KERNEL)))
		return -ENOMEM;
	base->id = head->base.index;
	base->wndw.ntfy = EVO_FLIP_NTFY0(base->id);
	base->wndw.sema = EVO_FLIP_SEM0(base->id);
	base->wndw.data = 0x00000000;

	ret = nv50_wndw_ctor(&nv50_base, drm->dev, DRM_PLANE_TYPE_PRIMARY,
			     "base", base->id, &base->chan.base,
			     nv50_base_format, ARRAY_SIZE(nv50_base_format),
			     &base->wndw);
	if (ret) {
		kfree(base);
		return ret;
	}

	ret = nv50_base_create(&drm->client.device, disp->disp, base->id,
			       disp->sync->bo.offset, &base->chan);
	if (ret)
		return ret;

	/* Request a uevent on flip completion so the wndw layer can signal
	 * flip done to userspace.
	 */
	return nvif_notify_init(&base->chan.base.base.user, nv50_base_notify,
				false,
				NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT,
				&(struct nvif_notify_uevent_req) {},
				sizeof(struct nvif_notify_uevent_req),
				sizeof(struct nvif_notify_uevent_rep),
				&base->wndw.notify);
}
1548
1549/******************************************************************************
1550 * Head
1551 *****************************************************************************/
/* Push this head's procamp (saturation sin/cos pair) to the core
 * channel; the method offset depends on the display class generation.
 */
static void
nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1);
		else
			evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1);
		evo_data(push, (asyh->procamp.sat.sin << 20) |
			       (asyh->procamp.sat.cos << 8));
		evo_kick(push, core);
	}
}
1567
/* Push this head's dithering controls (mode, bit depth, enable) to the
 * core channel.  Three method layouts exist: pre-GF110, GF110, GK104+.
 */
static void
nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1);
		else
		if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1);
		else
			evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1);
		evo_data(push, (asyh->dither.mode << 3) |
			       (asyh->dither.bits << 1) |
			        asyh->dither.enable);
		evo_kick(push, core);
	}
}
 
 
 
 
 
 
 
1587
/* Program the overlay usage bounds for this head in the core channel.
 *
 * NOTE(review): the bounds are derived from asyh->base.cpp rather than
 * asyh->ovly.cpp — confirm this is intentional.  There is no cpp == 1
 * mapping here (unlike nv50_head_base()), so C8 surfaces hit WARN_ON.
 */
static void
nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		default:
			WARN_ON(1);
			break;
		}
		/* Usage-bound enable bit. */
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0904 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}
1616
/* Program the base-channel usage bounds for this head from the base
 * layer's bytes-per-pixel; bounds stay zero (disabled) when the head
 * has no base surface (cpp == 0).
 */
static void
nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		case 1: bounds |= 0x00000000; break;
		default:
			WARN_ON(1);
			break;
		}
		/* Usage-bound enable bit. */
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0900 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}
1646
/* Hide the hardware cursor on this head via the core channel.  G82 and
 * later classes must also detach the cursor's context DMA.
 */
static void
nv50_head_curs_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}
1671
1672static void
1673nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh)
1674{
1675	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1676	u32 *push;
1677	if ((push = evo_wait(core, 5))) {
1678		if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) {
1679			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
1680			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
1681						    (asyh->curs.format << 24));
1682			evo_data(push, asyh->curs.offset >> 8);
1683		} else
1684		if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) {
1685			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
1686			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
1687						    (asyh->curs.format << 24));
1688			evo_data(push, asyh->curs.offset >> 8);
1689			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
1690			evo_data(push, asyh->curs.handle);
1691		} else {
1692			evo_mthd(push, 0x0480 + head->base.index * 0x300, 2);
1693			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
1694						    (asyh->curs.format << 24));
1695			evo_data(push, asyh->curs.offset >> 8);
1696			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
1697			evo_data(push, asyh->curs.handle);
1698		}
1699		evo_kick(push, core);
1700	}
1701}
1702
/* Detach the core channel's ISO context DMA for this head, disabling
 * scanout of the core (base image) surface.
 */
static void
nv50_head_core_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, core);
	}
}
1717
/* Program the core channel's image surface for this head: offset, size,
 * memory layout/pitch/block config, format, context DMA handle and
 * on-screen position.  Three method layouts: pre-G82, G82..GF110, and
 * GF110+ (which also shifts the layout field to bit 24).
 */
static void
nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 9))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.kind << 16 |
				       asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
			/* EVO will complain with INVALID_STATE if we have an
			 * active cursor and (re)specify HeadSetContextDmaIso
			 * without also updating HeadSetOffsetCursor.
			 */
			asyh->set.curs = asyh->curs.visible;
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		} else {
			evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 24 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		}
		evo_kick(push, core);
	}
}
1771
/* Disable the input LUT for this head.  Note the disable value differs
 * on GF110+ (0x03000000) vs earlier classes (0x40000000), and G82+ must
 * additionally clear the LUT's context DMA.
 */
static void
nv50_head_lut_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}
1796
/* Copy a 256-entry DRM gamma blob into the hardware LUT buffer.
 *
 * Entries are 8 bytes apart (R/G/B words at +0/+2/+4).  Encoding by
 * mode: 0/1 (LORES/HIRES) use 11-bit components shifted left by 3;
 * mode 7 (INTERPOLATE_257_UNITY_RANGE) uses 14-bit components biased
 * by 0x6000.  A 257th entry is replicated after the loop, so the
 * backing buffer must hold at least 257 * 8 bytes.
 */
static void
nv50_head_lut_load(struct drm_property_blob *blob, int mode,
		   struct nouveau_bo *nvbo)
{
	struct drm_color_lut *in = (struct drm_color_lut *)blob->data;
	void __iomem *lut = (u8 *)nvbo_kmap_obj_iovirtual(nvbo);
	const int size = blob->length / sizeof(*in);
	int bits, shift, i;
	u16 zero, r, g, b;

	/* This can't happen.. But it shuts the compiler up. */
	if (WARN_ON(size != 256))
		return;

	switch (mode) {
	case 0: /* LORES. */
	case 1: /* HIRES. */
		bits = 11;
		shift = 3;
		zero = 0x0000;
		break;
	case 7: /* INTERPOLATE_257_UNITY_RANGE. */
		bits = 14;
		shift = 0;
		zero = 0x6000;
		break;
	default:
		WARN_ON(1);
		return;
	}

	for (i = 0; i < size; i++) {
		r = (drm_color_lut_extract(in[i].  red, bits) + zero) << shift;
		g = (drm_color_lut_extract(in[i].green, bits) + zero) << shift;
		b = (drm_color_lut_extract(in[i]. blue, bits) + zero) << shift;
		writew(r, lut + (i * 0x08) + 0);
		writew(g, lut + (i * 0x08) + 2);
		writew(b, lut + (i * 0x08) + 4);
	}

	/* INTERPOLATE modes require a "next" entry to interpolate with,
	 * so we replicate the last entry to deal with this for now.
	 */
	writew(r, lut + (i * 0x08) + 0);
	writew(g, lut + (i * 0x08) + 2);
	writew(b, lut + (i * 0x08) + 4);
}
1844
/* Enable the input LUT for this head: enable bit plus mode, buffer
 * offset, and (G82+) the context DMA handle for the LUT's backing
 * memory.  GF110+ moves the mode field to bits 24+ and takes two extra
 * (zero) parameters.
 */
static void
nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 7))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0x80000000 | asyh->lut.mode << 30);
			evo_data(push, asyh->lut.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0x80000000 | asyh->lut.mode << 30);
			evo_data(push, asyh->lut.offset >> 8);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, asyh->lut.handle);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4);
			evo_data(push, 0x80000000 | asyh->lut.mode << 24);
			evo_data(push, asyh->lut.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, asyh->lut.handle);
		}
		evo_kick(push, core);
	}
}
1874
/* Program this head's raster timings into the core channel.  The values
 * in asyh->mode were already converted to hardware conventions (biased
 * relative to sync start) by nv50_head_atomic_check_mode().
 */
static void
nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	struct nv50_head_mode *m = &asyh->mode;
	u32 *push;
	if ((push = evo_wait(core, 14))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
			evo_data(push, 0x00800000 | m->clock);
			evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
			evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_data(push, asyh->mode.v.blankus);
			evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
			evo_data(push, 0x00000000); /* ??? */
			evo_data(push, 0xffffff00);
			evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
			evo_data(push, m->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, m->clock * 1000);
		}
		evo_kick(push, core);
	}
}
1915
/* Program the head's viewport/scaler: input size (iW/iH) and output
 * size (oW/oH) as computed by nv50_head_atomic_check_view().
 */
static void
nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 10))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		} else {
			evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		}
		evo_kick(push, core);
	}
}
1943
/* Apply the "clear" half of an atomic flush: disable resources the new
 * state no longer uses.  With y set (full modeset), resources are
 * cleared even when they will be re-programmed afterwards.
 */
static void
nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y)
{
	if (asyh->clr.ilut && (!asyh->set.ilut || y))
		nv50_head_lut_clr(head);
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_core_clr(head);
	if (asyh->clr.curs && (!asyh->set.curs || y))
		nv50_head_curs_clr(head);
}
1954
/* Apply the "set" half of an atomic flush, programming everything the
 * new state marked dirty.  The input LUT is double-buffered through
 * head->lut.nvbo[]; lut.next flips after each load so a buffer still in
 * use by the hardware is never overwritten.
 */
static void
nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	if (asyh->set.view   ) nv50_head_view    (head, asyh);
	if (asyh->set.mode   ) nv50_head_mode    (head, asyh);
	if (asyh->set.ilut   ) {
		struct nouveau_bo *nvbo = head->lut.nvbo[head->lut.next];
		struct drm_property_blob *blob = asyh->state.gamma_lut;
		/* No blob: keep whatever the buffer already contains (see
		 * nv50_head_atomic_check_lut() for why that is valid).
		 */
		if (blob)
			nv50_head_lut_load(blob, asyh->lut.mode, nvbo);
		asyh->lut.offset = nvbo->bo.offset;
		head->lut.next ^= 1;
		nv50_head_lut_set(head, asyh);
	}
	if (asyh->set.core   ) nv50_head_core_set(head, asyh);
	if (asyh->set.curs   ) nv50_head_curs_set(head, asyh);
	if (asyh->set.base   ) nv50_head_base    (head, asyh);
	if (asyh->set.ovly   ) nv50_head_ovly    (head, asyh);
	if (asyh->set.dither ) nv50_head_dither  (head, asyh);
	if (asyh->set.procamp) nv50_head_procamp (head, asyh);
}
1976
1977static void
1978nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
1979			       struct nv50_head_atom *asyh,
1980			       struct nouveau_conn_atom *asyc)
1981{
1982	const int vib = asyc->procamp.color_vibrance - 100;
1983	const int hue = asyc->procamp.vibrant_hue - 90;
1984	const int adj = (vib > 0) ? 50 : 0;
1985	asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
1986	asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
1987	asyh->set.procamp = true;
1988}
1989
1990static void
1991nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
1992			      struct nv50_head_atom *asyh,
1993			      struct nouveau_conn_atom *asyc)
1994{
1995	struct drm_connector *connector = asyc->state.connector;
1996	u32 mode = 0x00;
 
1997
1998	if (asyc->dither.mode == DITHERING_MODE_AUTO) {
1999		if (asyh->base.depth > connector->display_info.bpc * 3)
2000			mode = DITHERING_MODE_DYNAMIC2X2;
2001	} else {
2002		mode = asyc->dither.mode;
2003	}
2004
2005	if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
2006		if (connector->display_info.bpc >= 8)
2007			mode |= DITHERING_DEPTH_8BPC;
2008	} else {
2009		mode |= asyc->dither.depth;
2010	}
2011
2012	asyh->dither.enable = mode;
2013	asyh->dither.bits = mode >> 1;
2014	asyh->dither.mode = mode >> 3;
2015	asyh->set.dither = true;
2016}
2017
/* Compute the head's viewport (asyh->view): input size from the user
 * mode, output size from the adjusted mode, then apply underscan
 * borders and CENTER/ASPECT scaling from the connector properties.
 */
static void
nv50_head_atomic_check_view(struct nv50_head_atom *armh,
			    struct nv50_head_atom *asyh,
			    struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	struct drm_display_mode *omode = &asyh->state.adjusted_mode;
	struct drm_display_mode *umode = &asyh->state.mode;
	int mode = asyc->scaler.mode;
	struct edid *edid;
	int umode_vdisplay, omode_hdisplay, omode_vdisplay;

	/* NOTE(review): with UNDERSCAN_AUTO and no EDID this NULL is
	 * passed to drm_detect_hdmi_monitor() below — verify that helper
	 * tolerates a NULL edid.
	 */
	if (connector->edid_blob_ptr)
		edid = (struct edid *)connector->edid_blob_ptr->data;
	else
		edid = NULL;

	if (!asyc->scaler.full) {
		/* SCALE_NONE: scan out the user mode directly. */
		if (mode == DRM_MODE_SCALE_NONE)
			omode = umode;
	} else {
		/* Non-EDID LVDS/eDP mode. */
		mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	/* For the user-specified mode, we must ignore doublescan and
	 * the like, but honor frame packing.
	 */
	umode_vdisplay = umode->vdisplay;
	if ((umode->flags & DRM_MODE_FLAG_3D_MASK) == DRM_MODE_FLAG_3D_FRAME_PACKING)
		umode_vdisplay += umode->vtotal;
	asyh->view.iW = umode->hdisplay;
	asyh->view.iH = umode_vdisplay;
	/* For the output mode, we can just use the stock helper. */
	drm_mode_get_hv_timing(omode, &omode_hdisplay, &omode_vdisplay);
	asyh->view.oW = omode_hdisplay;
	asyh->view.oH = omode_vdisplay;

	/* Add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
	    (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
	     drm_detect_hdmi_monitor(edid)))) {
		u32 bX = asyc->scaler.underscan.hborder;
		u32 bY = asyc->scaler.underscan.vborder;
		u32 r = (asyh->view.oH << 19) / asyh->view.oW; /* 19.13 fixed-point aspect ratio */

		if (bX) {
			asyh->view.oW -= (bX * 2);
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		} else {
			/* Default border: 1/16th of width plus 32 pixels. */
			asyh->view.oW -= (asyh->view.oW >> 4) + 32;
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
	}

	/* Handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation.
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
		asyh->view.oH = min((u16)umode_vdisplay, asyh->view.oH);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (asyh->view.oH < asyh->view.oW) {
			u32 r = (asyh->view.iW << 19) / asyh->view.iH;
			asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
		} else {
			u32 r = (asyh->view.iH << 19) / asyh->view.iW;
			asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	asyh->set.view = true;
}
 
2101
/* Decide the head's input-LUT state: pick the hardware LUT mode for the
 * display generation and whether the LUT must be (re)loaded.
 */
static void
nv50_head_atomic_check_lut(struct nv50_head *head,
			   struct nv50_head_atom *armh,
			   struct nv50_head_atom *asyh)
{
	struct nv50_disp *disp = nv50_disp(head->base.base.dev);

	/* An I8 surface without an input LUT makes no sense, and
	 * EVO will throw an error if you try.
	 *
	 * Legacy clients actually cause this due to the order in
	 * which they call ioctls, so we will enable the LUT with
	 * whatever contents the buffer already contains to avoid
	 * triggering the error check.
	 */
	if (!asyh->state.gamma_lut && asyh->base.cpp != 1) {
		/* No gamma blob and not I8: LUT off (handle 0). */
		asyh->lut.handle = 0;
		asyh->clr.ilut = armh->lut.visible;
		return;
	}

	if (disp->disp->oclass < GF110_DISP) {
		/* Pre-GF110: LORES for I8, HIRES otherwise; always reload. */
		asyh->lut.mode = (asyh->base.cpp == 1) ? 0 : 1;
		asyh->set.ilut = true;
	} else {
		/* GF110+: INTERPOLATE_257_UNITY_RANGE; reload only when the
		 * color management state actually changed.
		 */
		asyh->lut.mode = 7;
		asyh->set.ilut = asyh->state.color_mgmt_changed;
	}
	asyh->lut.handle = disp->mast.base.vram.handle;
}
2132
/* Translate the adjusted DRM display mode into the hardware's raster
 * timing representation (asyh->mode), consumed by nv50_head_mode().
 */
static void
nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	struct nv50_head_mode *m = &asyh->mode;
	u32 blankus;

	drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V | CRTC_STEREO_DOUBLE);

	/*
	 * DRM modes are defined in terms of a repeating interval
	 * starting with the active display area.  The hardware modes
	 * are defined in terms of a repeating interval starting one
	 * unit (pixel or line) into the sync pulse.  So, add bias.
	 */

	m->h.active = mode->crtc_htotal;
	m->h.synce  = mode->crtc_hsync_end - mode->crtc_hsync_start - 1;
	m->h.blanke = mode->crtc_hblank_end - mode->crtc_hsync_start - 1;
	m->h.blanks = m->h.blanke + mode->crtc_hdisplay;

	m->v.active = mode->crtc_vtotal;
	m->v.synce  = mode->crtc_vsync_end - mode->crtc_vsync_start - 1;
	m->v.blanke = mode->crtc_vblank_end - mode->crtc_vsync_start - 1;
	m->v.blanks = m->v.blanke + mode->crtc_vdisplay;

	/*XXX: Safe underestimate, even "0" works */
	/* Vertical blanking period in microseconds. */
	blankus = (m->v.active - mode->crtc_vdisplay - 2) * m->h.active;
	blankus *= 1000;
	blankus /= mode->crtc_clock;
	m->v.blankus = blankus;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		/* Second-field blanking for interlaced modes. */
		m->v.blank2e =  m->v.active + m->v.blanke;
		m->v.blank2s =  m->v.blank2e + mode->crtc_vdisplay;
		m->v.active  = (m->v.active * 2) + 1;
		m->interlace = true;
	} else {
		m->v.blank2e = 0;
		m->v.blank2s = 1;
		m->interlace = false;
	}
	m->clock = mode->crtc_clock;

	asyh->set.mode = true;
}
2179
/* CRTC atomic_check: find this head's connector state, recompute all
 * dirty head state (mode, LUT, view, dither, procamp, core/cursor
 * geometry), and derive the set/clr masks that drive the flush
 * functions during commit.
 */
static int
nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state); /* old state */
	struct nv50_head_atom *asyh = nv50_head_atom(state);       /* new state */
	struct nouveau_conn_atom *asyc = NULL;
	struct drm_connector_state *conns;
	struct drm_connector *conn;
	int i;

	NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
	if (asyh->state.active) {
		/* Locate the connector driving this CRTC in the new state. */
		for_each_new_connector_in_state(asyh->state.state, conn, conns, i) {
			if (conns->crtc == crtc) {
				asyc = nouveau_conn_atom(conns);
				break;
			}
		}

		if (armh->state.active) {
			if (asyc) {
				if (asyh->state.mode_changed)
					asyc->set.scaler = true;
				if (armh->base.depth != asyh->base.depth)
					asyc->set.dither = true;
			}
		} else {
			/* Head is being enabled: reprogram everything. */
			if (asyc)
				asyc->set.mask = ~0;
			asyh->set.mask = ~0;
		}

		if (asyh->state.mode_changed)
			nv50_head_atomic_check_mode(head, asyh);

		if (asyh->state.color_mgmt_changed ||
		    asyh->base.cpp != armh->base.cpp)
			nv50_head_atomic_check_lut(head, armh, asyh);
		asyh->lut.visible = asyh->lut.handle != 0;

		if (asyc) {
			if (asyc->set.scaler)
				nv50_head_atomic_check_view(armh, asyh, asyc);
			if (asyc->set.dither)
				nv50_head_atomic_check_dither(armh, asyh, asyc);
			if (asyc->set.procamp)
				nv50_head_atomic_check_procamp(armh, asyh, asyc);
		}

		/* Core surface geometry follows the base layer when one is
		 * visible...
		 */
		if ((asyh->core.visible = (asyh->base.cpp != 0))) {
			asyh->core.x = asyh->base.x;
			asyh->core.y = asyh->base.y;
			asyh->core.w = asyh->base.w;
			asyh->core.h = asyh->base.h;
		} else
		if ((asyh->core.visible = asyh->curs.visible) ||
		    (asyh->core.visible = asyh->lut.visible)) {
			/*XXX: We need to either find some way of having the
			 *     primary base layer appear black, while still
			 *     being able to display the other layers, or we
			 *     need to allocate a dummy black surface here.
			 */
			asyh->core.x = 0;
			asyh->core.y = 0;
			asyh->core.w = asyh->state.mode.hdisplay;
			asyh->core.h = asyh->state.mode.vdisplay;
		}
		asyh->core.handle = disp->mast.base.vram.handle;
		asyh->core.offset = 0;
		asyh->core.format = 0xcf;
		asyh->core.kind = 0;
		asyh->core.layout = 1;
		asyh->core.block = 0;
		asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
		asyh->set.base = armh->base.cpp != asyh->base.cpp;
		asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
	} else {
		/* Head inactive: everything off. */
		asyh->lut.visible = false;
		asyh->core.visible = false;
		asyh->curs.visible = false;
		asyh->base.cpp = 0;
		asyh->ovly.cpp = 0;
	}

	if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
		/* Fast path: only (re)program what actually changed. */
		if (asyh->core.visible) {
			if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
				asyh->set.core = true;
		} else
		if (armh->core.visible) {
			asyh->clr.core = true;
		}

		if (asyh->curs.visible) {
			if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
				asyh->set.curs = true;
		} else
		if (armh->curs.visible) {
			asyh->clr.curs = true;
		}
	} else {
		/* Full modeset: clear everything old, set everything new. */
		asyh->clr.ilut = armh->lut.visible;
		asyh->clr.core = armh->core.visible;
		asyh->clr.curs = armh->curs.visible;
		asyh->set.ilut = asyh->lut.visible;
		asyh->set.core = asyh->core.visible;
		asyh->set.curs = asyh->curs.visible;
	}

	/* Any head change requires the commit path to serialise against
	 * the core channel.
	 */
	if (asyh->clr.mask || asyh->set.mask)
		nv50_atom(asyh->state.state)->lock_core = true;
	return 0;
}
2296
/* Atomic helper vtable for heads; only atomic_check is needed here. */
static const struct drm_crtc_helper_funcs
nv50_head_help = {
	.atomic_check = nv50_head_atomic_check,
};
 
 
 
 
2301
2302static void
2303nv50_head_atomic_destroy_state(struct drm_crtc *crtc,
2304			       struct drm_crtc_state *state)
2305{
2306	struct nv50_head_atom *asyh = nv50_head_atom(state);
2307	__drm_atomic_helper_crtc_destroy_state(&asyh->state);
2308	kfree(asyh);
2309}
2310
/* Duplicate the current head atomic state: copy the DRM-core portion
 * via the helper, then every nouveau-private member, and start with
 * empty set/clr dirty masks.  Returns NULL on allocation failure.
 */
static struct drm_crtc_state *
nv50_head_atomic_duplicate_state(struct drm_crtc *crtc)
{
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh;
	if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state);
	asyh->view = armh->view;
	asyh->mode = armh->mode;
	asyh->lut  = armh->lut;
	asyh->core = armh->core;
	asyh->curs = armh->curs;
	asyh->base = armh->base;
	asyh->ovly = armh->ovly;
	asyh->dither = armh->dither;
	asyh->procamp = armh->procamp;
	asyh->clr.mask = 0;
	asyh->set.mask = 0;
	return &asyh->state;
}
2332
2333static void
2334__drm_atomic_helper_crtc_reset(struct drm_crtc *crtc,
2335			       struct drm_crtc_state *state)
2336{
2337	if (crtc->state)
2338		crtc->funcs->atomic_destroy_state(crtc, crtc->state);
2339	crtc->state = state;
2340	crtc->state->crtc = crtc;
 
 
 
 
 
 
 
2341}
2342
2343static void
2344nv50_head_reset(struct drm_crtc *crtc)
2345{
2346	struct nv50_head_atom *asyh;
2347
2348	if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL))))
2349		return;
2350
2351	__drm_atomic_helper_crtc_reset(crtc, &asyh->state);
2352}
2353
/* Tear down a head: destroy its overlay channels, release the LUT
 * buffer objects, and free the CRTC. */
static void
nv50_head_destroy(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	int i;

	/* Overlay DMA channel plus its immediate (PIO) companion. */
	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);

	for (i = 0; i < ARRAY_SIZE(head->lut.nvbo); i++)
		nouveau_bo_unmap_unpin_unref(&head->lut.nvbo[i]);

	drm_crtc_cleanup(crtc);
	/* NOTE(review): freeing via the crtc pointer assumes base.base is
	 * head's first member so this releases the whole nv50_head —
	 * confirm against the struct layout. */
	kfree(crtc);
}
2370
/* DRM CRTC vtable: standard atomic helpers, plus custom state
 * duplicate/destroy so the driver-private nv50_head_atom bookkeeping
 * travels with the DRM state. */
static const struct drm_crtc_funcs
nv50_head_func = {
	.reset = nv50_head_reset,
	.gamma_set = drm_atomic_helper_legacy_gamma_set,
	.destroy = nv50_head_destroy,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.atomic_duplicate_state = nv50_head_atomic_duplicate_state,
	.atomic_destroy_state = nv50_head_atomic_destroy_state,
};
2381
/* Create head 'index': its base (primary) and cursor planes, the CRTC
 * itself, the gamma LUT buffers, and the overlay channels.  Returns 0
 * on success, negative errno on failure.  Once the CRTC has been
 * initialised, failures funnel through nv50_head_destroy() for cleanup;
 * before that point, only the head allocation needs freeing. */
static int
nv50_head_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->client.device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct nv50_base *base;
	struct nv50_curs *curs;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	/* Planes must exist before the CRTC can be initialised with them. */
	ret = nv50_base_new(drm, head, &base);
	if (ret == 0)
		ret = nv50_curs_new(drm, head, &curs);
	if (ret) {
		kfree(head);
		return ret;
	}

	crtc = &head->base.base;
	drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane,
				  &curs->wndw.plane, &nv50_head_func,
				  "head-%d", head->base.index);
	drm_crtc_helper_add(crtc, &nv50_head_help);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* Gamma LUT backing store; presumably 1025 entries of 8 bytes,
	 * 256-byte aligned in VRAM — TODO confirm against hw docs. */
	for (i = 0; i < ARRAY_SIZE(head->lut.nvbo); i++) {
		ret = nouveau_bo_new_pin_map(&drm->client, 1025 * 8, 0x100,
					     TTM_PL_FLAG_VRAM,
					     &head->lut.nvbo[i]);
		if (ret)
			goto out;
	}

	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	/* Success also falls through here with ret == 0. */
	if (ret)
		nv50_head_destroy(crtc);
	return ret;
}
2437
2438/******************************************************************************
2439 * Output path helpers
2440 *****************************************************************************/
2441static void
2442nv50_outp_release(struct nouveau_encoder *nv_encoder)
 
 
2443{
2444	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
2445	struct {
2446		struct nv50_disp_mthd_v1 base;
2447	} args = {
2448		.base.version = 1,
2449		.base.method = NV50_DISP_MTHD_V1_RELEASE,
2450		.base.hasht  = nv_encoder->dcb->hasht,
2451		.base.hashm  = nv_encoder->dcb->hashm,
2452	};
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2453
2454	nvif_mthd(disp->disp, 0, &args, sizeof(args));
2455	nv_encoder->or = -1;
2456	nv_encoder->link = 0;
2457}
2458
2459static int
2460nv50_outp_acquire(struct nouveau_encoder *nv_encoder)
 
 
 
2461{
2462	struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
2463	struct nv50_disp *disp = nv50_disp(drm->dev);
2464	struct {
2465		struct nv50_disp_mthd_v1 base;
2466		struct nv50_disp_acquire_v0 info;
2467	} args = {
2468		.base.version = 1,
2469		.base.method = NV50_DISP_MTHD_V1_ACQUIRE,
2470		.base.hasht  = nv_encoder->dcb->hasht,
2471		.base.hashm  = nv_encoder->dcb->hashm,
 
 
 
 
 
 
2472	};
2473	int ret;
2474
2475	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
2476	if (ret) {
2477		NV_ERROR(drm, "error acquiring output path: %d\n", ret);
2478		return ret;
2479	}
2480
2481	nv_encoder->or = args.info.or;
2482	nv_encoder->link = args.info.link;
2483	return 0;
2484}
2485
/* Common output-path atomic_check: decide whether the hardware scaler
 * must be used, and if so substitute the panel's native mode as the
 * adjusted mode.  'native_mode' may be NULL for outputs that have no
 * fixed panel, in which case nothing is done.  Returns 0. */
static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);

	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	if (!native_mode)
		return 0;

	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Force use of scaler for non-EDID modes. */
			if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		/* Any explicit scaling mode drives the panel at its
		 * native timings. */
		mode = native_mode;
	}

	/* Only flag a modeset when the effective timings changed. */
	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}

	return 0;
}
2527
2528static int
2529nv50_outp_atomic_check(struct drm_encoder *encoder,
2530		       struct drm_crtc_state *crtc_state,
2531		       struct drm_connector_state *conn_state)
2532{
2533	struct nouveau_connector *nv_connector =
2534		nouveau_connector(conn_state->connector);
2535	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
2536					   nv_connector->native_mode);
2537}
2538
2539/******************************************************************************
2540 * DAC
2541 *****************************************************************************/
/* Disable a DAC: detach it from its head in the core channel, then
 * release the output path. */
static void
nv50_dac_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			/* DAC control method offset/stride differs between
			 * pre- and post-GF110 core channel classes. */
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}
2567
/* Enable a DAC: acquire an output path, then program the core channel
 * to route the head to this DAC with the mode's sync polarities. */
static void
nv50_dac_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u32 *push;

	nv50_outp_acquire(nv_encoder);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			/* Pre-GF110: crtc mask + sync polarity bits. */
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			/* GF110+: head-relative sync/magic words, then the
			 * per-OR crtc mask.  Bit meanings per hw class —
			 * TODO confirm against class documentation. */
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
2616
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
/* Load-detect on a DAC: ask the display firmware to perform a load
 * sense, using the VBIOS-provided test value when available (falling
 * back to 340 otherwise). */
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}
2643
/* DAC encoder helper vtable. */
static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_dac_enable,
	.disable = nv50_dac_disable,
	.detect = nv50_dac_detect
};
2651
/* Free a DAC encoder (allocated in nv50_dac_create). */
static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
2658
/* DAC encoder vtable. */
static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};
2663
/* Create a DAC encoder for the given DCB entry and attach it to
 * 'connector'.  Returns 0 on success, -ENOMEM on allocation failure. */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;

	/* DDC bus for EDID probing, when the DCB specifies one. */
	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
2693
2694/******************************************************************************
2695 * Audio
2696 *****************************************************************************/
/* Disable HDA audio (ELD) on the SOR driving nv_crtc's head.  An empty
 * SOR_HDA_ELD method (no ELD payload) turns audio off. */
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		/* Head index is encoded into the upper hashm bits. */
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
		.base.hasht   = nv_encoder->dcb->hasht,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
2715
2716static void
2717nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
2718{
2719	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2720	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2721	struct nouveau_connector *nv_connector;
2722	struct nv50_disp *disp = nv50_disp(encoder->dev);
2723	struct __packed {
2724		struct {
2725			struct nv50_disp_mthd_v1 mthd;
2726			struct nv50_disp_sor_hda_eld_v0 eld;
2727		} base;
2728		u8 data[sizeof(nv_connector->base.eld)];
2729	} args = {
2730		.base.mthd.version = 1,
2731		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
2732		.base.mthd.hasht   = nv_encoder->dcb->hasht,
2733		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
2734				     (0x0100 << nv_crtc->index),
2735	};
2736
2737	nv_connector = nouveau_encoder_connector_get(nv_encoder);
2738	if (!drm_detect_monitor_audio(nv_connector->edid))
2739		return;
2740
 
2741	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));
2742
2743	nvif_mthd(disp->disp, 0, &args,
2744		  sizeof(args.base) + drm_eld_size(args.data));
2745}
2746
2747/******************************************************************************
2748 * HDMI
2749 *****************************************************************************/
/* Disable HDMI on the SOR driving nv_crtc's head (pwr.state left 0). */
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		/* Head index is encoded into the upper hashm bits. */
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
2768
 
 
 
/* Enable HDMI on the SOR driving nv_crtc's head: pack AVI and vendor
 * infoframes, compute the max audio packet budget, and send everything
 * in one SOR_HDMI_PWR method, then enable audio. */
static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
		u8 infoframes[2 * 17]; /* two frames, up to 17 bytes each */
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;
	union hdmi_infoframe avi_frame;
	union hdmi_infoframe vendor_frame;
	int ret;
	int size;

	/* NOTE(review): nv_connector is dereferenced without a NULL
	 * check — nouveau_encoder_connector_get() may return NULL;
	 * confirm callers guarantee a connector here. */
	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi, mode,
						       false);
	if (!ret) {
		/* We have an AVI InfoFrame, populate it to the display */
		args.pwr.avi_infoframe_length
			= hdmi_infoframe_pack(&avi_frame, args.infoframes, 17);
	}

	ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi,
							  &nv_connector->base, mode);
	if (!ret) {
		/* We have a Vendor InfoFrame, populate it to the display */
		args.pwr.vendor_infoframe_length
			= hdmi_infoframe_pack(&vendor_frame,
					      args.infoframes
					      + args.pwr.avi_infoframe_length,
					      17);
	}

	/* Audio packet budget: blanking minus rekey and a fixed margin. */
	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	/* Send only the bytes actually packed. */
	size = sizeof(args.base)
		+ sizeof(args.pwr)
		+ args.pwr.avi_infoframe_length
		+ args.pwr.vendor_infoframe_length;
	nvif_mthd(disp->disp, 0, &args, size);
	nv50_audio_enable(encoder, mode);
}
2830
2831/******************************************************************************
2832 * MST
2833 *****************************************************************************/
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)

/* MST master: per-output DP MST topology state. */
struct nv50_mstm {
	struct nouveau_encoder *outp;	/* physical output path */

	struct drm_dp_mst_topology_mgr mgr;
	struct nv50_msto *msto[4];	/* stream encoders, indexed by head */

	bool modified;	/* payload changes pending prepare/cleanup */
	bool disabled;	/* no links left; release outp on next prepare */
	int links;	/* count of enabled streams on this output */
};

/* MST connector: one per downstream topology port. */
struct nv50_mstc {
	struct nv50_mstm *mstm;
	struct drm_dp_mst_port *port;	/* NULL once the port is gone */
	struct drm_connector connector;

	struct drm_display_mode *native;	/* preferred mode from probe */
	struct edid *edid;

	int pbn;	/* bandwidth units needed for the current mode */
};

/* MST stream encoder: binds one head to an MST connector. */
struct nv50_msto {
	struct drm_encoder encoder;

	struct nv50_head *head;
	struct nv50_mstc *mstc;
	bool disabled;	/* pending detach; cleared in msto_cleanup */
};
2867
/* Look up the payload-table entry matching this stream's VCPI.  The
 * first loop is purely diagnostic (dumps the whole payload table);
 * the second does the actual search.  Returns NULL when the VCPI has
 * no allocated payload. */
static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;

	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}

	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}

	return NULL;
}
2892
/* Post-commit cleanup for one stream: deallocate the VCPI once its
 * payload has left the table, and finish a pending detach. */
static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
		drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
	if (msto->disabled) {
		msto->mstc = NULL;
		msto->head = NULL;
		msto->disabled = false;
	}
}
2909
/* Program this stream's VCPI (timeslot range and bandwidth) into the
 * hardware via the SOR_DP_MST_VCPI method.  With no allocated payload
 * the zeroed args effectively clear the stream's slots. */
static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht  = mstm->outp->dcb->hasht,
		/* Head index is encoded into the upper hashm bits. */
		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
			       (0x0100 << msto->head->base.index),
	};

	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}

	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);
	nvif_mthd(&drm->display->disp, 0, &args, sizeof(args));
}
2944
/* Stream atomic_check: compute the PBN the requested mode needs, verify
 * the topology has enough free timeslots, then apply the common view
 * checks against the connector's native mode. */
static int
nv50_msto_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
	struct nv50_mstm *mstm = mstc->mstm;
	/* 3 bytes per pixel group at the sink's bit depth. */
	int bpp = conn_state->connector->display_info.bpc * 3;
	int slots;

	mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);

	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
	if (slots < 0)
		return slots;

	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   mstc->native);
}
2964
 
 
 
/* Enable a stream: find the connector routed to this encoder, allocate
 * its VCPI, acquire the physical output on first use, and point the
 * SOR at the head with DP MST protocol/depth settings. */
static void
nv50_msto_enable(struct drm_encoder *encoder)
{
	struct nv50_head *head = nv50_head(encoder->crtc);
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = NULL;
	struct nv50_mstm *mstm = NULL;
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	u8 proto, depth;
	int slots;
	bool r;

	/* Locate the connector whose state routes to this encoder. */
	drm_connector_list_iter_begin(encoder->dev, &conn_iter);
	drm_for_each_connector_iter(connector, &conn_iter) {
		if (connector->state->best_encoder == &msto->encoder) {
			mstc = nv50_mstc(connector);
			mstm = mstc->mstm;
			break;
		}
	}
	drm_connector_list_iter_end(&conn_iter);

	if (WARN_ON(!mstc))
		return;

	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, slots);
	WARN_ON(!r);

	/* First active stream acquires the physical output path. */
	if (!mstm->links++)
		nv50_outp_acquire(mstm->outp);

	/* Protocol selects which DP link the output was assigned. */
	if (mstm->outp->link & 1)
		proto = 0x8;
	else
		proto = 0x9;

	switch (mstc->connector.display_info.bpc) {
	case  6: depth = 0x2; break;
	case  8: depth = 0x5; break;
	case 10:
	default: depth = 0x6; break;
	}

	mstm->outp->update(mstm->outp, head->base.index,
			   &head->base.base.state->adjusted_mode, proto, depth);

	msto->head = head;
	msto->mstc = mstc;
	mstm->modified = true;
}
3017
/* Disable a stream: return its timeslots, detach the SOR from the head,
 * and flag the topology for prepare/cleanup.  Dropping the last link
 * marks the whole mstm disabled so prepare can release the output. */
static void
nv50_msto_disable(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	if (mstc->port)
		drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);

	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
	mstm->modified = true;
	if (!--mstm->links)
		mstm->disabled = true;
	msto->disabled = true;
}
3034
/* MST stream encoder helper vtable. */
static const struct drm_encoder_helper_funcs
nv50_msto_help = {
	.disable = nv50_msto_disable,
	.enable = nv50_msto_enable,
	.atomic_check = nv50_msto_atomic_check,
};
3041
/* Free an MST stream encoder (allocated in nv50_msto_new). */
static void
nv50_msto_destroy(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	drm_encoder_cleanup(&msto->encoder);
	kfree(msto);
}
3049
/* MST stream encoder vtable. */
static const struct drm_encoder_funcs
nv50_msto = {
	.destroy = nv50_msto_destroy,
};
3054
3055static int
3056nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
3057	      struct nv50_msto **pmsto)
3058{
3059	struct nv50_msto *msto;
3060	int ret;
3061
3062	if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
3063		return -ENOMEM;
3064
3065	ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
3066			       DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
3067	if (ret) {
3068		kfree(*pmsto);
3069		*pmsto = NULL;
3070		return ret;
3071	}
3072
3073	drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
3074	msto->encoder.possible_crtcs = heads;
3075	return 0;
3076}
3077
3078static struct drm_encoder *
3079nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
3080			      struct drm_connector_state *connector_state)
3081{
3082	struct nv50_head *head = nv50_head(connector_state->crtc);
3083	struct nv50_mstc *mstc = nv50_mstc(connector);
3084	if (mstc->port) {
3085		struct nv50_mstm *mstm = mstc->mstm;
3086		return &mstm->msto[head->base.index]->encoder;
3087	}
3088	return NULL;
3089}
3090
3091static struct drm_encoder *
3092nv50_mstc_best_encoder(struct drm_connector *connector)
3093{
3094	struct nv50_mstc *mstc = nv50_mstc(connector);
3095	if (mstc->port) {
3096		struct nv50_mstm *mstm = mstc->mstm;
3097		return &mstm->msto[0]->encoder;
3098	}
3099	return NULL;
3100}
3101
/* Accept every mode here; actual bandwidth limits are enforced later
 * in nv50_msto_atomic_check via the VCPI slot lookup. */
static enum drm_mode_status
nv50_mstc_mode_valid(struct drm_connector *connector,
		     struct drm_display_mode *mode)
{
	return MODE_OK;
}
3108
/* Probe the MST port's EDID, refresh the connector's mode list, default
 * the bit depth to 8bpc when the EDID doesn't say, and recompute the
 * cached native mode.  Returns the number of modes added. */
static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret = 0;

	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
	drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid);
	if (mstc->edid)
		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);

	if (!mstc->connector.display_info.bpc)
		mstc->connector.display_info.bpc = 8;

	/* Replace any previously cached native mode. */
	if (mstc->native)
		drm_mode_destroy(mstc->connector.dev, mstc->native);
	mstc->native = nouveau_conn_native_mode(&mstc->connector);
	return ret;
}
3128
/* MST connector helper vtable. */
static const struct drm_connector_helper_funcs
nv50_mstc_help = {
	.get_modes = nv50_mstc_get_modes,
	.mode_valid = nv50_mstc_mode_valid,
	.best_encoder = nv50_mstc_best_encoder,
	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
};
3136
/* Detect: delegate to the MST topology manager; a NULL port means the
 * branch device removed this connector. */
static enum drm_connector_status
nv50_mstc_detect(struct drm_connector *connector, bool force)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (!mstc->port)
		return connector_status_disconnected;
	return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
}
3145
/* Free an MST connector (allocated in nv50_mstc_new). */
static void
nv50_mstc_destroy(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	drm_connector_cleanup(&mstc->connector);
	kfree(mstc);
}
3153
/* MST connector vtable: shares nouveau's common connector state
 * handling with the non-MST connectors. */
static const struct drm_connector_funcs
nv50_mstc = {
	.reset = nouveau_conn_reset,
	.detect = nv50_mstc_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = nv50_mstc_destroy,
	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
	.atomic_set_property = nouveau_conn_atomic_set_property,
	.atomic_get_property = nouveau_conn_atomic_get_property,
};
3165
/* Create a connector for an MST port: initialise it, attach nouveau's
 * connector properties, link it to every stream encoder, and record
 * the topology path.  On failure *pmstc is reset to NULL. */
static int
nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
	      const char *path, struct nv50_mstc **pmstc)
{
	struct drm_device *dev = mstm->outp->base.base.dev;
	struct nv50_mstc *mstc;
	int ret, i;

	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
		return -ENOMEM;
	mstc->mstm = mstm;
	mstc->port = port;

	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		kfree(*pmstc);
		*pmstc = NULL;
		return ret;
	}

	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);

	mstc->connector.funcs->reset(&mstc->connector);
	nouveau_conn_attach_properties(&mstc->connector);

	/* Any head's stream encoder may drive this connector. */
	for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto[i]; i++)
		drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);

	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
	drm_mode_connector_set_path_property(&mstc->connector, path);
	return 0;
}
3200
/* Post-commit topology cleanup: wait for allocation-change ACK, finish
 * the payload update (part 2), then run per-stream cleanup for every
 * stream belonging to this topology. */
static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
	/* NOTE(review): both return values are ignored — presumably
	 * best-effort by design; confirm. */
	ret = drm_dp_check_act_status(&mstm->mgr);

	ret = drm_dp_update_payload_part2(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_cleanup(msto);
		}
	}

	mstm->modified = false;
}
3224
/* Pre-commit topology update: write the new payload table (part 1),
 * program each stream's VCPI, and release the physical output once the
 * last stream has gone away. */
static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
	/* NOTE(review): return value ignored — presumably best-effort;
	 * confirm. */
	ret = drm_dp_update_payload_part1(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_prepare(msto);
		}
	}

	if (mstm->disabled) {
		if (!mstm->links)
			nv50_outp_release(mstm->outp);
		mstm->disabled = false;
	}
}
3250
/* Topology-manager hotplug callback: forward to the DRM device. */
static void
nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
}
3257
/* Topology callback: an MST port went away.  Unregister the connector,
 * detach it from fbdev, clear the port pointer under the connection
 * mutex (detect/best_encoder test it), and drop our reference. */
static void
nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
			    struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nv50_mstc *mstc = nv50_mstc(connector);

	drm_connector_unregister(&mstc->connector);

	drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);

	drm_modeset_lock(&drm->dev->mode_config.connection_mutex, NULL);
	mstc->port = NULL;
	drm_modeset_unlock(&drm->dev->mode_config.connection_mutex);

	drm_connector_unreference(&mstc->connector);
}
3275
/* Topology callback: expose a freshly-created MST connector to fbdev
 * and userspace. */
static void
nv50_mstm_register_connector(struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);

	drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);

	drm_connector_register(connector);
}
3285
/* Topology callback: create a connector for a newly-discovered MST
 * port.  Returns NULL on failure (the manager treats that as "no
 * connector"). */
static struct drm_connector *
nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
			struct drm_dp_mst_port *port, const char *path)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	struct nv50_mstc *mstc;
	int ret;

	ret = nv50_mstc_new(mstm, port, path, &mstc);
	if (ret) {
		/* mstc may hold a half-initialised connector on failure. */
		if (mstc)
			mstc->connector.funcs->destroy(&mstc->connector);
		return NULL;
	}

	return &mstc->connector;
}
3303
/* DP MST topology-manager callbacks. */
static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
	.add_connector = nv50_mstm_add_connector,
	.register_connector = nv50_mstm_register_connector,
	.destroy_connector = nv50_mstm_destroy_connector,
	.hotplug = nv50_mstm_hotplug,
};
3311
/* Service an MST interrupt: read the ESI block, let the topology
 * manager handle the IRQ, and ack the handled events.  Loops until no
 * more events are pending; a failed DPCD read tears down MST mode. */
void
nv50_mstm_service(struct nv50_mstm *mstm)
{
	struct drm_dp_aux *aux = mstm ? mstm->mgr.aux : NULL;
	bool handled = true;
	int ret;
	u8 esi[8] = {};

	if (!aux)
		return;

	while (handled) {
		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (ret != 8) {
			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
			return;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		/* Ack the serviced event bytes (ESI1..ESI3). */
		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
	}
}
3337
/* Drop out of MST mode (e.g. on unplug); safe on a NULL mstm. */
void
nv50_mstm_remove(struct nv50_mstm *mstm)
{
	if (mstm)
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
}
3344
/* Switch MST on/off: toggle DP_MST_EN in the sink's MSTM_CTRL register
 * (DP 1.2+ sinks only, i.e. dpcd rev >= 0x12), then tell the display
 * firmware about the new link state.  Returns 0 or negative errno. */
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp;
	int ret;

	if (dpcd >= 0x12) {
		/* Read-modify-write the sink's MST control bit. */
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
		if (ret < 0)
			return ret;

		dpcd &= ~DP_MST_EN;
		if (state)
			dpcd |= DP_MST_EN;

		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
		if (ret < 0)
			return ret;
	}

	return nvif_mthd(disp, 0, &args, sizeof(args));
}
3379
/* Probe the sink for MST support and bring MST up/down accordingly.
 *
 * @dpcd:  caller's DPCD cache; dpcd[0] is the revision.  dpcd[1] is
 *         OVERWRITTEN with the sink's MSTM_CAP byte, and dpcd[0] is
 *         downgraded to 0x11 when the sink lacks MST, so the caller's
 *         cache reflects the effective capabilities.
 * @allow: non-zero if the caller permits MST to be enabled
 *
 * Returns the resulting mst_state (0/1) on success, negative on error.
 */
int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	int ret, state = 0;

	if (!mstm)
		return 0;

	if (dpcd[0] >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			return ret;

		if (!(dpcd[1] & DP_MST_CAP))
			dpcd[0] = 0x11;
		else
			state = allow;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], state);
	if (ret)
		return ret;

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
	if (ret)
		/* Topology bring-up failed: roll the link back to SST. */
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return mstm->mgr.mst_state;
}
3409
3410static void
3411nv50_mstm_fini(struct nv50_mstm *mstm)
3412{
3413	if (mstm && mstm->mgr.mst_state)
3414		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
3415}
3416
3417static void
3418nv50_mstm_init(struct nv50_mstm *mstm)
3419{
3420	if (mstm && mstm->mgr.mst_state)
3421		drm_dp_mst_topology_mgr_resume(&mstm->mgr);
3422}
3423
3424static void
3425nv50_mstm_del(struct nv50_mstm **pmstm)
3426{
3427	struct nv50_mstm *mstm = *pmstm;
3428	if (mstm) {
3429		kfree(*pmstm);
3430		*pmstm = NULL;
3431	}
3432}
3433
/* Allocate and initialize the MST state for a DP encoder: one topology
 * manager plus one MST encoder (msto) per head the output can drive.
 *
 * On failure *pmstm may still point at a partially-constructed object;
 * the caller is expected to dispose of it via nv50_mstm_del().
 *
 * Returns 0 on success or a negative error code.
 */
static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct nv50_mstm **pmstm)
{
	/* One MST payload per head this output can drive. */
	const int max_payloads = hweight8(outp->dcb->heads);
	struct drm_device *dev = outp->base.base.dev;
	struct nv50_mstm *mstm;
	int ret, i;
	u8 dpcd;

	/* This is a workaround for some monitors not functioning
	 * correctly in MST mode on initial module load.  I think
	 * some bad interaction with the VBIOS may be responsible.
	 *
	 * A good ol' off and on again seems to work here ;)
	 */
	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
	if (ret >= 0 && dpcd >= 0x12)
		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);

	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
		return -ENOMEM;
	mstm->outp = outp;
	mstm->mgr.cbs = &nv50_mstm;

	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
					   max_payloads, conn_base_id);
	if (ret)
		return ret;

	/* Fake encoders, one per potential payload/head. */
	for (i = 0; i < max_payloads; i++) {
		ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
				    i, &mstm->msto[i]);
		if (ret)
			return ret;
	}

	return 0;
}
3473
3474/******************************************************************************
3475 * SOR
3476 *****************************************************************************/
/* Program the SOR control state for @head on the core EVO channel.
 *
 * With a NULL @mode the head is detached from this SOR (its bit is
 * cleared from the cached ctrl word); otherwise the protocol and head
 * bits are merged in.  Pre-GF110 hardware encodes sync polarity and
 * depth directly in the SOR method (0x0600+or*0x40); GF110+ programs
 * them through the per-head 0x0404 method and uses 0x0200+or*0x20 for
 * the SOR control instead.
 */
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
		struct drm_display_mode *mode, u8 proto, u8 depth)
{
	struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base;
	u32 *push;

	if (!mode) {
		nv_encoder->ctrl &= ~BIT(head);
		/* No heads left attached: drop protocol bits as well. */
		if (!(nv_encoder->ctrl & 0x0000000f))
			nv_encoder->ctrl = 0;
	} else {
		nv_encoder->ctrl |= proto << 8;
		nv_encoder->ctrl |= BIT(head);
	}

	if ((push = evo_wait(core, 6))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			if (mode) {
				/* Sync polarity + depth live in the SOR
				 * control word on pre-GF110.
				 */
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					nv_encoder->ctrl |= 0x00001000;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					nv_encoder->ctrl |= 0x00002000;
				nv_encoder->ctrl |= depth << 16;
			}
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
		} else {
			if (mode) {
				u32 magic = 0x31ec6000 | (head << 25);
				u32 syncs = 0x00000001;
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					syncs |= 0x00000008;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					syncs |= 0x00000010;
				if (mode->flags & DRM_MODE_FLAG_INTERLACE)
					magic |= 0x00000001;

				evo_mthd(push, 0x0404 + (head * 0x300), 2);
				evo_data(push, syncs | (depth << 6));
				evo_data(push, magic);
			}
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		}
		evo_data(push, nv_encoder->ctrl);
		evo_kick(push, core);
	}
}
3524
/* Disable an SOR: put a DP sink into power-save (D3), detach the head
 * from the SOR, and shut down audio/HDMI infoframes before releasing
 * the output resource.
 */
static void
nv50_sor_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		struct nvkm_i2c_aux *aux = nv_encoder->aux;
		u8 pwr;

		if (aux) {
			/* DP sink present: request power state D3.
			 * Best-effort; read/modify/write failures are
			 * deliberately ignored.
			 */
			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
			if (ret == 0) {
				pwr &= ~DP_SET_POWER_MASK;
				pwr |=  DP_SET_POWER_D3;
				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
			}
		}

		/* NULL mode detaches the head from the SOR. */
		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
		nv50_audio_disable(encoder, nv_crtc);
		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
		nv50_outp_release(nv_encoder);
	}
}
3552
/* Enable an SOR for the mode currently committed on its CRTC.
 *
 * Selects the SOR protocol and (for DP) pixel depth from the DCB output
 * type, runs the VBIOS LVDS script where applicable, then pushes the
 * final control state via nv_encoder->update() (nv50_sor_update).
 * Protocol/depth values are raw hardware encodings.
 */
static void
nv50_sor_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u8 proto = 0xf;		/* 0xf = invalid/unset */
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;
	nv50_outp_acquire(nv_encoder);

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 *  - Need to (i.e. rate > 165MHz)
			 *  - DCB says we can
			 *  - Not an HDMI monitor, since there's no dual-link
			 *    on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_enable(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		/* Build the LVDS script argument: bit 8 = dual-link,
		 * bit 9 = 24-bit panel.
		 */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* SPWG panels encode dual-link in EDID
				 * byte 121.
				 */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		/* Hardware depth codes: 0x2 = 18bpp, 0x5 = 24bpp,
		 * 0x6 = 30bpp.
		 */
		if (nv_connector->base.display_info.bpc == 6)
			depth = 0x2;
		else
		if (nv_connector->base.display_info.bpc == 8)
			depth = 0x5;
		else
			depth = 0x6;

		if (nv_encoder->link & 1)
			proto = 0x8;
		else
			proto = 0x9;

		nv50_audio_enable(encoder, mode);
		break;
	default:
		BUG();
		break;
	}

	nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth);
}
3654
/* Encoder helper callbacks for SOR outputs (TMDS/LVDS/DP). */
static const struct drm_encoder_helper_funcs
nv50_sor_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_sor_enable,
	.disable = nv50_sor_disable,
};
3661
3662static void
3663nv50_sor_destroy(struct drm_encoder *encoder)
3664{
3665	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3666	nv50_mstm_del(&nv_encoder->dp.mstm);
3667	drm_encoder_cleanup(encoder);
3668	kfree(encoder);
3669}
3670
/* Encoder ops for SOR outputs. */
static const struct drm_encoder_funcs
nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
3675
/* Create an SOR encoder for a DCB output entry and attach it to
 * @connector.  For DP outputs this also selects the i2c-over-aux
 * implementation and, on GF110+, sets up MST support.
 *
 * Returns 0 on success or a negative error code.
 */
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->update = nv50_sor_update;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_help);

	drm_mode_connector_attach_encoder(connector, encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nv50_disp *disp = nv50_disp(encoder->dev);
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			if (disp->disp->oclass < GF110_DISP) {
				/* HW has no support for address-only
				 * transactions, so we're required to
				 * use custom I2C-over-AUX code.
				 */
				nv_encoder->i2c = &aux->i2c;
			} else {
				nv_encoder->i2c = &nv_connector->aux.ddc;
			}
			nv_encoder->aux = aux;
		}

		/*TODO: Use DP Info Table to check for support. */
		if (disp->disp->oclass >= GF110_DISP) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
					    nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			if (ret)
				return ret;
		}
	} else {
		/* Non-DP outputs use a plain i2c bus for DDC. */
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return 0;
}
3744
3745/******************************************************************************
3746 * PIOR
3747 *****************************************************************************/
3748static int
3749nv50_pior_atomic_check(struct drm_encoder *encoder,
3750		       struct drm_crtc_state *crtc_state,
3751		       struct drm_connector_state *conn_state)
3752{
3753	int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
3754	if (ret)
3755		return ret;
3756	crtc_state->adjusted_mode.clock *= 2;
3757	return 0;
3758}
3759
/* Disable a PIOR: clear its control method on the core channel (only
 * programmed on pre-GF110 here) and release the output resource.
 */
static void
nv50_pior_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				/* Zero the PIOR control word. */
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}
3782
/* Enable a PIOR for the mode committed on its CRTC: select depth from
 * the connector's bpc, protocol from the DCB type, and program the
 * control method on the core channel (pre-GF110 path only here).
 */
static void
nv50_pior_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u8 owner = 1 << nv_crtc->index;
	u8 proto, depth;
	u32 *push;

	nv50_outp_acquire(nv_encoder);

	/* Hardware depth codes: 0x6 = 30bpp, 0x5 = 24bpp, 0x2 = 18bpp. */
	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_connector->base.display_info.bpc) {
	case 10: depth = 0x6; break;
	case  8: depth = 0x5; break;
	case  6: depth = 0x2; break;
	default: depth = 0x0; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		BUG();
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 ctrl = (depth << 16) | (proto << 8) | owner;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				ctrl |= 0x00001000;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				ctrl |= 0x00002000;
			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
			evo_data(push, ctrl);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
3832
/* Encoder helper callbacks for PIOR (external encoder) outputs. */
static const struct drm_encoder_helper_funcs
nv50_pior_help = {
	.atomic_check = nv50_pior_atomic_check,
	.enable = nv50_pior_enable,
	.disable = nv50_pior_disable,
};
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3839
/* Destroy a PIOR encoder: DRM core cleanup, then free the memory. */
static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
3846
/* Encoder ops for PIOR outputs. */
static const struct drm_encoder_funcs
nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
3851
/* Create a PIOR encoder for a DCB output entry and attach it to
 * @connector.  PIORs drive external TMDS/DP encoder chips, so the DDC
 * channel comes from the external-device (extdev) i2c/aux port.
 *
 * Returns 0 on success, -ENODEV for unsupported DCB types, or -ENOMEM.
 */
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc  = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc  = aux ? &nv_connector->aux.ddc : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	/* NOTE(review): ddc may legitimately be NULL here when the extdev
	 * port wasn't found; detection then relies on other means.
	 */
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
3897
3898/******************************************************************************
3899 * Atomic
3900 *****************************************************************************/
3901
/* Kick an UPDATE on the core EVO channel and wait for completion.
 *
 * MST payload state is prepared on every modified manager before the
 * update and cleaned up afterwards.  Completion is detected via a
 * notifier dword in the sync buffer: it's zeroed before the kick and
 * polled (up to 2s) for the hardware to write it non-zero.
 */
static void
nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_dmac *core = &disp->mast.base;
	struct nv50_mstm *mstm;
	struct drm_encoder *encoder;
	u32 *push;

	NV_ATOMIC(drm, "commit core %08x\n", interlock);

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_prepare(mstm);
		}
	}

	if ((push = evo_wait(core, 5))) {
		/* 0x0084 = notify, 0x0080 = update + interlock mask. */
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, interlock);
		evo_data(push, 0x00000000);
		nouveau_bo_wr32(disp->sync, 0, 0x00000000);
		evo_kick(push, core);
		if (nvif_msec(&drm->client.device, 2000ULL,
			if (nouveau_bo_rd32(disp->sync, 0))
				break;
			usleep_range(1, 2);
		) < 0)
			NV_ERROR(drm, "EVO timeout\n");
	}

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_cleanup(mstm);
		}
	}
}
3945
/* The commit tail: apply a swapped atomic state to the hardware.
 *
 * Ordering is load-bearing: heads and planes are disabled first, then
 * output paths (flushing immediately where an output demands it, e.g.
 * MST teardown), then the disable is flushed, then output paths, heads
 * and planes are enabled, and finally the update is flushed.  The
 * "interlock" masks tie satellite-channel updates to the core update.
 */
static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
	struct drm_device *dev = state->dev;
	struct drm_crtc_state *new_crtc_state, *old_crtc_state;
	struct drm_crtc *crtc;
	struct drm_plane_state *new_plane_state;
	struct drm_plane *plane;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;
	u32 interlock_core = 0;
	u32 interlock_chan = 0;
	int i;

	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
	drm_atomic_helper_wait_for_fences(dev, state, false);
	drm_atomic_helper_wait_for_dependencies(state);
	drm_atomic_helper_update_legacy_modeset_state(dev, state);

	if (atom->lock_core)
		mutex_lock(&disp->mutex);

	/* Disable head(s). */
	for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
			  asyh->clr.mask, asyh->set.mask);
		if (old_crtc_state->active && !new_crtc_state->active)
			drm_crtc_vblank_off(crtc);

		if (asyh->clr.mask) {
			nv50_head_flush_clr(head, asyh, atom->flush_disable);
			interlock_core |= 1;
		}
	}

	/* Disable plane(s). */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
			  asyw->clr.mask, asyw->set.mask);
		if (!asyw->clr.mask)
			continue;

		interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core,
						      atom->flush_disable,
						      asyw);
	}

	/* Disable output path(s). */
	list_for_each_entry(outp, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
			  outp->clr.mask, outp->set.mask);

		if (outp->clr.mask) {
			help->disable(encoder);
			interlock_core |= 1;
			/* Some outputs (MST) need their disable flushed
			 * to hardware before we can go any further.
			 */
			if (outp->flush_disable) {
				nv50_disp_atomic_commit_core(drm, interlock_chan);
				interlock_core = 0;
				interlock_chan = 0;
			}
		}
	}

	/* Flush disable. */
	if (interlock_core) {
		if (atom->flush_disable) {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
			interlock_core = 0;
			interlock_chan = 0;
		}
	}

	/* Update output path(s). */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
			  outp->set.mask, outp->clr.mask);

		if (outp->set.mask) {
			help->enable(encoder);
			interlock_core = 1;
		}

		/* Entries are consumed here; the atom's outp list is
		 * empty after this loop.
		 */
		list_del(&outp->head);
		kfree(outp);
	}

	/* Update head(s). */
	for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
			  asyh->set.mask, asyh->clr.mask);

		if (asyh->set.mask) {
			nv50_head_flush_set(head, asyh);
			interlock_core = 1;
		}

		if (new_crtc_state->active) {
			if (!old_crtc_state->active)
				drm_crtc_vblank_on(crtc);
			if (new_crtc_state->event)
				drm_crtc_vblank_get(crtc);
		}
	}

	/* Update plane(s). */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
			  asyw->set.mask, asyw->clr.mask);
		if ( !asyw->set.mask &&
		    (!asyw->clr.mask || atom->flush_disable))
			continue;

		interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw);
	}

	/* Flush update. */
	if (interlock_core) {
		if (!interlock_chan && atom->state.legacy_cursor_update) {
			/* Cursor-only updates skip the full core commit
			 * (and its completion wait) for latency.
			 */
			u32 *push = evo_wait(&disp->mast, 2);
			if (push) {
				evo_mthd(push, 0x0080, 1);
				evo_data(push, 0x00000000);
				evo_kick(push, &disp->mast);
			}
		} else {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
		}
	}

	if (atom->lock_core)
		mutex_unlock(&disp->mutex);

	/* Wait for HW to signal completion. */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		int ret = nv50_wndw_wait_armed(wndw, asyw);
		if (ret)
			NV_ERROR(drm, "%s: timeout\n", plane->name);
	}

	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (new_crtc_state->event) {
			unsigned long flags;
			/* Get correct count/ts if racing with vblank irq */
			if (new_crtc_state->active)
				drm_crtc_accurate_vblank_count(crtc);
			spin_lock_irqsave(&crtc->dev->event_lock, flags);
			drm_crtc_send_vblank_event(crtc, new_crtc_state->event);
			spin_unlock_irqrestore(&crtc->dev->event_lock, flags);

			new_crtc_state->event = NULL;
			if (new_crtc_state->active)
				drm_crtc_vblank_put(crtc);
		}
	}

	drm_atomic_helper_commit_hw_done(state);
	drm_atomic_helper_cleanup_planes(dev, state);
	drm_atomic_helper_commit_cleanup_done(state);
	drm_atomic_state_put(state);
}
4134
4135static void
4136nv50_disp_atomic_commit_work(struct work_struct *work)
4137{
4138	struct drm_atomic_state *state =
4139		container_of(work, typeof(*state), commit_work);
4140	nv50_disp_atomic_commit_tail(state);
4141}
4142
/* .atomic_commit implementation.
 *
 * Takes a runtime-PM reference for the duration of the commit.  If the
 * commit enables the first active CRTC, the reference is deliberately
 * kept (have_disp_power_ref) by returning early before the final
 * pm_runtime_put_autosuspend(); it is dropped again when the last CRTC
 * is disabled.
 */
static int
nv50_disp_atomic_commit(struct drm_device *dev,
			struct drm_atomic_state *state, bool nonblock)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_plane_state *new_plane_state;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	bool active = false;
	int ret, i;

	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0 && ret != -EACCES)
		return ret;

	ret = drm_atomic_helper_setup_commit(state, nonblock);
	if (ret)
		goto done;

	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);

	ret = drm_atomic_helper_prepare_planes(dev, state);
	if (ret)
		goto done;

	if (!nonblock) {
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto err_cleanup;
	}

	ret = drm_atomic_helper_swap_state(state, true);
	if (ret)
		goto err_cleanup;

	/* Point each flipped window's completion notifier at its slot
	 * in the shared sync buffer, alternating between two offsets.
	 */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		if (asyw->set.image) {
			asyw->ntfy.handle = wndw->dmac->sync.handle;
			asyw->ntfy.offset = wndw->ntfy;
			asyw->ntfy.awaken = false;
			asyw->set.ntfy = true;
			nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000);
			wndw->ntfy ^= 0x10;
		}
	}

	drm_atomic_state_get(state);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

	drm_for_each_crtc(crtc, dev) {
		if (crtc->state->enable) {
			if (!drm->have_disp_power_ref) {
				drm->have_disp_power_ref = true;
				/* Keep the runtime-PM reference taken
				 * above while any CRTC is enabled.
				 */
				return 0;
			}
			active = true;
			break;
		}
	}

	if (!active && drm->have_disp_power_ref) {
		pm_runtime_put_autosuspend(dev->dev);
		drm->have_disp_power_ref = false;
	}

err_cleanup:
	/* Success also falls through here; ret == 0 skips the cleanup. */
	if (ret)
		drm_atomic_helper_cleanup_planes(dev, state);
done:
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}
4223
4224static struct nv50_outp_atom *
4225nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
4226{
4227	struct nv50_outp_atom *outp;
4228
4229	list_for_each_entry(outp, &atom->outp, head) {
4230		if (outp->encoder == encoder)
4231			return outp;
4232	}
4233
4234	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
4235	if (!outp)
4236		return ERR_PTR(-ENOMEM);
4237
4238	list_add(&outp->head, &atom->outp);
4239	outp->encoder = encoder;
4240	return outp;
4241}
4242
4243static int
4244nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
4245				struct drm_connector_state *old_connector_state)
4246{
4247	struct drm_encoder *encoder = old_connector_state->best_encoder;
4248	struct drm_crtc_state *old_crtc_state, *new_crtc_state;
4249	struct drm_crtc *crtc;
4250	struct nv50_outp_atom *outp;
4251
4252	if (!(crtc = old_connector_state->crtc))
4253		return 0;
4254
4255	old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
4256	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
4257	if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
4258		outp = nv50_disp_outp_atomic_add(atom, encoder);
4259		if (IS_ERR(outp))
4260			return PTR_ERR(outp);
4261
4262		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
4263			outp->flush_disable = true;
4264			atom->flush_disable = true;
4265		}
4266		outp->clr.ctrl = true;
4267		atom->lock_core = true;
4268	}
4269
4270	return 0;
4271}
4272
4273static int
4274nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
4275				struct drm_connector_state *connector_state)
4276{
4277	struct drm_encoder *encoder = connector_state->best_encoder;
4278	struct drm_crtc_state *new_crtc_state;
4279	struct drm_crtc *crtc;
4280	struct nv50_outp_atom *outp;
4281
4282	if (!(crtc = connector_state->crtc))
4283		return 0;
4284
4285	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
4286	if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
4287		outp = nv50_disp_outp_atomic_add(atom, encoder);
4288		if (IS_ERR(outp))
4289			return PTR_ERR(outp);
4290
4291		outp->set.ctrl = true;
4292		atom->lock_core = true;
4293	}
4294
4295	return 0;
4296}
4297
4298static int
4299nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
4300{
4301	struct nv50_atom *atom = nv50_atom(state);
4302	struct drm_connector_state *old_connector_state, *new_connector_state;
4303	struct drm_connector *connector;
4304	int ret, i;
 
 
4305
4306	ret = drm_atomic_helper_check(dev, state);
4307	if (ret)
4308		return ret;
4309
4310	for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
4311		ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
4312		if (ret)
4313			return ret;
4314
4315		ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
4316		if (ret)
4317			return ret;
 
 
4318	}
4319
4320	return 0;
4321}
4322
4323static void
4324nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
4325{
4326	struct nv50_atom *atom = nv50_atom(state);
4327	struct nv50_outp_atom *outp, *outt;
4328
4329	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
4330		list_del(&outp->head);
4331		kfree(outp);
4332	}
 
4333
4334	drm_atomic_state_default_clear(state);
 
4335}
4336
4337static void
4338nv50_disp_atomic_state_free(struct drm_atomic_state *state)
4339{
4340	struct nv50_atom *atom = nv50_atom(state);
4341	drm_atomic_state_default_release(&atom->state);
4342	kfree(atom);
4343}
4344
4345static struct drm_atomic_state *
4346nv50_disp_atomic_state_alloc(struct drm_device *dev)
4347{
4348	struct nv50_atom *atom;
4349	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
4350	    drm_atomic_state_init(dev, &atom->state) < 0) {
4351		kfree(atom);
4352		return NULL;
4353	}
4354	INIT_LIST_HEAD(&atom->outp);
4355	return &atom->state;
4356}
4357
/* Mode-config ops: atomic modesetting with the nv50 state container. */
static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = drm_fb_helper_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};
4368
4369/******************************************************************************
4370 * Init
4371 *****************************************************************************/
4372
/* Display teardown/suspend: quiesce all nv50 windows and suspend every
 * MST manager.  Planes not owned by this driver (funcs pointer doesn't
 * match nv50_wndw's ops) are skipped.
 */
void
nv50_display_fini(struct drm_device *dev)
{
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_fini(wndw);
	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		/* MST fake encoders have no dp.mstm of their own. */
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			nv_encoder = nouveau_encoder(encoder);
			nv50_mstm_fini(nv_encoder->dp.mstm);
		}
	}
}
4394
/* Display init/resume: re-point the core channel at the sync buffer,
 * resume MST managers, and re-init all nv50 windows.
 *
 * Returns 0 on success, -EBUSY if the core channel has no push space.
 */
int
nv50_display_init(struct drm_device *dev)
{
	struct drm_encoder *encoder;
	struct drm_plane *plane;
	u32 *push;

	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;

	/* 0x0088: set the core channel's notifier context DMA. */
	evo_mthd(push, 0x0088, 1);
	evo_data(push, nv50_mast(dev)->base.sync.handle);
	evo_kick(push, nv50_mast(dev));

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			struct nouveau_encoder *nv_encoder =
				nouveau_encoder(encoder);
			nv50_mstm_init(nv_encoder->dp.mstm);
		}
	}

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_init(wndw);
	}

	return 0;
}
4427
/* Final teardown of the nv50 display state; also used as the error-unwind
 * path from nv50_display_create(), so every step must tolerate partially
 * initialised state. */
void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	/* Destroy the master EVO DMA channel first, while disp is intact. */
	nv50_dmac_destroy(&disp->mast.base, disp->disp);

	/* Release the shared notifier/semaphore BO: unmap, unpin, drop ref.
	 * NOTE(review): unmap is called without a NULL check while unpin is
	 * guarded -- presumably nouveau_bo_unmap() tolerates NULL; confirm. */
	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	/* Detach from the generic nouveau display before freeing. */
	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}
4443
/* "atomic" module option: when non-zero, nv50_display_create() sets
 * DRIVER_ATOMIC to expose the atomic ioctl.  Perms 0400 -> readable by
 * root via sysfs, not changeable at runtime. */
MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
static int nouveau_atomic = 0;
module_param_named(atomic, nouveau_atomic, int, 0400);
4447
/* Construct the nv50+ display: allocate the nv50_disp state, create the
 * shared sync BO and the master EVO channel, then instantiate heads,
 * encoders and connectors from the VBIOS DCB table.  Returns 0 or a
 * negative errno; on failure nv50_display_destroy() unwinds whatever
 * was set up. */
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->client.device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	mutex_init(&disp->mutex);

	/* Hook our state and lifecycle callbacks into the generic
	 * nouveau display BEFORE anything can fail, so the out: path
	 * can rely on nv50_display_destroy(). */
	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	dev->driver->driver_features |= DRIVER_PREFER_XBGR_30BPP;
	/* Atomic ioctl exposure is opt-in via the "atomic" module param. */
	if (nouveau_atomic)
		dev->driver->driver_features |= DRIVER_ATOMIC;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		/* Any partial failure drops the BO so destroy() sees NULL. */
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	/* The sync BO must be pinned first: its VRAM offset is handed to
	 * the core channel here. */
	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
			      &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	/* GF110+ exposes a head-presence bitmask at register 0x612004;
	 * older chips are assumed to have two heads (mask 0x3). */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x612004) & 0xf;
	else
		crtcs = 0x3;

	for (i = 0; i < fls(crtcs); i++) {
		if (!(crtcs & (1 << i)))
			continue;
		ret = nv50_head_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			/* Off-chip outputs go through the PIOR path. */
			ret = nv50_pior_create(connector, dcbe);
		}

		/* Encoder creation failures are non-fatal: warn and keep
		 * going so the remaining outputs still come up. */
		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				     dcbe->location, dcbe->type,
				     ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}