   1/*
   2 * Copyright 2011 Red Hat Inc.
   3 *
   4 * Permission is hereby granted, free of charge, to any person obtaining a
   5 * copy of this software and associated documentation files (the "Software"),
   6 * to deal in the Software without restriction, including without limitation
   7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   8 * and/or sell copies of the Software, and to permit persons to whom the
   9 * Software is furnished to do so, subject to the following conditions:
  10 *
  11 * The above copyright notice and this permission notice shall be included in
  12 * all copies or substantial portions of the Software.
  13 *
  14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  20 * OTHER DEALINGS IN THE SOFTWARE.
  21 *
  22 * Authors: Ben Skeggs
  23 */
  24
  25#include <linux/dma-mapping.h>
  26
  27#include <drm/drmP.h>
  28#include <drm/drm_crtc_helper.h>
  29#include <drm/drm_plane_helper.h>
  30#include <drm/drm_dp_helper.h>
  31#include <drm/drm_fb_helper.h>
  32
  33#include <nvif/class.h>
  34#include <nvif/cl0002.h>
  35#include <nvif/cl5070.h>
  36#include <nvif/cl507a.h>
  37#include <nvif/cl507b.h>
  38#include <nvif/cl507c.h>
  39#include <nvif/cl507d.h>
  40#include <nvif/cl507e.h>
  41
  42#include "nouveau_drm.h"
  43#include "nouveau_dma.h"
  44#include "nouveau_gem.h"
  45#include "nouveau_connector.h"
  46#include "nouveau_encoder.h"
  47#include "nouveau_crtc.h"
  48#include "nouveau_fence.h"
  49#include "nv50_display.h"
  50
  51#define EVO_DMA_NR 9
  52
  53#define EVO_MASTER  (0x00)
  54#define EVO_FLIP(c) (0x01 + (c))
  55#define EVO_OVLY(c) (0x05 + (c))
  56#define EVO_OIMM(c) (0x09 + (c))
  57#define EVO_CURS(c) (0x0d + (c))
  58
  59/* offsets in shared sync bo of various structures */
  60#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
  61#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
  62#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
  63#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
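/* Each head owns a 0x100-byte slot in the shared sync buffer: slot 0 holds
 * the core channel notifier, and slot (head + 1) holds the two page-flip
 * semaphores that nv50_display_flip_next() ping-pongs between (it toggles
 * the semaphore offset by 0x10 and bumps the expected value on every flip).
 */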
  64
  65/******************************************************************************
  66 * EVO channel
  67 *****************************************************************************/
  68
  69struct nv50_chan {
  70	struct nvif_object user;
  71	struct nvif_device *device;
  72};
  73
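/* Walk the caller-supplied class list (newest hardware class first) and
 * instantiate the first one the display object actually exposes, then map
 * the channel so its registers can be poked directly via nvif_rd32/wr32.
 */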
  74static int
  75nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
  76		 const s32 *oclass, u8 head, void *data, u32 size,
  77		 struct nv50_chan *chan)
  78{
  79	struct nvif_sclass *sclass;
  80	int ret, i, n;
  81
  82	chan->device = device;
  83
  84	ret = n = nvif_object_sclass_get(disp, &sclass);
  85	if (ret < 0)
  86		return ret;
  87
  88	while (oclass[0]) {
  89		for (i = 0; i < n; i++) {
  90			if (sclass[i].oclass == oclass[0]) {
  91				ret = nvif_object_init(disp, 0, oclass[0],
  92						       data, size, &chan->user);
  93				if (ret == 0)
  94					nvif_object_map(&chan->user);
  95				nvif_object_sclass_put(&sclass);
  96				return ret;
  97			}
  98		}
  99		oclass++;
 100	}
 101
 102	nvif_object_sclass_put(&sclass);
 103	return -ENOSYS;
 104}
 105
 106static void
 107nv50_chan_destroy(struct nv50_chan *chan)
 108{
 109	nvif_object_fini(&chan->user);
 110}
 111
 112/******************************************************************************
 113 * PIO EVO channel
 114 *****************************************************************************/
 115
 116struct nv50_pioc {
 117	struct nv50_chan base;
 118};
 119
 120static void
 121nv50_pioc_destroy(struct nv50_pioc *pioc)
 122{
 123	nv50_chan_destroy(&pioc->base);
 124}
 125
 126static int
 127nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
 128		 const s32 *oclass, u8 head, void *data, u32 size,
 129		 struct nv50_pioc *pioc)
 130{
 131	return nv50_chan_create(device, disp, oclass, head, data, size,
 132				&pioc->base);
 133}
 134
 135/******************************************************************************
 136 * Cursor Immediate
 137 *****************************************************************************/
 138
 139struct nv50_curs {
 140	struct nv50_pioc base;
 141};
 142
 143static int
 144nv50_curs_create(struct nvif_device *device, struct nvif_object *disp,
 145		 int head, struct nv50_curs *curs)
 146{
 147	struct nv50_disp_cursor_v0 args = {
 148		.head = head,
 149	};
 150	static const s32 oclass[] = {
 151		GK104_DISP_CURSOR,
 152		GF110_DISP_CURSOR,
 153		GT214_DISP_CURSOR,
 154		G82_DISP_CURSOR,
 155		NV50_DISP_CURSOR,
 156		0
 157	};
 158
 159	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
 160				&curs->base);
 161}
 162
 163/******************************************************************************
 164 * Overlay Immediate
 165 *****************************************************************************/
 166
 167struct nv50_oimm {
 168	struct nv50_pioc base;
 169};
 170
 171static int
 172nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
 173		 int head, struct nv50_oimm *oimm)
 174{
 175	struct nv50_disp_cursor_v0 args = {
 176		.head = head,
 177	};
 178	static const s32 oclass[] = {
 179		GK104_DISP_OVERLAY,
 180		GF110_DISP_OVERLAY,
 181		GT214_DISP_OVERLAY,
 182		G82_DISP_OVERLAY,
 183		NV50_DISP_OVERLAY,
 184		0
 185	};
 186
 187	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
 188				&oimm->base);
 189}
 190
 191/******************************************************************************
 192 * DMA EVO channel
 193 *****************************************************************************/
 194
 195struct nv50_dmac {
 196	struct nv50_chan base;
 197	dma_addr_t handle;
 198	u32 *ptr;
 199
 200	struct nvif_object sync;
 201	struct nvif_object vram;
 202
 203	/* Protects against concurrent pushbuf access to this channel, lock is
 204	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
 205	 * dropped again by evo_kick. */
 206	struct mutex lock;
 207};
 208
 209static void
 210nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
 211{
 212	struct nvif_device *device = dmac->base.device;
 213
 214	nvif_object_fini(&dmac->vram);
 215	nvif_object_fini(&dmac->sync);
 216
 217	nv50_chan_destroy(&dmac->base);
 218
 219	if (dmac->ptr) {
 220		struct device *dev = nvxx_device(device)->dev;
 221		dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
 222	}
 223}
 224
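/* DMA EVO channels are fed from a single page of coherent system memory:
 * the page is wrapped in an NV_DMA_FROM_MEMORY object and handed to the
 * channel as its push buffer.  Two DMA objects are then created in the
 * channel's namespace: 0xf0000000 covering the shared sync buffer and
 * 0xf0000001 covering user-visible VRAM (used for e.g. cursor and LUT
 * surfaces).
 */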
 225static int
 226nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
 227		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
 228		 struct nv50_dmac *dmac)
 229{
 230	struct nv50_disp_core_channel_dma_v0 *args = data;
 231	struct nvif_object pushbuf;
 232	int ret;
 233
 234	mutex_init(&dmac->lock);
 235
 236	dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
 237				       &dmac->handle, GFP_KERNEL);
 238	if (!dmac->ptr)
 239		return -ENOMEM;
 240
 241	ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
 242			       &(struct nv_dma_v0) {
 243					.target = NV_DMA_V0_TARGET_PCI_US,
 244					.access = NV_DMA_V0_ACCESS_RD,
 245					.start = dmac->handle + 0x0000,
 246					.limit = dmac->handle + 0x0fff,
 247			       }, sizeof(struct nv_dma_v0), &pushbuf);
 248	if (ret)
 249		return ret;
 250
 251	args->pushbuf = nvif_handle(&pushbuf);
 252
 253	ret = nv50_chan_create(device, disp, oclass, head, data, size,
 254			       &dmac->base);
 255	nvif_object_fini(&pushbuf);
 256	if (ret)
 257		return ret;
 258
 259	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
 260			       &(struct nv_dma_v0) {
 261					.target = NV_DMA_V0_TARGET_VRAM,
 262					.access = NV_DMA_V0_ACCESS_RDWR,
 263					.start = syncbuf + 0x0000,
 264					.limit = syncbuf + 0x0fff,
 265			       }, sizeof(struct nv_dma_v0),
 266			       &dmac->sync);
 267	if (ret)
 268		return ret;
 269
 270	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
 271			       &(struct nv_dma_v0) {
 272					.target = NV_DMA_V0_TARGET_VRAM,
 273					.access = NV_DMA_V0_ACCESS_RDWR,
 274					.start = 0,
 275					.limit = device->info.ram_user - 1,
 276			       }, sizeof(struct nv_dma_v0),
 277			       &dmac->vram);
 278	if (ret)
 279		return ret;
 280
 281	return ret;
 282}
 283
 284/******************************************************************************
 285 * Core
 286 *****************************************************************************/
 287
 288struct nv50_mast {
 289	struct nv50_dmac base;
 290};
 291
 292static int
 293nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
 294		 u64 syncbuf, struct nv50_mast *core)
 295{
 296	struct nv50_disp_core_channel_dma_v0 args = {
 297		.pushbuf = 0xb0007d00,
 298	};
 299	static const s32 oclass[] = {
 300		GM200_DISP_CORE_CHANNEL_DMA,
 301		GM107_DISP_CORE_CHANNEL_DMA,
 302		GK110_DISP_CORE_CHANNEL_DMA,
 303		GK104_DISP_CORE_CHANNEL_DMA,
 304		GF110_DISP_CORE_CHANNEL_DMA,
 305		GT214_DISP_CORE_CHANNEL_DMA,
 306		GT206_DISP_CORE_CHANNEL_DMA,
 307		GT200_DISP_CORE_CHANNEL_DMA,
 308		G82_DISP_CORE_CHANNEL_DMA,
 309		NV50_DISP_CORE_CHANNEL_DMA,
 310		0
 311	};
 312
 313	return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
 314				syncbuf, &core->base);
 315}
 316
 317/******************************************************************************
 318 * Base
 319 *****************************************************************************/
 320
 321struct nv50_sync {
 322	struct nv50_dmac base;
 323	u32 addr;
 324	u32 data;
 325};
 326
 327static int
 328nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
 329		 int head, u64 syncbuf, struct nv50_sync *base)
 330{
 331	struct nv50_disp_base_channel_dma_v0 args = {
 332		.pushbuf = 0xb0007c00 | head,
 333		.head = head,
 334	};
 335	static const s32 oclass[] = {
 336		GK110_DISP_BASE_CHANNEL_DMA,
 337		GK104_DISP_BASE_CHANNEL_DMA,
 338		GF110_DISP_BASE_CHANNEL_DMA,
 339		GT214_DISP_BASE_CHANNEL_DMA,
 340		GT200_DISP_BASE_CHANNEL_DMA,
 341		G82_DISP_BASE_CHANNEL_DMA,
 342		NV50_DISP_BASE_CHANNEL_DMA,
 343		0
 344	};
 345
 346	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
 347				syncbuf, &base->base);
 348}
 349
 350/******************************************************************************
 351 * Overlay
 352 *****************************************************************************/
 353
 354struct nv50_ovly {
 355	struct nv50_dmac base;
 356};
 357
 358static int
 359nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
 360		 int head, u64 syncbuf, struct nv50_ovly *ovly)
 361{
 362	struct nv50_disp_overlay_channel_dma_v0 args = {
 363		.pushbuf = 0xb0007e00 | head,
 364		.head = head,
 365	};
 366	static const s32 oclass[] = {
 367		GK104_DISP_OVERLAY_CONTROL_DMA,
 368		GF110_DISP_OVERLAY_CONTROL_DMA,
 369		GT214_DISP_OVERLAY_CHANNEL_DMA,
 370		GT200_DISP_OVERLAY_CHANNEL_DMA,
 371		G82_DISP_OVERLAY_CHANNEL_DMA,
 372		NV50_DISP_OVERLAY_CHANNEL_DMA,
 373		0
 374	};
 375
 376	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
 377				syncbuf, &ovly->base);
 378}
 379
 380struct nv50_head {
 381	struct nouveau_crtc base;
 382	struct nouveau_bo *image;
 383	struct nv50_curs curs;
 384	struct nv50_sync sync;
 385	struct nv50_ovly ovly;
 386	struct nv50_oimm oimm;
 387};
 388
 389#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
 390#define nv50_curs(c) (&nv50_head(c)->curs)
 391#define nv50_sync(c) (&nv50_head(c)->sync)
 392#define nv50_ovly(c) (&nv50_head(c)->ovly)
 393#define nv50_oimm(c) (&nv50_head(c)->oimm)
 394#define nv50_chan(c) (&(c)->base.base)
 395#define nv50_vers(c) nv50_chan(c)->user.oclass
 396
 397struct nv50_fbdma {
 398	struct list_head head;
 399	struct nvif_object core;
 400	struct nvif_object base[4];
 401};
 402
 403struct nv50_disp {
 404	struct nvif_object *disp;
 405	struct nv50_mast mast;
 406
 407	struct list_head fbdma;
 408
 409	struct nouveau_bo *sync;
 410};
 411
 412static struct nv50_disp *
 413nv50_disp(struct drm_device *dev)
 414{
 415	return nouveau_display(dev)->priv;
 416}
 417
 418#define nv50_mast(d) (&nv50_disp(d)->mast)
 419
 420static struct drm_crtc *
 421nv50_display_crtc_get(struct drm_encoder *encoder)
 422{
 423	return nouveau_encoder(encoder)->crtc;
 424}
 425
 426/******************************************************************************
 427 * EVO channel helpers
 428 *****************************************************************************/
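/* evo_wait() reserves room for `nr' words in the channel's push buffer: it
 * takes the channel lock and, if fewer than `nr' slots remain before the
 * end of the page, writes a wrap command (0x20000000), points PUT back at
 * offset 0 and waits (up to 2s) for GET to follow before handing out space
 * from the start again.  evo_kick() advances PUT past whatever the caller
 * wrote and drops the lock.
 */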
 429static u32 *
 430evo_wait(void *evoc, int nr)
 431{
 432	struct nv50_dmac *dmac = evoc;
 433	struct nvif_device *device = dmac->base.device;
 434	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
 435
 436	mutex_lock(&dmac->lock);
 437	if (put + nr >= (PAGE_SIZE / 4) - 8) {
 438		dmac->ptr[put] = 0x20000000;
 439
 440		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
 441		if (nvif_msec(device, 2000,
 442			if (!nvif_rd32(&dmac->base.user, 0x0004))
 443				break;
 444		) < 0) {
 445			mutex_unlock(&dmac->lock);
 446			printk(KERN_ERR "nouveau: evo channel stalled\n");
 447			return NULL;
 448		}
 449
 450		put = 0;
 451	}
 452
 453	return dmac->ptr + put;
 454}
 455
 456static void
 457evo_kick(u32 *push, void *evoc)
 458{
 459	struct nv50_dmac *dmac = evoc;
 460	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
 461	mutex_unlock(&dmac->lock);
 462}
 463
 464#if 1
 465#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
 466#define evo_data(p,d)   *((p)++) = (d)
 467#else
 468#define evo_mthd(p,m,s) do {                                                   \
 469	const u32 _m = (m), _s = (s);                                          \
 470	printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__);                     \
 471	*((p)++) = ((_s << 18) | _m);                                          \
 472} while(0)
 473#define evo_data(p,d) do {                                                     \
 474	const u32 _d = (d);                                                    \
 475	printk(KERN_ERR "\t%08x\n", _d);                                       \
 476	*((p)++) = _d;                                                         \
 477} while(0)
 478#endif
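/* Each method is pushed as a single header word -- the data-word count
 * shifted left by 18, OR'd with the method offset -- followed by the data
 * words themselves.  A typical sequence (see nv50_crtc_set_dither() below
 * for a real user) looks like:
 *
 *	push = evo_wait(mast, 2);
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);
 *		evo_data(push, 0x00000000);
 *		evo_kick(push, mast);
 *	}
 *
 * where 0x0080 followed by zero is the "update"/commit sequence used
 * throughout this file.  The #else branch above is a debugging variant that
 * also logs every method/data pair as it is written.
 */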
 479
 480static bool
 481evo_sync_wait(void *data)
 482{
 483	if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
 484		return true;
 485	usleep_range(1, 2);
 486	return false;
 487}
 488
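/* Flush the core (master) channel: arm a notifier write via method 0x0084,
 * submit an update, then poll the notifier word in the shared sync buffer
 * (for up to 2s) until the hardware has processed everything queued so far.
 */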
 489static int
 490evo_sync(struct drm_device *dev)
 491{
 492	struct nvif_device *device = &nouveau_drm(dev)->device;
 493	struct nv50_disp *disp = nv50_disp(dev);
 494	struct nv50_mast *mast = nv50_mast(dev);
 495	u32 *push = evo_wait(mast, 8);
 496	if (push) {
 497		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
 498		evo_mthd(push, 0x0084, 1);
 499		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
 500		evo_mthd(push, 0x0080, 2);
 501		evo_data(push, 0x00000000);
 502		evo_data(push, 0x00000000);
 503		evo_kick(push, mast);
 504		if (nvif_msec(device, 2000,
 505			if (evo_sync_wait(disp->sync))
 506				break;
 507		) >= 0)
 508			return 0;
 509	}
 510
 511	return -EBUSY;
 512}
 513
 514/******************************************************************************
 515 * Page flipping channel
 516 *****************************************************************************/
 517struct nouveau_bo *
 518nv50_display_crtc_sema(struct drm_device *dev, int crtc)
 519{
 520	return nv50_disp(dev)->sync;
 521}
 522
 523struct nv50_display_flip {
 524	struct nv50_disp *disp;
 525	struct nv50_sync *chan;
 526};
 527
 528static bool
 529nv50_display_flip_wait(void *data)
 530{
 531	struct nv50_display_flip *flip = data;
 532	if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) ==
 533					      flip->chan->data)
 534		return true;
 535	usleep_range(1, 2);
 536	return false;
 537}
 538
 539void
 540nv50_display_flip_stop(struct drm_crtc *crtc)
 541{
 542	struct nvif_device *device = &nouveau_drm(crtc->dev)->device;
 543	struct nv50_display_flip flip = {
 544		.disp = nv50_disp(crtc->dev),
 545		.chan = nv50_sync(crtc),
 546	};
 547	u32 *push;
 548
 549	push = evo_wait(flip.chan, 8);
 550	if (push) {
 551		evo_mthd(push, 0x0084, 1);
 552		evo_data(push, 0x00000000);
 553		evo_mthd(push, 0x0094, 1);
 554		evo_data(push, 0x00000000);
 555		evo_mthd(push, 0x00c0, 1);
 556		evo_data(push, 0x00000000);
 557		evo_mthd(push, 0x0080, 1);
 558		evo_data(push, 0x00000000);
 559		evo_kick(push, flip.chan);
 560	}
 561
 562	nvif_msec(device, 2000,
 563		if (nv50_display_flip_wait(&flip))
 564			break;
 565	);
 566}
 567
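/* Queue a page flip on a head's base channel.  When a graphics channel is
 * supplied, the flip is ordered against rendering with a semaphore pair:
 * roughly, the graphics channel releases one semaphore once rendering has
 * finished and the base channel is programmed (methods 0x0088..) to acquire
 * the matching value before latching the new framebuffer.  The semaphore
 * offset/value ping-pong between the head's two sync-buffer slots so that
 * back-to-back flips stay ordered.  Without a channel, evo_sync() is used
 * to serialise against the core channel instead.
 */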
 568int
 569nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
 570		       struct nouveau_channel *chan, u32 swap_interval)
 571{
 572	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
 573	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
 574	struct nv50_head *head = nv50_head(crtc);
 575	struct nv50_sync *sync = nv50_sync(crtc);
 576	u32 *push;
 577	int ret;
 578
 579	if (crtc->primary->fb->width != fb->width ||
 580	    crtc->primary->fb->height != fb->height)
 581		return -EINVAL;
 582
 583	swap_interval <<= 4;
 584	if (swap_interval == 0)
 585		swap_interval |= 0x100;
 586	if (chan == NULL)
 587		evo_sync(crtc->dev);
 588
 589	push = evo_wait(sync, 128);
 590	if (unlikely(push == NULL))
 591		return -EBUSY;
 592
 593	if (chan && chan->user.oclass < G82_CHANNEL_GPFIFO) {
 594		ret = RING_SPACE(chan, 8);
 595		if (ret)
 596			return ret;
 597
 598		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
 599		OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
 600		OUT_RING  (chan, sync->addr ^ 0x10);
 601		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
 602		OUT_RING  (chan, sync->data + 1);
 603		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
 604		OUT_RING  (chan, sync->addr);
 605		OUT_RING  (chan, sync->data);
 606	} else
 607	if (chan && chan->user.oclass < FERMI_CHANNEL_GPFIFO) {
 608		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
 609		ret = RING_SPACE(chan, 12);
 610		if (ret)
 611			return ret;
 612
 613		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
 614		OUT_RING  (chan, chan->vram.handle);
 615		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
 616		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
 617		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
 618		OUT_RING  (chan, sync->data + 1);
 619		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
 620		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
 621		OUT_RING  (chan, upper_32_bits(addr));
 622		OUT_RING  (chan, lower_32_bits(addr));
 623		OUT_RING  (chan, sync->data);
 624		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL);
 625	} else
 626	if (chan) {
 627		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
 628		ret = RING_SPACE(chan, 10);
 629		if (ret)
 630			return ret;
 631
 632		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
 633		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
 634		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
 635		OUT_RING  (chan, sync->data + 1);
 636		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG |
 637				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
 638		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
 639		OUT_RING  (chan, upper_32_bits(addr));
 640		OUT_RING  (chan, lower_32_bits(addr));
 641		OUT_RING  (chan, sync->data);
 642		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL |
 643				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
 644	}
 645
 646	if (chan) {
 647		sync->addr ^= 0x10;
 648		sync->data++;
 649		FIRE_RING (chan);
 650	}
 651
 652	/* queue the flip */
 653	evo_mthd(push, 0x0100, 1);
 654	evo_data(push, 0xfffe0000);
 655	evo_mthd(push, 0x0084, 1);
 656	evo_data(push, swap_interval);
 657	if (!(swap_interval & 0x00000100)) {
 658		evo_mthd(push, 0x00e0, 1);
 659		evo_data(push, 0x40000000);
 660	}
 661	evo_mthd(push, 0x0088, 4);
 662	evo_data(push, sync->addr);
 663	evo_data(push, sync->data++);
 664	evo_data(push, sync->data);
 665	evo_data(push, sync->base.sync.handle);
 666	evo_mthd(push, 0x00a0, 2);
 667	evo_data(push, 0x00000000);
 668	evo_data(push, 0x00000000);
 669	evo_mthd(push, 0x00c0, 1);
 670	evo_data(push, nv_fb->r_handle);
 671	evo_mthd(push, 0x0110, 2);
 672	evo_data(push, 0x00000000);
 673	evo_data(push, 0x00000000);
 674	if (nv50_vers(sync) < GF110_DISP_BASE_CHANNEL_DMA) {
 675		evo_mthd(push, 0x0800, 5);
 676		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
 677		evo_data(push, 0);
 678		evo_data(push, (fb->height << 16) | fb->width);
 679		evo_data(push, nv_fb->r_pitch);
 680		evo_data(push, nv_fb->r_format);
 681	} else {
 682		evo_mthd(push, 0x0400, 5);
 683		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
 684		evo_data(push, 0);
 685		evo_data(push, (fb->height << 16) | fb->width);
 686		evo_data(push, nv_fb->r_pitch);
 687		evo_data(push, nv_fb->r_format);
 688	}
 689	evo_mthd(push, 0x0080, 1);
 690	evo_data(push, 0x00000000);
 691	evo_kick(push, sync);
 692
 693	nouveau_bo_ref(nv_fb->nvbo, &head->image);
 694	return 0;
 695}
 696
 697/******************************************************************************
 698 * CRTC
 699 *****************************************************************************/
 700static int
 701nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
 702{
 703	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
 704	struct nouveau_connector *nv_connector;
 705	struct drm_connector *connector;
 706	u32 *push, mode = 0x00;
 707
 708	nv_connector = nouveau_crtc_connector_get(nv_crtc);
 709	connector = &nv_connector->base;
 710	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
 711		if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3)
 712			mode = DITHERING_MODE_DYNAMIC2X2;
 713	} else {
 714		mode = nv_connector->dithering_mode;
 715	}
 716
 717	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
 718		if (connector->display_info.bpc >= 8)
 719			mode |= DITHERING_DEPTH_8BPC;
 720	} else {
 721		mode |= nv_connector->dithering_depth;
 722	}
 723
 724	push = evo_wait(mast, 4);
 725	if (push) {
 726		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
 727			evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
 728			evo_data(push, mode);
 729		} else
 730		if (nv50_vers(mast) < GK104_DISP_CORE_CHANNEL_DMA) {
 731			evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
 732			evo_data(push, mode);
 733		} else {
 734			evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
 735			evo_data(push, mode);
 736		}
 737
 738		if (update) {
 739			evo_mthd(push, 0x0080, 1);
 740			evo_data(push, 0x00000000);
 741		}
 742		evo_kick(push, mast);
 743	}
 744
 745	return 0;
 746}
 747
 748static int
 749nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
 750{
 751	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
 752	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
 753	struct drm_crtc *crtc = &nv_crtc->base;
 754	struct nouveau_connector *nv_connector;
 755	int mode = DRM_MODE_SCALE_NONE;
 756	u32 oX, oY, *push;
 757
 758	/* start off at the resolution we programmed the crtc for, this
 759	 * effectively handles NONE/FULL scaling
 760	 */
 761	nv_connector = nouveau_crtc_connector_get(nv_crtc);
 762	if (nv_connector && nv_connector->native_mode) {
 763		mode = nv_connector->scaling_mode;
 764		if (nv_connector->scaling_full) /* non-EDID LVDS/eDP mode */
 765			mode = DRM_MODE_SCALE_FULLSCREEN;
 766	}
 767
 768	if (mode != DRM_MODE_SCALE_NONE)
 769		omode = nv_connector->native_mode;
 770	else
 771		omode = umode;
 772
 773	oX = omode->hdisplay;
 774	oY = omode->vdisplay;
 775	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
 776		oY *= 2;
 777
 778	/* add overscan compensation if necessary, will keep the aspect
 779	 * ratio the same as the backend mode unless overridden by the
 780	 * user setting both hborder and vborder properties.
 781	 */
 782	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
 783			     (nv_connector->underscan == UNDERSCAN_AUTO &&
 784			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
 785		u32 bX = nv_connector->underscan_hborder;
 786		u32 bY = nv_connector->underscan_vborder;
 787		u32 aspect = (oY << 19) / oX;
 788
 789		if (bX) {
 790			oX -= (bX * 2);
 791			if (bY) oY -= (bY * 2);
 792			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
 793		} else {
 794			oX -= (oX >> 4) + 32;
 795			if (bY) oY -= (bY * 2);
 796			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
 797		}
 798	}
 799
 800	/* handle CENTER/ASPECT scaling, taking into account the areas
 801	 * removed already for overscan compensation
 802	 */
 803	switch (mode) {
 804	case DRM_MODE_SCALE_CENTER:
 805		oX = min((u32)umode->hdisplay, oX);
 806		oY = min((u32)umode->vdisplay, oY);
 807		/* fall-through */
 808	case DRM_MODE_SCALE_ASPECT:
 809		if (oY < oX) {
 810			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
 811			oX = ((oY * aspect) + (aspect / 2)) >> 19;
 812		} else {
 813			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
 814			oY = ((oX * aspect) + (aspect / 2)) >> 19;
 815		}
 816		break;
 817	default:
 818		break;
 819	}
 820
 821	push = evo_wait(mast, 8);
 822	if (push) {
 823		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
 824			/*XXX: SCALE_CTRL_ACTIVE??? */
 825			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
 826			evo_data(push, (oY << 16) | oX);
 827			evo_data(push, (oY << 16) | oX);
 828			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
 829			evo_data(push, 0x00000000);
 830			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
 831			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
 832		} else {
 833			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
 834			evo_data(push, (oY << 16) | oX);
 835			evo_data(push, (oY << 16) | oX);
 836			evo_data(push, (oY << 16) | oX);
 837			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
 838			evo_data(push, 0x00000000);
 839			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
 840			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
 841		}
 842
 843		evo_kick(push, mast);
 844
 845		if (update) {
 846			nv50_display_flip_stop(crtc);
 847			nv50_display_flip_next(crtc, crtc->primary->fb,
 848					       NULL, 1);
 849		}
 850	}
 851
 852	return 0;
 853}
 854
 855static int
 856nv50_crtc_set_raster_vblank_dmi(struct nouveau_crtc *nv_crtc, u32 usec)
 857{
 858	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
 859	u32 *push;
 860
 861	push = evo_wait(mast, 8);
 862	if (!push)
 863		return -ENOMEM;
 864
 865	evo_mthd(push, 0x0828 + (nv_crtc->index * 0x400), 1);
 866	evo_data(push, usec);
 867	evo_kick(push, mast);
 868	return 0;
 869}
 870
 871static int
 872nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
 873{
 874	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
 875	u32 *push, hue, vib;
 876	int adj;
 877
 878	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
 879	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
 880	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;
 881
 882	push = evo_wait(mast, 16);
 883	if (push) {
 884		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
 885			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
 886			evo_data(push, (hue << 20) | (vib << 8));
 887		} else {
 888			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
 889			evo_data(push, (hue << 20) | (vib << 8));
 890		}
 891
 892		if (update) {
 893			evo_mthd(push, 0x0080, 1);
 894			evo_data(push, 0x00000000);
 895		}
 896		evo_kick(push, mast);
 897	}
 898
 899	return 0;
 900}
 901
 902static int
 903nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
 904		    int x, int y, bool update)
 905{
 906	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
 907	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
 908	u32 *push;
 909
 910	push = evo_wait(mast, 16);
 911	if (push) {
 912		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
 913			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
 914			evo_data(push, nvfb->nvbo->bo.offset >> 8);
 915			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
 916			evo_data(push, (fb->height << 16) | fb->width);
 917			evo_data(push, nvfb->r_pitch);
 918			evo_data(push, nvfb->r_format);
 919			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
 920			evo_data(push, (y << 16) | x);
 921			if (nv50_vers(mast) > NV50_DISP_CORE_CHANNEL_DMA) {
 922				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
 923				evo_data(push, nvfb->r_handle);
 924			}
 925		} else {
 926			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
 927			evo_data(push, nvfb->nvbo->bo.offset >> 8);
 928			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
 929			evo_data(push, (fb->height << 16) | fb->width);
 930			evo_data(push, nvfb->r_pitch);
 931			evo_data(push, nvfb->r_format);
 932			evo_data(push, nvfb->r_handle);
 933			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
 934			evo_data(push, (y << 16) | x);
 935		}
 936
 937		if (update) {
 938			evo_mthd(push, 0x0080, 1);
 939			evo_data(push, 0x00000000);
 940		}
 941		evo_kick(push, mast);
 942	}
 943
 944	nv_crtc->fb.handle = nvfb->r_handle;
 945	return 0;
 946}
 947
 948static void
 949nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
 950{
 951	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
 952	u32 *push = evo_wait(mast, 16);
 953	if (push) {
 954		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
 955			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
 956			evo_data(push, 0x85000000);
 957			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
 958		} else
 959		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
 960			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
 961			evo_data(push, 0x85000000);
 962			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
 963			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
 964			evo_data(push, mast->base.vram.handle);
 965		} else {
 966			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
 967			evo_data(push, 0x85000000);
 968			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
 969			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
 970			evo_data(push, mast->base.vram.handle);
 971		}
 972		evo_kick(push, mast);
 973	}
 974	nv_crtc->cursor.visible = true;
 975}
 976
 977static void
 978nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
 979{
 980	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
 981	u32 *push = evo_wait(mast, 16);
 982	if (push) {
 983		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
 984			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
 985			evo_data(push, 0x05000000);
 986		} else
 987		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
 988			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
 989			evo_data(push, 0x05000000);
 990			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
 991			evo_data(push, 0x00000000);
 992		} else {
 993			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
 994			evo_data(push, 0x05000000);
 995			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
 996			evo_data(push, 0x00000000);
 997		}
 998		evo_kick(push, mast);
 999	}
1000	nv_crtc->cursor.visible = false;
1001}
1002
1003static void
1004nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
1005{
1006	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
1007
1008	if (show && nv_crtc->cursor.nvbo && nv_crtc->base.enabled)
1009		nv50_crtc_cursor_show(nv_crtc);
1010	else
1011		nv50_crtc_cursor_hide(nv_crtc);
1012
1013	if (update) {
1014		u32 *push = evo_wait(mast, 2);
1015		if (push) {
1016			evo_mthd(push, 0x0080, 1);
1017			evo_data(push, 0x00000000);
1018			evo_kick(push, mast);
1019		}
1020	}
1021}
1022
1023static void
1024nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
1025{
1026}
1027
1028static void
1029nv50_crtc_prepare(struct drm_crtc *crtc)
1030{
1031	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1032	struct nv50_mast *mast = nv50_mast(crtc->dev);
1033	u32 *push;
1034
1035	nv50_display_flip_stop(crtc);
1036
1037	push = evo_wait(mast, 6);
1038	if (push) {
1039		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
1040			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
1041			evo_data(push, 0x00000000);
1042			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
1043			evo_data(push, 0x40000000);
1044		} else
1045		if (nv50_vers(mast) <  GF110_DISP_CORE_CHANNEL_DMA) {
1046			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
1047			evo_data(push, 0x00000000);
1048			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
1049			evo_data(push, 0x40000000);
1050			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
1051			evo_data(push, 0x00000000);
1052		} else {
1053			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
1054			evo_data(push, 0x00000000);
1055			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
1056			evo_data(push, 0x03000000);
1057			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
1058			evo_data(push, 0x00000000);
1059		}
1060
1061		evo_kick(push, mast);
1062	}
1063
1064	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
1065}
1066
1067static void
1068nv50_crtc_commit(struct drm_crtc *crtc)
1069{
1070	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1071	struct nv50_mast *mast = nv50_mast(crtc->dev);
1072	u32 *push;
1073
1074	push = evo_wait(mast, 32);
1075	if (push) {
1076		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
1077			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
1078			evo_data(push, nv_crtc->fb.handle);
1079			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
1080			evo_data(push, 0xc0000000);
1081			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
1082		} else
1083		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
1084			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
1085			evo_data(push, nv_crtc->fb.handle);
1086			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
1087			evo_data(push, 0xc0000000);
1088			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
1089			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
1090			evo_data(push, mast->base.vram.handle);
1091		} else {
1092			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
1093			evo_data(push, nv_crtc->fb.handle);
1094			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
1095			evo_data(push, 0x83000000);
1096			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
1097			evo_data(push, 0x00000000);
1098			evo_data(push, 0x00000000);
1099			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
1100			evo_data(push, mast->base.vram.handle);
1101			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
1102			evo_data(push, 0xffffff00);
1103		}
1104
1105		evo_kick(push, mast);
1106	}
1107
1108	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
1109	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
1110}
1111
1112static bool
1113nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
1114		     struct drm_display_mode *adjusted_mode)
1115{
1116	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
1117	return true;
1118}
1119
1120static int
1121nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
1122{
1123	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb);
1124	struct nv50_head *head = nv50_head(crtc);
1125	int ret;
1126
1127	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM, true);
1128	if (ret == 0) {
1129		if (head->image)
1130			nouveau_bo_unpin(head->image);
1131		nouveau_bo_ref(nvfb->nvbo, &head->image);
1132	}
1133
1134	return ret;
1135}
1136
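/* EVO wants its timings referenced to the start of the sync pulse rather
 * than to active video, so the drm_display_mode values are converted below
 * into sync-width and blank-start/blank-end positions, with interlace and
 * doublescan folded into the vertical numbers.
 */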
1137static int
1138nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
1139		   struct drm_display_mode *mode, int x, int y,
1140		   struct drm_framebuffer *old_fb)
1141{
1142	struct nv50_mast *mast = nv50_mast(crtc->dev);
1143	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1144	struct nouveau_connector *nv_connector;
1145	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
1146	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
1147	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
1148	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
1149	u32 vblan2e = 0, vblan2s = 1, vblankus = 0;
1150	u32 *push;
1151	int ret;
1152
1153	hactive = mode->htotal;
1154	hsynce  = mode->hsync_end - mode->hsync_start - 1;
1155	hbackp  = mode->htotal - mode->hsync_end;
1156	hblanke = hsynce + hbackp;
1157	hfrontp = mode->hsync_start - mode->hdisplay;
1158	hblanks = mode->htotal - hfrontp - 1;
1159
1160	vactive = mode->vtotal * vscan / ilace;
1161	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
1162	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
1163	vblanke = vsynce + vbackp;
1164	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
1165	vblanks = vactive - vfrontp - 1;
1166	/* XXX: Safe underestimate, even "0" works */
1167	vblankus = (vactive - mode->vdisplay - 2) * hactive;
1168	vblankus *= 1000;
1169	vblankus /= mode->clock;
1170
1171	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
1172		vblan2e = vactive + vsynce + vbackp;
1173		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
1174		vactive = (vactive * 2) + 1;
1175	}
1176
1177	ret = nv50_crtc_swap_fbs(crtc, old_fb);
1178	if (ret)
1179		return ret;
1180
1181	push = evo_wait(mast, 64);
1182	if (push) {
1183		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
1184			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
1185			evo_data(push, 0x00800000 | mode->clock);
1186			evo_data(push, (ilace == 2) ? 2 : 0);
1187			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
1188			evo_data(push, 0x00000000);
1189			evo_data(push, (vactive << 16) | hactive);
1190			evo_data(push, ( vsynce << 16) | hsynce);
1191			evo_data(push, (vblanke << 16) | hblanke);
1192			evo_data(push, (vblanks << 16) | hblanks);
1193			evo_data(push, (vblan2e << 16) | vblan2s);
1194			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
1195			evo_data(push, 0x00000000);
1196			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
1197			evo_data(push, 0x00000311);
1198			evo_data(push, 0x00000100);
1199		} else {
1200			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
1201			evo_data(push, 0x00000000);
1202			evo_data(push, (vactive << 16) | hactive);
1203			evo_data(push, ( vsynce << 16) | hsynce);
1204			evo_data(push, (vblanke << 16) | hblanke);
1205			evo_data(push, (vblanks << 16) | hblanks);
1206			evo_data(push, (vblan2e << 16) | vblan2s);
1207			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
1208			evo_data(push, 0x00000000); /* ??? */
1209			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
1210			evo_data(push, mode->clock * 1000);
1211			evo_data(push, 0x00200000); /* ??? */
1212			evo_data(push, mode->clock * 1000);
1213			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
1214			evo_data(push, 0x00000311);
1215			evo_data(push, 0x00000100);
1216		}
1217
1218		evo_kick(push, mast);
1219	}
1220
1221	nv_connector = nouveau_crtc_connector_get(nv_crtc);
1222	nv50_crtc_set_dither(nv_crtc, false);
1223	nv50_crtc_set_scale(nv_crtc, false);
1224
1225	/* G94 only accepts this after setting scale */
1226	if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA)
1227		nv50_crtc_set_raster_vblank_dmi(nv_crtc, vblankus);
1228
1229	nv50_crtc_set_color_vibrance(nv_crtc, false);
1230	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
1231	return 0;
1232}
1233
1234static int
1235nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
1236			struct drm_framebuffer *old_fb)
1237{
1238	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
1239	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1240	int ret;
1241
1242	if (!crtc->primary->fb) {
1243		NV_DEBUG(drm, "No FB bound\n");
1244		return 0;
1245	}
1246
1247	ret = nv50_crtc_swap_fbs(crtc, old_fb);
1248	if (ret)
1249		return ret;
1250
1251	nv50_display_flip_stop(crtc);
1252	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
1253	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
1254	return 0;
1255}
1256
1257static int
1258nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
1259			       struct drm_framebuffer *fb, int x, int y,
1260			       enum mode_set_atomic state)
1261{
1262	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1263	nv50_display_flip_stop(crtc);
1264	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
1265	return 0;
1266}
1267
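/* Pre-GF110 hardware packs 8-byte LUT entries with the raw 14-bit values;
 * GF110 and later use 32-byte entries and appear to want the values offset
 * by 0x6000.
 */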
1268static void
1269nv50_crtc_lut_load(struct drm_crtc *crtc)
1270{
1271	struct nv50_disp *disp = nv50_disp(crtc->dev);
1272	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1273	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
1274	int i;
1275
1276	for (i = 0; i < 256; i++) {
1277		u16 r = nv_crtc->lut.r[i] >> 2;
1278		u16 g = nv_crtc->lut.g[i] >> 2;
1279		u16 b = nv_crtc->lut.b[i] >> 2;
1280
1281		if (disp->disp->oclass < GF110_DISP) {
1282			writew(r + 0x0000, lut + (i * 0x08) + 0);
1283			writew(g + 0x0000, lut + (i * 0x08) + 2);
1284			writew(b + 0x0000, lut + (i * 0x08) + 4);
1285		} else {
1286			writew(r + 0x6000, lut + (i * 0x20) + 0);
1287			writew(g + 0x6000, lut + (i * 0x20) + 2);
1288			writew(b + 0x6000, lut + (i * 0x20) + 4);
1289		}
1290	}
1291}
1292
1293static void
1294nv50_crtc_disable(struct drm_crtc *crtc)
1295{
1296	struct nv50_head *head = nv50_head(crtc);
1297	evo_sync(crtc->dev);
1298	if (head->image)
1299		nouveau_bo_unpin(head->image);
1300	nouveau_bo_ref(NULL, &head->image);
1301}
1302
1303static int
1304nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
1305		     uint32_t handle, uint32_t width, uint32_t height)
1306{
1307	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1308	struct drm_device *dev = crtc->dev;
1309	struct drm_gem_object *gem = NULL;
1310	struct nouveau_bo *nvbo = NULL;
1311	int ret = 0;
1312
1313	if (handle) {
1314		if (width != 64 || height != 64)
1315			return -EINVAL;
1316
1317		gem = drm_gem_object_lookup(dev, file_priv, handle);
1318		if (unlikely(!gem))
1319			return -ENOENT;
1320		nvbo = nouveau_gem_object(gem);
1321
1322		ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM, true);
1323	}
1324
1325	if (ret == 0) {
1326		if (nv_crtc->cursor.nvbo)
1327			nouveau_bo_unpin(nv_crtc->cursor.nvbo);
1328		nouveau_bo_ref(nvbo, &nv_crtc->cursor.nvbo);
1329	}
1330	drm_gem_object_unreference_unlocked(gem);
1331
1332	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
1333	return ret;
1334}
1335
1336static int
1337nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
1338{
1339	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1340	struct nv50_curs *curs = nv50_curs(crtc);
1341	struct nv50_chan *chan = nv50_chan(curs);
1342	nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff));
1343	nvif_wr32(&chan->user, 0x0080, 0x00000000);
1344
1345	nv_crtc->cursor_saved_x = x;
1346	nv_crtc->cursor_saved_y = y;
1347	return 0;
1348}
1349
1350static void
1351nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1352		    uint32_t start, uint32_t size)
1353{
1354	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1355	u32 end = min_t(u32, start + size, 256);
1356	u32 i;
1357
1358	for (i = start; i < end; i++) {
1359		nv_crtc->lut.r[i] = r[i];
1360		nv_crtc->lut.g[i] = g[i];
1361		nv_crtc->lut.b[i] = b[i];
1362	}
1363
1364	nv50_crtc_lut_load(crtc);
1365}
1366
1367static void
1368nv50_crtc_cursor_restore(struct nouveau_crtc *nv_crtc, int x, int y)
1369{
1370	nv50_crtc_cursor_move(&nv_crtc->base, x, y);
1371
1372	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
1373}
1374
1375static void
1376nv50_crtc_destroy(struct drm_crtc *crtc)
1377{
1378	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1379	struct nv50_disp *disp = nv50_disp(crtc->dev);
1380	struct nv50_head *head = nv50_head(crtc);
1381	struct nv50_fbdma *fbdma;
1382
1383	list_for_each_entry(fbdma, &disp->fbdma, head) {
1384		nvif_object_fini(&fbdma->base[nv_crtc->index]);
1385	}
1386
1387	nv50_dmac_destroy(&head->ovly.base, disp->disp);
1388	nv50_pioc_destroy(&head->oimm.base);
1389	nv50_dmac_destroy(&head->sync.base, disp->disp);
1390	nv50_pioc_destroy(&head->curs.base);
1391
1392	/*XXX: this shouldn't be necessary, but the core doesn't call
1393	 *     disconnect() during the cleanup paths
1394	 */
1395	if (head->image)
1396		nouveau_bo_unpin(head->image);
1397	nouveau_bo_ref(NULL, &head->image);
1398
1399	/*XXX: ditto */
1400	if (nv_crtc->cursor.nvbo)
1401		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
1402	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
1403
1404	nouveau_bo_unmap(nv_crtc->lut.nvbo);
1405	if (nv_crtc->lut.nvbo)
1406		nouveau_bo_unpin(nv_crtc->lut.nvbo);
1407	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
1408
1409	drm_crtc_cleanup(crtc);
1410	kfree(crtc);
1411}
1412
1413static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
1414	.dpms = nv50_crtc_dpms,
1415	.prepare = nv50_crtc_prepare,
1416	.commit = nv50_crtc_commit,
1417	.mode_fixup = nv50_crtc_mode_fixup,
1418	.mode_set = nv50_crtc_mode_set,
1419	.mode_set_base = nv50_crtc_mode_set_base,
1420	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
1421	.load_lut = nv50_crtc_lut_load,
1422	.disable = nv50_crtc_disable,
1423};
1424
1425static const struct drm_crtc_funcs nv50_crtc_func = {
1426	.cursor_set = nv50_crtc_cursor_set,
1427	.cursor_move = nv50_crtc_cursor_move,
1428	.gamma_set = nv50_crtc_gamma_set,
1429	.set_config = nouveau_crtc_set_config,
1430	.destroy = nv50_crtc_destroy,
1431	.page_flip = nouveau_crtc_page_flip,
1432};
1433
1434static int
1435nv50_crtc_create(struct drm_device *dev, int index)
1436{
1437	struct nouveau_drm *drm = nouveau_drm(dev);
1438	struct nvif_device *device = &drm->device;
1439	struct nv50_disp *disp = nv50_disp(dev);
1440	struct nv50_head *head;
1441	struct drm_crtc *crtc;
1442	int ret, i;
1443
1444	head = kzalloc(sizeof(*head), GFP_KERNEL);
1445	if (!head)
1446		return -ENOMEM;
1447
1448	head->base.index = index;
1449	head->base.set_dither = nv50_crtc_set_dither;
1450	head->base.set_scale = nv50_crtc_set_scale;
1451	head->base.set_color_vibrance = nv50_crtc_set_color_vibrance;
1452	head->base.color_vibrance = 50;
1453	head->base.vibrant_hue = 0;
1454	head->base.cursor.set_pos = nv50_crtc_cursor_restore;
1455	for (i = 0; i < 256; i++) {
1456		head->base.lut.r[i] = i << 8;
1457		head->base.lut.g[i] = i << 8;
1458		head->base.lut.b[i] = i << 8;
1459	}
1460
1461	crtc = &head->base.base;
1462	drm_crtc_init(dev, crtc, &nv50_crtc_func);
1463	drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
1464	drm_mode_crtc_set_gamma_size(crtc, 256);
1465
1466	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
1467			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
1468	if (!ret) {
1469		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
1470		if (!ret) {
1471			ret = nouveau_bo_map(head->base.lut.nvbo);
1472			if (ret)
1473				nouveau_bo_unpin(head->base.lut.nvbo);
1474		}
1475		if (ret)
1476			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
1477	}
1478
1479	if (ret)
1480		goto out;
1481
1482	/* allocate cursor resources */
1483	ret = nv50_curs_create(device, disp->disp, index, &head->curs);
1484	if (ret)
1485		goto out;
1486
1487	/* allocate page flip / sync resources */
1488	ret = nv50_base_create(device, disp->disp, index, disp->sync->bo.offset,
1489			       &head->sync);
1490	if (ret)
1491		goto out;
1492
1493	head->sync.addr = EVO_FLIP_SEM0(index);
1494	head->sync.data = 0x00000000;
1495
1496	/* allocate overlay resources */
1497	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
1498	if (ret)
1499		goto out;
1500
1501	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
1502			       &head->ovly);
1503	if (ret)
1504		goto out;
1505
1506out:
1507	if (ret)
1508		nv50_crtc_destroy(crtc);
1509	return ret;
1510}
1511
1512/******************************************************************************
1513 * Encoder helpers
1514 *****************************************************************************/
1515static bool
1516nv50_encoder_mode_fixup(struct drm_encoder *encoder,
1517			const struct drm_display_mode *mode,
1518			struct drm_display_mode *adjusted_mode)
1519{
1520	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1521	struct nouveau_connector *nv_connector;
1522
1523	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1524	if (nv_connector && nv_connector->native_mode) {
1525		nv_connector->scaling_full = false;
1526		if (nv_connector->scaling_mode == DRM_MODE_SCALE_NONE) {
1527			switch (nv_connector->type) {
1528			case DCB_CONNECTOR_LVDS:
1529			case DCB_CONNECTOR_LVDS_SPWG:
1530			case DCB_CONNECTOR_eDP:
1531				/* force use of scaler for non-edid modes */
1532				if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
1533					return true;
1534				nv_connector->scaling_full = true;
1535				break;
1536			default:
1537				return true;
1538			}
1539		}
1540
1541		drm_mode_copy(adjusted_mode, nv_connector->native_mode);
1542	}
1543
1544	return true;
1545}
1546
1547/******************************************************************************
1548 * DAC
1549 *****************************************************************************/
1550static void
1551nv50_dac_dpms(struct drm_encoder *encoder, int mode)
1552{
1553	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1554	struct nv50_disp *disp = nv50_disp(encoder->dev);
1555	struct {
1556		struct nv50_disp_mthd_v1 base;
1557		struct nv50_disp_dac_pwr_v0 pwr;
1558	} args = {
1559		.base.version = 1,
1560		.base.method = NV50_DISP_MTHD_V1_DAC_PWR,
1561		.base.hasht  = nv_encoder->dcb->hasht,
1562		.base.hashm  = nv_encoder->dcb->hashm,
1563		.pwr.state = 1,
1564		.pwr.data  = 1,
1565		.pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
1566			      mode != DRM_MODE_DPMS_OFF),
1567		.pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
1568			      mode != DRM_MODE_DPMS_OFF),
1569	};
1570
1571	nvif_mthd(disp->disp, 0, &args, sizeof(args));
1572}
1573
1574static void
1575nv50_dac_commit(struct drm_encoder *encoder)
1576{
1577}
1578
1579static void
1580nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1581		  struct drm_display_mode *adjusted_mode)
1582{
1583	struct nv50_mast *mast = nv50_mast(encoder->dev);
1584	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1585	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1586	u32 *push;
1587
1588	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);
1589
1590	push = evo_wait(mast, 8);
1591	if (push) {
1592		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
1593			u32 syncs = 0x00000000;
1594
1595			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1596				syncs |= 0x00000001;
1597			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1598				syncs |= 0x00000002;
1599
1600			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
1601			evo_data(push, 1 << nv_crtc->index);
1602			evo_data(push, syncs);
1603		} else {
1604			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1605			u32 syncs = 0x00000001;
1606
1607			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1608				syncs |= 0x00000008;
1609			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1610				syncs |= 0x00000010;
1611
1612			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1613				magic |= 0x00000001;
1614
1615			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1616			evo_data(push, syncs);
1617			evo_data(push, magic);
1618			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
1619			evo_data(push, 1 << nv_crtc->index);
1620		}
1621
1622		evo_kick(push, mast);
1623	}
1624
1625	nv_encoder->crtc = encoder->crtc;
1626}
1627
1628static void
1629nv50_dac_disconnect(struct drm_encoder *encoder)
1630{
1631	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1632	struct nv50_mast *mast = nv50_mast(encoder->dev);
1633	const int or = nv_encoder->or;
1634	u32 *push;
1635
1636	if (nv_encoder->crtc) {
1637		nv50_crtc_prepare(nv_encoder->crtc);
1638
1639		push = evo_wait(mast, 4);
1640		if (push) {
1641			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
1642				evo_mthd(push, 0x0400 + (or * 0x080), 1);
1643				evo_data(push, 0x00000000);
1644			} else {
1645				evo_mthd(push, 0x0180 + (or * 0x020), 1);
1646				evo_data(push, 0x00000000);
1647			}
1648			evo_kick(push, mast);
1649		}
1650	}
1651
1652	nv_encoder->crtc = NULL;
1653}
1654
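/* Load-detect: drive the DAC with the VBIOS-provided test voltage (falling
 * back to 340 when the BIOS doesn't supply one) and report the connector as
 * connected only if the method sees a load on the outputs.
 */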
1655static enum drm_connector_status
1656nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
1657{
1658	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1659	struct nv50_disp *disp = nv50_disp(encoder->dev);
1660	struct {
1661		struct nv50_disp_mthd_v1 base;
1662		struct nv50_disp_dac_load_v0 load;
1663	} args = {
1664		.base.version = 1,
1665		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
1666		.base.hasht  = nv_encoder->dcb->hasht,
1667		.base.hashm  = nv_encoder->dcb->hashm,
1668	};
1669	int ret;
1670
1671	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
1672	if (args.load.data == 0)
1673		args.load.data = 340;
1674
1675	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
1676	if (ret || !args.load.load)
1677		return connector_status_disconnected;
1678
1679	return connector_status_connected;
1680}
1681
1682static void
1683nv50_dac_destroy(struct drm_encoder *encoder)
1684{
1685	drm_encoder_cleanup(encoder);
1686	kfree(encoder);
1687}
1688
1689static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
1690	.dpms = nv50_dac_dpms,
1691	.mode_fixup = nv50_encoder_mode_fixup,
1692	.prepare = nv50_dac_disconnect,
1693	.commit = nv50_dac_commit,
1694	.mode_set = nv50_dac_mode_set,
1695	.disable = nv50_dac_disconnect,
1696	.get_crtc = nv50_display_crtc_get,
1697	.detect = nv50_dac_detect
1698};
1699
1700static const struct drm_encoder_funcs nv50_dac_func = {
1701	.destroy = nv50_dac_destroy,
1702};
1703
1704static int
1705nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
1706{
1707	struct nouveau_drm *drm = nouveau_drm(connector->dev);
1708	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
1709	struct nvkm_i2c_bus *bus;
1710	struct nouveau_encoder *nv_encoder;
1711	struct drm_encoder *encoder;
1712	int type = DRM_MODE_ENCODER_DAC;
1713
1714	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1715	if (!nv_encoder)
1716		return -ENOMEM;
1717	nv_encoder->dcb = dcbe;
1718	nv_encoder->or = ffs(dcbe->or) - 1;
1719
1720	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
1721	if (bus)
1722		nv_encoder->i2c = &bus->i2c;
1723
1724	encoder = to_drm_encoder(nv_encoder);
1725	encoder->possible_crtcs = dcbe->heads;
1726	encoder->possible_clones = 0;
1727	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type, NULL);
1728	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);
1729
1730	drm_mode_connector_attach_encoder(connector, encoder);
1731	return 0;
1732}
1733
1734/******************************************************************************
1735 * Audio
1736 *****************************************************************************/
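/* If the sink advertises audio support, build an ELD from its EDID and hand
 * it to the SOR's HDA_ELD method so the audio function can expose the
 * sink's capabilities; nv50_audio_disconnect() below sends the same method
 * without an ELD payload to tear this down again.
 */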
1737static void
1738nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1739{
1740	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1741	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1742	struct nouveau_connector *nv_connector;
1743	struct nv50_disp *disp = nv50_disp(encoder->dev);
1744	struct __packed {
1745		struct {
1746			struct nv50_disp_mthd_v1 mthd;
1747			struct nv50_disp_sor_hda_eld_v0 eld;
1748		} base;
1749		u8 data[sizeof(nv_connector->base.eld)];
1750	} args = {
1751		.base.mthd.version = 1,
1752		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
1753		.base.mthd.hasht   = nv_encoder->dcb->hasht,
1754		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
1755				     (0x0100 << nv_crtc->index),
1756	};
1757
1758	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1759	if (!drm_detect_monitor_audio(nv_connector->edid))
1760		return;
1761
1762	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
1763	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));
1764
1765	nvif_mthd(disp->disp, 0, &args,
1766		  sizeof(args.base) + drm_eld_size(args.data));
1767}
1768
1769static void
1770nv50_audio_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
1771{
1772	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1773	struct nv50_disp *disp = nv50_disp(encoder->dev);
1774	struct {
1775		struct nv50_disp_mthd_v1 base;
1776		struct nv50_disp_sor_hda_eld_v0 eld;
1777	} args = {
1778		.base.version = 1,
1779		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
1780		.base.hasht   = nv_encoder->dcb->hasht,
1781		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
1782				(0x0100 << nv_crtc->index),
1783	};
1784
1785	nvif_mthd(disp->disp, 0, &args, sizeof(args));
1786}
1787
1788/******************************************************************************
1789 * HDMI
1790 *****************************************************************************/
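/* Enable HDMI protocol on the SOR via the SOR_HDMI_PWR method.  Only takes
 * effect when the EDID identifies an HDMI sink, and also triggers the audio
 * (ELD) programming above.
 */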
1791static void
1792nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1793{
1794	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1795	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1796	struct nv50_disp *disp = nv50_disp(encoder->dev);
1797	struct {
1798		struct nv50_disp_mthd_v1 base;
1799		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
1800	} args = {
1801		.base.version = 1,
1802		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
1803		.base.hasht  = nv_encoder->dcb->hasht,
1804		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
1805			       (0x0100 << nv_crtc->index),
1806		.pwr.state = 1,
1807		.pwr.rekey = 56, /* constant used by both the binary driver and Tegra */
1808	};
1809	struct nouveau_connector *nv_connector;
1810	u32 max_ac_packet;
1811
1812	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1813	if (!drm_detect_hdmi_monitor(nv_connector->edid))
1814		return;
1815
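	/* The audio/control packet budget is whatever remains of the
	 * horizontal blanking period after the rekey window and an 18-pixel
	 * margin, expressed in 32-pixel units.  E.g. for CEA 1080p60
	 * (htotal 2200, hdisplay 1920): 280 - 56 - 18 = 206, 206 / 32 = 6.
	 */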
1816	max_ac_packet  = mode->htotal - mode->hdisplay;
1817	max_ac_packet -= args.pwr.rekey;
1818	max_ac_packet -= 18; /* constant from tegra */
1819	args.pwr.max_ac_packet = max_ac_packet / 32;
1820
1821	nvif_mthd(disp->disp, 0, &args, sizeof(args));
1822	nv50_audio_mode_set(encoder, mode);
1823}
1824
1825static void
1826nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
1827{
1828	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1829	struct nv50_disp *disp = nv50_disp(encoder->dev);
1830	struct {
1831		struct nv50_disp_mthd_v1 base;
1832		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
1833	} args = {
1834		.base.version = 1,
1835		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
1836		.base.hasht  = nv_encoder->dcb->hasht,
1837		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
1838			       (0x0100 << nv_crtc->index),
1839	};
1840
1841	nvif_mthd(disp->disp, 0, &args, sizeof(args));
1842}
1843
1844/******************************************************************************
1845 * SOR
1846 *****************************************************************************/
1847static void
1848nv50_sor_dpms(struct drm_encoder *encoder, int mode)
1849{
1850	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1851	struct nv50_disp *disp = nv50_disp(encoder->dev);
1852	struct {
1853		struct nv50_disp_mthd_v1 base;
1854		struct nv50_disp_sor_pwr_v0 pwr;
1855	} args = {
1856		.base.version = 1,
1857		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
1858		.base.hasht  = nv_encoder->dcb->hasht,
1859		.base.hashm  = nv_encoder->dcb->hashm,
1860		.pwr.state = mode == DRM_MODE_DPMS_ON,
1861	};
1862	struct {
1863		struct nv50_disp_mthd_v1 base;
1864		struct nv50_disp_sor_dp_pwr_v0 pwr;
1865	} link = {
1866		.base.version = 1,
1867		.base.method = NV50_DISP_MTHD_V1_SOR_DP_PWR,
1868		.base.hasht  = nv_encoder->dcb->hasht,
1869		.base.hashm  = nv_encoder->dcb->hashm,
1870		.pwr.state = mode == DRM_MODE_DPMS_ON,
1871	};
1872	struct drm_device *dev = encoder->dev;
1873	struct drm_encoder *partner;
1874
1875	nv_encoder->last_dpms = mode;
1876
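	/* Don't actually power the OR down while another TMDS encoder that
	 * shares it is still enabled.
	 */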
1877	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
1878		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
1879
1880		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
1881			continue;
1882
1883		if (nv_partner != nv_encoder &&
1884		    nv_partner->dcb->or == nv_encoder->dcb->or) {
1885			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
1886				return;
1887			break;
1888		}
1889	}
1890
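	/* For DP the SOR itself stays powered up and the requested state is
	 * applied to the DP link instead; other outputs simply toggle SOR
	 * power.
	 */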
1891	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
1892		args.pwr.state = 1;
1893		nvif_mthd(disp->disp, 0, &args, sizeof(args));
1894		nvif_mthd(disp->disp, 0, &link, sizeof(link));
1895	} else {
1896		nvif_mthd(disp->disp, 0, &args, sizeof(args));
1897	}
1898}
1899
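/* Read-modify-write the cached SOR control word and, if it changed, push the
 * new value through the core channel.  The method offset and per-OR stride
 * differ between the original (0x0600 + or * 0x40) and GF110+ (0x0200 +
 * or * 0x20) core channel layouts.
 */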
1900static void
1901nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data)
1902{
1903	struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
1904	u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;
1905	if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
1906		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
1907			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
1908			evo_data(push, (nv_encoder->ctrl = temp));
1909		} else {
1910			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
1911			evo_data(push, (nv_encoder->ctrl = temp));
1912		}
1913		evo_kick(push, mast);
1914	}
1915}
1916
1917static void
1918nv50_sor_disconnect(struct drm_encoder *encoder)
1919{
1920	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1921	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1922
1923	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1924	nv_encoder->crtc = NULL;
1925
1926	if (nv_crtc) {
1927		nv50_crtc_prepare(&nv_crtc->base);
1928		nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
1929		nv50_audio_disconnect(encoder, nv_crtc);
1930		nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
1931	}
1932}
1933
1934static void
1935nv50_sor_commit(struct drm_encoder *encoder)
1936{
1937}
1938
1939static void
1940nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
1941		  struct drm_display_mode *mode)
1942{
1943	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1944	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1945	struct {
1946		struct nv50_disp_mthd_v1 base;
1947		struct nv50_disp_sor_lvds_script_v0 lvds;
1948	} lvds = {
1949		.base.version = 1,
1950		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
1951		.base.hasht   = nv_encoder->dcb->hasht,
1952		.base.hashm   = nv_encoder->dcb->hashm,
1953	};
1954	struct nv50_disp *disp = nv50_disp(encoder->dev);
1955	struct nv50_mast *mast = nv50_mast(encoder->dev);
1956	struct drm_device *dev = encoder->dev;
1957	struct nouveau_drm *drm = nouveau_drm(dev);
1958	struct nouveau_connector *nv_connector;
1959	struct nvbios *bios = &drm->vbios;
1960	u32 mask, ctrl;
1961	u8 owner = 1 << nv_crtc->index;
1962	u8 proto = 0xf;
1963	u8 depth = 0x0;
1964
1965	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1966	nv_encoder->crtc = encoder->crtc;
1967
1968	switch (nv_encoder->dcb->type) {
1969	case DCB_OUTPUT_TMDS:
1970		if (nv_encoder->dcb->sorconf.link & 1) {
1971			proto = 0x1;
1972			/* Only enable dual-link TMDS if:
1973			 *  - the pixel clock requires it (i.e. >= 165MHz)
1974			 *  - the DCB says the link is dual-link capable
1975			 *  - the sink is not an HDMI monitor, since there's
1976			 *    no dual-link on HDMI.
1977			 */
1978			if (mode->clock >= 165000 &&
1979			    nv_encoder->dcb->duallink_possible &&
1980			    !drm_detect_hdmi_monitor(nv_connector->edid))
1981				proto |= 0x4;
1982		} else {
1983			proto = 0x2;
1984		}
1985
1986		nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
1987		break;
1988	case DCB_OUTPUT_LVDS:
1989		proto = 0x0;
1990
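		/* Build the LVDS script selector: 0x0100 requests dual-link,
		 * 0x0200 requests 24bpp.  VBIOS straps are used when DDC is
		 * unavailable; otherwise the SPWG EDID byte or the dual-link
		 * transition clock decides.
		 */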
1991		if (bios->fp_no_ddc) {
1992			if (bios->fp.dual_link)
1993				lvds.lvds.script |= 0x0100;
1994			if (bios->fp.if_is_24bit)
1995				lvds.lvds.script |= 0x0200;
1996		} else {
1997			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1998				if (((u8 *)nv_connector->edid)[121] == 2)
1999					lvds.lvds.script |= 0x0100;
2000			} else
2001			if (mode->clock >= bios->fp.duallink_transition_clk) {
2002				lvds.lvds.script |= 0x0100;
2003			}
2004
2005			if (lvds.lvds.script & 0x0100) {
2006				if (bios->fp.strapless_is_24bit & 2)
2007					lvds.lvds.script |= 0x0200;
2008			} else {
2009				if (bios->fp.strapless_is_24bit & 1)
2010					lvds.lvds.script |= 0x0200;
2011			}
2012
2013			if (nv_connector->base.display_info.bpc == 8)
2014				lvds.lvds.script |= 0x0200;
2015		}
2016
2017		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
2018		break;
2019	case DCB_OUTPUT_DP:
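		/* mode->clock is in kHz, so datarate ends up in kilobytes per
		 * second of pixel data for the chosen depth (18, 24 or 30
		 * bits per pixel).
		 */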
2020		if (nv_connector->base.display_info.bpc == 6) {
2021			nv_encoder->dp.datarate = mode->clock * 18 / 8;
2022			depth = 0x2;
2023		} else
2024		if (nv_connector->base.display_info.bpc == 8) {
2025			nv_encoder->dp.datarate = mode->clock * 24 / 8;
2026			depth = 0x5;
2027		} else {
2028			nv_encoder->dp.datarate = mode->clock * 30 / 8;
2029			depth = 0x6;
2030		}
2031
2032		if (nv_encoder->dcb->sorconf.link & 1)
2033			proto = 0x8;
2034		else
2035			proto = 0x9;
2036		nv50_audio_mode_set(encoder, mode);
2037		break;
2038	default:
2039		BUG_ON(1);
2040		break;
2041	}
2042
2043	nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);
2044
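	/* On GF110 and newer, sync polarity, depth and interlacing are
	 * programmed through a per-head method pair (0x0404 + head * 0x300)
	 * rather than packed into the SOR control word, which then only
	 * carries the protocol.
	 */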
2045	if (nv50_vers(mast) >= GF110_DISP) {
2046		u32 *push = evo_wait(mast, 3);
2047		if (push) {
2048			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
2049			u32 syncs = 0x00000001;
2050
2051			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2052				syncs |= 0x00000008;
2053			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2054				syncs |= 0x00000010;
2055
2056			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2057				magic |= 0x00000001;
2058
2059			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
2060			evo_data(push, syncs | (depth << 6));
2061			evo_data(push, magic);
2062			evo_kick(push, mast);
2063		}
2064
2065		ctrl = proto << 8;
2066		mask = 0x00000f00;
2067	} else {
2068		ctrl = (depth << 16) | (proto << 8);
2069		if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2070			ctrl |= 0x00001000;
2071		if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2072			ctrl |= 0x00002000;
2073		mask = 0x000f3f00;
2074	}
2075
2076	nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
2077}
2078
2079static void
2080nv50_sor_destroy(struct drm_encoder *encoder)
2081{
2082	drm_encoder_cleanup(encoder);
2083	kfree(encoder);
2084}
2085
2086static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
2087	.dpms = nv50_sor_dpms,
2088	.mode_fixup = nv50_encoder_mode_fixup,
2089	.prepare = nv50_sor_disconnect,
2090	.commit = nv50_sor_commit,
2091	.mode_set = nv50_sor_mode_set,
2092	.disable = nv50_sor_disconnect,
2093	.get_crtc = nv50_display_crtc_get,
2094};
2095
2096static const struct drm_encoder_funcs nv50_sor_func = {
2097	.destroy = nv50_sor_destroy,
2098};
2099
2100static int
2101nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
2102{
2103	struct nouveau_drm *drm = nouveau_drm(connector->dev);
2104	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
2105	struct nouveau_encoder *nv_encoder;
2106	struct drm_encoder *encoder;
2107	int type;
2108
2109	switch (dcbe->type) {
2110	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
2111	case DCB_OUTPUT_TMDS:
2112	case DCB_OUTPUT_DP:
2113	default:
2114		type = DRM_MODE_ENCODER_TMDS;
2115		break;
2116	}
2117
2118	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2119	if (!nv_encoder)
2120		return -ENOMEM;
2121	nv_encoder->dcb = dcbe;
2122	nv_encoder->or = ffs(dcbe->or) - 1;
2123	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
2124
2125	if (dcbe->type == DCB_OUTPUT_DP) {
2126		struct nvkm_i2c_aux *aux =
2127			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
2128		if (aux) {
2129			nv_encoder->i2c = &aux->i2c;
2130			nv_encoder->aux = aux;
2131		}
2132	} else {
2133		struct nvkm_i2c_bus *bus =
2134			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
2135		if (bus)
2136			nv_encoder->i2c = &bus->i2c;
2137	}
2138
2139	encoder = to_drm_encoder(nv_encoder);
2140	encoder->possible_crtcs = dcbe->heads;
2141	encoder->possible_clones = 0;
2142	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type, NULL);
2143	drm_encoder_helper_add(encoder, &nv50_sor_hfunc);
2144
2145	drm_mode_connector_attach_encoder(connector, encoder);
2146	return 0;
2147}
2148
2149/******************************************************************************
2150 * PIOR
2151 *****************************************************************************/
2152
2153static void
2154nv50_pior_dpms(struct drm_encoder *encoder, int mode)
2155{
2156	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2157	struct nv50_disp *disp = nv50_disp(encoder->dev);
2158	struct {
2159		struct nv50_disp_mthd_v1 base;
2160		struct nv50_disp_pior_pwr_v0 pwr;
2161	} args = {
2162		.base.version = 1,
2163		.base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
2164		.base.hasht  = nv_encoder->dcb->hasht,
2165		.base.hashm  = nv_encoder->dcb->hashm,
2166		.pwr.state = mode == DRM_MODE_DPMS_ON,
2167		.pwr.type = nv_encoder->dcb->type,
2168	};
2169
2170	nvif_mthd(disp->disp, 0, &args, sizeof(args));
2171}
2172
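/* External (PIOR) encoders are driven at twice the mode's pixel clock, hence
 * the adjustment below.
 */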
2173static bool
2174nv50_pior_mode_fixup(struct drm_encoder *encoder,
2175		     const struct drm_display_mode *mode,
2176		     struct drm_display_mode *adjusted_mode)
2177{
2178	if (!nv50_encoder_mode_fixup(encoder, mode, adjusted_mode))
2179		return false;
2180	adjusted_mode->clock *= 2;
2181	return true;
2182}
2183
2184static void
2185nv50_pior_commit(struct drm_encoder *encoder)
2186{
2187}
2188
2189static void
2190nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
2191		   struct drm_display_mode *adjusted_mode)
2192{
2193	struct nv50_mast *mast = nv50_mast(encoder->dev);
2194	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2195	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2196	struct nouveau_connector *nv_connector;
2197	u8 owner = 1 << nv_crtc->index;
2198	u8 proto, depth;
2199	u32 *push;
2200
2201	nv_connector = nouveau_encoder_connector_get(nv_encoder);
2202	switch (nv_connector->base.display_info.bpc) {
2203	case 10: depth = 0x6; break;
2204	case  8: depth = 0x5; break;
2205	case  6: depth = 0x2; break;
2206	default: depth = 0x0; break;
2207	}
2208
2209	switch (nv_encoder->dcb->type) {
2210	case DCB_OUTPUT_TMDS:
2211	case DCB_OUTPUT_DP:
2212		proto = 0x0;
2213		break;
2214	default:
2215		BUG_ON(1);
2216		break;
2217	}
2218
2219	nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON);
2220
2221	push = evo_wait(mast, 8);
2222	if (push) {
2223		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2224			u32 ctrl = (depth << 16) | (proto << 8) | owner;
2225			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2226				ctrl |= 0x00001000;
2227			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2228				ctrl |= 0x00002000;
2229			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
2230			evo_data(push, ctrl);
2231		}
2232
2233		evo_kick(push, mast);
2234	}
2235
2236	nv_encoder->crtc = encoder->crtc;
2237}
2238
2239static void
2240nv50_pior_disconnect(struct drm_encoder *encoder)
2241{
2242	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2243	struct nv50_mast *mast = nv50_mast(encoder->dev);
2244	const int or = nv_encoder->or;
2245	u32 *push;
2246
2247	if (nv_encoder->crtc) {
2248		nv50_crtc_prepare(nv_encoder->crtc);
2249
2250		push = evo_wait(mast, 4);
2251		if (push) {
2252			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2253				evo_mthd(push, 0x0700 + (or * 0x040), 1);
2254				evo_data(push, 0x00000000);
2255			}
2256			evo_kick(push, mast);
2257		}
2258	}
2259
2260	nv_encoder->crtc = NULL;
2261}
2262
2263static void
2264nv50_pior_destroy(struct drm_encoder *encoder)
2265{
2266	drm_encoder_cleanup(encoder);
2267	kfree(encoder);
2268}
2269
2270static const struct drm_encoder_helper_funcs nv50_pior_hfunc = {
2271	.dpms = nv50_pior_dpms,
2272	.mode_fixup = nv50_pior_mode_fixup,
2273	.prepare = nv50_pior_disconnect,
2274	.commit = nv50_pior_commit,
2275	.mode_set = nv50_pior_mode_set,
2276	.disable = nv50_pior_disconnect,
2277	.get_crtc = nv50_display_crtc_get,
2278};
2279
2280static const struct drm_encoder_funcs nv50_pior_func = {
2281	.destroy = nv50_pior_destroy,
2282};
2283
2284static int
2285nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
2286{
2287	struct nouveau_drm *drm = nouveau_drm(connector->dev);
2288	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
2289	struct nvkm_i2c_bus *bus = NULL;
2290	struct nvkm_i2c_aux *aux = NULL;
2291	struct i2c_adapter *ddc;
2292	struct nouveau_encoder *nv_encoder;
2293	struct drm_encoder *encoder;
2294	int type;
2295
2296	switch (dcbe->type) {
2297	case DCB_OUTPUT_TMDS:
2298		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
2299		ddc  = bus ? &bus->i2c : NULL;
2300		type = DRM_MODE_ENCODER_TMDS;
2301		break;
2302	case DCB_OUTPUT_DP:
2303		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
2304		ddc  = aux ? &aux->i2c : NULL;
2305		type = DRM_MODE_ENCODER_TMDS;
2306		break;
2307	default:
2308		return -ENODEV;
2309	}
2310
2311	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2312	if (!nv_encoder)
2313		return -ENOMEM;
2314	nv_encoder->dcb = dcbe;
2315	nv_encoder->or = ffs(dcbe->or) - 1;
2316	nv_encoder->i2c = ddc;
2317	nv_encoder->aux = aux;
2318
2319	encoder = to_drm_encoder(nv_encoder);
2320	encoder->possible_crtcs = dcbe->heads;
2321	encoder->possible_clones = 0;
2322	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type, NULL);
2323	drm_encoder_helper_add(encoder, &nv50_pior_hfunc);
2324
2325	drm_mode_connector_attach_encoder(connector, encoder);
2326	return 0;
2327}
2328
2329/******************************************************************************
2330 * Framebuffer
2331 *****************************************************************************/
2332
2333static void
2334nv50_fbdma_fini(struct nv50_fbdma *fbdma)
2335{
2336	int i;
2337	for (i = 0; i < ARRAY_SIZE(fbdma->base); i++)
2338		nvif_object_fini(&fbdma->base[i]);
2339	nvif_object_fini(&fbdma->core);
2340	list_del(&fbdma->head);
2341	kfree(fbdma);
2342}
2343
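/* Create (or reuse) the DMA objects that let the core channel and each head's
 * base channel scan out of the given VRAM range.  The argument layout grows a
 * chipset-specific tail describing page size and memory kind.
 */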
2344static int
2345nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind)
2346{
2347	struct nouveau_drm *drm = nouveau_drm(dev);
2348	struct nv50_disp *disp = nv50_disp(dev);
2349	struct nv50_mast *mast = nv50_mast(dev);
2350	struct __attribute__ ((packed)) {
2351		struct nv_dma_v0 base;
2352		union {
2353			struct nv50_dma_v0 nv50;
2354			struct gf100_dma_v0 gf100;
2355			struct gf119_dma_v0 gf119;
2356		};
2357	} args = {};
2358	struct nv50_fbdma *fbdma;
2359	struct drm_crtc *crtc;
2360	u32 size = sizeof(args.base);
2361	int ret;
2362
2363	list_for_each_entry(fbdma, &disp->fbdma, head) {
2364		if (fbdma->core.handle == name)
2365			return 0;
2366	}
2367
2368	fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL);
2369	if (!fbdma)
2370		return -ENOMEM;
2371	list_add(&fbdma->head, &disp->fbdma);
2372
2373	args.base.target = NV_DMA_V0_TARGET_VRAM;
2374	args.base.access = NV_DMA_V0_ACCESS_RDWR;
2375	args.base.start = offset;
2376	args.base.limit = offset + length - 1;
2377
2378	if (drm->device.info.chipset < 0x80) {
2379		args.nv50.part = NV50_DMA_V0_PART_256;
2380		size += sizeof(args.nv50);
2381	} else
2382	if (drm->device.info.chipset < 0xc0) {
2383		args.nv50.part = NV50_DMA_V0_PART_256;
2384		args.nv50.kind = kind;
2385		size += sizeof(args.nv50);
2386	} else
2387	if (drm->device.info.chipset < 0xd0) {
2388		args.gf100.kind = kind;
2389		size += sizeof(args.gf100);
2390	} else {
2391		args.gf119.page = GF119_DMA_V0_PAGE_LP;
2392		args.gf119.kind = kind;
2393		size += sizeof(args.gf119);
2394	}
2395
2396	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
2397		struct nv50_head *head = nv50_head(crtc);
2398		int ret = nvif_object_init(&head->sync.base.base.user, name,
2399					   NV_DMA_IN_MEMORY, &args, size,
2400					   &fbdma->base[head->base.index]);
2401		if (ret) {
2402			nv50_fbdma_fini(fbdma);
2403			return ret;
2404		}
2405	}
2406
2407	ret = nvif_object_init(&mast->base.base.user, name, NV_DMA_IN_MEMORY,
2408			       &args, size, &fbdma->core);
2409	if (ret) {
2410		nv50_fbdma_fini(fbdma);
2411		return ret;
2412	}
2413
2414	return 0;
2415}
2416
2417static void
2418nv50_fb_dtor(struct drm_framebuffer *fb)
2419{
2420}
2421
2422static int
2423nv50_fb_ctor(struct drm_framebuffer *fb)
2424{
2425	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
2426	struct nouveau_drm *drm = nouveau_drm(fb->dev);
2427	struct nouveau_bo *nvbo = nv_fb->nvbo;
2428	struct nv50_disp *disp = nv50_disp(fb->dev);
2429	u8 kind = nouveau_bo_tile_layout(nvbo) >> 8;
2430	u8 tile = nvbo->tile_mode;
2431
2432	if (drm->device.info.chipset >= 0xc0)
2433		tile >>= 4; /* GF100+ only wants the high nibble of the tile mode */
2434
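	/* Translate the framebuffer depth into the display engine's surface
	 * format code.
	 */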
2435	switch (fb->depth) {
2436	case  8: nv_fb->r_format = 0x1e00; break;
2437	case 15: nv_fb->r_format = 0xe900; break;
2438	case 16: nv_fb->r_format = 0xe800; break;
2439	case 24:
2440	case 32: nv_fb->r_format = 0xcf00; break;
2441	case 30: nv_fb->r_format = 0xd100; break;
2442	default:
2443		 NV_ERROR(drm, "unknown depth %d\n", fb->depth);
2444		 return -EINVAL;
2445	}
2446
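	/* Tiled surfaces are described by pitch / 4 with the tile mode in the
	 * low nibble; linear surfaces set a layout flag whose bit position
	 * moved on GF110.
	 */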
2447	if (disp->disp->oclass < G82_DISP) {
2448		nv_fb->r_pitch   = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
2449					    (fb->pitches[0] | 0x00100000);
2450		nv_fb->r_format |= kind << 16;
2451	} else
2452	if (disp->disp->oclass < GF110_DISP) {
2453		nv_fb->r_pitch  = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
2454					   (fb->pitches[0] | 0x00100000);
2455	} else {
2456		nv_fb->r_pitch  = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
2457					   (fb->pitches[0] | 0x01000000);
2458	}
2459	nv_fb->r_handle = 0xffff0000 | kind;
2460
2461	return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0,
2462			       drm->device.info.ram_user, kind);
2463}
2464
2465/******************************************************************************
2466 * Init
2467 *****************************************************************************/
2468
2469void
2470nv50_display_fini(struct drm_device *dev)
2471{
2472}
2473
2474int
2475nv50_display_init(struct drm_device *dev)
2476{
2477	struct nv50_disp *disp = nv50_disp(dev);
2478	struct drm_crtc *crtc;
2479	u32 *push;
2480
2481	push = evo_wait(nv50_mast(dev), 32);
2482	if (!push)
2483		return -EBUSY;
2484
2485	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
2486		struct nv50_sync *sync = nv50_sync(crtc);
2487
2488		nv50_crtc_lut_load(crtc);
2489		nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data);
2490	}
2491
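	/* Bind the shared sync buffer as the core channel's notifier ctxdma
	 * (method 0x0088 takes its handle).
	 */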
2492	evo_mthd(push, 0x0088, 1);
2493	evo_data(push, nv50_mast(dev)->base.sync.handle);
2494	evo_kick(push, nv50_mast(dev));
2495	return 0;
2496}
2497
2498void
2499nv50_display_destroy(struct drm_device *dev)
2500{
2501	struct nv50_disp *disp = nv50_disp(dev);
2502	struct nv50_fbdma *fbdma, *fbtmp;
2503
2504	list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) {
2505		nv50_fbdma_fini(fbdma);
2506	}
2507
2508	nv50_dmac_destroy(&disp->mast.base, disp->disp);
2509
2510	nouveau_bo_unmap(disp->sync);
2511	if (disp->sync)
2512		nouveau_bo_unpin(disp->sync);
2513	nouveau_bo_ref(NULL, &disp->sync);
2514
2515	nouveau_display(dev)->priv = NULL;
2516	kfree(disp);
2517}
2518
2519int
2520nv50_display_create(struct drm_device *dev)
2521{
2522	struct nvif_device *device = &nouveau_drm(dev)->device;
2523	struct nouveau_drm *drm = nouveau_drm(dev);
2524	struct dcb_table *dcb = &drm->vbios.dcb;
2525	struct drm_connector *connector, *tmp;
2526	struct nv50_disp *disp;
2527	struct dcb_output *dcbe;
2528	int crtcs, ret, i;
2529
2530	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
2531	if (!disp)
2532		return -ENOMEM;
2533	INIT_LIST_HEAD(&disp->fbdma);
2534
2535	nouveau_display(dev)->priv = disp;
2536	nouveau_display(dev)->dtor = nv50_display_destroy;
2537	nouveau_display(dev)->init = nv50_display_init;
2538	nouveau_display(dev)->fini = nv50_display_fini;
2539	nouveau_display(dev)->fb_ctor = nv50_fb_ctor;
2540	nouveau_display(dev)->fb_dtor = nv50_fb_dtor;
2541	disp->disp = &nouveau_display(dev)->disp;
2542
2543	/* small shared memory area we use for notifiers and semaphores */
2544	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
2545			     0, 0x0000, NULL, NULL, &disp->sync);
2546	if (!ret) {
2547		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
2548		if (!ret) {
2549			ret = nouveau_bo_map(disp->sync);
2550			if (ret)
2551				nouveau_bo_unpin(disp->sync);
2552		}
2553		if (ret)
2554			nouveau_bo_ref(NULL, &disp->sync);
2555	}
2556
2557	if (ret)
2558		goto out;
2559
2560	/* allocate master evo channel */
2561	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
2562			      &disp->mast);
2563	if (ret)
2564		goto out;
2565
2566	/* create crtc objects to represent the hw heads */
2567	if (disp->disp->oclass >= GF110_DISP)
2568		crtcs = nvif_rd32(&device->object, 0x022448);
2569	else
2570		crtcs = 2;
2571
2572	for (i = 0; i < crtcs; i++) {
2573		ret = nv50_crtc_create(dev, i);
2574		if (ret)
2575			goto out;
2576	}
2577
2578	/* create encoder/connector objects based on VBIOS DCB table */
2579	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
2580		connector = nouveau_connector_create(dev, dcbe->connector);
2581		if (IS_ERR(connector))
2582			continue;
2583
2584		if (dcbe->location == DCB_LOC_ON_CHIP) {
2585			switch (dcbe->type) {
2586			case DCB_OUTPUT_TMDS:
2587			case DCB_OUTPUT_LVDS:
2588			case DCB_OUTPUT_DP:
2589				ret = nv50_sor_create(connector, dcbe);
2590				break;
2591			case DCB_OUTPUT_ANALOG:
2592				ret = nv50_dac_create(connector, dcbe);
2593				break;
2594			default:
2595				ret = -ENODEV;
2596				break;
2597			}
2598		} else {
2599			ret = nv50_pior_create(connector, dcbe);
2600		}
2601
2602		if (ret) {
2603			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
2604				     dcbe->location, dcbe->type,
2605				     ffs(dcbe->or) - 1, ret);
2606			ret = 0;
2607		}
2608	}
2609
2610	/* cull any connectors we created that don't have an encoder */
2611	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
2612		if (connector->encoder_ids[0])
2613			continue;
2614
2615		NV_WARN(drm, "%s has no encoders, removing\n",
2616			connector->name);
2617		connector->funcs->destroy(connector);
2618	}
2619
2620out:
2621	if (ret)
2622		nv50_display_destroy(dev);
2623	return ret;
2624}