Linux Audio

Check our new training course

Loading...
v3.1
   1/*
   2 * Copyright (C) 2008 Maarten Maathuis.
   3 * All Rights Reserved.
   4 *
   5 * Permission is hereby granted, free of charge, to any person obtaining
   6 * a copy of this software and associated documentation files (the
   7 * "Software"), to deal in the Software without restriction, including
   8 * without limitation the rights to use, copy, modify, merge, publish,
   9 * distribute, sublicense, and/or sell copies of the Software, and to
  10 * permit persons to whom the Software is furnished to do so, subject to
  11 * the following conditions:
  12 *
  13 * The above copyright notice and this permission notice (including the
  14 * next paragraph) shall be included in all copies or substantial
  15 * portions of the Software.
  16 *
  17 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
  18 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  19 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
  20 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
  21 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
  22 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
  23 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  24 *
 
  25 */
  26
  27#define NOUVEAU_DMA_DEBUG (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO)
  28#include "nv50_display.h"
  29#include "nouveau_crtc.h"
  30#include "nouveau_encoder.h"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
  31#include "nouveau_connector.h"
  32#include "nouveau_fb.h"
 
 
  33#include "nouveau_fbcon.h"
  34#include "nouveau_ramht.h"
  35#include "drm_crtc_helper.h"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
  36
  37static void nv50_display_isr(struct drm_device *);
  38static void nv50_display_bh(unsigned long);
  39
  40static inline int
  41nv50_sor_nr(struct drm_device *dev)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
  42{
  43	struct drm_nouveau_private *dev_priv = dev->dev_private;
 
 
 
  44
  45	if (dev_priv->chipset  < 0x90 ||
  46	    dev_priv->chipset == 0x92 ||
  47	    dev_priv->chipset == 0xa0)
  48		return 2;
 
 
 
 
 
 
 
 
 
 
 
 
 
  49
  50	return 4;
 
  51}
  52
/* Early display init hook; no early setup is required on nv50-class
 * hardware, so this is a no-op that reports success.
 */
int
nv50_display_early_init(struct drm_device *dev)
{
	return 0;
}
  58
/* Late teardown hook, counterpart to nv50_display_early_init();
 * intentionally empty — nothing to undo on nv50-class hardware.
 */
void
nv50_display_late_takedown(struct drm_device *dev)
{
}
  63
/* Bring the nv50 display engine up: latch per-unit configuration into
 * the 0x6101xx control area, power up the DACs and hardware cursors,
 * program the display interrupt enables and connector hotplug IRQs,
 * then initialise the EVO master channel and submit its initial state.
 *
 * Returns 0 on success, -EBUSY on a hardware wait timeout, or the
 * error code from nv50_evo_init()/RING_SPACE().
 */
int
nv50_display_init(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_gpio_engine *pgpio = &dev_priv->engine.gpio;
	struct drm_connector *connector;
	struct nouveau_channel *evo;
	int ret, i;
	u32 val;

	NV_DEBUG_KMS(dev, "\n");

	nv_wr32(dev, 0x00610184, nv_rd32(dev, 0x00614004));

	/*
	 * I think the 0x006101XX range is some kind of main control area
	 * that enables things.
	 */
	/* CRTC? — copy four words per head from the per-head 0x6161xx
	 * space into the control area. */
	for (i = 0; i < 2; i++) {
		val = nv_rd32(dev, 0x00616100 + (i * 0x800));
		nv_wr32(dev, 0x00610190 + (i * 0x10), val);
		val = nv_rd32(dev, 0x00616104 + (i * 0x800));
		nv_wr32(dev, 0x00610194 + (i * 0x10), val);
		val = nv_rd32(dev, 0x00616108 + (i * 0x800));
		nv_wr32(dev, 0x00610198 + (i * 0x10), val);
		val = nv_rd32(dev, 0x0061610c + (i * 0x800));
		nv_wr32(dev, 0x0061019c + (i * 0x10), val);
	}

	/* DAC */
	for (i = 0; i < 3; i++) {
		val = nv_rd32(dev, 0x0061a000 + (i * 0x800));
		nv_wr32(dev, 0x006101d0 + (i * 0x04), val);
	}

	/* SOR */
	for (i = 0; i < nv50_sor_nr(dev); i++) {
		val = nv_rd32(dev, 0x0061c000 + (i * 0x800));
		nv_wr32(dev, 0x006101e0 + (i * 0x04), val);
	}

	/* EXT */
	for (i = 0; i < 3; i++) {
		val = nv_rd32(dev, 0x0061e000 + (i * 0x800));
		nv_wr32(dev, 0x006101f0 + (i * 0x04), val);
	}

	/* Kick the DACs out of whatever DPMS state they were left in. */
	for (i = 0; i < 3; i++) {
		nv_wr32(dev, NV50_PDISPLAY_DAC_DPMS_CTRL(i), 0x00550000 |
			NV50_PDISPLAY_DAC_DPMS_CTRL_PENDING);
		nv_wr32(dev, NV50_PDISPLAY_DAC_CLK_CTRL1(i), 0x00000001);
	}

	/* The precise purpose is unknown, i suspect it has something to do
	 * with text mode.
	 */
	if (nv_rd32(dev, NV50_PDISPLAY_INTR_1) & 0x100) {
		nv_wr32(dev, NV50_PDISPLAY_INTR_1, 0x100);
		nv_wr32(dev, 0x006194e8, nv_rd32(dev, 0x006194e8) & ~1);
		if (!nv_wait(dev, 0x006194e8, 2, 0)) {
			NV_ERROR(dev, "timeout: (0x6194e8 & 2) != 0\n");
			NV_ERROR(dev, "0x6194e8 = 0x%08x\n",
						nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* Reset both hardware cursor channels: force them idle, then
	 * turn them back on and wait for them to report active. */
	for (i = 0; i < 2; i++) {
		nv_wr32(dev, NV50_PDISPLAY_CURSOR_CURSOR_CTRL2(i), 0x2000);
		if (!nv_wait(dev, NV50_PDISPLAY_CURSOR_CURSOR_CTRL2(i),
			     NV50_PDISPLAY_CURSOR_CURSOR_CTRL2_STATUS, 0)) {
			NV_ERROR(dev, "timeout: CURSOR_CTRL2_STATUS == 0\n");
			NV_ERROR(dev, "CURSOR_CTRL2 = 0x%08x\n",
				 nv_rd32(dev, NV50_PDISPLAY_CURSOR_CURSOR_CTRL2(i)));
			return -EBUSY;
		}

		nv_wr32(dev, NV50_PDISPLAY_CURSOR_CURSOR_CTRL2(i),
			NV50_PDISPLAY_CURSOR_CURSOR_CTRL2_ON);
		if (!nv_wait(dev, NV50_PDISPLAY_CURSOR_CURSOR_CTRL2(i),
			     NV50_PDISPLAY_CURSOR_CURSOR_CTRL2_STATUS,
			     NV50_PDISPLAY_CURSOR_CURSOR_CTRL2_STATUS_ACTIVE)) {
			NV_ERROR(dev, "timeout: "
				      "CURSOR_CTRL2_STATUS_ACTIVE(%d)\n", i);
			NV_ERROR(dev, "CURSOR_CTRL2(%d) = 0x%08x\n", i,
				 nv_rd32(dev, NV50_PDISPLAY_CURSOR_CURSOR_CTRL2(i)));
			return -EBUSY;
		}
	}

	/* Clear INTR_0 enables, then enable only the three CLK_UNKxx
	 * modeset-related interrupts on INTR_1. */
	nv_wr32(dev, NV50_PDISPLAY_PIO_CTRL, 0x00000000);
	nv_mask(dev, NV50_PDISPLAY_INTR_0, 0x00000000, 0x00000000);
	nv_wr32(dev, NV50_PDISPLAY_INTR_EN_0, 0x00000000);
	nv_mask(dev, NV50_PDISPLAY_INTR_1, 0x00000000, 0x00000000);
	nv_wr32(dev, NV50_PDISPLAY_INTR_EN_1,
		     NV50_PDISPLAY_INTR_EN_1_CLK_UNK10 |
		     NV50_PDISPLAY_INTR_EN_1_CLK_UNK20 |
		     NV50_PDISPLAY_INTR_EN_1_CLK_UNK40);

	/* enable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);

		/* 0xff marks a connector with no hotplug GPIO line */
		if (conn->dcb->gpio_tag == 0xff)
			continue;

		pgpio->irq_enable(dev, conn->dcb->gpio_tag, true);
	}

	ret = nv50_evo_init(dev);
	if (ret)
		return ret;
	evo = nv50_display(dev)->master;

	nv_wr32(dev, NV50_PDISPLAY_OBJECTS, (evo->ramin->vinst >> 8) | 9);

	/* Push the EVO channel's initial state: 15 words, matching the
	 * BEGIN_RING/OUT_RING sequence below. */
	ret = RING_SPACE(evo, 15);
	if (ret)
		return ret;
	BEGIN_RING(evo, 0, NV50_EVO_UNK84, 2);
	OUT_RING(evo, NV50_EVO_UNK84_NOTIFY_DISABLED);
	OUT_RING(evo, NvEvoSync);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(0, FB_DMA), 1);
	OUT_RING(evo, NV50_EVO_CRTC_FB_DMA_HANDLE_NONE);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(0, UNK0800), 1);
	OUT_RING(evo, 0);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(0, DISPLAY_START), 1);
	OUT_RING(evo, 0);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(0, UNK082C), 1);
	OUT_RING(evo, 0);
	/* required to make display sync channels not hate life */
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(0, UNK900), 1);
	OUT_RING  (evo, 0x00000311);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(1, UNK900), 1);
	OUT_RING  (evo, 0x00000311);
	FIRE_RING(evo);
	/* wait for the GPU's get pointer to catch up with our put */
	if (!nv_wait(dev, 0x640004, 0xffffffff, evo->dma.put << 2))
		NV_ERROR(dev, "evo pushbuf stalled\n");

	return 0;
}
 207
/* Quiesce the nv50 display engine: blank all CRTCs, flush one final
 * EVO update, ack any pending per-CRTC vblank state, shut down the EVO
 * channel and disable display + hotplug interrupts.
 *
 * Always returns 0; hardware wait timeouts are logged but not fatal.
 */
static int nv50_display_disable(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nv50_display *disp = nv50_display(dev);
	struct nouveau_channel *evo = disp->master;
	struct drm_crtc *drm_crtc;
	int ret, i;

	NV_DEBUG_KMS(dev, "\n");

	list_for_each_entry(drm_crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *crtc = nouveau_crtc(drm_crtc);

		nv50_crtc_blank(crtc, true);
	}

	/* Submit an UPDATE so the blanking above actually takes effect;
	 * if we can't get ring space, just flush what's already queued. */
	ret = RING_SPACE(evo, 2);
	if (ret == 0) {
		BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
		OUT_RING(evo, 0);
	}
	FIRE_RING(evo);

	/* Almost like ack'ing a vblank interrupt, maybe in the spirit of
	 * cleaning up?
	 */
	list_for_each_entry(drm_crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *crtc = nouveau_crtc(drm_crtc);
		uint32_t mask = NV50_PDISPLAY_INTR_1_VBLANK_CRTC_(crtc->index);

		if (!crtc->base.enabled)
			continue;

		/* write-1-to-clear, then wait for the bit to latch again */
		nv_wr32(dev, NV50_PDISPLAY_INTR_1, mask);
		if (!nv_wait(dev, NV50_PDISPLAY_INTR_1, mask, mask)) {
			NV_ERROR(dev, "timeout: (0x610024 & 0x%08x) == "
				      "0x%08x\n", mask, mask);
			NV_ERROR(dev, "0x610024 = 0x%08x\n",
				 nv_rd32(dev, NV50_PDISPLAY_INTR_1));
		}
	}

	nv50_evo_fini(dev);

	/* Wait for any in-flight SOR DPMS transitions to settle. */
	for (i = 0; i < 3; i++) {
		if (!nv_wait(dev, NV50_PDISPLAY_SOR_DPMS_STATE(i),
			     NV50_PDISPLAY_SOR_DPMS_STATE_WAIT, 0)) {
			NV_ERROR(dev, "timeout: SOR_DPMS_STATE_WAIT(%d) == 0\n", i);
			NV_ERROR(dev, "SOR_DPMS_STATE(%d) = 0x%08x\n", i,
				  nv_rd32(dev, NV50_PDISPLAY_SOR_DPMS_STATE(i)));
		}
	}

	/* disable interrupts. */
	nv_wr32(dev, NV50_PDISPLAY_INTR_EN_1, 0x00000000);

	/* disable hotplug interrupts (second register pair only exists
	 * on >= 0x90 chipsets) */
	nv_wr32(dev, 0xe054, 0xffffffff);
	nv_wr32(dev, 0xe050, 0x00000000);
	if (dev_priv->chipset >= 0x90) {
		nv_wr32(dev, 0xe074, 0xffffffff);
		nv_wr32(dev, 0xe070, 0x00000000);
	}
	return 0;
}
 273
/* Create and initialise KMS state for an nv50-class GPU: allocate the
 * nv50_display structure, set up the DRM mode config, instantiate the
 * two CRTCs, build encoders/connectors from the VBIOS DCB table, prune
 * connectors that ended up with no encoder, hook up the display IRQ,
 * and finally bring up the hardware via nv50_display_init().
 *
 * Returns 0 on success or a negative errno; on nv50_display_init()
 * failure everything created here is torn down again.
 */
int nv50_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *ct;
	struct nv50_display *priv;
	int ret, i;

	NV_DEBUG_KMS(dev, "\n");

	priv = kzalloc(sizeof(*priv), GFP_KERNEL);
	if (!priv)
		return -ENOMEM;
	dev_priv->engine.display.priv = priv;

	/* init basic kernel modesetting */
	drm_mode_config_init(dev);

	/* Initialise some optional connector properties. */
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dithering_property(dev);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;

	dev->mode_config.funcs = (void *)&nouveau_mode_config_funcs;

	dev->mode_config.max_width = 8192;
	dev->mode_config.max_height = 8192;

	dev->mode_config.fb_base = dev_priv->fb_phys;

	/* Create CRTC objects */
	for (i = 0; i < 2; i++)
		nv50_crtc_create(dev, i);

	/* We setup the encoders from the BIOS table */
	for (i = 0 ; i < dcb->entries; i++) {
		struct dcb_entry *entry = &dcb->entry[i];

		if (entry->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "Off-chip encoder %d/%d unsupported\n",
				entry->type, ffs(entry->or) - 1);
			continue;
		}

		/* connector creation failure is non-fatal; the DCB entry
		 * is simply skipped */
		connector = nouveau_connector_create(dev, entry->connector);
		if (IS_ERR(connector))
			continue;

		switch (entry->type) {
		case OUTPUT_TMDS:
		case OUTPUT_LVDS:
		case OUTPUT_DP:
			nv50_sor_create(connector, entry);
			break;
		case OUTPUT_ANALOG:
			nv50_dac_create(connector, entry);
			break;
		default:
			NV_WARN(dev, "DCB encoder %d unknown\n", entry->type);
			continue;
		}
	}

	/* Drop connectors whose DCB entries produced no usable encoder. */
	list_for_each_entry_safe(connector, ct,
				 &dev->mode_config.connector_list, head) {
		if (!connector->encoder_ids[0]) {
			NV_WARN(dev, "%s has no encoders, removing\n",
				drm_get_connector_name(connector));
			connector->funcs->destroy(connector);
		}
	}

	/* bottom half runs the deferred modeset IRQ work; IRQ 26 is the
	 * display interrupt line on this hardware */
	tasklet_init(&priv->tasklet, nv50_display_bh, (unsigned long)dev);
	nouveau_irq_register(dev, 26, nv50_display_isr);

	ret = nv50_display_init(dev);
	if (ret) {
		nv50_display_destroy(dev);
		return ret;
	}

	return 0;
}
 359
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 360void
 361nv50_display_destroy(struct drm_device *dev)
 362{
 363	struct nv50_display *disp = nv50_display(dev);
 
 
 
 364
 365	NV_DEBUG_KMS(dev, "\n");
 
 
 
 
 
 366
 367	drm_mode_config_cleanup(dev);
 
 
 368
 369	nv50_display_disable(dev);
 370	nouveau_irq_unregister(dev, 26);
 371	kfree(disp);
 372}
 373
 374void
 375nv50_display_flip_stop(struct drm_crtc *crtc)
 376{
 377	struct nv50_display *disp = nv50_display(crtc->dev);
 378	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
 379	struct nv50_display_crtc *dispc = &disp->crtc[nv_crtc->index];
 380	struct nouveau_channel *evo = dispc->sync;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 381	int ret;
 382
 383	ret = RING_SPACE(evo, 8);
 384	if (ret) {
 385		WARN_ON(1);
 386		return;
 
 
 
 
 
 
 
 
 387	}
 388
 389	BEGIN_RING(evo, 0, 0x0084, 1);
 390	OUT_RING  (evo, 0x00000000);
 391	BEGIN_RING(evo, 0, 0x0094, 1);
 392	OUT_RING  (evo, 0x00000000);
 393	BEGIN_RING(evo, 0, 0x00c0, 1);
 394	OUT_RING  (evo, 0x00000000);
 395	BEGIN_RING(evo, 0, 0x0080, 1);
 396	OUT_RING  (evo, 0x00000000);
 397	FIRE_RING (evo);
 398}
 399
/* Queue a page flip to framebuffer @fb on @crtc's display-sync EVO
 * channel. If @chan is non-NULL the flip is synchronised against that
 * rendering channel with a semaphore handshake (the EVO channel waits
 * for 0xf00dxxxx written by @chan, then releases with 0x74b1e000);
 * otherwise the "acquire" value is written directly so the flip can
 * proceed immediately.
 *
 * Returns 0 on success or a negative errno from RING_SPACE().
 */
int
nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct nouveau_channel *chan)
{
	struct drm_nouveau_private *dev_priv = crtc->dev->dev_private;
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nv50_display *disp = nv50_display(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_display_crtc *dispc = &disp->crtc[nv_crtc->index];
	struct nouveau_channel *evo = dispc->sync;
	int ret;

	ret = RING_SPACE(evo, chan ? 25 : 27);
	if (unlikely(ret))
		return ret;

	/* synchronise with the rendering channel, if necessary */
	if (likely(chan)) {
		ret = RING_SPACE(chan, 10);
		if (ret) {
			/* rewind the words already reserved on evo */
			WIND_RING(evo);
			return ret;
		}

		if (dev_priv->chipset < 0xc0) {
			/* pre-fermi: software-method semaphore release at
			 * sem.offset, then re-point to offset^0x10 for the
			 * EVO side's release */
			BEGIN_RING(chan, NvSubSw, 0x0060, 2);
			OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
			OUT_RING  (chan, dispc->sem.offset);
			BEGIN_RING(chan, NvSubSw, 0x006c, 1);
			OUT_RING  (chan, 0xf00d0000 | dispc->sem.value);
			BEGIN_RING(chan, NvSubSw, 0x0064, 2);
			OUT_RING  (chan, dispc->sem.offset ^ 0x10);
			OUT_RING  (chan, 0x74b1e000);
			BEGIN_RING(chan, NvSubSw, 0x0060, 1);
			if (dev_priv->chipset < 0x84)
				OUT_RING  (chan, NvSema);
			else
				OUT_RING  (chan, chan->vram_handle);
		} else {
			/* fermi: semaphore via virtual address; 0x1002 =
			 * release, 0x1001 = acquire-equal (as used here) */
			u64 offset = chan->dispc_vma[nv_crtc->index].offset;
			offset += dispc->sem.offset;
			BEGIN_NVC0(chan, 2, NvSubM2MF, 0x0010, 4);
			OUT_RING  (chan, upper_32_bits(offset));
			OUT_RING  (chan, lower_32_bits(offset));
			OUT_RING  (chan, 0xf00d0000 | dispc->sem.value);
			OUT_RING  (chan, 0x1002);
			BEGIN_NVC0(chan, 2, NvSubM2MF, 0x0010, 4);
			OUT_RING  (chan, upper_32_bits(offset));
			OUT_RING  (chan, lower_32_bits(offset ^ 0x10));
			OUT_RING  (chan, 0x74b1e000);
			OUT_RING  (chan, 0x1001);
		}
		FIRE_RING (chan);
	} else {
		/* no rendering channel: satisfy the semaphore ourselves */
		nouveau_bo_wr32(dispc->sem.bo, dispc->sem.offset / 4,
				0xf00d0000 | dispc->sem.value);
	}

	/* queue the flip on the crtc's "display sync" channel */
	BEGIN_RING(evo, 0, 0x0100, 1);
	OUT_RING  (evo, 0xfffe0000);
	if (chan) {
		BEGIN_RING(evo, 0, 0x0084, 1);
		OUT_RING  (evo, 0x00000100);
	} else {
		BEGIN_RING(evo, 0, 0x0084, 1);
		OUT_RING  (evo, 0x00000010);
		/* allows gamma somehow, PDISP will bitch at you if
		 * you don't wait for vblank before changing this..
		 */
		BEGIN_RING(evo, 0, 0x00e0, 1);
		OUT_RING  (evo, 0x40000000);
	}
	BEGIN_RING(evo, 0, 0x0088, 4);
	OUT_RING  (evo, dispc->sem.offset);
	OUT_RING  (evo, 0xf00d0000 | dispc->sem.value);
	OUT_RING  (evo, 0x74b1e000);
	OUT_RING  (evo, NvEvoSync);
	BEGIN_RING(evo, 0, 0x00a0, 2);
	OUT_RING  (evo, 0x00000000);
	OUT_RING  (evo, 0x00000000);
	BEGIN_RING(evo, 0, 0x00c0, 1);
	OUT_RING  (evo, nv_fb->r_dma);
	BEGIN_RING(evo, 0, 0x0110, 2);
	OUT_RING  (evo, 0x00000000);
	OUT_RING  (evo, 0x00000000);
	/* surface address, dimensions, pitch and format of the new fb */
	BEGIN_RING(evo, 0, 0x0800, 5);
	OUT_RING  (evo, nv_fb->nvbo->bo.offset >> 8);
	OUT_RING  (evo, 0);
	OUT_RING  (evo, (fb->height << 16) | fb->width);
	OUT_RING  (evo, nv_fb->r_pitch);
	OUT_RING  (evo, nv_fb->r_format);
	BEGIN_RING(evo, 0, 0x0080, 1);
	OUT_RING  (evo, 0x00000000);
	FIRE_RING (evo);

	/* flip between the two semaphore slots for the next request */
	dispc->sem.offset ^= 0x10;
	dispc->sem.value++;
	return 0;
}
 500
/* Select the VBIOS display script id to run for DCB entry @dcb given
 * the mode-control value @mc and pixel clock @pxclk (kHz).
 *
 * The base script comes from bits 8-11 of @mc; for LVDS/TMDS extra
 * flag bits are OR'd in: 0x0100 for dual-link, 0x0200 for a 24-bit
 * panel. The nouveau_uscript_lvds/tmds module options, when >= 0,
 * override the computed value entirely.
 */
static u16
nv50_display_script_select(struct drm_device *dev, struct dcb_entry *dcb,
			   u32 mc, int pxclk)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_connector *nv_connector = NULL;
	struct drm_encoder *encoder;
	struct nvbios *bios = &dev_priv->vbios;
	u32 script = 0, or;

	/* find the connector attached to the encoder that owns @dcb */
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);

		if (nv_encoder->dcb != dcb)
			continue;

		nv_connector = nouveau_encoder_connector_get(nv_encoder);
		break;
	}

	or = ffs(dcb->or) - 1;
	switch (dcb->type) {
	case OUTPUT_LVDS:
		script = (mc >> 8) & 0xf;
		if (bios->fp_no_ddc) {
			/* no DDC: trust the VBIOS flat-panel flags */
			if (bios->fp.dual_link)
				script |= 0x0100;
			if (bios->fp.if_is_24bit)
				script |= 0x0200;
		} else {
			/* determine number of lvds links */
			if (nv_connector && nv_connector->edid &&
			    nv_connector->dcb->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* http://www.spwg.org */
				if (((u8 *)nv_connector->edid)[121] == 2)
					script |= 0x0100;
			} else
			if (pxclk >= bios->fp.duallink_transition_clk) {
				script |= 0x0100;
			}

			/* determine panel depth */
			if (script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					script |= 0x0200;
			}

			/* EDID 1.4+ digital input with >= 8 bits/colour */
			if (nv_connector && nv_connector->edid &&
			    (nv_connector->edid->revision >= 4) &&
			    (nv_connector->edid->input & 0x70) >= 0x20)
				script |= 0x0200;
		}

		if (nouveau_uscript_lvds >= 0) {
			NV_INFO(dev, "override script 0x%04x with 0x%04x "
				     "for output LVDS-%d\n", script,
				     nouveau_uscript_lvds, or);
			script = nouveau_uscript_lvds;
		}
		break;
	case OUTPUT_TMDS:
		script = (mc >> 8) & 0xf;
		/* single-link TMDS tops out at 165MHz */
		if (pxclk >= 165000)
			script |= 0x0100;

		if (nouveau_uscript_tmds >= 0) {
			NV_INFO(dev, "override script 0x%04x with 0x%04x "
				     "for output TMDS-%d\n", script,
				     nouveau_uscript_tmds, or);
			script = nouveau_uscript_tmds;
		}
		break;
	case OUTPUT_DP:
		script = (mc >> 8) & 0xf;
		break;
	case OUTPUT_ANALOG:
		script = 0xff;
		break;
	default:
		NV_ERROR(dev, "modeset on unsupported output type!\n");
		break;
	}

	return script;
}
 589
 
 
 
 
 
 
 
 
 590static void
 591nv50_display_vblank_crtc_handler(struct drm_device *dev, int crtc)
 592{
 593	struct drm_nouveau_private *dev_priv = dev->dev_private;
 594	struct nouveau_channel *chan, *tmp;
 
 
 
 595
 596	list_for_each_entry_safe(chan, tmp, &dev_priv->vbl_waiting,
 597				 nvsw.vbl_wait) {
 598		if (chan->nvsw.vblsem_head != crtc)
 599			continue;
 600
 601		nouveau_bo_wr32(chan->notifier_bo, chan->nvsw.vblsem_offset,
 602						chan->nvsw.vblsem_rval);
 603		list_del(&chan->nvsw.vbl_wait);
 604		drm_vblank_put(dev, crtc);
 
 
 
 
 
 
 
 
 
 
 
 
 
 605	}
 606
 607	drm_handle_vblank(dev, crtc);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 608}
 609
 
 
 
 610static void
 611nv50_display_vblank_handler(struct drm_device *dev, uint32_t intr)
 612{
 613	if (intr & NV50_PDISPLAY_INTR_1_VBLANK_CRTC_0)
 614		nv50_display_vblank_crtc_handler(dev, 0);
 
 
 
 
 
 
 
 
 
 
 
 615
 616	if (intr & NV50_PDISPLAY_INTR_1_VBLANK_CRTC_1)
 617		nv50_display_vblank_crtc_handler(dev, 1);
 618
 619	nv_wr32(dev, NV50_PDISPLAY_INTR_1, NV50_PDISPLAY_INTR_1_VBLANK_CRTC);
 
 
 
 
 
 
 
 
 
 620}
 621
/* First stage of the three-stage modeset interrupt sequence
 * (CLK_UNK10): decode 0x610030 to find which CRTC/encoder is being
 * reconfigured, run the VBIOS display-table "off" script (-1) for the
 * matching DCB entry, and remember that entry in disp->irq.dcb for the
 * unk20 stage. Always acks the interrupt before returning.
 */
static void
nv50_display_unk10_handler(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nv50_display *disp = nv50_display(dev);
	u32 unk30 = nv_rd32(dev, 0x610030), mc;
	int i, crtc, or, type = OUTPUT_ANY;

	NV_DEBUG_KMS(dev, "0x610030: 0x%08x\n", unk30);
	disp->irq.dcb = NULL;

	nv_wr32(dev, 0x619494, nv_rd32(dev, 0x619494) & ~8);

	/* Determine which CRTC we're dealing with, only 1 ever will be
	 * signalled at the same time with the current nouveau code.
	 */
	crtc = ffs((unk30 & 0x00000060) >> 5) - 1;
	if (crtc < 0)
		goto ack;

	/* Nothing needs to be done for the encoder */
	crtc = ffs((unk30 & 0x00000180) >> 7) - 1;
	if (crtc < 0)
		goto ack;

	/* Find which encoder was connected to the CRTC */
	for (i = 0; type == OUTPUT_ANY && i < 3; i++) {
		mc = nv_rd32(dev, NV50_PDISPLAY_DAC_MODE_CTRL_C(i));
		NV_DEBUG_KMS(dev, "DAC-%d mc: 0x%08x\n", i, mc);
		/* bit <crtc> set means this DAC drives that head */
		if (!(mc & (1 << crtc)))
			continue;

		switch ((mc & 0x00000f00) >> 8) {
		case 0: type = OUTPUT_ANALOG; break;
		case 1: type = OUTPUT_TV; break;
		default:
			NV_ERROR(dev, "invalid mc, DAC-%d: 0x%08x\n", i, mc);
			goto ack;
		}

		or = i;
	}

	/* same search over the SORs; register layout moved on >= 0x90
	 * chipsets (except 0x92/0xa0) */
	for (i = 0; type == OUTPUT_ANY && i < nv50_sor_nr(dev); i++) {
		if (dev_priv->chipset  < 0x90 ||
		    dev_priv->chipset == 0x92 ||
		    dev_priv->chipset == 0xa0)
			mc = nv_rd32(dev, NV50_PDISPLAY_SOR_MODE_CTRL_C(i));
		else
			mc = nv_rd32(dev, NV90_PDISPLAY_SOR_MODE_CTRL_C(i));

		NV_DEBUG_KMS(dev, "SOR-%d mc: 0x%08x\n", i, mc);
		if (!(mc & (1 << crtc)))
			continue;

		switch ((mc & 0x00000f00) >> 8) {
		case 0: type = OUTPUT_LVDS; break;
		case 1: type = OUTPUT_TMDS; break;
		case 2: type = OUTPUT_TMDS; break;
		case 5: type = OUTPUT_TMDS; break;
		case 8: type = OUTPUT_DP; break;
		case 9: type = OUTPUT_DP; break;
		default:
			NV_ERROR(dev, "invalid mc, SOR-%d: 0x%08x\n", i, mc);
			goto ack;
		}

		or = i;
	}

	/* There was no encoder to disable */
	if (type == OUTPUT_ANY)
		goto ack;

	/* Disable the encoder */
	for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
		struct dcb_entry *dcb = &dev_priv->vbios.dcb.entry[i];

		if (dcb->type == type && (dcb->or & (1 << or))) {
			/* pclk -1 = run the "turn off" script */
			nouveau_bios_run_display_table(dev, dcb, 0, -1);
			disp->irq.dcb = dcb;
			goto ack;
		}
	}

	NV_ERROR(dev, "no dcb for %d %d 0x%08x\n", or, type, mc);
ack:
	nv_wr32(dev, NV50_PDISPLAY_INTR_1, NV50_PDISPLAY_INTR_1_CLK_UNK10);
	nv_wr32(dev, 0x610030, 0x80000000);
}
 712
 
 
 
 
 713static void
 714nv50_display_unk20_dp_hack(struct drm_device *dev, struct dcb_entry *dcb)
 715{
 716	int or = ffs(dcb->or) - 1, link = !(dcb->dpconf.sor.link & 1);
 
 
 717	struct drm_encoder *encoder;
 718	uint32_t tmp, unk0 = 0, unk1 = 0;
 719
 720	if (dcb->type != OUTPUT_DP)
 721		return;
 722
 723	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
 724		struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
 725
 726		if (nv_encoder->dcb == dcb) {
 727			unk0 = nv_encoder->dp.unk0;
 728			unk1 = nv_encoder->dp.unk1;
 729			break;
 
 730		}
 731	}
 732
 733	if (unk0 || unk1) {
 734		tmp  = nv_rd32(dev, NV50_SOR_DP_CTRL(or, link));
 735		tmp &= 0xfffffe03;
 736		nv_wr32(dev, NV50_SOR_DP_CTRL(or, link), tmp | unk0);
 
 
 
 
 
 
 
 
 
 
 
 737
 738		tmp  = nv_rd32(dev, NV50_SOR_DP_UNK128(or, link));
 739		tmp &= 0xfef080c0;
 740		nv_wr32(dev, NV50_SOR_DP_UNK128(or, link), tmp | unk1);
 
 
 
 741	}
 742}
 743
/* Second stage of the display reclocking state machine, run from the
 * display tasklet when the "unk20" bit of PDISPLAY_INTR_1 fires.
 *
 * Reprograms the CRTC pixel clock if one was requested, works out which
 * encoder (DAC or SOR) is driving the affected CRTC, runs the matching
 * VBIOS display script, and stashes the chosen DCB entry/pclk/script in
 * disp->irq for the "unk40" stage to finish with.
 *
 * The 0x610030 register and several others here are not publicly
 * documented; the bitfield meanings below follow reverse-engineered
 * behaviour and should be treated as provisional.
 */
static void
nv50_display_unk20_handler(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nv50_display *disp = nv50_display(dev);
	u32 unk30 = nv_rd32(dev, 0x610030), tmp, pclk, script, mc = 0;
	struct dcb_entry *dcb;
	int i, crtc, or, type = OUTPUT_ANY;

	NV_DEBUG_KMS(dev, "0x610030: 0x%08x\n", unk30);
	/* finish off the previous output first, if the unk10 stage left one */
	dcb = disp->irq.dcb;
	if (dcb) {
		/* script id 0, pclk -2: presumably a "disable" invocation of
		 * the VBIOS table — TODO confirm against nouveau_bios docs */
		nouveau_bios_run_display_table(dev, dcb, 0, -2);
		disp->irq.dcb = NULL;
	}

	/* CRTC clock change requested? */
	crtc = ffs((unk30 & 0x00000600) >> 9) - 1;
	if (crtc >= 0) {
		pclk  = nv_rd32(dev, NV50_PDISPLAY_CRTC_P(crtc, CLOCK));
		pclk &= 0x003fffff;

		nv50_crtc_set_clock(dev, crtc, pclk);

		/* clear the low nibble of the CRTC clock control register */
		tmp = nv_rd32(dev, NV50_PDISPLAY_CRTC_CLK_CTRL2(crtc));
		tmp &= ~0x000000f;
		nv_wr32(dev, NV50_PDISPLAY_CRTC_CLK_CTRL2(crtc), tmp);
	}

	/* Nothing needs to be done for the encoder */
	crtc = ffs((unk30 & 0x00000180) >> 7) - 1;
	if (crtc < 0)
		goto ack;
	pclk  = nv_rd32(dev, NV50_PDISPLAY_CRTC_P(crtc, CLOCK)) & 0x003fffff;

	/* Find which encoder is connected to the CRTC */
	for (i = 0; type == OUTPUT_ANY && i < 3; i++) {
		mc = nv_rd32(dev, NV50_PDISPLAY_DAC_MODE_CTRL_P(i));
		NV_DEBUG_KMS(dev, "DAC-%d mc: 0x%08x\n", i, mc);
		/* bit <crtc> set means this DAC drives that CRTC */
		if (!(mc & (1 << crtc)))
			continue;

		switch ((mc & 0x00000f00) >> 8) {
		case 0: type = OUTPUT_ANALOG; break;
		case 1: type = OUTPUT_TV; break;
		default:
			NV_ERROR(dev, "invalid mc, DAC-%d: 0x%08x\n", i, mc);
			goto ack;
		}

		or = i;
	}

	/* no DAC matched; scan the SORs the same way */
	for (i = 0; type == OUTPUT_ANY && i < nv50_sor_nr(dev); i++) {
		/* pre-nv90 chipsets (and a couple of outliers) keep the SOR
		 * mode-control registers at a different location */
		if (dev_priv->chipset  < 0x90 ||
		    dev_priv->chipset == 0x92 ||
		    dev_priv->chipset == 0xa0)
			mc = nv_rd32(dev, NV50_PDISPLAY_SOR_MODE_CTRL_P(i));
		else
			mc = nv_rd32(dev, NV90_PDISPLAY_SOR_MODE_CTRL_P(i));

		NV_DEBUG_KMS(dev, "SOR-%d mc: 0x%08x\n", i, mc);
		if (!(mc & (1 << crtc)))
			continue;

		switch ((mc & 0x00000f00) >> 8) {
		case 0: type = OUTPUT_LVDS; break;
		case 1: type = OUTPUT_TMDS; break;
		case 2: type = OUTPUT_TMDS; break;
		case 5: type = OUTPUT_TMDS; break;
		case 8: type = OUTPUT_DP; break;
		case 9: type = OUTPUT_DP; break;
		default:
			NV_ERROR(dev, "invalid mc, SOR-%d: 0x%08x\n", i, mc);
			goto ack;
		}

		or = i;
	}

	if (type == OUTPUT_ANY)
		goto ack;

	/* Enable the encoder */
	for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
		dcb = &dev_priv->vbios.dcb.entry[i];
		if (dcb->type == type && (dcb->or & (1 << or)))
			break;
	}

	if (i == dev_priv->vbios.dcb.entries) {
		NV_ERROR(dev, "no dcb for %d %d 0x%08x\n", or, type, mc);
		goto ack;
	}

	script = nv50_display_script_select(dev, dcb, mc, pclk);
	nouveau_bios_run_display_table(dev, dcb, script, pclk);

	nv50_display_unk20_dp_hack(dev, dcb);

	if (dcb->type != OUTPUT_ANALOG) {
		tmp = nv_rd32(dev, NV50_PDISPLAY_SOR_CLK_CTRL2(or));
		tmp &= ~0x00000f0f;
		/* script bit 8 selects an alternate clock routing? the same
		 * bit pattern is mirrored into both nibbles */
		if (script & 0x0100)
			tmp |= 0x00000101;
		nv_wr32(dev, NV50_PDISPLAY_SOR_CLK_CTRL2(or), tmp);
	} else {
		nv_wr32(dev, NV50_PDISPLAY_DAC_CLK_CTRL2(or), 0);
	}

	/* hand the chosen output on to the unk40 stage */
	disp->irq.dcb = dcb;
	disp->irq.pclk = pclk;
	disp->irq.script = script;

ack:
	/* acknowledge the interrupt and re-arm 0x610030 */
	nv_wr32(dev, NV50_PDISPLAY_INTR_1, NV50_PDISPLAY_INTR_1_CLK_UNK20);
	nv_wr32(dev, 0x610030, 0x80000000);
}
 862
 863/* If programming a TMDS output on a SOR that can also be configured for
 864 * DisplayPort, make sure NV50_SOR_DP_CTRL_ENABLE is forced off.
 865 *
 866 * It looks like the VBIOS TMDS scripts make an attempt at this, however,
 867 * the VBIOS scripts on at least one board I have only switch it off on
 868 * link 0, causing a blank display if the output has previously been
 869 * programmed for DisplayPort.
 870 */
 871static void
 872nv50_display_unk40_dp_set_tmds(struct drm_device *dev, struct dcb_entry *dcb)
 873{
 874	int or = ffs(dcb->or) - 1, link = !(dcb->dpconf.sor.link & 1);
 875	struct drm_encoder *encoder;
 876	u32 tmp;
 
 
 
 
 
 
 
 
 
 
 877
 878	if (dcb->type != OUTPUT_TMDS)
 879		return;
 
 880
 881	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
 882		struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
 883
 884		if (nv_encoder->dcb->type == OUTPUT_DP &&
 885		    nv_encoder->dcb->or & (1 << or)) {
 886			tmp  = nv_rd32(dev, NV50_SOR_DP_CTRL(or, link));
 887			tmp &= ~NV50_SOR_DP_CTRL_ENABLED;
 888			nv_wr32(dev, NV50_SOR_DP_CTRL(or, link), tmp);
 889			break;
 890		}
 
 
 
 
 
 
 
 
 
 
 
 
 
 891	}
 
 
 892}
 893
 894static void
 895nv50_display_unk40_handler(struct drm_device *dev)
 896{
 897	struct nv50_display *disp = nv50_display(dev);
 898	struct dcb_entry *dcb = disp->irq.dcb;
 899	u16 script = disp->irq.script;
 900	u32 unk30 = nv_rd32(dev, 0x610030), pclk = disp->irq.pclk;
 901
 902	NV_DEBUG_KMS(dev, "0x610030: 0x%08x\n", unk30);
 903	disp->irq.dcb = NULL;
 904	if (!dcb)
 905		goto ack;
 906
 907	nouveau_bios_run_display_table(dev, dcb, script, -pclk);
 908	nv50_display_unk40_dp_set_tmds(dev, dcb);
 909
 910ack:
 911	nv_wr32(dev, NV50_PDISPLAY_INTR_1, NV50_PDISPLAY_INTR_1_CLK_UNK40);
 912	nv_wr32(dev, 0x610030, 0x80000000);
 913	nv_wr32(dev, 0x619494, nv_rd32(dev, 0x619494) | 8);
 914}
 915
 916static void
 917nv50_display_bh(unsigned long data)
 918{
 919	struct drm_device *dev = (struct drm_device *)data;
 
 
 
 920
 921	for (;;) {
 922		uint32_t intr0 = nv_rd32(dev, NV50_PDISPLAY_INTR_0);
 923		uint32_t intr1 = nv_rd32(dev, NV50_PDISPLAY_INTR_1);
 
 
 
 
 
 
 
 
 
 924
 925		NV_DEBUG_KMS(dev, "PDISPLAY_INTR_BH 0x%08x 0x%08x\n", intr0, intr1);
 
 
 
 
 
 
 
 
 
 
 
 
 
 926
 927		if (intr1 & NV50_PDISPLAY_INTR_1_CLK_UNK10)
 928			nv50_display_unk10_handler(dev);
 929		else
 930		if (intr1 & NV50_PDISPLAY_INTR_1_CLK_UNK20)
 931			nv50_display_unk20_handler(dev);
 932		else
 933		if (intr1 & NV50_PDISPLAY_INTR_1_CLK_UNK40)
 934			nv50_display_unk40_handler(dev);
 935		else
 936			break;
 
 
 937	}
 938
 939	nv_wr32(dev, NV03_PMC_INTR_EN_0, 1);
 
 
 
 
 
 940}
 941
 942static void
 943nv50_display_error_handler(struct drm_device *dev)
 944{
 945	u32 channels = (nv_rd32(dev, NV50_PDISPLAY_INTR_0) & 0x001f0000) >> 16;
 946	u32 addr, data;
 947	int chid;
 
 
 
 
 
 
 
 
 
 948
 949	for (chid = 0; chid < 5; chid++) {
 950		if (!(channels & (1 << chid)))
 951			continue;
 
 
 
 
 
 
 
 
 
 
 952
 953		nv_wr32(dev, NV50_PDISPLAY_INTR_0, 0x00010000 << chid);
 954		addr = nv_rd32(dev, NV50_PDISPLAY_TRAPPED_ADDR(chid));
 955		data = nv_rd32(dev, NV50_PDISPLAY_TRAPPED_DATA(chid));
 956		NV_ERROR(dev, "EvoCh %d Mthd 0x%04x Data 0x%08x "
 957			      "(0x%04x 0x%02x)\n", chid,
 958			 addr & 0xffc, data, addr >> 16, (addr >> 12) & 0xf);
 959
 960		nv_wr32(dev, NV50_PDISPLAY_TRAPPED_ADDR(chid), 0x90000000);
 
 
 
 
 961	}
 
 
 962}
 963
/* Top-half display interrupt handler.
 *
 * Loops while PMC reports a pending display interrupt, dispatching:
 *  - EVO channel errors (intr0 bits 16..20),
 *  - vblank events,
 *  - the reclocking "clock" interrupts, which are NOT handled here:
 *    PMC interrupt delivery is disabled and the tasklet scheduled instead.
 *    'delayed' remembers those bits so the loop's exit test ignores them
 *    (they stay asserted until the tasklet acknowledges them).
 * Anything left over is logged and blindly acknowledged.
 */
static void
nv50_display_isr(struct drm_device *dev)
{
	struct nv50_display *disp = nv50_display(dev);
	uint32_t delayed = 0;

	while (nv_rd32(dev, NV50_PMC_INTR_0) & NV50_PMC_INTR_0_DISPLAY) {
		uint32_t intr0 = nv_rd32(dev, NV50_PDISPLAY_INTR_0);
		uint32_t intr1 = nv_rd32(dev, NV50_PDISPLAY_INTR_1);
		uint32_t clock;

		NV_DEBUG_KMS(dev, "PDISPLAY_INTR 0x%08x 0x%08x\n", intr0, intr1);

		/* only tasklet-deferred bits remain: nothing more to do */
		if (!intr0 && !(intr1 & ~delayed))
			break;

		if (intr0 & 0x001f0000) {
			nv50_display_error_handler(dev);
			intr0 &= ~0x001f0000;
		}

		if (intr1 & NV50_PDISPLAY_INTR_1_VBLANK_CRTC) {
			nv50_display_vblank_handler(dev, intr1);
			intr1 &= ~NV50_PDISPLAY_INTR_1_VBLANK_CRTC;
		}

		clock = (intr1 & (NV50_PDISPLAY_INTR_1_CLK_UNK10 |
				  NV50_PDISPLAY_INTR_1_CLK_UNK20 |
				  NV50_PDISPLAY_INTR_1_CLK_UNK40));
		if (clock) {
			/* mask PMC interrupts until the tasklet has drained
			 * the reclock sequence (it re-enables them) */
			nv_wr32(dev, NV03_PMC_INTR_EN_0, 0);
			tasklet_schedule(&disp->tasklet);
			delayed |= clock;
			intr1 &= ~clock;
		}

		if (intr0) {
			NV_ERROR(dev, "unknown PDISPLAY_INTR_0: 0x%08x\n", intr0);
			nv_wr32(dev, NV50_PDISPLAY_INTR_0, intr0);
		}

		if (intr1) {
			NV_ERROR(dev,
				 "unknown PDISPLAY_INTR_1: 0x%08x\n", intr1);
			nv_wr32(dev, NV50_PDISPLAY_INTR_1, intr1);
		}
	}
}
v4.10.11
   1/*
   2 * Copyright 2011 Red Hat Inc.
 
   3 *
   4 * Permission is hereby granted, free of charge, to any person obtaining a
   5 * copy of this software and associated documentation files (the "Software"),
   6 * to deal in the Software without restriction, including without limitation
   7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   8 * and/or sell copies of the Software, and to permit persons to whom the
   9 * Software is furnished to do so, subject to the following conditions:
 
  10 *
  11 * The above copyright notice and this permission notice shall be included in
  12 * all copies or substantial portions of the Software.
 
  13 *
  14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  20 * OTHER DEALINGS IN THE SOFTWARE.
  21 *
  22 * Authors: Ben Skeggs
  23 */
  24
  25#include <linux/dma-mapping.h>
  26
  27#include <drm/drmP.h>
  28#include <drm/drm_atomic.h>
  29#include <drm/drm_atomic_helper.h>
  30#include <drm/drm_crtc_helper.h>
  31#include <drm/drm_dp_helper.h>
  32#include <drm/drm_fb_helper.h>
  33#include <drm/drm_plane_helper.h>
  34
  35#include <nvif/class.h>
  36#include <nvif/cl0002.h>
  37#include <nvif/cl5070.h>
  38#include <nvif/cl507a.h>
  39#include <nvif/cl507b.h>
  40#include <nvif/cl507c.h>
  41#include <nvif/cl507d.h>
  42#include <nvif/cl507e.h>
  43#include <nvif/event.h>
  44
  45#include "nouveau_drv.h"
  46#include "nouveau_dma.h"
  47#include "nouveau_gem.h"
  48#include "nouveau_connector.h"
  49#include "nouveau_encoder.h"
  50#include "nouveau_crtc.h"
  51#include "nouveau_fence.h"
  52#include "nouveau_fbcon.h"
  53#include "nv50_display.h"
  54
#define EVO_DMA_NR 9

/* EVO channel indices: one master (core) channel, then per-head base
 * (flip), overlay, overlay-immediate and cursor channels for head 'c'. */
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
#define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20)
#define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30)
  70
/******************************************************************************
 * Atomic state
 *****************************************************************************/
#define nv50_atom(p) container_of((p), struct nv50_atom, state)

/* Driver-private wrapper around the DRM atomic state for one commit. */
struct nv50_atom {
	struct drm_atomic_state state;

	struct list_head outp;  /* nv50_outp_atom entries touched by commit */
	bool lock_core;
	bool flush_disable;
};
  83
/* Per-output (encoder) state tracked across an atomic commit.  The clr/set
 * unions record, both as named bits and as a whole mask, which hardware
 * state must be cleared or (re)programmed for this output. */
struct nv50_outp_atom {
	struct list_head head;  /* entry in nv50_atom.outp */

	struct drm_encoder *encoder;
	bool flush_disable;

	union {
		struct {
			bool ctrl:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool ctrl:1;
		};
		u8 mask;
	} set;
};
 104
#define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)

/* Per-head (CRTC) atomic state: everything needed to program one display
 * head through the core EVO channel.  The trailing clr/set unions record
 * which state groups must be cleared or (re)written during commit. */
struct nv50_head_atom {
	struct drm_crtc_state state;

	/* scaler viewport: input (i) and output (o) dimensions */
	struct {
		u16 iW;
		u16 iH;
		u16 oW;
		u16 oH;
	} view;

	/* raw display timings derived from the DRM mode */
	struct nv50_head_mode {
		bool interlace;
		u32 clock;
		struct {
			u16 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
		} h;	/* horizontal timings */
		struct {
			u32 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
			u16 blank2s;	/* second-field blanking (interlace) */
			u16 blank2e;
			u16 blankus;	/* presumably blanking in us — verify */
		} v;	/* vertical timings */
	} mode;

	/* gamma LUT: ctxdma handle + 40-bit offset */
	struct {
		u32 handle;
		u64 offset:40;
	} lut;

	/* core channel scanout surface */
	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  format;
		u8  kind:7;	/* memory/tiling kind */
		u8  layout:1;
		u8  block:4;
		u32 pitch:20;
		u16 x;
		u16 y;
		u16 w;
		u16 h;
	} core;

	/* hardware cursor */
	struct {
		bool visible;
		u32 handle;
		u64 offset:40;
		u8  layout:1;
		u8  format:1;
	} curs;

	/* base (primary) plane parameters mirrored into the core channel */
	struct {
		u8  depth;
		u8  cpp;
		u16 x;
		u16 y;
		u16 w;
		u16 h;
	} base;

	/* overlay plane */
	struct {
		u8 cpp;
	} ovly;

	/* dithering configuration */
	struct {
		bool enable:1;
		u8 bits:2;
		u8 mode:4;
	} dither;

	/* procamp: saturation encoded as a cos/sin pair */
	struct {
		struct {
			u16 cos:12;
			u16 sin:12;
		} sat;
	} procamp;

	union {
		struct {
			bool core:1;
			bool curs:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool core:1;
			bool curs:1;
			bool view:1;
			bool mode:1;
			bool base:1;
			bool ovly:1;
			bool dither:1;
			bool procamp:1;
		};
		u16 mask;
	} set;
};
 213
 214static inline struct nv50_head_atom *
 215nv50_head_atom_get(struct drm_atomic_state *state, struct drm_crtc *crtc)
 216{
 217	struct drm_crtc_state *statec = drm_atomic_get_crtc_state(state, crtc);
 218	if (IS_ERR(statec))
 219		return (void *)statec;
 220	return nv50_head_atom(statec);
 221}
 222
#define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state)

/* Per-plane ("window") atomic state. */
struct nv50_wndw_atom {
	struct drm_plane_state state;
	u8 interval;	/* requested flip interval (0 = async/immediate) */

	struct drm_rect clip;

	/* completion notifier in the shared sync bo */
	struct {
		u32  handle;
		u16  offset:12;
		bool awaken:1;
	} ntfy;

	/* semaphore used to synchronise flips */
	struct {
		u32 handle;
		u16 offset:12;
		u32 acquire;
		u32 release;
	} sema;

	struct {
		u8 enable:2;
	} lut;

	/* scanout image; mode/interval are derived in
	 * nv50_wndw_atomic_check_acquire() from 'interval' above */
	struct {
		u8  mode:2;
		u8  interval:4;

		u8  format;
		u8  kind:7;	/* memory/tiling kind */
		u8  layout:1;
		u8  block:4;
		u32 pitch:20;
		u16 w;
		u16 h;

		u32 handle;
		u64 offset;
	} image;

	/* window (e.g. cursor) position */
	struct {
		u16 x;
		u16 y;
	} point;

	/* which state to clear / program at commit time */
	union {
		struct {
			bool ntfy:1;
			bool sema:1;
			bool image:1;
		};
		u8 mask;
	} clr;

	union {
		struct {
			bool ntfy:1;
			bool sema:1;
			bool image:1;
			bool lut:1;
			bool point:1;
		};
		u8 mask;
	} set;
};
 289
/******************************************************************************
 * EVO channel
 *****************************************************************************/

/* Base object shared by all EVO display channels (PIO and DMA alike);
 * 'user' is mapped at creation so registers can be accessed directly. */
struct nv50_chan {
	struct nvif_object user;
	struct nvif_device *device;
};
 298
 299static int
 300nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
 301		 const s32 *oclass, u8 head, void *data, u32 size,
 302		 struct nv50_chan *chan)
 303{
 304	struct nvif_sclass *sclass;
 305	int ret, i, n;
 306
 307	chan->device = device;
 308
 309	ret = n = nvif_object_sclass_get(disp, &sclass);
 310	if (ret < 0)
 311		return ret;
 312
 313	while (oclass[0]) {
 314		for (i = 0; i < n; i++) {
 315			if (sclass[i].oclass == oclass[0]) {
 316				ret = nvif_object_init(disp, 0, oclass[0],
 317						       data, size, &chan->user);
 318				if (ret == 0)
 319					nvif_object_map(&chan->user);
 320				nvif_object_sclass_put(&sclass);
 321				return ret;
 322			}
 323		}
 324		oclass++;
 325	}
 326
 327	nvif_object_sclass_put(&sclass);
 328	return -ENOSYS;
 329}
 330
/* Release the nvif object backing an EVO channel. */
static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}
 336
/******************************************************************************
 * PIO EVO channel
 *****************************************************************************/

/* PIO (programmed-I/O, no push buffer) EVO channel. */
struct nv50_pioc {
	struct nv50_chan base;
};
 344
/* Tear down a PIO EVO channel. */
static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}
 350
/* Create a PIO EVO channel; thin wrapper over nv50_chan_create(). */
static int
nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_pioc *pioc)
{
	return nv50_chan_create(device, disp, oclass, head, data, size,
				&pioc->base);
}
 359
/******************************************************************************
 * Overlay Immediate
 *****************************************************************************/

/* Overlay-immediate channel (PIO). */
struct nv50_oimm {
	struct nv50_pioc base;
};
 367
/* Create the overlay-immediate channel for 'head'.  The class list is
 * ordered newest-first; the first one the display supports wins (see
 * nv50_chan_create). */
static int
nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_oimm *oimm)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY,
		GF110_DISP_OVERLAY,
		GT214_DISP_OVERLAY,
		G82_DISP_OVERLAY,
		NV50_DISP_OVERLAY,
		0
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}
 387
/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

/* A cached per-channel DMA object ("ctxdma") describing framebuffer
 * memory; kept on nv50_dmac.ctxdma and keyed by its object handle. */
struct nv50_dmac_ctxdma {
	struct list_head head;
	struct nvif_object object;
};
 396
/* DMA (push-buffer driven) EVO channel. */
struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;	/* DMA address of the push buffer page */
	u32 *ptr;		/* CPU mapping of the push buffer page */

	struct nvif_object sync;	/* ctxdma for the shared sync bo */
	struct nvif_object vram;	/* ctxdma covering all of VRAM */
	struct list_head ctxdma;	/* cached nv50_dmac_ctxdma objects */

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};
 411
/* Destroy a cached ctxdma: release the object, unlink it from the
 * channel's cache list, and free it. */
static void
nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma *ctxdma)
{
	nvif_object_fini(&ctxdma->object);
	list_del(&ctxdma->head);
	kfree(ctxdma);
}
 419
/* Look up — or create and cache — a DMA object describing VRAM with the
 * given framebuffer's tiling kind, for use by this EVO channel.  Objects
 * are keyed by handle (0xfb000000 | kind).  Returns the ctxdma or an
 * ERR_PTR on allocation/object-creation failure. */
static struct nv50_dmac_ctxdma *
nv50_dmac_ctxdma_new(struct nv50_dmac *dmac, struct nouveau_framebuffer *fb)
{
	struct nouveau_drm *drm = nouveau_drm(fb->base.dev);
	struct nv50_dmac_ctxdma *ctxdma;
	const u8    kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;
	const u32 handle = 0xfb000000 | kind;
	struct {
		struct nv_dma_v0 base;
		union {
			struct nv50_dma_v0 nv50;
			struct gf100_dma_v0 gf100;
			struct gf119_dma_v0 gf119;
		};
	} args = {};
	u32 argc = sizeof(args.base);
	int ret;

	/* reuse an existing object with the same kind if we have one */
	list_for_each_entry(ctxdma, &dmac->ctxdma, head) {
		if (ctxdma->object.handle == handle)
			return ctxdma;
	}

	if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
		return ERR_PTR(-ENOMEM);
	list_add(&ctxdma->head, &dmac->ctxdma);

	args.base.target = NV_DMA_V0_TARGET_VRAM;
	args.base.access = NV_DMA_V0_ACCESS_RDWR;
	args.base.start  = 0;
	args.base.limit  = drm->device.info.ram_user - 1;

	/* the DMA-object argument layout differs per GPU generation */
	if (drm->device.info.chipset < 0x80) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		argc += sizeof(args.nv50);
	} else
	if (drm->device.info.chipset < 0xc0) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		args.nv50.kind = kind;
		argc += sizeof(args.nv50);
	} else
	if (drm->device.info.chipset < 0xd0) {
		args.gf100.kind = kind;
		argc += sizeof(args.gf100);
	} else {
		args.gf119.page = GF119_DMA_V0_PAGE_LP;
		args.gf119.kind = kind;
		argc += sizeof(args.gf119);
	}

	ret = nvif_object_init(&dmac->base.user, handle, NV_DMA_IN_MEMORY,
			       &args, argc, &ctxdma->object);
	if (ret) {
		nv50_dmac_ctxdma_del(ctxdma);
		return ERR_PTR(ret);
	}

	return ctxdma;
}
 479
 480static void
 481nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
 482{
 483	struct nvif_device *device = dmac->base.device;
 484	struct nv50_dmac_ctxdma *ctxdma, *ctxtmp;
 485
 486	list_for_each_entry_safe(ctxdma, ctxtmp, &dmac->ctxdma, head) {
 487		nv50_dmac_ctxdma_del(ctxdma);
 488	}
 489
 490	nvif_object_fini(&dmac->vram);
 491	nvif_object_fini(&dmac->sync);
 492
 493	nv50_chan_destroy(&dmac->base);
 494
 495	if (dmac->ptr) {
 496		struct device *dev = nvxx_device(device)->dev;
 497		dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
 498	}
 499}
 500
 501static int
 502nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
 503		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
 504		 struct nv50_dmac *dmac)
 505{
 506	struct nv50_disp_core_channel_dma_v0 *args = data;
 507	struct nvif_object pushbuf;
 508	int ret;
 509
 510	mutex_init(&dmac->lock);
 511
 512	dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
 513				       &dmac->handle, GFP_KERNEL);
 514	if (!dmac->ptr)
 515		return -ENOMEM;
 516
 517	ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
 518			       &(struct nv_dma_v0) {
 519					.target = NV_DMA_V0_TARGET_PCI_US,
 520					.access = NV_DMA_V0_ACCESS_RD,
 521					.start = dmac->handle + 0x0000,
 522					.limit = dmac->handle + 0x0fff,
 523			       }, sizeof(struct nv_dma_v0), &pushbuf);
 524	if (ret)
 525		return ret;
 526
 527	args->pushbuf = nvif_handle(&pushbuf);
 528
 529	ret = nv50_chan_create(device, disp, oclass, head, data, size,
 530			       &dmac->base);
 531	nvif_object_fini(&pushbuf);
 532	if (ret)
 533		return ret;
 534
 535	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
 536			       &(struct nv_dma_v0) {
 537					.target = NV_DMA_V0_TARGET_VRAM,
 538					.access = NV_DMA_V0_ACCESS_RDWR,
 539					.start = syncbuf + 0x0000,
 540					.limit = syncbuf + 0x0fff,
 541			       }, sizeof(struct nv_dma_v0),
 542			       &dmac->sync);
 543	if (ret)
 544		return ret;
 545
 546	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
 547			       &(struct nv_dma_v0) {
 548					.target = NV_DMA_V0_TARGET_VRAM,
 549					.access = NV_DMA_V0_ACCESS_RDWR,
 550					.start = 0,
 551					.limit = device->info.ram_user - 1,
 552			       }, sizeof(struct nv_dma_v0),
 553			       &dmac->vram);
 554	if (ret)
 555		return ret;
 556
 557	INIT_LIST_HEAD(&dmac->ctxdma);
 558	return ret;
 559}
 560
/******************************************************************************
 * Core
 *****************************************************************************/

/* Core (master) EVO channel. */
struct nv50_mast {
	struct nv50_dmac base;
};
 568
/* Create the core channel; classes ordered newest-first, the first one
 * supported by the display wins (see nv50_chan_create). */
static int
nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
		 u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,
	};
	static const s32 oclass[] = {
		GP102_DISP_CORE_CHANNEL_DMA,
		GP100_DISP_CORE_CHANNEL_DMA,
		GM200_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
				syncbuf, &core->base);
}
 595
/******************************************************************************
 * Base
 *****************************************************************************/

/* Base (primary plane) channel; addr/data presumably identify the flip
 * semaphore location/value in the sync bo — verify against users. */
struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;
	u32 data;
};
 605
/* Create the base channel for 'head'; classes ordered newest-first. */
static int
nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}
 628
/******************************************************************************
 * Overlay
 *****************************************************************************/

/* Overlay channel (DMA). */
struct nv50_ovly {
	struct nv50_dmac base;
};
 636
/* Create the overlay channel for 'head'; classes ordered newest-first. */
static int
nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}
 658
/* One display head: the CRTC plus its overlay and overlay-immediate
 * channels. */
struct nv50_head {
	struct nouveau_crtc base;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

/* convenience accessors from a drm_crtc / channel wrapper */
#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass
 670
/* Driver-private display state, stored in nouveau_display(dev)->priv. */
struct nv50_disp {
	struct nvif_object *disp;	/* display engine object */
	struct nv50_mast mast;		/* core (master) EVO channel */

	struct nouveau_bo *sync;	/* shared sync/notifier buffer */

	struct mutex mutex;
};

static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nv50_mast(d) (&nv50_disp(d)->mast)
 687
/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
/* Reserve space for 'nr' words in the channel's push buffer.
 *
 * On success, returns a pointer to write methods/data at and leaves the
 * channel mutex held; it is released by evo_kick().  Returns NULL (mutex
 * released) if the channel stalls.
 *
 * NOTE(review): PUT (user reg 0x0000) is sampled before the mutex is
 * taken — presumably safe because evo_kick() is the only writer and runs
 * under the same lock, but verify.
 */
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	struct nvif_device *device = dmac->base.device;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;	/* PUT, words */

	mutex_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		/* not enough room before the end of the page: write a
		 * 0x20000000 word (presumably "jump to start" — verify),
		 * reset PUT to 0 and wait for GET (0x0004) to follow */
		dmac->ptr[put] = 0x20000000;

		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (nvif_msec(device, 2000,
			if (!nvif_rd32(&dmac->base.user, 0x0004))
				break;
		) < 0) {
			mutex_unlock(&dmac->lock);
			printk(KERN_ERR "nouveau: evo channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}
 717
/* Submit the commands written since evo_wait(): advance PUT to the byte
 * offset of 'push', then drop the channel mutex taken by evo_wait(). */
static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}
 725
/* Append a method header to the push buffer: _s data words will follow
 * for method _m.  Logs the method when KMS debugging is enabled. */
#define evo_mthd(p,m,s) do {                                                   \
	const u32 _m = (m), _s = (s);                                          \
	if (drm_debug & DRM_UT_KMS)                                            \
		printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__);             \
	*((p)++) = ((_s << 18) | _m);                                          \
} while(0)

/* Append one data word to the push buffer. */
#define evo_data(p,d) do {                                                     \
	const u32 _d = (d);                                                    \
	if (drm_debug & DRM_UT_KMS)                                            \
		printk(KERN_ERR "\t%08x\n", _d);                               \
	*((p)++) = _d;                                                         \
} while(0)
 739
/******************************************************************************
 * Plane
 *****************************************************************************/
#define nv50_wndw(p) container_of((p), struct nv50_wndw, plane)

/* A hardware "window" (DRM plane) and the EVO channel that drives it. */
struct nv50_wndw {
	const struct nv50_wndw_func *func;	/* per-generation hooks */
	struct nv50_dmac *dmac;

	struct drm_plane plane;

	struct nvif_notify notify;
	u16 ntfy;	/* notifier offset (presumably into the sync bo) */
	u16 sema;	/* semaphore offset (presumably into the sync bo) */
	u32 data;
};
 756
/* Per-generation window operations.  acquire/release validate state
 * during atomic check; the sema/ntfy/image/lut/point hooks emit the
 * corresponding EVO methods at commit; update pushes the lot to hardware. */
struct nv50_wndw_func {
	void *(*dtor)(struct nv50_wndw *);
	int (*acquire)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
		       struct nv50_head_atom *asyh);
	void (*release)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
			struct nv50_head_atom *asyh);
	void (*prepare)(struct nv50_wndw *, struct nv50_head_atom *asyh,
			struct nv50_wndw_atom *asyw);

	void (*sema_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*sema_clr)(struct nv50_wndw *);
	void (*ntfy_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*ntfy_clr)(struct nv50_wndw *);
	int (*ntfy_wait_begun)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_clr)(struct nv50_wndw *);
	void (*lut)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*point)(struct nv50_wndw *, struct nv50_wndw_atom *);

	u32 (*update)(struct nv50_wndw *, u32 interlock);
};
 778
 779static int
 780nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
 781{
 782	if (asyw->set.ntfy)
 783		return wndw->func->ntfy_wait_begun(wndw, asyw);
 784	return 0;
 785}
 786
 787static u32
 788nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 interlock, bool flush,
 789		    struct nv50_wndw_atom *asyw)
 790{
 791	if (asyw->clr.sema && (!asyw->set.sema || flush))
 792		wndw->func->sema_clr(wndw);
 793	if (asyw->clr.ntfy && (!asyw->set.ntfy || flush))
 794		wndw->func->ntfy_clr(wndw);
 795	if (asyw->clr.image && (!asyw->set.image || flush))
 796		wndw->func->image_clr(wndw);
 797
 798	return flush ? wndw->func->update(wndw, interlock) : 0;
 799}
 800
 801static u32
 802nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 interlock,
 803		    struct nv50_wndw_atom *asyw)
 804{
 805	if (interlock) {
 806		asyw->image.mode = 0;
 807		asyw->image.interval = 1;
 808	}
 809
 810	if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
 811	if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
 812	if (asyw->set.image) wndw->func->image_set(wndw, asyw);
 813	if (asyw->set.lut  ) wndw->func->lut      (wndw, asyw);
 814	if (asyw->set.point) wndw->func->point    (wndw, asyw);
 815
 816	return wndw->func->update(wndw, interlock);
 817}
 818
 819static void
 820nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
 821			       struct nv50_wndw_atom *asyw,
 822			       struct nv50_head_atom *asyh)
 823{
 824	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
 825	NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
 826	wndw->func->release(wndw, asyw, asyh);
 827	asyw->ntfy.handle = 0;
 828	asyw->sema.handle = 0;
 829}
 830
/* atomic_check helper for a window gaining (or changing) a framebuffer:
 * fill in clip/image parameters from the fb and mode, run the hardware
 * acquire hook, and derive the flip mode/interval. */
static int
nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(asyw->state.fb);
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	int ret;

	NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);
	/* clip rectangle covers the entire mode */
	asyw->clip.x1 = 0;
	asyw->clip.y1 = 0;
	asyw->clip.x2 = asyh->state.mode.hdisplay;
	asyw->clip.y2 = asyh->state.mode.vdisplay;

	/* translate the bo's tiling into window image parameters */
	asyw->image.w = fb->base.width;
	asyw->image.h = fb->base.height;
	asyw->image.kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;
	if (asyw->image.kind) {
		/* tiled surface; fermi+ encodes the block height in the
		 * upper bits of tile_mode */
		asyw->image.layout = 0;
		if (drm->device.info.chipset >= 0xc0)
			asyw->image.block = fb->nvbo->tile_mode >> 4;
		else
			asyw->image.block = fb->nvbo->tile_mode;
		asyw->image.pitch = (fb->base.pitches[0] / 4) << 4;
	} else {
		/* linear surface */
		asyw->image.layout = 1;
		asyw->image.block  = 0;
		asyw->image.pitch  = fb->base.pitches[0];
	}

	ret = wndw->func->acquire(wndw, asyw, asyh);
	if (ret)
		return ret;

	if (asyw->set.image) {
		/* interval != 0 -> mode 0 with that interval;
		 * interval == 0 -> mode 1 with interval 0 */
		if (!(asyw->image.mode = asyw->interval ? 0 : 1))
			asyw->image.interval = asyw->interval;
		else
			asyw->image.interval = 0;
	}

	return 0;
}
 875
/* Plane ->atomic_check: validate the requested plane state and work out
 * which window state (clr/set masks) must be programmed at commit time.
 * 'armw' is the currently-armed state, 'asyw' the new (async) state. */
static int
nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *harm = NULL, *asyh = NULL;
	bool varm = false, asyv = false, asym = false;
	int ret;

	NV_ATOMIC(drm, "%s atomic_check\n", plane->name);
	/* new state: head the plane will be on, whether it needs a modeset
	 * (asym) and whether it will be visible (asyv) */
	if (asyw->state.crtc) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);
		asym = drm_atomic_crtc_needs_modeset(&asyh->state);
		asyv = asyh->state.active;
	}

	/* current state: head the plane sits on now, and whether that head
	 * is currently active (varm) */
	if (armw->state.crtc) {
		harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
		if (IS_ERR(harm))
			return PTR_ERR(harm);
		varm = harm->state.crtc->state->active;
	}

	if (asyv) {
		asyw->point.x = asyw->state.crtc_x;
		asyw->point.y = asyw->state.crtc_y;
		if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
			asyw->set.point = true;

		/* (re)acquire image state on enable, modeset or fb change */
		if (!varm || asym || armw->state.fb != asyw->state.fb) {
			ret = nv50_wndw_atomic_check_acquire(wndw, asyw, asyh);
			if (ret)
				return ret;
		}
	} else
	if (varm) {
		/* visible -> not visible: release */
		nv50_wndw_atomic_check_release(wndw, asyw, harm);
	} else {
		/* was not visible, stays not visible: nothing to do */
		return 0;
	}

	/* on disable or modeset, schedule clearing of whatever state the
	 * hardware currently holds */
	if (!asyv || asym) {
		asyw->clr.ntfy = armw->ntfy.handle != 0;
		asyw->clr.sema = armw->sema.handle != 0;
		if (wndw->func->image_clr)
			asyw->clr.image = armw->image.handle != 0;
		asyw->set.lut = wndw->func->lut && asyv;
	}

	return 0;
}
 
 
 
 
 
 
 
 
 
 
 931
 932static void
 933nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
 934{
 935	struct nouveau_framebuffer *fb = nouveau_framebuffer(old_state->fb);
 936	struct nouveau_drm *drm = nouveau_drm(plane->dev);
 937
 938	NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
 939	if (!old_state->fb)
 940		return;
 941
 942	nouveau_bo_unpin(fb->nvbo);
 943}
 944
 945static int
 946nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
 947{
 948	struct nouveau_framebuffer *fb = nouveau_framebuffer(state->fb);
 949	struct nouveau_drm *drm = nouveau_drm(plane->dev);
 950	struct nv50_wndw *wndw = nv50_wndw(plane);
 951	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
 952	struct nv50_head_atom *asyh;
 953	struct nv50_dmac_ctxdma *ctxdma;
 954	int ret;
 955
 956	NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, state->fb);
 957	if (!asyw->state.fb)
 958		return 0;
 959
 960	ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM, true);
 961	if (ret)
 962		return ret;
 963
 964	ctxdma = nv50_dmac_ctxdma_new(wndw->dmac, fb);
 965	if (IS_ERR(ctxdma)) {
 966		nouveau_bo_unpin(fb->nvbo);
 967		return PTR_ERR(ctxdma);
 968	}
 969
 970	asyw->state.fence = reservation_object_get_excl_rcu(fb->nvbo->bo.resv);
 971	asyw->image.handle = ctxdma->object.handle;
 972	asyw->image.offset = fb->nvbo->bo.offset;
 973
 974	if (wndw->func->prepare) {
 975		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
 976		if (IS_ERR(asyh))
 977			return PTR_ERR(asyh);
 978
 979		wndw->func->prepare(wndw, asyh, asyw);
 980	}
 981
 982	return 0;
 983}
 984
/* Plane helper vtable shared by all window (plane) types. */
static const struct drm_plane_helper_funcs
nv50_wndw_helper = {
	.prepare_fb = nv50_wndw_prepare_fb,
	.cleanup_fb = nv50_wndw_cleanup_fb,
	.atomic_check = nv50_wndw_atomic_check,
};
 991
/* Free an atomic plane state allocated by
 * nv50_wndw_atomic_duplicate_state().
 */
static void
nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
			       struct drm_plane_state *state)
{
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	__drm_atomic_helper_plane_destroy_state(&asyw->state);
	/* NOTE(review): some DRM core versions already drop state->fence in
	 * __drm_atomic_helper_plane_destroy_state() — confirm this is not a
	 * double dma_fence_put() against the helper in use.
	 */
	dma_fence_put(asyw->state.fence);
	kfree(asyw);
}
1001
1002static struct drm_plane_state *
1003nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
1004{
1005	struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
1006	struct nv50_wndw_atom *asyw;
1007	if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
1008		return NULL;
1009	__drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
1010	asyw->state.fence = NULL;
1011	asyw->interval = 1;
1012	asyw->sema = armw->sema;
1013	asyw->ntfy = armw->ntfy;
1014	asyw->image = armw->image;
1015	asyw->point = armw->point;
1016	asyw->lut = armw->lut;
1017	asyw->clr.mask = 0;
1018	asyw->set.mask = 0;
1019	return &asyw->state;
1020}
1021
1022static void
1023nv50_wndw_reset(struct drm_plane *plane)
1024{
1025	struct nv50_wndw_atom *asyw;
1026
1027	if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
1028		return;
1029
1030	if (plane->state)
1031		plane->funcs->atomic_destroy_state(plane, plane->state);
1032	plane->state = &asyw->state;
1033	plane->state->plane = plane;
1034	plane->state->rotation = DRM_ROTATE_0;
1035}
1036
/* Destroy a window plane: tear down its notifier, run the type-specific
 * destructor (which returns the containing object), clean up the DRM
 * plane, then free the container.
 */
static void
nv50_wndw_destroy(struct drm_plane *plane)
{
	struct nv50_wndw *wndw = nv50_wndw(plane);
	void *data;
	nvif_notify_fini(&wndw->notify);
	data = wndw->func->dtor(wndw);
	drm_plane_cleanup(&wndw->plane);
	kfree(data);
}
1047
/* drm_plane_funcs vtable shared by all window types; state management is
 * delegated to the atomic helpers plus the custom state functions above.
 */
static const struct drm_plane_funcs
nv50_wndw = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = nv50_wndw_destroy,
	.reset = nv50_wndw_reset,
	.set_property = drm_atomic_helper_plane_set_property,
	.atomic_duplicate_state = nv50_wndw_atomic_duplicate_state,
	.atomic_destroy_state = nv50_wndw_atomic_destroy_state,
};
1058
/* Suspend the window: stop delivering its notifier events. */
static void
nv50_wndw_fini(struct nv50_wndw *wndw)
{
	nvif_notify_put(&wndw->notify);
}
1064
/* Resume the window: re-enable delivery of its notifier events. */
static void
nv50_wndw_init(struct nv50_wndw *wndw)
{
	nvif_notify_get(&wndw->notify);
}
1070
/* Common constructor for window planes: record the type vtable and the
 * DMA channel, register the DRM plane (named "<name>-<index>") with the
 * given format list, and attach the shared plane helpers.
 * Returns 0 on success or a negative errno.
 */
static int
nv50_wndw_ctor(const struct nv50_wndw_func *func, struct drm_device *dev,
	       enum drm_plane_type type, const char *name, int index,
	       struct nv50_dmac *dmac, const u32 *format, int nformat,
	       struct nv50_wndw *wndw)
{
	int ret;

	wndw->func = func;
	wndw->dmac = dmac;

	ret = drm_universal_plane_init(dev, &wndw->plane, 0, &nv50_wndw, format,
				       nformat, type, "%s-%d", name, index);
	if (ret)
		return ret;

	drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);
	return 0;
}
1090
1091/******************************************************************************
1092 * Cursor plane
1093 *****************************************************************************/
/* Recover the cursor container from its embedded window. */
#define nv50_curs(p) container_of((p), struct nv50_curs, wndw)

/* Cursor plane: a window plus the cursor immediate-update channel. */
struct nv50_curs {
	struct nv50_wndw wndw;
	struct nvif_object chan;
};
1100
/* Kick an update on the cursor immediate channel.  Cursor updates do not
 * participate in core-channel interlocking, hence the zero return.
 */
static u32
nv50_curs_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0080, 0x00000000);
	return 0;
}
1108
/* Move the cursor: y in the high 16 bits, x in the low 16 bits. */
static void
nv50_curs_point(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0084, (asyw->point.y << 16) | asyw->point.x);
}
1115
/* Record the cursor surface (VRAM ctxdma handle + offset) in the head
 * state so the core channel can program it; flag the head for a cursor
 * update if the cursor is visible.
 */
static void
nv50_curs_prepare(struct nv50_wndw *wndw, struct nv50_head_atom *asyh,
		  struct nv50_wndw_atom *asyw)
{
	asyh->curs.handle = nv50_disp(wndw->plane.dev)->mast.base.vram.handle;
	asyh->curs.offset = asyw->image.offset;
	asyh->set.curs = asyh->curs.visible;
}
1124
/* Release the cursor from a head: just mark it invisible. */
static void
nv50_curs_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->curs.visible = false;
}
1131
/* Validate the cursor plane state: no scaling, square 32x32 or 64x64
 * surface, ARGB8888 only.  Returns 0 on success (including a simply
 * invisible cursor) or a negative errno.
 */
static int
nv50_curs_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	int ret;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   true, true);
	/* Record visibility before bailing out, so an invisible cursor is
	 * still reflected in the head state.
	 */
	asyh->curs.visible = asyw->state.visible;
	if (ret || !asyh->curs.visible)
		return ret;

	/* layout 0 = 32x32, layout 1 = 64x64. */
	switch (asyw->state.fb->width) {
	case 32: asyh->curs.layout = 0; break;
	case 64: asyh->curs.layout = 1; break;
	default:
		return -EINVAL;
	}

	/* Cursor surfaces must be square. */
	if (asyw->state.fb->width != asyw->state.fb->height)
		return -EINVAL;

	switch (asyw->state.fb->pixel_format) {
	case DRM_FORMAT_ARGB8888: asyh->curs.format = 1; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	return 0;
}
1165
/* Cursor destructor: tear down the immediate channel and hand the
 * container back to nv50_wndw_destroy() for freeing.
 */
static void *
nv50_curs_dtor(struct nv50_wndw *wndw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_object_fini(&curs->chan);
	return curs;
}
1173
/* Only ARGB8888 cursors are supported by the hardware path above. */
static const u32
nv50_curs_format[] = {
	DRM_FORMAT_ARGB8888,
};
1178
/* Window vtable for the cursor plane type. */
static const struct nv50_wndw_func
nv50_curs = {
	.dtor = nv50_curs_dtor,
	.acquire = nv50_curs_acquire,
	.release = nv50_curs_release,
	.prepare = nv50_curs_prepare,
	.point = nv50_curs_point,
	.update = nv50_curs_update,
};
1188
/* Create the cursor plane for a head: pick the newest cursor-immediate
 * class the display supports, construct the window, then allocate the
 * cursor channel object.  Returns 0 on success or a negative errno.
 */
static int
nv50_curs_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_curs **pcurs)
{
	/* Ordered newest-first; nvif_mclass() picks the first supported. */
	static const struct nvif_mclass curses[] = {
		{ GK104_DISP_CURSOR, 0 },
		{ GF110_DISP_CURSOR, 0 },
		{ GT214_DISP_CURSOR, 0 },
		{   G82_DISP_CURSOR, 0 },
		{  NV50_DISP_CURSOR, 0 },
		{}
	};
	struct nv50_disp_cursor_v0 args = {
		.head = head->base.index,
	};
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_curs *curs;
	int cid, ret;

	cid = nvif_mclass(disp->disp, curses);
	if (cid < 0) {
		NV_ERROR(drm, "No supported cursor immediate class\n");
		return cid;
	}

	if (!(curs = *pcurs = kzalloc(sizeof(*curs), GFP_KERNEL)))
		return -ENOMEM;

	ret = nv50_wndw_ctor(&nv50_curs, drm->dev, DRM_PLANE_TYPE_CURSOR,
			     "curs", head->base.index, &disp->mast.base,
			     nv50_curs_format, ARRAY_SIZE(nv50_curs_format),
			     &curs->wndw);
	if (ret) {
		kfree(curs);
		return ret;
	}

	/* On failure past this point, curs is reclaimed when the DRM plane
	 * is destroyed (nv50_wndw_destroy -> nv50_curs_dtor).
	 */
	ret = nvif_object_init(disp->disp, 0, curses[cid].oclass, &args,
			       sizeof(args), &curs->chan);
	if (ret) {
		NV_ERROR(drm, "curs%04x allocation failed: %d\n",
			 curses[cid].oclass, ret);
		return ret;
	}

	return 0;
}
1236
1237/******************************************************************************
1238 * Primary plane
1239 *****************************************************************************/
/* Recover the base-plane container from its embedded window. */
#define nv50_base(p) container_of((p), struct nv50_base, wndw)

/* Primary (base) plane: a window plus its EVO base channel and head id. */
struct nv50_base {
	struct nv50_wndw wndw;
	struct nv50_sync chan;
	int id;
};
1247
/* Completion-event notifier callback; keep it armed for further events. */
static int
nv50_base_notify(struct nvif_notify *notify)
{
	return NVIF_NOTIFY_KEEP;
}
1253
/* Enable/disable LUT usage for the base channel (method 0x00e0). */
static void
nv50_base_lut(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, asyw->lut.enable << 30);
		evo_kick(push, &base->chan);
	}
}
1265
/* Clear the base channel's scanout image: zero the presentation control
 * (0x0084) and the image ctxdma (0x00c0).
 */
static void
nv50_base_image_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 4))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}
1279
/* Program the base channel's scanout surface.  Presentation mode/interval
 * go in 0x0084, the image ctxdma in 0x00c0; the surface descriptor method
 * offset and field layout differ per display-class generation.
 */
static void
nv50_base_image_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	const s32 oclass = base->chan.base.base.user.oclass;
	u32 *push;
	if ((push = evo_wait(&base->chan, 10))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, (asyw->image.mode << 8) |
			       (asyw->image.interval << 4));
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, asyw->image.handle);
		if (oclass < G82_DISP_BASE_CHANNEL_DMA) {
			/* Original NV50: descriptor includes memory kind. */
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, (asyw->image.kind << 16) |
				       (asyw->image.format << 8));
		} else
		if (oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			/* G82..GT21x: same method, no kind field. */
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, asyw->image.format << 8);
		} else {
			/* GF110+: method moved to 0x0400, layout bit at 24. */
			evo_mthd(push, 0x0400, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 24) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, asyw->image.format << 8);
		}
		evo_kick(push, &base->chan);
	}
}
1325
/* Detach the completion notifier ctxdma from the base channel (0x00a4). */
static void
nv50_base_ntfy_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x00a4, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}
1337
/* Attach the completion notifier: awaken flag + offset in 0x00a0, the
 * notifier ctxdma handle in 0x00a4.
 */
static void
nv50_base_ntfy_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 3))) {
		evo_mthd(push, 0x00a0, 2);
		evo_data(push, (asyw->ntfy.awaken << 30) | asyw->ntfy.offset);
		evo_data(push, asyw->ntfy.handle);
		evo_kick(push, &base->chan);
	}
}
1350
/* Detach the flip semaphore ctxdma from the base channel (0x0094). */
static void
nv50_base_sema_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}
1362
/* Program the flip semaphore (methods 0x0088..0x0094): offset, acquire
 * value, release value, and the semaphore ctxdma handle.
 */
static void
nv50_base_sema_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 5))) {
		evo_mthd(push, 0x0088, 4);
		evo_data(push, asyw->sema.offset);
		evo_data(push, asyw->sema.acquire);
		evo_data(push, asyw->sema.release);
		evo_data(push, asyw->sema.handle);
		evo_kick(push, &base->chan);
	}
}
1377
/* Kick an UPDATE (0x0080) on the base channel and return the interlock
 * bit the core channel must wait on: pre-GF110 classes use an 8-bit
 * stride per base channel, GF110+ use 4 bits.
 */
static u32
nv50_base_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;

	if (!(push = evo_wait(&base->chan, 2)))
		return 0;
	evo_mthd(push, 0x0080, 1);
	evo_data(push, interlock);
	evo_kick(push, &base->chan);

	if (base->chan.base.base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA)
		return interlock ? 2 << (base->id * 8) : 0;
	return interlock ? 2 << (base->id * 4) : 0;
}
1394
/* Poll the notifier word in the shared sync BO (for up to 2s) until the
 * hardware marks the flip as begun (status field 0xc0000000 == BEGUN).
 * Returns 0 on success, -ETIMEDOUT otherwise.
 */
static int
nv50_base_ntfy_wait_begun(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	if (nvif_msec(&drm->device, 2000ULL,
		u32 data = nouveau_bo_rd32(disp->sync, asyw->ntfy.offset / 4);
		if ((data & 0xc0000000) == 0x40000000)
			break;
		usleep_range(1, 2);
	) < 0)
		return -ETIMEDOUT;
	return 0;
}
1409
/* Release the base plane from a head: zero cpp so the head's base/ovly
 * bounds get disabled.
 */
static void
nv50_base_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->base.cpp = 0;
}
1416
/* Validate the base plane state (no scaling) and translate the DRM pixel
 * format into the hardware format code, recording geometry in the head
 * state.  Returns 0 on success or a negative errno.
 */
static int
nv50_base_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	const u32 format = asyw->state.fb->pixel_format;
	const struct drm_format_info *info;
	int ret;

	/* Reject formats without a legacy depth (e.g. YUV). */
	info = drm_format_info(format);
	if (!info || !info->depth)
		return -EINVAL;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   false, true);
	if (ret)
		return ret;

	asyh->base.depth = info->depth;
	asyh->base.cpp = info->cpp[0];
	/* src coordinates are 16.16 fixed point. */
	asyh->base.x = asyw->state.src.x1 >> 16;
	asyh->base.y = asyw->state.src.y1 >> 16;
	asyh->base.w = asyw->state.fb->width;
	asyh->base.h = asyw->state.fb->height;

	/* Hardware surface-format codes for the EVO base channel. */
	switch (format) {
	case DRM_FORMAT_C8         : asyw->image.format = 0x1e; break;
	case DRM_FORMAT_RGB565     : asyw->image.format = 0xe8; break;
	case DRM_FORMAT_XRGB1555   :
	case DRM_FORMAT_ARGB1555   : asyw->image.format = 0xe9; break;
	case DRM_FORMAT_XRGB8888   :
	case DRM_FORMAT_ARGB8888   : asyw->image.format = 0xcf; break;
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010: asyw->image.format = 0xd1; break;
	case DRM_FORMAT_XBGR8888   :
	case DRM_FORMAT_ABGR8888   : asyw->image.format = 0xd5; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	asyw->lut.enable = 1;
	asyw->set.image = true;
	return 0;
}
1463
/* Base-plane destructor: destroy the EVO base channel and hand the
 * container back to nv50_wndw_destroy() for freeing.
 */
static void *
nv50_base_dtor(struct nv50_wndw *wndw)
{
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	struct nv50_base *base = nv50_base(wndw);
	nv50_dmac_destroy(&base->chan.base, disp->disp);
	return base;
}
1472
/* Formats supported by the base plane; must stay in sync with the switch
 * in nv50_base_acquire().
 */
static const u32
nv50_base_format[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ABGR8888,
};
1486
/* Window vtable for the base (primary) plane type. */
static const struct nv50_wndw_func
nv50_base = {
	.dtor = nv50_base_dtor,
	.acquire = nv50_base_acquire,
	.release = nv50_base_release,
	.sema_set = nv50_base_sema_set,
	.sema_clr = nv50_base_sema_clr,
	.ntfy_set = nv50_base_ntfy_set,
	.ntfy_clr = nv50_base_ntfy_clr,
	.ntfy_wait_begun = nv50_base_ntfy_wait_begun,
	.image_set = nv50_base_image_set,
	.image_clr = nv50_base_image_clr,
	.lut = nv50_base_lut,
	.update = nv50_base_update,
};
1502
/* Create the primary (base) plane for a head: construct the window,
 * create its EVO base channel, and hook up the flip-completion notifier.
 * Returns 0 on success or a negative errno.
 */
static int
nv50_base_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_base **pbase)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_base *base;
	int ret;

	if (!(base = *pbase = kzalloc(sizeof(*base), GFP_KERNEL)))
		return -ENOMEM;
	base->id = head->base.index;
	/* Per-head notifier/semaphore offsets within the shared sync BO. */
	base->wndw.ntfy = EVO_FLIP_NTFY0(base->id);
	base->wndw.sema = EVO_FLIP_SEM0(base->id);
	base->wndw.data = 0x00000000;

	ret = nv50_wndw_ctor(&nv50_base, drm->dev, DRM_PLANE_TYPE_PRIMARY,
			     "base", base->id, &base->chan.base,
			     nv50_base_format, ARRAY_SIZE(nv50_base_format),
			     &base->wndw);
	if (ret) {
		kfree(base);
		return ret;
	}

	/* On failure past this point, base is reclaimed when the DRM plane
	 * is destroyed (nv50_wndw_destroy -> nv50_base_dtor).
	 */
	ret = nv50_base_create(&drm->device, disp->disp, base->id,
			       disp->sync->bo.offset, &base->chan);
	if (ret)
		return ret;

	return nvif_notify_init(&base->chan.base.base.user, nv50_base_notify,
				false,
				NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT,
				&(struct nvif_notify_uevent_req) {},
				sizeof(struct nvif_notify_uevent_req),
				sizeof(struct nvif_notify_uevent_rep),
				&base->wndw.notify);
}
1540
1541/******************************************************************************
1542 * Head
1543 *****************************************************************************/
/* Program the head's procamp (saturation sin/cos) on the core channel;
 * the method offset differs between pre- and post-GF110 core classes.
 */
static void
nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1);
		else
			evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1);
		evo_data(push, (asyh->procamp.sat.sin << 20) |
			       (asyh->procamp.sat.cos << 8));
		evo_kick(push, core);
	}
}
1559
/* Program the head's dithering control (mode/bits/enable packed into one
 * word); the method offset differs across core-channel generations.
 */
static void
nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1);
		else
		if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1);
		else
			evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1);
		evo_data(push, (asyh->dither.mode << 3) |
			       (asyh->dither.bits << 1) |
			        asyh->dither.enable);
		evo_kick(push, core);
	}
}
1579
/* Program the head's overlay usage bounds from the base cpp: bits 10:8
 * encode the pixel depth class, bit 0 enables the bounds.
 */
static void
nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0904 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}
1608
/* Program the head's base usage bounds from the base cpp; unlike the
 * overlay bounds, 8bpp (cpp == 1) surfaces are also allowed here.
 */
static void
nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		case 1: bounds |= 0x00000000; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0900 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}
1638
/* Hide the head's hardware cursor; G82+ classes must also clear the
 * cursor ctxdma handle in a separate method.
 */
static void
nv50_head_curs_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}
1663
1664static void
1665nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh)
1666{
1667	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1668	u32 *push;
1669	if ((push = evo_wait(core, 5))) {
1670		if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) {
1671			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
1672			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
1673						    (asyh->curs.format << 24));
1674			evo_data(push, asyh->curs.offset >> 8);
1675		} else
1676		if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) {
1677			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
1678			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
1679						    (asyh->curs.format << 24));
1680			evo_data(push, asyh->curs.offset >> 8);
1681			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
1682			evo_data(push, asyh->curs.handle);
1683		} else {
1684			evo_mthd(push, 0x0480 + head->base.index * 0x300, 2);
1685			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
1686						    (asyh->curs.format << 24));
1687			evo_data(push, asyh->curs.offset >> 8);
1688			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
1689			evo_data(push, asyh->curs.handle);
1690		}
1691		evo_kick(push, core);
1692	}
1693}
1694
/* Detach the head's core-channel ISO surface ctxdma. */
static void
nv50_head_core_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, core);
	}
}
1709
/* Program the head's core-channel scanout surface (offset, geometry,
 * layout/pitch/block, format, ctxdma, position); field layout and method
 * offsets differ per core-channel generation.
 */
static void
nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 9))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.kind << 16 |
				       asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
			/* EVO will complain with INVALID_STATE if we have an
			 * active cursor and (re)specify HeadSetContextDmaIso
			 * without also updating HeadSetOffsetCursor.
			 */
			asyh->set.curs = asyh->curs.visible;
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		} else {
			/* GF110+: methods moved, layout bit at 24. */
			evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 24 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		}
		evo_kick(push, core);
	}
}
1763
/* Disable the head's LUT; G82+ classes also clear the LUT ctxdma. */
static void
nv50_head_lut_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}
1788
/* Enable the head's LUT and point it at the LUT buffer; G82+ classes
 * also program the LUT ctxdma handle.
 */
static void
nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 7))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, asyh->lut.handle);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, asyh->lut.handle);
		}
		evo_kick(push, core);
	}
}
1818
/* Program the head's display mode (pixel clock, sync/blank timings);
 * method layout differs substantially between pre- and post-GF110.
 */
static void
nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	struct nv50_head_mode *m = &asyh->mode;
	u32 *push;
	if ((push = evo_wait(core, 14))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
			evo_data(push, 0x00800000 | m->clock);
			evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
			evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_data(push, asyh->mode.v.blankus);
			evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
			evo_data(push, 0x00000000); /* ??? */
			evo_data(push, 0xffffff00);
			/* GF110+ takes the pixel clock in kHz, twice. */
			evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
			evo_data(push, m->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, m->clock * 1000);
		}
		evo_kick(push, core);
	}
}
1859
/* Program the head's viewport: input (iW x iH) and output (oW x oH)
 * sizes; GF110+ takes a third copy of the output size.
 */
static void
nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 10))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		} else {
			evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		}
		evo_kick(push, core);
	}
}
1887
/* Disable head resources the new state no longer uses.  "y" forces a
 * clear even when the resource is about to be re-set (full modeset).
 * NOTE(review): the LUT clear is keyed off clr.core/set.core as well --
 * the repeated clr.core test looks intentional (LUT state appears to be
 * tied to the core channel), but confirm against upstream history.
 */
static void
nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y)
{
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_lut_clr(head);
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_core_clr(head);
	if (asyh->clr.curs && (!asyh->set.curs || y))
		nv50_head_curs_clr(head);
}
1898
/* Push every piece of head state flagged dirty in asyh->set to the
 * hardware.  The order of the calls mirrors the order the methods are
 * expected to be submitted in; do not reorder.  Note the LUT is set
 * whenever core state is flushed (set.core is tested twice).
 */
static void
nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	if (asyh->set.view   ) nv50_head_view    (head, asyh);
	if (asyh->set.mode   ) nv50_head_mode    (head, asyh);
	if (asyh->set.core   ) nv50_head_lut_set (head, asyh);
	if (asyh->set.core   ) nv50_head_core_set(head, asyh);
	if (asyh->set.curs   ) nv50_head_curs_set(head, asyh);
	if (asyh->set.base   ) nv50_head_base    (head, asyh);
	if (asyh->set.ovly   ) nv50_head_ovly    (head, asyh);
	if (asyh->set.dither ) nv50_head_dither  (head, asyh);
	if (asyh->set.procamp) nv50_head_procamp (head, asyh);
}
1912
1913static void
1914nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
1915			       struct nv50_head_atom *asyh,
1916			       struct nouveau_conn_atom *asyc)
1917{
1918	const int vib = asyc->procamp.color_vibrance - 100;
1919	const int hue = asyc->procamp.vibrant_hue - 90;
1920	const int adj = (vib > 0) ? 50 : 0;
1921	asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
1922	asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
1923	asyh->set.procamp = true;
1924}
1925
1926static void
1927nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
1928			      struct nv50_head_atom *asyh,
1929			      struct nouveau_conn_atom *asyc)
1930{
1931	struct drm_connector *connector = asyc->state.connector;
1932	u32 mode = 0x00;
1933
1934	if (asyc->dither.mode == DITHERING_MODE_AUTO) {
1935		if (asyh->base.depth > connector->display_info.bpc * 3)
1936			mode = DITHERING_MODE_DYNAMIC2X2;
1937	} else {
1938		mode = asyc->dither.mode;
1939	}
1940
1941	if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
1942		if (connector->display_info.bpc >= 8)
1943			mode |= DITHERING_DEPTH_8BPC;
1944	} else {
1945		mode |= asyc->dither.depth;
1946	}
1947
1948	asyh->dither.enable = mode;
1949	asyh->dither.bits = mode >> 1;
1950	asyh->dither.mode = mode >> 3;
1951	asyh->set.dither = true;
1952}
1953
/* Compute the head's scaler input/output rectangle from the user mode,
 * the adjusted (backend) mode, the scaling mode property, and optional
 * underscan compensation, then mark the view state for flushing.
 */
static void
nv50_head_atomic_check_view(struct nv50_head_atom *armh,
			    struct nv50_head_atom *asyh,
			    struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	struct drm_display_mode *omode = &asyh->state.adjusted_mode;
	struct drm_display_mode *umode = &asyh->state.mode;
	int mode = asyc->scaler.mode;
	struct edid *edid;

	/* EDID is only needed for UNDERSCAN_AUTO's HDMI detection below. */
	if (connector->edid_blob_ptr)
		edid = (struct edid *)connector->edid_blob_ptr->data;
	else
		edid = NULL;

	if (!asyc->scaler.full) {
		/* SCALE_NONE: output rectangle equals the user mode. */
		if (mode == DRM_MODE_SCALE_NONE)
			omode = umode;
	} else {
		/* Non-EDID LVDS/eDP mode. */
		mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	asyh->view.iW = umode->hdisplay;
	asyh->view.iH = umode->vdisplay;
	asyh->view.oW = omode->hdisplay;
	asyh->view.oH = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		asyh->view.oH *= 2;

	/* Add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
	    (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
	     drm_detect_hdmi_monitor(edid)))) {
		u32 bX = asyc->scaler.underscan.hborder;
		u32 bY = asyc->scaler.underscan.vborder;
		/* 13.19 fixed-point aspect ratio of the output rect. */
		u32 r = (asyh->view.oH << 19) / asyh->view.oW;

		if (bX) {
			asyh->view.oW -= (bX * 2);
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		} else {
			/* Default border: 1/16th of width plus 32 pixels. */
			asyh->view.oW -= (asyh->view.oW >> 4) + 32;
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
	}

	/* Handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation.
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
		asyh->view.oH = min((u16)umode->vdisplay, asyh->view.oH);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		/* Shrink the larger output axis to preserve the input's
		 * aspect ratio (again 13.19 fixed point, round-to-nearest). */
		if (asyh->view.oH < asyh->view.oW) {
			u32 r = (asyh->view.iW << 19) / asyh->view.iH;
			asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
		} else {
			u32 r = (asyh->view.iH << 19) / asyh->view.iW;
			asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	asyh->set.view = true;
}
2030
/* Convert the DRM adjusted mode into the hardware's raster timing
 * representation (active/sync/blank positions per axis), accounting for
 * interlace and doublescan, and mark the mode state for flushing.
 */
static void
nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	u32 ilace   = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan   = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hbackp  =  mode->htotal - mode->hsync_end;
	u32 vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	u32 hfrontp =  mode->hsync_start - mode->hdisplay;
	u32 vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	struct nv50_head_mode *m = &asyh->mode;

	/* Horizontal timings are in pixels; positions are inclusive,
	 * hence the "- 1" adjustments. */
	m->h.active = mode->htotal;
	m->h.synce  = mode->hsync_end - mode->hsync_start - 1;
	m->h.blanke = m->h.synce + hbackp;
	m->h.blanks = mode->htotal - hfrontp - 1;

	/* Vertical timings are per-field (divided by ilace) and scaled
	 * by vscan for doublescan modes. */
	m->v.active = mode->vtotal * vscan / ilace;
	m->v.synce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	m->v.blanke = m->v.synce + vbackp;
	m->v.blanks = m->v.active - vfrontp - 1;

	/*XXX: Safe underestimate, even "0" works */
	/* Vblank duration in microseconds: pixels * 1000 / kHz clock. */
	m->v.blankus = (m->v.active - mode->vdisplay - 2) * m->h.active;
	m->v.blankus *= 1000;
	m->v.blankus /= mode->clock;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		/* Second field's blanking window for interlaced modes. */
		m->v.blank2e =  m->v.active + m->v.synce + vbackp;
		m->v.blank2s =  m->v.blank2e + (mode->vdisplay * vscan / ilace);
		m->v.active  = (m->v.active * 2) + 1;
		m->interlace = true;
	} else {
		m->v.blank2e = 0;
		m->v.blank2s = 1;
		m->interlace = false;
	}
	m->clock = mode->clock;

	drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V);
	asyh->set.mode = true;
}
2073
2074static int
2075nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
2076{
2077	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
2078	struct nv50_disp *disp = nv50_disp(crtc->dev);
2079	struct nv50_head *head = nv50_head(crtc);
2080	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
2081	struct nv50_head_atom *asyh = nv50_head_atom(state);
2082	struct nouveau_conn_atom *asyc = NULL;
2083	struct drm_connector_state *conns;
2084	struct drm_connector *conn;
2085	int i;
2086
2087	NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
2088	if (asyh->state.active) {
2089		for_each_connector_in_state(asyh->state.state, conn, conns, i) {
2090			if (conns->crtc == crtc) {
2091				asyc = nouveau_conn_atom(conns);
2092				break;
2093			}
2094		}
2095
2096		if (armh->state.active) {
2097			if (asyc) {
2098				if (asyh->state.mode_changed)
2099					asyc->set.scaler = true;
2100				if (armh->base.depth != asyh->base.depth)
2101					asyc->set.dither = true;
2102			}
2103		} else {
2104			asyc->set.mask = ~0;
2105			asyh->set.mask = ~0;
2106		}
2107
2108		if (asyh->state.mode_changed)
2109			nv50_head_atomic_check_mode(head, asyh);
2110
2111		if (asyc) {
2112			if (asyc->set.scaler)
2113				nv50_head_atomic_check_view(armh, asyh, asyc);
2114			if (asyc->set.dither)
2115				nv50_head_atomic_check_dither(armh, asyh, asyc);
2116			if (asyc->set.procamp)
2117				nv50_head_atomic_check_procamp(armh, asyh, asyc);
2118		}
2119
2120		if ((asyh->core.visible = (asyh->base.cpp != 0))) {
2121			asyh->core.x = asyh->base.x;
2122			asyh->core.y = asyh->base.y;
2123			asyh->core.w = asyh->base.w;
2124			asyh->core.h = asyh->base.h;
2125		} else
2126		if ((asyh->core.visible = asyh->curs.visible)) {
2127			/*XXX: We need to either find some way of having the
2128			 *     primary base layer appear black, while still
2129			 *     being able to display the other layers, or we
2130			 *     need to allocate a dummy black surface here.
2131			 */
2132			asyh->core.x = 0;
2133			asyh->core.y = 0;
2134			asyh->core.w = asyh->state.mode.hdisplay;
2135			asyh->core.h = asyh->state.mode.vdisplay;
2136		}
2137		asyh->core.handle = disp->mast.base.vram.handle;
2138		asyh->core.offset = 0;
2139		asyh->core.format = 0xcf;
2140		asyh->core.kind = 0;
2141		asyh->core.layout = 1;
2142		asyh->core.block = 0;
2143		asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
2144		asyh->lut.handle = disp->mast.base.vram.handle;
2145		asyh->lut.offset = head->base.lut.nvbo->bo.offset;
2146		asyh->set.base = armh->base.cpp != asyh->base.cpp;
2147		asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
2148	} else {
2149		asyh->core.visible = false;
2150		asyh->curs.visible = false;
2151		asyh->base.cpp = 0;
2152		asyh->ovly.cpp = 0;
2153	}
2154
2155	if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
2156		if (asyh->core.visible) {
2157			if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
2158				asyh->set.core = true;
2159		} else
2160		if (armh->core.visible) {
2161			asyh->clr.core = true;
2162		}
2163
2164		if (asyh->curs.visible) {
2165			if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
2166				asyh->set.curs = true;
2167		} else
2168		if (armh->curs.visible) {
2169			asyh->clr.curs = true;
2170		}
2171	} else {
2172		asyh->clr.core = armh->core.visible;
2173		asyh->clr.curs = armh->curs.visible;
2174		asyh->set.core = asyh->core.visible;
2175		asyh->set.curs = asyh->curs.visible;
2176	}
2177
2178	if (asyh->clr.mask || asyh->set.mask)
2179		nv50_atom(asyh->state.state)->lock_core = true;
2180	return 0;
2181}
2182
/* Copy the software gamma table into the head's mapped LUT buffer
 * object.  Values are truncated from 16 to 14 bits (>> 2).  Pre-GF110
 * uses an 8-byte stride per entry; GF110+ uses a 32-byte stride and a
 * 0x6000 bias on each component -- presumably the newer hardware's
 * fixed-point LUT format, confirm against the display class docs.
 */
static void
nv50_head_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		u16 r = nv_crtc->lut.r[i] >> 2;
		u16 g = nv_crtc->lut.g[i] >> 2;
		u16 b = nv_crtc->lut.b[i] >> 2;

		if (disp->disp->oclass < GF110_DISP) {
			writew(r + 0x0000, lut + (i * 0x08) + 0);
			writew(g + 0x0000, lut + (i * 0x08) + 2);
			writew(b + 0x0000, lut + (i * 0x08) + 4);
		} else {
			writew(r + 0x6000, lut + (i * 0x20) + 0);
			writew(g + 0x6000, lut + (i * 0x20) + 2);
			writew(b + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}
2207
/* Legacy kgdb/panic-time framebuffer hook.  Not supported on this
 * atomic driver; warn loudly if anything ever calls it.
 */
static int
nv50_head_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	WARN_ON(1);
	return 0;
}
2216
/* CRTC helper vtable: atomic check plus the legacy LUT/base hooks. */
static const struct drm_crtc_helper_funcs
nv50_head_help = {
	.mode_set_base_atomic = nv50_head_mode_set_base_atomic,
	.load_lut = nv50_head_lut_load,
	.atomic_check = nv50_head_atomic_check,
};
2223
2224/* This is identical to the version in the atomic helpers, except that
2225 * it supports non-vblanked ("async") page flips.
2226 */
2227static int
2228nv50_head_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
2229		    struct drm_pending_vblank_event *event, u32 flags)
2230{
2231	struct drm_plane *plane = crtc->primary;
2232	struct drm_atomic_state *state;
2233	struct drm_plane_state *plane_state;
2234	struct drm_crtc_state *crtc_state;
2235	int ret = 0;
2236
2237	state = drm_atomic_state_alloc(plane->dev);
2238	if (!state)
2239		return -ENOMEM;
 
2240
2241	state->acquire_ctx = drm_modeset_legacy_acquire_ctx(crtc);
2242retry:
2243	crtc_state = drm_atomic_get_crtc_state(state, crtc);
2244	if (IS_ERR(crtc_state)) {
2245		ret = PTR_ERR(crtc_state);
2246		goto fail;
2247	}
2248	crtc_state->event = event;
2249
2250	plane_state = drm_atomic_get_plane_state(state, plane);
2251	if (IS_ERR(plane_state)) {
2252		ret = PTR_ERR(plane_state);
2253		goto fail;
2254	}
2255
2256	ret = drm_atomic_set_crtc_for_plane(plane_state, crtc);
2257	if (ret != 0)
2258		goto fail;
2259	drm_atomic_set_fb_for_plane(plane_state, fb);
2260
2261	/* Make sure we don't accidentally do a full modeset. */
2262	state->allow_modeset = false;
2263	if (!crtc_state->active) {
2264		DRM_DEBUG_ATOMIC("[CRTC:%d] disabled, rejecting legacy flip\n",
2265				 crtc->base.id);
2266		ret = -EINVAL;
2267		goto fail;
2268	}
2269
2270	if (flags & DRM_MODE_PAGE_FLIP_ASYNC)
2271		nv50_wndw_atom(plane_state)->interval = 0;
2272
2273	ret = drm_atomic_nonblocking_commit(state);
2274fail:
2275	if (ret == -EDEADLK)
2276		goto backoff;
2277
2278	drm_atomic_state_put(state);
2279	return ret;
2280
2281backoff:
2282	drm_atomic_state_clear(state);
2283	drm_atomic_legacy_backoff(state);
2284
2285	/*
2286	 * Someone might have exchanged the framebuffer while we dropped locks
2287	 * in the backoff code. We need to fix up the fb refcount tracking the
2288	 * core does for us.
2289	 */
2290	plane->old_fb = plane->fb;
2291
2292	goto retry;
2293}
 
2294
2295static int
2296nv50_head_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
2297		    uint32_t size)
2298{
2299	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
2300	u32 i;
2301
2302	for (i = 0; i < size; i++) {
2303		nv_crtc->lut.r[i] = r[i];
2304		nv_crtc->lut.g[i] = g[i];
2305		nv_crtc->lut.b[i] = b[i];
2306	}
2307
2308	nv50_head_lut_load(crtc);
2309	return 0;
2310}
2311
/* Free a duplicated head state: release the base drm_crtc_state's
 * references first, then the containing nv50_head_atom allocation.
 */
static void
nv50_head_atomic_destroy_state(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	__drm_atomic_helper_crtc_destroy_state(&asyh->state);
	kfree(asyh);
}
2320
/* Duplicate the head's current atomic state.  kmalloc (not kzalloc) is
 * fine here because every field of the new nv50_head_atom is assigned
 * below, and the set/clr flush masks start cleared.
 */
static struct drm_crtc_state *
nv50_head_atomic_duplicate_state(struct drm_crtc *crtc)
{
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh;
	if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state);
	asyh->view = armh->view;
	asyh->mode = armh->mode;
	asyh->lut  = armh->lut;
	asyh->core = armh->core;
	asyh->curs = armh->curs;
	asyh->base = armh->base;
	asyh->ovly = armh->ovly;
	asyh->dither = armh->dither;
	asyh->procamp = armh->procamp;
	asyh->clr.mask = 0;
	asyh->set.mask = 0;
	return &asyh->state;
}
2342
/* Install "state" as the crtc's current state, destroying any previous
 * one.  NOTE(review): named after a DRM core helper that doesn't exist
 * in this tree yet -- the double-underscore prefix mirrors the helper
 * convention; confirm against the DRM core when rebasing.
 */
static void
__drm_atomic_helper_crtc_reset(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	if (crtc->state)
		crtc->funcs->atomic_destroy_state(crtc, crtc->state);
	crtc->state = state;
	crtc->state->crtc = crtc;
}
2352
/* .reset hook: replace the crtc's state with a fresh zeroed
 * nv50_head_atom.  On allocation failure the old state is kept (with a
 * warning) rather than leaving crtc->state dangling.
 */
static void
nv50_head_reset(struct drm_crtc *crtc)
{
	struct nv50_head_atom *asyh;

	if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL))))
		return;

	__drm_atomic_helper_crtc_reset(crtc, &asyh->state);
}
2363
/* Tear down a head: release its overlay/immediate channels, then the
 * LUT buffer object (unmap, unpin, drop reference), then the DRM crtc
 * itself.  Also used on the create-path error exit, so each step must
 * tolerate partially-initialized state (hence the lut.nvbo NULL check).
 */
static void
nv50_head_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
2382
/* CRTC vtable: atomic state management plus the custom page_flip that
 * supports async (non-vblanked) flips. */
static const struct drm_crtc_funcs
nv50_head_func = {
	.reset = nv50_head_reset,
	.gamma_set = nv50_head_gamma_set,
	.destroy = nv50_head_destroy,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = nv50_head_page_flip,
	.set_property = drm_atomic_helper_crtc_set_property,
	.atomic_duplicate_state = nv50_head_atomic_duplicate_state,
	.atomic_destroy_state = nv50_head_atomic_destroy_state,
};
2394
/* Create and register one head (crtc) with its base and cursor planes,
 * a pinned+mapped VRAM buffer for the LUT, and its overlay channels.
 * On any failure after crtc init, nv50_head_destroy() unwinds whatever
 * was set up.  Returns 0 on success or a negative errno.
 */
static int
nv50_head_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct nv50_base *base;
	struct nv50_curs *curs;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	/* Initialise the software LUT to an identity ramp (i<<8 expands
	 * 8-bit indices to the 16-bit gamma range). */
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	ret = nv50_base_new(drm, head, &base);
	if (ret == 0)
		ret = nv50_curs_new(drm, head, &curs);
	if (ret) {
		/* Before crtc init we can simply free the head. */
		kfree(head);
		return ret;
	}

	crtc = &head->base.base;
	drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane,
				  &curs->wndw.plane, &nv50_head_func,
				  "head-%d", head->base.index);
	drm_crtc_helper_add(crtc, &nv50_head_help);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* Allocate, pin and map the hardware LUT buffer in VRAM; unwind
	 * partial progress on failure so lut.nvbo is NULL or fully ready. */
	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	/* Full teardown on any failure past crtc initialisation. */
	if (ret)
		nv50_head_destroy(crtc);
	return ret;
}
2464
2465/******************************************************************************
2466 * Output path helpers
2467 *****************************************************************************/
/* Shared encoder atomic check: decide whether the head should scan out
 * the panel's native mode (with the GPU scaler active) instead of the
 * user's mode, and flag a modeset if the adjusted mode changed.
 * Returns 0 always; native_mode == NULL means no scaling to consider.
 */
static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);

	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	if (!native_mode)
		return 0;

	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Force use of scaler for non-EDID modes. */
			if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		/* Any scaling mode other than NONE always drives the
		 * panel at its native timings. */
		mode = native_mode;
	}

	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}

	return 0;
}
2509
/* Default encoder atomic_check: delegate to the view check using the
 * connector's cached native mode (NULL for non-panel outputs).
 */
static int
nv50_outp_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nouveau_connector *nv_connector =
		nouveau_connector(conn_state->connector);
	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   nv_connector->native_mode);
}
2520
2521/******************************************************************************
2522 * DAC
2523 *****************************************************************************/
/* Legacy DPMS for analog (DAC) outputs: ask the firmware/core method
 * to gate hsync/vsync according to the requested power level (STANDBY
 * drops hsync, SUSPEND drops vsync, OFF drops both).
 */
static void
nv50_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = 1,
		.pwr.data  = 1,
		.pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
			      mode != DRM_MODE_DPMS_OFF),
		.pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
			      mode != DRM_MODE_DPMS_OFF),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
2547
/* Disconnect a DAC from its head by writing 0 to the OR's control
 * method on the core channel (different offsets/strides pre/post
 * GF110), then forget the crtc association.
 */
static void
nv50_dac_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
2572
/* Attach a DAC to its head and program sync polarity (and, on GF110+,
 * the per-head "magic" configuration word) from the adjusted mode.
 */
static void
nv50_dac_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u32 *push;

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			/* Bits 0/1 select negative h/v sync polarity. */
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	/* Remember the attachment so disable() knows to detach. */
	nv_encoder->crtc = encoder->crtc;
}
2619
/* Analog load detection: ask firmware to drive the DAC with a test
 * level (VBIOS-provided, falling back to 340) and report whether a
 * load (monitor) was sensed.
 */
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	/* Prefer the VBIOS-calibrated test value; 340 is the fallback
	 * used when the VBIOS does not provide one. */
	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}
2646
/* DAC encoder helper vtable. */
static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.dpms = nv50_dac_dpms,
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_dac_enable,
	.disable = nv50_dac_disable,
	.detect = nv50_dac_detect
};
2655
/* Free a DAC encoder (the drm_encoder is embedded in nouveau_encoder,
 * so freeing the container frees both). */
static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
2662
/* DAC encoder base vtable. */
static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};
2667
/* Create a DAC encoder for a DCB output entry and attach it to the
 * given connector.  The OR index comes from the DCB "or" bitmask.
 * Returns 0 on success or -ENOMEM.
 */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	/* DDC bus for EDID, if the DCB provides one. */
	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
2698
2699/******************************************************************************
2700 * Audio
2701 *****************************************************************************/
/* Disable HDA ELD/audio on the SOR driving nv_crtc by sending the
 * HDA_ELD method with no ELD payload.  The hashm encodes the head in
 * bits 8..11 (0x0100 << index).
 */
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
2720
/* Enable HDA audio on the SOR: build the ELD from the connector's EDID
 * and pass it to the HDA_ELD method.  Silently does nothing if the
 * monitor doesn't advertise audio support.
 */
static void
nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	/* Only transmit the header plus the actual ELD length. */
	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}
2752
2753/******************************************************************************
2754 * HDMI
2755 *****************************************************************************/
/* Turn off HDMI infoframe/packet transmission on the SOR driving
 * nv_crtc (pwr.state defaults to 0 in the zero-initialized struct).
 */
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
2774
/* Enable HDMI on the SOR when the sink is an HDMI monitor: program the
 * rekey constant and the maximum audio-capable packet count derived
 * from the horizontal blanking interval, then enable audio.
 */
static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	/* Audio packets fit in hblank minus rekey minus a fixed margin;
	 * hardware counts them in units of 32 pixels. */
	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv50_audio_enable(encoder, mode);
}
2808
2809/******************************************************************************
2810 * MST
2811 *****************************************************************************/
2812#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
2813#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
2814#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)
2815
/* Per-output MST state: the DP MST topology manager for one SOR, plus
 * one MST encoder (msto) per head that can drive it.  "modified" marks
 * that payload state needs flushing. */
struct nv50_mstm {
	struct nouveau_encoder *outp;		/* physical DP output */

	struct drm_dp_mst_topology_mgr mgr;	/* DRM MST topology manager */
	struct nv50_msto *msto[4];		/* one per head */

	bool modified;
};
2824
/* MST connector: one DRM connector per discovered MST port. */
struct nv50_mstc {
	struct nv50_mstm *mstm;			/* owning topology */
	struct drm_dp_mst_port *port;		/* MST branch port */
	struct drm_connector connector;

	struct drm_display_mode *native;	/* preferred/native mode */
	struct edid *edid;

	int pbn;				/* allocated bandwidth (PBN) */
};
2835
/* MST encoder: the head-to-MST-connector link for one head.
 * "disabled" flags a pending teardown completed in msto_cleanup. */
struct nv50_msto {
	struct drm_encoder encoder;

	struct nv50_head *head;			/* head currently driving this stream */
	struct nv50_mstc *mstc;			/* connector currently linked */
	bool disabled;
};
2843
/* Find the MST payload entry matching this msto's allocated VCPI, or
 * NULL if none.  The first loop only emits debug logging of the whole
 * payload table; the second does the actual lookup.
 */
static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;

	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}

	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}

	return NULL;
}
2868
/* Post-flush half of an MST stream update: if the payload entry is gone
 * from the table, hand the VCPI back to the manager; if the stream was
 * disabled this commit, detach the MSTO from its head and connector.
 */
static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
	/* VCPI allocated but absent from the payload table -> torn down. */
	if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
		drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
	if (msto->disabled) {
		msto->mstc = NULL;
		msto->head = NULL;
		msto->disabled = false;
	}
}
2885
/* Pre-flush half of an MST stream update: tell NVKM which timeslots the
 * SOR should drive for this head.  Zeroed VCPI args (no port, or no
 * payload) instruct NVKM to tear the stream down.
 */
static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht  = mstm->outp->dcb->hasht,
		/* Keep the DCB hash but substitute the head this MSTO is
		 * currently driven from into the head bits.
		 */
		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
			       (0x0100 << msto->head->base.index),
	};

	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}

	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);
	nvif_mthd(&drm->display->disp, 0, &args, sizeof(args));
}
2920
2921static int
2922nv50_msto_atomic_check(struct drm_encoder *encoder,
2923		       struct drm_crtc_state *crtc_state,
2924		       struct drm_connector_state *conn_state)
2925{
2926	struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
2927	struct nv50_mstm *mstm = mstc->mstm;
2928	int bpp = conn_state->connector->display_info.bpc * 3;
2929	int slots;
2930
2931	mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);
2932
2933	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
2934	if (slots < 0)
2935		return slots;
2936
2937	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
2938					   mstc->native);
2939}
2940
/* Enable an MST stream: find the connector routed to this MSTO, claim a
 * VCPI for its bandwidth, and point the parent SOR at the head.
 */
static void
nv50_msto_enable(struct drm_encoder *encoder)
{
	struct nv50_head *head = nv50_head(encoder->crtc);
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = NULL;
	struct nv50_mstm *mstm = NULL;
	struct drm_connector *connector;
	u8 proto, depth;
	int slots;
	bool r;

	/* Locate the connector whose state routes it to this encoder. */
	drm_for_each_connector(connector, encoder->dev) {
		if (connector->state->best_encoder == &msto->encoder) {
			mstc = nv50_mstc(connector);
			mstm = mstc->mstm;
			break;
		}
	}

	if (WARN_ON(!mstc))
		return;

	/* PBN was computed during atomic_check; allocation shouldn't fail. */
	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, &slots);
	WARN_ON(!r);

	/* Hardware protocol selector: DP on sublink A (0x8) or B (0x9). */
	if (mstm->outp->dcb->sorconf.link & 1)
		proto = 0x8;
	else
		proto = 0x9;

	/* Map bpc to the hardware depth field; >=10bpc uses the max. */
	switch (mstc->connector.display_info.bpc) {
	case  6: depth = 0x2; break;
	case  8: depth = 0x5; break;
	case 10:
	default: depth = 0x6; break;
	}

	mstm->outp->update(mstm->outp, head->base.index,
			   &head->base.base.state->adjusted_mode, proto, depth);

	msto->head = head;
	msto->mstc = mstc;
	mstm->modified = true;
}
2986
/* Disable an MST stream: zero its timeslot allocation and detach the
 * SOR from the head.  The MSTO/MSTC unbind itself is deferred to
 * nv50_msto_cleanup() after the hardware update completes.
 */
static void
nv50_msto_disable(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	if (mstc->port)
		drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);

	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
	mstm->modified = true;
	msto->disabled = true;
}
3001
/* Helper vtable for the per-head MST encoders. */
static const struct drm_encoder_helper_funcs
nv50_msto_help = {
	.disable = nv50_msto_disable,
	.enable = nv50_msto_enable,
	.atomic_check = nv50_msto_atomic_check,
};
3008
3009static void
3010nv50_msto_destroy(struct drm_encoder *encoder)
3011{
3012	struct nv50_msto *msto = nv50_msto(encoder);
3013	drm_encoder_cleanup(&msto->encoder);
3014	kfree(msto);
3015}
3016
/* Base encoder vtable for MST encoders. */
static const struct drm_encoder_funcs
nv50_msto = {
	.destroy = nv50_msto_destroy,
};
3021
3022static int
3023nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
3024	      struct nv50_msto **pmsto)
3025{
3026	struct nv50_msto *msto;
3027	int ret;
3028
3029	if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
3030		return -ENOMEM;
3031
3032	ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
3033			       DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
3034	if (ret) {
3035		kfree(*pmsto);
3036		*pmsto = NULL;
3037		return ret;
3038	}
3039
3040	drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
3041	msto->encoder.possible_crtcs = heads;
3042	return 0;
3043}
3044
3045static struct drm_encoder *
3046nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
3047			      struct drm_connector_state *connector_state)
3048{
3049	struct nv50_head *head = nv50_head(connector_state->crtc);
3050	struct nv50_mstc *mstc = nv50_mstc(connector);
3051	if (mstc->port) {
3052		struct nv50_mstm *mstm = mstc->mstm;
3053		return &mstm->msto[head->base.index]->encoder;
3054	}
3055	return NULL;
3056}
3057
3058static struct drm_encoder *
3059nv50_mstc_best_encoder(struct drm_connector *connector)
3060{
3061	struct nv50_mstc *mstc = nv50_mstc(connector);
3062	if (mstc->port) {
3063		struct nv50_mstm *mstm = mstc->mstm;
3064		return &mstm->msto[0]->encoder;
3065	}
3066	return NULL;
3067}
3068
/* No per-mode filtering here: link bandwidth is validated later, at
 * atomic-check time, via the PBN/timeslot accounting.
 */
static enum drm_mode_status
nv50_mstc_mode_valid(struct drm_connector *connector,
		     struct drm_display_mode *mode)
{
	return MODE_OK;
}
3075
3076static int
3077nv50_mstc_get_modes(struct drm_connector *connector)
3078{
3079	struct nv50_mstc *mstc = nv50_mstc(connector);
3080	int ret = 0;
3081
3082	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
3083	drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid);
3084	if (mstc->edid) {
3085		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
3086		drm_edid_to_eld(&mstc->connector, mstc->edid);
3087	}
3088
3089	if (!mstc->connector.display_info.bpc)
3090		mstc->connector.display_info.bpc = 8;
3091
3092	if (mstc->native)
3093		drm_mode_destroy(mstc->connector.dev, mstc->native);
3094	mstc->native = nouveau_conn_native_mode(&mstc->connector);
3095	return ret;
3096}
3097
/* Helper vtable for MST connectors. */
static const struct drm_connector_helper_funcs
nv50_mstc_help = {
	.get_modes = nv50_mstc_get_modes,
	.mode_valid = nv50_mstc_mode_valid,
	.best_encoder = nv50_mstc_best_encoder,
	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
};
3105
3106static enum drm_connector_status
3107nv50_mstc_detect(struct drm_connector *connector, bool force)
3108{
3109	struct nv50_mstc *mstc = nv50_mstc(connector);
3110	if (!mstc->port)
3111		return connector_status_disconnected;
3112	return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
3113}
3114
3115static void
3116nv50_mstc_destroy(struct drm_connector *connector)
3117{
3118	struct nv50_mstc *mstc = nv50_mstc(connector);
3119	drm_connector_cleanup(&mstc->connector);
3120	kfree(mstc);
3121}
3122
/* Base connector vtable for MST connectors; state handling is shared
 * with the regular nouveau connector implementation.
 */
static const struct drm_connector_funcs
nv50_mstc = {
	.dpms = drm_atomic_helper_connector_dpms,
	.reset = nouveau_conn_reset,
	.detect = nv50_mstc_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = drm_atomic_helper_connector_set_property,
	.destroy = nv50_mstc_destroy,
	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
	.atomic_set_property = nouveau_conn_atomic_set_property,
	.atomic_get_property = nouveau_conn_atomic_get_property,
};
3136
3137static int
3138nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
3139	      const char *path, struct nv50_mstc **pmstc)
3140{
3141	struct drm_device *dev = mstm->outp->base.base.dev;
3142	struct nv50_mstc *mstc;
3143	int ret, i;
3144
3145	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
3146		return -ENOMEM;
3147	mstc->mstm = mstm;
3148	mstc->port = port;
3149
3150	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
3151				 DRM_MODE_CONNECTOR_DisplayPort);
3152	if (ret) {
3153		kfree(*pmstc);
3154		*pmstc = NULL;
3155		return ret;
3156	}
3157
3158	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);
3159
3160	mstc->connector.funcs->reset(&mstc->connector);
3161	nouveau_conn_attach_properties(&mstc->connector);
3162
3163	for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto; i++)
3164		drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);
3165
3166	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
3167	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
3168	drm_mode_connector_set_path_property(&mstc->connector, path);
3169	return 0;
3170}
3171
/* Second half of an MST payload update: wait for the sink's allocation
 * change to take effect (ACT), send part 2 of the payload update, then
 * let each affected MSTO finish its own teardown/bookkeeping.
 */
static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
	/* NOTE(review): both return codes are ignored here; failures are
	 * only visible via the helpers' own logging.
	 */
	ret = drm_dp_check_act_status(&mstm->mgr);

	ret = drm_dp_update_payload_part2(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			/* Only MSTOs belonging to this topology. */
			if (mstc && mstc->mstm == mstm)
				nv50_msto_cleanup(msto);
		}
	}

	mstm->modified = false;
}
3195
/* First half of an MST payload update: send part 1 of the payload table
 * to the sink, then program each affected MSTO's timeslots into the
 * hardware before the core channel update is kicked.
 */
static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
	/* NOTE(review): return code ignored; errors surface via helper logs. */
	ret = drm_dp_update_payload_part1(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			/* Only MSTOs belonging to this topology. */
			if (mstc && mstc->mstm == mstm)
				nv50_msto_prepare(msto);
		}
	}
}
3215
3216static void
3217nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
3218{
3219	struct nv50_mstm *mstm = nv50_mstm(mgr);
3220	drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
3221}
3222
/* Topology-manager callback: a port disappeared.  Unregister the
 * connector, clear mstc->port under the modeset locks (so detect /
 * best_encoder observe the teardown), and drop our reference.
 */
static void
nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
			    struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nv50_mstc *mstc = nv50_mstc(connector);

	drm_connector_unregister(&mstc->connector);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);
	/* Marks the connector dead for detect/best_encoder/msto paths. */
	mstc->port = NULL;
	drm_modeset_unlock_all(drm->dev);

	drm_connector_unreference(&mstc->connector);
}
3239
/* Topology-manager callback: expose a newly created MST connector to
 * fbcon (under the modeset locks) and then to userspace.
 */
static void
nv50_mstm_register_connector(struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);
	drm_modeset_unlock_all(drm->dev);

	drm_connector_register(connector);
}
3251
3252static struct drm_connector *
3253nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
3254			struct drm_dp_mst_port *port, const char *path)
3255{
3256	struct nv50_mstm *mstm = nv50_mstm(mgr);
3257	struct nv50_mstc *mstc;
3258	int ret;
3259
3260	ret = nv50_mstc_new(mstm, port, path, &mstc);
3261	if (ret) {
3262		if (mstc)
3263			mstc->connector.funcs->destroy(&mstc->connector);
3264		return NULL;
3265	}
3266
3267	return &mstc->connector;
3268}
3269
/* Callbacks handed to the DRM MST topology manager. */
static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
	.add_connector = nv50_mstm_add_connector,
	.register_connector = nv50_mstm_register_connector,
	.destroy_connector = nv50_mstm_destroy_connector,
	.hotplug = nv50_mstm_hotplug,
};
3277
/* Service an MST IRQ: drain ESI events until the sink reports nothing
 * left to handle, acking each batch.  If the DPCD read fails the sink
 * is assumed gone and the topology is torn down.
 */
void
nv50_mstm_service(struct nv50_mstm *mstm)
{
	struct drm_dp_aux *aux = mstm->mgr.aux;
	bool handled = true;
	int ret;
	u8 esi[8] = {};

	while (handled) {
		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (ret != 8) {
			/* Sink unreachable; drop MST mode entirely. */
			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
			return;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		/* Ack the serviced event bytes (ESI1..ESI3). */
		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
	}

}
3300
3301void
3302nv50_mstm_remove(struct nv50_mstm *mstm)
3303{
3304	if (mstm)
3305		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
3306}
3307
/* Switch MST on/off on both sides of the link: write DP_MSTM_CTRL on
 * the sink (DPCD 1.2+ only), then tell NVKM to (de)configure the SOR
 * for MST.  @dpcd carries the sink's DPCD revision and is reused as a
 * scratch byte for the read-modify-write of DP_MSTM_CTRL.
 */
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp;
	int ret;

	/* MST control register only exists from DPCD revision 1.2. */
	if (dpcd >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
		if (ret < 0)
			return ret;

		dpcd &= ~DP_MST_EN;
		if (state)
			dpcd |= DP_MST_EN;

		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
		if (ret < 0)
			return ret;
	}

	return nvif_mthd(disp, 0, &args, sizeof(args));
}
3342
/* Decide whether to run the link in MST mode.  @dpcd[0] holds the
 * sink's DPCD revision; @dpcd[1] is filled with DP_MSTM_CAP.  A sink
 * without MST capability is demoted to rev 1.1 in-place so callers
 * treat it as SST.  Returns the resulting MST state (>=0) or -errno.
 */
int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	int ret, state = 0;

	if (!mstm)
		return 0;

	if (dpcd[0] >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			return ret;

		if (!(dpcd[1] & DP_MST_CAP))
			dpcd[0] = 0x11;
		else
			state = allow;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], state);
	if (ret)
		return ret;

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
	if (ret)
		/* Topology bring-up failed: back MST off again. */
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return mstm->mgr.mst_state;
}
3372
3373static void
3374nv50_mstm_fini(struct nv50_mstm *mstm)
3375{
3376	if (mstm && mstm->mgr.mst_state)
3377		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
3378}
3379
3380static void
3381nv50_mstm_init(struct nv50_mstm *mstm)
3382{
3383	if (mstm && mstm->mgr.mst_state)
3384		drm_dp_mst_topology_mgr_resume(&mstm->mgr);
3385}
3386
3387static void
3388nv50_mstm_del(struct nv50_mstm **pmstm)
3389{
3390	struct nv50_mstm *mstm = *pmstm;
3391	if (mstm) {
3392		kfree(*pmstm);
3393		*pmstm = NULL;
3394	}
3395}
3396
/* Create MST state for a DP SOR: a topology manager plus one MST
 * encoder per head the output can drive.  On error the caller is
 * expected to dispose of *pmstm via nv50_mstm_del().
 */
static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct nv50_mstm **pmstm)
{
	const int max_payloads = hweight8(outp->dcb->heads);
	struct drm_device *dev = outp->base.base.dev;
	struct nv50_mstm *mstm;
	int ret, i;
	u8 dpcd;

	/* This is a workaround for some monitors not functioning
	 * correctly in MST mode on initial module load.  I think
	 * some bad interaction with the VBIOS may be responsible.
	 *
	 * A good ol' off and on again seems to work here ;)
	 */
	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
	if (ret >= 0 && dpcd >= 0x12)
		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);

	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
		return -ENOMEM;
	mstm->outp = outp;
	mstm->mgr.cbs = &nv50_mstm;

	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev->dev, aux, aux_max,
					   max_payloads, conn_base_id);
	if (ret)
		return ret;

	/* One MST encoder per head this output can reach. */
	for (i = 0; i < max_payloads; i++) {
		ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
				    i, &mstm->msto[i]);
		if (ret)
			return ret;
	}

	return 0;
}
3436
3437/******************************************************************************
3438 * SOR
3439 *****************************************************************************/
/* Legacy DPMS for SORs: forward the requested power state to NVKM. */
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		/* Anything other than DPMS_ON is treated as "off". */
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
3458
/* Program the SOR control method in the core EVO channel, attaching it
 * to (mode != NULL) or detaching it from (mode == NULL) @head.
 * nv_encoder->ctrl accumulates the head mask and protocol across calls.
 */
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
		struct drm_display_mode *mode, u8 proto, u8 depth)
{
	struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base;
	u32 *push;

	if (!mode) {
		/* Detach this head; if no heads remain, clear everything. */
		nv_encoder->ctrl &= ~BIT(head);
		if (!(nv_encoder->ctrl & 0x0000000f))
			nv_encoder->ctrl = 0;
	} else {
		nv_encoder->ctrl |= proto << 8;
		nv_encoder->ctrl |= BIT(head);
	}

	if ((push = evo_wait(core, 6))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			/* Pre-GF110: syncs/depth live in the SOR control word. */
			if (mode) {
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					nv_encoder->ctrl |= 0x00001000;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					nv_encoder->ctrl |= 0x00002000;
				nv_encoder->ctrl |= depth << 16;
			}
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
		} else {
			/* GF110+: syncs/depth go through a per-head method. */
			if (mode) {
				u32 magic = 0x31ec6000 | (head << 25);
				u32 syncs = 0x00000001;
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					syncs |= 0x00000008;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					syncs |= 0x00000010;
				if (mode->flags & DRM_MODE_FLAG_INTERLACE)
					magic |= 0x00000001;

				evo_mthd(push, 0x0404 + (head * 0x300), 2);
				evo_data(push, syncs | (depth << 6));
				evo_data(push, magic);
			}
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		}
		evo_data(push, nv_encoder->ctrl);
		evo_kick(push, core);
	}
}
3506
/* Disable a SOR: detach it from its head and stop any audio/HDMI
 * infoframes.  For DP sinks, request D3 first (best effort).
 */
static void
nv50_sor_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		struct nvkm_i2c_aux *aux = nv_encoder->aux;
		u8 pwr;

		/* DP: read-modify-write DP_SET_POWER to put the sink in D3. */
		if (aux) {
			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
			if (ret == 0) {
				pwr &= ~DP_SET_POWER_MASK;
				pwr |=  DP_SET_POWER_D3;
				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
			}
		}

		/* Detach the SOR from the head, then tear down audio/HDMI. */
		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
		nv50_audio_disable(encoder, nv_crtc);
		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
	}
}
3533
/* Enable a SOR: choose protocol/depth from the DCB output type and the
 * attached connector, run any output-specific setup (HDMI infoframes,
 * LVDS script, DP audio), then attach the SOR to the head.
 */
static void
nv50_sor_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u8 proto = 0xf;	/* invalid until a DCB type matches */
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 *  - Need to (i.e. rate > 165MHz)
			 *  - DCB says we can
			 *  - Not an HDMI monitor, since there's no dual-link
			 *    on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_enable(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		/* Build the LVDS script flags: bit 8 = dual-link,
		 * bit 9 = 24-bit panel.
		 */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* SPWG panels encode dual-link in EDID byte 121. */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			/* Strap bits differ for single- vs dual-link panels. */
			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		/* Map connector bpc to the hardware depth field. */
		if (nv_connector->base.display_info.bpc == 6)
			depth = 0x2;
		else
		if (nv_connector->base.display_info.bpc == 8)
			depth = 0x5;
		else
			depth = 0x6;

		/* DP protocol selector: sublink A (0x8) or B (0x9). */
		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;

		nv50_audio_enable(encoder, mode);
		break;
	default:
		BUG_ON(1);
		break;
	}

	nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth);
}
3634
/* Helper vtable for SOR encoders. */
static const struct drm_encoder_helper_funcs
nv50_sor_help = {
	.dpms = nv50_sor_dpms,
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_sor_enable,
	.disable = nv50_sor_disable,
};
3642
3643static void
3644nv50_sor_destroy(struct drm_encoder *encoder)
3645{
3646	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3647	nv50_mstm_del(&nv_encoder->dp.mstm);
3648	drm_encoder_cleanup(encoder);
3649	kfree(encoder);
3650}
3651
/* Base encoder vtable for SORs. */
static const struct drm_encoder_funcs
nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
3656
/* Create a SOR encoder for one DCB output entry and attach it to
 * @connector.  DP outputs get an aux channel and (on GF110+) MST state;
 * everything else gets a plain i2c bus for DDC.
 */
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->update = nv50_sor_update;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_help);

	drm_mode_connector_attach_encoder(connector, encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			nv_encoder->i2c = &aux->i2c;
			nv_encoder->aux = aux;
		}

		/*TODO: Use DP Info Table to check for support. */
		if (nv50_disp(encoder->dev)->disp->oclass >= GF110_DISP) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
					    nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			/* NOTE(review): on failure the initialised encoder
			 * is not unwound here; it is reaped with the rest
			 * of the mode objects on teardown.
			 */
			if (ret)
				return ret;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return 0;
}
3717
3718/******************************************************************************
3719 * PIOR
3720 *****************************************************************************/
/* Legacy DPMS for PIORs (external TMDS/DP encoders): forward the power
 * state and output type to NVKM.
 */
static void
nv50_pior_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_pior_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
		.pwr.type = nv_encoder->dcb->type,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
3740
3741static int
3742nv50_pior_atomic_check(struct drm_encoder *encoder,
3743		       struct drm_crtc_state *crtc_state,
3744		       struct drm_connector_state *conn_state)
3745{
3746	int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
3747	if (ret)
3748		return ret;
3749	crtc_state->adjusted_mode.clock *= 2;
3750	return 0;
3751}
3752
/* Disable a PIOR: zero its control method in the core channel (pre-GF110
 * layout only; PIORs don't exist on newer display) and detach the crtc.
 */
static void
nv50_pior_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
3774
3775static void
3776nv50_pior_enable(struct drm_encoder *encoder)
3777{
3778	struct nv50_mast *mast = nv50_mast(encoder->dev);
3779	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3780	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
3781	struct nouveau_connector *nv_connector;
3782	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
3783	u8 owner = 1 << nv_crtc->index;
3784	u8 proto, depth;
3785	u32 *push;
3786
3787	nv_connector = nouveau_encoder_connector_get(nv_encoder);
3788	switch (nv_connector->base.display_info.bpc) {
3789	case 10: depth = 0x6; break;
3790	case  8: depth = 0x5; break;
3791	case  6: depth = 0x2; break;
3792	default: depth = 0x0; break;
3793	}
3794
3795	switch (nv_encoder->dcb->type) {
3796	case DCB_OUTPUT_TMDS:
3797	case DCB_OUTPUT_DP:
3798		proto = 0x0;
3799		break;
3800	default:
3801		BUG_ON(1);
3802		break;
3803	}
 
 
 
 
 
 
 
 
 
3804
3805	push = evo_wait(mast, 8);
3806	if (push) {
3807		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
3808			u32 ctrl = (depth << 16) | (proto << 8) | owner;
3809			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
3810				ctrl |= 0x00001000;
3811			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
3812				ctrl |= 0x00002000;
3813			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
3814			evo_data(push, ctrl);
3815		}
3816
3817		evo_kick(push, mast);
3818	}
3819
3820	nv_encoder->crtc = encoder->crtc;
3821}
 
 
 
 
 
 
 
 
 
3822
/* Helper vtable for PIOR encoders. */
static const struct drm_encoder_helper_funcs
nv50_pior_help = {
	.dpms = nv50_pior_dpms,
	.atomic_check = nv50_pior_atomic_check,
	.enable = nv50_pior_enable,
	.disable = nv50_pior_disable,
};
 
 
 
 
3830
/* Release a PIOR encoder's DRM bookkeeping and free it. */
static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
3837
/* Base encoder vtable for PIORs. */
static const struct drm_encoder_funcs
nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
3842
/* Create a PIOR encoder for one DCB output entry.  The DDC channel
 * comes from the external encoder chip (i2c bus for TMDS, aux for DP).
 */
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc  = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc  = aux ? &aux->i2c : NULL;
		/* Reported as TMDS; the external chip handles the DP link. */
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
3888
3889/******************************************************************************
3890 * Atomic
3891 *****************************************************************************/
3892
/* Kick a core-channel update.  MST payload programming is split around
 * the flush: prepare before (part 1), cleanup after (part 2/ACT).
 * Completion is detected by polling the notifier scanout buffer NVKM
 * writes when the update lands.
 */
static void
nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_dmac *core = &disp->mast.base;
	struct nv50_mstm *mstm;
	struct drm_encoder *encoder;
	u32 *push;

	NV_ATOMIC(drm, "commit core %08x\n", interlock);

	/* Part-1 payload updates for any topology touched this commit. */
	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_prepare(mstm);
		}
	}

	if ((push = evo_wait(core, 5))) {
		/* 0x0084: request completion notification; 0x0080: update
		 * with the given channel interlock mask.
		 */
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, interlock);
		evo_data(push, 0x00000000);
		nouveau_bo_wr32(disp->sync, 0, 0x00000000);
		evo_kick(push, core);
		/* Poll the sync buffer for up to 2s for the notifier write. */
		if (nvif_msec(&drm->device, 2000ULL,
			if (nouveau_bo_rd32(disp->sync, 0))
				break;
			usleep_range(1, 2);
		) < 0)
			NV_ERROR(drm, "EVO timeout\n");
	}

	/* Part-2 payload updates / VCPI release after the flush landed. */
	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_cleanup(mstm);
		}
	}
}
3936
/* Apply a swapped-in atomic state to the hardware.
 *
 * Ordering is significant: disables (heads, planes, output paths) are
 * staged and flushed first — with intermediate core flushes where an
 * output path demands it (MST) — then enables are staged and flushed as
 * one interlocked update.  interlock_core/interlock_chan accumulate
 * which channels have pending state and are reset after each flush.
 */
static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
	struct drm_device *dev = state->dev;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;
	u32 interlock_core = 0;	/* non-zero: core channel has staged state */
	u32 interlock_chan = 0;	/* per-window channel interlock bits */
	int i;

	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
	drm_atomic_helper_wait_for_fences(dev, state, false);
	drm_atomic_helper_wait_for_dependencies(state);
	drm_atomic_helper_update_legacy_modeset_state(dev, state);

	if (atom->lock_core)
		mutex_lock(&disp->mutex);

	/* Disable head(s). */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
			  asyh->clr.mask, asyh->set.mask);

		if (asyh->clr.mask) {
			nv50_head_flush_clr(head, asyh, atom->flush_disable);
			interlock_core |= 1;
		}
	}

	/* Disable plane(s). */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
			  asyw->clr.mask, asyw->set.mask);
		if (!asyw->clr.mask)
			continue;

		interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core,
						      atom->flush_disable,
						      asyw);
	}

	/* Disable output path(s). */
	list_for_each_entry(outp, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
			  outp->clr.mask, outp->set.mask);

		if (outp->clr.mask) {
			help->disable(encoder);
			interlock_core |= 1;
			/* Some paths (MST) need the disable flushed to the
			 * hardware immediately; do so and reset tracking. */
			if (outp->flush_disable) {
				nv50_disp_atomic_commit_core(drm, interlock_chan);
				interlock_core = 0;
				interlock_chan = 0;
			}
		}
	}

	/* Flush disable. */
	if (interlock_core) {
		if (atom->flush_disable) {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
			interlock_core = 0;
			interlock_chan = 0;
		}
	}

	/* Update output path(s). */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
			  outp->set.mask, outp->clr.mask);

		if (outp->set.mask) {
			help->enable(encoder);
			interlock_core = 1;
		}

		/* Entries are consumed here; state_clear frees any left over
		 * from a failed/aborted commit. */
		list_del(&outp->head);
		kfree(outp);
	}

	/* Update head(s). */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
			  asyh->set.mask, asyh->clr.mask);

		if (asyh->set.mask) {
			nv50_head_flush_set(head, asyh);
			interlock_core = 1;
		}
	}

	/* Hold a vblank reference for each head with a pending event, so
	 * the completion event below carries an accurate count/timestamp. */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		if (crtc->state->event)
			drm_crtc_vblank_get(crtc);
	}

	/* Update plane(s). */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
			  asyw->set.mask, asyw->clr.mask);
		if ( !asyw->set.mask &&
		    (!asyw->clr.mask || atom->flush_disable))
			continue;

		interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw);
	}

	/* Flush update. */
	if (interlock_core) {
		if (!interlock_chan && atom->state.legacy_cursor_update) {
			/* Legacy cursor fast path: kick an update without
			 * waiting on the completion notifier. */
			u32 *push = evo_wait(&disp->mast, 2);
			if (push) {
				evo_mthd(push, 0x0080, 1);
				evo_data(push, 0x00000000);
				evo_kick(push, &disp->mast);
			}
		} else {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
		}
	}

	if (atom->lock_core)
		mutex_unlock(&disp->mutex);

	/* Wait for HW to signal completion. */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		int ret = nv50_wndw_wait_armed(wndw, asyw);
		if (ret)
			NV_ERROR(drm, "%s: timeout\n", plane->name);
	}

	/* Deliver pending completion events and drop the vblank refs
	 * taken above. */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		if (crtc->state->event) {
			unsigned long flags;
			/* Get correct count/ts if racing with vblank irq */
			drm_accurate_vblank_count(crtc);
			spin_lock_irqsave(&crtc->dev->event_lock, flags);
			drm_crtc_send_vblank_event(crtc, crtc->state->event);
			spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
			crtc->state->event = NULL;
			drm_crtc_vblank_put(crtc);
		}
	}

	drm_atomic_helper_commit_hw_done(state);
	drm_atomic_helper_cleanup_planes(dev, state);
	drm_atomic_helper_commit_cleanup_done(state);
	drm_atomic_state_put(state);
}
4118
4119static void
4120nv50_disp_atomic_commit_work(struct work_struct *work)
4121{
4122	struct drm_atomic_state *state =
4123		container_of(work, typeof(*state), commit_work);
4124	nv50_disp_atomic_commit_tail(state);
4125}
4126
/* drm_mode_config_funcs.atomic_commit implementation.
 *
 * Prepares planes, arms flip-completion notifiers, swaps in the new
 * state, then either runs the commit tail directly (blocking) or queues
 * it on a workqueue (nonblocking).  Also manages a runtime-PM reference
 * that is held for as long as any head remains enabled.
 */
static int
nv50_disp_atomic_commit(struct drm_device *dev,
			struct drm_atomic_state *state, bool nonblock)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	bool active = false;
	int ret, i;

	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0 && ret != -EACCES)
		return ret;

	ret = drm_atomic_helper_setup_commit(state, nonblock);
	if (ret)
		goto done;

	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);

	ret = drm_atomic_helper_prepare_planes(dev, state);
	if (ret)
		goto done;

	if (!nonblock) {
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto done;
	}

	/* Arm a completion notifier for each plane whose image changes, so
	 * the commit tail can wait until scanout has actually switched.
	 * wndw->ntfy alternates between two slots in the sync buffer. */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (asyw->set.image) {
			asyw->ntfy.handle = wndw->dmac->sync.handle;
			asyw->ntfy.offset = wndw->ntfy;
			asyw->ntfy.awaken = false;
			asyw->set.ntfy = true;
			nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000);
			wndw->ntfy ^= 0x10;
		}
	}

	drm_atomic_helper_swap_state(state, true);
	drm_atomic_state_get(state);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

	/* Track whether any head is now enabled.  On the first transition
	 * to enabled, the early return deliberately keeps the runtime-PM
	 * reference taken by pm_runtime_get_sync() above (no put). */
	drm_for_each_crtc(crtc, dev) {
		if (crtc->state->enable) {
			if (!drm->have_disp_power_ref) {
				drm->have_disp_power_ref = true;
				return ret;
			}
			active = true;
			break;
		}
	}

	/* All heads now off: release the long-held display power ref. */
	if (!active && drm->have_disp_power_ref) {
		pm_runtime_put_autosuspend(dev->dev);
		drm->have_disp_power_ref = false;
	}

done:
	/* Balances the pm_runtime_get_sync() at the top of this function. */
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}
4200
4201static struct nv50_outp_atom *
4202nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
4203{
4204	struct nv50_outp_atom *outp;
4205
4206	list_for_each_entry(outp, &atom->outp, head) {
4207		if (outp->encoder == encoder)
4208			return outp;
 
 
 
 
 
4209	}
4210
4211	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
4212	if (!outp)
4213		return ERR_PTR(-ENOMEM);
4214
4215	list_add(&outp->head, &atom->outp);
4216	outp->encoder = encoder;
4217	return outp;
4218}
4219
/* If the head currently driving this connector is active and will
 * undergo a full modeset, record that the output path must be torn
 * down (clr) first.
 *
 * Note this deliberately inspects the *current* (pre-swap) state:
 * connector->state / crtc->state are the states in effect before this
 * commit, while crtc_state fetched from atom->state is the new one.
 */
static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
				struct drm_connector *connector)
{
	struct drm_encoder *encoder = connector->state->best_encoder;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	/* Connector not currently routed to a head: nothing to disable. */
	if (!(crtc = connector->state->crtc))
		return 0;

	crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
	if (crtc->state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		/* MST teardown must be flushed to hardware before the rest
		 * of the commit proceeds (see commit_tail). */
		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			outp->flush_disable = true;
			atom->flush_disable = true;
		}
		outp->clr.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}
4248
4249static int
4250nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
4251				struct drm_connector_state *connector_state)
 
 
 
 
 
 
 
4252{
4253	struct drm_encoder *encoder = connector_state->best_encoder;
4254	struct drm_crtc_state *crtc_state;
4255	struct drm_crtc *crtc;
4256	struct nv50_outp_atom *outp;
4257
4258	if (!(crtc = connector_state->crtc))
4259		return 0;
4260
4261	crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
4262	if (crtc_state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
4263		outp = nv50_disp_outp_atomic_add(atom, encoder);
4264		if (IS_ERR(outp))
4265			return PTR_ERR(outp);
4266
4267		outp->set.ctrl = true;
4268		atom->lock_core = true;
4269	}
4270
4271	return 0;
4272}
4273
4274static int
4275nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
4276{
4277	struct nv50_atom *atom = nv50_atom(state);
4278	struct drm_connector_state *connector_state;
4279	struct drm_connector *connector;
4280	int ret, i;
4281
4282	ret = drm_atomic_helper_check(dev, state);
4283	if (ret)
4284		return ret;
4285
4286	for_each_connector_in_state(state, connector, connector_state, i) {
4287		ret = nv50_disp_outp_atomic_check_clr(atom, connector);
4288		if (ret)
4289			return ret;
4290
4291		ret = nv50_disp_outp_atomic_check_set(atom, connector_state);
4292		if (ret)
4293			return ret;
4294	}
4295
4296	return 0;
4297}
4298
4299static void
4300nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
4301{
4302	struct nv50_atom *atom = nv50_atom(state);
4303	struct nv50_outp_atom *outp, *outt;
 
 
4304
4305	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
4306		list_del(&outp->head);
4307		kfree(outp);
4308	}
 
 
 
4309
4310	drm_atomic_state_default_clear(state);
 
 
 
4311}
4312
4313static void
4314nv50_disp_atomic_state_free(struct drm_atomic_state *state)
4315{
4316	struct nv50_atom *atom = nv50_atom(state);
4317	drm_atomic_state_default_release(&atom->state);
4318	kfree(atom);
4319}
4320
4321static struct drm_atomic_state *
4322nv50_disp_atomic_state_alloc(struct drm_device *dev)
4323{
4324	struct nv50_atom *atom;
4325	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
4326	    drm_atomic_state_init(dev, &atom->state) < 0) {
4327		kfree(atom);
4328		return NULL;
4329	}
4330	INIT_LIST_HEAD(&atom->outp);
4331	return &atom->state;
4332}
4333
/* Mode-config entry points for the atomic modesetting path. */
static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};
4344
4345/******************************************************************************
4346 * Init
4347 *****************************************************************************/
4348
4349void
4350nv50_display_fini(struct drm_device *dev)
4351{
4352	struct nouveau_encoder *nv_encoder;
4353	struct drm_encoder *encoder;
4354	struct drm_plane *plane;
4355
4356	drm_for_each_plane(plane, dev) {
4357		struct nv50_wndw *wndw = nv50_wndw(plane);
4358		if (plane->funcs != &nv50_wndw)
4359			continue;
4360		nv50_wndw_fini(wndw);
4361	}
4362
4363	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
4364		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
4365			nv_encoder = nouveau_encoder(encoder);
4366			nv50_mstm_fini(nv_encoder->dp.mstm);
4367		}
4368	}
4369}
4370
4371int
4372nv50_display_init(struct drm_device *dev)
4373{
4374	struct drm_encoder *encoder;
4375	struct drm_plane *plane;
4376	struct drm_crtc *crtc;
4377	u32 *push;
4378
4379	push = evo_wait(nv50_mast(dev), 32);
4380	if (!push)
4381		return -EBUSY;
4382
4383	evo_mthd(push, 0x0088, 1);
4384	evo_data(push, nv50_mast(dev)->base.sync.handle);
4385	evo_kick(push, nv50_mast(dev));
4386
4387	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
4388		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
4389			const struct drm_encoder_helper_funcs *help;
4390			struct nouveau_encoder *nv_encoder;
4391
4392			nv_encoder = nouveau_encoder(encoder);
4393			help = encoder->helper_private;
4394			if (help && help->dpms)
4395				help->dpms(encoder, DRM_MODE_DPMS_ON);
4396
4397			nv50_mstm_init(nv_encoder->dp.mstm);
4398		}
4399	}
4400
4401	drm_for_each_crtc(crtc, dev) {
4402		nv50_head_lut_load(crtc);
4403	}
 
 
 
4404
4405	drm_for_each_plane(plane, dev) {
4406		struct nv50_wndw *wndw = nv50_wndw(plane);
4407		if (plane->funcs != &nv50_wndw)
4408			continue;
4409		nv50_wndw_init(wndw);
4410	}
4411
4412	return 0;
4413}
4414
4415void
4416nv50_display_destroy(struct drm_device *dev)
4417{
4418	struct nv50_disp *disp = nv50_disp(dev);
 
4419
4420	nv50_dmac_destroy(&disp->mast.base, disp->disp);
 
 
 
4421
4422	nouveau_bo_unmap(disp->sync);
4423	if (disp->sync)
4424		nouveau_bo_unpin(disp->sync);
4425	nouveau_bo_ref(NULL, &disp->sync);
4426
4427	nouveau_display(dev)->priv = NULL;
4428	kfree(disp);
4429}
4430
/* "atomic" module parameter: when non-zero, advertise the atomic
 * modesetting ioctl to userspace (off by default).  Permissions 0400
 * make it read-only via sysfs, so it is effectively load-time only. */
MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
static int nouveau_atomic = 0;
module_param_named(atomic, nouveau_atomic, int, 0400);
 
4434
/* Construct the nv50+ display: shared sync buffer, core EVO channel,
 * one head per hardware CRTC, and encoders/connectors from the VBIOS
 * DCB table.  On any failure, nv50_display_destroy() unwinds whatever
 * was created.  Returns 0 on success, negative errno otherwise.
 */
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	mutex_init(&disp->mutex);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	if (nouveau_atomic)
		dev->driver->driver_features |= DRIVER_ATOMIC;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
			      &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x022448);
	else
		crtcs = 2;	/* pre-GF110 hardware always has two heads */

	for (i = 0; i < crtcs; i++) {
		ret = nv50_head_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		/* On-chip outputs use SOR/DAC; off-chip ones go via PIOR. */
		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		/* A bad DCB entry shouldn't be fatal; warn and carry on. */
		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				     dcbe->location, dcbe->type,
				     ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}