/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <acpi/video.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_vblank.h>

#include "nouveau_fbcon.h"
#include "nouveau_crtc.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include <nvif/class.h>
#include <nvif/cl0046.h>
#include <nvif/event.h>

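/* Notifier callback invoked by nvif for a head's vblank interrupt; it
 * forwards the event to the DRM vblank machinery and keeps the notifier
 * armed so further interrupts continue to be delivered.
 */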
static int
nouveau_display_vblank_handler(struct nvif_notify *notify)
{
	struct nouveau_crtc *nv_crtc =
		container_of(notify, typeof(*nv_crtc), vblank);
	drm_crtc_handle_vblank(&nv_crtc->base);
	return NVIF_NOTIFY_KEEP;
}

int
nouveau_display_vblank_enable(struct drm_device *dev, unsigned int pipe)
{
	struct drm_crtc *crtc;
	struct nouveau_crtc *nv_crtc;

	crtc = drm_crtc_from_index(dev, pipe);
	if (!crtc)
		return -EINVAL;

	nv_crtc = nouveau_crtc(crtc);
	nvif_notify_get(&nv_crtc->vblank);

	return 0;
}

void
nouveau_display_vblank_disable(struct drm_device *dev, unsigned int pipe)
{
	struct drm_crtc *crtc;
	struct nouveau_crtc *nv_crtc;

	crtc = drm_crtc_from_index(dev, pipe);
	if (!crtc)
		return;

	nv_crtc = nouveau_crtc(crtc);
	nvif_notify_put(&nv_crtc->vblank);
}

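/* Convert the raw scanline reported by the hardware into a position relative
 * to the start of the active region: lines inside the vertical blanking
 * interval come out negative, which is how the DRM scanout-position helpers
 * interpret a position inside vblank.
 */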
static inline int
calc(int blanks, int blanke, int total, int line)
{
	if (blanke >= blanks) {
		if (line >= blanks)
			line -= total;
	} else {
		if (line >= blanks)
			line -= total;
		line -= blanke + 1;
	}
	return line;
}

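/* Query the scanout position of one head through the NV04_DISP scanoutpos
 * method. The query is retried for up to 20 scanlines while the hardware
 * reports line zero; the two timestamps returned by the method are passed
 * back so callers can bound when the sample was taken.
 */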
static bool
nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos,
				ktime_t *stime, ktime_t *etime)
{
	struct {
		struct nv04_disp_mthd_v0 base;
		struct nv04_disp_scanoutpos_v0 scan;
	} args = {
		.base.method = NV04_DISP_SCANOUTPOS,
		.base.head = nouveau_crtc(crtc)->index,
	};
	struct nouveau_display *disp = nouveau_display(crtc->dev);
	struct drm_vblank_crtc *vblank = &crtc->dev->vblank[drm_crtc_index(crtc)];
	int retry = 20;
	bool ret = false;

	do {
		ret = nvif_mthd(&disp->disp.object, 0, &args, sizeof(args));
		if (ret != 0)
			return false;

		if (args.scan.vline) {
			ret = true;
			break;
		}

		if (retry) ndelay(vblank->linedur_ns);
	} while (retry--);

	*hpos = args.scan.hline;
	*vpos = calc(args.scan.vblanks, args.scan.vblanke,
		     args.scan.vtotal, args.scan.vline);
	if (stime) *stime = ns_to_ktime(args.scan.time[0]);
	if (etime) *etime = ns_to_ktime(args.scan.time[1]);

	return ret;
}

bool
nouveau_display_scanoutpos(struct drm_device *dev, unsigned int pipe,
			   bool in_vblank_irq, int *vpos, int *hpos,
			   ktime_t *stime, ktime_t *etime,
			   const struct drm_display_mode *mode)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == pipe) {
			return nouveau_display_scanoutpos_head(crtc, vpos, hpos,
							       stime, etime);
		}
	}

	return false;
}

static void
nouveau_display_vblank_fini(struct drm_device *dev)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		nvif_notify_fini(&nv_crtc->vblank);
	}
}

static int
nouveau_display_vblank_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_crtc *crtc;
	int ret;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		ret = nvif_notify_init(&disp->disp.object,
				       nouveau_display_vblank_handler, false,
				       NV04_DISP_NTFY_VBLANK,
				       &(struct nvif_notify_head_req_v0) {
					.head = nv_crtc->index,
				       },
				       sizeof(struct nvif_notify_head_req_v0),
				       sizeof(struct nvif_notify_head_rep_v0),
				       &nv_crtc->vblank);
		if (ret) {
			nouveau_display_vblank_fini(dev);
			return ret;
		}
	}

	ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
	if (ret) {
		nouveau_display_vblank_fini(dev);
		return ret;
	}

	return 0;
}

static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	if (fb->nvbo)
		drm_gem_object_put_unlocked(&fb->nvbo->bo.base);

	drm_framebuffer_cleanup(drm_fb);
	kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
				       struct drm_file *file_priv,
				       unsigned int *handle)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	return drm_gem_handle_create(file_priv, &fb->nvbo->bo.base, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
	.destroy = nouveau_user_framebuffer_destroy,
	.create_handle = nouveau_user_framebuffer_create_handle,
};

int
nouveau_framebuffer_new(struct drm_device *dev,
			const struct drm_mode_fb_cmd2 *mode_cmd,
			struct nouveau_bo *nvbo,
			struct nouveau_framebuffer **pfb)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_framebuffer *fb;
	int ret;

	/* YUV overlays have special requirements pre-NV50 */
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA &&
	    (mode_cmd->pixel_format == DRM_FORMAT_YUYV ||
	     mode_cmd->pixel_format == DRM_FORMAT_UYVY ||
	     mode_cmd->pixel_format == DRM_FORMAT_NV12 ||
	     mode_cmd->pixel_format == DRM_FORMAT_NV21) &&
	    (mode_cmd->pitches[0] & 0x3f || /* align 64 */
	     mode_cmd->pitches[0] >= 0x10000 || /* at most 64k pitch */
	     (mode_cmd->pitches[1] && /* pitches for planes must match */
	      mode_cmd->pitches[0] != mode_cmd->pitches[1]))) {
		struct drm_format_name_buf format_name;
		DRM_DEBUG_KMS("Unsuitable framebuffer: format: %s; pitches: 0x%x 0x%x\n",
			      drm_get_format_name(mode_cmd->pixel_format,
						  &format_name),
			      mode_cmd->pitches[0],
			      mode_cmd->pitches[1]);
		return -EINVAL;
	}

	if (!(fb = *pfb = kzalloc(sizeof(*fb), GFP_KERNEL)))
		return -ENOMEM;

	drm_helper_mode_fill_fb_struct(dev, &fb->base, mode_cmd);
	fb->nvbo = nvbo;

	ret = drm_framebuffer_init(dev, &fb->base, &nouveau_framebuffer_funcs);
	if (ret)
		kfree(fb);
	return ret;
}

struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
				struct drm_file *file_priv,
				const struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct nouveau_framebuffer *fb;
	struct nouveau_bo *nvbo;
	struct drm_gem_object *gem;
	int ret;

	gem = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);
	if (!gem)
		return ERR_PTR(-ENOENT);
	nvbo = nouveau_gem_object(gem);

	ret = nouveau_framebuffer_new(dev, mode_cmd, nvbo, &fb);
	if (ret == 0)
		return &fb->base;

	drm_gem_object_put_unlocked(gem);
	return ERR_PTR(ret);
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
};

struct nouveau_drm_prop_enum_list {
	u8 gen_mask;
	int type;
	char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
	{ 6, UNDERSCAN_AUTO, "auto" },
	{ 6, UNDERSCAN_OFF, "off" },
	{ 6, UNDERSCAN_ON, "on" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
	{ 7, DITHERING_MODE_AUTO, "auto" },
	{ 7, DITHERING_MODE_OFF, "off" },
	{ 1, DITHERING_MODE_ON, "on" },
	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
	{ 6, DITHERING_DEPTH_AUTO, "auto" },
	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
	{}
};

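/* Helper for nouveau_display_create_properties(): count how many entries in a
 * nouveau_drm_prop_enum_list apply to the given display generation (bit 'gen'
 * set in gen_mask), create an enum property sized accordingly, and add only
 * those entries to it. 'dev' is taken from the caller's scope.
 */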
#define PROP_ENUM(p,gen,n,list) do { \
	struct nouveau_drm_prop_enum_list *l = (list); \
	int c = 0; \
	while (l->gen_mask) { \
		if (l->gen_mask & (1 << (gen))) \
			c++; \
		l++; \
	} \
	if (c) { \
		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c); \
		l = (list); \
		while (p && l->gen_mask) { \
			if (l->gen_mask & (1 << (gen))) { \
				drm_property_add_enum(p, l->type, l->name); \
			} \
			l++; \
		} \
	} \
} while(0)

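/* Deferred hotplug processing. Re-probing connectors touches the hardware,
 * so the GPU is woken via runtime PM for the duration of the rescan and
 * allowed to autosuspend again afterwards.
 */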
static void
nouveau_display_hpd_work(struct work_struct *work)
{
	struct nouveau_drm *drm = container_of(work, typeof(*drm), hpd_work);

	pm_runtime_get_sync(drm->dev->dev);

	drm_helper_hpd_irq_event(drm->dev);

	pm_runtime_mark_last_busy(drm->dev->dev);
	pm_runtime_put_sync(drm->dev->dev);
}

#ifdef CONFIG_ACPI

static int
nouveau_display_acpi_ntfy(struct notifier_block *nb, unsigned long val,
			  void *data)
{
	struct nouveau_drm *drm = container_of(nb, typeof(*drm), acpi_nb);
	struct acpi_bus_event *info = data;
	int ret;

	if (!strcmp(info->device_class, ACPI_VIDEO_CLASS)) {
		if (info->type == ACPI_VIDEO_NOTIFY_PROBE) {
			ret = pm_runtime_get(drm->dev->dev);
			if (ret == 1 || ret == -EACCES) {
				/* If the GPU is already awake, or in a state
				 * where we can't wake it up, it can handle
				 * its own hotplug events.
				 */
				pm_runtime_put_autosuspend(drm->dev->dev);
			} else if (ret == 0) {
				/* This may be the only indication we receive
				 * of a connector hotplug on a runtime
				 * suspended GPU, schedule hpd_work to check.
				 */
				NV_DEBUG(drm, "ACPI requested connector reprobe\n");
				schedule_work(&drm->hpd_work);
				pm_runtime_put_noidle(drm->dev->dev);
			} else {
				NV_WARN(drm, "Dropped ACPI reprobe event due to RPM error: %d\n",
					ret);
			}

			/* acpi-video should not generate keypresses for this */
			return NOTIFY_BAD;
		}
	}

	return NOTIFY_DONE;
}
#endif

int
nouveau_display_init(struct drm_device *dev, bool resume, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	int ret;

	ret = disp->init(dev, resume, runtime);
	if (ret)
		return ret;

	/* enable connector detection and polling for connectors without HPD
	 * support
	 */
	drm_kms_helper_poll_enable(dev);

	/* enable hotplug interrupts */
	drm_connector_list_iter_begin(dev, &conn_iter);
	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_get(&conn->hpd);
	}
	drm_connector_list_iter_end(&conn_iter);

	return ret;
}

void
nouveau_display_fini(struct drm_device *dev, bool suspend, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;

	if (!suspend) {
		if (drm_drv_uses_atomic_modeset(dev))
			drm_atomic_helper_shutdown(dev);
		else
			drm_helper_force_disable_all(dev);
	}

	/* disable hotplug interrupts */
	drm_connector_list_iter_begin(dev, &conn_iter);
	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_put(&conn->hpd);
	}
	drm_connector_list_iter_end(&conn_iter);

	if (!runtime)
		cancel_work_sync(&drm->hpd_work);

	drm_kms_helper_poll_disable(dev);
	disp->fini(dev, suspend);
}

static void
nouveau_display_create_properties(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	int gen;

	if (disp->disp.object.oclass < NV50_DISP)
		gen = 0;
	else
	if (disp->disp.object.oclass < GF110_DISP)
		gen = 1;
	else
		gen = 2;

	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

	disp->underscan_hborder_property =
		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

	disp->underscan_vborder_property =
		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

	if (gen < 1)
		return;

	/* -90..+90 */
	disp->vibrant_hue_property =
		drm_property_create_range(dev, 0, "vibrant hue", 0, 180);

	/* -100..+100 */
	disp->color_vibrance_property =
		drm_property_create_range(dev, 0, "color vibrance", 0, 200);
}

int
nouveau_display_create(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvkm_device *device = nvxx_device(&drm->client.device);
	struct nouveau_display *disp;
	int ret;

	disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	drm_mode_config_init(dev);
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dvi_i_properties(dev);

	dev->mode_config.funcs = &nouveau_mode_config_funcs;
	dev->mode_config.fb_base = device->func->resource_addr(device, 1);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_CELSIUS) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_FERMI) {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	} else {
		dev->mode_config.max_width = 16384;
		dev->mode_config.max_height = 16384;
	}

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;

	if (drm->client.device.info.chipset < 0x11)
		dev->mode_config.async_page_flip = false;
	else
		dev->mode_config.async_page_flip = true;

	drm_kms_helper_poll_init(dev);
	drm_kms_helper_poll_disable(dev);

	if (nouveau_modeset != 2 && drm->vbios.dcb.entries) {
		ret = nvif_disp_ctor(&drm->client.device, 0, &disp->disp);
		if (ret == 0) {
			nouveau_display_create_properties(dev);
			if (disp->disp.object.oclass < NV50_DISP)
				ret = nv04_display_create(dev);
			else
				ret = nv50_display_create(dev);
		}
	} else {
		ret = 0;
	}

	if (ret)
		goto disp_create_err;

	drm_mode_config_reset(dev);

	if (dev->mode_config.num_crtc) {
		ret = nouveau_display_vblank_init(dev);
		if (ret)
			goto vblank_err;
	}

	INIT_WORK(&drm->hpd_work, nouveau_display_hpd_work);
#ifdef CONFIG_ACPI
	drm->acpi_nb.notifier_call = nouveau_display_acpi_ntfy;
	register_acpi_notifier(&drm->acpi_nb);
#endif

	return 0;

vblank_err:
	disp->dtor(dev);
disp_create_err:
	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
	return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);

#ifdef CONFIG_ACPI
	unregister_acpi_notifier(&nouveau_drm(dev)->acpi_nb);
#endif
	nouveau_display_vblank_fini(dev);

	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);

	if (disp->dtor)
		disp->dtor(dev);

	nvif_disp_dtor(&disp->disp);

	nouveau_drm(dev)->display = NULL;
	kfree(disp);
}

int
nouveau_display_suspend(struct drm_device *dev, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);

	if (drm_drv_uses_atomic_modeset(dev)) {
		if (!runtime) {
			disp->suspend = drm_atomic_helper_suspend(dev);
			if (IS_ERR(disp->suspend)) {
				int ret = PTR_ERR(disp->suspend);
				disp->suspend = NULL;
				return ret;
			}
		}
	}

	nouveau_display_fini(dev, true, runtime);
	return 0;
}

void
nouveau_display_resume(struct drm_device *dev, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);

	nouveau_display_init(dev, true, runtime);

	if (drm_drv_uses_atomic_modeset(dev)) {
		if (disp->suspend) {
			drm_atomic_helper_resume(dev, disp->suspend);
			disp->suspend = NULL;
		}
		return;
	}
}

int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
			    struct drm_mode_create_dumb *args)
{
	struct nouveau_cli *cli = nouveau_cli(file_priv);
	struct nouveau_bo *bo;
	uint32_t domain;
	int ret;

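	/* The pitch is padded to a 256-byte multiple; this is a conservative
	 * value assumed to satisfy scanout alignment on every generation the
	 * driver supports, rather than a per-chipset minimum.
	 */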
	args->pitch = roundup(args->width * (args->bpp / 8), 256);
	args->size = args->pitch * args->height;
	args->size = roundup(args->size, PAGE_SIZE);

	/* Use VRAM if there is any; otherwise fall back to system memory */
	if (nouveau_drm(dev)->client.device.info.ram_size != 0)
		domain = NOUVEAU_GEM_DOMAIN_VRAM;
	else
		domain = NOUVEAU_GEM_DOMAIN_GART;

	ret = nouveau_gem_new(cli, args->size, 0, domain, 0, 0, &bo);
	if (ret)
		return ret;

	ret = drm_gem_handle_create(file_priv, &bo->bo.base, &args->handle);
	drm_gem_object_put_unlocked(&bo->bo.base);
	return ret;
}

int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
				struct drm_device *dev,
				uint32_t handle, uint64_t *poffset)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(file_priv, handle);
	if (gem) {
		struct nouveau_bo *bo = nouveau_gem_object(gem);
		*poffset = drm_vma_node_offset_addr(&bo->bo.base.vma_node);
		drm_gem_object_put_unlocked(gem);
		return 0;
	}

	return -ENOENT;
}
/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>

#include "nouveau_fbcon.h"
#include "dispnv04/hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include "nouveau_fence.h"

#include <engine/disp.h>

#include <core/class.h>

static int
nouveau_display_vblank_handler(void *data, int head)
{
	struct nouveau_drm *drm = data;
	drm_handle_vblank(drm->dev, head);
	return NVKM_EVENT_KEEP;
}

int
nouveau_display_vblank_enable(struct drm_device *dev, int head)
{
	struct nouveau_display *disp = nouveau_display(dev);
	if (disp) {
		nouveau_event_get(disp->vblank[head]);
		return 0;
	}
	return -EIO;
}

void
nouveau_display_vblank_disable(struct drm_device *dev, int head)
{
	struct nouveau_display *disp = nouveau_display(dev);
	if (disp)
		nouveau_event_put(disp->vblank[head]);
}

static inline int
calc(int blanks, int blanke, int total, int line)
{
	if (blanke >= blanks) {
		if (line >= blanks)
			line -= total;
	} else {
		if (line >= blanks)
			line -= total;
		line -= blanke + 1;
	}
	return line;
}

int
nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos,
				ktime_t *stime, ktime_t *etime)
{
	const u32 mthd = NV04_DISP_SCANOUTPOS + nouveau_crtc(crtc)->index;
	struct nouveau_display *disp = nouveau_display(crtc->dev);
	struct nv04_display_scanoutpos args;
	int ret, retry = 1;

	do {
		ret = nv_exec(disp->core, mthd, &args, sizeof(args));
		if (ret != 0)
			return 0;

		if (args.vline) {
			ret |= DRM_SCANOUTPOS_ACCURATE;
			ret |= DRM_SCANOUTPOS_VALID;
			break;
		}

		if (retry) ndelay(crtc->linedur_ns);
	} while (retry--);

	*hpos = args.hline;
	*vpos = calc(args.vblanks, args.vblanke, args.vtotal, args.vline);
	if (stime) *stime = ns_to_ktime(args.time[0]);
	if (etime) *etime = ns_to_ktime(args.time[1]);

	if (*vpos < 0)
		ret |= DRM_SCANOUTPOS_INVBL;
	return ret;
}

int
nouveau_display_scanoutpos(struct drm_device *dev, int head, unsigned int flags,
			   int *vpos, int *hpos, ktime_t *stime, ktime_t *etime)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == head) {
			return nouveau_display_scanoutpos_head(crtc, vpos, hpos,
							       stime, etime);
		}
	}

	return 0;
}

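/* Precise vblank timestamping: look up the CRTC for this head and let the
 * DRM helper derive the timestamp from the current scanout position and the
 * CRTC's hardware mode.
 */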
int
nouveau_display_vblstamp(struct drm_device *dev, int head, int *max_error,
			 struct timeval *time, unsigned flags)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == head) {
			return drm_calc_vbltimestamp_from_scanoutpos(dev,
					head, max_error, time, flags, crtc,
					&crtc->hwmode);
		}
	}

	return -EINVAL;
}

static void
nouveau_display_vblank_fini(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	int i;

	drm_vblank_cleanup(dev);

	if (disp->vblank) {
		for (i = 0; i < dev->mode_config.num_crtc; i++)
			nouveau_event_ref(NULL, &disp->vblank[i]);
		kfree(disp->vblank);
		disp->vblank = NULL;
	}
}

static int
nouveau_display_vblank_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_disp *pdisp = nouveau_disp(drm->device);
	int ret, i;

	disp->vblank = kzalloc(dev->mode_config.num_crtc *
			       sizeof(*disp->vblank), GFP_KERNEL);
	if (!disp->vblank)
		return -ENOMEM;

	for (i = 0; i < dev->mode_config.num_crtc; i++) {
		ret = nouveau_event_new(pdisp->vblank, i,
					nouveau_display_vblank_handler,
					drm, &disp->vblank[i]);
		if (ret) {
			nouveau_display_vblank_fini(dev);
			return ret;
		}
	}

	ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
	if (ret) {
		nouveau_display_vblank_fini(dev);
		return ret;
	}

	return 0;
}

static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	if (fb->nvbo)
		drm_gem_object_unreference_unlocked(&fb->nvbo->gem);

	drm_framebuffer_cleanup(drm_fb);
	kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
				       struct drm_file *file_priv,
				       unsigned int *handle)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	return drm_gem_handle_create(file_priv, &fb->nvbo->gem, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
	.destroy = nouveau_user_framebuffer_destroy,
	.create_handle = nouveau_user_framebuffer_create_handle,
};

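/* Fill in the DRM framebuffer and, on NV50 and later, pre-compute the values
 * later used when programming the display engine: the DMA object matching the
 * buffer's tiling layout (r_dma), the format word for the framebuffer depth
 * (r_format), and the pitch/tiling word (r_pitch).
 */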
int
nouveau_framebuffer_init(struct drm_device *dev,
			 struct nouveau_framebuffer *nv_fb,
			 struct drm_mode_fb_cmd2 *mode_cmd,
			 struct nouveau_bo *nvbo)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_framebuffer *fb = &nv_fb->base;
	int ret;

	drm_helper_mode_fill_fb_struct(fb, mode_cmd);
	nv_fb->nvbo = nvbo;

	if (nv_device(drm->device)->card_type >= NV_50) {
		u32 tile_flags = nouveau_bo_tile_layout(nvbo);
		if (tile_flags == 0x7a00 ||
		    tile_flags == 0xfe00)
			nv_fb->r_dma = NvEvoFB32;
		else
		if (tile_flags == 0x7000)
			nv_fb->r_dma = NvEvoFB16;
		else
			nv_fb->r_dma = NvEvoVRAM_LP;

		switch (fb->depth) {
		case  8: nv_fb->r_format = 0x1e00; break;
		case 15: nv_fb->r_format = 0xe900; break;
		case 16: nv_fb->r_format = 0xe800; break;
		case 24:
		case 32: nv_fb->r_format = 0xcf00; break;
		case 30: nv_fb->r_format = 0xd100; break;
		default:
			NV_ERROR(drm, "unknown depth %d\n", fb->depth);
			return -EINVAL;
		}

		if (nvbo->tile_flags & NOUVEAU_GEM_TILE_NONCONTIG) {
			NV_ERROR(drm, "framebuffer requires contiguous bo\n");
			return -EINVAL;
		}

		if (nv_device(drm->device)->chipset == 0x50)
			nv_fb->r_format |= (tile_flags << 8);

		if (!tile_flags) {
			if (nv_device(drm->device)->card_type < NV_D0)
				nv_fb->r_pitch = 0x00100000 | fb->pitches[0];
			else
				nv_fb->r_pitch = 0x01000000 | fb->pitches[0];
		} else {
			u32 mode = nvbo->tile_mode;
			if (nv_device(drm->device)->card_type >= NV_C0)
				mode >>= 4;
			nv_fb->r_pitch = ((fb->pitches[0] / 4) << 4) | mode;
		}
	}

	ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
	if (ret)
		return ret;

	return 0;
}

static struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
				struct drm_file *file_priv,
				struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct nouveau_framebuffer *nouveau_fb;
	struct drm_gem_object *gem;
	int ret = -ENOMEM;

	gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
	if (!gem)
		return ERR_PTR(-ENOENT);

	nouveau_fb = kzalloc(sizeof(struct nouveau_framebuffer), GFP_KERNEL);
	if (!nouveau_fb)
		goto err_unref;

	ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem));
	if (ret)
		goto err;

	return &nouveau_fb->base;

err:
	kfree(nouveau_fb);
err_unref:
	drm_gem_object_unreference_unlocked(gem);
	return ERR_PTR(ret);
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
};

struct nouveau_drm_prop_enum_list {
	u8 gen_mask;
	int type;
	char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
	{ 6, UNDERSCAN_AUTO, "auto" },
	{ 6, UNDERSCAN_OFF, "off" },
	{ 6, UNDERSCAN_ON, "on" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
	{ 7, DITHERING_MODE_AUTO, "auto" },
	{ 7, DITHERING_MODE_OFF, "off" },
	{ 1, DITHERING_MODE_ON, "on" },
	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
	{ 6, DITHERING_DEPTH_AUTO, "auto" },
	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
	{}
};

#define PROP_ENUM(p,gen,n,list) do { \
	struct nouveau_drm_prop_enum_list *l = (list); \
	int c = 0; \
	while (l->gen_mask) { \
		if (l->gen_mask & (1 << (gen))) \
			c++; \
		l++; \
	} \
	if (c) { \
		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c); \
		l = (list); \
		c = 0; \
		while (p && l->gen_mask) { \
			if (l->gen_mask & (1 << (gen))) { \
				drm_property_add_enum(p, c, l->type, l->name); \
				c++; \
			} \
			l++; \
		} \
	} \
} while(0)

int
nouveau_display_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_connector *connector;
	int ret;

	ret = disp->init(dev);
	if (ret)
		return ret;

	/* enable polling for external displays */
	drm_kms_helper_poll_enable(dev);

	/* enable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		if (conn->hpd_func) nouveau_event_get(conn->hpd_func);
	}

	return ret;
}

void
nouveau_display_fini(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_connector *connector;

	/* disable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		if (conn->hpd_func) nouveau_event_put(conn->hpd_func);
	}

	drm_kms_helper_poll_disable(dev);
	disp->fini(dev);
}

int
nouveau_display_create(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_device *device = nouveau_dev(dev);
	struct nouveau_display *disp;
	int ret, gen;

	disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	drm_mode_config_init(dev);
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dvi_i_properties(dev);

	if (nv_device(drm->device)->card_type < NV_50)
		gen = 0;
	else
	if (nv_device(drm->device)->card_type < NV_D0)
		gen = 1;
	else
		gen = 2;

	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

	disp->underscan_hborder_property =
		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

	disp->underscan_vborder_property =
		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

	if (gen >= 1) {
		/* -90..+90 */
		disp->vibrant_hue_property =
			drm_property_create_range(dev, 0, "vibrant hue", 0, 180);

		/* -100..+100 */
		disp->color_vibrance_property =
			drm_property_create_range(dev, 0, "color vibrance", 0, 200);
	}

	dev->mode_config.funcs = &nouveau_mode_config_funcs;
	dev->mode_config.fb_base = nv_device_resource_start(device, 1);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;
	if (nv_device(drm->device)->card_type < NV_10) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else
	if (nv_device(drm->device)->card_type < NV_50) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	}

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;

	if (nv_device(drm->device)->chipset < 0x11)
		dev->mode_config.async_page_flip = false;
	else
		dev->mode_config.async_page_flip = true;

	drm_kms_helper_poll_init(dev);
	drm_kms_helper_poll_disable(dev);

	if (drm->vbios.dcb.entries) {
		static const u16 oclass[] = {
			GM107_DISP_CLASS,
			NVF0_DISP_CLASS,
			NVE0_DISP_CLASS,
			NVD0_DISP_CLASS,
			NVA3_DISP_CLASS,
			NV94_DISP_CLASS,
			NVA0_DISP_CLASS,
			NV84_DISP_CLASS,
			NV50_DISP_CLASS,
			NV04_DISP_CLASS,
		};
		int i;

		for (i = 0, ret = -ENODEV; ret && i < ARRAY_SIZE(oclass); i++) {
			ret = nouveau_object_new(nv_object(drm), NVDRM_DEVICE,
						 NVDRM_DISPLAY, oclass[i],
						 NULL, 0, &disp->core);
		}

		if (ret == 0) {
			if (nv_mclass(disp->core) < NV50_DISP_CLASS)
				ret = nv04_display_create(dev);
			else
				ret = nv50_display_create(dev);
		}
	} else {
		ret = 0;
	}

	if (ret)
		goto disp_create_err;

	if (dev->mode_config.num_crtc) {
		ret = nouveau_display_vblank_init(dev);
		if (ret)
			goto vblank_err;
	}

	nouveau_backlight_init(dev);
	return 0;

vblank_err:
	disp->dtor(dev);
disp_create_err:
	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
	return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);

	nouveau_backlight_exit(dev);
	nouveau_display_vblank_fini(dev);

	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);

	if (disp->dtor)
		disp->dtor(dev);

	nouveau_object_del(nv_object(drm), NVDRM_DEVICE, NVDRM_DISPLAY);

	nouveau_drm(dev)->display = NULL;
	kfree(disp);
}

int
nouveau_display_suspend(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_crtc *crtc;

	nouveau_display_fini(dev);

	NV_INFO(drm, "unpinning framebuffer(s)...\n");
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->primary->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		nouveau_bo_unpin(nouveau_fb->nvbo);
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		nouveau_bo_unmap(nv_crtc->cursor.nvbo);
		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
	}

	return 0;
}

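/* Inverse of the unpinning done in nouveau_display_suspend(): pin the scanout
 * and cursor buffers back into VRAM and re-map the cursors before display
 * state is restored on resume.
 */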
void
nouveau_display_repin(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_crtc *crtc;
	int ret;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->primary->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		nouveau_bo_pin(nouveau_fb->nvbo, TTM_PL_FLAG_VRAM);
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			NV_ERROR(drm, "Could not pin/map cursor.\n");
	}
}

void
nouveau_display_resume(struct drm_device *dev)
{
	struct drm_crtc *crtc;

	nouveau_display_init(dev);

	/* Force CLUT to get re-loaded during modeset */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		nv_crtc->lut.depth = 0;
	}

	drm_helper_resume_force_mode(dev);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		u32 offset = nv_crtc->cursor.nvbo->bo.offset;

		nv_crtc->cursor.set_offset(nv_crtc, offset);
		nv_crtc->cursor.set_pos(nv_crtc, nv_crtc->cursor_saved_x,
					nv_crtc->cursor_saved_y);
	}
}

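/* Queue the flip state on the channel's pending-flip list, synchronize with
 * any rendering still outstanding on the old framebuffer, then emit the
 * NV_SW_PAGE_FLIP software method followed by a fence so completion can be
 * tracked. On failure the state is unlinked again.
 */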
static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
		       struct nouveau_bo *old_bo,
		       struct nouveau_bo *new_bo,
		       struct nouveau_page_flip_state *s,
		       struct nouveau_fence **pfence)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	unsigned long flags;
	int ret;

	/* Queue it to the pending list */
	spin_lock_irqsave(&dev->event_lock, flags);
	list_add_tail(&s->head, &fctx->flip);
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* Synchronize with the old framebuffer */
	ret = nouveau_fence_sync(old_bo->bo.sync_obj, chan);
	if (ret)
		goto fail;

	/* Emit the pageflip */
	ret = RING_SPACE(chan, 2);
	if (ret)
		goto fail;

	if (nv_device(drm->device)->card_type < NV_C0)
		BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
	else
		BEGIN_NVC0(chan, FermiSw, NV_SW_PAGE_FLIP, 1);
	OUT_RING (chan, 0x00000000);
	FIRE_RING (chan);

	ret = nouveau_fence_new(chan, false, pfence);
	if (ret)
		goto fail;

	return 0;
fail:
	spin_lock_irqsave(&dev->event_lock, flags);
	list_del(&s->head);
	spin_unlock_irqrestore(&dev->event_lock, flags);
	return ret;
}

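/* Legacy (pre-atomic) page-flip entry point. The new buffer is pinned to VRAM
 * and synchronized against the kernel channel, the flip is programmed either
 * through the NV50+ display core or, on older chips, by emitting methods on
 * the ImageBlit subchannel (apparently to make the channel wait for vblank
 * when a swap interval is requested), and the old buffer is fenced and
 * unpinned once the flip has been emitted.
 */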
int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct drm_pending_vblank_event *event, u32 flags)
{
	const int swap_interval = (flags & DRM_MODE_PAGE_FLIP_ASYNC) ? 0 : 1;
	struct drm_device *dev = crtc->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->primary->fb)->nvbo;
	struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
	struct nouveau_page_flip_state *s;
	struct nouveau_channel *chan = drm->channel;
	struct nouveau_fence *fence;
	int ret;

	if (!drm->channel)
		return -ENODEV;

	s = kzalloc(sizeof(*s), GFP_KERNEL);
	if (!s)
		return -ENOMEM;

	if (new_bo != old_bo) {
		ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM);
		if (ret)
			goto fail_free;
	}

	mutex_lock(&chan->cli->mutex);

	/* synchronise rendering channel with the kernel's channel */
	spin_lock(&new_bo->bo.bdev->fence_lock);
	fence = nouveau_fence_ref(new_bo->bo.sync_obj);
	spin_unlock(&new_bo->bo.bdev->fence_lock);
	ret = nouveau_fence_sync(fence, chan);
	nouveau_fence_unref(&fence);
	if (ret)
		goto fail_unpin;

	ret = ttm_bo_reserve(&old_bo->bo, true, false, false, NULL);
	if (ret)
		goto fail_unpin;

	/* Initialize a page flip struct */
	*s = (struct nouveau_page_flip_state)
		{ { }, event, nouveau_crtc(crtc)->index,
		  fb->bits_per_pixel, fb->pitches[0], crtc->x, crtc->y,
		  new_bo->bo.offset };

	/* Emit a page flip */
	if (nv_device(drm->device)->card_type >= NV_50) {
		ret = nv50_display_flip_next(crtc, fb, chan, swap_interval);
		if (ret)
			goto fail_unreserve;
	} else {
		struct nv04_display *dispnv04 = nv04_display(dev);
		int head = nouveau_crtc(crtc)->index;

		if (swap_interval) {
			ret = RING_SPACE(chan, 8);
			if (ret)
				goto fail_unreserve;

			BEGIN_NV04(chan, NvSubImageBlit, 0x012c, 1);
			OUT_RING (chan, 0);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0134, 1);
			OUT_RING (chan, head);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0100, 1);
			OUT_RING (chan, 0);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0130, 1);
			OUT_RING (chan, 0);
		}

		nouveau_bo_ref(new_bo, &dispnv04->image[head]);
	}

	ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
	if (ret)
		goto fail_unreserve;
	mutex_unlock(&chan->cli->mutex);

	/* Update the crtc struct and cleanup */
	crtc->primary->fb = fb;

	nouveau_bo_fence(old_bo, fence);
	ttm_bo_unreserve(&old_bo->bo);
	if (old_bo != new_bo)
		nouveau_bo_unpin(old_bo);
	nouveau_fence_unref(&fence);
	return 0;

fail_unreserve:
	ttm_bo_unreserve(&old_bo->bo);
fail_unpin:
	mutex_unlock(&chan->cli->mutex);
	if (old_bo != new_bo)
		nouveau_bo_unpin(new_bo);
fail_free:
	kfree(s);
	return ret;
}

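/* Called when a queued flip has executed on the given channel: pop the oldest
 * entry from the pending-flip list, deliver the userspace vblank event if one
 * was requested, and hand the state back to the caller.
 */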
int
nouveau_finish_page_flip(struct nouveau_channel *chan,
			 struct nouveau_page_flip_state *ps)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	struct nouveau_page_flip_state *s;
	unsigned long flags;

	spin_lock_irqsave(&dev->event_lock, flags);

	if (list_empty(&fctx->flip)) {
		NV_ERROR(drm, "unexpected pageflip\n");
		spin_unlock_irqrestore(&dev->event_lock, flags);
		return -EINVAL;
	}

	s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head);
	if (s->event)
		drm_send_vblank_event(dev, s->crtc, s->event);

	list_del(&s->head);
	if (ps)
		*ps = *s;
	kfree(s);

	spin_unlock_irqrestore(&dev->event_lock, flags);
	return 0;
}

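/* Completion handler for the NV_SW_PAGE_FLIP method emitted above; on
 * pre-NV50 hardware the new scanout base address also has to be written to
 * the CRTC here, since there is no display core to take care of it.
 */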
int
nouveau_flip_complete(void *data)
{
	struct nouveau_channel *chan = data;
	struct nouveau_drm *drm = chan->drm;
	struct nouveau_page_flip_state state;

	if (!nouveau_finish_page_flip(chan, &state)) {
		if (nv_device(drm->device)->card_type < NV_50) {
			nv_set_crtc_base(drm->dev, state.crtc, state.offset +
					 state.y * state.pitch +
					 state.x * state.bpp / 8);
		}
	}

	return 0;
}

int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
			    struct drm_mode_create_dumb *args)
{
	struct nouveau_bo *bo;
	int ret;

	args->pitch = roundup(args->width * (args->bpp / 8), 256);
	args->size = args->pitch * args->height;
	args->size = roundup(args->size, PAGE_SIZE);

	ret = nouveau_gem_new(dev, args->size, 0, NOUVEAU_GEM_DOMAIN_VRAM, 0, 0, &bo);
	if (ret)
		return ret;

	ret = drm_gem_handle_create(file_priv, &bo->gem, &args->handle);
	drm_gem_object_unreference_unlocked(&bo->gem);
	return ret;
}

int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
				struct drm_device *dev,
				uint32_t handle, uint64_t *poffset)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(dev, file_priv, handle);
	if (gem) {
		struct nouveau_bo *bo = nouveau_gem_object(gem);
		*poffset = drm_vma_node_offset_addr(&bo->bo.vma_node);
		drm_gem_object_unreference_unlocked(gem);
		return 0;
	}

	return -ENOENT;
}