/*
 * Copyright 2014 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <drm/drm_vblank.h>

#include "amdgpu.h"
#include "amdgpu_pm.h"
#include "amdgpu_i2c.h"
#include "atom.h"
#include "amdgpu_pll.h"
#include "amdgpu_connectors.h"
#ifdef CONFIG_DRM_AMDGPU_SI
#include "dce_v6_0.h"
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
#include "dce_v8_0.h"
#endif
#include "dce_v10_0.h"
#include "dce_v11_0.h"
#include "dce_virtual.h"
#include "ivsrcid/ivsrcid_vislands30.h"
#include "amdgpu_display.h"

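/* period of the software vblank timer, in nanoseconds (~16.67 ms, i.e. a 60 Hz refresh) */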
#define DCE_VIRTUAL_VBLANK_PERIOD 16666666


static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index);
static int dce_virtual_pageflip(struct amdgpu_device *adev,
				unsigned crtc_id);
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer);
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							int crtc,
							enum amdgpu_interrupt_state state);

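/*
 * There is no hardware frame counter behind the virtual display, so the
 * counter always reads zero; sw_init sets max_vblank_count to 0 so DRM
 * does not rely on it.
 */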
static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
	return 0;
}

static void dce_virtual_page_flip(struct amdgpu_device *adev,
				  int crtc_id, u64 crtc_base, bool async)
{
	return;
}

static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
					   u32 *vbl, u32 *position)
{
	*vbl = 0;
	*position = 0;

	return -EINVAL;
}

static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
				  enum amdgpu_hpd_id hpd)
{
	return true;
}

static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
					 enum amdgpu_hpd_id hpd)
{
	return;
}

static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
	return 0;
}

/**
 * dce_virtual_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * There is no real display hardware behind the virtual DCE, so there are
 * no watermarks or line buffer allocation to program; this is a stub.
 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
	return;
}

static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
				      u16 *green, u16 *blue, uint32_t size,
				      struct drm_modeset_acquire_ctx *ctx)
{
	return 0;
}

static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(amdgpu_crtc);
}

static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
	.cursor_set2 = NULL,
	.cursor_move = NULL,
	.gamma_set = dce_virtual_crtc_gamma_set,
	.set_config = amdgpu_display_crtc_set_config,
	.destroy = dce_virtual_crtc_destroy,
	.page_flip_target = amdgpu_display_crtc_page_flip_target,
	.get_vblank_counter = amdgpu_get_vblank_counter_kms,
	.enable_vblank = amdgpu_enable_vblank_kms,
	.disable_vblank = amdgpu_disable_vblank_kms,
	.get_vblank_timestamp = drm_crtc_vblank_helper_get_vblank_timestamp,
};

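/*
 * DPMS handling reduces to toggling DRM vblank support for the CRTC:
 * "on" re-enables the CRTC interrupt source and vblank delivery, the
 * other states turn them off.
 */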
static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	unsigned type;

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		amdgpu_crtc->enabled = true;
		/* Make sure VBLANK interrupts are still enabled */
		type = amdgpu_display_crtc_idx_to_irq_type(adev,
						amdgpu_crtc->crtc_id);
		amdgpu_irq_update(adev, &adev->crtc_irq, type);
		drm_crtc_vblank_on(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		drm_crtc_vblank_off(crtc);
		amdgpu_crtc->enabled = false;
		break;
	}
}


static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}

static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	if (dev->num_crtcs)
		drm_crtc_vblank_off(crtc);

	amdgpu_crtc->enabled = false;
	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
}

static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode,
				     int x, int y, struct drm_framebuffer *old_fb)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	/* update the hw mode for dpm */
	amdgpu_crtc->hw_mode = *adjusted_mode;

	return 0;
}

static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
{
	return true;
}


static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				     struct drm_framebuffer *old_fb)
{
	return 0;
}

static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
					    struct drm_framebuffer *fb,
					    int x, int y, enum mode_set_atomic state)
{
	return 0;
}

static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
	.dpms = dce_virtual_crtc_dpms,
	.mode_fixup = dce_virtual_crtc_mode_fixup,
	.mode_set = dce_virtual_crtc_mode_set,
	.mode_set_base = dce_virtual_crtc_set_base,
	.mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
	.prepare = dce_virtual_crtc_prepare,
	.commit = dce_virtual_crtc_commit,
	.disable = dce_virtual_crtc_disable,
	.get_scanout_position = amdgpu_crtc_get_scanout_position,
};

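/*
 * Allocate one amdgpu_crtc (with room for AMDGPUFB_CONN_LIMIT connector
 * pointers) and arm the per-CRTC hrtimer that stands in for the hardware
 * vblank interrupt.
 */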
static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
{
	struct amdgpu_crtc *amdgpu_crtc;

	amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
			      (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (amdgpu_crtc == NULL)
		return -ENOMEM;

	drm_crtc_init(adev_to_drm(adev), &amdgpu_crtc->base, &dce_virtual_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
	amdgpu_crtc->crtc_id = index;
	adev->mode_info.crtcs[index] = amdgpu_crtc;

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
	amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
	drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);

	hrtimer_init(&amdgpu_crtc->vblank_timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
	hrtimer_set_expires(&amdgpu_crtc->vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD);
	amdgpu_crtc->vblank_timer.function = dce_virtual_vblank_timer_handle;
	hrtimer_start(&amdgpu_crtc->vblank_timer,
		      DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
	return 0;
}

static int dce_virtual_early_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	dce_virtual_set_display_funcs(adev);
	dce_virtual_set_irq_funcs(adev);

	adev->mode_info.num_hpd = 1;
	adev->mode_info.num_dig = 1;
	return 0;
}

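/* best_encoder callback: prefer a virtual encoder, otherwise take the first possible one */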
static struct drm_encoder *
dce_virtual_encoder(struct drm_connector *connector)
{
	struct drm_encoder *encoder;

	drm_connector_for_each_possible_encoder(connector, encoder) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
			return encoder;
	}

	/* pick the first one */
	drm_connector_for_each_possible_encoder(connector, encoder)
		return encoder;

	return NULL;
}

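/*
 * No EDID is available for the virtual connector, so report a fixed table
 * of common resolutions, each with 60 Hz CVT timings.
 */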
static int dce_virtual_get_modes(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;
	struct drm_display_mode *mode = NULL;
	unsigned i;
	static const struct mode_size {
		int w;
		int h;
	} common_modes[] = {
		{ 640, 480},
		{ 720, 480},
		{ 800, 600},
		{ 848, 480},
		{1024, 768},
		{1152, 768},
		{1280, 720},
		{1280, 800},
		{1280, 854},
		{1280, 960},
		{1280, 1024},
		{1440, 900},
		{1400, 1050},
		{1680, 1050},
		{1600, 1200},
		{1920, 1080},
		{1920, 1200},
		{2560, 1440},
		{4096, 3112},
		{3656, 2664},
		{3840, 2160},
		{4096, 2160},
	};

	for (i = 0; i < ARRAY_SIZE(common_modes); i++) {
		mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h, 60, false, false, false);
		drm_mode_probed_add(connector, mode);
	}

	return 0;
}

static enum drm_mode_status dce_virtual_mode_valid(struct drm_connector *connector,
						   struct drm_display_mode *mode)
{
	return MODE_OK;
}

static int
dce_virtual_dpms(struct drm_connector *connector, int mode)
{
	return 0;
}

static int
dce_virtual_set_property(struct drm_connector *connector,
			 struct drm_property *property,
			 uint64_t val)
{
	return 0;
}

static void dce_virtual_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}

static void dce_virtual_force(struct drm_connector *connector)
{
	return;
}

static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
	.get_modes = dce_virtual_get_modes,
	.mode_valid = dce_virtual_mode_valid,
	.best_encoder = dce_virtual_encoder,
};

static const struct drm_connector_funcs dce_virtual_connector_funcs = {
	.dpms = dce_virtual_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = dce_virtual_set_property,
	.destroy = dce_virtual_destroy,
	.force = dce_virtual_force,
};

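/*
 * Register the CRTC interrupt source, set up mode_config, and create one
 * virtual CRTC/encoder/connector set per reported CRTC.
 */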
static int dce_virtual_sw_init(void *handle)
{
	int r, i;
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER, &adev->crtc_irq);
	if (r)
		return r;

	adev_to_drm(adev)->max_vblank_count = 0;

	adev_to_drm(adev)->mode_config.funcs = &amdgpu_mode_funcs;

	adev_to_drm(adev)->mode_config.max_width = 16384;
	adev_to_drm(adev)->mode_config.max_height = 16384;

	adev_to_drm(adev)->mode_config.preferred_depth = 24;
	adev_to_drm(adev)->mode_config.prefer_shadow = 1;

	adev_to_drm(adev)->mode_config.fb_base = adev->gmc.aper_base;

	r = amdgpu_display_modeset_create_props(adev);
	if (r)
		return r;

	adev_to_drm(adev)->mode_config.max_width = 16384;
	adev_to_drm(adev)->mode_config.max_height = 16384;

	/* allocate crtcs, encoders, connectors */
	for (i = 0; i < adev->mode_info.num_crtc; i++) {
		r = dce_virtual_crtc_init(adev, i);
		if (r)
			return r;
		r = dce_virtual_connector_encoder_init(adev, i);
		if (r)
			return r;
	}

	drm_kms_helper_poll_init(adev_to_drm(adev));

	adev->mode_info.mode_config_initialized = true;
	return 0;
}

static int dce_virtual_sw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int i = 0;

	for (i = 0; i < adev->mode_info.num_crtc; i++)
		if (adev->mode_info.crtcs[i])
			hrtimer_cancel(&adev->mode_info.crtcs[i]->vblank_timer);

	kfree(adev->mode_info.bios_hardcoded_edid);

	drm_kms_helper_poll_fini(adev_to_drm(adev));

	drm_mode_config_cleanup(adev_to_drm(adev));
	/* clear the crtcs pointers so the DCE IRQ finish routine does not access freed data */
	memset(adev->mode_info.crtcs, 0, sizeof(adev->mode_info.crtcs[0]) * AMDGPU_MAX_CRTCS);
	adev->mode_info.mode_config_initialized = false;
	return 0;
}

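/*
 * hw_init only turns off any real DCE block present on the ASIC, so the
 * hardware display engine cannot interfere with the virtual one.
 */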
static int dce_virtual_hw_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	switch (adev->asic_type) {
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
	case CHIP_VERDE:
	case CHIP_OLAND:
		dce_v6_0_disable_dce(adev);
		break;
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
	case CHIP_KAVERI:
	case CHIP_KABINI:
	case CHIP_MULLINS:
		dce_v8_0_disable_dce(adev);
		break;
#endif
	case CHIP_FIJI:
	case CHIP_TONGA:
		dce_v10_0_disable_dce(adev);
		break;
	case CHIP_CARRIZO:
	case CHIP_STONEY:
	case CHIP_POLARIS10:
	case CHIP_POLARIS11:
	case CHIP_VEGAM:
		dce_v11_0_disable_dce(adev);
		break;
	case CHIP_TOPAZ:
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_HAINAN:
#endif
		/* no DCE */
		break;
	default:
		break;
	}
	return 0;
}

static int dce_virtual_hw_fini(void *handle)
{
	return 0;
}

static int dce_virtual_suspend(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int r;

	r = amdgpu_display_suspend_helper(adev);
	if (r)
		return r;
	return dce_virtual_hw_fini(handle);
}

static int dce_virtual_resume(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int r;

	r = dce_virtual_hw_init(handle);
	if (r)
		return r;
	return amdgpu_display_resume_helper(adev);
}

static bool dce_virtual_is_idle(void *handle)
{
	return true;
}

static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}

static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}

static int dce_virtual_set_clockgating_state(void *handle,
					     enum amd_clockgating_state state)
{
	return 0;
}

static int dce_virtual_set_powergating_state(void *handle,
					     enum amd_powergating_state state)
{
	return 0;
}

static const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.late_init = NULL,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};

/* these are handled by the primary encoders */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
	return;
}

static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
	return;
}

static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	return;
}

static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
					   const struct drm_display_mode *mode,
					   struct drm_display_mode *adjusted_mode)
{
	return true;
}

static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
	.dpms = dce_virtual_encoder_dpms,
	.mode_fixup = dce_virtual_encoder_mode_fixup,
	.prepare = dce_virtual_encoder_prepare,
	.mode_set = dce_virtual_encoder_mode_set,
	.commit = dce_virtual_encoder_commit,
	.disable = dce_virtual_encoder_disable,
};

static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};

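/* create a DRM_MODE_ENCODER_VIRTUAL / DRM_MODE_CONNECTOR_VIRTUAL pair bound to CRTC "index" */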
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index)
{
	struct drm_encoder *encoder;
	struct drm_connector *connector;

	/* add a new encoder */
	encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
	if (!encoder)
		return -ENOMEM;
	encoder->possible_crtcs = 1 << index;
	drm_encoder_init(adev_to_drm(adev), encoder, &dce_virtual_encoder_funcs,
			 DRM_MODE_ENCODER_VIRTUAL, NULL);
	drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);

	connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
	if (!connector) {
		kfree(encoder);
		return -ENOMEM;
	}

	/* add a new connector */
	drm_connector_init(adev_to_drm(adev), connector, &dce_virtual_connector_funcs,
			   DRM_MODE_CONNECTOR_VIRTUAL);
	drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;

	/* link them */
	drm_connector_attach_encoder(connector, encoder);

	return 0;
}

static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_encoder = NULL,
	.add_connector = NULL,
};

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
	adev->mode_info.funcs = &dce_virtual_display_funcs;
}

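/*
 * Complete a pending page flip: send the vblank event to userspace, drop
 * the vblank reference and unreference the old buffer. Called from the
 * software vblank timer in place of a pflip interrupt handler.
 */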
static int dce_virtual_pageflip(struct amdgpu_device *adev,
				unsigned crtc_id)
{
	unsigned long flags;
	struct amdgpu_crtc *amdgpu_crtc;
	struct amdgpu_flip_work *works;

	amdgpu_crtc = adev->mode_info.crtcs[crtc_id];

	if (crtc_id >= adev->mode_info.num_crtc) {
		DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
		return -EINVAL;
	}

	/* the vblank timer can fire before the CRTC has been fully initialized */
	if (amdgpu_crtc == NULL)
		return 0;

	spin_lock_irqsave(&adev_to_drm(adev)->event_lock, flags);
	works = amdgpu_crtc->pflip_works;
	if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
			"AMDGPU_FLIP_SUBMITTED(%d)\n",
			amdgpu_crtc->pflip_status,
			AMDGPU_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);
		return 0;
	}

	/* page flip completed. clean up */
	amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
	amdgpu_crtc->pflip_works = NULL;

	/* wake up userspace */
	if (works->event)
		drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);

	spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);

	drm_crtc_vblank_put(&amdgpu_crtc->base);
	amdgpu_bo_unref(&works->old_abo);
	kfree(works->shared);
	kfree(works);

	return 0;
}

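/*
 * hrtimer callback emulating the vblank interrupt: deliver a vblank event
 * and complete any pending flip only while the CRTC interrupt source is
 * enabled, then re-arm the timer for the next period.
 */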
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
	struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
						       struct amdgpu_crtc, vblank_timer);
	struct drm_device *ddev = amdgpu_crtc->base.dev;
	struct amdgpu_device *adev = drm_to_adev(ddev);
	struct amdgpu_irq_src *source = adev->irq.client[AMDGPU_IRQ_CLIENTID_LEGACY].sources
		[VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER];
	int irq_type = amdgpu_display_crtc_idx_to_irq_type(adev,
						amdgpu_crtc->crtc_id);

	if (amdgpu_irq_enabled(adev, source, irq_type)) {
		drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
		dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
	}
	hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
		      HRTIMER_MODE_REL);

	return HRTIMER_NORESTART;
}

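/*
 * The per-CRTC hrtimer keeps running regardless of this state; the timer
 * handler checks amdgpu_irq_enabled() before delivering events, so this
 * only needs to record the requested state.
 */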
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							int crtc,
							enum amdgpu_interrupt_state state)
{
	if (crtc >= adev->mode_info.num_crtc || !adev->mode_info.crtcs[crtc]) {
		DRM_DEBUG("invalid crtc %d\n", crtc);
		return;
	}

	adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
	DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}


static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
					  struct amdgpu_irq_src *source,
					  unsigned type,
					  enum amdgpu_interrupt_state state)
{
	if (type > AMDGPU_CRTC_IRQ_VBLANK6)
		return -EINVAL;

	dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);

	return 0;
}

static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
	.process = NULL,
};

static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->crtc_irq.num_types = adev->mode_info.num_crtc;
	adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
}

const struct amdgpu_ip_block_version dce_virtual_ip_block =
{
	.type = AMD_IP_BLOCK_TYPE_DCE,
	.major = 1,
	.minor = 0,
	.rev = 0,
	.funcs = &dce_virtual_ip_funcs,
};