/* NOTE(review): "Loading..." was a code-browser page artifact, not part of the source file. */
1/*
2 * Copyright 2014 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 */
23#include "drmP.h"
24#include "amdgpu.h"
25#include "amdgpu_pm.h"
26#include "amdgpu_i2c.h"
27#include "atom.h"
28#include "amdgpu_pll.h"
29#include "amdgpu_connectors.h"
30#ifdef CONFIG_DRM_AMDGPU_SI
31#include "dce_v6_0.h"
32#endif
33#ifdef CONFIG_DRM_AMDGPU_CIK
34#include "dce_v8_0.h"
35#endif
36#include "dce_v10_0.h"
37#include "dce_v11_0.h"
38#include "dce_virtual.h"
39
40#define DCE_VIRTUAL_VBLANK_PERIOD 16666666
41
42
43static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
44static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
45static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
46 int index);
47
48/**
49 * dce_virtual_vblank_wait - vblank wait asic callback.
50 *
51 * @adev: amdgpu_device pointer
52 * @crtc: crtc to wait for vblank on
53 *
54 * Wait for vblank on the requested crtc (evergreen+).
55 */
56static void dce_virtual_vblank_wait(struct amdgpu_device *adev, int crtc)
57{
58 return;
59}
60
61static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
62{
63 return 0;
64}
65
66static void dce_virtual_page_flip(struct amdgpu_device *adev,
67 int crtc_id, u64 crtc_base, bool async)
68{
69 return;
70}
71
72static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
73 u32 *vbl, u32 *position)
74{
75 *vbl = 0;
76 *position = 0;
77
78 return -EINVAL;
79}
80
81static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
82 enum amdgpu_hpd_id hpd)
83{
84 return true;
85}
86
87static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
88 enum amdgpu_hpd_id hpd)
89{
90 return;
91}
92
93static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
94{
95 return 0;
96}
97
98static void dce_virtual_stop_mc_access(struct amdgpu_device *adev,
99 struct amdgpu_mode_mc_save *save)
100{
101 switch (adev->asic_type) {
102#ifdef CONFIG_DRM_AMDGPU_SI
103 case CHIP_TAHITI:
104 case CHIP_PITCAIRN:
105 case CHIP_VERDE:
106 case CHIP_OLAND:
107 dce_v6_0_disable_dce(adev);
108 break;
109#endif
110#ifdef CONFIG_DRM_AMDGPU_CIK
111 case CHIP_BONAIRE:
112 case CHIP_HAWAII:
113 case CHIP_KAVERI:
114 case CHIP_KABINI:
115 case CHIP_MULLINS:
116 dce_v8_0_disable_dce(adev);
117 break;
118#endif
119 case CHIP_FIJI:
120 case CHIP_TONGA:
121 dce_v10_0_disable_dce(adev);
122 break;
123 case CHIP_CARRIZO:
124 case CHIP_STONEY:
125 case CHIP_POLARIS11:
126 case CHIP_POLARIS10:
127 dce_v11_0_disable_dce(adev);
128 break;
129 case CHIP_TOPAZ:
130#ifdef CONFIG_DRM_AMDGPU_SI
131 case CHIP_HAINAN:
132#endif
133 /* no DCE */
134 return;
135 default:
136 DRM_ERROR("Virtual display unsupported ASIC type: 0x%X\n", adev->asic_type);
137 }
138
139 return;
140}
141static void dce_virtual_resume_mc_access(struct amdgpu_device *adev,
142 struct amdgpu_mode_mc_save *save)
143{
144 return;
145}
146
147static void dce_virtual_set_vga_render_state(struct amdgpu_device *adev,
148 bool render)
149{
150 return;
151}
152
153/**
154 * dce_virtual_bandwidth_update - program display watermarks
155 *
156 * @adev: amdgpu_device pointer
157 *
158 * Calculate and program the display watermarks and line
159 * buffer allocation (CIK).
160 */
161static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
162{
163 return;
164}
165
166static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
167 u16 *green, u16 *blue, uint32_t size)
168{
169 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
170 int i;
171
172 /* userspace palettes are always correct as is */
173 for (i = 0; i < size; i++) {
174 amdgpu_crtc->lut_r[i] = red[i] >> 6;
175 amdgpu_crtc->lut_g[i] = green[i] >> 6;
176 amdgpu_crtc->lut_b[i] = blue[i] >> 6;
177 }
178
179 return 0;
180}
181
/* Tear down the DRM CRTC and free the containing amdgpu_crtc allocation. */
static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(amdgpu_crtc);
}
189
190static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
191 .cursor_set2 = NULL,
192 .cursor_move = NULL,
193 .gamma_set = dce_virtual_crtc_gamma_set,
194 .set_config = amdgpu_crtc_set_config,
195 .destroy = dce_virtual_crtc_destroy,
196 .page_flip_target = amdgpu_crtc_page_flip_target,
197};
198
199static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
200{
201 struct drm_device *dev = crtc->dev;
202 struct amdgpu_device *adev = dev->dev_private;
203 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
204 unsigned type;
205
206 switch (mode) {
207 case DRM_MODE_DPMS_ON:
208 amdgpu_crtc->enabled = true;
209 /* Make sure VBLANK interrupts are still enabled */
210 type = amdgpu_crtc_idx_to_irq_type(adev, amdgpu_crtc->crtc_id);
211 amdgpu_irq_update(adev, &adev->crtc_irq, type);
212 drm_crtc_vblank_on(crtc);
213 break;
214 case DRM_MODE_DPMS_STANDBY:
215 case DRM_MODE_DPMS_SUSPEND:
216 case DRM_MODE_DPMS_OFF:
217 drm_crtc_vblank_off(crtc);
218 amdgpu_crtc->enabled = false;
219 break;
220 }
221}
222
223
224static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
225{
226 dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
227}
228
229static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
230{
231 dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
232}
233
234static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
235{
236 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
237
238 dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
239 if (crtc->primary->fb) {
240 int r;
241 struct amdgpu_framebuffer *amdgpu_fb;
242 struct amdgpu_bo *abo;
243
244 amdgpu_fb = to_amdgpu_framebuffer(crtc->primary->fb);
245 abo = gem_to_amdgpu_bo(amdgpu_fb->obj);
246 r = amdgpu_bo_reserve(abo, false);
247 if (unlikely(r))
248 DRM_ERROR("failed to reserve abo before unpin\n");
249 else {
250 amdgpu_bo_unpin(abo);
251 amdgpu_bo_unreserve(abo);
252 }
253 }
254
255 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
256 amdgpu_crtc->encoder = NULL;
257 amdgpu_crtc->connector = NULL;
258}
259
260static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
261 struct drm_display_mode *mode,
262 struct drm_display_mode *adjusted_mode,
263 int x, int y, struct drm_framebuffer *old_fb)
264{
265 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
266
267 /* update the hw version fpr dpm */
268 amdgpu_crtc->hw_mode = *adjusted_mode;
269
270 return 0;
271}
272
273static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
274 const struct drm_display_mode *mode,
275 struct drm_display_mode *adjusted_mode)
276{
277 return true;
278}
279
280
281static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
282 struct drm_framebuffer *old_fb)
283{
284 return 0;
285}
286
287static void dce_virtual_crtc_load_lut(struct drm_crtc *crtc)
288{
289 return;
290}
291
292static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
293 struct drm_framebuffer *fb,
294 int x, int y, enum mode_set_atomic state)
295{
296 return 0;
297}
298
299static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
300 .dpms = dce_virtual_crtc_dpms,
301 .mode_fixup = dce_virtual_crtc_mode_fixup,
302 .mode_set = dce_virtual_crtc_mode_set,
303 .mode_set_base = dce_virtual_crtc_set_base,
304 .mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
305 .prepare = dce_virtual_crtc_prepare,
306 .commit = dce_virtual_crtc_commit,
307 .load_lut = dce_virtual_crtc_load_lut,
308 .disable = dce_virtual_crtc_disable,
309};
310
311static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
312{
313 struct amdgpu_crtc *amdgpu_crtc;
314 int i;
315
316 amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
317 (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
318 if (amdgpu_crtc == NULL)
319 return -ENOMEM;
320
321 drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_virtual_crtc_funcs);
322
323 drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
324 amdgpu_crtc->crtc_id = index;
325 adev->mode_info.crtcs[index] = amdgpu_crtc;
326
327 for (i = 0; i < 256; i++) {
328 amdgpu_crtc->lut_r[i] = i << 2;
329 amdgpu_crtc->lut_g[i] = i << 2;
330 amdgpu_crtc->lut_b[i] = i << 2;
331 }
332
333 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
334 amdgpu_crtc->encoder = NULL;
335 amdgpu_crtc->connector = NULL;
336 amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
337 drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);
338
339 return 0;
340}
341
342static int dce_virtual_early_init(void *handle)
343{
344 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
345
346 dce_virtual_set_display_funcs(adev);
347 dce_virtual_set_irq_funcs(adev);
348
349 adev->mode_info.num_hpd = 1;
350 adev->mode_info.num_dig = 1;
351 return 0;
352}
353
354static struct drm_encoder *
355dce_virtual_encoder(struct drm_connector *connector)
356{
357 int enc_id = connector->encoder_ids[0];
358 struct drm_encoder *encoder;
359 int i;
360
361 for (i = 0; i < DRM_CONNECTOR_MAX_ENCODER; i++) {
362 if (connector->encoder_ids[i] == 0)
363 break;
364
365 encoder = drm_encoder_find(connector->dev, connector->encoder_ids[i]);
366 if (!encoder)
367 continue;
368
369 if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
370 return encoder;
371 }
372
373 /* pick the first one */
374 if (enc_id)
375 return drm_encoder_find(connector->dev, enc_id);
376 return NULL;
377}
378
379static int dce_virtual_get_modes(struct drm_connector *connector)
380{
381 struct drm_device *dev = connector->dev;
382 struct drm_display_mode *mode = NULL;
383 unsigned i;
384 static const struct mode_size {
385 int w;
386 int h;
387 } common_modes[17] = {
388 { 640, 480},
389 { 720, 480},
390 { 800, 600},
391 { 848, 480},
392 {1024, 768},
393 {1152, 768},
394 {1280, 720},
395 {1280, 800},
396 {1280, 854},
397 {1280, 960},
398 {1280, 1024},
399 {1440, 900},
400 {1400, 1050},
401 {1680, 1050},
402 {1600, 1200},
403 {1920, 1080},
404 {1920, 1200}
405 };
406
407 for (i = 0; i < 17; i++) {
408 mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h, 60, false, false, false);
409 drm_mode_probed_add(connector, mode);
410 }
411
412 return 0;
413}
414
415static int dce_virtual_mode_valid(struct drm_connector *connector,
416 struct drm_display_mode *mode)
417{
418 return MODE_OK;
419}
420
421static int
422dce_virtual_dpms(struct drm_connector *connector, int mode)
423{
424 return 0;
425}
426
427static int
428dce_virtual_set_property(struct drm_connector *connector,
429 struct drm_property *property,
430 uint64_t val)
431{
432 return 0;
433}
434
435static void dce_virtual_destroy(struct drm_connector *connector)
436{
437 drm_connector_unregister(connector);
438 drm_connector_cleanup(connector);
439 kfree(connector);
440}
441
442static void dce_virtual_force(struct drm_connector *connector)
443{
444 return;
445}
446
447static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
448 .get_modes = dce_virtual_get_modes,
449 .mode_valid = dce_virtual_mode_valid,
450 .best_encoder = dce_virtual_encoder,
451};
452
453static const struct drm_connector_funcs dce_virtual_connector_funcs = {
454 .dpms = dce_virtual_dpms,
455 .fill_modes = drm_helper_probe_single_connector_modes,
456 .set_property = dce_virtual_set_property,
457 .destroy = dce_virtual_destroy,
458 .force = dce_virtual_force,
459};
460
461static int dce_virtual_sw_init(void *handle)
462{
463 int r, i;
464 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
465
466 r = amdgpu_irq_add_id(adev, 229, &adev->crtc_irq);
467 if (r)
468 return r;
469
470 adev->ddev->max_vblank_count = 0;
471
472 adev->ddev->mode_config.funcs = &amdgpu_mode_funcs;
473
474 adev->ddev->mode_config.max_width = 16384;
475 adev->ddev->mode_config.max_height = 16384;
476
477 adev->ddev->mode_config.preferred_depth = 24;
478 adev->ddev->mode_config.prefer_shadow = 1;
479
480 adev->ddev->mode_config.fb_base = adev->mc.aper_base;
481
482 r = amdgpu_modeset_create_props(adev);
483 if (r)
484 return r;
485
486 adev->ddev->mode_config.max_width = 16384;
487 adev->ddev->mode_config.max_height = 16384;
488
489 /* allocate crtcs, encoders, connectors */
490 for (i = 0; i < adev->mode_info.num_crtc; i++) {
491 r = dce_virtual_crtc_init(adev, i);
492 if (r)
493 return r;
494 r = dce_virtual_connector_encoder_init(adev, i);
495 if (r)
496 return r;
497 }
498
499 drm_kms_helper_poll_init(adev->ddev);
500
501 adev->mode_info.mode_config_initialized = true;
502 return 0;
503}
504
505static int dce_virtual_sw_fini(void *handle)
506{
507 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
508
509 kfree(adev->mode_info.bios_hardcoded_edid);
510
511 drm_kms_helper_poll_fini(adev->ddev);
512
513 drm_mode_config_cleanup(adev->ddev);
514 adev->mode_info.mode_config_initialized = false;
515 return 0;
516}
517
/* No hardware: init/fini are no-ops; suspend/resume delegate to them. */
static int dce_virtual_hw_init(void *handle)
{
	return 0;
}

static int dce_virtual_hw_fini(void *handle)
{
	return 0;
}

static int dce_virtual_suspend(void *handle)
{
	return dce_virtual_hw_fini(handle);
}

static int dce_virtual_resume(void *handle)
{
	return dce_virtual_hw_init(handle);
}
537
538static bool dce_virtual_is_idle(void *handle)
539{
540 return true;
541}
542
543static int dce_virtual_wait_for_idle(void *handle)
544{
545 return 0;
546}
547
548static int dce_virtual_soft_reset(void *handle)
549{
550 return 0;
551}
552
553static int dce_virtual_set_clockgating_state(void *handle,
554 enum amd_clockgating_state state)
555{
556 return 0;
557}
558
559static int dce_virtual_set_powergating_state(void *handle,
560 enum amd_powergating_state state)
561{
562 return 0;
563}
564
565static const struct amd_ip_funcs dce_virtual_ip_funcs = {
566 .name = "dce_virtual",
567 .early_init = dce_virtual_early_init,
568 .late_init = NULL,
569 .sw_init = dce_virtual_sw_init,
570 .sw_fini = dce_virtual_sw_fini,
571 .hw_init = dce_virtual_hw_init,
572 .hw_fini = dce_virtual_hw_fini,
573 .suspend = dce_virtual_suspend,
574 .resume = dce_virtual_resume,
575 .is_idle = dce_virtual_is_idle,
576 .wait_for_idle = dce_virtual_wait_for_idle,
577 .soft_reset = dce_virtual_soft_reset,
578 .set_clockgating_state = dce_virtual_set_clockgating_state,
579 .set_powergating_state = dce_virtual_set_powergating_state,
580};
581
582/* these are handled by the primary encoders */
583static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
584{
585 return;
586}
587
588static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
589{
590 return;
591}
592
593static void
594dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
595 struct drm_display_mode *mode,
596 struct drm_display_mode *adjusted_mode)
597{
598 return;
599}
600
601static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
602{
603 return;
604}
605
606static void
607dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
608{
609 return;
610}
611
612static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
613 const struct drm_display_mode *mode,
614 struct drm_display_mode *adjusted_mode)
615{
616 return true;
617}
618
619static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
620 .dpms = dce_virtual_encoder_dpms,
621 .mode_fixup = dce_virtual_encoder_mode_fixup,
622 .prepare = dce_virtual_encoder_prepare,
623 .mode_set = dce_virtual_encoder_mode_set,
624 .commit = dce_virtual_encoder_commit,
625 .disable = dce_virtual_encoder_disable,
626};
627
628static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
629{
630 drm_encoder_cleanup(encoder);
631 kfree(encoder);
632}
633
634static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
635 .destroy = dce_virtual_encoder_destroy,
636};
637
638static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
639 int index)
640{
641 struct drm_encoder *encoder;
642 struct drm_connector *connector;
643
644 /* add a new encoder */
645 encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
646 if (!encoder)
647 return -ENOMEM;
648 encoder->possible_crtcs = 1 << index;
649 drm_encoder_init(adev->ddev, encoder, &dce_virtual_encoder_funcs,
650 DRM_MODE_ENCODER_VIRTUAL, NULL);
651 drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);
652
653 connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
654 if (!connector) {
655 kfree(encoder);
656 return -ENOMEM;
657 }
658
659 /* add a new connector */
660 drm_connector_init(adev->ddev, connector, &dce_virtual_connector_funcs,
661 DRM_MODE_CONNECTOR_VIRTUAL);
662 drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
663 connector->display_info.subpixel_order = SubPixelHorizontalRGB;
664 connector->interlace_allowed = false;
665 connector->doublescan_allowed = false;
666 drm_connector_register(connector);
667
668 /* link them */
669 drm_mode_connector_attach_encoder(connector, encoder);
670
671 return 0;
672}
673
674static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
675 .set_vga_render_state = &dce_virtual_set_vga_render_state,
676 .bandwidth_update = &dce_virtual_bandwidth_update,
677 .vblank_get_counter = &dce_virtual_vblank_get_counter,
678 .vblank_wait = &dce_virtual_vblank_wait,
679 .backlight_set_level = NULL,
680 .backlight_get_level = NULL,
681 .hpd_sense = &dce_virtual_hpd_sense,
682 .hpd_set_polarity = &dce_virtual_hpd_set_polarity,
683 .hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
684 .page_flip = &dce_virtual_page_flip,
685 .page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
686 .add_encoder = NULL,
687 .add_connector = NULL,
688 .stop_mc_access = &dce_virtual_stop_mc_access,
689 .resume_mc_access = &dce_virtual_resume_mc_access,
690};
691
692static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
693{
694 if (adev->mode_info.funcs == NULL)
695 adev->mode_info.funcs = &dce_virtual_display_funcs;
696}
697
698static int dce_virtual_pageflip(struct amdgpu_device *adev,
699 unsigned crtc_id)
700{
701 unsigned long flags;
702 struct amdgpu_crtc *amdgpu_crtc;
703 struct amdgpu_flip_work *works;
704
705 amdgpu_crtc = adev->mode_info.crtcs[crtc_id];
706
707 if (crtc_id >= adev->mode_info.num_crtc) {
708 DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
709 return -EINVAL;
710 }
711
712 /* IRQ could occur when in initial stage */
713 if (amdgpu_crtc == NULL)
714 return 0;
715
716 spin_lock_irqsave(&adev->ddev->event_lock, flags);
717 works = amdgpu_crtc->pflip_works;
718 if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
719 DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
720 "AMDGPU_FLIP_SUBMITTED(%d)\n",
721 amdgpu_crtc->pflip_status,
722 AMDGPU_FLIP_SUBMITTED);
723 spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
724 return 0;
725 }
726
727 /* page flip completed. clean up */
728 amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
729 amdgpu_crtc->pflip_works = NULL;
730
731 /* wakeup usersapce */
732 if (works->event)
733 drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);
734
735 spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
736
737 drm_crtc_vblank_put(&amdgpu_crtc->base);
738 schedule_work(&works->unpin_work);
739
740 return 0;
741}
742
743static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
744{
745 struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
746 struct amdgpu_crtc, vblank_timer);
747 struct drm_device *ddev = amdgpu_crtc->base.dev;
748 struct amdgpu_device *adev = ddev->dev_private;
749
750 drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
751 dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
752 hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
753 HRTIMER_MODE_REL);
754
755 return HRTIMER_NORESTART;
756}
757
758static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
759 int crtc,
760 enum amdgpu_interrupt_state state)
761{
762 if (crtc >= adev->mode_info.num_crtc) {
763 DRM_DEBUG("invalid crtc %d\n", crtc);
764 return;
765 }
766
767 if (state && !adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
768 DRM_DEBUG("Enable software vsync timer\n");
769 hrtimer_init(&adev->mode_info.crtcs[crtc]->vblank_timer,
770 CLOCK_MONOTONIC, HRTIMER_MODE_REL);
771 hrtimer_set_expires(&adev->mode_info.crtcs[crtc]->vblank_timer,
772 DCE_VIRTUAL_VBLANK_PERIOD);
773 adev->mode_info.crtcs[crtc]->vblank_timer.function =
774 dce_virtual_vblank_timer_handle;
775 hrtimer_start(&adev->mode_info.crtcs[crtc]->vblank_timer,
776 DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
777 } else if (!state && adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
778 DRM_DEBUG("Disable software vsync timer\n");
779 hrtimer_cancel(&adev->mode_info.crtcs[crtc]->vblank_timer);
780 }
781
782 adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
783 DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
784}
785
786
787static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
788 struct amdgpu_irq_src *source,
789 unsigned type,
790 enum amdgpu_interrupt_state state)
791{
792 if (type > AMDGPU_CRTC_IRQ_VBLANK6)
793 return -EINVAL;
794
795 dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);
796
797 return 0;
798}
799
800static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
801 .set = dce_virtual_set_crtc_irq_state,
802 .process = NULL,
803};
804
805static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
806{
807 adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_LAST;
808 adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
809}
810
811const struct amdgpu_ip_block_version dce_virtual_ip_block =
812{
813 .type = AMD_IP_BLOCK_TYPE_DCE,
814 .major = 1,
815 .minor = 0,
816 .rev = 0,
817 .funcs = &dce_virtual_ip_funcs,
818};
1/*
2 * Copyright 2014 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 */
23
24#include <drm/drm_vblank.h>
25
26#include "amdgpu.h"
27#include "amdgpu_pm.h"
28#include "amdgpu_i2c.h"
29#include "atom.h"
30#include "amdgpu_pll.h"
31#include "amdgpu_connectors.h"
32#ifdef CONFIG_DRM_AMDGPU_SI
33#include "dce_v6_0.h"
34#endif
35#ifdef CONFIG_DRM_AMDGPU_CIK
36#include "dce_v8_0.h"
37#endif
38#include "dce_v10_0.h"
39#include "dce_v11_0.h"
40#include "dce_virtual.h"
41#include "ivsrcid/ivsrcid_vislands30.h"
42#include "amdgpu_display.h"
43
44#define DCE_VIRTUAL_VBLANK_PERIOD 16666666
45
46
47static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
48static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
49static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
50 int index);
51static int dce_virtual_pageflip(struct amdgpu_device *adev,
52 unsigned crtc_id);
53static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer);
54static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
55 int crtc,
56 enum amdgpu_interrupt_state state);
57
58static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
59{
60 return 0;
61}
62
63static void dce_virtual_page_flip(struct amdgpu_device *adev,
64 int crtc_id, u64 crtc_base, bool async)
65{
66 return;
67}
68
69static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
70 u32 *vbl, u32 *position)
71{
72 *vbl = 0;
73 *position = 0;
74
75 return -EINVAL;
76}
77
78static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
79 enum amdgpu_hpd_id hpd)
80{
81 return true;
82}
83
84static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
85 enum amdgpu_hpd_id hpd)
86{
87 return;
88}
89
90static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
91{
92 return 0;
93}
94
95/**
96 * dce_virtual_bandwidth_update - program display watermarks
97 *
98 * @adev: amdgpu_device pointer
99 *
100 * Calculate and program the display watermarks and line
101 * buffer allocation (CIK).
102 */
103static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
104{
105 return;
106}
107
108static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
109 u16 *green, u16 *blue, uint32_t size,
110 struct drm_modeset_acquire_ctx *ctx)
111{
112 return 0;
113}
114
115static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
116{
117 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
118
119 drm_crtc_cleanup(crtc);
120 kfree(amdgpu_crtc);
121}
122
123static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
124 .cursor_set2 = NULL,
125 .cursor_move = NULL,
126 .gamma_set = dce_virtual_crtc_gamma_set,
127 .set_config = amdgpu_display_crtc_set_config,
128 .destroy = dce_virtual_crtc_destroy,
129 .page_flip_target = amdgpu_display_crtc_page_flip_target,
130 .get_vblank_counter = amdgpu_get_vblank_counter_kms,
131 .enable_vblank = amdgpu_enable_vblank_kms,
132 .disable_vblank = amdgpu_disable_vblank_kms,
133 .get_vblank_timestamp = drm_crtc_vblank_helper_get_vblank_timestamp,
134};
135
136static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
137{
138 struct drm_device *dev = crtc->dev;
139 struct amdgpu_device *adev = drm_to_adev(dev);
140 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
141 unsigned type;
142
143 switch (mode) {
144 case DRM_MODE_DPMS_ON:
145 amdgpu_crtc->enabled = true;
146 /* Make sure VBLANK interrupts are still enabled */
147 type = amdgpu_display_crtc_idx_to_irq_type(adev,
148 amdgpu_crtc->crtc_id);
149 amdgpu_irq_update(adev, &adev->crtc_irq, type);
150 drm_crtc_vblank_on(crtc);
151 break;
152 case DRM_MODE_DPMS_STANDBY:
153 case DRM_MODE_DPMS_SUSPEND:
154 case DRM_MODE_DPMS_OFF:
155 drm_crtc_vblank_off(crtc);
156 amdgpu_crtc->enabled = false;
157 break;
158 }
159}
160
161
162static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
163{
164 dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
165}
166
167static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
168{
169 dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
170}
171
172static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
173{
174 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
175 struct drm_device *dev = crtc->dev;
176
177 if (dev->num_crtcs)
178 drm_crtc_vblank_off(crtc);
179
180 amdgpu_crtc->enabled = false;
181 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
182 amdgpu_crtc->encoder = NULL;
183 amdgpu_crtc->connector = NULL;
184}
185
186static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
187 struct drm_display_mode *mode,
188 struct drm_display_mode *adjusted_mode,
189 int x, int y, struct drm_framebuffer *old_fb)
190{
191 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
192
193 /* update the hw version fpr dpm */
194 amdgpu_crtc->hw_mode = *adjusted_mode;
195
196 return 0;
197}
198
199static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
200 const struct drm_display_mode *mode,
201 struct drm_display_mode *adjusted_mode)
202{
203 return true;
204}
205
206
207static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
208 struct drm_framebuffer *old_fb)
209{
210 return 0;
211}
212
213static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
214 struct drm_framebuffer *fb,
215 int x, int y, enum mode_set_atomic state)
216{
217 return 0;
218}
219
220static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
221 .dpms = dce_virtual_crtc_dpms,
222 .mode_fixup = dce_virtual_crtc_mode_fixup,
223 .mode_set = dce_virtual_crtc_mode_set,
224 .mode_set_base = dce_virtual_crtc_set_base,
225 .mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
226 .prepare = dce_virtual_crtc_prepare,
227 .commit = dce_virtual_crtc_commit,
228 .disable = dce_virtual_crtc_disable,
229 .get_scanout_position = amdgpu_crtc_get_scanout_position,
230};
231
232static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
233{
234 struct amdgpu_crtc *amdgpu_crtc;
235
236 amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
237 (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
238 if (amdgpu_crtc == NULL)
239 return -ENOMEM;
240
241 drm_crtc_init(adev_to_drm(adev), &amdgpu_crtc->base, &dce_virtual_crtc_funcs);
242
243 drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
244 amdgpu_crtc->crtc_id = index;
245 adev->mode_info.crtcs[index] = amdgpu_crtc;
246
247 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
248 amdgpu_crtc->encoder = NULL;
249 amdgpu_crtc->connector = NULL;
250 amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
251 drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);
252
253 hrtimer_init(&amdgpu_crtc->vblank_timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
254 hrtimer_set_expires(&amdgpu_crtc->vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD);
255 amdgpu_crtc->vblank_timer.function = dce_virtual_vblank_timer_handle;
256 hrtimer_start(&amdgpu_crtc->vblank_timer,
257 DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
258 return 0;
259}
260
261static int dce_virtual_early_init(void *handle)
262{
263 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
264
265 dce_virtual_set_display_funcs(adev);
266 dce_virtual_set_irq_funcs(adev);
267
268 adev->mode_info.num_hpd = 1;
269 adev->mode_info.num_dig = 1;
270 return 0;
271}
272
273static struct drm_encoder *
274dce_virtual_encoder(struct drm_connector *connector)
275{
276 struct drm_encoder *encoder;
277
278 drm_connector_for_each_possible_encoder(connector, encoder) {
279 if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
280 return encoder;
281 }
282
283 /* pick the first one */
284 drm_connector_for_each_possible_encoder(connector, encoder)
285 return encoder;
286
287 return NULL;
288}
289
290static int dce_virtual_get_modes(struct drm_connector *connector)
291{
292 struct drm_device *dev = connector->dev;
293 struct drm_display_mode *mode = NULL;
294 unsigned i;
295 static const struct mode_size {
296 int w;
297 int h;
298 } common_modes[] = {
299 { 640, 480},
300 { 720, 480},
301 { 800, 600},
302 { 848, 480},
303 {1024, 768},
304 {1152, 768},
305 {1280, 720},
306 {1280, 800},
307 {1280, 854},
308 {1280, 960},
309 {1280, 1024},
310 {1440, 900},
311 {1400, 1050},
312 {1680, 1050},
313 {1600, 1200},
314 {1920, 1080},
315 {1920, 1200},
316 {2560, 1440},
317 {4096, 3112},
318 {3656, 2664},
319 {3840, 2160},
320 {4096, 2160},
321 };
322
323 for (i = 0; i < ARRAY_SIZE(common_modes); i++) {
324 mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h, 60, false, false, false);
325 drm_mode_probed_add(connector, mode);
326 }
327
328 return 0;
329}
330
331static enum drm_mode_status dce_virtual_mode_valid(struct drm_connector *connector,
332 struct drm_display_mode *mode)
333{
334 return MODE_OK;
335}
336
/* DPMS transitions are meaningless without hardware; accept silently. */
static int
dce_virtual_dpms(struct drm_connector *connector, int mode)
{
	return 0;
}
342
343static int
344dce_virtual_set_property(struct drm_connector *connector,
345 struct drm_property *property,
346 uint64_t val)
347{
348 return 0;
349}
350
/* Unregister, clean up, and release the kzalloc'd connector. */
static void dce_virtual_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}
357
/* Nothing to do when the connector status is forced from userspace. */
static void dce_virtual_force(struct drm_connector *connector)
{
}
362
/* Probe helpers for the virtual connector: fixed mode list, everything
 * valid, encoder picked by dce_virtual_encoder().
 */
static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
	.get_modes = dce_virtual_get_modes,
	.mode_valid = dce_virtual_mode_valid,
	.best_encoder = dce_virtual_encoder,
};
368
/* Core connector ops; dpms/set_property/force are no-ops for the virtual
 * display, destroy frees the kzalloc'd connector.
 */
static const struct drm_connector_funcs dce_virtual_connector_funcs = {
	.dpms = dce_virtual_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = dce_virtual_set_property,
	.destroy = dce_virtual_destroy,
	.force = dce_virtual_force,
};
376
377static int dce_virtual_sw_init(void *handle)
378{
379 int r, i;
380 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
381
382 r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER, &adev->crtc_irq);
383 if (r)
384 return r;
385
386 adev_to_drm(adev)->max_vblank_count = 0;
387
388 adev_to_drm(adev)->mode_config.funcs = &amdgpu_mode_funcs;
389
390 adev_to_drm(adev)->mode_config.max_width = 16384;
391 adev_to_drm(adev)->mode_config.max_height = 16384;
392
393 adev_to_drm(adev)->mode_config.preferred_depth = 24;
394 adev_to_drm(adev)->mode_config.prefer_shadow = 1;
395
396 adev_to_drm(adev)->mode_config.fb_base = adev->gmc.aper_base;
397
398 r = amdgpu_display_modeset_create_props(adev);
399 if (r)
400 return r;
401
402 adev_to_drm(adev)->mode_config.max_width = 16384;
403 adev_to_drm(adev)->mode_config.max_height = 16384;
404
405 /* allocate crtcs, encoders, connectors */
406 for (i = 0; i < adev->mode_info.num_crtc; i++) {
407 r = dce_virtual_crtc_init(adev, i);
408 if (r)
409 return r;
410 r = dce_virtual_connector_encoder_init(adev, i);
411 if (r)
412 return r;
413 }
414
415 drm_kms_helper_poll_init(adev_to_drm(adev));
416
417 adev->mode_info.mode_config_initialized = true;
418 return 0;
419}
420
421static int dce_virtual_sw_fini(void *handle)
422{
423 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
424 int i = 0;
425
426 for (i = 0; i < adev->mode_info.num_crtc; i++)
427 if (adev->mode_info.crtcs[i])
428 hrtimer_cancel(&adev->mode_info.crtcs[i]->vblank_timer);
429
430 kfree(adev->mode_info.bios_hardcoded_edid);
431
432 drm_kms_helper_poll_fini(adev_to_drm(adev));
433
434 drm_mode_config_cleanup(adev_to_drm(adev));
435 /* clear crtcs pointer to avoid dce irq finish routine access freed data */
436 memset(adev->mode_info.crtcs, 0, sizeof(adev->mode_info.crtcs[0]) * AMDGPU_MAX_CRTCS);
437 adev->mode_info.mode_config_initialized = false;
438 return 0;
439}
440
/**
 * dce_virtual_hw_init - hardware init for the virtual display IP
 *
 * @handle: amdgpu_device pointer (opaque amd_ip_funcs handle)
 *
 * The virtual display has no hardware of its own; the only job here is to
 * disable the real DCE block (per ASIC generation) so it does not assert
 * interrupts or scan out while the virtual path owns the display.
 *
 * Always returns 0.
 */
static int dce_virtual_hw_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	switch (adev->asic_type) {
#ifdef CONFIG_DRM_AMDGPU_SI
	/* SI parts: DCE 6.x */
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
	case CHIP_VERDE:
	case CHIP_OLAND:
		dce_v6_0_disable_dce(adev);
		break;
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
	/* CIK parts: DCE 8.x */
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
	case CHIP_KAVERI:
	case CHIP_KABINI:
	case CHIP_MULLINS:
		dce_v8_0_disable_dce(adev);
		break;
#endif
	/* VI parts: DCE 10.x */
	case CHIP_FIJI:
	case CHIP_TONGA:
		dce_v10_0_disable_dce(adev);
		break;
	/* VI parts: DCE 11.x */
	case CHIP_CARRIZO:
	case CHIP_STONEY:
	case CHIP_POLARIS10:
	case CHIP_POLARIS11:
	case CHIP_VEGAM:
		dce_v11_0_disable_dce(adev);
		break;
	case CHIP_TOPAZ:
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_HAINAN:
#endif
		/* no DCE */
		break;
	default:
		/* newer ASICs: nothing to disable here */
		break;
	}
	return 0;
}
485
/* No hardware to tear down for the virtual display. */
static int dce_virtual_hw_fini(void *handle)
{
	return 0;
}
490
/* Suspend: quiesce the display state, then run the (no-op) hw teardown. */
static int dce_virtual_suspend(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int r = amdgpu_display_suspend_helper(adev);

	if (r)
		return r;

	return dce_virtual_hw_fini(handle);
}
501
/* Resume: re-run hw init (re-disables real DCE), then restore display state. */
static int dce_virtual_resume(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int r = dce_virtual_hw_init(handle);

	if (r)
		return r;

	return amdgpu_display_resume_helper(adev);
}
512
513static bool dce_virtual_is_idle(void *handle)
514{
515 return true;
516}
517
/* Always idle, so there is nothing to wait for. */
static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}
522
/* No hardware, so soft reset trivially succeeds. */
static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}
527
528static int dce_virtual_set_clockgating_state(void *handle,
529 enum amd_clockgating_state state)
530{
531 return 0;
532}
533
534static int dce_virtual_set_powergating_state(void *handle,
535 enum amd_powergating_state state)
536{
537 return 0;
538}
539
/* amd_ip_funcs table wiring the virtual display into the IP block
 * lifecycle; most hw-related callbacks are stubs since there is no
 * hardware behind this block.
 */
static const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.late_init = NULL,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};
556
557/* these are handled by the primary encoders */
/* No-op: nothing to prepare on a virtual encoder. */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
}
562
/* No-op: nothing to commit on a virtual encoder. */
static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
}
567
/* No-op: a virtual encoder has no mode-programming to do. */
static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
}
575
/* No-op: nothing to power down on a virtual encoder. */
static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
}
580
/* No-op: DPMS has no effect on a virtual encoder. */
static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
}
586
587static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
588 const struct drm_display_mode *mode,
589 struct drm_display_mode *adjusted_mode)
590{
591 return true;
592}
593
/* Encoder helper ops: all stubs, since the virtual encoder drives no
 * hardware and accepts every mode unchanged.
 */
static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
	.dpms = dce_virtual_encoder_dpms,
	.mode_fixup = dce_virtual_encoder_mode_fixup,
	.prepare = dce_virtual_encoder_prepare,
	.mode_set = dce_virtual_encoder_mode_set,
	.commit = dce_virtual_encoder_commit,
	.disable = dce_virtual_encoder_disable,
};
602
/* Clean up and release the kzalloc'd virtual encoder. */
static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
608
/* Core encoder ops: only destruction is needed for the virtual encoder. */
static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};
612
613static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
614 int index)
615{
616 struct drm_encoder *encoder;
617 struct drm_connector *connector;
618
619 /* add a new encoder */
620 encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
621 if (!encoder)
622 return -ENOMEM;
623 encoder->possible_crtcs = 1 << index;
624 drm_encoder_init(adev_to_drm(adev), encoder, &dce_virtual_encoder_funcs,
625 DRM_MODE_ENCODER_VIRTUAL, NULL);
626 drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);
627
628 connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
629 if (!connector) {
630 kfree(encoder);
631 return -ENOMEM;
632 }
633
634 /* add a new connector */
635 drm_connector_init(adev_to_drm(adev), connector, &dce_virtual_connector_funcs,
636 DRM_MODE_CONNECTOR_VIRTUAL);
637 drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
638 connector->display_info.subpixel_order = SubPixelHorizontalRGB;
639 connector->interlace_allowed = false;
640 connector->doublescan_allowed = false;
641
642 /* link them */
643 drm_connector_attach_encoder(connector, encoder);
644
645 return 0;
646}
647
/* amdgpu display callback table for the virtual engine; backlight and
 * legacy add_encoder/add_connector hooks are unused here.
 */
static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_encoder = NULL,
	.add_connector = NULL,
};
661
662static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
663{
664 adev->mode_info.funcs = &dce_virtual_display_funcs;
665}
666
667static int dce_virtual_pageflip(struct amdgpu_device *adev,
668 unsigned crtc_id)
669{
670 unsigned long flags;
671 struct amdgpu_crtc *amdgpu_crtc;
672 struct amdgpu_flip_work *works;
673
674 amdgpu_crtc = adev->mode_info.crtcs[crtc_id];
675
676 if (crtc_id >= adev->mode_info.num_crtc) {
677 DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
678 return -EINVAL;
679 }
680
681 /* IRQ could occur when in initial stage */
682 if (amdgpu_crtc == NULL)
683 return 0;
684
685 spin_lock_irqsave(&adev_to_drm(adev)->event_lock, flags);
686 works = amdgpu_crtc->pflip_works;
687 if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
688 DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
689 "AMDGPU_FLIP_SUBMITTED(%d)\n",
690 amdgpu_crtc->pflip_status,
691 AMDGPU_FLIP_SUBMITTED);
692 spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);
693 return 0;
694 }
695
696 /* page flip completed. clean up */
697 amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
698 amdgpu_crtc->pflip_works = NULL;
699
700 /* wakeup usersapce */
701 if (works->event)
702 drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);
703
704 spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);
705
706 drm_crtc_vblank_put(&amdgpu_crtc->base);
707 amdgpu_bo_unref(&works->old_abo);
708 kfree(works->shared);
709 kfree(works);
710
711 return 0;
712}
713
/**
 * dce_virtual_vblank_timer_handle - software vblank emulation callback
 *
 * @vblank_timer: expired hrtimer embedded in an amdgpu_crtc
 *
 * Fires once per DCE_VIRTUAL_VBLANK_PERIOD.  If the crtc's (software)
 * vblank interrupt is enabled, deliver the vblank event and complete any
 * pending page flip.  The timer is re-armed manually with hrtimer_start()
 * and the handler returns HRTIMER_NORESTART, rather than using
 * hrtimer_forward()/HRTIMER_RESTART — so the period is measured from the
 * end of the handler, not from the previous expiry.
 */
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
	struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
						       struct amdgpu_crtc, vblank_timer);
	struct drm_device *ddev = amdgpu_crtc->base.dev;
	struct amdgpu_device *adev = drm_to_adev(ddev);
	/* the single legacy source registered in dce_virtual_sw_init() */
	struct amdgpu_irq_src *source = adev->irq.client[AMDGPU_IRQ_CLIENTID_LEGACY].sources
		[VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER];
	int irq_type = amdgpu_display_crtc_idx_to_irq_type(adev,
						amdgpu_crtc->crtc_id);

	if (amdgpu_irq_enabled(adev, source, irq_type)) {
		drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
		dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
	}
	/* re-arm for the next simulated vblank */
	hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
		      HRTIMER_MODE_REL);

	return HRTIMER_NORESTART;
}
734
735static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
736 int crtc,
737 enum amdgpu_interrupt_state state)
738{
739 if (crtc >= adev->mode_info.num_crtc || !adev->mode_info.crtcs[crtc]) {
740 DRM_DEBUG("invalid crtc %d\n", crtc);
741 return;
742 }
743
744 adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
745 DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
746}
747
748
749static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
750 struct amdgpu_irq_src *source,
751 unsigned type,
752 enum amdgpu_interrupt_state state)
753{
754 if (type > AMDGPU_CRTC_IRQ_VBLANK6)
755 return -EINVAL;
756
757 dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);
758
759 return 0;
760}
761
/* Crtc IRQ source ops: only .set is needed; no real interrupt is ever
 * processed (vblanks come from the hrtimer instead).
 */
static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
	.process = NULL,
};
766
767static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
768{
769 adev->crtc_irq.num_types = adev->mode_info.num_crtc;
770 adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
771}
772
/* IP block descriptor registered by the ASIC setup code to select the
 * virtual display path (version 1.0.0).
 */
const struct amdgpu_ip_block_version dce_virtual_ip_block =
{
	.type = AMD_IP_BLOCK_TYPE_DCE,
	.major = 1,
	.minor = 0,
	.rev = 0,
	.funcs = &dce_virtual_ip_funcs,
};