Linux kernel mirror (for testing)
git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel
os
linux
1/*
2 * Copyright 2014 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 */
23
24#include <drm/drm_vblank.h>
25
26#include "amdgpu.h"
27#include "amdgpu_pm.h"
28#include "amdgpu_i2c.h"
29#include "atom.h"
30#include "amdgpu_pll.h"
31#include "amdgpu_connectors.h"
32#ifdef CONFIG_DRM_AMDGPU_SI
33#include "dce_v6_0.h"
34#endif
35#ifdef CONFIG_DRM_AMDGPU_CIK
36#include "dce_v8_0.h"
37#endif
38#include "dce_v10_0.h"
39#include "dce_v11_0.h"
40#include "dce_virtual.h"
41#include "ivsrcid/ivsrcid_vislands30.h"
42#include "amdgpu_display.h"
43
44#define DCE_VIRTUAL_VBLANK_PERIOD 16666666
45
46
47static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
48static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
49static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
50 int index);
51static int dce_virtual_pageflip(struct amdgpu_device *adev,
52 unsigned crtc_id);
53static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer);
54static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
55 int crtc,
56 enum amdgpu_interrupt_state state);
57
58static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
59{
60 return 0;
61}
62
63static void dce_virtual_page_flip(struct amdgpu_device *adev,
64 int crtc_id, u64 crtc_base, bool async)
65{
66 return;
67}
68
69static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
70 u32 *vbl, u32 *position)
71{
72 *vbl = 0;
73 *position = 0;
74
75 return -EINVAL;
76}
77
78static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
79 enum amdgpu_hpd_id hpd)
80{
81 return true;
82}
83
84static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
85 enum amdgpu_hpd_id hpd)
86{
87 return;
88}
89
90static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
91{
92 return 0;
93}
94
/**
 * dce_virtual_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * No-op for the virtual DCE: with no display hardware there are no
 * watermarks or line buffers to program. (The old kerneldoc wrongly
 * claimed a CIK-style calculation; also dropped the redundant bare
 * "return;" at the end of a void function.)
 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
}
107
108static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
109 u16 *green, u16 *blue, uint32_t size,
110 struct drm_modeset_acquire_ctx *ctx)
111{
112 return 0;
113}
114
/* Unregister the CRTC from DRM, then free the embedding amdgpu_crtc. */
static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *acrtc = to_amdgpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(acrtc);
}
122
/* KMS CRTC callbacks: cursors are unsupported on a virtual display,
 * while flip and vblank bookkeeping reuse the generic amdgpu helpers. */
static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
	.cursor_set2 = NULL,
	.cursor_move = NULL,
	.gamma_set = dce_virtual_crtc_gamma_set,
	.set_config = amdgpu_display_crtc_set_config,
	.destroy = dce_virtual_crtc_destroy,
	.page_flip_target = amdgpu_display_crtc_page_flip_target,
	.get_vblank_counter = amdgpu_get_vblank_counter_kms,
	.enable_vblank = amdgpu_enable_vblank_kms,
	.disable_vblank = amdgpu_disable_vblank_kms,
	.get_vblank_timestamp = drm_crtc_vblank_helper_get_vblank_timestamp,
};
135
136static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
137{
138 struct drm_device *dev = crtc->dev;
139 struct amdgpu_device *adev = drm_to_adev(dev);
140 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
141 unsigned type;
142
143 switch (mode) {
144 case DRM_MODE_DPMS_ON:
145 amdgpu_crtc->enabled = true;
146 /* Make sure VBLANK interrupts are still enabled */
147 type = amdgpu_display_crtc_idx_to_irq_type(adev,
148 amdgpu_crtc->crtc_id);
149 amdgpu_irq_update(adev, &adev->crtc_irq, type);
150 drm_crtc_vblank_on(crtc);
151 break;
152 case DRM_MODE_DPMS_STANDBY:
153 case DRM_MODE_DPMS_SUSPEND:
154 case DRM_MODE_DPMS_OFF:
155 drm_crtc_vblank_off(crtc);
156 amdgpu_crtc->enabled = false;
157 break;
158 }
159}
160
161
162static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
163{
164 dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
165}
166
167static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
168{
169 dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
170}
171
172static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
173{
174 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
175 struct drm_device *dev = crtc->dev;
176
177 if (dev->num_crtcs)
178 drm_crtc_vblank_off(crtc);
179
180 amdgpu_crtc->enabled = false;
181 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
182 amdgpu_crtc->encoder = NULL;
183 amdgpu_crtc->connector = NULL;
184}
185
186static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
187 struct drm_display_mode *mode,
188 struct drm_display_mode *adjusted_mode,
189 int x, int y, struct drm_framebuffer *old_fb)
190{
191 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
192
193 /* update the hw version fpr dpm */
194 amdgpu_crtc->hw_mode = *adjusted_mode;
195
196 return 0;
197}
198
199static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
200 const struct drm_display_mode *mode,
201 struct drm_display_mode *adjusted_mode)
202{
203 return true;
204}
205
206
/* No scanout base register exists; accept the request unconditionally. */
static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				     struct drm_framebuffer *old_fb)
{
	return 0;
}
212
213static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
214 struct drm_framebuffer *fb,
215 int x, int y, enum mode_set_atomic state)
216{
217 return 0;
218}
219
/* CRTC helper callbacks; all hardware-facing hooks are stubs, only the
 * dpms path (software vblank on/off) has an effect. */
static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
	.dpms = dce_virtual_crtc_dpms,
	.mode_fixup = dce_virtual_crtc_mode_fixup,
	.mode_set = dce_virtual_crtc_mode_set,
	.mode_set_base = dce_virtual_crtc_set_base,
	.mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
	.prepare = dce_virtual_crtc_prepare,
	.commit = dce_virtual_crtc_commit,
	.disable = dce_virtual_crtc_disable,
	.get_scanout_position = amdgpu_crtc_get_scanout_position,
};
231
232static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
233{
234 struct amdgpu_crtc *amdgpu_crtc;
235
236 amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
237 (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
238 if (amdgpu_crtc == NULL)
239 return -ENOMEM;
240
241 drm_crtc_init(adev_to_drm(adev), &amdgpu_crtc->base, &dce_virtual_crtc_funcs);
242
243 drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
244 amdgpu_crtc->crtc_id = index;
245 adev->mode_info.crtcs[index] = amdgpu_crtc;
246
247 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
248 amdgpu_crtc->encoder = NULL;
249 amdgpu_crtc->connector = NULL;
250 amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
251 drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);
252
253 hrtimer_init(&amdgpu_crtc->vblank_timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
254 hrtimer_set_expires(&amdgpu_crtc->vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD);
255 amdgpu_crtc->vblank_timer.function = dce_virtual_vblank_timer_handle;
256 hrtimer_start(&amdgpu_crtc->vblank_timer,
257 DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
258 return 0;
259}
260
261static int dce_virtual_early_init(void *handle)
262{
263 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
264
265 dce_virtual_set_display_funcs(adev);
266 dce_virtual_set_irq_funcs(adev);
267
268 adev->mode_info.num_hpd = 1;
269 adev->mode_info.num_dig = 1;
270 return 0;
271}
272
273static struct drm_encoder *
274dce_virtual_encoder(struct drm_connector *connector)
275{
276 struct drm_encoder *encoder;
277
278 drm_connector_for_each_possible_encoder(connector, encoder) {
279 if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
280 return encoder;
281 }
282
283 /* pick the first one */
284 drm_connector_for_each_possible_encoder(connector, encoder)
285 return encoder;
286
287 return NULL;
288}
289
290static int dce_virtual_get_modes(struct drm_connector *connector)
291{
292 struct drm_device *dev = connector->dev;
293 struct drm_display_mode *mode = NULL;
294 unsigned i;
295 static const struct mode_size {
296 int w;
297 int h;
298 } common_modes[] = {
299 { 640, 480},
300 { 720, 480},
301 { 800, 600},
302 { 848, 480},
303 {1024, 768},
304 {1152, 768},
305 {1280, 720},
306 {1280, 800},
307 {1280, 854},
308 {1280, 960},
309 {1280, 1024},
310 {1440, 900},
311 {1400, 1050},
312 {1680, 1050},
313 {1600, 1200},
314 {1920, 1080},
315 {1920, 1200},
316 {2560, 1440},
317 {4096, 3112},
318 {3656, 2664},
319 {3840, 2160},
320 {4096, 2160},
321 };
322
323 for (i = 0; i < ARRAY_SIZE(common_modes); i++) {
324 mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h, 60, false, false, false);
325 drm_mode_probed_add(connector, mode);
326 }
327
328 return 0;
329}
330
331static enum drm_mode_status dce_virtual_mode_valid(struct drm_connector *connector,
332 struct drm_display_mode *mode)
333{
334 return MODE_OK;
335}
336
/* Connector DPMS is a no-op; power state lives on the virtual CRTC. */
static int
dce_virtual_dpms(struct drm_connector *connector, int mode)
{
	return 0;
}
342
343static int
344dce_virtual_set_property(struct drm_connector *connector,
345 struct drm_property *property,
346 uint64_t val)
347{
348 return 0;
349}
350
/* Tear down the virtual connector: unregister, clean up, then free.
 * The connector was allocated with kzalloc() in
 * dce_virtual_connector_encoder_init(), so plain kfree() owns it. */
static void dce_virtual_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}
357
/*
 * No-op: a virtual connector cannot be forced on or off.
 * (Dropped the redundant bare "return;" at the end of a void function.)
 */
static void dce_virtual_force(struct drm_connector *connector)
{
}
362
/* Connector helper callbacks: synthesized mode list, everything valid,
 * and a best_encoder hook that prefers the virtual encoder. */
static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
	.get_modes = dce_virtual_get_modes,
	.mode_valid = dce_virtual_mode_valid,
	.best_encoder = dce_virtual_encoder,
};
368
/* Connector callbacks; probing goes through the generic single-connector
 * helper, the rest are local no-op stubs plus destroy. */
static const struct drm_connector_funcs dce_virtual_connector_funcs = {
	.dpms = dce_virtual_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = dce_virtual_set_property,
	.destroy = dce_virtual_destroy,
	.force = dce_virtual_force,
};
376
377static int dce_virtual_sw_init(void *handle)
378{
379 int r, i;
380 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
381
382 r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER, &adev->crtc_irq);
383 if (r)
384 return r;
385
386 adev_to_drm(adev)->max_vblank_count = 0;
387
388 adev_to_drm(adev)->mode_config.funcs = &amdgpu_mode_funcs;
389
390 adev_to_drm(adev)->mode_config.max_width = 16384;
391 adev_to_drm(adev)->mode_config.max_height = 16384;
392
393 adev_to_drm(adev)->mode_config.preferred_depth = 24;
394 adev_to_drm(adev)->mode_config.prefer_shadow = 1;
395
396 adev_to_drm(adev)->mode_config.fb_base = adev->gmc.aper_base;
397
398 r = amdgpu_display_modeset_create_props(adev);
399 if (r)
400 return r;
401
402 adev_to_drm(adev)->mode_config.max_width = 16384;
403 adev_to_drm(adev)->mode_config.max_height = 16384;
404
405 /* allocate crtcs, encoders, connectors */
406 for (i = 0; i < adev->mode_info.num_crtc; i++) {
407 r = dce_virtual_crtc_init(adev, i);
408 if (r)
409 return r;
410 r = dce_virtual_connector_encoder_init(adev, i);
411 if (r)
412 return r;
413 }
414
415 drm_kms_helper_poll_init(adev_to_drm(adev));
416
417 adev->mode_info.mode_config_initialized = true;
418 return 0;
419}
420
421static int dce_virtual_sw_fini(void *handle)
422{
423 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
424
425 kfree(adev->mode_info.bios_hardcoded_edid);
426
427 drm_kms_helper_poll_fini(adev_to_drm(adev));
428
429 drm_mode_config_cleanup(adev_to_drm(adev));
430 /* clear crtcs pointer to avoid dce irq finish routine access freed data */
431 memset(adev->mode_info.crtcs, 0, sizeof(adev->mode_info.crtcs[0]) * AMDGPU_MAX_CRTCS);
432 adev->mode_info.mode_config_initialized = false;
433 return 0;
434}
435
/*
 * dce_virtual_hw_init - disable the physical display engine
 *
 * @handle: amdgpu_device pointer (as void *)
 *
 * The virtual display replaces the real DCE block, so the matching
 * hardware generation (if present) is powered down here. Chips with
 * no DCE at all (Topaz, Hainan) need nothing. Always returns 0.
 */
static int dce_virtual_hw_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	switch (adev->asic_type) {
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
	case CHIP_VERDE:
	case CHIP_OLAND:
		dce_v6_0_disable_dce(adev);
		break;
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
	case CHIP_KAVERI:
	case CHIP_KABINI:
	case CHIP_MULLINS:
		dce_v8_0_disable_dce(adev);
		break;
#endif
	case CHIP_FIJI:
	case CHIP_TONGA:
		dce_v10_0_disable_dce(adev);
		break;
	case CHIP_CARRIZO:
	case CHIP_STONEY:
	case CHIP_POLARIS10:
	case CHIP_POLARIS11:
	case CHIP_VEGAM:
		dce_v11_0_disable_dce(adev);
		break;
	case CHIP_TOPAZ:
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_HAINAN:
#endif
		/* no DCE */
		break;
	default:
		break;
	}
	return 0;
}
480
481static int dce_virtual_hw_fini(void *handle)
482{
483 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
484 int i = 0;
485
486 for (i = 0; i<adev->mode_info.num_crtc; i++)
487 if (adev->mode_info.crtcs[i])
488 hrtimer_cancel(&adev->mode_info.crtcs[i]->vblank_timer);
489
490 return 0;
491}
492
/* Suspend: park the displays first, then stop the vblank timers. */
static int dce_virtual_suspend(void *handle)
{
	int r;

	r = amdgpu_display_suspend_helper((struct amdgpu_device *)handle);
	if (r)
		return r;

	return dce_virtual_hw_fini(handle);
}
503
/* Resume: re-run hw init, then bring the displays back. */
static int dce_virtual_resume(void *handle)
{
	int r;

	r = dce_virtual_hw_init(handle);
	if (r)
		return r;

	return amdgpu_display_resume_helper((struct amdgpu_device *)handle);
}
514
515static bool dce_virtual_is_idle(void *handle)
516{
517 return true;
518}
519
/* Nothing to wait for on a virtual block. */
static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}
524
/* Nothing to reset on a virtual block. */
static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}
529
530static int dce_virtual_set_clockgating_state(void *handle,
531 enum amd_clockgating_state state)
532{
533 return 0;
534}
535
536static int dce_virtual_set_powergating_state(void *handle,
537 enum amd_powergating_state state)
538{
539 return 0;
540}
541
/* IP block dispatch table for the virtual DCE. */
static const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.late_init = NULL,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};
558
559/* these are handled by the primary encoders */
/* No-op (dropped the redundant bare "return;"). */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
}
564
/* No-op (dropped the redundant bare "return;"). */
static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
}
569
/* No-op (dropped the redundant bare "return;"). */
static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
}
577
/* No-op (dropped the redundant bare "return;"). */
static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
}
582
/* No-op (dropped the redundant bare "return;"). */
static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
}
588
589static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
590 const struct drm_display_mode *mode,
591 struct drm_display_mode *adjusted_mode)
592{
593 return true;
594}
595
/* Encoder helper callbacks; all stubs since no encoder hardware exists. */
static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
	.dpms = dce_virtual_encoder_dpms,
	.mode_fixup = dce_virtual_encoder_mode_fixup,
	.prepare = dce_virtual_encoder_prepare,
	.mode_set = dce_virtual_encoder_mode_set,
	.commit = dce_virtual_encoder_commit,
	.disable = dce_virtual_encoder_disable,
};
604
/* Unregister the encoder from DRM and free it; it was allocated with
 * kzalloc() in dce_virtual_connector_encoder_init(). */
static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
610
/* Encoder callbacks: only destroy is needed for a virtual encoder. */
static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};
614
615static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
616 int index)
617{
618 struct drm_encoder *encoder;
619 struct drm_connector *connector;
620
621 /* add a new encoder */
622 encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
623 if (!encoder)
624 return -ENOMEM;
625 encoder->possible_crtcs = 1 << index;
626 drm_encoder_init(adev_to_drm(adev), encoder, &dce_virtual_encoder_funcs,
627 DRM_MODE_ENCODER_VIRTUAL, NULL);
628 drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);
629
630 connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
631 if (!connector) {
632 kfree(encoder);
633 return -ENOMEM;
634 }
635
636 /* add a new connector */
637 drm_connector_init(adev_to_drm(adev), connector, &dce_virtual_connector_funcs,
638 DRM_MODE_CONNECTOR_VIRTUAL);
639 drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
640 connector->display_info.subpixel_order = SubPixelHorizontalRGB;
641 connector->interlace_allowed = false;
642 connector->doublescan_allowed = false;
643
644 /* link them */
645 drm_connector_attach_encoder(connector, encoder);
646
647 return 0;
648}
649
/* amdgpu display dispatch table: all hooks point at the local stubs,
 * backlight and encoder/connector add hooks are intentionally absent. */
static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_encoder = NULL,
	.add_connector = NULL,
};
663
/* Install the virtual display dispatch table into mode_info. */
static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
	adev->mode_info.funcs = &dce_virtual_display_funcs;
}
668
669static int dce_virtual_pageflip(struct amdgpu_device *adev,
670 unsigned crtc_id)
671{
672 unsigned long flags;
673 struct amdgpu_crtc *amdgpu_crtc;
674 struct amdgpu_flip_work *works;
675
676 amdgpu_crtc = adev->mode_info.crtcs[crtc_id];
677
678 if (crtc_id >= adev->mode_info.num_crtc) {
679 DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
680 return -EINVAL;
681 }
682
683 /* IRQ could occur when in initial stage */
684 if (amdgpu_crtc == NULL)
685 return 0;
686
687 spin_lock_irqsave(&adev_to_drm(adev)->event_lock, flags);
688 works = amdgpu_crtc->pflip_works;
689 if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
690 DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
691 "AMDGPU_FLIP_SUBMITTED(%d)\n",
692 amdgpu_crtc->pflip_status,
693 AMDGPU_FLIP_SUBMITTED);
694 spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);
695 return 0;
696 }
697
698 /* page flip completed. clean up */
699 amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
700 amdgpu_crtc->pflip_works = NULL;
701
702 /* wakeup usersapce */
703 if (works->event)
704 drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);
705
706 spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);
707
708 drm_crtc_vblank_put(&amdgpu_crtc->base);
709 amdgpu_bo_unref(&works->old_abo);
710 kfree(works->shared);
711 kfree(works);
712
713 return 0;
714}
715
716static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
717{
718 struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
719 struct amdgpu_crtc, vblank_timer);
720 struct drm_device *ddev = amdgpu_crtc->base.dev;
721 struct amdgpu_device *adev = drm_to_adev(ddev);
722 struct amdgpu_irq_src *source = adev->irq.client[AMDGPU_IRQ_CLIENTID_LEGACY].sources
723 [VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER];
724 int irq_type = amdgpu_display_crtc_idx_to_irq_type(adev,
725 amdgpu_crtc->crtc_id);
726
727 if (amdgpu_irq_enabled(adev, source, irq_type)) {
728 drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
729 dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
730 }
731 hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
732 HRTIMER_MODE_REL);
733
734 return HRTIMER_NORESTART;
735}
736
/* Record the requested software vblank "interrupt" state for @crtc;
 * the vblank hrtimer handler consults vsync_timer_enabled (via
 * amdgpu_irq_enabled) to decide whether to deliver events. */
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							int crtc,
							enum amdgpu_interrupt_state state)
{
	if (crtc >= adev->mode_info.num_crtc || !adev->mode_info.crtcs[crtc]) {
		DRM_DEBUG("invalid crtc %d\n", crtc);
		return;
	}

	adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
	DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}
749
750
/* irq_src .set hook: @type doubles as the crtc index here; forward the
 * enable/disable request to the per-crtc software vblank state. */
static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
					  struct amdgpu_irq_src *source,
					  unsigned type,
					  enum amdgpu_interrupt_state state)
{
	if (type > AMDGPU_CRTC_IRQ_VBLANK6)
		return -EINVAL;

	dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);

	return 0;
}
763
/* crtc irq source: only .set is needed — "interrupts" are generated by
 * the vblank hrtimer, so there is nothing to .process. */
static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
	.process = NULL,
};
768
/* Register the crtc irq source and its per-crtc type count. */
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VBLANK6 + 1;
	adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
}
774
/* Exported IP block descriptor used when virtual display is enabled. */
const struct amdgpu_ip_block_version dce_virtual_ip_block =
{
	.type = AMD_IP_BLOCK_TYPE_DCE,
	.major = 1,
	.minor = 0,
	.rev = 0,
	.funcs = &dce_virtual_ip_funcs,
};