]> git.kernelconcepts.de Git - karo-tx-linux.git/blob - drivers/gpu/drm/amd/amdgpu/dce_virtual.c
5499693f1dcf767d9751b786577db0225f665af7
[karo-tx-linux.git] / drivers / gpu / drm / amd / amdgpu / dce_virtual.c
1 /*
2  * Copyright 2014 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  */
23 #include "drmP.h"
24 #include "amdgpu.h"
25 #include "amdgpu_pm.h"
26 #include "amdgpu_i2c.h"
27 #include "atom.h"
28 #include "amdgpu_pll.h"
29 #include "amdgpu_connectors.h"
30 #ifdef CONFIG_DRM_AMDGPU_CIK
31 #include "dce_v8_0.h"
32 #endif
33 #include "dce_v10_0.h"
34 #include "dce_v11_0.h"
35 #include "dce_virtual.h"
36
37 static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
38 static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
39 static int dce_virtual_pageflip_irq(struct amdgpu_device *adev,
40                                   struct amdgpu_irq_src *source,
41                                   struct amdgpu_iv_entry *entry);
42
/**
 * dce_virtual_vblank_wait - vblank wait asic callback.
 *
 * @adev: amdgpu_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * No-op for virtual display: vblanks are emulated in software, so there
 * is no hardware event to wait on.
 */
static void dce_virtual_vblank_wait(struct amdgpu_device *adev, int crtc)
{
        /* nothing to wait for */
}
55
56 static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
57 {
58         if (crtc >= adev->mode_info.num_crtc)
59                 return 0;
60         else
61                 return adev->ddev->vblank[crtc].count;
62 }
63
64 static void dce_virtual_page_flip(struct amdgpu_device *adev,
65                               int crtc_id, u64 crtc_base, bool async)
66 {
67         return;
68 }
69
70 static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
71                                         u32 *vbl, u32 *position)
72 {
73         if ((crtc < 0) || (crtc >= adev->mode_info.num_crtc))
74                 return -EINVAL;
75
76         *vbl = 0;
77         *position = 0;
78
79         return 0;
80 }
81
82 static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
83                                enum amdgpu_hpd_id hpd)
84 {
85         return true;
86 }
87
88 static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
89                                       enum amdgpu_hpd_id hpd)
90 {
91         return;
92 }
93
94 static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
95 {
96         return 0;
97 }
98
99 static bool dce_virtual_is_display_hung(struct amdgpu_device *adev)
100 {
101         return false;
102 }
103
/*
 * dce_virtual_stop_mc_access - quiesce the physical DCE block
 *
 * @adev: amdgpu_device pointer
 * @save: register save area (unused here; kept for callback compatibility)
 *
 * Even when the display is virtualized, any real DCE hardware present on
 * the ASIC must be disabled so it stops accessing the memory controller.
 * Dispatch to the per-generation disable routine; TOPAZ has no DCE.
 */
void dce_virtual_stop_mc_access(struct amdgpu_device *adev,
                              struct amdgpu_mode_mc_save *save)
{
        switch (adev->asic_type) {
        case CHIP_BONAIRE:
        case CHIP_HAWAII:
        case CHIP_KAVERI:
        case CHIP_KABINI:
        case CHIP_MULLINS:
#ifdef CONFIG_DRM_AMDGPU_CIK
                /* only built when CIK support is configured in */
                dce_v8_0_disable_dce(adev);
#endif
                break;
        case CHIP_FIJI:
        case CHIP_TONGA:
                dce_v10_0_disable_dce(adev);
                break;
        case CHIP_CARRIZO:
        case CHIP_STONEY:
        case CHIP_POLARIS11:
        case CHIP_POLARIS10:
                dce_v11_0_disable_dce(adev);
                break;
        case CHIP_TOPAZ:
                /* no DCE */
                return;
        default:
                DRM_ERROR("Virtual display unsupported ASIC type: 0x%X\n", adev->asic_type);
        }

        return;
}
/* Nothing was saved by stop_mc_access, so there is nothing to restore. */
void dce_virtual_resume_mc_access(struct amdgpu_device *adev,
                                struct amdgpu_mode_mc_save *save)
{
        /* nothing to do */
}
141
142 void dce_virtual_set_vga_render_state(struct amdgpu_device *adev,
143                                     bool render)
144 {
145         return;
146 }
147
/**
 * dce_virtual_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * No-op: a virtual display has no watermarks or line buffers to program.
 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
        /* nothing to do */
}
160
161 static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
162                                       u16 *green, u16 *blue, uint32_t size)
163 {
164         struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
165         int i;
166
167         /* userspace palettes are always correct as is */
168         for (i = 0; i < size; i++) {
169                 amdgpu_crtc->lut_r[i] = red[i] >> 6;
170                 amdgpu_crtc->lut_g[i] = green[i] >> 6;
171                 amdgpu_crtc->lut_b[i] = blue[i] >> 6;
172         }
173
174         return 0;
175 }
176
/* Unregister the crtc from drm core and free the amdgpu wrapper. */
static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
        struct amdgpu_crtc *acrtc = to_amdgpu_crtc(crtc);

        drm_crtc_cleanup(crtc);
        kfree(acrtc);
}
184
/* drm core crtc callbacks; no cursor support on the virtual crtc. */
static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
        .cursor_set2 = NULL,
        .cursor_move = NULL,
        .gamma_set = dce_virtual_crtc_gamma_set,
        .set_config = amdgpu_crtc_set_config,
        .destroy = dce_virtual_crtc_destroy,
        .page_flip = amdgpu_crtc_page_flip,
};
193
194 static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
195 {
196         struct drm_device *dev = crtc->dev;
197         struct amdgpu_device *adev = dev->dev_private;
198         struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
199         unsigned type;
200
201         switch (mode) {
202         case DRM_MODE_DPMS_ON:
203                 amdgpu_crtc->enabled = true;
204                 /* Make sure VBLANK and PFLIP interrupts are still enabled */
205                 type = amdgpu_crtc_idx_to_irq_type(adev, amdgpu_crtc->crtc_id);
206                 amdgpu_irq_update(adev, &adev->crtc_irq, type);
207                 amdgpu_irq_update(adev, &adev->pageflip_irq, type);
208                 drm_vblank_on(dev, amdgpu_crtc->crtc_id);
209                 break;
210         case DRM_MODE_DPMS_STANDBY:
211         case DRM_MODE_DPMS_SUSPEND:
212         case DRM_MODE_DPMS_OFF:
213                 drm_vblank_off(dev, amdgpu_crtc->crtc_id);
214                 amdgpu_crtc->enabled = false;
215                 break;
216         }
217 }
218
219
220 static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
221 {
222         dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
223 }
224
225 static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
226 {
227         dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
228 }
229
/*
 * dce_virtual_crtc_disable - turn the virtual crtc off
 *
 * DPMS the crtc off, unpin the currently scanned-out framebuffer BO (if
 * one is bound) and drop the cached pll/encoder/connector associations.
 *
 * NOTE(review): if amdgpu_bo_reserve() fails the BO is left pinned; this
 * matches the other dce_v*_0 implementations but is a potential pin leak.
 */
static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
        struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

        dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
        if (crtc->primary->fb) {
                int r;
                struct amdgpu_framebuffer *amdgpu_fb;
                struct amdgpu_bo *rbo;

                amdgpu_fb = to_amdgpu_framebuffer(crtc->primary->fb);
                rbo = gem_to_amdgpu_bo(amdgpu_fb->obj);
                r = amdgpu_bo_reserve(rbo, false);
                if (unlikely(r))
                        DRM_ERROR("failed to reserve rbo before unpin\n");
                else {
                        amdgpu_bo_unpin(rbo);
                        amdgpu_bo_unreserve(rbo);
                }
        }

        amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
        amdgpu_crtc->encoder = NULL;
        amdgpu_crtc->connector = NULL;
}
255
/*
 * dce_virtual_crtc_mode_set - record the new mode on the crtc
 *
 * No hardware to program; just cache the adjusted mode so other code
 * (e.g. dpm) can query the current timing.
 */
static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
                                  struct drm_display_mode *mode,
                                  struct drm_display_mode *adjusted_mode,
                                  int x, int y, struct drm_framebuffer *old_fb)
{
        struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

        /* update the hw version for dpm */
        amdgpu_crtc->hw_mode = *adjusted_mode;

        return 0;
}
268
269 static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
270                                      const struct drm_display_mode *mode,
271                                      struct drm_display_mode *adjusted_mode)
272 {
273         struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
274         struct drm_device *dev = crtc->dev;
275         struct drm_encoder *encoder;
276
277         /* assign the encoder to the amdgpu crtc to avoid repeated lookups later */
278         list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
279                 if (encoder->crtc == crtc) {
280                         amdgpu_crtc->encoder = encoder;
281                         amdgpu_crtc->connector = amdgpu_get_connector_for_encoder(encoder);
282                         break;
283                 }
284         }
285         if ((amdgpu_crtc->encoder == NULL) || (amdgpu_crtc->connector == NULL)) {
286                 amdgpu_crtc->encoder = NULL;
287                 amdgpu_crtc->connector = NULL;
288                 return false;
289         }
290
291         return true;
292 }
293
294
/* No scanout base to program on a virtual crtc. */
static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
                                  struct drm_framebuffer *old_fb)
{
        return 0;
}
300
/* No hardware LUT to load; the ramp is only cached in gamma_set. */
static void dce_virtual_crtc_load_lut(struct drm_crtc *crtc)
{
        /* nothing to do */
}
305
306 static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
307                                          struct drm_framebuffer *fb,
308                                          int x, int y, enum mode_set_atomic state)
309 {
310         return 0;
311 }
312
/* drm crtc helper callbacks wiring the mostly-stub implementations above. */
static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
        .dpms = dce_virtual_crtc_dpms,
        .mode_fixup = dce_virtual_crtc_mode_fixup,
        .mode_set = dce_virtual_crtc_mode_set,
        .mode_set_base = dce_virtual_crtc_set_base,
        .mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
        .prepare = dce_virtual_crtc_prepare,
        .commit = dce_virtual_crtc_commit,
        .load_lut = dce_virtual_crtc_load_lut,
        .disable = dce_virtual_crtc_disable,
};
324
325 static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
326 {
327         struct amdgpu_crtc *amdgpu_crtc;
328         int i;
329
330         amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
331                               (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
332         if (amdgpu_crtc == NULL)
333                 return -ENOMEM;
334
335         drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_virtual_crtc_funcs);
336
337         drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
338         amdgpu_crtc->crtc_id = index;
339         adev->mode_info.crtcs[index] = amdgpu_crtc;
340
341         for (i = 0; i < 256; i++) {
342                 amdgpu_crtc->lut_r[i] = i << 2;
343                 amdgpu_crtc->lut_g[i] = i << 2;
344                 amdgpu_crtc->lut_b[i] = i << 2;
345         }
346
347         amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
348         amdgpu_crtc->encoder = NULL;
349         amdgpu_crtc->connector = NULL;
350         drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);
351
352         return 0;
353 }
354
355 static int dce_virtual_early_init(void *handle)
356 {
357         struct amdgpu_device *adev = (struct amdgpu_device *)handle;
358
359         adev->mode_info.vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
360         dce_virtual_set_display_funcs(adev);
361         dce_virtual_set_irq_funcs(adev);
362
363         adev->mode_info.num_crtc = 1;
364         adev->mode_info.num_hpd = 1;
365         adev->mode_info.num_dig = 1;
366         return 0;
367 }
368
369 static bool dce_virtual_get_connector_info(struct amdgpu_device *adev)
370 {
371         struct amdgpu_i2c_bus_rec ddc_bus;
372         struct amdgpu_router router;
373         struct amdgpu_hpd hpd;
374
375         /* look up gpio for ddc, hpd */
376         ddc_bus.valid = false;
377         hpd.hpd = AMDGPU_HPD_NONE;
378         /* needed for aux chan transactions */
379         ddc_bus.hpd = hpd.hpd;
380
381         memset(&router, 0, sizeof(router));
382         router.ddc_valid = false;
383         router.cd_valid = false;
384         amdgpu_display_add_connector(adev,
385                                       0,
386                                       ATOM_DEVICE_CRT1_SUPPORT,
387                                       DRM_MODE_CONNECTOR_VIRTUAL, &ddc_bus,
388                                       CONNECTOR_OBJECT_ID_VIRTUAL,
389                                       &hpd,
390                                       &router);
391
392         amdgpu_display_add_encoder(adev, ENCODER_VIRTUAL_ENUM_VIRTUAL,
393                                                         ATOM_DEVICE_CRT1_SUPPORT,
394                                                         0);
395
396         amdgpu_link_encoder_connector(adev->ddev);
397
398         return true;
399 }
400
401 static int dce_virtual_sw_init(void *handle)
402 {
403         int r, i;
404         struct amdgpu_device *adev = (struct amdgpu_device *)handle;
405
406         r = amdgpu_irq_add_id(adev, 229, &adev->crtc_irq);
407         if (r)
408                 return r;
409
410         adev->ddev->mode_config.funcs = &amdgpu_mode_funcs;
411
412         adev->ddev->mode_config.max_width = 16384;
413         adev->ddev->mode_config.max_height = 16384;
414
415         adev->ddev->mode_config.preferred_depth = 24;
416         adev->ddev->mode_config.prefer_shadow = 1;
417
418         adev->ddev->mode_config.fb_base = adev->mc.aper_base;
419
420         r = amdgpu_modeset_create_props(adev);
421         if (r)
422                 return r;
423
424         adev->ddev->mode_config.max_width = 16384;
425         adev->ddev->mode_config.max_height = 16384;
426
427         /* allocate crtcs */
428         for (i = 0; i < adev->mode_info.num_crtc; i++) {
429                 r = dce_virtual_crtc_init(adev, i);
430                 if (r)
431                         return r;
432         }
433
434         dce_virtual_get_connector_info(adev);
435         amdgpu_print_display_setup(adev->ddev);
436
437         drm_kms_helper_poll_init(adev->ddev);
438
439         adev->mode_info.mode_config_initialized = true;
440         return 0;
441 }
442
/*
 * dce_virtual_sw_fini - tear down the virtual display software state
 *
 * Frees the hardcoded EDID (if any), stops connector polling and
 * destroys the drm mode configuration created in sw_init.
 */
static int dce_virtual_sw_fini(void *handle)
{
        struct amdgpu_device *adev = (struct amdgpu_device *)handle;

        kfree(adev->mode_info.bios_hardcoded_edid);

        drm_kms_helper_poll_fini(adev->ddev);

        drm_mode_config_cleanup(adev->ddev);
        adev->mode_info.mode_config_initialized = false;
        return 0;
}
455
/* No hardware to bring up. */
static int dce_virtual_hw_init(void *handle)
{
        return 0;
}
460
/* No hardware to tear down. */
static int dce_virtual_hw_fini(void *handle)
{
        return 0;
}
465
/* Suspend is equivalent to hw teardown for the virtual display. */
static int dce_virtual_suspend(void *handle)
{
        return dce_virtual_hw_fini(handle);
}
470
/* Resume is equivalent to hw bring-up for the virtual display. */
static int dce_virtual_resume(void *handle)
{
        return dce_virtual_hw_init(handle);
}
479
480 static bool dce_virtual_is_idle(void *handle)
481 {
482         return true;
483 }
484
/* Always idle, so there is nothing to wait for. */
static int dce_virtual_wait_for_idle(void *handle)
{
        return 0;
}
489
/* Nothing to reset. */
static int dce_virtual_soft_reset(void *handle)
{
        return 0;
}
494
495 static int dce_virtual_set_clockgating_state(void *handle,
496                                           enum amd_clockgating_state state)
497 {
498         return 0;
499 }
500
501 static int dce_virtual_set_powergating_state(void *handle,
502                                           enum amd_powergating_state state)
503 {
504         return 0;
505 }
506
/* amdgpu IP-block callbacks for the virtual display engine. */
const struct amd_ip_funcs dce_virtual_ip_funcs = {
        .name = "dce_virtual",
        .early_init = dce_virtual_early_init,
        .late_init = NULL,
        .sw_init = dce_virtual_sw_init,
        .sw_fini = dce_virtual_sw_fini,
        .hw_init = dce_virtual_hw_init,
        .hw_fini = dce_virtual_hw_fini,
        .suspend = dce_virtual_suspend,
        .resume = dce_virtual_resume,
        .is_idle = dce_virtual_is_idle,
        .wait_for_idle = dce_virtual_wait_for_idle,
        .soft_reset = dce_virtual_soft_reset,
        .set_clockgating_state = dce_virtual_set_clockgating_state,
        .set_powergating_state = dce_virtual_set_powergating_state,
};
523
/* these are handled by the primary encoders */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
        /* nothing to do */
}
529
/* Handled by the primary encoder; nothing to commit here. */
static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
        /* nothing to do */
}
534
/* Handled by the primary encoder; no mode programming needed. */
static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
                      struct drm_display_mode *mode,
                      struct drm_display_mode *adjusted_mode)
{
        /* nothing to do */
}
542
/* Handled by the primary encoder; nothing to disable. */
static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
        /* nothing to do */
}
547
/* Handled by the primary encoder; DPMS is a no-op. */
static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
        /* nothing to do */
}
553
554 static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
555                                     const struct drm_display_mode *mode,
556                                     struct drm_display_mode *adjusted_mode)
557 {
558
559         /* set the active encoder to connector routing */
560         amdgpu_encoder_set_active_device(encoder);
561
562         return true;
563 }
564
/* drm encoder helper callbacks; all stubs except mode_fixup routing. */
static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
        .dpms = dce_virtual_encoder_dpms,
        .mode_fixup = dce_virtual_encoder_mode_fixup,
        .prepare = dce_virtual_encoder_prepare,
        .mode_set = dce_virtual_encoder_mode_set,
        .commit = dce_virtual_encoder_commit,
        .disable = dce_virtual_encoder_disable,
};
573
574 static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
575 {
576         struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
577
578         kfree(amdgpu_encoder->enc_priv);
579         drm_encoder_cleanup(encoder);
580         kfree(amdgpu_encoder);
581 }
582
/* drm core encoder callbacks for the virtual encoder. */
static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
        .destroy = dce_virtual_encoder_destroy,
};
586
587 static void dce_virtual_encoder_add(struct amdgpu_device *adev,
588                                  uint32_t encoder_enum,
589                                  uint32_t supported_device,
590                                  u16 caps)
591 {
592         struct drm_device *dev = adev->ddev;
593         struct drm_encoder *encoder;
594         struct amdgpu_encoder *amdgpu_encoder;
595
596         /* see if we already added it */
597         list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
598                 amdgpu_encoder = to_amdgpu_encoder(encoder);
599                 if (amdgpu_encoder->encoder_enum == encoder_enum) {
600                         amdgpu_encoder->devices |= supported_device;
601                         return;
602                 }
603
604         }
605
606         /* add a new one */
607         amdgpu_encoder = kzalloc(sizeof(struct amdgpu_encoder), GFP_KERNEL);
608         if (!amdgpu_encoder)
609                 return;
610
611         encoder = &amdgpu_encoder->base;
612         encoder->possible_crtcs = 0x1;
613         amdgpu_encoder->enc_priv = NULL;
614         amdgpu_encoder->encoder_enum = encoder_enum;
615         amdgpu_encoder->encoder_id = (encoder_enum & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
616         amdgpu_encoder->devices = supported_device;
617         amdgpu_encoder->rmx_type = RMX_OFF;
618         amdgpu_encoder->underscan_type = UNDERSCAN_OFF;
619         amdgpu_encoder->is_ext_encoder = false;
620         amdgpu_encoder->caps = caps;
621
622         drm_encoder_init(dev, encoder, &dce_virtual_encoder_funcs,
623                                          DRM_MODE_ENCODER_VIRTUAL, NULL);
624         drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);
625         DRM_INFO("[FM]encoder: %d is VIRTUAL\n", amdgpu_encoder->encoder_id);
626 }
627
/* amdgpu display callback table; no backlight support on virtual display. */
static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
        .set_vga_render_state = &dce_virtual_set_vga_render_state,
        .bandwidth_update = &dce_virtual_bandwidth_update,
        .vblank_get_counter = &dce_virtual_vblank_get_counter,
        .vblank_wait = &dce_virtual_vblank_wait,
        .is_display_hung = &dce_virtual_is_display_hung,
        .backlight_set_level = NULL,
        .backlight_get_level = NULL,
        .hpd_sense = &dce_virtual_hpd_sense,
        .hpd_set_polarity = &dce_virtual_hpd_set_polarity,
        .hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
        .page_flip = &dce_virtual_page_flip,
        .page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
        .add_encoder = &dce_virtual_encoder_add,
        .add_connector = &amdgpu_connector_add,
        .stop_mc_access = &dce_virtual_stop_mc_access,
        .resume_mc_access = &dce_virtual_resume_mc_access,
};
646
647 static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
648 {
649         if (adev->mode_info.funcs == NULL)
650                 adev->mode_info.funcs = &dce_virtual_display_funcs;
651 }
652
/*
 * dce_virtual_vblank_timer_handle - software vblank tick for crtc 0
 *
 * hrtimer callback emulating a periodic vblank: bumps the software
 * vblank counter, delivers the drm vblank event, completes any pending
 * page flip, then re-arms the timer for the next period.
 *
 * NOTE(review): re-arming via hrtimer_start() + HRTIMER_NORESTART works
 * but accumulates handler latency each period; hrtimer_forward() with
 * HRTIMER_RESTART would give a drift-free period — confirm before changing.
 */
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
        struct amdgpu_mode_info *mode_info = container_of(vblank_timer, struct amdgpu_mode_info ,vblank_timer);
        struct amdgpu_device *adev = container_of(mode_info, struct amdgpu_device ,mode_info);
        unsigned crtc = 0;
        adev->ddev->vblank[0].count++;
        drm_handle_vblank(adev->ddev, crtc);
        dce_virtual_pageflip_irq(adev, NULL, NULL);
        hrtimer_start(vblank_timer, ktime_set(0, DCE_VIRTUAL_VBLANK_PERIOD), HRTIMER_MODE_REL);
        return HRTIMER_NORESTART;
}
664
/*
 * dce_virtual_set_crtc_vblank_interrupt_state - arm/cancel the vblank timer
 *
 * Starts the software vblank hrtimer on the first enable transition and
 * cancels it on disable, tracking the current state in
 * mode_info.vsync_timer_enabled. Only crtc 0 is emulated.
 */
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
                                                     int crtc,
                                                     enum amdgpu_interrupt_state state)
{
        if (crtc >= adev->mode_info.num_crtc) {
                DRM_DEBUG("invalid crtc %d\n", crtc);
                return;
        }

        if (state && !adev->mode_info.vsync_timer_enabled) {
                DRM_DEBUG("Enable software vsync timer\n");
                hrtimer_init(&adev->mode_info.vblank_timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
                hrtimer_set_expires(&adev->mode_info.vblank_timer, ktime_set(0, DCE_VIRTUAL_VBLANK_PERIOD));
                adev->mode_info.vblank_timer.function = dce_virtual_vblank_timer_handle;
                hrtimer_start(&adev->mode_info.vblank_timer, ktime_set(0, DCE_VIRTUAL_VBLANK_PERIOD), HRTIMER_MODE_REL);
        } else if (!state && adev->mode_info.vsync_timer_enabled) {
                DRM_DEBUG("Disable software vsync timer\n");
                hrtimer_cancel(&adev->mode_info.vblank_timer);
        }

        /* reset the software counter on any transition except
         * enable-while-already-enabled */
        if (!state || (state && !adev->mode_info.vsync_timer_enabled))
                adev->ddev->vblank[0].count = 0;
        adev->mode_info.vsync_timer_enabled = state;
        DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}
690
691
692 static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
693                                        struct amdgpu_irq_src *source,
694                                        unsigned type,
695                                        enum amdgpu_interrupt_state state)
696 {
697         switch (type) {
698         case AMDGPU_CRTC_IRQ_VBLANK1:
699                 dce_virtual_set_crtc_vblank_interrupt_state(adev, 0, state);
700                 break;
701         default:
702                 break;
703         }
704         return 0;
705 }
706
707 static void dce_virtual_crtc_vblank_int_ack(struct amdgpu_device *adev,
708                                           int crtc)
709 {
710         if (crtc >= adev->mode_info.num_crtc) {
711                 DRM_DEBUG("invalid crtc %d\n", crtc);
712                 return;
713         }
714 }
715
716 static int dce_virtual_crtc_irq(struct amdgpu_device *adev,
717                               struct amdgpu_irq_src *source,
718                               struct amdgpu_iv_entry *entry)
719 {
720         unsigned crtc = 0;
721         unsigned irq_type = AMDGPU_CRTC_IRQ_VBLANK1;
722
723         adev->ddev->vblank[crtc].count++;
724         dce_virtual_crtc_vblank_int_ack(adev, crtc);
725
726         if (amdgpu_irq_enabled(adev, source, irq_type)) {
727                 drm_handle_vblank(adev->ddev, crtc);
728         }
729         dce_virtual_pageflip_irq(adev, NULL, NULL);
730         DRM_DEBUG("IH: D%d vblank\n", crtc + 1);
731         return 0;
732 }
733
734 static int dce_virtual_set_pageflip_irq_state(struct amdgpu_device *adev,
735                                             struct amdgpu_irq_src *src,
736                                             unsigned type,
737                                             enum amdgpu_interrupt_state state)
738 {
739         if (type >= adev->mode_info.num_crtc) {
740                 DRM_ERROR("invalid pageflip crtc %d\n", type);
741                 return -EINVAL;
742         }
743         DRM_DEBUG("[FM]set pageflip irq type %d state %d\n", type, state);
744
745         return 0;
746 }
747
748 static int dce_virtual_pageflip_irq(struct amdgpu_device *adev,
749                                   struct amdgpu_irq_src *source,
750                                   struct amdgpu_iv_entry *entry)
751 {
752         unsigned long flags;
753         unsigned crtc_id = 0;
754         struct amdgpu_crtc *amdgpu_crtc;
755         struct amdgpu_flip_work *works;
756
757         crtc_id = 0;
758         amdgpu_crtc = adev->mode_info.crtcs[crtc_id];
759
760         if (crtc_id >= adev->mode_info.num_crtc) {
761                 DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
762                 return -EINVAL;
763         }
764
765         /* IRQ could occur when in initial stage */
766         if (amdgpu_crtc == NULL)
767                 return 0;
768
769         spin_lock_irqsave(&adev->ddev->event_lock, flags);
770         works = amdgpu_crtc->pflip_works;
771         if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
772                 DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
773                         "AMDGPU_FLIP_SUBMITTED(%d)\n",
774                         amdgpu_crtc->pflip_status,
775                         AMDGPU_FLIP_SUBMITTED);
776                 spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
777                 return 0;
778         }
779
780         /* page flip completed. clean up */
781         amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
782         amdgpu_crtc->pflip_works = NULL;
783
784         /* wakeup usersapce */
785         if (works->event)
786                 drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);
787
788         spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
789
790         drm_crtc_vblank_put(&amdgpu_crtc->base);
791         schedule_work(&works->unpin_work);
792
793         return 0;
794 }
795
/* irq source callbacks for the emulated crtc vblank. */
static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
        .set = dce_virtual_set_crtc_irq_state,
        .process = dce_virtual_crtc_irq,
};
800
/* irq source callbacks for page-flip completion. */
static const struct amdgpu_irq_src_funcs dce_virtual_pageflip_irq_funcs = {
        .set = dce_virtual_set_pageflip_irq_state,
        .process = dce_virtual_pageflip_irq,
};
805
806 static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
807 {
808         adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_LAST;
809         adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
810
811         adev->pageflip_irq.num_types = AMDGPU_PAGEFLIP_IRQ_LAST;
812         adev->pageflip_irq.funcs = &dce_virtual_pageflip_irq_funcs;
813 }
814