/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#include <linux/dma-mapping.h>

#include <drm/drmP.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_plane_helper.h>

#include <nvif/class.h>
#include <nvif/cl0002.h>
#include <nvif/cl5070.h>
#include <nvif/cl507a.h>
#include <nvif/cl507b.h>
#include <nvif/cl507c.h>
#include <nvif/cl507d.h>
#include <nvif/cl507e.h>
#include <nvif/event.h>

#include "nouveau_drv.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fence.h"
#include "nouveau_fbcon.h"
#include "nv50_display.h"
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
#define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20)
#define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30)
/******************************************************************************
 * Atomic state
 *****************************************************************************/
#define nv50_atom(p) container_of((p), struct nv50_atom, state)
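/* Naming convention in the atomic code below (a note added for this
 * excerpt): "asy*" variables hold the incoming, to-be-committed state and
 * "arm*" the state currently armed in hardware; e.g. asyh/armh for heads,
 * asyw/armw for window planes. */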
struct nv50_atom {
	struct drm_atomic_state state;

	struct list_head outp;
	bool lock_core;
	bool flush_disable;
};
struct nv50_outp_atom {
	struct list_head head;

	struct drm_encoder *encoder;
	bool flush_disable;

	union { struct { bool ctrl:1; }; u8 mask; } set, clr;
};
#define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)

struct nv50_head_atom {
	struct drm_crtc_state state;

	struct { u16 iW, iH, oW, oH; } view;

	struct nv50_head_mode {
		bool interlace;
		u32 clock;
		struct { u16 active, synce, blanke, blanks; } h;
		struct { u32 active; u16 synce, blanke, blanks,
			 blank2s, blank2e, blankus; } v;
	} mode;

	/* lut/core/curs/base/ovly/dither/procamp hardware state and the
	 * set/clr dirty-bit masks are elided from this excerpt */
};
static inline struct nv50_head_atom *
nv50_head_atom_get(struct drm_atomic_state *state, struct drm_crtc *crtc)
{
	struct drm_crtc_state *statec = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(statec))
		return (void *)statec;
	return nv50_head_atom(statec);
}
#define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state)

struct nv50_wndw_atom {
	struct drm_plane_state state;
	u8 interval;

	struct drm_rect clip;

	/* ntfy/sema/lut/image/point hardware state and the set/clr
	 * dirty-bit masks are elided from this excerpt */
};
/******************************************************************************
 * EVO channel
 *****************************************************************************/

struct nv50_chan {
	struct nvif_object user;
	struct nvif_device *device;
};
static int
nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_chan *chan)
{
	struct nvif_sclass *sclass;
	int ret, i, n;

	chan->device = device;

	ret = n = nvif_object_sclass_get(disp, &sclass);
	if (ret < 0)
		return ret;

	while (oclass[0]) {
		for (i = 0; i < n; i++) {
			if (sclass[i].oclass == oclass[0]) {
				ret = nvif_object_init(disp, 0, oclass[0],
						       data, size, &chan->user);
				if (ret == 0)
					nvif_object_map(&chan->user);
				nvif_object_sclass_put(&sclass);
				return ret;
			}
		}
		oclass++;
	}

	nvif_object_sclass_put(&sclass);
	return -ENOSYS;
}
static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}
/******************************************************************************
 * PIO EVO channel
 *****************************************************************************/

struct nv50_pioc {
	struct nv50_chan base;
};

static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}

static int
nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_pioc *pioc)
{
	return nv50_chan_create(device, disp, oclass, head, data, size,
				&pioc->base);
}
/******************************************************************************
 * Overlay Immediate
 *****************************************************************************/

struct nv50_oimm {
	struct nv50_pioc base;
};

static int
nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_oimm *oimm)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY,
		GF110_DISP_OVERLAY,
		GT214_DISP_OVERLAY,
		G82_DISP_OVERLAY,
		NV50_DISP_OVERLAY,
		0
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}
388 /******************************************************************************
390 *****************************************************************************/
392 struct nv50_dmac_ctxdma {
393 struct list_head head;
394 struct nvif_object object;
398 struct nv50_chan base;
402 struct nvif_object sync;
403 struct nvif_object vram;
404 struct list_head ctxdma;
406 /* Protects against concurrent pushbuf access to this channel, lock is
407 * grabbed by evo_wait (if the pushbuf reservation is successful) and
408 * dropped again by evo_kick. */
static void
nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma *ctxdma)
{
	nvif_object_fini(&ctxdma->object);
	list_del(&ctxdma->head);
	kfree(ctxdma);
}
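/* ctxdma objects are keyed by handle (0xfb000000 | memory kind) and cached
 * on the channel's ctxdma list, so framebuffers sharing a memory kind reuse
 * a single DMA object rather than allocating one per framebuffer. */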
static struct nv50_dmac_ctxdma *
nv50_dmac_ctxdma_new(struct nv50_dmac *dmac, struct nouveau_framebuffer *fb)
{
	struct nouveau_drm *drm = nouveau_drm(fb->base.dev);
	struct nv50_dmac_ctxdma *ctxdma;
	const u8    kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;
	const u32 handle = 0xfb000000 | kind;
	struct {
		struct nv_dma_v0 base;
		union {
			struct nv50_dma_v0 nv50;
			struct gf100_dma_v0 gf100;
			struct gf119_dma_v0 gf119;
		};
	} args = {};
	u32 argc = sizeof(args.base);
	int ret;

	list_for_each_entry(ctxdma, &dmac->ctxdma, head) {
		if (ctxdma->object.handle == handle)
			return ctxdma;
	}

	if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
		return ERR_PTR(-ENOMEM);
	list_add(&ctxdma->head, &dmac->ctxdma);

	args.base.target = NV_DMA_V0_TARGET_VRAM;
	args.base.access = NV_DMA_V0_ACCESS_RDWR;
	args.base.start  = 0;
	args.base.limit  = drm->client.device.info.ram_user - 1;

	if (drm->client.device.info.chipset < 0x80) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xc0) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		args.nv50.kind = kind;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xd0) {
		args.gf100.kind = kind;
		argc += sizeof(args.gf100);
	} else {
		args.gf119.page = GF119_DMA_V0_PAGE_LP;
		args.gf119.kind = kind;
		argc += sizeof(args.gf119);
	}

	ret = nvif_object_init(&dmac->base.user, handle, NV_DMA_IN_MEMORY,
			       &args, argc, &ctxdma->object);
	if (ret) {
		nv50_dmac_ctxdma_del(ctxdma);
		return ERR_PTR(ret);
	}

	return ctxdma;
}
static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
	struct nvif_device *device = dmac->base.device;
	struct nv50_dmac_ctxdma *ctxdma, *ctxtmp;

	list_for_each_entry_safe(ctxdma, ctxtmp, &dmac->ctxdma, head) {
		nv50_dmac_ctxdma_del(ctxdma);
	}

	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	if (dmac->ptr) {
		struct device *dev = nvxx_device(device)->dev;
		dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}
}
static int
nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nv50_disp_core_channel_dma_v0 *args = data;
	struct nvif_object pushbuf;
	int ret;

	mutex_init(&dmac->lock);

	dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
				       &dmac->handle, GFP_KERNEL);
	if (!dmac->ptr)
		return -ENOMEM;

	ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_PCI_US,
					.access = NV_DMA_V0_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
			       }, sizeof(struct nv_dma_v0), &pushbuf);
	if (ret)
		return ret;

	args->pushbuf = nvif_handle(&pushbuf);

	ret = nv50_chan_create(device, disp, oclass, head, data, size,
			       &dmac->base);
	nvif_object_fini(&pushbuf);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = device->info.ram_user - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);
	if (ret)
		return ret;

	INIT_LIST_HEAD(&dmac->ctxdma);
	return ret;
}
/******************************************************************************
 * Core
 *****************************************************************************/

struct nv50_mast {
	struct nv50_dmac base;
};

static int
nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
		 u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,
	};
	static const s32 oclass[] = {
		GP102_DISP_CORE_CHANNEL_DMA,
		GP100_DISP_CORE_CHANNEL_DMA,
		GM200_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
				syncbuf, &core->base);
}
/******************************************************************************
 * Base
 *****************************************************************************/

struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;
	u32 data;
};

static int
nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}
/******************************************************************************
 * Overlay
 *****************************************************************************/

struct nv50_ovly {
	struct nv50_dmac base;
};

static int
nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}
struct nv50_head {
	struct nouveau_crtc base;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass
struct nv50_disp {
	struct nvif_object *disp;
	struct nv50_mast mast;

	struct nouveau_bo *sync;
};

static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nv50_mast(d) (&nv50_disp(d)->mast)
/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
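/* The push buffer is a single coherent DMA page.  Offset 0x0000 of the
 * channel's user area is the host PUT pointer and 0x0004 the hardware GET
 * pointer: evo_wait() reserves space (emitting a jump back to the start of
 * the page when near its end) and evo_kick() publishes the new PUT. */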
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	struct nvif_device *device = dmac->base.device;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

	mutex_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		dmac->ptr[put] = 0x20000000;

		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (nvif_msec(device, 2000,
			if (!nvif_rd32(&dmac->base.user, 0x0004))
				break;
		) < 0) {
			mutex_unlock(&dmac->lock);
			pr_err("nouveau: evo channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}
static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}
#define evo_mthd(p, m, s) do {						\
	const u32 _m = (m), _s = (s);					\
	if (drm_debug & DRM_UT_KMS)					\
		pr_err("%04x %d %s\n", _m, _s, __func__);		\
	*((p)++) = ((_s << 18) | _m);					\
} while(0)

#define evo_data(p, d) do {						\
	const u32 _d = (d);						\
	if (drm_debug & DRM_UT_KMS)					\
		pr_err("\t%08x\n", _d);					\
	*((p)++) = _d;							\
} while(0)
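/* Typical submission pattern (a sketch; the count passed to evo_wait()
 * must cover every word subsequently pushed):
 *
 *	u32 *push = evo_wait(chan, 2);
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);	// method offset, word count
 *		evo_data(push, 0x00000000);	// payload word(s)
 *		evo_kick(push, chan);
 *	}
 */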
/******************************************************************************
 * Plane
 *****************************************************************************/
#define nv50_wndw(p) container_of((p), struct nv50_wndw, plane)

struct nv50_wndw {
	const struct nv50_wndw_func *func;
	struct nv50_dmac *dmac;

	struct drm_plane plane;

	struct nvif_notify notify;
	u16 ntfy;
	u16 sema;
	u32 data;
};
struct nv50_wndw_func {
	void *(*dtor)(struct nv50_wndw *);
	int (*acquire)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
		       struct nv50_head_atom *asyh);
	void (*release)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
			struct nv50_head_atom *asyh);
	void (*prepare)(struct nv50_wndw *, struct nv50_head_atom *asyh,
			struct nv50_wndw_atom *asyw);

	void (*sema_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*sema_clr)(struct nv50_wndw *);
	void (*ntfy_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*ntfy_clr)(struct nv50_wndw *);
	int (*ntfy_wait_begun)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_clr)(struct nv50_wndw *);
	void (*lut)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*point)(struct nv50_wndw *, struct nv50_wndw_atom *);

	u32 (*update)(struct nv50_wndw *, u32 interlock);
};
static int
nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	if (asyw->set.ntfy)
		return wndw->func->ntfy_wait_begun(wndw, asyw);
	return 0;
}
static u32
nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 interlock, bool flush,
		    struct nv50_wndw_atom *asyw)
{
	if (asyw->clr.sema && (!asyw->set.sema || flush))
		wndw->func->sema_clr(wndw);
	if (asyw->clr.ntfy && (!asyw->set.ntfy || flush))
		wndw->func->ntfy_clr(wndw);
	if (asyw->clr.image && (!asyw->set.image || flush))
		wndw->func->image_clr(wndw);

	return flush ? wndw->func->update(wndw, interlock) : 0;
}
static u32
nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 interlock,
		    struct nv50_wndw_atom *asyw)
{
	if (interlock) {
		asyw->image.mode = 0;
		asyw->image.interval = 1;
	}

	if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
	if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
	if (asyw->set.image) wndw->func->image_set(wndw, asyw);
	if (asyw->set.lut  ) wndw->func->lut      (wndw, asyw);
	if (asyw->set.point) wndw->func->point    (wndw, asyw);

	return wndw->func->update(wndw, interlock);
}
static void
nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
	wndw->func->release(wndw, asyw, asyh);
	asyw->ntfy.handle = 0;
	asyw->sema.handle = 0;
}
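/* The acquire path derives the hardware image state (size, memory kind,
 * tiling block layout, pitch) from the DRM framebuffer before handing off
 * to the plane type's own acquire() hook for validation. */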
static int
nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(asyw->state.fb);
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	int ret;

	NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);
	asyw->clip.x1 = 0;
	asyw->clip.y1 = 0;
	asyw->clip.x2 = asyh->state.mode.hdisplay;
	asyw->clip.y2 = asyh->state.mode.vdisplay;

	asyw->image.w = fb->base.width;
	asyw->image.h = fb->base.height;
	asyw->image.kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;

	if (asyh->state.pageflip_flags & DRM_MODE_PAGE_FLIP_ASYNC)
		asyw->interval = 0;
	else
		asyw->interval = 1;

	if (asyw->image.kind) {
		asyw->image.layout = 0;
		if (drm->client.device.info.chipset >= 0xc0)
			asyw->image.block = fb->nvbo->tile_mode >> 4;
		else
			asyw->image.block = fb->nvbo->tile_mode;
		asyw->image.pitch = (fb->base.pitches[0] / 4) << 4;
	} else {
		asyw->image.layout = 1;
		asyw->image.block  = 0;
		asyw->image.pitch  = fb->base.pitches[0];
	}

	ret = wndw->func->acquire(wndw, asyw, asyh);
	if (ret)
		return ret;

	if (asyw->set.image) {
		if (!(asyw->image.mode = asyw->interval ? 0 : 1))
			asyw->image.interval = asyw->interval;
		else
			asyw->image.interval = 0;
	}

	return 0;
}
static int
nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *harm = NULL, *asyh = NULL;
	bool varm = false, asyv = false, asym = false;
	int ret;

	NV_ATOMIC(drm, "%s atomic_check\n", plane->name);
	if (asyw->state.crtc) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);
		asym = drm_atomic_crtc_needs_modeset(&asyh->state);
		asyv = asyh->state.active;
	}

	if (armw->state.crtc) {
		harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
		if (IS_ERR(harm))
			return PTR_ERR(harm);
		varm = harm->state.crtc->state->active;
	}

	if (asyv) {
		asyw->point.x = asyw->state.crtc_x;
		asyw->point.y = asyw->state.crtc_y;
		if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
			asyw->set.point = true;

		ret = nv50_wndw_atomic_check_acquire(wndw, asyw, asyh);
		if (ret)
			return ret;
	} else
	if (varm) {
		nv50_wndw_atomic_check_release(wndw, asyw, harm);
	} else {
		return 0;
	}

	if (!asyv || asym) {
		asyw->clr.ntfy = armw->ntfy.handle != 0;
		asyw->clr.sema = armw->sema.handle != 0;
		if (wndw->func->image_clr)
			asyw->clr.image = armw->image.handle != 0;
		asyw->set.lut = wndw->func->lut && asyv;
	}

	return 0;
}
static void
nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(old_state->fb);
	struct nouveau_drm *drm = nouveau_drm(plane->dev);

	NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
	if (!old_state->fb)
		return;

	nouveau_bo_unpin(fb->nvbo);
}
static int
nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(state->fb);
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *asyh;
	struct nv50_dmac_ctxdma *ctxdma;
	int ret;

	NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, state->fb);
	if (!asyw->state.fb)
		return 0;

	ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM, true);
	if (ret)
		return ret;

	ctxdma = nv50_dmac_ctxdma_new(wndw->dmac, fb);
	if (IS_ERR(ctxdma)) {
		nouveau_bo_unpin(fb->nvbo);
		return PTR_ERR(ctxdma);
	}

	asyw->state.fence = reservation_object_get_excl_rcu(fb->nvbo->bo.resv);
	asyw->image.handle = ctxdma->object.handle;
	asyw->image.offset = fb->nvbo->bo.offset;

	if (wndw->func->prepare) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);

		wndw->func->prepare(wndw, asyh, asyw);
	}

	return 0;
}
static const struct drm_plane_helper_funcs
nv50_wndw_helper = {
	.prepare_fb = nv50_wndw_prepare_fb,
	.cleanup_fb = nv50_wndw_cleanup_fb,
	.atomic_check = nv50_wndw_atomic_check,
};
static void
nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
			       struct drm_plane_state *state)
{
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	__drm_atomic_helper_plane_destroy_state(&asyw->state);
	kfree(asyw);
}
static struct drm_plane_state *
nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
{
	struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
	struct nv50_wndw_atom *asyw;
	if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
	asyw->interval = 1;
	asyw->sema = armw->sema;
	asyw->ntfy = armw->ntfy;
	asyw->image = armw->image;
	asyw->point = armw->point;
	asyw->lut = armw->lut;
	asyw->clr.mask = 0;
	asyw->set.mask = 0;
	return &asyw->state;
}
static void
nv50_wndw_reset(struct drm_plane *plane)
{
	struct nv50_wndw_atom *asyw;

	if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
		return;

	if (plane->state)
		plane->funcs->atomic_destroy_state(plane, plane->state);
	plane->state = &asyw->state;
	plane->state->plane = plane;
	plane->state->rotation = DRM_ROTATE_0;
}
static void
nv50_wndw_destroy(struct drm_plane *plane)
{
	struct nv50_wndw *wndw = nv50_wndw(plane);
	void *data;
	nvif_notify_fini(&wndw->notify);
	data = wndw->func->dtor(wndw);
	drm_plane_cleanup(&wndw->plane);
	kfree(data);
}
static const struct drm_plane_funcs
nv50_wndw = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = nv50_wndw_destroy,
	.reset = nv50_wndw_reset,
	.set_property = drm_atomic_helper_plane_set_property,
	.atomic_duplicate_state = nv50_wndw_atomic_duplicate_state,
	.atomic_destroy_state = nv50_wndw_atomic_destroy_state,
};
static void
nv50_wndw_fini(struct nv50_wndw *wndw)
{
	nvif_notify_put(&wndw->notify);
}

static void
nv50_wndw_init(struct nv50_wndw *wndw)
{
	nvif_notify_get(&wndw->notify);
}
static int
nv50_wndw_ctor(const struct nv50_wndw_func *func, struct drm_device *dev,
	       enum drm_plane_type type, const char *name, int index,
	       struct nv50_dmac *dmac, const u32 *format, int nformat,
	       struct nv50_wndw *wndw)
{
	int ret;

	wndw->func = func;
	wndw->dmac = dmac;

	ret = drm_universal_plane_init(dev, &wndw->plane, 0, &nv50_wndw, format,
				       nformat, type, "%s-%d", name, index);
	if (ret)
		return ret;

	drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);
	return 0;
}
/******************************************************************************
 * Cursor plane
 *****************************************************************************/
#define nv50_curs(p) container_of((p), struct nv50_curs, wndw)

struct nv50_curs {
	struct nv50_wndw wndw;
	struct nvif_object chan;
};
static u32
nv50_curs_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0080, 0x00000000);
	return 0;
}

static void
nv50_curs_point(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0084, (asyw->point.y << 16) | asyw->point.x);
}

static void
nv50_curs_prepare(struct nv50_wndw *wndw, struct nv50_head_atom *asyh,
		  struct nv50_wndw_atom *asyw)
{
	asyh->curs.handle = nv50_disp(wndw->plane.dev)->mast.base.vram.handle;
	asyh->curs.offset = asyw->image.offset;
	asyh->set.curs = asyh->curs.visible;
}

static void
nv50_curs_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->curs.visible = false;
}
static int
nv50_curs_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	int ret;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   true, true);
	asyh->curs.visible = asyw->state.visible;
	if (ret || !asyh->curs.visible)
		return ret;

	switch (asyw->state.fb->width) {
	case 32: asyh->curs.layout = 0; break;
	case 64: asyh->curs.layout = 1; break;
	default:
		return -EINVAL;
	}

	if (asyw->state.fb->width != asyw->state.fb->height)
		return -EINVAL;

	switch (asyw->state.fb->format->format) {
	case DRM_FORMAT_ARGB8888: asyh->curs.format = 1; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	return 0;
}
static void *
nv50_curs_dtor(struct nv50_wndw *wndw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_object_fini(&curs->chan);
	return curs;
}

static const u32
nv50_curs_format[] = {
	DRM_FORMAT_ARGB8888,
};
static const struct nv50_wndw_func
nv50_curs = {
	.dtor = nv50_curs_dtor,
	.acquire = nv50_curs_acquire,
	.release = nv50_curs_release,
	.prepare = nv50_curs_prepare,
	.point = nv50_curs_point,
	.update = nv50_curs_update,
};
static int
nv50_curs_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_curs **pcurs)
{
	static const struct nvif_mclass curses[] = {
		{ GK104_DISP_CURSOR, 0 },
		{ GF110_DISP_CURSOR, 0 },
		{ GT214_DISP_CURSOR, 0 },
		{   G82_DISP_CURSOR, 0 },
		{  NV50_DISP_CURSOR, 0 },
		{}
	};
	struct nv50_disp_cursor_v0 args = {
		.head = head->base.index,
	};
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_curs *curs;
	int cid, ret;

	cid = nvif_mclass(disp->disp, curses);
	if (cid < 0) {
		NV_ERROR(drm, "No supported cursor immediate class\n");
		return cid;
	}

	if (!(curs = *pcurs = kzalloc(sizeof(*curs), GFP_KERNEL)))
		return -ENOMEM;

	ret = nv50_wndw_ctor(&nv50_curs, drm->dev, DRM_PLANE_TYPE_CURSOR,
			     "curs", head->base.index, &disp->mast.base,
			     nv50_curs_format, ARRAY_SIZE(nv50_curs_format),
			     &curs->wndw);
	if (ret) {
		kfree(curs);
		return ret;
	}

	ret = nvif_object_init(disp->disp, 0, curses[cid].oclass, &args,
			       sizeof(args), &curs->chan);
	if (ret) {
		NV_ERROR(drm, "curs%04x allocation failed: %d\n",
			 curses[cid].oclass, ret);
		return ret;
	}

	return 0;
}
/******************************************************************************
 * Primary plane
 *****************************************************************************/
#define nv50_base(p) container_of((p), struct nv50_base, wndw)

struct nv50_base {
	struct nv50_wndw wndw;
	struct nv50_sync chan;
	int id;
};
static int
nv50_base_notify(struct nvif_notify *notify)
{
	return NVIF_NOTIFY_KEEP;
}

static void
nv50_base_lut(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, asyw->lut.enable << 30);
		evo_kick(push, &base->chan);
	}
}
static void
nv50_base_image_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 4))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}
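/* The ISO surface methods moved and changed field packing across display
 * generations, hence the three oclass-gated encodings below. */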
static void
nv50_base_image_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	const s32 oclass = base->chan.base.base.user.oclass;
	u32 *push;
	if ((push = evo_wait(&base->chan, 10))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, (asyw->image.mode << 8) |
			       (asyw->image.interval << 4));
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, asyw->image.handle);
		if (oclass < G82_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, (asyw->image.kind << 16) |
				       (asyw->image.format << 8));
		} else
		if (oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, asyw->image.format << 8);
		} else {
			evo_mthd(push, 0x0400, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 24) |
					asyw->image.pitch |
					asyw->image.block);
			evo_data(push, asyw->image.format << 8);
		}
		evo_kick(push, &base->chan);
	}
}
static void
nv50_base_ntfy_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x00a4, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_ntfy_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 3))) {
		evo_mthd(push, 0x00a0, 2);
		evo_data(push, (asyw->ntfy.awaken << 30) | asyw->ntfy.offset);
		evo_data(push, asyw->ntfy.handle);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_sema_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_sema_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 5))) {
		evo_mthd(push, 0x0088, 4);
		evo_data(push, asyw->sema.offset);
		evo_data(push, asyw->sema.acquire);
		evo_data(push, asyw->sema.release);
		evo_data(push, asyw->sema.handle);
		evo_kick(push, &base->chan);
	}
}
static u32
nv50_base_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;

	if (!(push = evo_wait(&base->chan, 2)))
		return 0;
	evo_mthd(push, 0x0080, 1);
	evo_data(push, interlock);
	evo_kick(push, &base->chan);

	if (base->chan.base.base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA)
		return interlock ? 2 << (base->id * 8) : 0;
	return interlock ? 2 << (base->id * 4) : 0;
}
static int
nv50_base_ntfy_wait_begun(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	if (nvif_msec(&drm->client.device, 2000ULL,
		u32 data = nouveau_bo_rd32(disp->sync, asyw->ntfy.offset / 4);
		if ((data & 0xc0000000) == 0x40000000)
			break;
		usleep_range(1, 2);
	) < 0)
		return -ETIMEDOUT;
	return 0;
}

static void
nv50_base_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->base.cpp = 0;
}
static int
nv50_base_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	const struct drm_framebuffer *fb = asyw->state.fb;
	int ret;

	if (!fb->format->depth)
		return -EINVAL;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   false, true);
	if (ret)
		return ret;

	asyh->base.depth = fb->format->depth;
	asyh->base.cpp = fb->format->cpp[0];
	asyh->base.x = asyw->state.src.x1 >> 16;
	asyh->base.y = asyw->state.src.y1 >> 16;
	asyh->base.w = asyw->state.fb->width;
	asyh->base.h = asyw->state.fb->height;

	switch (fb->format->format) {
	case DRM_FORMAT_C8         : asyw->image.format = 0x1e; break;
	case DRM_FORMAT_RGB565     : asyw->image.format = 0xe8; break;
	case DRM_FORMAT_XRGB1555   :
	case DRM_FORMAT_ARGB1555   : asyw->image.format = 0xe9; break;
	case DRM_FORMAT_XRGB8888   :
	case DRM_FORMAT_ARGB8888   : asyw->image.format = 0xcf; break;
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010: asyw->image.format = 0xd1; break;
	case DRM_FORMAT_XBGR8888   :
	case DRM_FORMAT_ABGR8888   : asyw->image.format = 0xd5; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	asyw->lut.enable = 1;
	asyw->set.image = true;
	return 0;
}
static void *
nv50_base_dtor(struct nv50_wndw *wndw)
{
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	struct nv50_base *base = nv50_base(wndw);
	nv50_dmac_destroy(&base->chan.base, disp->disp);
	return base;
}

static const u32
nv50_base_format[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ABGR8888,
};
static const struct nv50_wndw_func
nv50_base = {
	.dtor = nv50_base_dtor,
	.acquire = nv50_base_acquire,
	.release = nv50_base_release,
	.sema_set = nv50_base_sema_set,
	.sema_clr = nv50_base_sema_clr,
	.ntfy_set = nv50_base_ntfy_set,
	.ntfy_clr = nv50_base_ntfy_clr,
	.ntfy_wait_begun = nv50_base_ntfy_wait_begun,
	.image_set = nv50_base_image_set,
	.image_clr = nv50_base_image_clr,
	.lut = nv50_base_lut,
	.update = nv50_base_update,
};
static int
nv50_base_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_base **pbase)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_base *base;
	int ret;

	if (!(base = *pbase = kzalloc(sizeof(*base), GFP_KERNEL)))
		return -ENOMEM;
	base->id = head->base.index;
	base->wndw.ntfy = EVO_FLIP_NTFY0(base->id);
	base->wndw.sema = EVO_FLIP_SEM0(base->id);
	base->wndw.data = 0x00000000;

	ret = nv50_wndw_ctor(&nv50_base, drm->dev, DRM_PLANE_TYPE_PRIMARY,
			     "base", base->id, &base->chan.base,
			     nv50_base_format, ARRAY_SIZE(nv50_base_format),
			     &base->wndw);
	if (ret) {
		kfree(base);
		return ret;
	}

	ret = nv50_base_create(&drm->client.device, disp->disp, base->id,
			       disp->sync->bo.offset, &base->chan);
	if (ret)
		return ret;

	return nvif_notify_init(&base->chan.base.base.user, nv50_base_notify,
				false,
				NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT,
				&(struct nvif_notify_uevent_req) {},
				sizeof(struct nvif_notify_uevent_req),
				sizeof(struct nvif_notify_uevent_rep),
				&base->wndw.notify);
}
/******************************************************************************
 * Head
 *****************************************************************************/
static void
nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1);
		else
			evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1);
		evo_data(push, (asyh->procamp.sat.sin << 20) |
			       (asyh->procamp.sat.cos << 8));
		evo_kick(push, core);
	}
}
static void
nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1);
		else
		if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1);
		else
			evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1);
		evo_data(push, (asyh->dither.mode << 3) |
			       (asyh->dither.bits << 1) |
			        asyh->dither.enable);
		evo_kick(push, core);
	}
}
static void
nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0904 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}
static void
nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		case 1: bounds |= 0x00000000; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0900 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}
static void
nv50_head_curs_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 5))) {
		if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, asyh->curs.handle);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, asyh->curs.handle);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_core_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, core);
	}
}
static void
nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 9))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.kind << 16 |
				       asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
			/* EVO will complain with INVALID_STATE if we have an
			 * active cursor and (re)specify HeadSetContextDmaIso
			 * without also updating HeadSetOffsetCursor.
			 */
			asyh->set.curs = asyh->curs.visible;
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		} else {
			evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 24 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_lut_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 7))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, asyh->lut.handle);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, asyh->lut.handle);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	struct nv50_head_mode *m = &asyh->mode;
	u32 *push;
	if ((push = evo_wait(core, 14))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
			evo_data(push, 0x00800000 | m->clock);
			evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
			evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_data(push, asyh->mode.v.blankus);
			evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
			evo_data(push, 0x00000000); /* ??? */
			evo_data(push, 0xffffff00);
			evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
			evo_data(push, m->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, m->clock * 1000);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 10))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		} else {
			evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y)
{
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_lut_clr(head);
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_core_clr(head);
	if (asyh->clr.curs && (!asyh->set.curs || y))
		nv50_head_curs_clr(head);
}
static void
nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	if (asyh->set.view   ) nv50_head_view    (head, asyh);
	if (asyh->set.mode   ) nv50_head_mode    (head, asyh);
	if (asyh->set.core   ) nv50_head_lut_set (head, asyh);
	if (asyh->set.core   ) nv50_head_core_set(head, asyh);
	if (asyh->set.curs   ) nv50_head_curs_set(head, asyh);
	if (asyh->set.base   ) nv50_head_base    (head, asyh);
	if (asyh->set.ovly   ) nv50_head_ovly    (head, asyh);
	if (asyh->set.dither ) nv50_head_dither  (head, asyh);
	if (asyh->set.procamp) nv50_head_procamp (head, asyh);
}
static void
nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
			       struct nv50_head_atom *asyh,
			       struct nouveau_conn_atom *asyc)
{
	const int vib = asyc->procamp.color_vibrance - 100;
	const int hue = asyc->procamp.vibrant_hue - 90;
	const int adj = (vib > 0) ? 50 : 0;
	asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
	asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
	asyh->set.procamp = true;
}
static void
nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
			      struct nv50_head_atom *asyh,
			      struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	u32 mode = 0x00;

	if (asyc->dither.mode == DITHERING_MODE_AUTO) {
		if (asyh->base.depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = asyc->dither.mode;
	}

	if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= asyc->dither.depth;
	}

	asyh->dither.enable = mode;
	asyh->dither.bits = mode >> 1;
	asyh->dither.mode = mode >> 3;
	asyh->set.dither = true;
}
static void
nv50_head_atomic_check_view(struct nv50_head_atom *armh,
			    struct nv50_head_atom *asyh,
			    struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	struct drm_display_mode *omode = &asyh->state.adjusted_mode;
	struct drm_display_mode *umode = &asyh->state.mode;
	int mode = asyc->scaler.mode;
	struct edid *edid;

	if (connector->edid_blob_ptr)
		edid = (struct edid *)connector->edid_blob_ptr->data;
	else
		edid = NULL;

	if (!asyc->scaler.full) {
		if (mode == DRM_MODE_SCALE_NONE)
			omode = umode;
	} else {
		/* Non-EDID LVDS/eDP mode. */
		mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	asyh->view.iW = umode->hdisplay;
	asyh->view.iH = umode->vdisplay;
	asyh->view.oW = omode->hdisplay;
	asyh->view.oH = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		asyh->view.oH *= 2;

	/* Add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
	    (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
	     drm_detect_hdmi_monitor(edid)))) {
		u32 bX = asyc->scaler.underscan.hborder;
		u32 bY = asyc->scaler.underscan.vborder;
		u32 r = (asyh->view.oH << 19) / asyh->view.oW;

		if (bX) {
			asyh->view.oW -= (bX * 2);
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		} else {
			asyh->view.oW -= (asyh->view.oW >> 4) + 32;
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
	}

	/* Handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation.
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
		asyh->view.oH = min((u16)umode->vdisplay, asyh->view.oH);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (asyh->view.oH < asyh->view.oW) {
			u32 r = (asyh->view.iW << 19) / asyh->view.iH;
			asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
		} else {
			u32 r = (asyh->view.iH << 19) / asyh->view.iW;
			asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	asyh->set.view = true;
}
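/* Translate DRM mode timings into the hardware's active/sync/blanking
 * encoding.  Vertical values are scaled for doublescan/interlace, and the
 * blanking period is additionally expressed in microseconds (blankus). */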
static void
nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	u32 ilace   = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan   = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hbackp  =  mode->htotal - mode->hsync_end;
	u32 vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	u32 hfrontp =  mode->hsync_start - mode->hdisplay;
	u32 vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	u32 blankus;
	struct nv50_head_mode *m = &asyh->mode;

	m->h.active = mode->htotal;
	m->h.synce  = mode->hsync_end - mode->hsync_start - 1;
	m->h.blanke = m->h.synce + hbackp;
	m->h.blanks = mode->htotal - hfrontp - 1;

	m->v.active = mode->vtotal * vscan / ilace;
	m->v.synce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	m->v.blanke = m->v.synce + vbackp;
	m->v.blanks = m->v.active - vfrontp - 1;

	/*XXX: Safe underestimate, even "0" works */
	blankus = (m->v.active - mode->vdisplay - 2) * m->h.active;
	blankus *= 1000;
	blankus /= mode->clock;
	m->v.blankus = blankus;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		m->v.blank2e =  m->v.active + m->v.synce + vbackp;
		m->v.blank2s =  m->v.blank2e + (mode->vdisplay * vscan / ilace);
		m->v.active  = (m->v.active * 2) + 1;
		m->interlace = true;
	} else {
		m->v.blank2e = 0;
		m->v.blank2s = 1;
		m->interlace = false;
	}
	m->clock = mode->clock;

	drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V);
	asyh->set.mode = true;
}
static int
nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	struct nouveau_conn_atom *asyc = NULL;
	struct drm_connector_state *conns;
	struct drm_connector *conn;
	int i;

	NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
	if (asyh->state.active) {
		for_each_connector_in_state(asyh->state.state, conn, conns, i) {
			if (conns->crtc == crtc) {
				asyc = nouveau_conn_atom(conns);
				break;
			}
		}

		if (armh->state.active) {
			if (asyc) {
				if (asyh->state.mode_changed)
					asyc->set.scaler = true;
				if (armh->base.depth != asyh->base.depth)
					asyc->set.dither = true;
			}
		} else {
			if (asyc)
				asyc->set.mask = ~0;
			asyh->set.mask = ~0;
		}

		if (asyh->state.mode_changed)
			nv50_head_atomic_check_mode(head, asyh);

		if (asyc) {
			if (asyc->set.scaler)
				nv50_head_atomic_check_view(armh, asyh, asyc);
			if (asyc->set.dither)
				nv50_head_atomic_check_dither(armh, asyh, asyc);
			if (asyc->set.procamp)
				nv50_head_atomic_check_procamp(armh, asyh, asyc);
		}

		if ((asyh->core.visible = (asyh->base.cpp != 0))) {
			asyh->core.x = asyh->base.x;
			asyh->core.y = asyh->base.y;
			asyh->core.w = asyh->base.w;
			asyh->core.h = asyh->base.h;
		} else
		if ((asyh->core.visible = asyh->curs.visible)) {
			/*XXX: We need to either find some way of having the
			 *     primary base layer appear black, while still
			 *     being able to display the other layers, or we
			 *     need to allocate a dummy black surface here.
			 */
			asyh->core.x = 0;
			asyh->core.y = 0;
			asyh->core.w = asyh->state.mode.hdisplay;
			asyh->core.h = asyh->state.mode.vdisplay;
		}
		asyh->core.handle = disp->mast.base.vram.handle;
		asyh->core.offset = 0;
		asyh->core.format = 0xcf;
		asyh->core.kind = 0;
		asyh->core.layout = 1;
		asyh->core.block = 0;
		asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
		asyh->lut.handle = disp->mast.base.vram.handle;
		asyh->lut.offset = head->base.lut.nvbo->bo.offset;
		asyh->set.base = armh->base.cpp != asyh->base.cpp;
		asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
	} else {
		asyh->core.visible = false;
		asyh->curs.visible = false;
		asyh->set.base = false;
		asyh->set.ovly = false;
	}

	if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
		if (asyh->core.visible) {
			if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
				asyh->set.core = true;
		} else
		if (armh->core.visible) {
			asyh->clr.core = true;
		}

		if (asyh->curs.visible) {
			if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
				asyh->set.curs = true;
		} else
		if (armh->curs.visible) {
			asyh->clr.curs = true;
		}
	} else {
		asyh->clr.core = armh->core.visible;
		asyh->clr.curs = armh->curs.visible;
		asyh->set.core = asyh->core.visible;
		asyh->set.curs = asyh->curs.visible;
	}

	if (asyh->clr.mask || asyh->set.mask)
		nv50_atom(asyh->state.state)->lock_core = true;
	return 0;
}
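/* The gamma LUT is written straight into the mapped LUT buffer object;
 * pre-GF110 display expects 8-byte entries, later hardware 32-byte entries
 * with a +0x6000 bias on each 14-bit component, hence the two layouts. */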
static void
nv50_head_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		u16 r = nv_crtc->lut.r[i] >> 2;
		u16 g = nv_crtc->lut.g[i] >> 2;
		u16 b = nv_crtc->lut.b[i] >> 2;

		if (disp->disp->oclass < GF110_DISP) {
			writew(r + 0x0000, lut + (i * 0x08) + 0);
			writew(g + 0x0000, lut + (i * 0x08) + 2);
			writew(b + 0x0000, lut + (i * 0x08) + 4);
		} else {
			writew(r + 0x6000, lut + (i * 0x20) + 0);
			writew(g + 0x6000, lut + (i * 0x20) + 2);
			writew(b + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}
static int
nv50_head_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	WARN_ON(1);
	return 0;
}

static const struct drm_crtc_helper_funcs
nv50_head_help = {
	.mode_set_base_atomic = nv50_head_mode_set_base_atomic,
	.load_lut = nv50_head_lut_load,
	.atomic_check = nv50_head_atomic_check,
};
static int
nv50_head_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 i;

	for (i = 0; i < size; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nv50_head_lut_load(crtc);
	return 0;
}
static void
nv50_head_atomic_destroy_state(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	__drm_atomic_helper_crtc_destroy_state(&asyh->state);
	kfree(asyh);
}
static struct drm_crtc_state *
nv50_head_atomic_duplicate_state(struct drm_crtc *crtc)
{
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh;
	if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state);
	asyh->view = armh->view;
	asyh->mode = armh->mode;
	asyh->lut = armh->lut;
	asyh->core = armh->core;
	asyh->curs = armh->curs;
	asyh->base = armh->base;
	asyh->ovly = armh->ovly;
	asyh->dither = armh->dither;
	asyh->procamp = armh->procamp;
	asyh->clr.mask = 0;
	asyh->set.mask = 0;
	return &asyh->state;
}
static void
__drm_atomic_helper_crtc_reset(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	if (crtc->state)
		crtc->funcs->atomic_destroy_state(crtc, crtc->state);
	crtc->state = state;
	crtc->state->crtc = crtc;
}

static void
nv50_head_reset(struct drm_crtc *crtc)
{
	struct nv50_head_atom *asyh;

	if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL))))
		return;

	__drm_atomic_helper_crtc_reset(crtc, &asyh->state);
}
static void
nv50_head_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_funcs
nv50_head_func = {
	.reset = nv50_head_reset,
	.gamma_set = nv50_head_gamma_set,
	.destroy = nv50_head_destroy,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.set_property = drm_atomic_helper_crtc_set_property,
	.atomic_duplicate_state = nv50_head_atomic_duplicate_state,
	.atomic_destroy_state = nv50_head_atomic_destroy_state,
};
2327 nv50_head_create(struct drm_device *dev, int index)
2329 struct nouveau_drm *drm = nouveau_drm(dev);
2330 struct nvif_device *device = &drm->client.device;
2331 struct nv50_disp *disp = nv50_disp(dev);
2332 struct nv50_head *head;
2333 struct nv50_base *base;
2334 struct nv50_curs *curs;
2335 struct drm_crtc *crtc;
2338 head = kzalloc(sizeof(*head), GFP_KERNEL);
2339 if (!head)
2340 return -ENOMEM;
2342 head->base.index = index;
2343 for (i = 0; i < 256; i++) {
2344 head->base.lut.r[i] = i << 8;
2345 head->base.lut.g[i] = i << 8;
2346 head->base.lut.b[i] = i << 8;
2349 ret = nv50_base_new(drm, head, &base);
2350 if (ret == 0)
2351 ret = nv50_curs_new(drm, head, &curs);
2352 if (ret) {
2353 kfree(head);
2354 return ret;
2355 }
2357 crtc = &head->base.base;
2358 drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane,
2359 &curs->wndw.plane, &nv50_head_func,
2360 "head-%d", head->base.index);
2361 drm_crtc_helper_add(crtc, &nv50_head_help);
2362 drm_mode_crtc_set_gamma_size(crtc, 256);
2364 ret = nouveau_bo_new(&drm->client, 8192, 0x100, TTM_PL_FLAG_VRAM,
2365 0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
2366 if (!ret) {
2367 ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
2368 if (!ret) {
2369 ret = nouveau_bo_map(head->base.lut.nvbo);
2370 if (ret)
2371 nouveau_bo_unpin(head->base.lut.nvbo);
2372 }
2373 if (ret)
2374 nouveau_bo_ref(NULL, &head->base.lut.nvbo);
2375 }
2380 /* allocate overlay resources */
2381 ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
2385 ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
2392 nv50_head_destroy(crtc);
2393 return ret;
2394 }
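/* Sizing note for the LUT buffer allocated above: 8192 bytes is exactly
 * 256 entries at the 0x20 stride used by GF110+ cores (the pre-GF110
 * 8-byte layout needs only a quarter of it), which is presumably why a
 * single allocation serves both paths. The gamma size registered with
 * DRM (256) matches the loop bounds in nv50_head_lut_load().
 */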
2396 /******************************************************************************
2397 * Output path helpers
2398 *****************************************************************************/
2400 nv50_outp_atomic_check_view(struct drm_encoder *encoder,
2401 struct drm_crtc_state *crtc_state,
2402 struct drm_connector_state *conn_state,
2403 struct drm_display_mode *native_mode)
2405 struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
2406 struct drm_display_mode *mode = &crtc_state->mode;
2407 struct drm_connector *connector = conn_state->connector;
2408 struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
2409 struct nouveau_drm *drm = nouveau_drm(encoder->dev);
2411 NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
2412 asyc->scaler.full = false;
2416 if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
2417 switch (connector->connector_type) {
2418 case DRM_MODE_CONNECTOR_LVDS:
2419 case DRM_MODE_CONNECTOR_eDP:
2420 /* Force use of scaler for non-EDID modes. */
2421 if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
2422 break;
2424 asyc->scaler.full = true;
2433 if (!drm_mode_equal(adjusted_mode, mode)) {
2434 drm_mode_copy(adjusted_mode, mode);
2435 crtc_state->mode_changed = true;
2442 nv50_outp_atomic_check(struct drm_encoder *encoder,
2443 struct drm_crtc_state *crtc_state,
2444 struct drm_connector_state *conn_state)
2446 struct nouveau_connector *nv_connector =
2447 nouveau_connector(conn_state->connector);
2448 return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
2449 nv_connector->native_mode);
2452 /******************************************************************************
2453 * DAC
2454 *****************************************************************************/
2456 nv50_dac_dpms(struct drm_encoder *encoder, int mode)
2458 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2459 struct nv50_disp *disp = nv50_disp(encoder->dev);
2461 struct nv50_disp_mthd_v1 base;
2462 struct nv50_disp_dac_pwr_v0 pwr;
2465 .base.method = NV50_DISP_MTHD_V1_DAC_PWR,
2466 .base.hasht = nv_encoder->dcb->hasht,
2467 .base.hashm = nv_encoder->dcb->hashm,
2470 .pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
2471 mode != DRM_MODE_DPMS_OFF),
2472 .pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
2473 mode != DRM_MODE_DPMS_OFF),
2476 nvif_mthd(disp->disp, 0, &args, sizeof(args));
2477 }
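/* The stack-allocated args + nvif_mthd() shape above is the pattern for
 * every output method in this file: nv50_disp_mthd_v1 names the method
 * and identifies the output by its DCB hasht/hashm, and the payload
 * (here nv50_disp_dac_pwr_v0) rides immediately behind it. A hedged
 * sketch of a minimal "power on" call, with illustrative values only:
 *
 *   struct {
 *       struct nv50_disp_mthd_v1 base;
 *       struct nv50_disp_dac_pwr_v0 pwr;
 *   } args = {
 *       .base.version = 1,
 *       .base.method  = NV50_DISP_MTHD_V1_DAC_PWR,
 *       .base.hasht   = nv_encoder->dcb->hasht,
 *       .base.hashm   = nv_encoder->dcb->hashm,
 *       .pwr.state    = 1,
 *   };
 *   nvif_mthd(disp->disp, 0, &args, sizeof(args));
 */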
2480 nv50_dac_disable(struct drm_encoder *encoder)
2482 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2483 struct nv50_mast *mast = nv50_mast(encoder->dev);
2484 const int or = nv_encoder->or;
2487 if (nv_encoder->crtc) {
2488 push = evo_wait(mast, 4);
2490 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2491 evo_mthd(push, 0x0400 + (or * 0x080), 1);
2492 evo_data(push, 0x00000000);
2493 } else {
2494 evo_mthd(push, 0x0180 + (or * 0x020), 1);
2495 evo_data(push, 0x00000000);
2497 evo_kick(push, mast);
2501 nv_encoder->crtc = NULL;
2505 nv50_dac_enable(struct drm_encoder *encoder)
2507 struct nv50_mast *mast = nv50_mast(encoder->dev);
2508 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2509 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2510 struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
2513 push = evo_wait(mast, 8);
2515 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2516 u32 syncs = 0x00000000;
2518 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2519 syncs |= 0x00000001;
2520 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2521 syncs |= 0x00000002;
2523 evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
2524 evo_data(push, 1 << nv_crtc->index);
2525 evo_data(push, syncs);
2526 } else {
2527 u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
2528 u32 syncs = 0x00000001;
2530 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2531 syncs |= 0x00000008;
2532 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2533 syncs |= 0x00000010;
2535 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2536 magic |= 0x00000001;
2538 evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
2539 evo_data(push, syncs);
2540 evo_data(push, magic);
2541 evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
2542 evo_data(push, 1 << nv_crtc->index);
2545 evo_kick(push, mast);
2548 nv_encoder->crtc = encoder->crtc;
2551 static enum drm_connector_status
2552 nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
2554 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2555 struct nv50_disp *disp = nv50_disp(encoder->dev);
2557 struct nv50_disp_mthd_v1 base;
2558 struct nv50_disp_dac_load_v0 load;
2561 .base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
2562 .base.hasht = nv_encoder->dcb->hasht,
2563 .base.hashm = nv_encoder->dcb->hashm,
2567 args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
2568 if (args.load.data == 0)
2569 args.load.data = 340;
2571 ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
2572 if (ret || !args.load.load)
2573 return connector_status_disconnected;
2575 return connector_status_connected;
2576 }
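/* DAC load detection above works by driving a test voltage out of the
 * DAC (the VBIOS dactestval when present, 340 as the fallback used
 * here) and asking the method whether a load was sensed; an error or a
 * zero load both read back as "disconnected".
 */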
2578 static const struct drm_encoder_helper_funcs
2579 nv50_dac_help = {
2580 .dpms = nv50_dac_dpms,
2581 .atomic_check = nv50_outp_atomic_check,
2582 .enable = nv50_dac_enable,
2583 .disable = nv50_dac_disable,
2584 .detect = nv50_dac_detect
2588 nv50_dac_destroy(struct drm_encoder *encoder)
2590 drm_encoder_cleanup(encoder);
2591 kfree(encoder);
2592 }
2594 static const struct drm_encoder_funcs
2595 nv50_dac_func = {
2596 .destroy = nv50_dac_destroy,
2600 nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
2602 struct nouveau_drm *drm = nouveau_drm(connector->dev);
2603 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
2604 struct nvkm_i2c_bus *bus;
2605 struct nouveau_encoder *nv_encoder;
2606 struct drm_encoder *encoder;
2607 int type = DRM_MODE_ENCODER_DAC;
2609 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2610 if (!nv_encoder)
2611 return -ENOMEM;
2612 nv_encoder->dcb = dcbe;
2613 nv_encoder->or = ffs(dcbe->or) - 1;
2615 bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
2616 if (bus)
2617 nv_encoder->i2c = &bus->i2c;
2619 encoder = to_drm_encoder(nv_encoder);
2620 encoder->possible_crtcs = dcbe->heads;
2621 encoder->possible_clones = 0;
2622 drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
2623 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
2624 drm_encoder_helper_add(encoder, &nv50_dac_help);
2626 drm_mode_connector_attach_encoder(connector, encoder);
2630 /******************************************************************************
2631 * Audio
2632 *****************************************************************************/
2634 nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
2636 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2637 struct nv50_disp *disp = nv50_disp(encoder->dev);
2639 struct nv50_disp_mthd_v1 base;
2640 struct nv50_disp_sor_hda_eld_v0 eld;
2643 .base.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
2644 .base.hasht = nv_encoder->dcb->hasht,
2645 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
2646 (0x0100 << nv_crtc->index),
2649 nvif_mthd(disp->disp, 0, &args, sizeof(args));
2653 nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
2655 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2656 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2657 struct nouveau_connector *nv_connector;
2658 struct nv50_disp *disp = nv50_disp(encoder->dev);
2661 struct nv50_disp_mthd_v1 mthd;
2662 struct nv50_disp_sor_hda_eld_v0 eld;
2664 u8 data[sizeof(nv_connector->base.eld)];
2666 .base.mthd.version = 1,
2667 .base.mthd.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
2668 .base.mthd.hasht = nv_encoder->dcb->hasht,
2669 .base.mthd.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
2670 (0x0100 << nv_crtc->index),
2673 nv_connector = nouveau_encoder_connector_get(nv_encoder);
2674 if (!drm_detect_monitor_audio(nv_connector->edid))
2677 drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
2678 memcpy(args.data, nv_connector->base.eld, sizeof(args.data));
2680 nvif_mthd(disp->disp, 0, &args,
2681 sizeof(args.base) + drm_eld_size(args.data));
2684 /******************************************************************************
2685 * HDMI
2686 *****************************************************************************/
2688 nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
2690 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2691 struct nv50_disp *disp = nv50_disp(encoder->dev);
2693 struct nv50_disp_mthd_v1 base;
2694 struct nv50_disp_sor_hdmi_pwr_v0 pwr;
2697 .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
2698 .base.hasht = nv_encoder->dcb->hasht,
2699 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
2700 (0x0100 << nv_crtc->index),
2703 nvif_mthd(disp->disp, 0, &args, sizeof(args));
2707 nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
2709 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2710 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2711 struct nv50_disp *disp = nv50_disp(encoder->dev);
2713 struct nv50_disp_mthd_v1 base;
2714 struct nv50_disp_sor_hdmi_pwr_v0 pwr;
2717 .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
2718 .base.hasht = nv_encoder->dcb->hasht,
2719 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
2720 (0x0100 << nv_crtc->index),
2722 .pwr.rekey = 56, /* constant used by both the binary driver and tegra */
2724 struct nouveau_connector *nv_connector;
2727 nv_connector = nouveau_encoder_connector_get(nv_encoder);
2728 if (!drm_detect_hdmi_monitor(nv_connector->edid))
2731 max_ac_packet = mode->htotal - mode->hdisplay;
2732 max_ac_packet -= args.pwr.rekey;
2733 max_ac_packet -= 18; /* constant from tegra */
2734 args.pwr.max_ac_packet = max_ac_packet / 32;
2736 nvif_mthd(disp->disp, 0, &args, sizeof(args));
2737 nv50_audio_enable(encoder, mode);
2738 }
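/* Worked example for the max_ac_packet math above (illustrative only):
 * a CEA 1080p timing with htotal=2200 and hdisplay=1920 leaves 280
 * pixels of blanking; 280 - 56 (rekey) - 18 = 206, and 206 / 32 = 6
 * after the integer division, so pwr.max_ac_packet would be 6.
 */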
2740 /******************************************************************************
2741 * MST
2742 *****************************************************************************/
2743 #define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
2744 #define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
2745 #define nv50_msto(p) container_of((p), struct nv50_msto, encoder)
2748 struct nouveau_encoder *outp;
2750 struct drm_dp_mst_topology_mgr mgr;
2751 struct nv50_msto *msto[4];
2757 struct nv50_mstm *mstm;
2758 struct drm_dp_mst_port *port;
2759 struct drm_connector connector;
2761 struct drm_display_mode *native;
2768 struct drm_encoder encoder;
2770 struct nv50_head *head;
2771 struct nv50_mstc *mstc;
2775 static struct drm_dp_payload *
2776 nv50_msto_payload(struct nv50_msto *msto)
2778 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
2779 struct nv50_mstc *mstc = msto->mstc;
2780 struct nv50_mstm *mstm = mstc->mstm;
2781 int vcpi = mstc->port->vcpi.vcpi, i;
2783 NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
2784 for (i = 0; i < mstm->mgr.max_payloads; i++) {
2785 struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
2786 NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
2787 mstm->outp->base.base.name, i, payload->vcpi,
2788 payload->start_slot, payload->num_slots);
2791 for (i = 0; i < mstm->mgr.max_payloads; i++) {
2792 struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
2793 if (payload->vcpi == vcpi)
2794 return payload;
2801 nv50_msto_cleanup(struct nv50_msto *msto)
2803 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
2804 struct nv50_mstc *mstc = msto->mstc;
2805 struct nv50_mstm *mstm = mstc->mstm;
2807 NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
2808 if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
2809 drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
2810 if (msto->disabled) {
2811 msto->mstc = NULL;
2812 msto->head = NULL;
2813 msto->disabled = false;
2814 }
2818 nv50_msto_prepare(struct nv50_msto *msto)
2820 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
2821 struct nv50_mstc *mstc = msto->mstc;
2822 struct nv50_mstm *mstm = mstc->mstm;
2824 struct nv50_disp_mthd_v1 base;
2825 struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
2828 .base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
2829 .base.hasht = mstm->outp->dcb->hasht,
2830 .base.hashm = (0xf0ff & mstm->outp->dcb->hashm) |
2831 (0x0100 << msto->head->base.index),
2834 NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
2835 if (mstc->port && mstc->port->vcpi.vcpi > 0) {
2836 struct drm_dp_payload *payload = nv50_msto_payload(msto);
2837 if (payload) {
2838 args.vcpi.start_slot = payload->start_slot;
2839 args.vcpi.num_slots = payload->num_slots;
2840 args.vcpi.pbn = mstc->port->vcpi.pbn;
2841 args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
2845 NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
2846 msto->encoder.name, msto->head->base.base.name,
2847 args.vcpi.start_slot, args.vcpi.num_slots,
2848 args.vcpi.pbn, args.vcpi.aligned_pbn);
2849 nvif_mthd(&drm->display->disp, 0, &args, sizeof(args));
2850 }
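/* The VCPI numbers pushed above come straight from the DP MST helpers:
 * drm_dp_mst_allocate_vcpi() (called from nv50_msto_enable()) reserves
 * the timeslots, nv50_msto_payload() locates the matching payload, and
 * the SOR_DP_MST_VCPI method tells the hardware which slots this head's
 * stream occupies. The zeroed args.vcpi taken on the no-port/no-VCPI
 * path tears the mapping down instead.
 */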
2853 nv50_msto_atomic_check(struct drm_encoder *encoder,
2854 struct drm_crtc_state *crtc_state,
2855 struct drm_connector_state *conn_state)
2857 struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
2858 struct nv50_mstm *mstm = mstc->mstm;
2859 int bpp = conn_state->connector->display_info.bpc * 3;
2862 mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);
2864 slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
2868 return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
2869 mstc->native);
2870 }
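/* Bandwidth sanity note for the check above: drm_dp_calc_pbn_mode()
 * converts pixel clock and bpp into Payload Bandwidth Number units
 * (roughly clock_kHz * bytes-per-pixel scaled by 64/54, plus a small
 * margin), and drm_dp_find_vcpi_slots() turns that into a timeslot
 * count; a negative slot count is how an unroutable mode would fail
 * the atomic check.
 */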
2873 nv50_msto_enable(struct drm_encoder *encoder)
2875 struct nv50_head *head = nv50_head(encoder->crtc);
2876 struct nv50_msto *msto = nv50_msto(encoder);
2877 struct nv50_mstc *mstc = NULL;
2878 struct nv50_mstm *mstm = NULL;
2879 struct drm_connector *connector;
2884 drm_for_each_connector(connector, encoder->dev) {
2885 if (connector->state->best_encoder == &msto->encoder) {
2886 mstc = nv50_mstc(connector);
2887 mstm = mstc->mstm;
2888 break;
2895 slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
2896 r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, slots);
2899 if (mstm->outp->dcb->sorconf.link & 1)
2904 switch (mstc->connector.display_info.bpc) {
2905 case 6: depth = 0x2; break;
2906 case 8: depth = 0x5; break;
2907 case 10:
2908 default: depth = 0x6; break;
2911 mstm->outp->update(mstm->outp, head->base.index,
2912 &head->base.base.state->adjusted_mode, proto, depth);
2914 msto->head = head;
2915 msto->mstc = mstc;
2916 mstm->modified = true;
2920 nv50_msto_disable(struct drm_encoder *encoder)
2922 struct nv50_msto *msto = nv50_msto(encoder);
2923 struct nv50_mstc *mstc = msto->mstc;
2924 struct nv50_mstm *mstm = mstc->mstm;
2926 if (mstc->port)
2927 drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);
2929 mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
2930 mstm->modified = true;
2931 msto->disabled = true;
2934 static const struct drm_encoder_helper_funcs
2935 nv50_msto_help = {
2936 .disable = nv50_msto_disable,
2937 .enable = nv50_msto_enable,
2938 .atomic_check = nv50_msto_atomic_check,
2942 nv50_msto_destroy(struct drm_encoder *encoder)
2944 struct nv50_msto *msto = nv50_msto(encoder);
2945 drm_encoder_cleanup(&msto->encoder);
2946 kfree(msto);
2947 }
2949 static const struct drm_encoder_funcs
2950 nv50_msto = {
2951 .destroy = nv50_msto_destroy,
2955 nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
2956 struct nv50_msto **pmsto)
2958 struct nv50_msto *msto;
2961 if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
2962 return -ENOMEM;
2964 ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
2965 DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
2972 drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
2973 msto->encoder.possible_crtcs = heads;
2977 static struct drm_encoder *
2978 nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
2979 struct drm_connector_state *connector_state)
2981 struct nv50_head *head = nv50_head(connector_state->crtc);
2982 struct nv50_mstc *mstc = nv50_mstc(connector);
2984 struct nv50_mstm *mstm = mstc->mstm;
2985 return &mstm->msto[head->base.index]->encoder;
2990 static struct drm_encoder *
2991 nv50_mstc_best_encoder(struct drm_connector *connector)
2993 struct nv50_mstc *mstc = nv50_mstc(connector);
2995 struct nv50_mstm *mstm = mstc->mstm;
2996 return &mstm->msto[0]->encoder;
3001 static enum drm_mode_status
3002 nv50_mstc_mode_valid(struct drm_connector *connector,
3003 struct drm_display_mode *mode)
3009 nv50_mstc_get_modes(struct drm_connector *connector)
3011 struct nv50_mstc *mstc = nv50_mstc(connector);
3014 mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
3015 drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid);
3017 ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
3018 drm_edid_to_eld(&mstc->connector, mstc->edid);
3021 if (!mstc->connector.display_info.bpc)
3022 mstc->connector.display_info.bpc = 8;
3025 drm_mode_destroy(mstc->connector.dev, mstc->native);
3026 mstc->native = nouveau_conn_native_mode(&mstc->connector);
3030 static const struct drm_connector_helper_funcs
3031 nv50_mstc_help = {
3032 .get_modes = nv50_mstc_get_modes,
3033 .mode_valid = nv50_mstc_mode_valid,
3034 .best_encoder = nv50_mstc_best_encoder,
3035 .atomic_best_encoder = nv50_mstc_atomic_best_encoder,
3038 static enum drm_connector_status
3039 nv50_mstc_detect(struct drm_connector *connector, bool force)
3041 struct nv50_mstc *mstc = nv50_mstc(connector);
3042 if (!mstc->port)
3043 return connector_status_disconnected;
3044 return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
3048 nv50_mstc_destroy(struct drm_connector *connector)
3050 struct nv50_mstc *mstc = nv50_mstc(connector);
3051 drm_connector_cleanup(&mstc->connector);
3052 kfree(mstc);
3053 }
3055 static const struct drm_connector_funcs
3056 nv50_mstc = {
3057 .dpms = drm_atomic_helper_connector_dpms,
3058 .reset = nouveau_conn_reset,
3059 .detect = nv50_mstc_detect,
3060 .fill_modes = drm_helper_probe_single_connector_modes,
3061 .set_property = drm_atomic_helper_connector_set_property,
3062 .destroy = nv50_mstc_destroy,
3063 .atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
3064 .atomic_destroy_state = nouveau_conn_atomic_destroy_state,
3065 .atomic_set_property = nouveau_conn_atomic_set_property,
3066 .atomic_get_property = nouveau_conn_atomic_get_property,
3070 nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
3071 const char *path, struct nv50_mstc **pmstc)
3073 struct drm_device *dev = mstm->outp->base.base.dev;
3074 struct nv50_mstc *mstc;
3077 if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
3078 return -ENOMEM;
3079 mstc->mstm = mstm;
3080 mstc->port = port;
3082 ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
3083 DRM_MODE_CONNECTOR_DisplayPort);
3090 drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);
3092 mstc->connector.funcs->reset(&mstc->connector);
3093 nouveau_conn_attach_properties(&mstc->connector);
3095 for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto[i]; i++)
3096 drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);
3098 drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
3099 drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
3100 drm_mode_connector_set_path_property(&mstc->connector, path);
3105 nv50_mstm_cleanup(struct nv50_mstm *mstm)
3107 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
3108 struct drm_encoder *encoder;
3111 NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
3112 ret = drm_dp_check_act_status(&mstm->mgr);
3114 ret = drm_dp_update_payload_part2(&mstm->mgr);
3116 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
3117 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
3118 struct nv50_msto *msto = nv50_msto(encoder);
3119 struct nv50_mstc *mstc = msto->mstc;
3120 if (mstc && mstc->mstm == mstm)
3121 nv50_msto_cleanup(msto);
3125 mstm->modified = false;
3129 nv50_mstm_prepare(struct nv50_mstm *mstm)
3131 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
3132 struct drm_encoder *encoder;
3135 NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
3136 ret = drm_dp_update_payload_part1(&mstm->mgr);
3138 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
3139 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
3140 struct nv50_msto *msto = nv50_msto(encoder);
3141 struct nv50_mstc *mstc = msto->mstc;
3142 if (mstc && mstc->mstm == mstm)
3143 nv50_msto_prepare(msto);
3149 nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
3151 struct nv50_mstm *mstm = nv50_mstm(mgr);
3152 drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
3156 nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
3157 struct drm_connector *connector)
3159 struct nouveau_drm *drm = nouveau_drm(connector->dev);
3160 struct nv50_mstc *mstc = nv50_mstc(connector);
3162 drm_connector_unregister(&mstc->connector);
3164 drm_modeset_lock_all(drm->dev);
3165 drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);
3167 drm_modeset_unlock_all(drm->dev);
3169 drm_connector_unreference(&mstc->connector);
3173 nv50_mstm_register_connector(struct drm_connector *connector)
3175 struct nouveau_drm *drm = nouveau_drm(connector->dev);
3177 drm_modeset_lock_all(drm->dev);
3178 drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);
3179 drm_modeset_unlock_all(drm->dev);
3181 drm_connector_register(connector);
3184 static struct drm_connector *
3185 nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
3186 struct drm_dp_mst_port *port, const char *path)
3188 struct nv50_mstm *mstm = nv50_mstm(mgr);
3189 struct nv50_mstc *mstc;
3192 ret = nv50_mstc_new(mstm, port, path, &mstc);
3195 mstc->connector.funcs->destroy(&mstc->connector);
3199 return &mstc->connector;
3202 static const struct drm_dp_mst_topology_cbs
3203 nv50_mstm = {
3204 .add_connector = nv50_mstm_add_connector,
3205 .register_connector = nv50_mstm_register_connector,
3206 .destroy_connector = nv50_mstm_destroy_connector,
3207 .hotplug = nv50_mstm_hotplug,
3211 nv50_mstm_service(struct nv50_mstm *mstm)
3213 struct drm_dp_aux *aux = mstm->mgr.aux;
3214 bool handled = true;
3219 ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
3221 drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
3225 drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
3229 drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
3234 nv50_mstm_remove(struct nv50_mstm *mstm)
3237 drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
3241 nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
3243 struct nouveau_encoder *outp = mstm->outp;
3245 struct nv50_disp_mthd_v1 base;
3246 struct nv50_disp_sor_dp_mst_link_v0 mst;
3249 .base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
3250 .base.hasht = outp->dcb->hasht,
3251 .base.hashm = outp->dcb->hashm,
3254 struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
3255 struct nvif_object *disp = &drm->display->disp;
3259 ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
3267 ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
3272 return nvif_mthd(disp, 0, &args, sizeof(args));
3276 nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
3283 if (dpcd[0] >= 0x12) {
3284 ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
3288 if (!(dpcd[1] & DP_MST_CAP))
3294 ret = nv50_mstm_enable(mstm, dpcd[0], state);
3298 ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
3300 return nv50_mstm_enable(mstm, dpcd[0], 0);
3302 return mstm->mgr.mst_state;
3306 nv50_mstm_fini(struct nv50_mstm *mstm)
3308 if (mstm && mstm->mgr.mst_state)
3309 drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
3313 nv50_mstm_init(struct nv50_mstm *mstm)
3315 if (mstm && mstm->mgr.mst_state)
3316 drm_dp_mst_topology_mgr_resume(&mstm->mgr);
3320 nv50_mstm_del(struct nv50_mstm **pmstm)
3322 struct nv50_mstm *mstm = *pmstm;
3330 nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
3331 int conn_base_id, struct nv50_mstm **pmstm)
3333 const int max_payloads = hweight8(outp->dcb->heads);
3334 struct drm_device *dev = outp->base.base.dev;
3335 struct nv50_mstm *mstm;
3339 /* This is a workaround for some monitors not functioning
3340 * correctly in MST mode on initial module load. I think
3341 * some bad interaction with the VBIOS may be responsible.
3342 *
3343 * A good ol' off and on again seems to work here ;)
3344 */
3345 ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
3346 if (ret >= 0 && dpcd >= 0x12)
3347 drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);
3349 if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
3350 return -ENOMEM;
3351 mstm->outp = outp;
3352 mstm->mgr.cbs = &nv50_mstm;
3354 ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
3355 max_payloads, conn_base_id);
3359 for (i = 0; i < max_payloads; i++) {
3360 ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
3369 /******************************************************************************
3370 * SOR
3371 *****************************************************************************/
3373 nv50_sor_dpms(struct drm_encoder *encoder, int mode)
3375 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3376 struct nv50_disp *disp = nv50_disp(encoder->dev);
3378 struct nv50_disp_mthd_v1 base;
3379 struct nv50_disp_sor_pwr_v0 pwr;
3382 .base.method = NV50_DISP_MTHD_V1_SOR_PWR,
3383 .base.hasht = nv_encoder->dcb->hasht,
3384 .base.hashm = nv_encoder->dcb->hashm,
3385 .pwr.state = mode == DRM_MODE_DPMS_ON,
3388 nvif_mthd(disp->disp, 0, &args, sizeof(args));
3392 nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
3393 struct drm_display_mode *mode, u8 proto, u8 depth)
3395 struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base;
3398 if (!mode) {
3399 nv_encoder->ctrl &= ~BIT(head);
3400 if (!(nv_encoder->ctrl & 0x0000000f))
3401 nv_encoder->ctrl = 0;
3402 } else {
3403 nv_encoder->ctrl |= proto << 8;
3404 nv_encoder->ctrl |= BIT(head);
3405 }
3407 if ((push = evo_wait(core, 6))) {
3408 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
3410 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
3411 nv_encoder->ctrl |= 0x00001000;
3412 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
3413 nv_encoder->ctrl |= 0x00002000;
3414 nv_encoder->ctrl |= depth << 16;
3416 evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
3417 evo_data(push, nv_encoder->ctrl);
3418 } else {
3419 u32 magic = 0x31ec6000 | (head << 25);
3420 u32 syncs = 0x00000001;
3421 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
3422 syncs |= 0x00000008;
3423 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
3424 syncs |= 0x00000010;
3425 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
3426 magic |= 0x00000001;
3428 evo_mthd(push, 0x0404 + (head * 0x300), 2);
3429 evo_data(push, syncs | (depth << 6));
3430 evo_data(push, magic);
3432 evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
3434 evo_data(push, nv_encoder->ctrl);
3435 evo_kick(push, core);
3436 }
3437 }
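/* Layout of nv_encoder->ctrl as assembled above, inferred from the bit
 * manipulation rather than from documentation: bits 0-3 form a mask of
 * owning heads and bits 8+ carry the protocol. Pre-GF110 cores also
 * keep the sync polarities (0x1000/0x2000) and depth (<< 16) in this
 * word; GF110+ moves syncs and depth into the per-head 0x0404 method
 * and leaves only owner + protocol in the 0x0200 SOR control.
 */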
3440 nv50_sor_disable(struct drm_encoder *encoder)
3442 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3443 struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
3445 nv_encoder->crtc = NULL;
3448 struct nvkm_i2c_aux *aux = nv_encoder->aux;
3452 int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
3454 pwr &= ~DP_SET_POWER_MASK;
3455 pwr |= DP_SET_POWER_D3;
3456 nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
3460 nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
3461 nv50_audio_disable(encoder, nv_crtc);
3462 nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
3467 nv50_sor_enable(struct drm_encoder *encoder)
3469 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3470 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
3471 struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
3473 struct nv50_disp_mthd_v1 base;
3474 struct nv50_disp_sor_lvds_script_v0 lvds;
3477 .base.method = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
3478 .base.hasht = nv_encoder->dcb->hasht,
3479 .base.hashm = nv_encoder->dcb->hashm,
3481 struct nv50_disp *disp = nv50_disp(encoder->dev);
3482 struct drm_device *dev = encoder->dev;
3483 struct nouveau_drm *drm = nouveau_drm(dev);
3484 struct nouveau_connector *nv_connector;
3485 struct nvbios *bios = &drm->vbios;
3489 nv_connector = nouveau_encoder_connector_get(nv_encoder);
3490 nv_encoder->crtc = encoder->crtc;
3492 switch (nv_encoder->dcb->type) {
3493 case DCB_OUTPUT_TMDS:
3494 if (nv_encoder->dcb->sorconf.link & 1) {
3496 /* Only enable dual-link if:
3497 * - Need to (i.e. rate > 165MHz)
3498 * - DCB says we can
3499 * - Not an HDMI monitor, since there's no dual-link
3500 * on HDMI.
3501 */
3502 if (mode->clock >= 165000 &&
3503 nv_encoder->dcb->duallink_possible &&
3504 !drm_detect_hdmi_monitor(nv_connector->edid))
3510 nv50_hdmi_enable(&nv_encoder->base.base, mode);
3512 case DCB_OUTPUT_LVDS:
3515 if (bios->fp_no_ddc) {
3516 if (bios->fp.dual_link)
3517 lvds.lvds.script |= 0x0100;
3518 if (bios->fp.if_is_24bit)
3519 lvds.lvds.script |= 0x0200;
3521 if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
3522 if (((u8 *)nv_connector->edid)[121] == 2)
3523 lvds.lvds.script |= 0x0100;
3525 if (mode->clock >= bios->fp.duallink_transition_clk) {
3526 lvds.lvds.script |= 0x0100;
3529 if (lvds.lvds.script & 0x0100) {
3530 if (bios->fp.strapless_is_24bit & 2)
3531 lvds.lvds.script |= 0x0200;
3533 if (bios->fp.strapless_is_24bit & 1)
3534 lvds.lvds.script |= 0x0200;
3537 if (nv_connector->base.display_info.bpc == 8)
3538 lvds.lvds.script |= 0x0200;
3541 nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
3544 if (nv_connector->base.display_info.bpc == 6)
3547 if (nv_connector->base.display_info.bpc == 8)
3552 if (nv_encoder->dcb->sorconf.link & 1)
3557 nv50_audio_enable(encoder, mode);
3564 nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth);
3567 static const struct drm_encoder_helper_funcs
3568 nv50_sor_help = {
3569 .dpms = nv50_sor_dpms,
3570 .atomic_check = nv50_outp_atomic_check,
3571 .enable = nv50_sor_enable,
3572 .disable = nv50_sor_disable,
3576 nv50_sor_destroy(struct drm_encoder *encoder)
3578 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3579 nv50_mstm_del(&nv_encoder->dp.mstm);
3580 drm_encoder_cleanup(encoder);
3581 kfree(encoder);
3582 }
3584 static const struct drm_encoder_funcs
3585 nv50_sor_func = {
3586 .destroy = nv50_sor_destroy,
3590 nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
3592 struct nouveau_connector *nv_connector = nouveau_connector(connector);
3593 struct nouveau_drm *drm = nouveau_drm(connector->dev);
3594 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
3595 struct nouveau_encoder *nv_encoder;
3596 struct drm_encoder *encoder;
3599 switch (dcbe->type) {
3600 case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
3601 case DCB_OUTPUT_TMDS:
3602 case DCB_OUTPUT_DP:
3603 default:
3604 type = DRM_MODE_ENCODER_TMDS;
3608 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
3609 if (!nv_encoder)
3610 return -ENOMEM;
3611 nv_encoder->dcb = dcbe;
3612 nv_encoder->or = ffs(dcbe->or) - 1;
3613 nv_encoder->update = nv50_sor_update;
3615 encoder = to_drm_encoder(nv_encoder);
3616 encoder->possible_crtcs = dcbe->heads;
3617 encoder->possible_clones = 0;
3618 drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
3619 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
3620 drm_encoder_helper_add(encoder, &nv50_sor_help);
3622 drm_mode_connector_attach_encoder(connector, encoder);
3624 if (dcbe->type == DCB_OUTPUT_DP) {
3625 struct nvkm_i2c_aux *aux =
3626 nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
3627 if (aux) {
3628 nv_encoder->i2c = &nv_connector->aux.ddc;
3629 nv_encoder->aux = aux;
3632 /* TODO: Use DP Info Table to check for support. */
3633 if (nv50_disp(encoder->dev)->disp->oclass >= GF110_DISP) {
3634 ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
3635 nv_connector->base.base.id,
3636 &nv_encoder->dp.mstm);
3641 struct nvkm_i2c_bus *bus =
3642 nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
3643 if (bus)
3644 nv_encoder->i2c = &bus->i2c;
3650 /******************************************************************************
3651 * PIOR
3652 *****************************************************************************/
3654 nv50_pior_dpms(struct drm_encoder *encoder, int mode)
3656 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3657 struct nv50_disp *disp = nv50_disp(encoder->dev);
3659 struct nv50_disp_mthd_v1 base;
3660 struct nv50_disp_pior_pwr_v0 pwr;
3663 .base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
3664 .base.hasht = nv_encoder->dcb->hasht,
3665 .base.hashm = nv_encoder->dcb->hashm,
3666 .pwr.state = mode == DRM_MODE_DPMS_ON,
3667 .pwr.type = nv_encoder->dcb->type,
3670 nvif_mthd(disp->disp, 0, &args, sizeof(args));
3674 nv50_pior_atomic_check(struct drm_encoder *encoder,
3675 struct drm_crtc_state *crtc_state,
3676 struct drm_connector_state *conn_state)
3678 int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
3679 if (ret)
3680 return ret;
3681 crtc_state->adjusted_mode.clock *= 2;
3682 return 0;
3683 }
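/* The only difference from the common nv50_outp_atomic_check() is the
 * clock doubling above: the external (PIOR) encoder appears to be fed
 * at twice the pixel rate, so the adjusted mode is scaled here before
 * later stages program the link.
 */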
3686 nv50_pior_disable(struct drm_encoder *encoder)
3688 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3689 struct nv50_mast *mast = nv50_mast(encoder->dev);
3690 const int or = nv_encoder->or;
3693 if (nv_encoder->crtc) {
3694 push = evo_wait(mast, 4);
3696 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
3697 evo_mthd(push, 0x0700 + (or * 0x040), 1);
3698 evo_data(push, 0x00000000);
3700 evo_kick(push, mast);
3704 nv_encoder->crtc = NULL;
3708 nv50_pior_enable(struct drm_encoder *encoder)
3710 struct nv50_mast *mast = nv50_mast(encoder->dev);
3711 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3712 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
3713 struct nouveau_connector *nv_connector;
3714 struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
3715 u8 owner = 1 << nv_crtc->index;
3719 nv_connector = nouveau_encoder_connector_get(nv_encoder);
3720 switch (nv_connector->base.display_info.bpc) {
3721 case 10: depth = 0x6; break;
3722 case 8: depth = 0x5; break;
3723 case 6: depth = 0x2; break;
3724 default: depth = 0x0; break;
3727 switch (nv_encoder->dcb->type) {
3728 case DCB_OUTPUT_TMDS:
3737 push = evo_wait(mast, 8);
3739 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
3740 u32 ctrl = (depth << 16) | (proto << 8) | owner;
3741 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
3742 ctrl |= 0x00000001;
3743 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
3744 ctrl |= 0x00000002;
3745 evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
3746 evo_data(push, ctrl);
3749 evo_kick(push, mast);
3752 nv_encoder->crtc = encoder->crtc;
3755 static const struct drm_encoder_helper_funcs
3756 nv50_pior_help = {
3757 .dpms = nv50_pior_dpms,
3758 .atomic_check = nv50_pior_atomic_check,
3759 .enable = nv50_pior_enable,
3760 .disable = nv50_pior_disable,
3764 nv50_pior_destroy(struct drm_encoder *encoder)
3766 drm_encoder_cleanup(encoder);
3767 kfree(encoder);
3768 }
3770 static const struct drm_encoder_funcs
3771 nv50_pior_func = {
3772 .destroy = nv50_pior_destroy,
3776 nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
3778 struct nouveau_connector *nv_connector = nouveau_connector(connector);
3779 struct nouveau_drm *drm = nouveau_drm(connector->dev);
3780 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
3781 struct nvkm_i2c_bus *bus = NULL;
3782 struct nvkm_i2c_aux *aux = NULL;
3783 struct i2c_adapter *ddc;
3784 struct nouveau_encoder *nv_encoder;
3785 struct drm_encoder *encoder;
3788 switch (dcbe->type) {
3789 case DCB_OUTPUT_TMDS:
3790 bus = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
3791 ddc = bus ? &bus->i2c : NULL;
3792 type = DRM_MODE_ENCODER_TMDS;
3793 break;
3794 case DCB_OUTPUT_DP:
3795 aux = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
3796 ddc = aux ? &nv_connector->aux.ddc : NULL;
3797 type = DRM_MODE_ENCODER_TMDS;
3803 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
3804 if (!nv_encoder)
3805 return -ENOMEM;
3806 nv_encoder->dcb = dcbe;
3807 nv_encoder->or = ffs(dcbe->or) - 1;
3808 nv_encoder->i2c = ddc;
3809 nv_encoder->aux = aux;
3811 encoder = to_drm_encoder(nv_encoder);
3812 encoder->possible_crtcs = dcbe->heads;
3813 encoder->possible_clones = 0;
3814 drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
3815 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
3816 drm_encoder_helper_add(encoder, &nv50_pior_help);
3818 drm_mode_connector_attach_encoder(connector, encoder);
3822 /******************************************************************************
3823 * Atomic
3824 *****************************************************************************/
3827 nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock)
3829 struct nv50_disp *disp = nv50_disp(drm->dev);
3830 struct nv50_dmac *core = &disp->mast.base;
3831 struct nv50_mstm *mstm;
3832 struct drm_encoder *encoder;
3835 NV_ATOMIC(drm, "commit core %08x\n", interlock);
3837 drm_for_each_encoder(encoder, drm->dev) {
3838 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
3839 mstm = nouveau_encoder(encoder)->dp.mstm;
3840 if (mstm && mstm->modified)
3841 nv50_mstm_prepare(mstm);
3845 if ((push = evo_wait(core, 5))) {
3846 evo_mthd(push, 0x0084, 1);
3847 evo_data(push, 0x80000000);
3848 evo_mthd(push, 0x0080, 2);
3849 evo_data(push, interlock);
3850 evo_data(push, 0x00000000);
3851 nouveau_bo_wr32(disp->sync, 0, 0x00000000);
3852 evo_kick(push, core);
3853 if (nvif_msec(&drm->client.device, 2000ULL,
3854 if (nouveau_bo_rd32(disp->sync, 0))
3858 NV_ERROR(drm, "EVO timeout\n");
3861 drm_for_each_encoder(encoder, drm->dev) {
3862 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
3863 mstm = nouveau_encoder(encoder)->dp.mstm;
3864 if (mstm && mstm->modified)
3865 nv50_mstm_cleanup(mstm);
3866 }
3867 }
3868 }
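/* EVO core flush recipe used above, for reference: method 0x0084 arms
 * the completion notifier, 0x0080 fires UPDATE with the interlock mask
 * (so updates queued in the satellite channels latch together with the
 * core), and completion is detected by polling word 0 of the shared
 * sync buffer, cleared just before the kick, for up to 2000ms before
 * declaring an EVO timeout. MST payload bookkeeping brackets the flush:
 * prepare on modified topologies before, cleanup after.
 */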
3871 nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
3873 struct drm_device *dev = state->dev;
3874 struct drm_crtc_state *crtc_state;
3875 struct drm_crtc *crtc;
3876 struct drm_plane_state *plane_state;
3877 struct drm_plane *plane;
3878 struct nouveau_drm *drm = nouveau_drm(dev);
3879 struct nv50_disp *disp = nv50_disp(dev);
3880 struct nv50_atom *atom = nv50_atom(state);
3881 struct nv50_outp_atom *outp, *outt;
3882 u32 interlock_core = 0;
3883 u32 interlock_chan = 0;
3886 NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
3887 drm_atomic_helper_wait_for_fences(dev, state, false);
3888 drm_atomic_helper_wait_for_dependencies(state);
3889 drm_atomic_helper_update_legacy_modeset_state(dev, state);
3891 if (atom->lock_core)
3892 mutex_lock(&disp->mutex);
3894 /* Disable head(s). */
3895 for_each_crtc_in_state(state, crtc, crtc_state, i) {
3896 struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
3897 struct nv50_head *head = nv50_head(crtc);
3899 NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
3900 asyh->clr.mask, asyh->set.mask);
3902 if (asyh->clr.mask) {
3903 nv50_head_flush_clr(head, asyh, atom->flush_disable);
3904 interlock_core |= 1;
3908 /* Disable plane(s). */
3909 for_each_plane_in_state(state, plane, plane_state, i) {
3910 struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
3911 struct nv50_wndw *wndw = nv50_wndw(plane);
3913 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
3914 asyw->clr.mask, asyw->set.mask);
3915 if (!asyw->clr.mask)
3918 interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core,
3919 atom->flush_disable,
3923 /* Disable output path(s). */
3924 list_for_each_entry(outp, &atom->outp, head) {
3925 const struct drm_encoder_helper_funcs *help;
3926 struct drm_encoder *encoder;
3928 encoder = outp->encoder;
3929 help = encoder->helper_private;
3931 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
3932 outp->clr.mask, outp->set.mask);
3934 if (outp->clr.mask) {
3935 help->disable(encoder);
3936 interlock_core |= 1;
3937 if (outp->flush_disable) {
3938 nv50_disp_atomic_commit_core(drm, interlock_chan);
3945 /* Flush disable. */
3946 if (interlock_core) {
3947 if (atom->flush_disable) {
3948 nv50_disp_atomic_commit_core(drm, interlock_chan);
3954 /* Update output path(s). */
3955 list_for_each_entry_safe(outp, outt, &atom->outp, head) {
3956 const struct drm_encoder_helper_funcs *help;
3957 struct drm_encoder *encoder;
3959 encoder = outp->encoder;
3960 help = encoder->helper_private;
3962 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
3963 outp->set.mask, outp->clr.mask);
3965 if (outp->set.mask) {
3966 help->enable(encoder);
3970 list_del(&outp->head);
3974 /* Update head(s). */
3975 for_each_crtc_in_state(state, crtc, crtc_state, i) {
3976 struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
3977 struct nv50_head *head = nv50_head(crtc);
3979 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
3980 asyh->set.mask, asyh->clr.mask);
3982 if (asyh->set.mask) {
3983 nv50_head_flush_set(head, asyh);
3988 for_each_crtc_in_state(state, crtc, crtc_state, i) {
3989 if (crtc->state->event)
3990 drm_crtc_vblank_get(crtc);
3993 /* Update plane(s). */
3994 for_each_plane_in_state(state, plane, plane_state, i) {
3995 struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
3996 struct nv50_wndw *wndw = nv50_wndw(plane);
3998 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
3999 asyw->set.mask, asyw->clr.mask);
4000 if (!asyw->set.mask &&
4001 (!asyw->clr.mask || atom->flush_disable))
4002 continue;
4004 interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw);
4008 if (interlock_core) {
4009 if (!interlock_chan && atom->state.legacy_cursor_update) {
4010 u32 *push = evo_wait(&disp->mast, 2);
4012 evo_mthd(push, 0x0080, 1);
4013 evo_data(push, 0x00000000);
4014 evo_kick(push, &disp->mast);
4017 nv50_disp_atomic_commit_core(drm, interlock_chan);
4021 if (atom->lock_core)
4022 mutex_unlock(&disp->mutex);
4024 /* Wait for HW to signal completion. */
4025 for_each_plane_in_state(state, plane, plane_state, i) {
4026 struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
4027 struct nv50_wndw *wndw = nv50_wndw(plane);
4028 int ret = nv50_wndw_wait_armed(wndw, asyw);
4029 if (ret)
4030 NV_ERROR(drm, "%s: timeout\n", plane->name);
4033 for_each_crtc_in_state(state, crtc, crtc_state, i) {
4034 if (crtc->state->event) {
4035 unsigned long flags;
4036 /* Get correct count/ts if racing with vblank irq */
4037 drm_accurate_vblank_count(crtc);
4038 spin_lock_irqsave(&crtc->dev->event_lock, flags);
4039 drm_crtc_send_vblank_event(crtc, crtc->state->event);
4040 spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
4041 crtc->state->event = NULL;
4042 drm_crtc_vblank_put(crtc);
4046 drm_atomic_helper_commit_hw_done(state);
4047 drm_atomic_helper_cleanup_planes(dev, state);
4048 drm_atomic_helper_commit_cleanup_done(state);
4049 drm_atomic_state_put(state);
4053 nv50_disp_atomic_commit_work(struct work_struct *work)
4055 struct drm_atomic_state *state =
4056 container_of(work, typeof(*state), commit_work);
4057 nv50_disp_atomic_commit_tail(state);
4061 nv50_disp_atomic_commit(struct drm_device *dev,
4062 struct drm_atomic_state *state, bool nonblock)
4064 struct nouveau_drm *drm = nouveau_drm(dev);
4065 struct nv50_disp *disp = nv50_disp(dev);
4066 struct drm_plane_state *plane_state;
4067 struct drm_plane *plane;
4068 struct drm_crtc *crtc;
4069 bool active = false;
4072 ret = pm_runtime_get_sync(dev->dev);
4073 if (ret < 0 && ret != -EACCES)
4076 ret = drm_atomic_helper_setup_commit(state, nonblock);
4080 INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);
4082 ret = drm_atomic_helper_prepare_planes(dev, state);
4087 ret = drm_atomic_helper_wait_for_fences(dev, state, true);
4092 for_each_plane_in_state(state, plane, plane_state, i) {
4093 struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane_state);
4094 struct nv50_wndw *wndw = nv50_wndw(plane);
4095 if (asyw->set.image) {
4096 asyw->ntfy.handle = wndw->dmac->sync.handle;
4097 asyw->ntfy.offset = wndw->ntfy;
4098 asyw->ntfy.awaken = false;
4099 asyw->set.ntfy = true;
4100 nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000);
4105 drm_atomic_helper_swap_state(state, true);
4106 drm_atomic_state_get(state);
4109 queue_work(system_unbound_wq, &state->commit_work);
4111 nv50_disp_atomic_commit_tail(state);
4113 drm_for_each_crtc(crtc, dev) {
4114 if (crtc->state->enable) {
4115 if (!drm->have_disp_power_ref) {
4116 drm->have_disp_power_ref = true;
4124 if (!active && drm->have_disp_power_ref) {
4125 pm_runtime_put_autosuspend(dev->dev);
4126 drm->have_disp_power_ref = false;
4130 pm_runtime_put_autosuspend(dev->dev);
4134 static struct nv50_outp_atom *
4135 nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
4137 struct nv50_outp_atom *outp;
4139 list_for_each_entry(outp, &atom->outp, head) {
4140 if (outp->encoder == encoder)
4141 return outp;
4142 }
4144 outp = kzalloc(sizeof(*outp), GFP_KERNEL);
4145 if (!outp)
4146 return ERR_PTR(-ENOMEM);
4148 list_add(&outp->head, &atom->outp);
4149 outp->encoder = encoder;
4154 nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
4155 struct drm_connector *connector)
4157 struct drm_encoder *encoder = connector->state->best_encoder;
4158 struct drm_crtc_state *crtc_state;
4159 struct drm_crtc *crtc;
4160 struct nv50_outp_atom *outp;
4162 if (!(crtc = connector->state->crtc))
4165 crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
4166 if (crtc->state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
4167 outp = nv50_disp_outp_atomic_add(atom, encoder);
4169 return PTR_ERR(outp);
4171 if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
4172 outp->flush_disable = true;
4173 atom->flush_disable = true;
4175 outp->clr.ctrl = true;
4176 atom->lock_core = true;
4183 nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
4184 struct drm_connector_state *connector_state)
4186 struct drm_encoder *encoder = connector_state->best_encoder;
4187 struct drm_crtc_state *crtc_state;
4188 struct drm_crtc *crtc;
4189 struct nv50_outp_atom *outp;
4191 if (!(crtc = connector_state->crtc))
4194 crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
4195 if (crtc_state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
4196 outp = nv50_disp_outp_atomic_add(atom, encoder);
4198 return PTR_ERR(outp);
4200 outp->set.ctrl = true;
4201 atom->lock_core = true;
4208 nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
4210 struct nv50_atom *atom = nv50_atom(state);
4211 struct drm_connector_state *connector_state;
4212 struct drm_connector *connector;
4215 ret = drm_atomic_helper_check(dev, state);
4219 for_each_connector_in_state(state, connector, connector_state, i) {
4220 ret = nv50_disp_outp_atomic_check_clr(atom, connector);
4224 ret = nv50_disp_outp_atomic_check_set(atom, connector_state);
4233 nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
4235 struct nv50_atom *atom = nv50_atom(state);
4236 struct nv50_outp_atom *outp, *outt;
4238 list_for_each_entry_safe(outp, outt, &atom->outp, head) {
4239 list_del(&outp->head);
4240 kfree(outp);
4241 }
4243 drm_atomic_state_default_clear(state);
4247 nv50_disp_atomic_state_free(struct drm_atomic_state *state)
4249 struct nv50_atom *atom = nv50_atom(state);
4250 drm_atomic_state_default_release(&atom->state);
4251 kfree(atom);
4252 }
4254 static struct drm_atomic_state *
4255 nv50_disp_atomic_state_alloc(struct drm_device *dev)
4257 struct nv50_atom *atom;
4258 if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
4259 drm_atomic_state_init(dev, &atom->state) < 0) {
4260 kfree(atom);
4261 return NULL;
4262 }
4263 INIT_LIST_HEAD(&atom->outp);
4264 return &atom->state;
4267 static const struct drm_mode_config_funcs
4268 nv50_disp_func = {
4269 .fb_create = nouveau_user_framebuffer_create,
4270 .output_poll_changed = nouveau_fbcon_output_poll_changed,
4271 .atomic_check = nv50_disp_atomic_check,
4272 .atomic_commit = nv50_disp_atomic_commit,
4273 .atomic_state_alloc = nv50_disp_atomic_state_alloc,
4274 .atomic_state_clear = nv50_disp_atomic_state_clear,
4275 .atomic_state_free = nv50_disp_atomic_state_free,
4278 /******************************************************************************
4279 * Init
4280 *****************************************************************************/
4283 nv50_display_fini(struct drm_device *dev)
4285 struct nouveau_encoder *nv_encoder;
4286 struct drm_encoder *encoder;
4287 struct drm_plane *plane;
4289 drm_for_each_plane(plane, dev) {
4290 struct nv50_wndw *wndw = nv50_wndw(plane);
4291 if (plane->funcs != &nv50_wndw)
4292 continue;
4293 nv50_wndw_fini(wndw);
4296 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
4297 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
4298 nv_encoder = nouveau_encoder(encoder);
4299 nv50_mstm_fini(nv_encoder->dp.mstm);
4305 nv50_display_init(struct drm_device *dev)
4307 struct drm_encoder *encoder;
4308 struct drm_plane *plane;
4309 struct drm_crtc *crtc;
4312 push = evo_wait(nv50_mast(dev), 32);
4313 if (!push)
4314 return -EBUSY;
4316 evo_mthd(push, 0x0088, 1);
4317 evo_data(push, nv50_mast(dev)->base.sync.handle);
4318 evo_kick(push, nv50_mast(dev));
4320 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
4321 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
4322 const struct drm_encoder_helper_funcs *help;
4323 struct nouveau_encoder *nv_encoder;
4325 nv_encoder = nouveau_encoder(encoder);
4326 help = encoder->helper_private;
4327 if (help && help->dpms)
4328 help->dpms(encoder, DRM_MODE_DPMS_ON);
4330 nv50_mstm_init(nv_encoder->dp.mstm);
4334 drm_for_each_crtc(crtc, dev) {
4335 nv50_head_lut_load(crtc);
4338 drm_for_each_plane(plane, dev) {
4339 struct nv50_wndw *wndw = nv50_wndw(plane);
4340 if (plane->funcs != &nv50_wndw)
4341 continue;
4342 nv50_wndw_init(wndw);
4349 nv50_display_destroy(struct drm_device *dev)
4351 struct nv50_disp *disp = nv50_disp(dev);
4353 nv50_dmac_destroy(&disp->mast.base, disp->disp);
4355 nouveau_bo_unmap(disp->sync);
4356 if (disp->sync)
4357 nouveau_bo_unpin(disp->sync);
4358 nouveau_bo_ref(NULL, &disp->sync);
4360 nouveau_display(dev)->priv = NULL;
4364 MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
4365 static int nouveau_atomic = 0;
4366 module_param_named(atomic, nouveau_atomic, int, 0400);
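/* Atomic userspace support therefore stays opt-in; a hedged usage
 * sketch (parameter name taken from the declaration above):
 *
 *   modprobe nouveau atomic=1
 *
 * Only with this set does nv50_display_create() below advertise
 * DRIVER_ATOMIC.
 */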
4369 nv50_display_create(struct drm_device *dev)
4371 struct nvif_device *device = &nouveau_drm(dev)->client.device;
4372 struct nouveau_drm *drm = nouveau_drm(dev);
4373 struct dcb_table *dcb = &drm->vbios.dcb;
4374 struct drm_connector *connector, *tmp;
4375 struct nv50_disp *disp;
4376 struct dcb_output *dcbe;
4379 disp = kzalloc(sizeof(*disp), GFP_KERNEL);
4380 if (!disp)
4381 return -ENOMEM;
4383 mutex_init(&disp->mutex);
4385 nouveau_display(dev)->priv = disp;
4386 nouveau_display(dev)->dtor = nv50_display_destroy;
4387 nouveau_display(dev)->init = nv50_display_init;
4388 nouveau_display(dev)->fini = nv50_display_fini;
4389 disp->disp = &nouveau_display(dev)->disp;
4390 dev->mode_config.funcs = &nv50_disp_func;
4391 if (nouveau_atomic)
4392 dev->driver->driver_features |= DRIVER_ATOMIC;
4394 /* small shared memory area we use for notifiers and semaphores */
4395 ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
4396 0, 0x0000, NULL, NULL, &disp->sync);
4397 if (!ret) {
4398 ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
4399 if (!ret) {
4400 ret = nouveau_bo_map(disp->sync);
4401 if (ret)
4402 nouveau_bo_unpin(disp->sync);
4403 }
4404 if (ret)
4405 nouveau_bo_ref(NULL, &disp->sync);
4411 /* allocate master evo channel */
4412 ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
4417 /* create crtc objects to represent the hw heads */
4418 if (disp->disp->oclass >= GF110_DISP)
4419 crtcs = nvif_rd32(&device->object, 0x022448);
4420 else
4421 crtcs = 2;
4423 for (i = 0; i < crtcs; i++) {
4424 ret = nv50_head_create(dev, i);
4429 /* create encoder/connector objects based on VBIOS DCB table */
4430 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
4431 connector = nouveau_connector_create(dev, dcbe->connector);
4432 if (IS_ERR(connector))
4435 if (dcbe->location == DCB_LOC_ON_CHIP) {
4436 switch (dcbe->type) {
4437 case DCB_OUTPUT_TMDS:
4438 case DCB_OUTPUT_LVDS:
4439 case DCB_OUTPUT_DP:
4440 ret = nv50_sor_create(connector, dcbe);
4442 case DCB_OUTPUT_ANALOG:
4443 ret = nv50_dac_create(connector, dcbe);
4450 ret = nv50_pior_create(connector, dcbe);
4454 NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
4455 dcbe->location, dcbe->type,
4456 ffs(dcbe->or) - 1, ret);
4461 /* cull any connectors we created that don't have an encoder */
4462 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
4463 if (connector->encoder_ids[0])
4466 NV_WARN(drm, "%s has no encoders, removing\n",
4468 connector->funcs->destroy(connector);
4473 nv50_display_destroy(dev);