/*
 * drivers/gpu/drm/nouveau/nvd0_display.c
 * (imported from karo-tx-linux.git, blob f8ff7786ea95227fa943b48e4bf2ac0a2e4ec0a2)
 */
1 /*
2  * Copyright 2011 Red Hat Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Ben Skeggs
23  */
24
25 #include <linux/dma-mapping.h>
26
27 #include <drm/drmP.h>
28 #include <drm/drm_crtc_helper.h>
29
30 #include "nouveau_drm.h"
31 #include "nouveau_dma.h"
32 #include "nouveau_gem.h"
33 #include "nouveau_connector.h"
34 #include "nouveau_encoder.h"
35 #include "nouveau_crtc.h"
36 #include "nouveau_fence.h"
37 #include "nv50_display.h"
38
39 #include <core/client.h>
40 #include <core/gpuobj.h>
41 #include <core/class.h>
42
43 #include <subdev/timer.h>
44 #include <subdev/bar.h>
45 #include <subdev/fb.h>
46
/* Number of DMA-driven EVO channels (1 core + 4 base + 4 overlay). */
#define EVO_DMA_NR 9

/* Channel indices within the display: core, per-head base (flip),
 * overlay, overlay-immediate and cursor channels. */
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(  0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c), 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c), 0x10)

/* Object handles used when creating display objects on the client. */
#define EVO_CORE_HANDLE      (0xd1500000)
#define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
#define EVO_CHAN_OCLASS(t,c) ((nv_hclass(c) & 0xff00) | ((t) & 0x00ff))
#define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) |                               \
                              (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))
66
67 /******************************************************************************
68  * EVO channel
69  *****************************************************************************/
70
/* Base state shared by all EVO display channels. */
struct nvd0_chan {
	struct nouveau_object *user;	/* channel object created on the client */
	u32 handle;			/* object handle; 0 if never created */
};
75
76 static int
77 nvd0_chan_create(struct nouveau_object *core, u32 bclass, u8 head,
78                  void *data, u32 size, struct nvd0_chan *chan)
79 {
80         struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
81         const u32 oclass = EVO_CHAN_OCLASS(bclass, core);
82         const u32 handle = EVO_CHAN_HANDLE(bclass, head);
83         int ret;
84
85         ret = nouveau_object_new(client, EVO_CORE_HANDLE, handle,
86                                  oclass, data, size, &chan->user);
87         if (ret)
88                 return ret;
89
90         chan->handle = handle;
91         return 0;
92 }
93
94 static void
95 nvd0_chan_destroy(struct nouveau_object *core, struct nvd0_chan *chan)
96 {
97         struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
98         if (chan->handle)
99                 nouveau_object_del(client, EVO_CORE_HANDLE, chan->handle);
100 }
101
102 /******************************************************************************
103  * PIO EVO channel
104  *****************************************************************************/
105
/* PIO-mode EVO channel (cursor, overlay-immediate): no push buffer. */
struct nvd0_pioc {
	struct nvd0_chan base;
};
109
/* Tear down a PIO channel; it owns nothing beyond the base channel. */
static void
nvd0_pioc_destroy(struct nouveau_object *core, struct nvd0_pioc *pioc)
{
	nvd0_chan_destroy(core, &pioc->base);
}
115
/* Create a PIO channel: a plain EVO channel with no extra resources. */
static int
nvd0_pioc_create(struct nouveau_object *core, u32 bclass, u8 head,
		 void *data, u32 size, struct nvd0_pioc *pioc)
{
	return nvd0_chan_create(core, bclass, head, data, size, &pioc->base);
}
122
123 /******************************************************************************
124  * DMA EVO channel
125  *****************************************************************************/
126
/* DMA-mode EVO channel: commands are written to a page-sized push
 * buffer in coherent system memory and fetched by the hardware. */
struct nvd0_dmac {
	struct nvd0_chan base;
	dma_addr_t handle;	/* bus address of the push buffer */
	u32 *ptr;		/* CPU mapping of the push buffer */
};
132
133 static void
134 nvd0_dmac_destroy(struct nouveau_object *core, struct nvd0_dmac *dmac)
135 {
136         if (dmac->ptr) {
137                 struct pci_dev *pdev = nv_device(core)->pdev;
138                 pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
139         }
140
141         nvd0_chan_destroy(core, &dmac->base);
142 }
143
144 static int
145 nv50_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
146 {
147         struct nouveau_fb *pfb = nouveau_fb(core);
148         struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
149         struct nouveau_object *object;
150         int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
151                                      NV_DMA_IN_MEMORY_CLASS,
152                                      &(struct nv_dma_class) {
153                                         .flags = NV_DMA_TARGET_VRAM |
154                                                  NV_DMA_ACCESS_RDWR,
155                                         .start = 0,
156                                         .limit = pfb->ram.size - 1,
157                                         .conf0 = NV50_DMA_CONF0_ENABLE |
158                                                  NV50_DMA_CONF0_PART_256,
159                                      }, sizeof(struct nv_dma_class), &object);
160         if (ret)
161                 return ret;
162
163         ret = nouveau_object_new(client, parent, NvEvoFB16,
164                                  NV_DMA_IN_MEMORY_CLASS,
165                                  &(struct nv_dma_class) {
166                                         .flags = NV_DMA_TARGET_VRAM |
167                                                  NV_DMA_ACCESS_RDWR,
168                                         .start = 0,
169                                         .limit = pfb->ram.size - 1,
170                                         .conf0 = NV50_DMA_CONF0_ENABLE | 0x70 |
171                                                  NV50_DMA_CONF0_PART_256,
172                                  }, sizeof(struct nv_dma_class), &object);
173         if (ret)
174                 return ret;
175
176         ret = nouveau_object_new(client, parent, NvEvoFB32,
177                                  NV_DMA_IN_MEMORY_CLASS,
178                                  &(struct nv_dma_class) {
179                                         .flags = NV_DMA_TARGET_VRAM |
180                                                  NV_DMA_ACCESS_RDWR,
181                                         .start = 0,
182                                         .limit = pfb->ram.size - 1,
183                                         .conf0 = NV50_DMA_CONF0_ENABLE | 0x7a |
184                                                  NV50_DMA_CONF0_PART_256,
185                                  }, sizeof(struct nv_dma_class), &object);
186         return ret;
187 }
188
189 static int
190 nvc0_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
191 {
192         struct nouveau_fb *pfb = nouveau_fb(core);
193         struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
194         struct nouveau_object *object;
195         int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
196                                      NV_DMA_IN_MEMORY_CLASS,
197                                      &(struct nv_dma_class) {
198                                         .flags = NV_DMA_TARGET_VRAM |
199                                                  NV_DMA_ACCESS_RDWR,
200                                         .start = 0,
201                                         .limit = pfb->ram.size - 1,
202                                         .conf0 = NVC0_DMA_CONF0_ENABLE,
203                                      }, sizeof(struct nv_dma_class), &object);
204         if (ret)
205                 return ret;
206
207         ret = nouveau_object_new(client, parent, NvEvoFB16,
208                                  NV_DMA_IN_MEMORY_CLASS,
209                                  &(struct nv_dma_class) {
210                                         .flags = NV_DMA_TARGET_VRAM |
211                                                  NV_DMA_ACCESS_RDWR,
212                                         .start = 0,
213                                         .limit = pfb->ram.size - 1,
214                                         .conf0 = NVC0_DMA_CONF0_ENABLE | 0xfe,
215                                  }, sizeof(struct nv_dma_class), &object);
216         if (ret)
217                 return ret;
218
219         ret = nouveau_object_new(client, parent, NvEvoFB32,
220                                  NV_DMA_IN_MEMORY_CLASS,
221                                  &(struct nv_dma_class) {
222                                         .flags = NV_DMA_TARGET_VRAM |
223                                                  NV_DMA_ACCESS_RDWR,
224                                         .start = 0,
225                                         .limit = pfb->ram.size - 1,
226                                         .conf0 = NVC0_DMA_CONF0_ENABLE | 0xfe,
227                                  }, sizeof(struct nv_dma_class), &object);
228         return ret;
229 }
230
231 static int
232 nvd0_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
233 {
234         struct nouveau_fb *pfb = nouveau_fb(core);
235         struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
236         struct nouveau_object *object;
237         int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
238                                      NV_DMA_IN_MEMORY_CLASS,
239                                      &(struct nv_dma_class) {
240                                         .flags = NV_DMA_TARGET_VRAM |
241                                                  NV_DMA_ACCESS_RDWR,
242                                         .start = 0,
243                                         .limit = pfb->ram.size - 1,
244                                         .conf0 = NVD0_DMA_CONF0_ENABLE |
245                                                  NVD0_DMA_CONF0_PAGE_LP,
246                                      }, sizeof(struct nv_dma_class), &object);
247         if (ret)
248                 return ret;
249
250         ret = nouveau_object_new(client, parent, NvEvoFB32,
251                                  NV_DMA_IN_MEMORY_CLASS,
252                                  &(struct nv_dma_class) {
253                                         .flags = NV_DMA_TARGET_VRAM |
254                                                  NV_DMA_ACCESS_RDWR,
255                                         .start = 0,
256                                         .limit = pfb->ram.size - 1,
257                                         .conf0 = NVD0_DMA_CONF0_ENABLE | 0xfe |
258                                                  NVD0_DMA_CONF0_PAGE_LP,
259                                  }, sizeof(struct nv_dma_class), &object);
260         return ret;
261 }
262
/* Create a DMA-driven EVO channel.
 *
 * Allocates a page-sized coherent push buffer, wraps it in a DMA object
 * (handle taken from the first dword of @data, per the channel class
 * args), creates the channel itself, then instantiates the sync-buffer
 * and VRAM DMA objects plus the per-generation framebuffer DMA objects.
 *
 * NOTE(review): on the error paths below the push buffer allocated here
 * is not freed locally — presumably callers are expected to run
 * nvd0_dmac_destroy() on failure; verify against the call sites.
 *
 * Returns 0 on success, or a negative error code.
 */
static int
nvd0_dmac_create(struct nouveau_object *core, u32 bclass, u8 head,
		 void *data, u32 size, u64 syncbuf,
		 struct nvd0_dmac *dmac)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	/* First dword of the class args is the pushbuf object handle. */
	u32 pushbuf = *(u32 *)data;
	int ret;

	/* One page of coherent system memory for the push buffer. */
	dmac->ptr = pci_alloc_consistent(nv_device(core)->pdev, PAGE_SIZE,
					&dmac->handle);
	if (!dmac->ptr)
		return -ENOMEM;

	/* DMA object through which the hardware fetches the push buffer. */
	ret = nouveau_object_new(client, NVDRM_DEVICE, pushbuf,
				 NV_DMA_FROM_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_PCI_US |
						 NV_DMA_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nvd0_chan_create(core, bclass, head, data, size, &dmac->base);
	if (ret)
		return ret;

	/* Window into the shared sync buffer (notifiers/semaphores). */
	ret = nouveau_object_new(client, dmac->base.handle, NvEvoSync,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	/* Linear DMA object covering all of VRAM. */
	ret = nouveau_object_new(client, dmac->base.handle, NvEvoVRAM,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	/* Framebuffer DMA objects differ per GPU generation. */
	if (nv_device(core)->card_type < NV_C0)
		ret = nv50_dmac_create_fbdma(core, dmac->base.handle);
	else
	if (nv_device(core)->card_type < NV_D0)
		ret = nvc0_dmac_create_fbdma(core, dmac->base.handle);
	else
		ret = nvd0_dmac_create_fbdma(core, dmac->base.handle);
	return ret;
}
325
/* Core (master) channel: drives modesets and global updates. */
struct nvd0_mast {
	struct nvd0_dmac base;
};

/* Cursor channel (PIO). */
struct nvd0_curs {
	struct nvd0_pioc base;
};

/* Base channel, used for page flipping; tracks the flip semaphore
 * state within the shared sync buffer. */
struct nvd0_sync {
	struct nvd0_dmac base;
	struct {
		u32 offset;	/* current semaphore offset in the sync bo */
		u16 value;	/* next semaphore release value */
	} sem;
};

/* Overlay channel (DMA). */
struct nvd0_ovly {
	struct nvd0_dmac base;
};

/* Overlay-immediate channel (PIO). */
struct nvd0_oimm {
	struct nvd0_pioc base;
};

/* Per-CRTC state: the nouveau CRTC plus its private EVO channels. */
struct nvd0_head {
	struct nouveau_crtc base;
	struct nvd0_curs curs;
	struct nvd0_sync sync;
	struct nvd0_ovly ovly;
	struct nvd0_oimm oimm;
};
357
/* Downcast a drm_crtc to its nvd0_head, and accessors for the per-head
 * channels embedded in it. */
#define nvd0_head(c) ((struct nvd0_head *)nouveau_crtc(c))
#define nvd0_curs(c) (&nvd0_head(c)->curs)
#define nvd0_sync(c) (&nvd0_head(c)->sync)
#define nvd0_ovly(c) (&nvd0_head(c)->ovly)
#define nvd0_oimm(c) (&nvd0_head(c)->oimm)
/* Base nvd0_chan of any channel wrapper, and its object class (used to
 * distinguish NV50/NVD0/NVE0 display generations). */
#define nvd0_chan(c) (&(c)->base.base)
#define nvd0_vers(c) nv_mclass(nvd0_chan(c)->user)
365
/* Per-device display state, stored in nouveau_display()->priv. */
struct nvd0_disp {
	struct nouveau_object *core;	/* core display object */
	struct nvd0_mast mast;		/* core (master) EVO channel */

	u32 modeset;			/* pending modeset head mask */

	struct nouveau_bo *sync;	/* shared sync/semaphore buffer */
};
374
375 static struct nvd0_disp *
376 nvd0_disp(struct drm_device *dev)
377 {
378         return nouveau_display(dev)->priv;
379 }
380
/* Core (master) EVO channel for the given drm_device. */
#define nvd0_mast(d) (&nvd0_disp(d)->mast)
382
383 static struct drm_crtc *
384 nvd0_display_crtc_get(struct drm_encoder *encoder)
385 {
386         return nouveau_encoder(encoder)->crtc;
387 }
388
389 /******************************************************************************
390  * EVO channel helpers
391  *****************************************************************************/
/* Reserve room for @nr dwords in a DMA channel's push buffer.
 *
 * Returns a pointer to the current write position, or NULL if the
 * channel stalls.  If fewer than @nr dwords remain before the end of
 * the page (8 dwords are kept in reserve), a jump-to-start command is
 * emitted and the buffer wraps back to offset 0.
 *
 * NOTE(review): offsets 0x0000/0x0004 on the channel object presumably
 * map to the hardware PUT/GET pointers — confirm against the EVO
 * channel documentation.
 */
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nvd0_dmac *dmac = evoc;
	/* Current PUT offset, converted from bytes to dwords. */
	u32 put = nv_ro32(dmac->base.user, 0x0000) / 4;

	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		/* Not enough room: emit a jump back to the start of the
		 * push buffer and reset PUT. */
		dmac->ptr[put] = 0x20000000;

		nv_wo32(dmac->base.user, 0x0000, 0x00000000);
		/* Wait for the hardware to consume everything and follow
		 * the jump (GET returns to 0). */
		if (!nv_wait(dmac->base.user, 0x0004, ~0, 0x00000000)) {
			NV_ERROR(dmac->base.user, "channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}
412
413 static void
414 evo_kick(u32 *push, void *evoc)
415 {
416         struct nvd0_dmac *dmac = evoc;
417         nv_wo32(dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
418 }
419
/* Emit an EVO method header: dword count in the upper field (<<18),
 * method offset in the low bits. */
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
/* Emit one data dword for the preceding method header. */
#define evo_data(p,d)   *((p)++) = (d)
422
423 static bool
424 evo_sync_wait(void *data)
425 {
426         return nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000;
427 }
428
429 static int
430 evo_sync(struct drm_device *dev)
431 {
432         struct nouveau_device *device = nouveau_dev(dev);
433         struct nvd0_disp *disp = nvd0_disp(dev);
434         struct nvd0_mast *mast = nvd0_mast(dev);
435         u32 *push = evo_wait(mast, 8);
436         if (push) {
437                 nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
438                 evo_mthd(push, 0x0084, 1);
439                 evo_data(push, 0x80000000 | EVO_MAST_NTFY);
440                 evo_mthd(push, 0x0080, 2);
441                 evo_data(push, 0x00000000);
442                 evo_data(push, 0x00000000);
443                 evo_kick(push, mast);
444                 if (nv_wait_cb(device, evo_sync_wait, disp->sync))
445                         return 0;
446         }
447
448         return -EBUSY;
449 }
450
451 /******************************************************************************
452  * Page flipping channel
453  *****************************************************************************/
/* Return the buffer object holding the page-flip semaphores.  All
 * CRTCs share the display's single sync BO; @crtc is unused here as
 * per-head offsets are encoded via the EVO_FLIP_SEM* macros. */
struct nouveau_bo *
nvd0_display_crtc_sema(struct drm_device *dev, int crtc)
{
	return nvd0_disp(dev)->sync;
}
459
/* Cancel any pending page flip on a CRTC's base (sync) channel by
 * zeroing its notifier, semaphore and surface state, then submitting
 * an update.  Best-effort: silently does nothing if the channel is
 * stalled (evo_wait fails). */
void
nvd0_display_flip_stop(struct drm_crtc *crtc)
{
	struct nvd0_sync *sync = nvd0_sync(crtc);
	u32 *push;

	push = evo_wait(sync, 8);
	if (push) {
		/* NOTE(review): 0x0084/0x0094 presumably clear the
		 * notifier and semaphore controls, 0x00c0 detaches the
		 * image DMA, 0x0080 commits — confirm against the EVO
		 * base channel method list. */
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0080, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, sync);
	}
}
479
/* Queue a page flip to @fb on @crtc's base channel.
 *
 * If @chan is given, semaphore commands are emitted on that rendering
 * channel so the flip waits for rendering to complete; otherwise the
 * semaphore is released immediately from the CPU.  The display channel
 * then acquires the semaphore, flips to the new surface, and releases
 * the alternate semaphore slot for the next cycle (the two 16-byte
 * slots in the sync buffer are ping-ponged via sem.offset ^= 0x10).
 *
 * @swap_interval: frames to wait between flips; 0 requests immediate
 * (tearing) flips.
 *
 * Returns 0 on success, -EBUSY if the display channel is stalled, or a
 * negative error from ring-space allocation.
 */
int
nvd0_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct nouveau_channel *chan, u32 swap_interval)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nvd0_disp *disp = nvd0_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nvd0_sync *sync = nvd0_sync(crtc);
	u32 *push;
	int ret;

	/* Interval lives in bits 7:4; bit 8 selects immediate mode. */
	swap_interval <<= 4;
	if (swap_interval == 0)
		swap_interval |= 0x100;

	push = evo_wait(sync, 128);
	if (unlikely(push == NULL))
		return -EBUSY;

	/* synchronise with the rendering channel, if necessary */
	if (likely(chan)) {
		ret = RING_SPACE(chan, 10);
		if (ret)
			return ret;

		if (nv_mclass(chan->object) < NVC0_CHANNEL_IND_CLASS) {
			/* Pre-Fermi: release our semaphore (0xf00dxxxx),
			 * then acquire the alternate slot's "flip done"
			 * token (0x74b1e000) written by the display. */
			BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
			OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
			OUT_RING  (chan, sync->sem.offset);
			BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
			OUT_RING  (chan, 0xf00d0000 | sync->sem.value);
			BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
			OUT_RING  (chan, sync->sem.offset ^ 0x10);
			OUT_RING  (chan, 0x74b1e000);
			BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
			if (nv_mclass(chan->object) < NV84_CHANNEL_DMA_CLASS)
				OUT_RING  (chan, NvSema);
			else
				OUT_RING  (chan, chan->vram);
		} else {
			/* Fermi+: 64-bit virtual semaphore addresses;
			 * 0x1002 = release, 0x1001 = acquire-equal. */
			u64 offset = nvc0_fence_crtc(chan, nv_crtc->index);
			offset += sync->sem.offset;

			BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
			OUT_RING  (chan, upper_32_bits(offset));
			OUT_RING  (chan, lower_32_bits(offset));
			OUT_RING  (chan, 0xf00d0000 | sync->sem.value);
			OUT_RING  (chan, 0x1002);
			BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
			OUT_RING  (chan, upper_32_bits(offset));
			OUT_RING  (chan, lower_32_bits(offset ^ 0x10));
			OUT_RING  (chan, 0x74b1e000);
			OUT_RING  (chan, 0x1001);
		}

		FIRE_RING (chan);
	} else {
		/* No rendering channel: release the semaphore directly
		 * and make sure the display has caught up. */
		nouveau_bo_wr32(disp->sync, sync->sem.offset / 4,
				0xf00d0000 | sync->sem.value);
		evo_sync(crtc->dev);
	}

	/* queue the flip */
	evo_mthd(push, 0x0100, 1);
	evo_data(push, 0xfffe0000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, swap_interval);
	if (!(swap_interval & 0x00000100)) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, 0x40000000);
	}
	/* Semaphore acquire: wait for the render-side release value. */
	evo_mthd(push, 0x0088, 4);
	evo_data(push, sync->sem.offset);
	evo_data(push, 0xf00d0000 | sync->sem.value);
	evo_data(push, 0x74b1e000);
	evo_data(push, NvEvoSync);
	evo_mthd(push, 0x00a0, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x00c0, 1);
	evo_data(push, nv_fb->r_dma);
	evo_mthd(push, 0x0110, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	/* Surface description; the method base moved between the NV50-
	 * and NVD0-class base channels. */
	if (nvd0_vers(sync) < NVD0_DISP_SYNC_CLASS) {
		evo_mthd(push, 0x0800, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	} else {
		evo_mthd(push, 0x0400, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	}
	evo_mthd(push, 0x0080, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, sync);

	/* Ping-pong to the other semaphore slot for the next flip. */
	sync->sem.offset ^= 0x10;
	sync->sem.value++;
	return 0;
}
587
588 /******************************************************************************
589  * CRTC
590  *****************************************************************************/
591 static int
592 nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
593 {
594         struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
595         struct nouveau_connector *nv_connector;
596         struct drm_connector *connector;
597         u32 *push, mode = 0x00;
598
599         nv_connector = nouveau_crtc_connector_get(nv_crtc);
600         connector = &nv_connector->base;
601         if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
602                 if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
603                         mode = DITHERING_MODE_DYNAMIC2X2;
604         } else {
605                 mode = nv_connector->dithering_mode;
606         }
607
608         if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
609                 if (connector->display_info.bpc >= 8)
610                         mode |= DITHERING_DEPTH_8BPC;
611         } else {
612                 mode |= nv_connector->dithering_depth;
613         }
614
615         push = evo_wait(mast, 4);
616         if (push) {
617                 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
618                         evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
619                         evo_data(push, mode);
620                 } else
621                 if (nvd0_vers(mast) < NVE0_DISP_MAST_CLASS) {
622                         evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
623                         evo_data(push, mode);
624                 } else {
625                         evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
626                         evo_data(push, mode);
627                 }
628
629                 if (update) {
630                         evo_mthd(push, 0x0080, 1);
631                         evo_data(push, 0x00000000);
632                 }
633                 evo_kick(push, mast);
634         }
635
636         return 0;
637 }
638
/* Program the CRTC scaler according to the connector's scaling mode,
 * applying underscan (overscan compensation) borders where requested.
 * Aspect ratios are computed in 19-bit fixed point.  If @update, any
 * pending flip is stopped and a fresh flip of the current fb queued so
 * the new viewport takes effect.
 *
 * Always returns 0.
 */
static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode)
		mode = nv_connector->scaling_mode;

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;	/* 19-bit fixed-point Y/X */

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			/* No explicit border: shrink width by ~6.25%+32px. */
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		/* Fit the user mode into (oX,oY) preserving its aspect. */
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		/* Method layout differs between NV50- and NVD0-class cores. */
		if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/*XXX: SCALE_CTRL_ACTIVE??? */
			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		} else {
			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		}

		evo_kick(push, mast);

		/* Re-flip the current fb so the new viewport is applied. */
		if (update) {
			nvd0_display_flip_stop(crtc);
			nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
		}
	}

	return 0;
}
742
static int
nvd0_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
	u32 *push, hue, vib;
	int adj;

	/* Scale the CRTC's vibrance/hue properties into 12-bit hardware
	 * fields.  "adj" rounds positive vibrance values to nearest.
	 * NOTE(review): assumes the properties are percent-like values in
	 * roughly -100..100 -- confirm against the property creation code. */
	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;

	push = evo_wait(mast, 16);
	if (push) {
		/* Same (hue,vib) payload, but the per-head method offset and
		 * stride differ between pre-NVD0 (0x400) and NVD0+ (0x300)
		 * core channel layouts. */
		if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		} else {
			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		}

		/* 0x0080 is the core-channel update request: commit the
		 * staged state immediately when the caller asked for it. */
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}
773
static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
	u32 *push;

	/* Point the head's scanout at "fb": VRAM offset (in 256-byte units),
	 * dimensions, pitch, format, DMA object and panning position.  The
	 * two branches program the same state through the pre-NVD0 (0x400
	 * stride) and NVD0+ (0x300 stride) core channel method layouts. */
	push = evo_wait(mast, 16);
	if (push) {
		if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (y << 16) | x);
			/* The framebuffer DMA object method only exists on
			 * classes newer than the original NV50 core channel. */
			if (nvd0_vers(mast) > NV50_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
				evo_data(push, nvfb->r_dma);
			}
		} else {
			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_data(push, nvfb->r_dma);
			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (y << 16) | x);
		}

		/* Optional immediate commit of the staged state. */
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	/* Remember the DMA handle so nvd0_crtc_commit() can re-program it. */
	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}
819
static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
{
	struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	/* Enable the hardware cursor on this head and point it at the
	 * pre-allocated cursor BO.  0x85000000 is presumably the
	 * "enabled + format" control word -- confirm against rnndb.
	 * Three hardware generations, three method layouts:
	 *   < NV84:  control + offset only;
	 *   < NVD0:  additionally needs a cursor DMA object (0x089c);
	 *   >= NVD0: 0x300-stride methods with DMA object at 0x048c. */
	if (push) {
		if (nvd0_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
		} else
		if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
		}
		evo_kick(push, mast);
	}
}
847
static void
nvd0_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
{
	struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	/* Mirror of nvd0_crtc_cursor_show(): write the "disabled" control
	 * word (0x05000000) and, on generations that have one, clear the
	 * cursor DMA object.  Same three per-class method layouts. */
	if (push) {
		if (nvd0_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
		} else
		if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}
}
872
873 static void
874 nvd0_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
875 {
876         struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
877
878         if (show)
879                 nvd0_crtc_cursor_show(nv_crtc);
880         else
881                 nvd0_crtc_cursor_hide(nv_crtc);
882
883         if (update) {
884                 u32 *push = evo_wait(mast, 2);
885                 if (push) {
886                         evo_mthd(push, 0x0080, 1);
887                         evo_data(push, 0x00000000);
888                         evo_kick(push, mast);
889                 }
890         }
891 }
892
/* Intentionally empty: per-head power management is handled elsewhere
 * (encoders have their own dpms hooks), but the helper framework requires
 * a .dpms callback to exist. */
static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
897
/* Helper .prepare hook: quiesce the head before a mode set.  Stops any
 * pending page flips, detaches the scanout surface/DMA objects and hides
 * the cursor.  Note no 0x0080 update is pushed here -- the state is left
 * staged until nvd0_crtc_commit(). */
static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nvd0_mast *mast = nvd0_mast(crtc->dev);
	u32 *push;

	nvd0_display_flip_stop(crtc);

	push = evo_wait(mast, 2);
	if (push) {
		if (nvd0_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (nvd0_vers(mast) <  NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
			/* NV84..NVD0 also carries a semaphore/sync DMA slot
			 * (0x085c) that must be cleared. */
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, mast);
	}

	nvd0_crtc_cursor_show_hide(nv_crtc, false, false);
}
936
/* Helper .commit hook: counterpart of nvd0_crtc_prepare().  Re-attaches
 * the framebuffer DMA object (saved in nv_crtc->fb.tile_flags by
 * nvd0_crtc_set_image()), points the head at the LUT buffer, restores
 * cursor visibility and restarts page flipping. */
static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nvd0_mast *mast = nvd0_mast(crtc->dev);
	u32 *push;

	push = evo_wait(mast, 32);
	if (push) {
		if (nvd0_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM_LP);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		} else
		if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.tile_flags);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nv_crtc->fb.tile_flags);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
			/* NVD0+ extra head control word; exact meaning of
			 * 0xffffff00 not visible here -- see rnndb. */
			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0xffffff00);
		}

		evo_kick(push, mast);
	}

	nvd0_crtc_cursor_show_hide(nv_crtc, nv_crtc->cursor.visible, true);
	nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
}
981
/* No CRTC-level mode fixup needed; scaling to the panel's native mode is
 * handled by the encoder mode_fixup hooks.  Always accept the mode. */
static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}
988
989 static int
990 nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
991 {
992         struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
993         int ret;
994
995         ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
996         if (ret)
997                 return ret;
998
999         if (old_fb) {
1000                 nvfb = nouveau_framebuffer(old_fb);
1001                 nouveau_bo_unpin(nvfb->nvbo);
1002         }
1003
1004         return 0;
1005 }
1006
/* Helper .mode_set hook: derive raster timing parameters from the DRM
 * mode and program them into the head, then stage scale/dither/vibrance
 * and the scanout image (all without a final update -- that happens in
 * nvd0_crtc_commit()). */
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nvd0_mast *mast = nvd0_mast(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	/* ilace/vscan scale the vertical timings: interlace halves them,
	 * doublescan doubles them. */
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;
	u32 *push;
	int ret;

	/* Horizontal timings, expressed as offsets from the sync start
	 * (hence the -1 adjustments on the end/start of blanking). */
	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	/* Vertical timings, scaled for doublescan/interlace. */
	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	/* Interlaced modes need a second vblank window for the odd field. */
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(mast, 64);
	if (push) {
		if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/* Pre-NVD0: pixel clock and interlace flag are part
			 * of the head methods (0x0804), clock in kHz. */
			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00800000 | mode->clock);
			evo_data(push, (ilace == 2) ? 2 : 0);
			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		} else {
			/* NVD0+: same timing words at the 0x300-stride
			 * offsets; clock programmed in Hz via 0x0450. */
			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000); /* ??? */
			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
			evo_data(push, mode->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, mode->clock * 1000);
			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		}

		evo_kick(push, mast);
	}

	/* NOTE(review): nv_connector is fetched but not used below in this
	 * version -- possibly a leftover, or intended for set_scale(). */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, false);
	nvd0_crtc_set_scale(nv_crtc, false);
	nvd0_crtc_set_color_vibrance(nv_crtc, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}
1093
1094 static int
1095 nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
1096                         struct drm_framebuffer *old_fb)
1097 {
1098         struct nouveau_drm *drm = nouveau_drm(crtc->dev);
1099         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1100         int ret;
1101
1102         if (!crtc->fb) {
1103                 NV_DEBUG(drm, "No FB bound\n");
1104                 return 0;
1105         }
1106
1107         ret = nvd0_crtc_swap_fbs(crtc, old_fb);
1108         if (ret)
1109                 return ret;
1110
1111         nvd0_display_flip_stop(crtc);
1112         nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
1113         nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
1114         return 0;
1115 }
1116
/* Atomic (kgdb/panic-safe) variant of mode_set_base: programs "fb"
 * directly with an immediate update.  Note it does not swap pins --
 * callers are expected to provide an already-pinned framebuffer. */
static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_display_flip_stop(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}
1127
1128 static void
1129 nvd0_crtc_lut_load(struct drm_crtc *crtc)
1130 {
1131         struct nvd0_disp *disp = nvd0_disp(crtc->dev);
1132         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1133         void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
1134         int i;
1135
1136         for (i = 0; i < 256; i++) {
1137                 u16 r = nv_crtc->lut.r[i] >> 2;
1138                 u16 g = nv_crtc->lut.g[i] >> 2;
1139                 u16 b = nv_crtc->lut.b[i] >> 2;
1140
1141                 if (nv_mclass(disp->core) < NVD0_DISP_CLASS) {
1142                         writew(r + 0x0000, lut + (i * 0x08) + 0);
1143                         writew(g + 0x0000, lut + (i * 0x08) + 2);
1144                         writew(b + 0x0000, lut + (i * 0x08) + 4);
1145                 } else {
1146                         writew(r + 0x6000, lut + (i * 0x20) + 0);
1147                         writew(g + 0x6000, lut + (i * 0x20) + 2);
1148                         writew(b + 0x6000, lut + (i * 0x20) + 4);
1149                 }
1150         }
1151 }
1152
/* DRM .cursor_set hook: copy a userspace-supplied 64x64 cursor image
 * into the head's private cursor BO and update visibility.  handle == 0
 * means "hide the cursor". */
static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		/* Hardware cursor is fixed at 64x64 here. */
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		/* Copy word-by-word through the BO accessors into the
		 * per-CRTC cursor buffer (64*64 ARGB words). */
		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	/* NOTE(review): visibility is still toggled even if the map/copy
	 * above failed (ret != 0) -- the stale image would be shown.
	 * Matches historic behaviour; confirm whether intentional. */
	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show_hide(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}
1192
/* DRM .cursor_move hook: write the position directly into the cursor
 * PIO channel's user area (0x0084 = packed y:x position, 0x0080 = kick/
 * update trigger).  Order matters: position first, then the trigger. */
static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nvd0_curs *curs = nvd0_curs(crtc);
	struct nvd0_chan *chan = nvd0_chan(curs);
	nv_wo32(chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nv_wo32(chan->user, 0x0080, 0x00000000);
	return 0;
}
1202
1203 static void
1204 nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1205                     uint32_t start, uint32_t size)
1206 {
1207         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1208         u32 end = max(start + size, (u32)256);
1209         u32 i;
1210
1211         for (i = start; i < end; i++) {
1212                 nv_crtc->lut.r[i] = r[i];
1213                 nv_crtc->lut.g[i] = g[i];
1214                 nv_crtc->lut.b[i] = b[i];
1215         }
1216
1217         nvd0_crtc_lut_load(crtc);
1218 }
1219
/* Tear down everything nvd0_crtc_create() built: the per-head overlay,
 * overlay-immediate, sync and cursor channels, then the cursor and LUT
 * buffer objects, and finally the DRM CRTC itself.  Also used as the
 * error-path cleanup in nvd0_crtc_create(), so the destroy helpers are
 * presumably safe on partially-initialised state -- confirm. */
static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nvd0_disp *disp = nvd0_disp(crtc->dev);
	struct nvd0_head *head = nvd0_head(crtc);
	nvd0_dmac_destroy(disp->core, &head->ovly.base);
	nvd0_pioc_destroy(disp->core, &head->oimm.base);
	nvd0_dmac_destroy(disp->core, &head->sync.base);
	nvd0_pioc_destroy(disp->core, &head->curs.base);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
1237
/* CRTC helper callbacks (legacy modeset helper framework). */
static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};
1248
/* Core DRM CRTC callbacks (cursor, gamma, page flip, teardown). */
static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};
1257
/* Intentionally empty: cursor position is programmed through the DRM
 * cursor_move path (nvd0_crtc_cursor_move), not this nouveau_crtc hook. */
static void
nvd0_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
{
}
1262
/* Intentionally empty: the cursor BO offset is programmed when the
 * cursor is shown (nvd0_crtc_cursor_show), so this hook has no work. */
static void
nvd0_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
{
}
1267
/* Allocate and register one display head ("index"): the nouveau CRTC
 * wrapper, its LUT and cursor buffer objects, and the four per-head
 * display channels (cursor PIO, page-flip/sync DMA, overlay-immediate
 * PIO, overlay DMA).  Any failure funnels through "out", which calls
 * nvd0_crtc_destroy() to unwind whatever was created. */
static int
nvd0_crtc_create(struct drm_device *dev, struct nouveau_object *core, int index)
{
	struct nvd0_disp *disp = nvd0_disp(dev);
	struct nvd0_head *head;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	/* Wire up the nouveau_crtc callbacks and defaults; the gamma LUT
	 * starts as an identity ramp (8-bit index widened to 16 bits). */
	head->base.index = index;
	head->base.set_dither = nvd0_crtc_set_dither;
	head->base.set_scale = nvd0_crtc_set_scale;
	head->base.set_color_vibrance = nvd0_crtc_set_color_vibrance;
	head->base.color_vibrance = 50;
	head->base.vibrant_hue = 0;
	head->base.cursor.set_offset = nvd0_cursor_set_offset;
	head->base.cursor.set_pos = nvd0_cursor_set_pos;
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	crtc = &head->base.base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* LUT buffer: 8KiB VRAM BO, pinned and CPU-mapped for lut_load(). */
	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(head->base.lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

	/* allocate cursor resources */
	ret = nvd0_pioc_create(disp->core, NV50_DISP_CURS_CLASS, index,
			      &(struct nv50_display_curs_class) {
					.head = index,
			      }, sizeof(struct nv50_display_curs_class),
			      &head->curs.base);
	if (ret)
		goto out;

	/* Cursor image buffer: 64x64 32bpp, pinned and mapped so
	 * cursor_set() can copy into it with nouveau_bo_wr32(). */
	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &head->base.cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(head->base.cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &head->base.cursor.nvbo);
	}

	if (ret)
		goto out;

	/* allocate page flip / sync resources */
	ret = nvd0_dmac_create(disp->core, NV50_DISP_SYNC_CLASS, index,
			      &(struct nv50_display_sync_class) {
					.pushbuf = EVO_PUSH_HANDLE(SYNC, index),
					.head = index,
			      }, sizeof(struct nv50_display_sync_class),
			      disp->sync->bo.offset, &head->sync.base);
	if (ret)
		goto out;

	/* Per-head semaphore slot within the shared sync buffer. */
	head->sync.sem.offset = EVO_SYNC(1 + index, 0x00);

	/* allocate overlay resources */
	ret = nvd0_pioc_create(disp->core, NV50_DISP_OIMM_CLASS, index,
			      &(struct nv50_display_oimm_class) {
					.head = index,
			      }, sizeof(struct nv50_display_oimm_class),
			      &head->oimm.base);
	if (ret)
		goto out;

	ret = nvd0_dmac_create(disp->core, NV50_DISP_OVLY_CLASS, index,
			      &(struct nv50_display_ovly_class) {
					.pushbuf = EVO_PUSH_HANDLE(OVLY, index),
					.head = index,
			      }, sizeof(struct nv50_display_ovly_class),
			      disp->sync->bo.offset, &head->ovly.base);
	if (ret)
		goto out;

out:
	/* Single unwind point: destroy tears down whatever succeeded. */
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}
1371
1372 /******************************************************************************
1373  * DAC
1374  *****************************************************************************/
1375 static void
1376 nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
1377 {
1378         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1379         struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1380         int or = nv_encoder->or;
1381         u32 dpms_ctrl;
1382
1383         dpms_ctrl = 0x00000000;
1384         if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
1385                 dpms_ctrl |= 0x00000001;
1386         if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
1387                 dpms_ctrl |= 0x00000004;
1388
1389         nv_call(disp->core, NV50_DISP_DAC_PWR + or, dpms_ctrl);
1390 }
1391
1392 static bool
1393 nvd0_dac_mode_fixup(struct drm_encoder *encoder,
1394                     const struct drm_display_mode *mode,
1395                     struct drm_display_mode *adjusted_mode)
1396 {
1397         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1398         struct nouveau_connector *nv_connector;
1399
1400         nv_connector = nouveau_encoder_connector_get(nv_encoder);
1401         if (nv_connector && nv_connector->native_mode) {
1402                 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1403                         int id = adjusted_mode->base.id;
1404                         *adjusted_mode = *nv_connector->native_mode;
1405                         adjusted_mode->base.id = id;
1406                 }
1407         }
1408
1409         return true;
1410 }
1411
/* Intentionally empty: all DAC programming happens in mode_set/dpms,
 * but the encoder helper framework requires a .commit callback. */
static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}
1416
/* Encoder .mode_set hook: power the DAC up and attach it to the head
 * driving this encoder, programming sync polarity (and, on NVD0+, an
 * additional per-head control word). */
static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nvd0_mast *mast = nvd0_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/* Pre-NVD0: per-OR method carries the head mask and
			 * negative-sync flags (bit0 = -hsync, bit1 = -vsync). */
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			/* NVD0+: sync flags move to bits 3/4 of a per-head
			 * word, plus a "magic" control value encoding the
			 * head index and interlace bit; the OR itself only
			 * gets the head mask (0x0180 + or*0x20). */
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	/* Record the attachment so disconnect() knows there is work to do. */
	nv_encoder->crtc = encoder->crtc;
}
1465
/* Detach the DAC from its head (used for both .prepare and .disable):
 * quiesce the CRTC, clear the OR's head mask, and push an immediate
 * core-channel update.  No-op when the encoder is not attached. */
static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nvd0_mast *mast = nvd0_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			/* Zero head-mask = detached; method stride differs
			 * between pre-NVD0 (0x080) and NVD0+ (0x020). */
			if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}

			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
1495
/* Load-detect a monitor on the DAC via the display core's LOAD method.
 * A result of 7 (presumably all three R/G/B lines sensed -- confirm
 * against the NV50_DISP_DAC_LOAD implementation) means connected. */
static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nvd0_disp *disp = nvd0_disp(encoder->dev);
	int ret, or = nouveau_encoder(encoder)->or;
	u32 load = 0;

	ret = nv_exec(disp->core, NV50_DISP_DAC_LOAD + or, &load, sizeof(load));
	if (ret || load != 7)
		return connector_status_disconnected;

	return connector_status_connected;
}
1509
/* Undo drm_encoder_init() and free the containing nouveau_encoder
 * (allocated in nvd0_dac_create(); drm_encoder is embedded in it, so
 * freeing the drm_encoder pointer frees the whole object). */
static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
        drm_encoder_cleanup(encoder);
        kfree(encoder);
}
1516
/* Modeset helper vtable for DAC (analog) encoders.  Note that .prepare
 * and .disable both point at nvd0_dac_disconnect: the encoder is torn
 * down the same way whether it is about to be reprogrammed or switched
 * off entirely. */
static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
        .dpms = nvd0_dac_dpms,
        .mode_fixup = nvd0_dac_mode_fixup,
        .prepare = nvd0_dac_disconnect,
        .commit = nvd0_dac_commit,
        .mode_set = nvd0_dac_mode_set,
        .disable = nvd0_dac_disconnect,
        .get_crtc = nvd0_display_crtc_get,
        .detect = nvd0_dac_detect
};
1527
/* Base encoder vtable for DAC encoders; only destruction is needed. */
static const struct drm_encoder_funcs nvd0_dac_func = {
        .destroy = nvd0_dac_destroy,
};
1531
1532 static int
1533 nvd0_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
1534 {
1535         struct drm_device *dev = connector->dev;
1536         struct nouveau_encoder *nv_encoder;
1537         struct drm_encoder *encoder;
1538
1539         nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1540         if (!nv_encoder)
1541                 return -ENOMEM;
1542         nv_encoder->dcb = dcbe;
1543         nv_encoder->or = ffs(dcbe->or) - 1;
1544
1545         encoder = to_drm_encoder(nv_encoder);
1546         encoder->possible_crtcs = dcbe->heads;
1547         encoder->possible_clones = 0;
1548         drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
1549         drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);
1550
1551         drm_mode_connector_attach_encoder(connector, encoder);
1552         return 0;
1553 }
1554
1555 /******************************************************************************
1556  * Audio
1557  *****************************************************************************/
1558 static void
1559 nvd0_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1560 {
1561         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1562         struct nouveau_connector *nv_connector;
1563         struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1564
1565         nv_connector = nouveau_encoder_connector_get(nv_encoder);
1566         if (!drm_detect_monitor_audio(nv_connector->edid))
1567                 return;
1568
1569         drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
1570
1571         nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or,
1572                             nv_connector->base.eld,
1573                             nv_connector->base.eld[2] * 4);
1574 }
1575
/* Disable HDA audio on this encoder's OR by submitting a zero-length
 * ELD to the display engine. */
static void
nvd0_audio_disconnect(struct drm_encoder *encoder)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nvd0_disp *disp = nvd0_disp(encoder->dev);

        nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or, NULL, 0);
}
1584
1585 /******************************************************************************
1586  * HDMI
1587  *****************************************************************************/
/* Enable HDMI packet transmission on the SOR/head pair driving this
 * encoder, then set up audio (ELD upload).  Called from
 * nvd0_sor_mode_set() for TMDS outputs; no-op for DVI monitors. */
static void
nvd0_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
        struct nouveau_connector *nv_connector;
        struct nvd0_disp *disp = nvd0_disp(encoder->dev);
        /* method offset encodes both head index and OR number */
        const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
        u32 rekey = 56; /* binary driver, and tegra constant */
        u32 max_ac_packet;

        /* NOTE(review): assumes nouveau_encoder_connector_get() is non-NULL
         * here (we are mid-modeset, so a connector should be bound) --
         * confirm against callers before relying on it. */
        nv_connector = nouveau_encoder_connector_get(nv_encoder);
        if (!drm_detect_hdmi_monitor(nv_connector->edid))
                return;

        /* Budget for audio/control packets: the horizontal blanking
         * period minus the rekey window and a fixed overhead, in 32-unit
         * chunks.  Offsets mirror the binary driver and tegra. */
        max_ac_packet  = mode->htotal - mode->hdisplay;
        max_ac_packet -= rekey;
        max_ac_packet -= 18; /* constant from tegra */
        max_ac_packet /= 32;

        nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff,
                            NV84_DISP_SOR_HDMI_PWR_STATE_ON |
                            (max_ac_packet << 16) | rekey);

        /* audio rides on top of the HDMI stream */
        nvd0_audio_mode_set(encoder, mode);
}
1614
/* Disable HDMI packet transmission (and audio first) on the OR/head
 * pair.  Reads nv_encoder->crtc to compute the method offset, so this
 * must run before the caller clears that pointer (see
 * nvd0_sor_disconnect()). */
static void
nvd0_hdmi_disconnect(struct drm_encoder *encoder)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
        struct nvd0_disp *disp = nvd0_disp(encoder->dev);
        const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;

        nvd0_audio_disconnect(encoder);

        nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff, 0x00000000);
}
1627
1628 /******************************************************************************
1629  * SOR
1630  *****************************************************************************/
/* Change the power state of the SOR behind this encoder.  Two TMDS
 * encoders can share a single OR (same dcb->or); if the partner encoder
 * was left powered on, leave the shared OR untouched so its output
 * isn't killed. */
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct drm_device *dev = encoder->dev;
        struct nvd0_disp *disp = nvd0_disp(dev);
        struct drm_encoder *partner;
        int or = nv_encoder->or;

        /* record our state first so the partner scan below sees it */
        nv_encoder->last_dpms = mode;

        /* look for a TMDS encoder sharing the same OR */
        list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
                struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

                if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
                        continue;

                if (nv_partner != nv_encoder &&
                    nv_partner->dcb->or == nv_encoder->dcb->or) {
                        /* partner still active: don't touch shared OR power */
                        if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
                                return;
                        break;
                }
        }

        nv_call(disp->core, NV50_DISP_SOR_PWR + or, (mode == DRM_MODE_DPMS_ON));

        /* DisplayPort needs its own link power/training sequencing */
        if (nv_encoder->dcb->type == DCB_OUTPUT_DP)
                nouveau_dp_dpms(encoder, mode, nv_encoder->dp.datarate, disp->core);
}
1661
1662 static bool
1663 nvd0_sor_mode_fixup(struct drm_encoder *encoder,
1664                     const struct drm_display_mode *mode,
1665                     struct drm_display_mode *adjusted_mode)
1666 {
1667         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1668         struct nouveau_connector *nv_connector;
1669
1670         nv_connector = nouveau_encoder_connector_get(nv_encoder);
1671         if (nv_connector && nv_connector->native_mode) {
1672                 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1673                         int id = adjusted_mode->base.id;
1674                         *adjusted_mode = *nv_connector->native_mode;
1675                         adjusted_mode->base.id = id;
1676                 }
1677         }
1678
1679         return true;
1680 }
1681
1682 static void
1683 nvd0_sor_disconnect(struct drm_encoder *encoder)
1684 {
1685         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1686         struct nvd0_mast *mast = nvd0_mast(encoder->dev);
1687         const int or = nv_encoder->or;
1688         u32 *push;
1689
1690         if (nv_encoder->crtc) {
1691                 nvd0_crtc_prepare(nv_encoder->crtc);
1692
1693                 push = evo_wait(mast, 4);
1694                 if (push) {
1695                         if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
1696                                 evo_mthd(push, 0x0600 + (or * 0x40), 1);
1697                                 evo_data(push, 0x00000000);
1698                         } else {
1699                                 evo_mthd(push, 0x0200 + (or * 0x20), 1);
1700                                 evo_data(push, 0x00000000);
1701                         }
1702
1703                         evo_mthd(push, 0x0080, 1);
1704                         evo_data(push, 0x00000000);
1705                         evo_kick(push, mast);
1706                 }
1707
1708                 nvd0_hdmi_disconnect(encoder);
1709         }
1710
1711         nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1712         nv_encoder->crtc = NULL;
1713 }
1714
/* Modeset helper .prepare hook: detach the SOR before the new mode is
 * programmed.  For DP, additionally wait for the EVO channel to go idle
 * -- presumably so the detach has taken effect before link (re)training
 * starts; TODO confirm against the DP code path. */
static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
        nvd0_sor_disconnect(encoder);
        if (nouveau_encoder(encoder)->dcb->type == DCB_OUTPUT_DP)
                evo_sync(encoder->dev);
}
1722
/* Modeset helper .commit hook.  Intentionally empty: the OR is attached
 * to its head (and the update kicked) at the end of nvd0_sor_mode_set()
 * instead. */
static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}
1727
1728 static void
1729 nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
1730                   struct drm_display_mode *mode)
1731 {
1732         struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1733         struct nvd0_mast *mast = nvd0_mast(encoder->dev);
1734         struct drm_device *dev = encoder->dev;
1735         struct nouveau_drm *drm = nouveau_drm(dev);
1736         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1737         struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1738         struct nouveau_connector *nv_connector;
1739         struct nvbios *bios = &drm->vbios;
1740         u32 *push, lvds = 0;
1741         u8 owner = 1 << nv_crtc->index;
1742         u8 proto = 0xf;
1743         u8 depth = 0x0;
1744
1745         nv_connector = nouveau_encoder_connector_get(nv_encoder);
1746         switch (nv_encoder->dcb->type) {
1747         case DCB_OUTPUT_TMDS:
1748                 if (nv_encoder->dcb->sorconf.link & 1) {
1749                         if (mode->clock < 165000)
1750                                 proto = 0x1;
1751                         else
1752                                 proto = 0x5;
1753                 } else {
1754                         proto = 0x2;
1755                 }
1756
1757                 nvd0_hdmi_mode_set(encoder, mode);
1758                 break;
1759         case DCB_OUTPUT_LVDS:
1760                 proto = 0x0;
1761
1762                 if (bios->fp_no_ddc) {
1763                         if (bios->fp.dual_link)
1764                                 lvds |= 0x0100;
1765                         if (bios->fp.if_is_24bit)
1766                                 lvds |= 0x0200;
1767                 } else {
1768                         if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1769                                 if (((u8 *)nv_connector->edid)[121] == 2)
1770                                         lvds |= 0x0100;
1771                         } else
1772                         if (mode->clock >= bios->fp.duallink_transition_clk) {
1773                                 lvds |= 0x0100;
1774                         }
1775
1776                         if (lvds & 0x0100) {
1777                                 if (bios->fp.strapless_is_24bit & 2)
1778                                         lvds |= 0x0200;
1779                         } else {
1780                                 if (bios->fp.strapless_is_24bit & 1)
1781                                         lvds |= 0x0200;
1782                         }
1783
1784                         if (nv_connector->base.display_info.bpc == 8)
1785                                 lvds |= 0x0200;
1786                 }
1787
1788                 nv_call(disp->core, NV50_DISP_SOR_LVDS_SCRIPT + nv_encoder->or, lvds);
1789                 break;
1790         case DCB_OUTPUT_DP:
1791                 if (nv_connector->base.display_info.bpc == 6) {
1792                         nv_encoder->dp.datarate = mode->clock * 18 / 8;
1793                         depth = 0x2;
1794                 } else {
1795                         nv_encoder->dp.datarate = mode->clock * 24 / 8;
1796                         depth = 0x5;
1797                 }
1798
1799                 if (nv_encoder->dcb->sorconf.link & 1)
1800                         proto = 0x8;
1801                 else
1802                         proto = 0x9;
1803                 break;
1804         default:
1805                 BUG_ON(1);
1806                 break;
1807         }
1808
1809         nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);
1810
1811         push = evo_wait(nvd0_mast(dev), 8);
1812         if (push) {
1813                 if (nvd0_vers(mast) < NVD0_DISP_CLASS) {
1814                         evo_mthd(push, 0x0600 + (nv_encoder->or * 0x040), 1);
1815                         evo_data(push, (depth << 16) | (proto << 8) | owner);
1816                 } else {
1817                         u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1818                         u32 syncs = 0x00000001;
1819
1820                         if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1821                                 syncs |= 0x00000008;
1822                         if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1823                                 syncs |= 0x00000010;
1824
1825                         if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1826                                 magic |= 0x00000001;
1827
1828                         evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1829                         evo_data(push, syncs | (depth << 6));
1830                         evo_data(push, magic);
1831                         evo_mthd(push, 0x0200 + (nv_encoder->or * 0x020), 1);
1832                         evo_data(push, owner | (proto << 8));
1833                 }
1834
1835                 evo_kick(push, mast);
1836         }
1837
1838         nv_encoder->crtc = encoder->crtc;
1839 }
1840
/* Undo drm_encoder_init() and free the containing nouveau_encoder
 * (allocated in nvd0_sor_create()). */
static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
        drm_encoder_cleanup(encoder);
        kfree(encoder);
}
1847
/* Modeset helper vtable for SOR (TMDS/LVDS/DP) encoders.  Unlike the
 * DAC vtable there is no .detect hook here -- digital outputs are not
 * load-detected through the encoder. */
static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
        .dpms = nvd0_sor_dpms,
        .mode_fixup = nvd0_sor_mode_fixup,
        .prepare = nvd0_sor_prepare,
        .commit = nvd0_sor_commit,
        .mode_set = nvd0_sor_mode_set,
        .disable = nvd0_sor_disconnect,
        .get_crtc = nvd0_display_crtc_get,
};
1857
/* Base encoder vtable for SOR encoders; only destruction is needed. */
static const struct drm_encoder_funcs nvd0_sor_func = {
        .destroy = nvd0_sor_destroy,
};
1861
1862 static int
1863 nvd0_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
1864 {
1865         struct drm_device *dev = connector->dev;
1866         struct nouveau_encoder *nv_encoder;
1867         struct drm_encoder *encoder;
1868
1869         nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1870         if (!nv_encoder)
1871                 return -ENOMEM;
1872         nv_encoder->dcb = dcbe;
1873         nv_encoder->or = ffs(dcbe->or) - 1;
1874         nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1875
1876         encoder = to_drm_encoder(nv_encoder);
1877         encoder->possible_crtcs = dcbe->heads;
1878         encoder->possible_clones = 0;
1879         drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
1880         drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
1881
1882         drm_mode_connector_attach_encoder(connector, encoder);
1883         return 0;
1884 }
1885
1886 /******************************************************************************
1887  * Init
1888  *****************************************************************************/
/* Display shutdown hook (nouveau_display->fini).  Intentionally empty:
 * there is nothing to quiesce here for this display implementation;
 * teardown happens in nvd0_display_destroy(). */
void
nvd0_display_fini(struct drm_device *dev)
{
}
1893
1894 int
1895 nvd0_display_init(struct drm_device *dev)
1896 {
1897         u32 *push = evo_wait(nvd0_mast(dev), 32);
1898         if (push) {
1899                 evo_mthd(push, 0x0088, 1);
1900                 evo_data(push, NvEvoSync);
1901                 evo_kick(push, nvd0_mast(dev));
1902                 return evo_sync(dev);
1903         }
1904
1905         return -EBUSY;
1906 }
1907
/* Tear down everything nvd0_display_create() set up: the master EVO
 * channel, the shared sync buffer, and the disp struct itself.  Also
 * serves as the error-path cleanup for nvd0_display_create(). */
void
nvd0_display_destroy(struct drm_device *dev)
{
        struct nvd0_disp *disp = nvd0_disp(dev);

        nvd0_dmac_destroy(disp->core, &disp->mast.base);

        /* unmap before dropping the last reference */
        nouveau_bo_unmap(disp->sync);
        nouveau_bo_ref(NULL, &disp->sync);

        nouveau_display(dev)->priv = NULL;
        kfree(disp);
}
1921
/* Build the display engine state: allocate the shared sync buffer, bind
 * the newest supported EVO display class, create the master EVO channel,
 * then instantiate CRTCs, encoders and connectors from the VBIOS DCB
 * table.  Any failure after the disp struct is allocated unwinds through
 * nvd0_display_destroy() at the 'out' label.
 *
 * Returns 0 on success or a negative errno. */
int
nvd0_display_create(struct drm_device *dev)
{
        /* candidate display classes, newest first; first that binds wins */
        static const u16 oclass[] = {
                NVE0_DISP_CLASS,
                NVD0_DISP_CLASS,
                NVA3_DISP_CLASS,
                NV94_DISP_CLASS,
                NVA0_DISP_CLASS,
                NV84_DISP_CLASS,
                NV50_DISP_CLASS,
        };
        struct nouveau_device *device = nouveau_dev(dev);
        struct nouveau_drm *drm = nouveau_drm(dev);
        struct dcb_table *dcb = &drm->vbios.dcb;
        struct drm_connector *connector, *tmp;
        struct nvd0_disp *disp;
        struct dcb_output *dcbe;
        int crtcs, ret, i;

        disp = kzalloc(sizeof(*disp), GFP_KERNEL);
        if (!disp)
                return -ENOMEM;

        /* hook ourselves into the generic display layer */
        nouveau_display(dev)->priv = disp;
        nouveau_display(dev)->dtor = nvd0_display_destroy;
        nouveau_display(dev)->init = nvd0_display_init;
        nouveau_display(dev)->fini = nvd0_display_fini;

        /* small shared memory area we use for notifiers and semaphores */
        ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
                             0, 0x0000, NULL, &disp->sync);
        if (!ret) {
                ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
                if (!ret)
                        ret = nouveau_bo_map(disp->sync);
                if (ret)
                        /* pin/map failed: drop the BO before bailing */
                        nouveau_bo_ref(NULL, &disp->sync);
        }

        if (ret)
                goto out;

        /* attempt to allocate a supported evo display class */
        ret = -ENODEV;
        for (i = 0; ret && i < ARRAY_SIZE(oclass); i++) {
                ret = nouveau_object_new(nv_object(drm), NVDRM_DEVICE,
                                         0xd1500000, oclass[i], NULL, 0,
                                         &disp->core);
        }

        if (ret)
                goto out;

        /* allocate master evo channel */
        ret = nvd0_dmac_create(disp->core, NV50_DISP_MAST_CLASS, 0,
                              &(struct nv50_display_mast_class) {
                                        .pushbuf = EVO_PUSH_HANDLE(MAST, 0),
                              }, sizeof(struct nv50_display_mast_class),
                              disp->sync->bo.offset, &disp->mast.base);
        if (ret)
                goto out;

        /* create crtc objects to represent the hw heads */
        if (nv_mclass(disp->core) >= NVD0_DISP_CLASS)
                crtcs = nv_rd32(device, 0x022448); /* head count from hw */
        else
                crtcs = 2; /* pre-NVD0 hardware always has two heads */

        for (i = 0; i < crtcs; i++) {
                ret = nvd0_crtc_create(dev, disp->core, i);
                if (ret)
                        goto out;
        }

        /* create encoder/connector objects based on VBIOS DCB table */
        for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
                connector = nouveau_connector_create(dev, dcbe->connector);
                if (IS_ERR(connector))
                        continue;

                if (dcbe->location != DCB_LOC_ON_CHIP) {
                        NV_WARN(drm, "skipping off-chip encoder %d/%d\n",
                                dcbe->type, ffs(dcbe->or) - 1);
                        continue;
                }

                switch (dcbe->type) {
                case DCB_OUTPUT_TMDS:
                case DCB_OUTPUT_LVDS:
                case DCB_OUTPUT_DP:
                        nvd0_sor_create(connector, dcbe);
                        break;
                case DCB_OUTPUT_ANALOG:
                        nvd0_dac_create(connector, dcbe);
                        break;
                default:
                        NV_WARN(drm, "skipping unsupported encoder %d/%d\n",
                                dcbe->type, ffs(dcbe->or) - 1);
                        continue;
                }
        }

        /* cull any connectors we created that don't have an encoder */
        list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
                if (connector->encoder_ids[0])
                        continue;

                NV_WARN(drm, "%s has no encoders, removing\n",
                        drm_get_connector_name(connector));
                connector->funcs->destroy(connector);
        }

out:
        if (ret)
                nvd0_display_destroy(dev);
        return ret;
}