]> git.karo-electronics.de Git - karo-tx-linux.git/blob - drivers/gpu/drm/nouveau/nv50_display.c
drm/nouveau/kms/nv50: separate out vblank dmi commit
[karo-tx-linux.git] / drivers / gpu / drm / nouveau / nv50_display.c
1 /*
2  * Copyright 2011 Red Hat Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Ben Skeggs
23  */
24
25 #include <linux/dma-mapping.h>
26
27 #include <drm/drmP.h>
28 #include <drm/drm_atomic.h>
29 #include <drm/drm_crtc_helper.h>
30 #include <drm/drm_dp_helper.h>
31 #include <drm/drm_fb_helper.h>
32 #include <drm/drm_plane_helper.h>
33
34 #include <nvif/class.h>
35 #include <nvif/cl0002.h>
36 #include <nvif/cl5070.h>
37 #include <nvif/cl507a.h>
38 #include <nvif/cl507b.h>
39 #include <nvif/cl507c.h>
40 #include <nvif/cl507d.h>
41 #include <nvif/cl507e.h>
42
43 #include "nouveau_drv.h"
44 #include "nouveau_dma.h"
45 #include "nouveau_gem.h"
46 #include "nouveau_connector.h"
47 #include "nouveau_encoder.h"
48 #include "nouveau_crtc.h"
49 #include "nouveau_fence.h"
50 #include "nv50_display.h"
51
52 #define EVO_DMA_NR 9
53
54 #define EVO_MASTER  (0x00)
55 #define EVO_FLIP(c) (0x01 + (c))
56 #define EVO_OVLY(c) (0x05 + (c))
57 #define EVO_OIMM(c) (0x09 + (c))
58 #define EVO_CURS(c) (0x0d + (c))
59
60 /* offsets in shared sync bo of various structures */
61 #define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
62 #define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
63 #define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
64 #define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
65
66 /******************************************************************************
67  * Atomic state
68  *****************************************************************************/
69 #define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)
70
71 struct nv50_head_atom {
72         struct drm_crtc_state state;
73
74         struct {
75                 u16 iW;
76                 u16 iH;
77                 u16 oW;
78                 u16 oH;
79         } view;
80
81         struct nv50_head_mode {
82                 bool interlace;
83                 u32 clock;
84                 struct {
85                         u16 active;
86                         u16 synce;
87                         u16 blanke;
88                         u16 blanks;
89                 } h;
90                 struct {
91                         u32 active;
92                         u16 synce;
93                         u16 blanke;
94                         u16 blanks;
95                         u16 blank2s;
96                         u16 blank2e;
97                         u16 blankus;
98                 } v;
99         } mode;
100
101         struct {
102                 u32 handle;
103                 u64 offset:40;
104         } lut;
105
106         struct {
107                 bool visible;
108                 u32 handle;
109                 u64 offset:40;
110                 u8  format;
111                 u8  kind:7;
112                 u8  layout:1;
113                 u8  block:4;
114                 u32 pitch:20;
115                 u16 x;
116                 u16 y;
117                 u16 w;
118                 u16 h;
119         } core;
120
121         struct {
122                 bool visible;
123                 u32 handle;
124                 u64 offset:40;
125                 u8  layout:1;
126                 u8  format:1;
127         } curs;
128
129         struct {
130                 u8  depth;
131                 u8  cpp;
132                 u16 x;
133                 u16 y;
134                 u16 w;
135                 u16 h;
136         } base;
137
138         struct {
139                 u8 cpp;
140         } ovly;
141
142         struct {
143                 bool enable:1;
144                 u8 bits:2;
145                 u8 mode:4;
146         } dither;
147
148         struct {
149                 struct {
150                         u16 cos:12;
151                         u16 sin:12;
152                 } sat;
153         } procamp;
154
155         union {
156                 struct {
157                         bool core:1;
158                         bool curs:1;
159                 };
160                 u8 mask;
161         } clr;
162
163         union {
164                 struct {
165                         bool core:1;
166                         bool curs:1;
167                         bool view:1;
168                         bool mode:1;
169                         bool base:1;
170                         bool ovly:1;
171                         bool dither:1;
172                         bool procamp:1;
173                 };
174                 u16 mask;
175         } set;
176 };
177
178 /******************************************************************************
179  * EVO channel
180  *****************************************************************************/
181
182 struct nv50_chan {
183         struct nvif_object user;
184         struct nvif_device *device;
185 };
186
187 static int
188 nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
189                  const s32 *oclass, u8 head, void *data, u32 size,
190                  struct nv50_chan *chan)
191 {
192         struct nvif_sclass *sclass;
193         int ret, i, n;
194
195         chan->device = device;
196
197         ret = n = nvif_object_sclass_get(disp, &sclass);
198         if (ret < 0)
199                 return ret;
200
201         while (oclass[0]) {
202                 for (i = 0; i < n; i++) {
203                         if (sclass[i].oclass == oclass[0]) {
204                                 ret = nvif_object_init(disp, 0, oclass[0],
205                                                        data, size, &chan->user);
206                                 if (ret == 0)
207                                         nvif_object_map(&chan->user);
208                                 nvif_object_sclass_put(&sclass);
209                                 return ret;
210                         }
211                 }
212                 oclass++;
213         }
214
215         nvif_object_sclass_put(&sclass);
216         return -ENOSYS;
217 }
218
219 static void
220 nv50_chan_destroy(struct nv50_chan *chan)
221 {
222         nvif_object_fini(&chan->user);
223 }
224
225 /******************************************************************************
226  * PIO EVO channel
227  *****************************************************************************/
228
229 struct nv50_pioc {
230         struct nv50_chan base;
231 };
232
233 static void
234 nv50_pioc_destroy(struct nv50_pioc *pioc)
235 {
236         nv50_chan_destroy(&pioc->base);
237 }
238
239 static int
240 nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
241                  const s32 *oclass, u8 head, void *data, u32 size,
242                  struct nv50_pioc *pioc)
243 {
244         return nv50_chan_create(device, disp, oclass, head, data, size,
245                                 &pioc->base);
246 }
247
248 /******************************************************************************
249  * Cursor Immediate
250  *****************************************************************************/
251
252 struct nv50_curs {
253         struct nv50_pioc base;
254 };
255
256 static int
257 nv50_curs_create(struct nvif_device *device, struct nvif_object *disp,
258                  int head, struct nv50_curs *curs)
259 {
260         struct nv50_disp_cursor_v0 args = {
261                 .head = head,
262         };
263         static const s32 oclass[] = {
264                 GK104_DISP_CURSOR,
265                 GF110_DISP_CURSOR,
266                 GT214_DISP_CURSOR,
267                 G82_DISP_CURSOR,
268                 NV50_DISP_CURSOR,
269                 0
270         };
271
272         return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
273                                 &curs->base);
274 }
275
276 /******************************************************************************
277  * Overlay Immediate
278  *****************************************************************************/
279
280 struct nv50_oimm {
281         struct nv50_pioc base;
282 };
283
284 static int
285 nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
286                  int head, struct nv50_oimm *oimm)
287 {
288         struct nv50_disp_cursor_v0 args = {
289                 .head = head,
290         };
291         static const s32 oclass[] = {
292                 GK104_DISP_OVERLAY,
293                 GF110_DISP_OVERLAY,
294                 GT214_DISP_OVERLAY,
295                 G82_DISP_OVERLAY,
296                 NV50_DISP_OVERLAY,
297                 0
298         };
299
300         return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
301                                 &oimm->base);
302 }
303
304 /******************************************************************************
305  * DMA EVO channel
306  *****************************************************************************/
307
308 struct nv50_dmac {
309         struct nv50_chan base;
310         dma_addr_t handle;
311         u32 *ptr;
312
313         struct nvif_object sync;
314         struct nvif_object vram;
315
316         /* Protects against concurrent pushbuf access to this channel, lock is
317          * grabbed by evo_wait (if the pushbuf reservation is successful) and
318          * dropped again by evo_kick. */
319         struct mutex lock;
320 };
321
322 static void
323 nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
324 {
325         struct nvif_device *device = dmac->base.device;
326
327         nvif_object_fini(&dmac->vram);
328         nvif_object_fini(&dmac->sync);
329
330         nv50_chan_destroy(&dmac->base);
331
332         if (dmac->ptr) {
333                 struct device *dev = nvxx_device(device)->dev;
334                 dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
335         }
336 }
337
338 static int
339 nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
340                  const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
341                  struct nv50_dmac *dmac)
342 {
343         struct nv50_disp_core_channel_dma_v0 *args = data;
344         struct nvif_object pushbuf;
345         int ret;
346
347         mutex_init(&dmac->lock);
348
349         dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
350                                        &dmac->handle, GFP_KERNEL);
351         if (!dmac->ptr)
352                 return -ENOMEM;
353
354         ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
355                                &(struct nv_dma_v0) {
356                                         .target = NV_DMA_V0_TARGET_PCI_US,
357                                         .access = NV_DMA_V0_ACCESS_RD,
358                                         .start = dmac->handle + 0x0000,
359                                         .limit = dmac->handle + 0x0fff,
360                                }, sizeof(struct nv_dma_v0), &pushbuf);
361         if (ret)
362                 return ret;
363
364         args->pushbuf = nvif_handle(&pushbuf);
365
366         ret = nv50_chan_create(device, disp, oclass, head, data, size,
367                                &dmac->base);
368         nvif_object_fini(&pushbuf);
369         if (ret)
370                 return ret;
371
372         ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
373                                &(struct nv_dma_v0) {
374                                         .target = NV_DMA_V0_TARGET_VRAM,
375                                         .access = NV_DMA_V0_ACCESS_RDWR,
376                                         .start = syncbuf + 0x0000,
377                                         .limit = syncbuf + 0x0fff,
378                                }, sizeof(struct nv_dma_v0),
379                                &dmac->sync);
380         if (ret)
381                 return ret;
382
383         ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
384                                &(struct nv_dma_v0) {
385                                         .target = NV_DMA_V0_TARGET_VRAM,
386                                         .access = NV_DMA_V0_ACCESS_RDWR,
387                                         .start = 0,
388                                         .limit = device->info.ram_user - 1,
389                                }, sizeof(struct nv_dma_v0),
390                                &dmac->vram);
391         if (ret)
392                 return ret;
393
394         return ret;
395 }
396
397 /******************************************************************************
398  * Core
399  *****************************************************************************/
400
401 struct nv50_mast {
402         struct nv50_dmac base;
403 };
404
405 static int
406 nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
407                  u64 syncbuf, struct nv50_mast *core)
408 {
409         struct nv50_disp_core_channel_dma_v0 args = {
410                 .pushbuf = 0xb0007d00,
411         };
412         static const s32 oclass[] = {
413                 GP104_DISP_CORE_CHANNEL_DMA,
414                 GP100_DISP_CORE_CHANNEL_DMA,
415                 GM200_DISP_CORE_CHANNEL_DMA,
416                 GM107_DISP_CORE_CHANNEL_DMA,
417                 GK110_DISP_CORE_CHANNEL_DMA,
418                 GK104_DISP_CORE_CHANNEL_DMA,
419                 GF110_DISP_CORE_CHANNEL_DMA,
420                 GT214_DISP_CORE_CHANNEL_DMA,
421                 GT206_DISP_CORE_CHANNEL_DMA,
422                 GT200_DISP_CORE_CHANNEL_DMA,
423                 G82_DISP_CORE_CHANNEL_DMA,
424                 NV50_DISP_CORE_CHANNEL_DMA,
425                 0
426         };
427
428         return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
429                                 syncbuf, &core->base);
430 }
431
432 /******************************************************************************
433  * Base
434  *****************************************************************************/
435
436 struct nv50_sync {
437         struct nv50_dmac base;
438         u32 addr;
439         u32 data;
440 };
441
442 static int
443 nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
444                  int head, u64 syncbuf, struct nv50_sync *base)
445 {
446         struct nv50_disp_base_channel_dma_v0 args = {
447                 .pushbuf = 0xb0007c00 | head,
448                 .head = head,
449         };
450         static const s32 oclass[] = {
451                 GK110_DISP_BASE_CHANNEL_DMA,
452                 GK104_DISP_BASE_CHANNEL_DMA,
453                 GF110_DISP_BASE_CHANNEL_DMA,
454                 GT214_DISP_BASE_CHANNEL_DMA,
455                 GT200_DISP_BASE_CHANNEL_DMA,
456                 G82_DISP_BASE_CHANNEL_DMA,
457                 NV50_DISP_BASE_CHANNEL_DMA,
458                 0
459         };
460
461         return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
462                                 syncbuf, &base->base);
463 }
464
465 /******************************************************************************
466  * Overlay
467  *****************************************************************************/
468
469 struct nv50_ovly {
470         struct nv50_dmac base;
471 };
472
473 static int
474 nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
475                  int head, u64 syncbuf, struct nv50_ovly *ovly)
476 {
477         struct nv50_disp_overlay_channel_dma_v0 args = {
478                 .pushbuf = 0xb0007e00 | head,
479                 .head = head,
480         };
481         static const s32 oclass[] = {
482                 GK104_DISP_OVERLAY_CONTROL_DMA,
483                 GF110_DISP_OVERLAY_CONTROL_DMA,
484                 GT214_DISP_OVERLAY_CHANNEL_DMA,
485                 GT200_DISP_OVERLAY_CHANNEL_DMA,
486                 G82_DISP_OVERLAY_CHANNEL_DMA,
487                 NV50_DISP_OVERLAY_CHANNEL_DMA,
488                 0
489         };
490
491         return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
492                                 syncbuf, &ovly->base);
493 }
494
495 struct nv50_head {
496         struct nouveau_crtc base;
497         struct nouveau_bo *image;
498         struct nv50_curs curs;
499         struct nv50_sync sync;
500         struct nv50_ovly ovly;
501         struct nv50_oimm oimm;
502
503         struct nv50_head_atom arm;
504         struct nv50_head_atom asy;
505 };
506
507 #define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
508 #define nv50_curs(c) (&nv50_head(c)->curs)
509 #define nv50_sync(c) (&nv50_head(c)->sync)
510 #define nv50_ovly(c) (&nv50_head(c)->ovly)
511 #define nv50_oimm(c) (&nv50_head(c)->oimm)
512 #define nv50_chan(c) (&(c)->base.base)
513 #define nv50_vers(c) nv50_chan(c)->user.oclass
514
515 struct nv50_fbdma {
516         struct list_head head;
517         struct nvif_object core;
518         struct nvif_object base[4];
519 };
520
521 struct nv50_disp {
522         struct nvif_object *disp;
523         struct nv50_mast mast;
524
525         struct list_head fbdma;
526
527         struct nouveau_bo *sync;
528 };
529
530 static struct nv50_disp *
531 nv50_disp(struct drm_device *dev)
532 {
533         return nouveau_display(dev)->priv;
534 }
535
536 #define nv50_mast(d) (&nv50_disp(d)->mast)
537
538 static struct drm_crtc *
539 nv50_display_crtc_get(struct drm_encoder *encoder)
540 {
541         return nouveau_encoder(encoder)->crtc;
542 }
543
544 /******************************************************************************
545  * EVO channel helpers
546  *****************************************************************************/
547 static u32 *
548 evo_wait(void *evoc, int nr)
549 {
550         struct nv50_dmac *dmac = evoc;
551         struct nvif_device *device = dmac->base.device;
552         u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
553
554         mutex_lock(&dmac->lock);
555         if (put + nr >= (PAGE_SIZE / 4) - 8) {
556                 dmac->ptr[put] = 0x20000000;
557
558                 nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
559                 if (nvif_msec(device, 2000,
560                         if (!nvif_rd32(&dmac->base.user, 0x0004))
561                                 break;
562                 ) < 0) {
563                         mutex_unlock(&dmac->lock);
564                         printk(KERN_ERR "nouveau: evo channel stalled\n");
565                         return NULL;
566                 }
567
568                 put = 0;
569         }
570
571         return dmac->ptr + put;
572 }
573
574 static void
575 evo_kick(u32 *push, void *evoc)
576 {
577         struct nv50_dmac *dmac = evoc;
578         nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
579         mutex_unlock(&dmac->lock);
580 }
581
582 #define evo_mthd(p,m,s) do {                                                   \
583         const u32 _m = (m), _s = (s);                                          \
584         if (drm_debug & DRM_UT_KMS)                                            \
585                 printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__);             \
586         *((p)++) = ((_s << 18) | _m);                                          \
587 } while(0)
588
589 #define evo_data(p,d) do {                                                     \
590         const u32 _d = (d);                                                    \
591         if (drm_debug & DRM_UT_KMS)                                            \
592                 printk(KERN_ERR "\t%08x\n", _d);                               \
593         *((p)++) = _d;                                                         \
594 } while(0)
595
596 static bool
597 evo_sync_wait(void *data)
598 {
599         if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
600                 return true;
601         usleep_range(1, 2);
602         return false;
603 }
604
605 static int
606 evo_sync(struct drm_device *dev)
607 {
608         struct nvif_device *device = &nouveau_drm(dev)->device;
609         struct nv50_disp *disp = nv50_disp(dev);
610         struct nv50_mast *mast = nv50_mast(dev);
611         u32 *push = evo_wait(mast, 8);
612         if (push) {
613                 nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
614                 evo_mthd(push, 0x0084, 1);
615                 evo_data(push, 0x80000000 | EVO_MAST_NTFY);
616                 evo_mthd(push, 0x0080, 2);
617                 evo_data(push, 0x00000000);
618                 evo_data(push, 0x00000000);
619                 evo_kick(push, mast);
620                 if (nvif_msec(device, 2000,
621                         if (evo_sync_wait(disp->sync))
622                                 break;
623                 ) >= 0)
624                         return 0;
625         }
626
627         return -EBUSY;
628 }
629
630 /******************************************************************************
631  * Page flipping channel
632  *****************************************************************************/
633 struct nouveau_bo *
634 nv50_display_crtc_sema(struct drm_device *dev, int crtc)
635 {
636         return nv50_disp(dev)->sync;
637 }
638
639 struct nv50_display_flip {
640         struct nv50_disp *disp;
641         struct nv50_sync *chan;
642 };
643
644 static bool
645 nv50_display_flip_wait(void *data)
646 {
647         struct nv50_display_flip *flip = data;
648         if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) ==
649                                               flip->chan->data)
650                 return true;
651         usleep_range(1, 2);
652         return false;
653 }
654
655 void
656 nv50_display_flip_stop(struct drm_crtc *crtc)
657 {
658         struct nvif_device *device = &nouveau_drm(crtc->dev)->device;
659         struct nv50_display_flip flip = {
660                 .disp = nv50_disp(crtc->dev),
661                 .chan = nv50_sync(crtc),
662         };
663         u32 *push;
664
665         push = evo_wait(flip.chan, 8);
666         if (push) {
667                 evo_mthd(push, 0x0084, 1);
668                 evo_data(push, 0x00000000);
669                 evo_mthd(push, 0x0094, 1);
670                 evo_data(push, 0x00000000);
671                 evo_mthd(push, 0x00c0, 1);
672                 evo_data(push, 0x00000000);
673                 evo_mthd(push, 0x0080, 1);
674                 evo_data(push, 0x00000000);
675                 evo_kick(push, flip.chan);
676         }
677
678         nvif_msec(device, 2000,
679                 if (nv50_display_flip_wait(&flip))
680                         break;
681         );
682 }
683
684 int
685 nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
686                        struct nouveau_channel *chan, u32 swap_interval)
687 {
688         struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
689         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
690         struct nv50_head *head = nv50_head(crtc);
691         struct nv50_sync *sync = nv50_sync(crtc);
692         u32 *push;
693         int ret;
694
695         if (crtc->primary->fb->width != fb->width ||
696             crtc->primary->fb->height != fb->height)
697                 return -EINVAL;
698
699         swap_interval <<= 4;
700         if (swap_interval == 0)
701                 swap_interval |= 0x100;
702         if (chan == NULL)
703                 evo_sync(crtc->dev);
704
705         push = evo_wait(sync, 128);
706         if (unlikely(push == NULL))
707                 return -EBUSY;
708
709         if (chan && chan->user.oclass < G82_CHANNEL_GPFIFO) {
710                 ret = RING_SPACE(chan, 8);
711                 if (ret)
712                         return ret;
713
714                 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
715                 OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
716                 OUT_RING  (chan, sync->addr ^ 0x10);
717                 BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
718                 OUT_RING  (chan, sync->data + 1);
719                 BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
720                 OUT_RING  (chan, sync->addr);
721                 OUT_RING  (chan, sync->data);
722         } else
723         if (chan && chan->user.oclass < FERMI_CHANNEL_GPFIFO) {
724                 u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
725                 ret = RING_SPACE(chan, 12);
726                 if (ret)
727                         return ret;
728
729                 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
730                 OUT_RING  (chan, chan->vram.handle);
731                 BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
732                 OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
733                 OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
734                 OUT_RING  (chan, sync->data + 1);
735                 OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
736                 BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
737                 OUT_RING  (chan, upper_32_bits(addr));
738                 OUT_RING  (chan, lower_32_bits(addr));
739                 OUT_RING  (chan, sync->data);
740                 OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL);
741         } else
742         if (chan) {
743                 u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
744                 ret = RING_SPACE(chan, 10);
745                 if (ret)
746                         return ret;
747
748                 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
749                 OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
750                 OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
751                 OUT_RING  (chan, sync->data + 1);
752                 OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG |
753                                  NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
754                 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
755                 OUT_RING  (chan, upper_32_bits(addr));
756                 OUT_RING  (chan, lower_32_bits(addr));
757                 OUT_RING  (chan, sync->data);
758                 OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL |
759                                  NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
760         }
761
762         if (chan) {
763                 sync->addr ^= 0x10;
764                 sync->data++;
765                 FIRE_RING (chan);
766         }
767
768         /* queue the flip */
769         evo_mthd(push, 0x0100, 1);
770         evo_data(push, 0xfffe0000);
771         evo_mthd(push, 0x0084, 1);
772         evo_data(push, swap_interval);
773         if (!(swap_interval & 0x00000100)) {
774                 evo_mthd(push, 0x00e0, 1);
775                 evo_data(push, 0x40000000);
776         }
777         evo_mthd(push, 0x0088, 4);
778         evo_data(push, sync->addr);
779         evo_data(push, sync->data++);
780         evo_data(push, sync->data);
781         evo_data(push, sync->base.sync.handle);
782         evo_mthd(push, 0x00a0, 2);
783         evo_data(push, 0x00000000);
784         evo_data(push, 0x00000000);
785         evo_mthd(push, 0x00c0, 1);
786         evo_data(push, nv_fb->r_handle);
787         evo_mthd(push, 0x0110, 2);
788         evo_data(push, 0x00000000);
789         evo_data(push, 0x00000000);
790         if (nv50_vers(sync) < GF110_DISP_BASE_CHANNEL_DMA) {
791                 evo_mthd(push, 0x0800, 5);
792                 evo_data(push, nv_fb->nvbo->bo.offset >> 8);
793                 evo_data(push, 0);
794                 evo_data(push, (fb->height << 16) | fb->width);
795                 evo_data(push, nv_fb->r_pitch);
796                 evo_data(push, nv_fb->r_format);
797         } else {
798                 evo_mthd(push, 0x0400, 5);
799                 evo_data(push, nv_fb->nvbo->bo.offset >> 8);
800                 evo_data(push, 0);
801                 evo_data(push, (fb->height << 16) | fb->width);
802                 evo_data(push, nv_fb->r_pitch);
803                 evo_data(push, nv_fb->r_format);
804         }
805         evo_mthd(push, 0x0080, 1);
806         evo_data(push, 0x00000000);
807         evo_kick(push, sync);
808
809         nouveau_bo_ref(nv_fb->nvbo, &head->image);
810         return 0;
811 }
812
813 /******************************************************************************
814  * Head
815  *****************************************************************************/
816 static void
817 nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh)
818 {
819         struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
820         u32 *push;
821         if ((push = evo_wait(core, 2))) {
822                 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
823                         evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1);
824                 else
825                         evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1);
826                 evo_data(push, (asyh->procamp.sat.sin << 20) |
827                                (asyh->procamp.sat.cos << 8));
828                 evo_kick(push, core);
829         }
830 }
831
832 static void
833 nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh)
834 {
835         struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
836         u32 *push;
837         if ((push = evo_wait(core, 2))) {
838                 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
839                         evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1);
840                 else
841                 if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA)
842                         evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1);
843                 else
844                         evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1);
845                 evo_data(push, (asyh->dither.mode << 3) |
846                                (asyh->dither.bits << 1) |
847                                 asyh->dither.enable);
848                 evo_kick(push, core);
849         }
850 }
851
852 static void
853 nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh)
854 {
855         struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
856         u32 bounds = 0;
857         u32 *push;
858
859         if (asyh->base.cpp) {
860                 switch (asyh->base.cpp) {
861                 case 8: bounds |= 0x00000500; break;
862                 case 4: bounds |= 0x00000300; break;
863                 case 2: bounds |= 0x00000100; break;
864                 default:
865                         WARN_ON(1);
866                         break;
867                 }
868                 bounds |= 0x00000001;
869         }
870
871         if ((push = evo_wait(core, 2))) {
872                 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
873                         evo_mthd(push, 0x0904 + head->base.index * 0x400, 1);
874                 else
875                         evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1);
876                 evo_data(push, bounds);
877                 evo_kick(push, core);
878         }
879 }
880
881 static void
882 nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh)
883 {
884         struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
885         u32 bounds = 0;
886         u32 *push;
887
888         if (asyh->base.cpp) {
889                 switch (asyh->base.cpp) {
890                 case 8: bounds |= 0x00000500; break;
891                 case 4: bounds |= 0x00000300; break;
892                 case 2: bounds |= 0x00000100; break;
893                 case 1: bounds |= 0x00000000; break;
894                 default:
895                         WARN_ON(1);
896                         break;
897                 }
898                 bounds |= 0x00000001;
899         }
900
901         if ((push = evo_wait(core, 2))) {
902                 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
903                         evo_mthd(push, 0x0900 + head->base.index * 0x400, 1);
904                 else
905                         evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1);
906                 evo_data(push, bounds);
907                 evo_kick(push, core);
908         }
909 }
910
911 static void
912 nv50_head_curs_clr(struct nv50_head *head)
913 {
914         struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
915         u32 *push;
916         if ((push = evo_wait(core, 4))) {
917                 if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
918                         evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
919                         evo_data(push, 0x05000000);
920                 } else
921                 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
922                         evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
923                         evo_data(push, 0x05000000);
924                         evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
925                         evo_data(push, 0x00000000);
926                 } else {
927                         evo_mthd(push, 0x0480 + head->base.index * 0x300, 1);
928                         evo_data(push, 0x05000000);
929                         evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
930                         evo_data(push, 0x00000000);
931                 }
932                 evo_kick(push, core);
933         }
934 }
935
936 static void
937 nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh)
938 {
939         struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
940         u32 *push;
941         if ((push = evo_wait(core, 5))) {
942                 if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) {
943                         evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
944                         evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
945                                                     (asyh->curs.format << 24));
946                         evo_data(push, asyh->curs.offset >> 8);
947                 } else
948                 if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) {
949                         evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
950                         evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
951                                                     (asyh->curs.format << 24));
952                         evo_data(push, asyh->curs.offset >> 8);
953                         evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
954                         evo_data(push, asyh->curs.handle);
955                 } else {
956                         evo_mthd(push, 0x0480 + head->base.index * 0x300, 2);
957                         evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
958                                                     (asyh->curs.format << 24));
959                         evo_data(push, asyh->curs.offset >> 8);
960                         evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
961                         evo_data(push, asyh->curs.handle);
962                 }
963                 evo_kick(push, core);
964         }
965 }
966
967 static void
968 nv50_head_core_clr(struct nv50_head *head)
969 {
970         struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
971         u32 *push;
972         if ((push = evo_wait(core, 2))) {
973                 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
974                         evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
975                 else
976                         evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
977                 evo_data(push, 0x00000000);
978                 evo_kick(push, core);
979         }
980 }
981
982 static void
983 nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
984 {
985         struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
986         u32 *push;
987         if ((push = evo_wait(core, 9))) {
988                 if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
989                         evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
990                         evo_data(push, asyh->core.offset >> 8);
991                         evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
992                         evo_data(push, (asyh->core.h << 16) | asyh->core.w);
993                         evo_data(push, asyh->core.layout << 20 |
994                                        (asyh->core.pitch >> 8) << 8 |
995                                        asyh->core.block);
996                         evo_data(push, asyh->core.kind << 16 |
997                                        asyh->core.format << 8);
998                         evo_data(push, asyh->core.handle);
999                         evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
1000                         evo_data(push, (asyh->core.y << 16) | asyh->core.x);
1001                 } else
1002                 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
1003                         evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
1004                         evo_data(push, asyh->core.offset >> 8);
1005                         evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
1006                         evo_data(push, (asyh->core.h << 16) | asyh->core.w);
1007                         evo_data(push, asyh->core.layout << 20 |
1008                                        (asyh->core.pitch >> 8) << 8 |
1009                                        asyh->core.block);
1010                         evo_data(push, asyh->core.format << 8);
1011                         evo_data(push, asyh->core.handle);
1012                         evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
1013                         evo_data(push, (asyh->core.y << 16) | asyh->core.x);
1014                 } else {
1015                         evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
1016                         evo_data(push, asyh->core.offset >> 8);
1017                         evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
1018                         evo_data(push, (asyh->core.h << 16) | asyh->core.w);
1019                         evo_data(push, asyh->core.layout << 24 |
1020                                        (asyh->core.pitch >> 8) << 8 |
1021                                        asyh->core.block);
1022                         evo_data(push, asyh->core.format << 8);
1023                         evo_data(push, asyh->core.handle);
1024                         evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
1025                         evo_data(push, (asyh->core.y << 16) | asyh->core.x);
1026                 }
1027                 evo_kick(push, core);
1028         }
1029 }
1030
1031 static void
1032 nv50_head_lut_clr(struct nv50_head *head)
1033 {
1034         struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1035         u32 *push;
1036         if ((push = evo_wait(core, 4))) {
1037                 if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
1038                         evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
1039                         evo_data(push, 0x40000000);
1040                 } else
1041                 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
1042                         evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
1043                         evo_data(push, 0x40000000);
1044                         evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
1045                         evo_data(push, 0x00000000);
1046                 } else {
1047                         evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1);
1048                         evo_data(push, 0x03000000);
1049                         evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
1050                         evo_data(push, 0x00000000);
1051                 }
1052                 evo_kick(push, core);
1053         }
1054 }
1055
1056 static void
1057 nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh)
1058 {
1059         struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1060         u32 *push;
1061         if ((push = evo_wait(core, 7))) {
1062                 if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
1063                         evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
1064                         evo_data(push, 0xc0000000);
1065                         evo_data(push, asyh->lut.offset >> 8);
1066                 } else
1067                 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
1068                         evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
1069                         evo_data(push, 0xc0000000);
1070                         evo_data(push, asyh->lut.offset >> 8);
1071                         evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
1072                         evo_data(push, asyh->lut.handle);
1073                 } else {
1074                         evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4);
1075                         evo_data(push, 0x83000000);
1076                         evo_data(push, asyh->lut.offset >> 8);
1077                         evo_data(push, 0x00000000);
1078                         evo_data(push, 0x00000000);
1079                         evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
1080                         evo_data(push, asyh->lut.handle);
1081                 }
1082                 evo_kick(push, core);
1083         }
1084 }
1085
1086 static void
1087 nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
1088 {
1089         struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1090         struct nv50_head_mode *m = &asyh->mode;
1091         u32 *push;
1092         if ((push = evo_wait(core, 14))) {
1093                 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
1094                         evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
1095                         evo_data(push, 0x00800000 | m->clock);
1096                         evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
1097                         evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7);
1098                         evo_data(push, 0x00000000);
1099                         evo_data(push, (m->v.active  << 16) | m->h.active );
1100                         evo_data(push, (m->v.synce   << 16) | m->h.synce  );
1101                         evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
1102                         evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
1103                         evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
1104                         evo_data(push, asyh->mode.v.blankus);
1105                         evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
1106                         evo_data(push, 0x00000000);
1107                 } else {
1108                         evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
1109                         evo_data(push, 0x00000000);
1110                         evo_data(push, (m->v.active  << 16) | m->h.active );
1111                         evo_data(push, (m->v.synce   << 16) | m->h.synce  );
1112                         evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
1113                         evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
1114                         evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
1115                         evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
1116                         evo_data(push, 0x00000000); /* ??? */
1117                         evo_data(push, 0xffffff00);
1118                         evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
1119                         evo_data(push, m->clock * 1000);
1120                         evo_data(push, 0x00200000); /* ??? */
1121                         evo_data(push, m->clock * 1000);
1122                 }
1123                 evo_kick(push, core);
1124         }
1125 }
1126
1127 static void
1128 nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh)
1129 {
1130         struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1131         u32 *push;
1132         if ((push = evo_wait(core, 10))) {
1133                 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
1134                         evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1);
1135                         evo_data(push, 0x00000000);
1136                         evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1);
1137                         evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
1138                         evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2);
1139                         evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
1140                         evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
1141                 } else {
1142                         evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1);
1143                         evo_data(push, 0x00000000);
1144                         evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1);
1145                         evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
1146                         evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3);
1147                         evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
1148                         evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
1149                         evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
1150                 }
1151                 evo_kick(push, core);
1152         }
1153 }
1154
1155 static void
1156 nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y)
1157 {
1158         if (asyh->clr.core && (!asyh->set.core || y))
1159                 nv50_head_lut_clr(head);
1160         if (asyh->clr.core && (!asyh->set.core || y))
1161                 nv50_head_core_clr(head);
1162         if (asyh->clr.curs && (!asyh->set.curs || y))
1163                 nv50_head_curs_clr(head);
1164 }
1165
1166 static void
1167 nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
1168 {
1169         if (asyh->set.view   ) nv50_head_view    (head, asyh);
1170         if (asyh->set.mode   ) nv50_head_mode    (head, asyh);
1171         if (asyh->set.core   ) nv50_head_lut_set (head, asyh);
1172         if (asyh->set.core   ) nv50_head_core_set(head, asyh);
1173         if (asyh->set.curs   ) nv50_head_curs_set(head, asyh);
1174         if (asyh->set.base   ) nv50_head_base    (head, asyh);
1175         if (asyh->set.ovly   ) nv50_head_ovly    (head, asyh);
1176         if (asyh->set.dither ) nv50_head_dither  (head, asyh);
1177         if (asyh->set.procamp) nv50_head_procamp (head, asyh);
1178 }
1179
1180 static void
1181 nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
1182                                struct nv50_head_atom *asyh,
1183                                struct nouveau_conn_atom *asyc)
1184 {
1185         const int vib = asyc->procamp.color_vibrance - 100;
1186         const int hue = asyc->procamp.vibrant_hue - 90;
1187         const int adj = (vib > 0) ? 50 : 0;
1188         asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
1189         asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
1190         asyh->set.procamp = true;
1191 }
1192
1193 static void
1194 nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
1195                               struct nv50_head_atom *asyh,
1196                               struct nouveau_conn_atom *asyc)
1197 {
1198         struct drm_connector *connector = asyc->state.connector;
1199         u32 mode = 0x00;
1200
1201         if (asyc->dither.mode == DITHERING_MODE_AUTO) {
1202                 if (asyh->base.depth > connector->display_info.bpc * 3)
1203                         mode = DITHERING_MODE_DYNAMIC2X2;
1204         } else {
1205                 mode = asyc->dither.mode;
1206         }
1207
1208         if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
1209                 if (connector->display_info.bpc >= 8)
1210                         mode |= DITHERING_DEPTH_8BPC;
1211         } else {
1212                 mode |= asyc->dither.depth;
1213         }
1214
1215         asyh->dither.enable = mode;
1216         asyh->dither.bits = mode >> 1;
1217         asyh->dither.mode = mode >> 3;
1218         asyh->set.dither = true;
1219 }
1220
1221 static void
1222 nv50_head_atomic_check_view(struct nv50_head_atom *armh,
1223                             struct nv50_head_atom *asyh,
1224                             struct nouveau_conn_atom *asyc)
1225 {
1226         struct drm_connector *connector = asyc->state.connector;
1227         struct drm_display_mode *omode = &asyh->state.adjusted_mode;
1228         struct drm_display_mode *umode = &asyh->state.mode;
1229         int mode = asyc->scaler.mode;
1230         struct edid *edid;
1231
1232         if (connector->edid_blob_ptr)
1233                 edid = (struct edid *)connector->edid_blob_ptr->data;
1234         else
1235                 edid = NULL;
1236
1237         if (!asyc->scaler.full) {
1238                 if (mode == DRM_MODE_SCALE_NONE)
1239                         omode = umode;
1240         } else {
1241                 /* Non-EDID LVDS/eDP mode. */
1242                 mode = DRM_MODE_SCALE_FULLSCREEN;
1243         }
1244
1245         asyh->view.iW = umode->hdisplay;
1246         asyh->view.iH = umode->vdisplay;
1247         asyh->view.oW = omode->hdisplay;
1248         asyh->view.oH = omode->vdisplay;
1249         if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
1250                 asyh->view.oH *= 2;
1251
1252         /* Add overscan compensation if necessary, will keep the aspect
1253          * ratio the same as the backend mode unless overridden by the
1254          * user setting both hborder and vborder properties.
1255          */
1256         if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
1257             (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
1258              drm_detect_hdmi_monitor(edid)))) {
1259                 u32 bX = asyc->scaler.underscan.hborder;
1260                 u32 bY = asyc->scaler.underscan.vborder;
1261                 u32 r = (asyh->view.oH << 19) / asyh->view.oW;
1262
1263                 if (bX) {
1264                         asyh->view.oW -= (bX * 2);
1265                         if (bY) asyh->view.oH -= (bY * 2);
1266                         else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
1267                 } else {
1268                         asyh->view.oW -= (asyh->view.oW >> 4) + 32;
1269                         if (bY) asyh->view.oH -= (bY * 2);
1270                         else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
1271                 }
1272         }
1273
1274         /* Handle CENTER/ASPECT scaling, taking into account the areas
1275          * removed already for overscan compensation.
1276          */
1277         switch (mode) {
1278         case DRM_MODE_SCALE_CENTER:
1279                 asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
1280                 asyh->view.oH = min((u16)umode->vdisplay, asyh->view.oH);
1281                 /* fall-through */
1282         case DRM_MODE_SCALE_ASPECT:
1283                 if (asyh->view.oH < asyh->view.oW) {
1284                         u32 r = (asyh->view.iW << 19) / asyh->view.iH;
1285                         asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
1286                 } else {
1287                         u32 r = (asyh->view.iH << 19) / asyh->view.iW;
1288                         asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
1289                 }
1290                 break;
1291         default:
1292                 break;
1293         }
1294
1295         asyh->set.view = true;
1296 }
1297
1298 static void
1299 nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
1300 {
1301         struct drm_display_mode *mode = &asyh->state.adjusted_mode;
1302         u32 ilace   = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
1303         u32 vscan   = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
1304         u32 hbackp  =  mode->htotal - mode->hsync_end;
1305         u32 vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
1306         u32 hfrontp =  mode->hsync_start - mode->hdisplay;
1307         u32 vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
1308         struct nv50_head_mode *m = &asyh->mode;
1309
1310         m->h.active = mode->htotal;
1311         m->h.synce  = mode->hsync_end - mode->hsync_start - 1;
1312         m->h.blanke = m->h.synce + hbackp;
1313         m->h.blanks = mode->htotal - hfrontp - 1;
1314
1315         m->v.active = mode->vtotal * vscan / ilace;
1316         m->v.synce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
1317         m->v.blanke = m->v.synce + vbackp;
1318         m->v.blanks = m->v.active - vfrontp - 1;
1319
1320         /*XXX: Safe underestimate, even "0" works */
1321         m->v.blankus = (m->v.active - mode->vdisplay - 2) * m->h.active;
1322         m->v.blankus *= 1000;
1323         m->v.blankus /= mode->clock;
1324
1325         if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
1326                 m->v.blank2e =  m->v.active + m->v.synce + vbackp;
1327                 m->v.blank2s =  m->v.blank2e + (mode->vdisplay * vscan / ilace);
1328                 m->v.active  = (m->v.active * 2) + 1;
1329                 m->interlace = true;
1330         } else {
1331                 m->v.blank2e = 0;
1332                 m->v.blank2s = 1;
1333                 m->interlace = false;
1334         }
1335         m->clock = mode->clock;
1336
1337         drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V);
1338         asyh->set.mode = true;
1339 }
1340
1341 static int
1342 nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
1343 {
1344         struct nouveau_drm *drm = nouveau_drm(crtc->dev);
1345         struct nv50_disp *disp = nv50_disp(crtc->dev);
1346         struct nv50_head *head = nv50_head(crtc);
1347         struct nv50_head_atom *armh = &head->arm;
1348         struct nv50_head_atom *asyh = nv50_head_atom(state);
1349
1350         NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
1351         asyh->clr.mask = 0;
1352         asyh->set.mask = 0;
1353
1354         if (asyh->state.active) {
1355                 if (asyh->state.mode_changed)
1356                         nv50_head_atomic_check_mode(head, asyh);
1357
1358                 if ((asyh->core.visible = (asyh->base.cpp != 0))) {
1359                         asyh->core.x = asyh->base.x;
1360                         asyh->core.y = asyh->base.y;
1361                         asyh->core.w = asyh->base.w;
1362                         asyh->core.h = asyh->base.h;
1363                 } else
1364                 if ((asyh->core.visible = asyh->curs.visible)) {
1365                         /*XXX: We need to either find some way of having the
1366                          *     primary base layer appear black, while still
1367                          *     being able to display the other layers, or we
1368                          *     need to allocate a dummy black surface here.
1369                          */
1370                         asyh->core.x = 0;
1371                         asyh->core.y = 0;
1372                         asyh->core.w = asyh->state.mode.hdisplay;
1373                         asyh->core.h = asyh->state.mode.vdisplay;
1374                 }
1375                 asyh->core.handle = disp->mast.base.vram.handle;
1376                 asyh->core.offset = 0;
1377                 asyh->core.format = 0xcf;
1378                 asyh->core.kind = 0;
1379                 asyh->core.layout = 1;
1380                 asyh->core.block = 0;
1381                 asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
1382                 asyh->lut.handle = disp->mast.base.vram.handle;
1383                 asyh->lut.offset = head->base.lut.nvbo->bo.offset;
1384                 asyh->set.base = armh->base.cpp != asyh->base.cpp;
1385                 asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
1386         } else {
1387                 asyh->core.visible = false;
1388                 asyh->curs.visible = false;
1389                 asyh->base.cpp = 0;
1390                 asyh->ovly.cpp = 0;
1391         }
1392
1393         if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
1394                 if (asyh->core.visible) {
1395                         if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
1396                                 asyh->set.core = true;
1397                 } else
1398                 if (armh->core.visible) {
1399                         asyh->clr.core = true;
1400                 }
1401
1402                 if (asyh->curs.visible) {
1403                         if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
1404                                 asyh->set.curs = true;
1405                 } else
1406                 if (armh->curs.visible) {
1407                         asyh->clr.curs = true;
1408                 }
1409         } else {
1410                 asyh->clr.core = armh->core.visible;
1411                 asyh->clr.curs = armh->curs.visible;
1412                 asyh->set.core = asyh->core.visible;
1413                 asyh->set.curs = asyh->curs.visible;
1414         }
1415
1416         memcpy(armh, asyh, sizeof(*asyh));
1417         asyh->state.mode_changed = 0;
1418         return 0;
1419 }
1420
1421 /******************************************************************************
1422  * CRTC
1423  *****************************************************************************/
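     /* The legacy CRTC paths below fake up just enough connector/head atomic
      * state to reuse nv50_head_atomic_check() and the flush helpers.  The
      * evo_mthd(push, 0x0080, 1) / evo_data(push, 0x00000000) sequences push
      * what appears to be the core channel UPDATE method, which latches the
      * staged state into the hardware.
      */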
1424 static int
1425 nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
1426 {
1427         struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
1428         struct nv50_head *head = nv50_head(&nv_crtc->base);
1429         struct nv50_head_atom *asyh = &head->asy;
1430         struct nouveau_connector *nv_connector;
1431         struct nouveau_conn_atom asyc;
1432         u32 *push;
1433
1434         nv_connector = nouveau_crtc_connector_get(nv_crtc);
1435
1436         asyc.state.connector = &nv_connector->base;
1437         asyc.dither.mode = nv_connector->dithering_mode;
1438         asyc.dither.depth = nv_connector->dithering_depth;
1439         asyh->state.crtc = &nv_crtc->base;
1440         nv50_head_atomic_check(&head->base.base, &asyh->state);
1441         nv50_head_atomic_check_dither(&head->arm, asyh, &asyc);
1442         nv50_head_flush_set(head, asyh);
1443
1444         if (update) {
1445                 if ((push = evo_wait(mast, 2))) {
1446                         evo_mthd(push, 0x0080, 1);
1447                         evo_data(push, 0x00000000);
1448                         evo_kick(push, mast);
1449                 }
1450         }
1451
1452         return 0;
1453 }
1454
1455 static int
1456 nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
1457 {
1458         struct nv50_head *head = nv50_head(&nv_crtc->base);
1459         struct nv50_head_atom *asyh = &head->asy;
1460         struct drm_crtc *crtc = &nv_crtc->base;
1461         struct nouveau_connector *nv_connector;
1462         struct nouveau_conn_atom asyc;
1463
1464         nv_connector = nouveau_crtc_connector_get(nv_crtc);
1465
1466         asyc.state.connector = &nv_connector->base;
1467         asyc.scaler.mode = nv_connector->scaling_mode;
1468         asyc.scaler.full = nv_connector->scaling_full;
1469         asyc.scaler.underscan.mode = nv_connector->underscan;
1470         asyc.scaler.underscan.hborder = nv_connector->underscan_hborder;
1471         asyc.scaler.underscan.vborder = nv_connector->underscan_vborder;
1472         nv50_head_atomic_check(&head->base.base, &asyh->state);
1473         nv50_head_atomic_check_view(&head->arm, asyh, &asyc);
1474         nv50_head_flush_set(head, asyh);
1475
1476         if (update) {
1477                 nv50_display_flip_stop(crtc);
1478                 nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
1479         }
1480
1481         return 0;
1482 }
1483
1484 static int
1485 nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
1486 {
1487         struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
1488         struct nv50_head *head = nv50_head(&nv_crtc->base);
1489         struct nv50_head_atom *asyh = &head->asy;
1490         struct nouveau_conn_atom asyc;
1491         u32 *push;
1492
1493         asyc.procamp.color_vibrance = nv_crtc->color_vibrance + 100;
1494         asyc.procamp.vibrant_hue = nv_crtc->vibrant_hue + 90;
1495         nv50_head_atomic_check(&head->base.base, &asyh->state);
1496         nv50_head_atomic_check_procamp(&head->arm, asyh, &asyc);
1497         nv50_head_flush_set(head, asyh);
1498
1499         if (update) {
1500                 if ((push = evo_wait(mast, 2))) {
1501                         evo_mthd(push, 0x0080, 1);
1502                         evo_data(push, 0x00000000);
1503                         evo_kick(push, mast);
1504                 }
1505         }
1506
1507         return 0;
1508 }
1509
1510 static int
1511 nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
1512                     int x, int y, bool update)
1513 {
1514         struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
1515         struct nv50_head *head = nv50_head(&nv_crtc->base);
1516         struct nv50_head_atom *asyh = &head->asy;
1517         const struct drm_format_info *info;
1518
1519         info = drm_format_info(nvfb->base.pixel_format);
1520         if (!info || !info->depth)
1521                 return -EINVAL;
1522
1523         asyh->base.depth = info->depth;
1524         asyh->base.cpp = info->cpp[0];
1525         asyh->base.x = x;
1526         asyh->base.y = y;
1527         asyh->base.w = nvfb->base.width;
1528         asyh->base.h = nvfb->base.height;
1529         nv50_head_atomic_check(&head->base.base, &asyh->state);
1530         nv50_head_flush_set(head, asyh);
1531
1532         if (update) {
1533                 struct nv50_mast *core = nv50_mast(nv_crtc->base.dev);
1534                 u32 *push = evo_wait(core, 2);
1535                 if (push) {
1536                         evo_mthd(push, 0x0080, 1);
1537                         evo_data(push, 0x00000000);
1538                         evo_kick(push, core);
1539                 }
1540         }
1541
1542         nv_crtc->fb.handle = nvfb->r_handle;
1543         return 0;
1544 }
1545
1546 static void
1547 nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
1548 {
1549         struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
1550         struct nv50_head *head = nv50_head(&nv_crtc->base);
1551         struct nv50_head_atom *asyh = &head->asy;
1552
1553         asyh->curs.visible = true;
1554         asyh->curs.handle = mast->base.vram.handle;
1555         asyh->curs.offset = nv_crtc->cursor.nvbo->bo.offset;
1556         asyh->curs.layout = 1;
1557         asyh->curs.format = 1;
1558         nv50_head_atomic_check(&head->base.base, &asyh->state);
1559         nv50_head_flush_set(head, asyh);
1560 }
1561
1562 static void
1563 nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
1564 {
1565         struct nv50_head *head = nv50_head(&nv_crtc->base);
1566         struct nv50_head_atom *asyh = &head->asy;
1567
1568         asyh->curs.visible = false;
1569         nv50_head_atomic_check(&head->base.base, &asyh->state);
1570         nv50_head_flush_clr(head, asyh, false);
1571 }
1572
1573 static void
1574 nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
1575 {
1576         struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
1577
1578         if (show && nv_crtc->cursor.nvbo && nv_crtc->base.enabled)
1579                 nv50_crtc_cursor_show(nv_crtc);
1580         else
1581                 nv50_crtc_cursor_hide(nv_crtc);
1582
1583         if (update) {
1584                 u32 *push = evo_wait(mast, 2);
1585                 if (push) {
1586                         evo_mthd(push, 0x0080, 1);
1587                         evo_data(push, 0x00000000);
1588                         evo_kick(push, mast);
1589                 }
1590         }
1591 }
1592
1593 static void
1594 nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
1595 {
1596 }
1597
1598 static void
1599 nv50_crtc_prepare(struct drm_crtc *crtc)
1600 {
1601         struct nv50_head *head = nv50_head(crtc);
1602         struct nv50_head_atom *asyh = &head->asy;
1603
1604         nv50_display_flip_stop(crtc);
1605
1606         asyh->state.active = false;
1607         nv50_head_atomic_check(&head->base.base, &asyh->state);
1608         nv50_head_flush_clr(head, asyh, false);
1609 }
1610
1611 static void
1612 nv50_crtc_commit(struct drm_crtc *crtc)
1613 {
1614         struct nv50_head *head = nv50_head(crtc);
1615         struct nv50_head_atom *asyh = &head->asy;
1616
1617         asyh->state.active = true;
1618         nv50_head_atomic_check(&head->base.base, &asyh->state);
1619         nv50_head_flush_set(head, asyh);
1620
1621         nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
1622 }
1623
1624 static bool
1625 nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
1626                      struct drm_display_mode *adjusted_mode)
1627 {
1628         drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
1629         return true;
1630 }
1631
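     /* Pin the new primary framebuffer into VRAM and release the pin and
      * reference held on the image this head was previously scanning out.
      */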
1632 static int
1633 nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
1634 {
1635         struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb);
1636         struct nv50_head *head = nv50_head(crtc);
1637         int ret;
1638
1639         ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM, true);
1640         if (ret == 0) {
1641                 if (head->image)
1642                         nouveau_bo_unpin(head->image);
1643                 nouveau_bo_ref(nvfb->nvbo, &head->image);
1644         }
1645
1646         return ret;
1647 }
1648
1649 static int
1650 nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
1651                    struct drm_display_mode *mode, int x, int y,
1652                    struct drm_framebuffer *old_fb)
1653 {
1654         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1655         struct nouveau_connector *nv_connector;
1656         int ret;
1657         struct nv50_head *head = nv50_head(crtc);
1658         struct nv50_head_atom *asyh = &head->asy;
1659
1660         memcpy(&asyh->state.mode, umode, sizeof(*umode));
1661         memcpy(&asyh->state.adjusted_mode, mode, sizeof(*mode));
1662         asyh->state.active = true;
1663         asyh->state.mode_changed = true;
1664         nv50_head_atomic_check(&head->base.base, &asyh->state);
1665
1666         ret = nv50_crtc_swap_fbs(crtc, old_fb);
1667         if (ret)
1668                 return ret;
1669
1670         nv50_head_flush_set(head, asyh);
1671
1672         nv_connector = nouveau_crtc_connector_get(nv_crtc);
1673         nv50_crtc_set_dither(nv_crtc, false);
1674         nv50_crtc_set_scale(nv_crtc, false);
1675
1676         nv50_crtc_set_color_vibrance(nv_crtc, false);
1677         nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
1678         return 0;
1679 }
1680
1681 static int
1682 nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
1683                         struct drm_framebuffer *old_fb)
1684 {
1685         struct nouveau_drm *drm = nouveau_drm(crtc->dev);
1686         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1687         int ret;
1688
1689         if (!crtc->primary->fb) {
1690                 NV_DEBUG(drm, "No FB bound\n");
1691                 return 0;
1692         }
1693
1694         ret = nv50_crtc_swap_fbs(crtc, old_fb);
1695         if (ret)
1696                 return ret;
1697
1698         nv50_display_flip_stop(crtc);
1699         nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
1700         nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
1701         return 0;
1702 }
1703
1704 static int
1705 nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
1706                                struct drm_framebuffer *fb, int x, int y,
1707                                enum mode_set_atomic state)
1708 {
1709         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1710         nv50_display_flip_stop(crtc);
1711         nv50_crtc_set_image(nv_crtc, fb, x, y, true);
1712         return 0;
1713 }
1714
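     /* Write the 256-entry gamma LUT into the mapped LUT buffer.  The stored
      * 16-bit components are truncated to 14 bits; pre-GF110 hardware takes
      * 8-byte entries, while GF110 and later use 32-byte entries with 0x6000
      * added to each component (presumably selecting the wider LUT format).
      */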
1715 static void
1716 nv50_crtc_lut_load(struct drm_crtc *crtc)
1717 {
1718         struct nv50_disp *disp = nv50_disp(crtc->dev);
1719         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1720         void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
1721         int i;
1722
1723         for (i = 0; i < 256; i++) {
1724                 u16 r = nv_crtc->lut.r[i] >> 2;
1725                 u16 g = nv_crtc->lut.g[i] >> 2;
1726                 u16 b = nv_crtc->lut.b[i] >> 2;
1727
1728                 if (disp->disp->oclass < GF110_DISP) {
1729                         writew(r + 0x0000, lut + (i * 0x08) + 0);
1730                         writew(g + 0x0000, lut + (i * 0x08) + 2);
1731                         writew(b + 0x0000, lut + (i * 0x08) + 4);
1732                 } else {
1733                         writew(r + 0x6000, lut + (i * 0x20) + 0);
1734                         writew(g + 0x6000, lut + (i * 0x20) + 2);
1735                         writew(b + 0x6000, lut + (i * 0x20) + 4);
1736                 }
1737         }
1738 }
1739
1740 static void
1741 nv50_crtc_disable(struct drm_crtc *crtc)
1742 {
1743         struct nv50_head *head = nv50_head(crtc);
1744         evo_sync(crtc->dev);
1745         if (head->image)
1746                 nouveau_bo_unpin(head->image);
1747         nouveau_bo_ref(NULL, &head->image);
1748 }
1749
1750 static int
1751 nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
1752                      uint32_t handle, uint32_t width, uint32_t height)
1753 {
1754         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1755         struct drm_gem_object *gem = NULL;
1756         struct nouveau_bo *nvbo = NULL;
1757         int ret = 0;
1758
1759         if (handle) {
1760                 if (width != 64 || height != 64)
1761                         return -EINVAL;
1762
1763                 gem = drm_gem_object_lookup(file_priv, handle);
1764                 if (unlikely(!gem))
1765                         return -ENOENT;
1766                 nvbo = nouveau_gem_object(gem);
1767
1768                 ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM, true);
1769         }
1770
1771         if (ret == 0) {
1772                 if (nv_crtc->cursor.nvbo)
1773                         nouveau_bo_unpin(nv_crtc->cursor.nvbo);
1774                 nouveau_bo_ref(nvbo, &nv_crtc->cursor.nvbo);
1775         }
1776         drm_gem_object_unreference_unlocked(gem);
1777
1778         nv50_crtc_cursor_show_hide(nv_crtc, true, true);
1779         return ret;
1780 }
1781
1782 static int
1783 nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
1784 {
1785         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1786         struct nv50_curs *curs = nv50_curs(crtc);
1787         struct nv50_chan *chan = nv50_chan(curs);
1788         nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff));
1789         nvif_wr32(&chan->user, 0x0080, 0x00000000);
1790
1791         nv_crtc->cursor_saved_x = x;
1792         nv_crtc->cursor_saved_y = y;
1793         return 0;
1794 }
1795
1796 static int
1797 nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1798                     uint32_t size)
1799 {
1800         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1801         u32 i;
1802
1803         for (i = 0; i < size; i++) {
1804                 nv_crtc->lut.r[i] = r[i];
1805                 nv_crtc->lut.g[i] = g[i];
1806                 nv_crtc->lut.b[i] = b[i];
1807         }
1808
1809         nv50_crtc_lut_load(crtc);
1810
1811         return 0;
1812 }
1813
1814 static void
1815 nv50_crtc_cursor_restore(struct nouveau_crtc *nv_crtc, int x, int y)
1816 {
1817         nv50_crtc_cursor_move(&nv_crtc->base, x, y);
1818
1819         nv50_crtc_cursor_show_hide(nv_crtc, true, true);
1820 }
1821
1822 static void
1823 nv50_crtc_destroy(struct drm_crtc *crtc)
1824 {
1825         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1826         struct nv50_disp *disp = nv50_disp(crtc->dev);
1827         struct nv50_head *head = nv50_head(crtc);
1828         struct nv50_fbdma *fbdma;
1829
1830         list_for_each_entry(fbdma, &disp->fbdma, head) {
1831                 nvif_object_fini(&fbdma->base[nv_crtc->index]);
1832         }
1833
1834         nv50_dmac_destroy(&head->ovly.base, disp->disp);
1835         nv50_pioc_destroy(&head->oimm.base);
1836         nv50_dmac_destroy(&head->sync.base, disp->disp);
1837         nv50_pioc_destroy(&head->curs.base);
1838
1839         /*XXX: this shouldn't be necessary, but the core doesn't call
1840          *     disconnect() during the cleanup paths
1841          */
1842         if (head->image)
1843                 nouveau_bo_unpin(head->image);
1844         nouveau_bo_ref(NULL, &head->image);
1845
1846         /*XXX: ditto */
1847         if (nv_crtc->cursor.nvbo)
1848                 nouveau_bo_unpin(nv_crtc->cursor.nvbo);
1849         nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
1850
1851         nouveau_bo_unmap(nv_crtc->lut.nvbo);
1852         if (nv_crtc->lut.nvbo)
1853                 nouveau_bo_unpin(nv_crtc->lut.nvbo);
1854         nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
1855
1856         drm_crtc_cleanup(crtc);
1857         kfree(crtc);
1858 }
1859
1860 static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
1861         .dpms = nv50_crtc_dpms,
1862         .prepare = nv50_crtc_prepare,
1863         .commit = nv50_crtc_commit,
1864         .mode_fixup = nv50_crtc_mode_fixup,
1865         .mode_set = nv50_crtc_mode_set,
1866         .mode_set_base = nv50_crtc_mode_set_base,
1867         .mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
1868         .load_lut = nv50_crtc_lut_load,
1869         .disable = nv50_crtc_disable,
1870 };
1871
1872 static const struct drm_crtc_funcs nv50_crtc_func = {
1873         .cursor_set = nv50_crtc_cursor_set,
1874         .cursor_move = nv50_crtc_cursor_move,
1875         .gamma_set = nv50_crtc_gamma_set,
1876         .set_config = nouveau_crtc_set_config,
1877         .destroy = nv50_crtc_destroy,
1878         .page_flip = nouveau_crtc_page_flip,
1879 };
1880
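     /* Create one head: allocate the CRTC, a small VRAM buffer for its
      * 256-entry gamma LUT, and the per-head EVO channels (cursor, base/page
      * flip, overlay immediate and overlay).  Any failure unwinds through
      * nv50_crtc_destroy().
      */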
1881 static int
1882 nv50_crtc_create(struct drm_device *dev, int index)
1883 {
1884         struct nouveau_drm *drm = nouveau_drm(dev);
1885         struct nvif_device *device = &drm->device;
1886         struct nv50_disp *disp = nv50_disp(dev);
1887         struct nv50_head *head;
1888         struct drm_crtc *crtc;
1889         int ret, i;
1890
1891         head = kzalloc(sizeof(*head), GFP_KERNEL);
1892         if (!head)
1893                 return -ENOMEM;
1894
1895         head->base.index = index;
1896         head->base.color_vibrance = 50;
1897         head->base.vibrant_hue = 0;
1898         head->base.cursor.set_pos = nv50_crtc_cursor_restore;
1899         for (i = 0; i < 256; i++) {
1900                 head->base.lut.r[i] = i << 8;
1901                 head->base.lut.g[i] = i << 8;
1902                 head->base.lut.b[i] = i << 8;
1903         }
1904
1905         crtc = &head->base.base;
1906         drm_crtc_init(dev, crtc, &nv50_crtc_func);
1907         drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
1908         drm_mode_crtc_set_gamma_size(crtc, 256);
1909
1910         ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
1911                              0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
1912         if (!ret) {
1913                 ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
1914                 if (!ret) {
1915                         ret = nouveau_bo_map(head->base.lut.nvbo);
1916                         if (ret)
1917                                 nouveau_bo_unpin(head->base.lut.nvbo);
1918                 }
1919                 if (ret)
1920                         nouveau_bo_ref(NULL, &head->base.lut.nvbo);
1921         }
1922
1923         if (ret)
1924                 goto out;
1925
1926         /* allocate cursor resources */
1927         ret = nv50_curs_create(device, disp->disp, index, &head->curs);
1928         if (ret)
1929                 goto out;
1930
1931         /* allocate page flip / sync resources */
1932         ret = nv50_base_create(device, disp->disp, index, disp->sync->bo.offset,
1933                                &head->sync);
1934         if (ret)
1935                 goto out;
1936
1937         head->sync.addr = EVO_FLIP_SEM0(index);
1938         head->sync.data = 0x00000000;
1939
1940         /* allocate overlay resources */
1941         ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
1942         if (ret)
1943                 goto out;
1944
1945         ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
1946                                &head->ovly);
1947         if (ret)
1948                 goto out;
1949
1950 out:
1951         if (ret)
1952                 nv50_crtc_destroy(crtc);
1953         return ret;
1954 }
1955
1956 /******************************************************************************
1957  * Encoder helpers
1958  *****************************************************************************/
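     /* Shared mode_fixup for the encoder types below: when the connector has
      * a native (panel) mode it normally replaces the requested mode, with
      * the DRM_MODE_SCALE_NONE handling deciding whether the head's scaler
      * does full-screen scaling or non-EDID modes are passed through
      * unchanged.
      */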
1959 static bool
1960 nv50_encoder_mode_fixup(struct drm_encoder *encoder,
1961                         const struct drm_display_mode *mode,
1962                         struct drm_display_mode *adjusted_mode)
1963 {
1964         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1965         struct nouveau_connector *nv_connector;
1966
1967         nv_connector = nouveau_encoder_connector_get(nv_encoder);
1968         if (nv_connector && nv_connector->native_mode) {
1969                 nv_connector->scaling_full = false;
1970                 if (nv_connector->scaling_mode == DRM_MODE_SCALE_NONE) {
1971                         switch (nv_connector->type) {
1972                         case DCB_CONNECTOR_LVDS:
1973                         case DCB_CONNECTOR_LVDS_SPWG:
1974                         case DCB_CONNECTOR_eDP:
1975                                 /* force use of scaler for non-EDID modes */
1976                                 if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
1977                                         return true;
1978                                 nv_connector->scaling_full = true;
1979                                 break;
1980                         default:
1981                                 return true;
1982                         }
1983                 }
1984
1985                 drm_mode_copy(adjusted_mode, nv_connector->native_mode);
1986         }
1987
1988         return true;
1989 }
1990
1991 /******************************************************************************
1992  * DAC
1993  *****************************************************************************/
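     /* DAC power is driven through the NV50_DISP_MTHD_V1_DAC_PWR method;
      * hsync/vsync are gated individually per DPMS level (both stop only for
      * DPMS_OFF), which is how the monitor tells standby and suspend apart.
      */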
1994 static void
1995 nv50_dac_dpms(struct drm_encoder *encoder, int mode)
1996 {
1997         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1998         struct nv50_disp *disp = nv50_disp(encoder->dev);
1999         struct {
2000                 struct nv50_disp_mthd_v1 base;
2001                 struct nv50_disp_dac_pwr_v0 pwr;
2002         } args = {
2003                 .base.version = 1,
2004                 .base.method = NV50_DISP_MTHD_V1_DAC_PWR,
2005                 .base.hasht  = nv_encoder->dcb->hasht,
2006                 .base.hashm  = nv_encoder->dcb->hashm,
2007                 .pwr.state = 1,
2008                 .pwr.data  = 1,
2009                 .pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
2010                               mode != DRM_MODE_DPMS_OFF),
2011                 .pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
2012                               mode != DRM_MODE_DPMS_OFF),
2013         };
2014
2015         nvif_mthd(disp->disp, 0, &args, sizeof(args));
2016 }
2017
2018 static void
2019 nv50_dac_commit(struct drm_encoder *encoder)
2020 {
2021 }
2022
2023 static void
2024 nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
2025                   struct drm_display_mode *adjusted_mode)
2026 {
2027         struct nv50_mast *mast = nv50_mast(encoder->dev);
2028         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2029         struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2030         u32 *push;
2031
2032         nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);
2033
2034         push = evo_wait(mast, 8);
2035         if (push) {
2036                 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2037                         u32 syncs = 0x00000000;
2038
2039                         if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2040                                 syncs |= 0x00000001;
2041                         if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2042                                 syncs |= 0x00000002;
2043
2044                         evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
2045                         evo_data(push, 1 << nv_crtc->index);
2046                         evo_data(push, syncs);
2047                 } else {
2048                         u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
2049                         u32 syncs = 0x00000001;
2050
2051                         if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2052                                 syncs |= 0x00000008;
2053                         if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2054                                 syncs |= 0x00000010;
2055
2056                         if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2057                                 magic |= 0x00000001;
2058
2059                         evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
2060                         evo_data(push, syncs);
2061                         evo_data(push, magic);
2062                         evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
2063                         evo_data(push, 1 << nv_crtc->index);
2064                 }
2065
2066                 evo_kick(push, mast);
2067         }
2068
2069         nv_encoder->crtc = encoder->crtc;
2070 }
2071
2072 static void
2073 nv50_dac_disconnect(struct drm_encoder *encoder)
2074 {
2075         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2076         struct nv50_mast *mast = nv50_mast(encoder->dev);
2077         const int or = nv_encoder->or;
2078         u32 *push;
2079
2080         if (nv_encoder->crtc) {
2081                 nv50_crtc_prepare(nv_encoder->crtc);
2082
2083                 push = evo_wait(mast, 4);
2084                 if (push) {
2085                         if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2086                                 evo_mthd(push, 0x0400 + (or * 0x080), 1);
2087                                 evo_data(push, 0x00000000);
2088                         } else {
2089                                 evo_mthd(push, 0x0180 + (or * 0x020), 1);
2090                                 evo_data(push, 0x00000000);
2091                         }
2092                         evo_kick(push, mast);
2093                 }
2094         }
2095
2096         nv_encoder->crtc = NULL;
2097 }
2098
2099 static enum drm_connector_status
2100 nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
2101 {
2102         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2103         struct nv50_disp *disp = nv50_disp(encoder->dev);
2104         struct {
2105                 struct nv50_disp_mthd_v1 base;
2106                 struct nv50_disp_dac_load_v0 load;
2107         } args = {
2108                 .base.version = 1,
2109                 .base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
2110                 .base.hasht  = nv_encoder->dcb->hasht,
2111                 .base.hashm  = nv_encoder->dcb->hashm,
2112         };
2113         int ret;
2114
2115         args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
2116         if (args.load.data == 0)
2117                 args.load.data = 340;
2118
2119         ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
2120         if (ret || !args.load.load)
2121                 return connector_status_disconnected;
2122
2123         return connector_status_connected;
2124 }
2125
2126 static void
2127 nv50_dac_destroy(struct drm_encoder *encoder)
2128 {
2129         drm_encoder_cleanup(encoder);
2130         kfree(encoder);
2131 }
2132
2133 static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
2134         .dpms = nv50_dac_dpms,
2135         .mode_fixup = nv50_encoder_mode_fixup,
2136         .prepare = nv50_dac_disconnect,
2137         .commit = nv50_dac_commit,
2138         .mode_set = nv50_dac_mode_set,
2139         .disable = nv50_dac_disconnect,
2140         .get_crtc = nv50_display_crtc_get,
2141         .detect = nv50_dac_detect
2142 };
2143
2144 static const struct drm_encoder_funcs nv50_dac_func = {
2145         .destroy = nv50_dac_destroy,
2146 };
2147
2148 static int
2149 nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
2150 {
2151         struct nouveau_drm *drm = nouveau_drm(connector->dev);
2152         struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
2153         struct nvkm_i2c_bus *bus;
2154         struct nouveau_encoder *nv_encoder;
2155         struct drm_encoder *encoder;
2156         int type = DRM_MODE_ENCODER_DAC;
2157
2158         nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2159         if (!nv_encoder)
2160                 return -ENOMEM;
2161         nv_encoder->dcb = dcbe;
2162         nv_encoder->or = ffs(dcbe->or) - 1;
2163
2164         bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
2165         if (bus)
2166                 nv_encoder->i2c = &bus->i2c;
2167
2168         encoder = to_drm_encoder(nv_encoder);
2169         encoder->possible_crtcs = dcbe->heads;
2170         encoder->possible_clones = 0;
2171         drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
2172                          "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
2173         drm_encoder_helper_add(encoder, &nv50_dac_hfunc);
2174
2175         drm_mode_connector_attach_encoder(connector, encoder);
2176         return 0;
2177 }
2178
2179 /******************************************************************************
2180  * Audio
2181  *****************************************************************************/
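     /* HDA ELD handling: if the monitor's EDID advertises audio, the EDID is
      * converted to an ELD and passed to the display engine with the
      * NV50_DISP_MTHD_V1_SOR_HDA_ELD method.  Sending the method without ELD
      * data (see nv50_audio_disconnect()) presumably shuts the audio
      * function back off.
      */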
2182 static void
2183 nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
2184 {
2185         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2186         struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2187         struct nouveau_connector *nv_connector;
2188         struct nv50_disp *disp = nv50_disp(encoder->dev);
2189         struct __packed {
2190                 struct {
2191                         struct nv50_disp_mthd_v1 mthd;
2192                         struct nv50_disp_sor_hda_eld_v0 eld;
2193                 } base;
2194                 u8 data[sizeof(nv_connector->base.eld)];
2195         } args = {
2196                 .base.mthd.version = 1,
2197                 .base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
2198                 .base.mthd.hasht   = nv_encoder->dcb->hasht,
2199                 .base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
2200                                      (0x0100 << nv_crtc->index),
2201         };
2202
2203         nv_connector = nouveau_encoder_connector_get(nv_encoder);
2204         if (!drm_detect_monitor_audio(nv_connector->edid))
2205                 return;
2206
2207         drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
2208         memcpy(args.data, nv_connector->base.eld, sizeof(args.data));
2209
2210         nvif_mthd(disp->disp, 0, &args,
2211                   sizeof(args.base) + drm_eld_size(args.data));
2212 }
2213
2214 static void
2215 nv50_audio_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
2216 {
2217         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2218         struct nv50_disp *disp = nv50_disp(encoder->dev);
2219         struct {
2220                 struct nv50_disp_mthd_v1 base;
2221                 struct nv50_disp_sor_hda_eld_v0 eld;
2222         } args = {
2223                 .base.version = 1,
2224                 .base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
2225                 .base.hasht   = nv_encoder->dcb->hasht,
2226                 .base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
2227                                 (0x0100 << nv_crtc->index),
2228         };
2229
2230         nvif_mthd(disp->disp, 0, &args, sizeof(args));
2231 }
2232
2233 /******************************************************************************
2234  * HDMI
2235  *****************************************************************************/
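     /* Enable HDMI on the SOR.  max_ac_packet is the per-hblank audio packet
      * budget: the horizontal blanking width minus the rekey window and an
      * 18-pixel constant (both borrowed from the binary/tegra drivers),
      * expressed in units of 32 pixels.
      */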
2236 static void
2237 nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
2238 {
2239         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2240         struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2241         struct nv50_disp *disp = nv50_disp(encoder->dev);
2242         struct {
2243                 struct nv50_disp_mthd_v1 base;
2244                 struct nv50_disp_sor_hdmi_pwr_v0 pwr;
2245         } args = {
2246                 .base.version = 1,
2247                 .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
2248                 .base.hasht  = nv_encoder->dcb->hasht,
2249                 .base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
2250                                (0x0100 << nv_crtc->index),
2251                 .pwr.state = 1,
2252                 .pwr.rekey = 56, /* binary driver, and tegra, constant */
2253                 .pwr.rekey = 56, /* constant used by both the binary driver and tegra */
2254         struct nouveau_connector *nv_connector;
2255         u32 max_ac_packet;
2256
2257         nv_connector = nouveau_encoder_connector_get(nv_encoder);
2258         if (!drm_detect_hdmi_monitor(nv_connector->edid))
2259                 return;
2260
2261         max_ac_packet  = mode->htotal - mode->hdisplay;
2262         max_ac_packet -= args.pwr.rekey;
2263         max_ac_packet -= 18; /* constant from tegra */
2264         args.pwr.max_ac_packet = max_ac_packet / 32;
2265
2266         nvif_mthd(disp->disp, 0, &args, sizeof(args));
2267         nv50_audio_mode_set(encoder, mode);
2268 }
2269
2270 static void
2271 nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
2272 {
2273         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2274         struct nv50_disp *disp = nv50_disp(encoder->dev);
2275         struct {
2276                 struct nv50_disp_mthd_v1 base;
2277                 struct nv50_disp_sor_hdmi_pwr_v0 pwr;
2278         } args = {
2279                 .base.version = 1,
2280                 .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
2281                 .base.hasht  = nv_encoder->dcb->hasht,
2282                 .base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
2283                                (0x0100 << nv_crtc->index),
2284         };
2285
2286         nvif_mthd(disp->disp, 0, &args, sizeof(args));
2287 }
2288
2289 /******************************************************************************
2290  * MST
2291  *****************************************************************************/
2292 struct nv50_mstm {
2293         struct nouveau_encoder *outp;
2294
2295         struct drm_dp_mst_topology_mgr mgr;
2296 };
2297
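     /* Switch MST on or off: for DP 1.2+ sinks the DP_MST_EN bit in the
      * sink's DPCD is updated first, then NV50_DISP_MTHD_V1_SOR_DP_MST_LINK
      * tells the display engine to (de)activate the MST link on our side.
      */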
2298 static int
2299 nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
2300 {
2301         struct nouveau_encoder *outp = mstm->outp;
2302         struct {
2303                 struct nv50_disp_mthd_v1 base;
2304                 struct nv50_disp_sor_dp_mst_link_v0 mst;
2305         } args = {
2306                 .base.version = 1,
2307                 .base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
2308                 .base.hasht = outp->dcb->hasht,
2309                 .base.hashm = outp->dcb->hashm,
2310                 .mst.state = state,
2311         };
2312         struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
2313         struct nvif_object *disp = &drm->display->disp;
2314         int ret;
2315
2316         if (dpcd >= 0x12) {
2317                 ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
2318                 if (ret < 0)
2319                         return ret;
2320
2321                 dpcd &= ~DP_MST_EN;
2322                 if (state)
2323                         dpcd |= DP_MST_EN;
2324
2325                 ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
2326                 if (ret < 0)
2327                         return ret;
2328         }
2329
2330         return nvif_mthd(disp, 0, &args, sizeof(args));
2331 }
2332
2333 int
2334 nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
2335 {
2336         int ret, state = 0;
2337
2338         if (!mstm)
2339                 return 0;
2340
2341         if (dpcd[0] >= 0x12 && allow) {
2342                 ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
2343                 if (ret < 0)
2344                         return ret;
2345
2346                 state = dpcd[1] & DP_MST_CAP;
2347         }
2348
2349         ret = nv50_mstm_enable(mstm, dpcd[0], state);
2350         if (ret)
2351                 return ret;
2352
2353         ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
2354         if (ret)
2355                 return nv50_mstm_enable(mstm, dpcd[0], 0);
2356
2357         return mstm->mgr.mst_state;
2358 }
2359
2360 static void
2361 nv50_mstm_del(struct nv50_mstm **pmstm)
2362 {
2363         struct nv50_mstm *mstm = *pmstm;
2364         if (mstm) {
2365                 kfree(*pmstm);
2366                 *pmstm = NULL;
2367         }
2368 }
2369
2370 static int
2371 nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
2372               int conn_base_id, struct nv50_mstm **pmstm)
2373 {
2374         const int max_payloads = hweight8(outp->dcb->heads);
2375         struct drm_device *dev = outp->base.base.dev;
2376         struct nv50_mstm *mstm;
2377         int ret;
2378
2379         if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
2380                 return -ENOMEM;
2381         mstm->outp = outp;
2382
2383         ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev->dev, aux, aux_max,
2384                                            max_payloads, conn_base_id);
2385         if (ret)
2386                 return ret;
2387
2388         return 0;
2389 }
2390
2391 /******************************************************************************
2392  * SOR
2393  *****************************************************************************/
2394 static void
2395 nv50_sor_dpms(struct drm_encoder *encoder, int mode)
2396 {
2397         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2398         struct nv50_disp *disp = nv50_disp(encoder->dev);
2399         struct {
2400                 struct nv50_disp_mthd_v1 base;
2401                 struct nv50_disp_sor_pwr_v0 pwr;
2402         } args = {
2403                 .base.version = 1,
2404                 .base.method = NV50_DISP_MTHD_V1_SOR_PWR,
2405                 .base.hasht  = nv_encoder->dcb->hasht,
2406                 .base.hashm  = nv_encoder->dcb->hashm,
2407                 .pwr.state = mode == DRM_MODE_DPMS_ON,
2408         };
2409         struct {
2410                 struct nv50_disp_mthd_v1 base;
2411                 struct nv50_disp_sor_dp_pwr_v0 pwr;
2412         } link = {
2413                 .base.version = 1,
2414                 .base.method = NV50_DISP_MTHD_V1_SOR_DP_PWR,
2415                 .base.hasht  = nv_encoder->dcb->hasht,
2416                 .base.hashm  = nv_encoder->dcb->hashm,
2417                 .pwr.state = mode == DRM_MODE_DPMS_ON,
2418         };
2419         struct drm_device *dev = encoder->dev;
2420         struct drm_encoder *partner;
2421
2422         nv_encoder->last_dpms = mode;
2423
2424         list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
2425                 struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
2426
2427                 if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
2428                         continue;
2429
2430                 if (nv_partner != nv_encoder &&
2431                     nv_partner->dcb->or == nv_encoder->dcb->or) {
2432                         if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
2433                                 return;
2434                         break;
2435                 }
2436         }
2437
2438         if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
2439                 args.pwr.state = 1;
2440                 nvif_mthd(disp->disp, 0, &args, sizeof(args));
2441                 nvif_mthd(disp->disp, 0, &link, sizeof(link));
2442         } else {
2443                 nvif_mthd(disp->disp, 0, &args, sizeof(args));
2444         }
2445 }
2446
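     /* Read-modify-write of the cached SOR control word, only touching the
      * core channel when the value actually changes.
      */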
2447 static void
2448 nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data)
2449 {
2450         struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
2451         u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;
2452         if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
2453                 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2454                         evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
2455                         evo_data(push, (nv_encoder->ctrl = temp));
2456                 } else {
2457                         evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
2458                         evo_data(push, (nv_encoder->ctrl = temp));
2459                 }
2460                 evo_kick(push, mast);
2461         }
2462 }
2463
2464 static void
2465 nv50_sor_disconnect(struct drm_encoder *encoder)
2466 {
2467         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2468         struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
2469
2470         nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
2471         nv_encoder->crtc = NULL;
2472
2473         if (nv_crtc) {
2474                 nv50_crtc_prepare(&nv_crtc->base);
2475                 nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
2476                 nv50_audio_disconnect(encoder, nv_crtc);
2477                 nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
2478         }
2479 }
2480
2481 static void
2482 nv50_sor_commit(struct drm_encoder *encoder)
2483 {
2484 }
2485
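     /* proto picks the SOR protocol for the DCB output type (LVDS, single or
      * dual-link TMDS, DP on either sublink) and depth encodes the DP colour
      * depth (0x2 = 18bpp, 0x5 = 24bpp, 0x6 = 30bpp), chosen below from the
      * connector's reported bits-per-component.
      */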
2486 static void
2487 nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
2488                   struct drm_display_mode *mode)
2489 {
2490         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2491         struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2492         struct {
2493                 struct nv50_disp_mthd_v1 base;
2494                 struct nv50_disp_sor_lvds_script_v0 lvds;
2495         } lvds = {
2496                 .base.version = 1,
2497                 .base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
2498                 .base.hasht   = nv_encoder->dcb->hasht,
2499                 .base.hashm   = nv_encoder->dcb->hashm,
2500         };
2501         struct nv50_disp *disp = nv50_disp(encoder->dev);
2502         struct nv50_mast *mast = nv50_mast(encoder->dev);
2503         struct drm_device *dev = encoder->dev;
2504         struct nouveau_drm *drm = nouveau_drm(dev);
2505         struct nouveau_connector *nv_connector;
2506         struct nvbios *bios = &drm->vbios;
2507         u32 mask, ctrl;
2508         u8 owner = 1 << nv_crtc->index;
2509         u8 proto = 0xf;
2510         u8 depth = 0x0;
2511
2512         nv_connector = nouveau_encoder_connector_get(nv_encoder);
2513         nv_encoder->crtc = encoder->crtc;
2514
2515         switch (nv_encoder->dcb->type) {
2516         case DCB_OUTPUT_TMDS:
2517                 if (nv_encoder->dcb->sorconf.link & 1) {
2518                         proto = 0x1;
2519                         /* Only enable dual-link if:
2520                          *  - Need to (i.e. rate > 165MHz)
2521                          *  - DCB says we can
2522                          *  - Not an HDMI monitor, since there's no dual-link
2523                          *    on HDMI.
2524                          */
2525                         if (mode->clock >= 165000 &&
2526                             nv_encoder->dcb->duallink_possible &&
2527                             !drm_detect_hdmi_monitor(nv_connector->edid))
2528                                 proto |= 0x4;
2529                 } else {
2530                         proto = 0x2;
2531                 }
2532
2533                 nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
2534                 break;
2535         case DCB_OUTPUT_LVDS:
2536                 proto = 0x0;
2537
2538                 if (bios->fp_no_ddc) {
2539                         if (bios->fp.dual_link)
2540                                 lvds.lvds.script |= 0x0100;
2541                         if (bios->fp.if_is_24bit)
2542                                 lvds.lvds.script |= 0x0200;
2543                 } else {
2544                         if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
2545                                 if (((u8 *)nv_connector->edid)[121] == 2)
2546                                         lvds.lvds.script |= 0x0100;
2547                         } else
2548                         if (mode->clock >= bios->fp.duallink_transition_clk) {
2549                                 lvds.lvds.script |= 0x0100;
2550                         }
2551
2552                         if (lvds.lvds.script & 0x0100) {
2553                                 if (bios->fp.strapless_is_24bit & 2)
2554                                         lvds.lvds.script |= 0x0200;
2555                         } else {
2556                                 if (bios->fp.strapless_is_24bit & 1)
2557                                         lvds.lvds.script |= 0x0200;
2558                         }
2559
2560                         if (nv_connector->base.display_info.bpc == 8)
2561                                 lvds.lvds.script |= 0x0200;
2562                 }
2563
2564                 nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
2565                 break;
2566         case DCB_OUTPUT_DP:
2567                 if (nv_connector->base.display_info.bpc == 6) {
2568                         nv_encoder->dp.datarate = mode->clock * 18 / 8;
2569                         depth = 0x2;
2570                 } else
2571                 if (nv_connector->base.display_info.bpc == 8) {
2572                         nv_encoder->dp.datarate = mode->clock * 24 / 8;
2573                         depth = 0x5;
2574                 } else {
2575                         nv_encoder->dp.datarate = mode->clock * 30 / 8;
2576                         depth = 0x6;
2577                 }
2578
2579                 if (nv_encoder->dcb->sorconf.link & 1)
2580                         proto = 0x8;
2581                 else
2582                         proto = 0x9;
2583                 nv50_audio_mode_set(encoder, mode);
2584                 break;
2585         default:
2586                 BUG_ON(1);
2587                 break;
2588         }
2589
2590         nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);
2591
2592         if (nv50_vers(mast) >= GF110_DISP) {
2593                 u32 *push = evo_wait(mast, 3);
2594                 if (push) {
2595                         u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
2596                         u32 syncs = 0x00000001;
2597
2598                         if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2599                                 syncs |= 0x00000008;
2600                         if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2601                                 syncs |= 0x00000010;
2602
2603                         if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2604                                 magic |= 0x00000001;
2605
2606                         evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
2607                         evo_data(push, syncs | (depth << 6));
2608                         evo_data(push, magic);
2609                         evo_kick(push, mast);
2610                 }
2611
2612                 ctrl = proto << 8;
2613                 mask = 0x00000f00;
2614         } else {
2615                 ctrl = (depth << 16) | (proto << 8);
2616                 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2617                         ctrl |= 0x00001000;
2618                 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2619                         ctrl |= 0x00002000;
2620                 mask = 0x000f3f00;
2621         }
2622
2623         nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
2624 }
2625
2626 static void
2627 nv50_sor_destroy(struct drm_encoder *encoder)
2628 {
2629         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2630         nv50_mstm_del(&nv_encoder->dp.mstm);
2631         drm_encoder_cleanup(encoder);
2632         kfree(encoder);
2633 }
2634
2635 static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
2636         .dpms = nv50_sor_dpms,
2637         .mode_fixup = nv50_encoder_mode_fixup,
2638         .prepare = nv50_sor_disconnect,
2639         .commit = nv50_sor_commit,
2640         .mode_set = nv50_sor_mode_set,
2641         .disable = nv50_sor_disconnect,
2642         .get_crtc = nv50_display_crtc_get,
2643 };
2644
2645 static const struct drm_encoder_funcs nv50_sor_func = {
2646         .destroy = nv50_sor_destroy,
2647 };
2648
2649 static int
2650 nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
2651 {
2652         struct nouveau_connector *nv_connector = nouveau_connector(connector);
2653         struct nouveau_drm *drm = nouveau_drm(connector->dev);
2654         struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
2655         struct nouveau_encoder *nv_encoder;
2656         struct drm_encoder *encoder;
2657         int type, ret;
2658
2659         switch (dcbe->type) {
2660         case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
2661         case DCB_OUTPUT_TMDS:
2662         case DCB_OUTPUT_DP:
2663         default:
2664                 type = DRM_MODE_ENCODER_TMDS;
2665                 break;
2666         }
2667
2668         nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2669         if (!nv_encoder)
2670                 return -ENOMEM;
2671         nv_encoder->dcb = dcbe;
2672         nv_encoder->or = ffs(dcbe->or) - 1;
2673         nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
2674
2675         encoder = to_drm_encoder(nv_encoder);
2676         encoder->possible_crtcs = dcbe->heads;
2677         encoder->possible_clones = 0;
2678         drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
2679                          "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
2680         drm_encoder_helper_add(encoder, &nv50_sor_hfunc);
2681
2682         drm_mode_connector_attach_encoder(connector, encoder);
2683
2684         if (dcbe->type == DCB_OUTPUT_DP) {
2685                 struct nvkm_i2c_aux *aux =
2686                         nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
2687                 if (aux) {
2688                         nv_encoder->i2c = &aux->i2c;
2689                         nv_encoder->aux = aux;
2690                 }
2691
2692                 /*TODO: Use DP Info Table to check for support. */
2693                 if (nv50_disp(encoder->dev)->disp->oclass >= GF110_DISP) {
2694                         ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
2695                                             nv_connector->base.base.id,
2696                                             &nv_encoder->dp.mstm);
2697                         if (ret)
2698                                 return ret;
2699                 }
2700         } else {
2701                 struct nvkm_i2c_bus *bus =
2702                         nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
2703                 if (bus)
2704                         nv_encoder->i2c = &bus->i2c;
2705         }
2706
2707         return 0;
2708 }
2709
2710 /******************************************************************************
2711  * PIOR
2712  *****************************************************************************/
2713
2714 static void
2715 nv50_pior_dpms(struct drm_encoder *encoder, int mode)
2716 {
2717         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2718         struct nv50_disp *disp = nv50_disp(encoder->dev);
2719         struct {
2720                 struct nv50_disp_mthd_v1 base;
2721                 struct nv50_disp_pior_pwr_v0 pwr;
2722         } args = {
2723                 .base.version = 1,
2724                 .base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
2725                 .base.hasht  = nv_encoder->dcb->hasht,
2726                 .base.hashm  = nv_encoder->dcb->hashm,
2727                 .pwr.state = mode == DRM_MODE_DPMS_ON,
2728                 .pwr.type = nv_encoder->dcb->type,
2729         };
2730
2731         nvif_mthd(disp->disp, 0, &args, sizeof(args));
2732 }
2733
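     /* For external (PIOR-attached) encoders the adjusted clock is doubled,
      * presumably because the link to the external transmitter runs at twice
      * the pixel rate.
      */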
2734 static bool
2735 nv50_pior_mode_fixup(struct drm_encoder *encoder,
2736                      const struct drm_display_mode *mode,
2737                      struct drm_display_mode *adjusted_mode)
2738 {
2739         if (!nv50_encoder_mode_fixup(encoder, mode, adjusted_mode))
2740                 return false;
2741         adjusted_mode->clock *= 2;
2742         return true;
2743 }
2744
2745 static void
2746 nv50_pior_commit(struct drm_encoder *encoder)
2747 {
2748 }
2749
2750 static void
2751 nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
2752                    struct drm_display_mode *adjusted_mode)
2753 {
2754         struct nv50_mast *mast = nv50_mast(encoder->dev);
2755         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2756         struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2757         struct nouveau_connector *nv_connector;
2758         u8 owner = 1 << nv_crtc->index;
2759         u8 proto, depth;
2760         u32 *push;
2761
2762         nv_connector = nouveau_encoder_connector_get(nv_encoder);
2763         switch (nv_connector->base.display_info.bpc) {
2764         case 10: depth = 0x6; break;
2765         case  8: depth = 0x5; break;
2766         case  6: depth = 0x2; break;
2767         default: depth = 0x0; break;
2768         }
2769
2770         switch (nv_encoder->dcb->type) {
2771         case DCB_OUTPUT_TMDS:
2772         case DCB_OUTPUT_DP:
2773                 proto = 0x0;
2774                 break;
2775         default:
2776                 BUG_ON(1);
2777                 break;
2778         }
2779
2780         nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON);
2781
2782         push = evo_wait(mast, 8);
2783         if (push) {
2784                 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2785                         u32 ctrl = (depth << 16) | (proto << 8) | owner;
2786                         if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2787                                 ctrl |= 0x00001000;
2788                         if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2789                                 ctrl |= 0x00002000;
2790                         evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
2791                         evo_data(push, ctrl);
2792                 }
2793
2794                 evo_kick(push, mast);
2795         }
2796
2797         nv_encoder->crtc = encoder->crtc;
2798 }
2799
2800 static void
2801 nv50_pior_disconnect(struct drm_encoder *encoder)
2802 {
2803         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2804         struct nv50_mast *mast = nv50_mast(encoder->dev);
2805         const int or = nv_encoder->or;
2806         u32 *push;
2807
2808         if (nv_encoder->crtc) {
2809                 nv50_crtc_prepare(nv_encoder->crtc);
2810
2811                 push = evo_wait(mast, 4);
2812                 if (push) {
2813                         if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2814                                 evo_mthd(push, 0x0700 + (or * 0x040), 1);
2815                                 evo_data(push, 0x00000000);
2816                         }
2817                         evo_kick(push, mast);
2818                 }
2819         }
2820
2821         nv_encoder->crtc = NULL;
2822 }
2823
2824 static void
2825 nv50_pior_destroy(struct drm_encoder *encoder)
2826 {
2827         drm_encoder_cleanup(encoder);
2828         kfree(encoder);
2829 }
2830
2831 static const struct drm_encoder_helper_funcs nv50_pior_hfunc = {
2832         .dpms = nv50_pior_dpms,
2833         .mode_fixup = nv50_pior_mode_fixup,
2834         .prepare = nv50_pior_disconnect,
2835         .commit = nv50_pior_commit,
2836         .mode_set = nv50_pior_mode_set,
2837         .disable = nv50_pior_disconnect,
2838         .get_crtc = nv50_display_crtc_get,
2839 };
2840
2841 static const struct drm_encoder_funcs nv50_pior_func = {
2842         .destroy = nv50_pior_destroy,
2843 };
2844
2845 static int
2846 nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
2847 {
2848         struct nouveau_drm *drm = nouveau_drm(connector->dev);
2849         struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
2850         struct nvkm_i2c_bus *bus = NULL;
2851         struct nvkm_i2c_aux *aux = NULL;
2852         struct i2c_adapter *ddc;
2853         struct nouveau_encoder *nv_encoder;
2854         struct drm_encoder *encoder;
2855         int type;
2856
2857         switch (dcbe->type) {
2858         case DCB_OUTPUT_TMDS:
2859                 bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
2860                 ddc  = bus ? &bus->i2c : NULL;
2861                 type = DRM_MODE_ENCODER_TMDS;
2862                 break;
2863         case DCB_OUTPUT_DP:
2864                 aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
2865                 ddc  = aux ? &aux->i2c : NULL;
2866                 type = DRM_MODE_ENCODER_TMDS;
2867                 break;
2868         default:
2869                 return -ENODEV;
2870         }
2871
2872         nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2873         if (!nv_encoder)
2874                 return -ENOMEM;
2875         nv_encoder->dcb = dcbe;
2876         nv_encoder->or = ffs(dcbe->or) - 1;
2877         nv_encoder->i2c = ddc;
2878         nv_encoder->aux = aux;
2879
2880         encoder = to_drm_encoder(nv_encoder);
2881         encoder->possible_crtcs = dcbe->heads;
2882         encoder->possible_clones = 0;
2883         drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
2884                          "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
2885         drm_encoder_helper_add(encoder, &nv50_pior_hfunc);
2886
2887         drm_mode_connector_attach_encoder(connector, encoder);
2888         return 0;
2889 }
2890
2891 /******************************************************************************
2892  * Framebuffer
2893  *****************************************************************************/
2894
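/*
 * Scanout buffers are reached through "fbdma" DMA objects that span VRAM
 * from offset 0 to ram_user for a given memory kind.  One object is
 * created for the core channel and one per head, all under the handle
 * 0xffff0000 | kind, so framebuffers of the same kind share a single set.
 */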
2895 static void
2896 nv50_fbdma_fini(struct nv50_fbdma *fbdma)
2897 {
2898         int i;
2899         for (i = 0; i < ARRAY_SIZE(fbdma->base); i++)
2900                 nvif_object_fini(&fbdma->base[i]);
2901         nvif_object_fini(&fbdma->core);
2902         list_del(&fbdma->head);
2903         kfree(fbdma);
2904 }
2905
2906 static int
2907 nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind)
2908 {
2909         struct nouveau_drm *drm = nouveau_drm(dev);
2910         struct nv50_disp *disp = nv50_disp(dev);
2911         struct nv50_mast *mast = nv50_mast(dev);
2912         struct __attribute__ ((packed)) {
2913                 struct nv_dma_v0 base;
2914                 union {
2915                         struct nv50_dma_v0 nv50;
2916                         struct gf100_dma_v0 gf100;
2917                         struct gf119_dma_v0 gf119;
2918                 };
2919         } args = {};
2920         struct nv50_fbdma *fbdma;
2921         struct drm_crtc *crtc;
2922         u32 size = sizeof(args.base);
2923         int ret;
2924
2925         list_for_each_entry(fbdma, &disp->fbdma, head) {
2926                 if (fbdma->core.handle == name)
2927                         return 0;
2928         }
2929
2930         fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL);
2931         if (!fbdma)
2932                 return -ENOMEM;
2933         list_add(&fbdma->head, &disp->fbdma);
2934
2935         args.base.target = NV_DMA_V0_TARGET_VRAM;
2936         args.base.access = NV_DMA_V0_ACCESS_RDWR;
2937         args.base.start = offset;
2938         args.base.limit = offset + length - 1;
2939
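        /*
         * The tail of the ctxdma arguments varies with the chipset: before
         * 0x80 no kind is passed, 0x80-0xbf take an nv50 kind, 0xc0-0xcf a
         * gf100 kind, and 0xd0 onwards the gf119 layout with a page size.
         */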
2940         if (drm->device.info.chipset < 0x80) {
2941                 args.nv50.part = NV50_DMA_V0_PART_256;
2942                 size += sizeof(args.nv50);
2943         } else
2944         if (drm->device.info.chipset < 0xc0) {
2945                 args.nv50.part = NV50_DMA_V0_PART_256;
2946                 args.nv50.kind = kind;
2947                 size += sizeof(args.nv50);
2948         } else
2949         if (drm->device.info.chipset < 0xd0) {
2950                 args.gf100.kind = kind;
2951                 size += sizeof(args.gf100);
2952         } else {
2953                 args.gf119.page = GF119_DMA_V0_PAGE_LP;
2954                 args.gf119.kind = kind;
2955                 size += sizeof(args.gf119);
2956         }
2957
2958         list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
2959                 struct nv50_head *head = nv50_head(crtc);
2960                 int ret = nvif_object_init(&head->sync.base.base.user, name,
2961                                            NV_DMA_IN_MEMORY, &args, size,
2962                                            &fbdma->base[head->base.index]);
2963                 if (ret) {
2964                         nv50_fbdma_fini(fbdma);
2965                         return ret;
2966                 }
2967         }
2968
2969         ret = nvif_object_init(&mast->base.base.user, name, NV_DMA_IN_MEMORY,
2970                                &args, size, &fbdma->core);
2971         if (ret) {
2972                 nv50_fbdma_fini(fbdma);
2973                 return ret;
2974         }
2975
2976         return 0;
2977 }
2978
2979 static void
2980 nv50_fb_dtor(struct drm_framebuffer *fb)
2981 {
2982 }
2983
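/*
 * Pre-compute the EVO surface parameters for a framebuffer: r_format is
 * the display surface format for the drm depth, r_handle names the fbdma
 * ctxdma above, and r_pitch holds either a tiled pitch or a linear pitch
 * with what appears to be a "linear" flag, whose value changed with the
 * GF110 display class.
 */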
2984 static int
2985 nv50_fb_ctor(struct drm_framebuffer *fb)
2986 {
2987         struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
2988         struct nouveau_drm *drm = nouveau_drm(fb->dev);
2989         struct nouveau_bo *nvbo = nv_fb->nvbo;
2990         struct nv50_disp *disp = nv50_disp(fb->dev);
2991         u8 kind = nouveau_bo_tile_layout(nvbo) >> 8;
2992         u8 tile = nvbo->tile_mode;
2993
2994         if (drm->device.info.chipset >= 0xc0)
2995                 tile >>= 4; /* the value we want sits one nibble up in tile_mode on 0xc0 and newer */
2996
2997         switch (fb->depth) {
2998         case  8: nv_fb->r_format = 0x1e00; break;
2999         case 15: nv_fb->r_format = 0xe900; break;
3000         case 16: nv_fb->r_format = 0xe800; break;
3001         case 24:
3002         case 32: nv_fb->r_format = 0xcf00; break;
3003         case 30: nv_fb->r_format = 0xd100; break;
3004         default:
3005                  NV_ERROR(drm, "unknown depth %d\n", fb->depth);
3006                  return -EINVAL;
3007         }
3008
3009         if (disp->disp->oclass < G82_DISP) {
3010                 nv_fb->r_pitch   = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
3011                                             (fb->pitches[0] | 0x00100000);
3012                 nv_fb->r_format |= kind << 16;
3013         } else
3014         if (disp->disp->oclass < GF110_DISP) {
3015                 nv_fb->r_pitch  = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
3016                                            (fb->pitches[0] | 0x00100000);
3017         } else {
3018                 nv_fb->r_pitch  = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
3019                                            (fb->pitches[0] | 0x01000000);
3020         }
3021         nv_fb->r_handle = 0xffff0000 | kind;
3022
3023         return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0,
3024                                drm->device.info.ram_user, kind);
3025 }
3026
3027 /******************************************************************************
3028  * Init
3029  *****************************************************************************/
3030
3031 void
3032 nv50_display_fini(struct drm_device *dev)
3033 {
3034 }
3035
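/*
 * Runs at load and on resume: reload each head's LUT, rewrite the per-head
 * semaphore values into the shared sync buffer, and re-point the core
 * channel at its notifier (method 0x0088 is assumed to be the core
 * channel's notifier ctxdma method).
 */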
3036 int
3037 nv50_display_init(struct drm_device *dev)
3038 {
3039         struct nv50_disp *disp = nv50_disp(dev);
3040         struct drm_crtc *crtc;
3041         u32 *push;
3042
3043         push = evo_wait(nv50_mast(dev), 32);
3044         if (!push)
3045                 return -EBUSY;
3046
3047         list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
3048                 struct nv50_sync *sync = nv50_sync(crtc);
3049
3050                 nv50_crtc_lut_load(crtc);
3051                 nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data);
3052         }
3053
3054         evo_mthd(push, 0x0088, 1);
3055         evo_data(push, nv50_mast(dev)->base.sync.handle);
3056         evo_kick(push, nv50_mast(dev));
3057         return 0;
3058 }
3059
3060 void
3061 nv50_display_destroy(struct drm_device *dev)
3062 {
3063         struct nv50_disp *disp = nv50_disp(dev);
3064         struct nv50_fbdma *fbdma, *fbtmp;
3065
3066         list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) {
3067                 nv50_fbdma_fini(fbdma);
3068         }
3069
3070         nv50_dmac_destroy(&disp->mast.base, disp->disp);
3071
3072         nouveau_bo_unmap(disp->sync);
3073         if (disp->sync)
3074                 nouveau_bo_unpin(disp->sync);
3075         nouveau_bo_ref(NULL, &disp->sync);
3076
3077         nouveau_display(dev)->priv = NULL;
3078         kfree(disp);
3079 }
3080
3081 int
3082 nv50_display_create(struct drm_device *dev)
3083 {
3084         struct nvif_device *device = &nouveau_drm(dev)->device;
3085         struct nouveau_drm *drm = nouveau_drm(dev);
3086         struct dcb_table *dcb = &drm->vbios.dcb;
3087         struct drm_connector *connector, *tmp;
3088         struct nv50_disp *disp;
3089         struct dcb_output *dcbe;
3090         int crtcs, ret, i;
3091
3092         disp = kzalloc(sizeof(*disp), GFP_KERNEL);
3093         if (!disp)
3094                 return -ENOMEM;
3095         INIT_LIST_HEAD(&disp->fbdma);
3096
3097         nouveau_display(dev)->priv = disp;
3098         nouveau_display(dev)->dtor = nv50_display_destroy;
3099         nouveau_display(dev)->init = nv50_display_init;
3100         nouveau_display(dev)->fini = nv50_display_fini;
3101         nouveau_display(dev)->fb_ctor = nv50_fb_ctor;
3102         nouveau_display(dev)->fb_dtor = nv50_fb_dtor;
3103         disp->disp = &nouveau_display(dev)->disp;
3104
3105         /* small shared memory area we use for notifiers and semaphores */
3106         ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
3107                              0, 0x0000, NULL, NULL, &disp->sync);
3108         if (!ret) {
3109                 ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
3110                 if (!ret) {
3111                         ret = nouveau_bo_map(disp->sync);
3112                         if (ret)
3113                                 nouveau_bo_unpin(disp->sync);
3114                 }
3115                 if (ret)
3116                         nouveau_bo_ref(NULL, &disp->sync);
3117         }
3118
3119         if (ret)
3120                 goto out;
3121
3122         /* allocate master evo channel */
3123         ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
3124                               &disp->mast);
3125         if (ret)
3126                 goto out;
3127
3128         /* create crtc objects to represent the hw heads */
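        /* GF110+ reports the head count in a display register; earlier chips have two heads. */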
3129         if (disp->disp->oclass >= GF110_DISP)
3130                 crtcs = nvif_rd32(&device->object, 0x022448);
3131         else
3132                 crtcs = 2;
3133
3134         for (i = 0; i < crtcs; i++) {
3135                 ret = nv50_crtc_create(dev, i);
3136                 if (ret)
3137                         goto out;
3138         }
3139
3140         /* create encoder/connector objects based on VBIOS DCB table */
3141         for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
3142                 connector = nouveau_connector_create(dev, dcbe->connector);
3143                 if (IS_ERR(connector))
3144                         continue;
3145
3146                 if (dcbe->location == DCB_LOC_ON_CHIP) {
3147                         switch (dcbe->type) {
3148                         case DCB_OUTPUT_TMDS:
3149                         case DCB_OUTPUT_LVDS:
3150                         case DCB_OUTPUT_DP:
3151                                 ret = nv50_sor_create(connector, dcbe);
3152                                 break;
3153                         case DCB_OUTPUT_ANALOG:
3154                                 ret = nv50_dac_create(connector, dcbe);
3155                                 break;
3156                         default:
3157                                 ret = -ENODEV;
3158                                 break;
3159                         }
3160                 } else {
3161                         ret = nv50_pior_create(connector, dcbe);
3162                 }
3163
3164                 if (ret) {
3165                         NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
3166                                      dcbe->location, dcbe->type,
3167                                      ffs(dcbe->or) - 1, ret);
3168                         ret = 0;
3169                 }
3170         }
3171
3172         /* cull any connectors we created that don't have an encoder */
3173         list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
3174                 if (connector->encoder_ids[0])
3175                         continue;
3176
3177                 NV_WARN(drm, "%s has no encoders, removing\n",
3178                         connector->name);
3179                 connector->funcs->destroy(connector);
3180         }
3181
3182 out:
3183         if (ret)
3184                 nv50_display_destroy(dev);
3185         return ret;
3186 }