]> git.proxmox.com Git - mirror_ubuntu-jammy-kernel.git/blob - drivers/gpu/drm/nouveau/nv50_display.c
drm/nouveau/kms/nv50: separate out core surface commit
[mirror_ubuntu-jammy-kernel.git] / drivers / gpu / drm / nouveau / nv50_display.c
1 /*
2 * Copyright 2011 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Ben Skeggs
23 */
24
25 #include <linux/dma-mapping.h>
26
27 #include <drm/drmP.h>
28 #include <drm/drm_atomic.h>
29 #include <drm/drm_crtc_helper.h>
30 #include <drm/drm_dp_helper.h>
31 #include <drm/drm_fb_helper.h>
32 #include <drm/drm_plane_helper.h>
33
34 #include <nvif/class.h>
35 #include <nvif/cl0002.h>
36 #include <nvif/cl5070.h>
37 #include <nvif/cl507a.h>
38 #include <nvif/cl507b.h>
39 #include <nvif/cl507c.h>
40 #include <nvif/cl507d.h>
41 #include <nvif/cl507e.h>
42
43 #include "nouveau_drv.h"
44 #include "nouveau_dma.h"
45 #include "nouveau_gem.h"
46 #include "nouveau_connector.h"
47 #include "nouveau_encoder.h"
48 #include "nouveau_crtc.h"
49 #include "nouveau_fence.h"
50 #include "nv50_display.h"
51
52 #define EVO_DMA_NR 9
53
54 #define EVO_MASTER (0x00)
55 #define EVO_FLIP(c) (0x01 + (c))
56 #define EVO_OVLY(c) (0x05 + (c))
57 #define EVO_OIMM(c) (0x09 + (c))
58 #define EVO_CURS(c) (0x0d + (c))
59
60 /* offsets in shared sync bo of various structures */
61 #define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
62 #define EVO_MAST_NTFY EVO_SYNC( 0, 0x00)
63 #define EVO_FLIP_SEM0(c) EVO_SYNC((c) + 1, 0x00)
64 #define EVO_FLIP_SEM1(c) EVO_SYNC((c) + 1, 0x10)
65
66 /******************************************************************************
67 * Atomic state
68 *****************************************************************************/
69 #define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)
70
71 struct nv50_head_atom {
72 struct drm_crtc_state state;
73
74 struct nv50_head_mode {
75 bool interlace;
76 u32 clock;
77 struct {
78 u16 active;
79 u16 synce;
80 u16 blanke;
81 u16 blanks;
82 } h;
83 struct {
84 u32 active;
85 u16 synce;
86 u16 blanke;
87 u16 blanks;
88 u16 blank2s;
89 u16 blank2e;
90 u16 blankus;
91 } v;
92 } mode;
93
94 struct {
95 bool visible;
96 u32 handle;
97 u64 offset:40;
98 u8 format;
99 u8 kind:7;
100 u8 layout:1;
101 u8 block:4;
102 u32 pitch:20;
103 u16 x;
104 u16 y;
105 u16 w;
106 u16 h;
107 } core;
108
109 struct {
110 u8 depth;
111 u8 cpp;
112 u16 x;
113 u16 y;
114 u16 w;
115 u16 h;
116 } base;
117
118 union {
119 struct {
120 bool core:1;
121 };
122 u8 mask;
123 } clr;
124
125 union {
126 struct {
127 bool core:1;
128 bool view:1;
129 bool mode:1;
130 };
131 u16 mask;
132 } set;
133 };
134
135 /******************************************************************************
136 * EVO channel
137 *****************************************************************************/
138
139 struct nv50_chan {
140 struct nvif_object user;
141 struct nvif_device *device;
142 };
143
144 static int
145 nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
146 const s32 *oclass, u8 head, void *data, u32 size,
147 struct nv50_chan *chan)
148 {
149 struct nvif_sclass *sclass;
150 int ret, i, n;
151
152 chan->device = device;
153
154 ret = n = nvif_object_sclass_get(disp, &sclass);
155 if (ret < 0)
156 return ret;
157
158 while (oclass[0]) {
159 for (i = 0; i < n; i++) {
160 if (sclass[i].oclass == oclass[0]) {
161 ret = nvif_object_init(disp, 0, oclass[0],
162 data, size, &chan->user);
163 if (ret == 0)
164 nvif_object_map(&chan->user);
165 nvif_object_sclass_put(&sclass);
166 return ret;
167 }
168 }
169 oclass++;
170 }
171
172 nvif_object_sclass_put(&sclass);
173 return -ENOSYS;
174 }
175
176 static void
177 nv50_chan_destroy(struct nv50_chan *chan)
178 {
179 nvif_object_fini(&chan->user);
180 }
181
182 /******************************************************************************
183 * PIO EVO channel
184 *****************************************************************************/
185
186 struct nv50_pioc {
187 struct nv50_chan base;
188 };
189
190 static void
191 nv50_pioc_destroy(struct nv50_pioc *pioc)
192 {
193 nv50_chan_destroy(&pioc->base);
194 }
195
196 static int
197 nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
198 const s32 *oclass, u8 head, void *data, u32 size,
199 struct nv50_pioc *pioc)
200 {
201 return nv50_chan_create(device, disp, oclass, head, data, size,
202 &pioc->base);
203 }
204
205 /******************************************************************************
206 * Cursor Immediate
207 *****************************************************************************/
208
209 struct nv50_curs {
210 struct nv50_pioc base;
211 };
212
213 static int
214 nv50_curs_create(struct nvif_device *device, struct nvif_object *disp,
215 int head, struct nv50_curs *curs)
216 {
217 struct nv50_disp_cursor_v0 args = {
218 .head = head,
219 };
220 static const s32 oclass[] = {
221 GK104_DISP_CURSOR,
222 GF110_DISP_CURSOR,
223 GT214_DISP_CURSOR,
224 G82_DISP_CURSOR,
225 NV50_DISP_CURSOR,
226 0
227 };
228
229 return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
230 &curs->base);
231 }
232
233 /******************************************************************************
234 * Overlay Immediate
235 *****************************************************************************/
236
237 struct nv50_oimm {
238 struct nv50_pioc base;
239 };
240
241 static int
242 nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
243 int head, struct nv50_oimm *oimm)
244 {
245 struct nv50_disp_cursor_v0 args = {
246 .head = head,
247 };
248 static const s32 oclass[] = {
249 GK104_DISP_OVERLAY,
250 GF110_DISP_OVERLAY,
251 GT214_DISP_OVERLAY,
252 G82_DISP_OVERLAY,
253 NV50_DISP_OVERLAY,
254 0
255 };
256
257 return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
258 &oimm->base);
259 }
260
261 /******************************************************************************
262 * DMA EVO channel
263 *****************************************************************************/
264
265 struct nv50_dmac {
266 struct nv50_chan base;
267 dma_addr_t handle;
268 u32 *ptr;
269
270 struct nvif_object sync;
271 struct nvif_object vram;
272
273 /* Protects against concurrent pushbuf access to this channel, lock is
274 * grabbed by evo_wait (if the pushbuf reservation is successful) and
275 * dropped again by evo_kick. */
276 struct mutex lock;
277 };
278
279 static void
280 nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
281 {
282 struct nvif_device *device = dmac->base.device;
283
284 nvif_object_fini(&dmac->vram);
285 nvif_object_fini(&dmac->sync);
286
287 nv50_chan_destroy(&dmac->base);
288
289 if (dmac->ptr) {
290 struct device *dev = nvxx_device(device)->dev;
291 dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
292 }
293 }
294
295 static int
296 nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
297 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
298 struct nv50_dmac *dmac)
299 {
300 struct nv50_disp_core_channel_dma_v0 *args = data;
301 struct nvif_object pushbuf;
302 int ret;
303
304 mutex_init(&dmac->lock);
305
306 dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
307 &dmac->handle, GFP_KERNEL);
308 if (!dmac->ptr)
309 return -ENOMEM;
310
311 ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
312 &(struct nv_dma_v0) {
313 .target = NV_DMA_V0_TARGET_PCI_US,
314 .access = NV_DMA_V0_ACCESS_RD,
315 .start = dmac->handle + 0x0000,
316 .limit = dmac->handle + 0x0fff,
317 }, sizeof(struct nv_dma_v0), &pushbuf);
318 if (ret)
319 return ret;
320
321 args->pushbuf = nvif_handle(&pushbuf);
322
323 ret = nv50_chan_create(device, disp, oclass, head, data, size,
324 &dmac->base);
325 nvif_object_fini(&pushbuf);
326 if (ret)
327 return ret;
328
329 ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
330 &(struct nv_dma_v0) {
331 .target = NV_DMA_V0_TARGET_VRAM,
332 .access = NV_DMA_V0_ACCESS_RDWR,
333 .start = syncbuf + 0x0000,
334 .limit = syncbuf + 0x0fff,
335 }, sizeof(struct nv_dma_v0),
336 &dmac->sync);
337 if (ret)
338 return ret;
339
340 ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
341 &(struct nv_dma_v0) {
342 .target = NV_DMA_V0_TARGET_VRAM,
343 .access = NV_DMA_V0_ACCESS_RDWR,
344 .start = 0,
345 .limit = device->info.ram_user - 1,
346 }, sizeof(struct nv_dma_v0),
347 &dmac->vram);
348 if (ret)
349 return ret;
350
351 return ret;
352 }
353
354 /******************************************************************************
355 * Core
356 *****************************************************************************/
357
358 struct nv50_mast {
359 struct nv50_dmac base;
360 };
361
362 static int
363 nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
364 u64 syncbuf, struct nv50_mast *core)
365 {
366 struct nv50_disp_core_channel_dma_v0 args = {
367 .pushbuf = 0xb0007d00,
368 };
369 static const s32 oclass[] = {
370 GP104_DISP_CORE_CHANNEL_DMA,
371 GP100_DISP_CORE_CHANNEL_DMA,
372 GM200_DISP_CORE_CHANNEL_DMA,
373 GM107_DISP_CORE_CHANNEL_DMA,
374 GK110_DISP_CORE_CHANNEL_DMA,
375 GK104_DISP_CORE_CHANNEL_DMA,
376 GF110_DISP_CORE_CHANNEL_DMA,
377 GT214_DISP_CORE_CHANNEL_DMA,
378 GT206_DISP_CORE_CHANNEL_DMA,
379 GT200_DISP_CORE_CHANNEL_DMA,
380 G82_DISP_CORE_CHANNEL_DMA,
381 NV50_DISP_CORE_CHANNEL_DMA,
382 0
383 };
384
385 return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
386 syncbuf, &core->base);
387 }
388
389 /******************************************************************************
390 * Base
391 *****************************************************************************/
392
393 struct nv50_sync {
394 struct nv50_dmac base;
395 u32 addr;
396 u32 data;
397 };
398
399 static int
400 nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
401 int head, u64 syncbuf, struct nv50_sync *base)
402 {
403 struct nv50_disp_base_channel_dma_v0 args = {
404 .pushbuf = 0xb0007c00 | head,
405 .head = head,
406 };
407 static const s32 oclass[] = {
408 GK110_DISP_BASE_CHANNEL_DMA,
409 GK104_DISP_BASE_CHANNEL_DMA,
410 GF110_DISP_BASE_CHANNEL_DMA,
411 GT214_DISP_BASE_CHANNEL_DMA,
412 GT200_DISP_BASE_CHANNEL_DMA,
413 G82_DISP_BASE_CHANNEL_DMA,
414 NV50_DISP_BASE_CHANNEL_DMA,
415 0
416 };
417
418 return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
419 syncbuf, &base->base);
420 }
421
422 /******************************************************************************
423 * Overlay
424 *****************************************************************************/
425
426 struct nv50_ovly {
427 struct nv50_dmac base;
428 };
429
430 static int
431 nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
432 int head, u64 syncbuf, struct nv50_ovly *ovly)
433 {
434 struct nv50_disp_overlay_channel_dma_v0 args = {
435 .pushbuf = 0xb0007e00 | head,
436 .head = head,
437 };
438 static const s32 oclass[] = {
439 GK104_DISP_OVERLAY_CONTROL_DMA,
440 GF110_DISP_OVERLAY_CONTROL_DMA,
441 GT214_DISP_OVERLAY_CHANNEL_DMA,
442 GT200_DISP_OVERLAY_CHANNEL_DMA,
443 G82_DISP_OVERLAY_CHANNEL_DMA,
444 NV50_DISP_OVERLAY_CHANNEL_DMA,
445 0
446 };
447
448 return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
449 syncbuf, &ovly->base);
450 }
451
452 struct nv50_head {
453 struct nouveau_crtc base;
454 struct nouveau_bo *image;
455 struct nv50_curs curs;
456 struct nv50_sync sync;
457 struct nv50_ovly ovly;
458 struct nv50_oimm oimm;
459
460 struct nv50_head_atom arm;
461 struct nv50_head_atom asy;
462 };
463
464 #define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
465 #define nv50_curs(c) (&nv50_head(c)->curs)
466 #define nv50_sync(c) (&nv50_head(c)->sync)
467 #define nv50_ovly(c) (&nv50_head(c)->ovly)
468 #define nv50_oimm(c) (&nv50_head(c)->oimm)
469 #define nv50_chan(c) (&(c)->base.base)
470 #define nv50_vers(c) nv50_chan(c)->user.oclass
471
472 struct nv50_fbdma {
473 struct list_head head;
474 struct nvif_object core;
475 struct nvif_object base[4];
476 };
477
478 struct nv50_disp {
479 struct nvif_object *disp;
480 struct nv50_mast mast;
481
482 struct list_head fbdma;
483
484 struct nouveau_bo *sync;
485 };
486
487 static struct nv50_disp *
488 nv50_disp(struct drm_device *dev)
489 {
490 return nouveau_display(dev)->priv;
491 }
492
493 #define nv50_mast(d) (&nv50_disp(d)->mast)
494
495 static struct drm_crtc *
496 nv50_display_crtc_get(struct drm_encoder *encoder)
497 {
498 return nouveau_encoder(encoder)->crtc;
499 }
500
501 /******************************************************************************
502 * EVO channel helpers
503 *****************************************************************************/
504 static u32 *
505 evo_wait(void *evoc, int nr)
506 {
507 struct nv50_dmac *dmac = evoc;
508 struct nvif_device *device = dmac->base.device;
509 u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
510
511 mutex_lock(&dmac->lock);
512 if (put + nr >= (PAGE_SIZE / 4) - 8) {
513 dmac->ptr[put] = 0x20000000;
514
515 nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
516 if (nvif_msec(device, 2000,
517 if (!nvif_rd32(&dmac->base.user, 0x0004))
518 break;
519 ) < 0) {
520 mutex_unlock(&dmac->lock);
521 printk(KERN_ERR "nouveau: evo channel stalled\n");
522 return NULL;
523 }
524
525 put = 0;
526 }
527
528 return dmac->ptr + put;
529 }
530
531 static void
532 evo_kick(u32 *push, void *evoc)
533 {
534 struct nv50_dmac *dmac = evoc;
535 nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
536 mutex_unlock(&dmac->lock);
537 }
538
539 #define evo_mthd(p,m,s) do { \
540 const u32 _m = (m), _s = (s); \
541 if (drm_debug & DRM_UT_KMS) \
542 printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__); \
543 *((p)++) = ((_s << 18) | _m); \
544 } while(0)
545
546 #define evo_data(p,d) do { \
547 const u32 _d = (d); \
548 if (drm_debug & DRM_UT_KMS) \
549 printk(KERN_ERR "\t%08x\n", _d); \
550 *((p)++) = _d; \
551 } while(0)
552
553 static bool
554 evo_sync_wait(void *data)
555 {
556 if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
557 return true;
558 usleep_range(1, 2);
559 return false;
560 }
561
562 static int
563 evo_sync(struct drm_device *dev)
564 {
565 struct nvif_device *device = &nouveau_drm(dev)->device;
566 struct nv50_disp *disp = nv50_disp(dev);
567 struct nv50_mast *mast = nv50_mast(dev);
568 u32 *push = evo_wait(mast, 8);
569 if (push) {
570 nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
571 evo_mthd(push, 0x0084, 1);
572 evo_data(push, 0x80000000 | EVO_MAST_NTFY);
573 evo_mthd(push, 0x0080, 2);
574 evo_data(push, 0x00000000);
575 evo_data(push, 0x00000000);
576 evo_kick(push, mast);
577 if (nvif_msec(device, 2000,
578 if (evo_sync_wait(disp->sync))
579 break;
580 ) >= 0)
581 return 0;
582 }
583
584 return -EBUSY;
585 }
586
587 /******************************************************************************
588 * Page flipping channel
589 *****************************************************************************/
590 struct nouveau_bo *
591 nv50_display_crtc_sema(struct drm_device *dev, int crtc)
592 {
593 return nv50_disp(dev)->sync;
594 }
595
596 struct nv50_display_flip {
597 struct nv50_disp *disp;
598 struct nv50_sync *chan;
599 };
600
601 static bool
602 nv50_display_flip_wait(void *data)
603 {
604 struct nv50_display_flip *flip = data;
605 if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) ==
606 flip->chan->data)
607 return true;
608 usleep_range(1, 2);
609 return false;
610 }
611
612 void
613 nv50_display_flip_stop(struct drm_crtc *crtc)
614 {
615 struct nvif_device *device = &nouveau_drm(crtc->dev)->device;
616 struct nv50_display_flip flip = {
617 .disp = nv50_disp(crtc->dev),
618 .chan = nv50_sync(crtc),
619 };
620 u32 *push;
621
622 push = evo_wait(flip.chan, 8);
623 if (push) {
624 evo_mthd(push, 0x0084, 1);
625 evo_data(push, 0x00000000);
626 evo_mthd(push, 0x0094, 1);
627 evo_data(push, 0x00000000);
628 evo_mthd(push, 0x00c0, 1);
629 evo_data(push, 0x00000000);
630 evo_mthd(push, 0x0080, 1);
631 evo_data(push, 0x00000000);
632 evo_kick(push, flip.chan);
633 }
634
635 nvif_msec(device, 2000,
636 if (nv50_display_flip_wait(&flip))
637 break;
638 );
639 }
640
641 int
642 nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
643 struct nouveau_channel *chan, u32 swap_interval)
644 {
645 struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
646 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
647 struct nv50_head *head = nv50_head(crtc);
648 struct nv50_sync *sync = nv50_sync(crtc);
649 u32 *push;
650 int ret;
651
652 if (crtc->primary->fb->width != fb->width ||
653 crtc->primary->fb->height != fb->height)
654 return -EINVAL;
655
656 swap_interval <<= 4;
657 if (swap_interval == 0)
658 swap_interval |= 0x100;
659 if (chan == NULL)
660 evo_sync(crtc->dev);
661
662 push = evo_wait(sync, 128);
663 if (unlikely(push == NULL))
664 return -EBUSY;
665
666 if (chan && chan->user.oclass < G82_CHANNEL_GPFIFO) {
667 ret = RING_SPACE(chan, 8);
668 if (ret)
669 return ret;
670
671 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
672 OUT_RING (chan, NvEvoSema0 + nv_crtc->index);
673 OUT_RING (chan, sync->addr ^ 0x10);
674 BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
675 OUT_RING (chan, sync->data + 1);
676 BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
677 OUT_RING (chan, sync->addr);
678 OUT_RING (chan, sync->data);
679 } else
680 if (chan && chan->user.oclass < FERMI_CHANNEL_GPFIFO) {
681 u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
682 ret = RING_SPACE(chan, 12);
683 if (ret)
684 return ret;
685
686 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
687 OUT_RING (chan, chan->vram.handle);
688 BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
689 OUT_RING (chan, upper_32_bits(addr ^ 0x10));
690 OUT_RING (chan, lower_32_bits(addr ^ 0x10));
691 OUT_RING (chan, sync->data + 1);
692 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
693 BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
694 OUT_RING (chan, upper_32_bits(addr));
695 OUT_RING (chan, lower_32_bits(addr));
696 OUT_RING (chan, sync->data);
697 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL);
698 } else
699 if (chan) {
700 u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
701 ret = RING_SPACE(chan, 10);
702 if (ret)
703 return ret;
704
705 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
706 OUT_RING (chan, upper_32_bits(addr ^ 0x10));
707 OUT_RING (chan, lower_32_bits(addr ^ 0x10));
708 OUT_RING (chan, sync->data + 1);
709 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG |
710 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
711 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
712 OUT_RING (chan, upper_32_bits(addr));
713 OUT_RING (chan, lower_32_bits(addr));
714 OUT_RING (chan, sync->data);
715 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL |
716 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
717 }
718
719 if (chan) {
720 sync->addr ^= 0x10;
721 sync->data++;
722 FIRE_RING (chan);
723 }
724
725 /* queue the flip */
726 evo_mthd(push, 0x0100, 1);
727 evo_data(push, 0xfffe0000);
728 evo_mthd(push, 0x0084, 1);
729 evo_data(push, swap_interval);
730 if (!(swap_interval & 0x00000100)) {
731 evo_mthd(push, 0x00e0, 1);
732 evo_data(push, 0x40000000);
733 }
734 evo_mthd(push, 0x0088, 4);
735 evo_data(push, sync->addr);
736 evo_data(push, sync->data++);
737 evo_data(push, sync->data);
738 evo_data(push, sync->base.sync.handle);
739 evo_mthd(push, 0x00a0, 2);
740 evo_data(push, 0x00000000);
741 evo_data(push, 0x00000000);
742 evo_mthd(push, 0x00c0, 1);
743 evo_data(push, nv_fb->r_handle);
744 evo_mthd(push, 0x0110, 2);
745 evo_data(push, 0x00000000);
746 evo_data(push, 0x00000000);
747 if (nv50_vers(sync) < GF110_DISP_BASE_CHANNEL_DMA) {
748 evo_mthd(push, 0x0800, 5);
749 evo_data(push, nv_fb->nvbo->bo.offset >> 8);
750 evo_data(push, 0);
751 evo_data(push, (fb->height << 16) | fb->width);
752 evo_data(push, nv_fb->r_pitch);
753 evo_data(push, nv_fb->r_format);
754 } else {
755 evo_mthd(push, 0x0400, 5);
756 evo_data(push, nv_fb->nvbo->bo.offset >> 8);
757 evo_data(push, 0);
758 evo_data(push, (fb->height << 16) | fb->width);
759 evo_data(push, nv_fb->r_pitch);
760 evo_data(push, nv_fb->r_format);
761 }
762 evo_mthd(push, 0x0080, 1);
763 evo_data(push, 0x00000000);
764 evo_kick(push, sync);
765
766 nouveau_bo_ref(nv_fb->nvbo, &head->image);
767 return 0;
768 }
769
770 /******************************************************************************
771 * Head
772 *****************************************************************************/
773
774 static void
775 nv50_head_core_clr(struct nv50_head *head)
776 {
777 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
778 u32 *push;
779 if ((push = evo_wait(core, 2))) {
780 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
781 evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
782 else
783 evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
784 evo_data(push, 0x00000000);
785 evo_kick(push, core);
786 }
787 }
788
789 static void
790 nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
791 {
792 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
793 u32 *push;
794 if ((push = evo_wait(core, 9))) {
795 if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
796 evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
797 evo_data(push, asyh->core.offset >> 8);
798 evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
799 evo_data(push, (asyh->core.h << 16) | asyh->core.w);
800 evo_data(push, asyh->core.layout << 20 |
801 (asyh->core.pitch >> 8) << 8 |
802 asyh->core.block);
803 evo_data(push, asyh->core.kind << 16 |
804 asyh->core.format << 8);
805 evo_data(push, asyh->core.handle);
806 evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
807 evo_data(push, (asyh->core.y << 16) | asyh->core.x);
808 } else
809 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
810 evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
811 evo_data(push, asyh->core.offset >> 8);
812 evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
813 evo_data(push, (asyh->core.h << 16) | asyh->core.w);
814 evo_data(push, asyh->core.layout << 20 |
815 (asyh->core.pitch >> 8) << 8 |
816 asyh->core.block);
817 evo_data(push, asyh->core.format << 8);
818 evo_data(push, asyh->core.handle);
819 evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
820 evo_data(push, (asyh->core.y << 16) | asyh->core.x);
821 } else {
822 evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
823 evo_data(push, asyh->core.offset >> 8);
824 evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
825 evo_data(push, (asyh->core.h << 16) | asyh->core.w);
826 evo_data(push, asyh->core.layout << 24 |
827 (asyh->core.pitch >> 8) << 8 |
828 asyh->core.block);
829 evo_data(push, asyh->core.format << 8);
830 evo_data(push, asyh->core.handle);
831 evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
832 evo_data(push, (asyh->core.y << 16) | asyh->core.x);
833 }
834 evo_kick(push, core);
835 }
836 }
837
838 static void
839 nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
840 {
841 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
842 struct nv50_head_mode *m = &asyh->mode;
843 u32 *push;
844 if ((push = evo_wait(core, 14))) {
845 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
846 evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
847 evo_data(push, 0x00800000 | m->clock);
848 evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
849 evo_mthd(push, 0x0810 + (head->base.index * 0x400), 6);
850 evo_data(push, 0x00000000);
851 evo_data(push, (m->v.active << 16) | m->h.active );
852 evo_data(push, (m->v.synce << 16) | m->h.synce );
853 evo_data(push, (m->v.blanke << 16) | m->h.blanke );
854 evo_data(push, (m->v.blanks << 16) | m->h.blanks );
855 evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
856 evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
857 evo_data(push, 0x00000000);
858 } else {
859 evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
860 evo_data(push, 0x00000000);
861 evo_data(push, (m->v.active << 16) | m->h.active );
862 evo_data(push, (m->v.synce << 16) | m->h.synce );
863 evo_data(push, (m->v.blanke << 16) | m->h.blanke );
864 evo_data(push, (m->v.blanks << 16) | m->h.blanks );
865 evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
866 evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
867 evo_data(push, 0x00000000); /* ??? */
868 evo_data(push, 0xffffff00);
869 evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
870 evo_data(push, m->clock * 1000);
871 evo_data(push, 0x00200000); /* ??? */
872 evo_data(push, m->clock * 1000);
873 }
874 evo_kick(push, core);
875 }
876 }
877
878 static void
879 nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y)
880 {
881 if (asyh->clr.core && (!asyh->set.core || y))
882 nv50_head_core_clr(head);
883 }
884
885 static void
886 nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
887 {
888 if (asyh->set.mode ) nv50_head_mode (head, asyh);
889 if (asyh->set.core ) nv50_head_core_set(head, asyh);
890 }
891
892 static void
893 nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
894 {
895 struct drm_display_mode *mode = &asyh->state.adjusted_mode;
896 u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
897 u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
898 u32 hbackp = mode->htotal - mode->hsync_end;
899 u32 vbackp = (mode->vtotal - mode->vsync_end) * vscan / ilace;
900 u32 hfrontp = mode->hsync_start - mode->hdisplay;
901 u32 vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
902 struct nv50_head_mode *m = &asyh->mode;
903
904 m->h.active = mode->htotal;
905 m->h.synce = mode->hsync_end - mode->hsync_start - 1;
906 m->h.blanke = m->h.synce + hbackp;
907 m->h.blanks = mode->htotal - hfrontp - 1;
908
909 m->v.active = mode->vtotal * vscan / ilace;
910 m->v.synce = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
911 m->v.blanke = m->v.synce + vbackp;
912 m->v.blanks = m->v.active - vfrontp - 1;
913
914 /*XXX: Safe underestimate, even "0" works */
915 m->v.blankus = (m->v.active - mode->vdisplay - 2) * m->h.active;
916 m->v.blankus *= 1000;
917 m->v.blankus /= mode->clock;
918
919 if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
920 m->v.blank2e = m->v.active + m->v.synce + vbackp;
921 m->v.blank2s = m->v.blank2e + (mode->vdisplay * vscan / ilace);
922 m->v.active = (m->v.active * 2) + 1;
923 m->interlace = true;
924 } else {
925 m->v.blank2e = 0;
926 m->v.blank2s = 1;
927 m->interlace = false;
928 }
929 m->clock = mode->clock;
930
931 drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V);
932 asyh->set.mode = true;
933 }
934
935 static int
936 nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
937 {
938 struct nouveau_drm *drm = nouveau_drm(crtc->dev);
939 struct nv50_disp *disp = nv50_disp(crtc->dev);
940 struct nv50_head *head = nv50_head(crtc);
941 struct nv50_head_atom *armh = &head->arm;
942 struct nv50_head_atom *asyh = nv50_head_atom(state);
943
944 NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
945 asyh->clr.mask = 0;
946 asyh->set.mask = 0;
947
948 if (asyh->state.active) {
949 if (asyh->state.mode_changed)
950 nv50_head_atomic_check_mode(head, asyh);
951
952 if ((asyh->core.visible = (asyh->base.cpp != 0))) {
953 asyh->core.x = asyh->base.x;
954 asyh->core.y = asyh->base.y;
955 asyh->core.w = asyh->base.w;
956 asyh->core.h = asyh->base.h;
957 } else
958 if ((asyh->core.visible = true)) {
959 /*XXX: We need to either find some way of having the
960 * primary base layer appear black, while still
961 * being able to display the other layers, or we
962 * need to allocate a dummy black surface here.
963 */
964 asyh->core.x = 0;
965 asyh->core.y = 0;
966 asyh->core.w = asyh->state.mode.hdisplay;
967 asyh->core.h = asyh->state.mode.vdisplay;
968 }
969 asyh->core.handle = disp->mast.base.vram.handle;
970 asyh->core.offset = 0;
971 asyh->core.format = 0xcf;
972 asyh->core.kind = 0;
973 asyh->core.layout = 1;
974 asyh->core.block = 0;
975 asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
976 } else {
977 asyh->core.visible = false;
978 }
979
980 if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
981 if (asyh->core.visible) {
982 if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
983 asyh->set.core = true;
984 } else
985 if (armh->core.visible) {
986 asyh->clr.core = true;
987 }
988 } else {
989 asyh->clr.core = armh->core.visible;
990 asyh->set.core = asyh->core.visible;
991 }
992
993 memcpy(armh, asyh, sizeof(*asyh));
994 asyh->state.mode_changed = 0;
995 return 0;
996 }
997
998 /******************************************************************************
999 * CRTC
1000 *****************************************************************************/
1001 static int
1002 nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
1003 {
1004 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
1005 struct nouveau_connector *nv_connector;
1006 struct drm_connector *connector;
1007 u32 *push, mode = 0x00;
1008
1009 nv_connector = nouveau_crtc_connector_get(nv_crtc);
1010 connector = &nv_connector->base;
1011 if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
1012 if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3)
1013 mode = DITHERING_MODE_DYNAMIC2X2;
1014 } else {
1015 mode = nv_connector->dithering_mode;
1016 }
1017
1018 if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
1019 if (connector->display_info.bpc >= 8)
1020 mode |= DITHERING_DEPTH_8BPC;
1021 } else {
1022 mode |= nv_connector->dithering_depth;
1023 }
1024
1025 push = evo_wait(mast, 4);
1026 if (push) {
1027 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
1028 evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
1029 evo_data(push, mode);
1030 } else
1031 if (nv50_vers(mast) < GK104_DISP_CORE_CHANNEL_DMA) {
1032 evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
1033 evo_data(push, mode);
1034 } else {
1035 evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
1036 evo_data(push, mode);
1037 }
1038
1039 if (update) {
1040 evo_mthd(push, 0x0080, 1);
1041 evo_data(push, 0x00000000);
1042 }
1043 evo_kick(push, mast);
1044 }
1045
1046 return 0;
1047 }
1048
/* Program the head's scaler so the user-requested mode (umode) is mapped
 * onto the mode actually driven on the output (omode), honouring the
 * connector's scaling-mode property and any underscan borders.
 * Returns 0 (hardware submission failure is silently skipped).
 */
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode) {
		mode = nv_connector->scaling_mode;
		if (nv_connector->scaling_full) /* non-EDID LVDS/eDP mode */
			mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	/* mode != NONE implies nv_connector is non-NULL (set above). */
	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 *
	 * aspect is kept in 19.13-style fixed point (<< 19 with + aspect/2
	 * rounding on the way back down).
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else oY = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			/* no explicit hborder: shave ~6.25% + 32 pixels */
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	/* Push scaler output size + input size; method layout differs
	 * between pre-GF110 (0x400 stride) and GF110+ (0x300 stride).
	 */
	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			/*XXX: SCALE_CTRL_ACTIVE??? */
			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		} else {
			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		}

		evo_kick(push, mast);

		if (update) {
			nv50_display_flip_stop(crtc);
			nv50_display_flip_next(crtc, crtc->primary->fb,
					       NULL, 1);
		}
	}

	return 0;
}
1155
1156 static int
1157 nv50_crtc_set_raster_vblank_dmi(struct nouveau_crtc *nv_crtc, u32 usec)
1158 {
1159 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
1160 u32 *push;
1161
1162 push = evo_wait(mast, 8);
1163 if (!push)
1164 return -ENOMEM;
1165
1166 evo_mthd(push, 0x0828 + (nv_crtc->index * 0x400), 1);
1167 evo_data(push, usec);
1168 evo_kick(push, mast);
1169 return 0;
1170 }
1171
1172 static int
1173 nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
1174 {
1175 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
1176 u32 *push, hue, vib;
1177 int adj;
1178
1179 adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
1180 vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
1181 hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;
1182
1183 push = evo_wait(mast, 16);
1184 if (push) {
1185 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
1186 evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
1187 evo_data(push, (hue << 20) | (vib << 8));
1188 } else {
1189 evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
1190 evo_data(push, (hue << 20) | (vib << 8));
1191 }
1192
1193 if (update) {
1194 evo_mthd(push, 0x0080, 1);
1195 evo_data(push, 0x00000000);
1196 }
1197 evo_kick(push, mast);
1198 }
1199
1200 return 0;
1201 }
1202
/* Attach a new framebuffer to the head's core surface by updating the
 * head's staged atomic state and flushing it.  Returns -EINVAL for
 * pixel formats without a usable depth, 0 otherwise.
 */
static int
nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	struct nv50_head *head = nv50_head(&nv_crtc->base);
	struct nv50_head_atom *asyh = &head->asy;
	const struct drm_format_info *info;

	/* reject formats the core surface can't describe */
	info = drm_format_info(nvfb->base.pixel_format);
	if (!info || !info->depth)
		return -EINVAL;

	/* stage the new surface parameters, then run the shared atomic
	 * check and flush the resulting state to hardware
	 */
	asyh->base.depth = info->depth;
	asyh->base.cpp = info->cpp[0];
	asyh->base.x = x;
	asyh->base.y = y;
	asyh->base.w = nvfb->base.width;
	asyh->base.h = nvfb->base.height;
	nv50_head_atomic_check(&head->base.base, &asyh->state);
	nv50_head_flush_set(head, asyh);

	if (update) {
		/* 0x0080/0 is the commit sequence used for every "update"
		 * path in this file
		 */
		struct nv50_mast *core = nv50_mast(nv_crtc->base.dev);
		u32 *push = evo_wait(core, 2);
		if (push) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, core);
		}
	}

	nv_crtc->fb.handle = nvfb->r_handle;
	return 0;
}
1238
/* Enable the hardware cursor on this head, pointing it at the cursor
 * buffer object.  Method addresses (and whether a separate VRAM ctxdma
 * handle must be set) depend on the display class.
 */
static void
nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			/* G82..pre-GF110 also take a ctxdma handle */
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
		}
		evo_kick(push, mast);
	}
	/* tracked even if the push buffer wasn't available */
	nv_crtc->cursor.visible = true;
}
1267
/* Disable the hardware cursor on this head; the inverse of
 * nv50_crtc_cursor_show() (0x05000000 vs 0x85000000, ctxdma cleared).
 */
static void
nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}
	nv_crtc->cursor.visible = false;
}
1293
1294 static void
1295 nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
1296 {
1297 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
1298
1299 if (show && nv_crtc->cursor.nvbo && nv_crtc->base.enabled)
1300 nv50_crtc_cursor_show(nv_crtc);
1301 else
1302 nv50_crtc_cursor_hide(nv_crtc);
1303
1304 if (update) {
1305 u32 *push = evo_wait(mast, 2);
1306 if (push) {
1307 evo_mthd(push, 0x0080, 1);
1308 evo_data(push, 0x00000000);
1309 evo_kick(push, mast);
1310 }
1311 }
1312 }
1313
/* Intentionally empty: no per-CRTC DPMS action is taken here; power
 * handling happens in the prepare/commit and encoder paths instead.
 */
static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
1318
/* Legacy helper "prepare": quiesce the head before a modeset.  Stops
 * pending flips, clears the head's atomic state (active = false), blanks
 * the core channel per display class, and hides the cursor.
 */
static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_head_atom *asyh = &head->asy;
	u32 *push;

	nv50_display_flip_stop(crtc);

	/* mark the head inactive and flush the "clear" state */
	asyh->state.active = false;
	nv50_head_atomic_check(&head->base.base, &asyh->state);
	nv50_head_flush_clr(head, asyh, false);

	push = evo_wait(mast, 6);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
}
1357
/* Legacy helper "commit": re-enable the head after a modeset.  Points
 * the head at its LUT buffer (per display class), re-shows the cursor
 * with an immediate update, and restarts page flipping.
 */
static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	push = evo_wait(mast, 32);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
		}

		evo_kick(push, mast);
	}

	/* update=true here also commits the pending core channel state */
	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
}
1394
/* Helper mode_fixup: populate the crtc_* timing fields of the adjusted
 * mode (CRTC_INTERLACE_HALVE_V halves vertical values for interlaced
 * modes).  Never rejects a mode.
 */
static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
	return true;
}
1402
1403 static int
1404 nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
1405 {
1406 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb);
1407 struct nv50_head *head = nv50_head(crtc);
1408 int ret;
1409
1410 ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM, true);
1411 if (ret == 0) {
1412 if (head->image)
1413 nouveau_bo_unpin(head->image);
1414 nouveau_bo_ref(nvfb->nvbo, &head->image);
1415 }
1416
1417 return ret;
1418 }
1419
1420 static int
1421 nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
1422 struct drm_display_mode *mode, int x, int y,
1423 struct drm_framebuffer *old_fb)
1424 {
1425 struct nv50_mast *mast = nv50_mast(crtc->dev);
1426 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1427 struct nouveau_connector *nv_connector;
1428 u32 *push;
1429 int ret;
1430 struct nv50_head *head = nv50_head(crtc);
1431 struct nv50_head_atom *asyh = &head->asy;
1432
1433 memcpy(&asyh->state.mode, umode, sizeof(*umode));
1434 memcpy(&asyh->state.adjusted_mode, mode, sizeof(*mode));
1435 asyh->state.active = true;
1436 asyh->state.mode_changed = true;
1437 nv50_head_atomic_check(&head->base.base, &asyh->state);
1438
1439 ret = nv50_crtc_swap_fbs(crtc, old_fb);
1440 if (ret)
1441 return ret;
1442
1443 nv50_head_flush_set(head, asyh);
1444
1445 push = evo_wait(mast, 64);
1446 if (push) {
1447 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
1448 evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
1449 evo_data(push, 0x00000311);
1450 evo_data(push, 0x00000100);
1451 } else {
1452 evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
1453 evo_data(push, 0x00000311);
1454 evo_data(push, 0x00000100);
1455 }
1456 evo_kick(push, mast);
1457 }
1458
1459 nv_connector = nouveau_crtc_connector_get(nv_crtc);
1460 nv50_crtc_set_dither(nv_crtc, false);
1461 nv50_crtc_set_scale(nv_crtc, false);
1462
1463 /* G94 only accepts this after setting scale */
1464 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA)
1465 nv50_crtc_set_raster_vblank_dmi(nv_crtc, asyh->mode.v.blankus);
1466
1467 nv50_crtc_set_color_vibrance(nv_crtc, false);
1468 nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
1469 return 0;
1470 }
1471
/* Legacy mode_set_base: change the scanout offset/framebuffer without a
 * full modeset.  Pins the new buffer, then pushes the new image with an
 * immediate update and restarts flipping.
 */
static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	/* nothing to display; not an error */
	if (!crtc->primary->fb) {
		NV_DEBUG(drm, "No FB bound\n");
		return 0;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
	return 0;
}
1494
/* Atomic (e.g. kgdb/panic) variant of mode_set_base: no buffer pinning,
 * just stop flips and point the head at 'fb' immediately.  The 'state'
 * argument is unused here.
 */
static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}
1505
1506 static void
1507 nv50_crtc_lut_load(struct drm_crtc *crtc)
1508 {
1509 struct nv50_disp *disp = nv50_disp(crtc->dev);
1510 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1511 void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
1512 int i;
1513
1514 for (i = 0; i < 256; i++) {
1515 u16 r = nv_crtc->lut.r[i] >> 2;
1516 u16 g = nv_crtc->lut.g[i] >> 2;
1517 u16 b = nv_crtc->lut.b[i] >> 2;
1518
1519 if (disp->disp->oclass < GF110_DISP) {
1520 writew(r + 0x0000, lut + (i * 0x08) + 0);
1521 writew(g + 0x0000, lut + (i * 0x08) + 2);
1522 writew(b + 0x0000, lut + (i * 0x08) + 4);
1523 } else {
1524 writew(r + 0x6000, lut + (i * 0x20) + 0);
1525 writew(g + 0x6000, lut + (i * 0x20) + 2);
1526 writew(b + 0x6000, lut + (i * 0x20) + 4);
1527 }
1528 }
1529 }
1530
/* Helper "disable": wait for the display engine to go idle, then drop
 * the pinned scanout image for this head.
 */
static void
nv50_crtc_disable(struct drm_crtc *crtc)
{
	struct nv50_head *head = nv50_head(crtc);
	evo_sync(crtc->dev);
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);
}
1540
/* Set (or clear, when handle == 0) the cursor image for this CRTC.
 * Only 64x64 cursors are accepted.  On success the new BO replaces the
 * previously pinned cursor BO; the GEM lookup reference is always
 * dropped before returning.
 */
static int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_gem_object *gem = NULL;
	struct nouveau_bo *nvbo = NULL;
	int ret = 0;

	if (handle) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM, true);
	}

	/* handle == 0 falls through here with nvbo == NULL, clearing
	 * the cursor; on pin failure the old cursor BO is kept
	 */
	if (ret == 0) {
		if (nv_crtc->cursor.nvbo)
			nouveau_bo_unpin(nv_crtc->cursor.nvbo);
		nouveau_bo_ref(nvbo, &nv_crtc->cursor.nvbo);
	}
	drm_gem_object_unreference_unlocked(gem);

	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
	return ret;
}
1572
/* Move the cursor by writing position (0x0084) then an update (0x0080)
 * directly to the cursor PIO channel, and remember the position so it
 * can be restored later (see nv50_crtc_cursor_restore).
 */
static int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_curs *curs = nv50_curs(crtc);
	struct nv50_chan *chan = nv50_chan(curs);
	nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nvif_wr32(&chan->user, 0x0080, 0x00000000);

	nv_crtc->cursor_saved_x = x;
	nv_crtc->cursor_saved_y = y;
	return 0;
}
1586
1587 static int
1588 nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1589 uint32_t size)
1590 {
1591 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1592 u32 i;
1593
1594 for (i = 0; i < size; i++) {
1595 nv_crtc->lut.r[i] = r[i];
1596 nv_crtc->lut.g[i] = g[i];
1597 nv_crtc->lut.b[i] = b[i];
1598 }
1599
1600 nv50_crtc_lut_load(crtc);
1601
1602 return 0;
1603 }
1604
/* Restore cursor position and visibility (used after the hardware
 * cursor state is lost, e.g. across suspend).
 */
static void
nv50_crtc_cursor_restore(struct nouveau_crtc *nv_crtc, int x, int y)
{
	nv50_crtc_cursor_move(&nv_crtc->base, x, y);

	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
}
1612
/* Tear down a head: per-head fbdma objects, the four per-head channels
 * (overlay, overlay-immediate, base/sync, cursor), pinned buffers, and
 * finally the DRM CRTC itself.
 */
static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_fbdma *fbdma;

	list_for_each_entry(fbdma, &disp->fbdma, head) {
		nvif_object_fini(&fbdma->base[nv_crtc->index]);
	}

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);
	nv50_dmac_destroy(&head->sync.base, disp->disp);
	nv50_pioc_destroy(&head->curs.base);

	/*XXX: this shouldn't be necessary, but the core doesn't call
	 * disconnect() during the cleanup paths
	 */
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);

	/*XXX: ditto */
	if (nv_crtc->cursor.nvbo)
		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
1650
/* Legacy (non-atomic) modeset helper vtable for nv50+ heads. */
static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
	.disable = nv50_crtc_disable,
};
1662
/* Userspace-facing CRTC operations for nv50+ heads. */
static const struct drm_crtc_funcs nv50_crtc_func = {
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = nouveau_crtc_set_config,
	.destroy = nv50_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};
1671
/* Create and register one head (CRTC): default gamma/vibrance state,
 * the LUT buffer object, and the head's four channels (cursor, base,
 * overlay-immediate, overlay).  On any failure after drm_crtc_init(),
 * nv50_crtc_destroy() unwinds everything allocated so far.
 */
static int
nv50_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	head->base.color_vibrance = 50;
	head->base.vibrant_hue = 0;
	head->base.cursor.set_pos = nv50_crtc_cursor_restore;
	/* identity gamma ramp (8-bit index scaled to 16-bit) */
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	crtc = &head->base.base;
	/* NOTE(review): drm_crtc_init() can fail; its return value is
	 * ignored here — confirm whether that is acceptable on this path.
	 */
	drm_crtc_init(dev, crtc, &nv50_crtc_func);
	drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* allocate, pin and map the LUT buffer object */
	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	/* allocate cursor resources */
	ret = nv50_curs_create(device, disp->disp, index, &head->curs);
	if (ret)
		goto out;

	/* allocate page flip / sync resources */
	ret = nv50_base_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->sync);
	if (ret)
		goto out;

	head->sync.addr = EVO_FLIP_SEM0(index);
	head->sync.data = 0x00000000;

	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_crtc_destroy(crtc);
	return ret;
}
1746
1747 /******************************************************************************
1748 * Encoder helpers
1749 *****************************************************************************/
/* Shared encoder mode_fixup: when the connector has a native (EDID)
 * mode and scaling is in effect, substitute the native mode as the
 * adjusted mode so the head's scaler does the work.  Never rejects a
 * mode (always returns true).
 */
static bool
nv50_encoder_mode_fixup(struct drm_encoder *encoder,
			const struct drm_display_mode *mode,
			struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		nv_connector->scaling_full = false;
		if (nv_connector->scaling_mode == DRM_MODE_SCALE_NONE) {
			switch (nv_connector->type) {
			case DCB_CONNECTOR_LVDS:
			case DCB_CONNECTOR_LVDS_SPWG:
			case DCB_CONNECTOR_eDP:
				/* force use of scaler for non-edid modes */
				if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
					return true;
				nv_connector->scaling_full = true;
				break;
			default:
				/* SCALE_NONE on other connectors: leave the
				 * requested mode untouched
				 */
				return true;
			}
		}

		drm_mode_copy(adjusted_mode, nv_connector->native_mode);
	}

	return true;
}
1781
1782 /******************************************************************************
1783 * DAC
1784 *****************************************************************************/
/* DAC DPMS: ask the display core (via the DAC_PWR method) to gate
 * hsync/vsync according to the requested DPMS level.
 */
static void
nv50_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
		.pwr.state = 1,
		.pwr.data = 1,
		/* vsync runs unless SUSPEND/OFF; hsync unless STANDBY/OFF */
		.pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
			      mode != DRM_MODE_DPMS_OFF),
		.pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
			      mode != DRM_MODE_DPMS_OFF),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
1808
/* Intentionally empty: all DAC programming happens in mode_set/dpms. */
static void
nv50_dac_commit(struct drm_encoder *encoder)
{
}
1813
/* Route the DAC to its CRTC and program the sync polarities via the
 * core channel.  Register layout and sync/magic encodings differ
 * between pre-GF110 and GF110+ display classes.
 */
static void
nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	/* remember which crtc this encoder drives, for disconnect() */
	nv_encoder->crtc = encoder->crtc;
}
1862
/* Detach the DAC from its CRTC: blank the head, then clear the DAC's
 * head routing in the core channel.  No-op if not currently attached.
 */
static void
nv50_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
1889
1890 static enum drm_connector_status
1891 nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
1892 {
1893 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1894 struct nv50_disp *disp = nv50_disp(encoder->dev);
1895 struct {
1896 struct nv50_disp_mthd_v1 base;
1897 struct nv50_disp_dac_load_v0 load;
1898 } args = {
1899 .base.version = 1,
1900 .base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
1901 .base.hasht = nv_encoder->dcb->hasht,
1902 .base.hashm = nv_encoder->dcb->hashm,
1903 };
1904 int ret;
1905
1906 args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
1907 if (args.load.data == 0)
1908 args.load.data = 340;
1909
1910 ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
1911 if (ret || !args.load.load)
1912 return connector_status_disconnected;
1913
1914 return connector_status_connected;
1915 }
1916
/* Release a DAC encoder (the nouveau_encoder embeds the drm_encoder). */
static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
1923
/* DAC encoder helper vtable; prepare/disable both route through
 * nv50_dac_disconnect.
 */
static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
	.dpms = nv50_dac_dpms,
	.mode_fixup = nv50_encoder_mode_fixup,
	.prepare = nv50_dac_disconnect,
	.commit = nv50_dac_commit,
	.mode_set = nv50_dac_mode_set,
	.disable = nv50_dac_disconnect,
	.get_crtc = nv50_display_crtc_get,
	.detect = nv50_dac_detect
};
1934
/* DAC encoder core operations. */
static const struct drm_encoder_funcs nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};
1938
/* Create a DAC encoder for 'dcbe' and attach it to 'connector'. */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	/* or is the output-resource index, from the DCB or-mask */
	nv_encoder->or = ffs(dcbe->or) - 1;

	/* i2c bus is optional (may be absent for this output) */
	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	/* NOTE(review): drm_encoder_init() return value is ignored —
	 * confirm failure here is impossible/acceptable on this path.
	 */
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
1969
1970 /******************************************************************************
1971 * Audio
1972 *****************************************************************************/
/* Enable HDA audio on the SOR driving this encoder by passing the
 * connector's ELD to the display core.  No-op if the monitor doesn't
 * advertise audio support in its EDID.
 */
static void
nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht = nv_encoder->dcb->hasht,
		/* encode the head index into the hash mask */
		.base.mthd.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
				   (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	/* only send the used portion of the ELD */
	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}
2004
/* Disable HDA audio for this encoder/head: the same HDA_ELD method as
 * nv50_audio_mode_set(), but with no ELD payload.
 */
static void
nv50_audio_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
			      (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
2023
2024 /******************************************************************************
2025 * HDMI
2026 *****************************************************************************/
/* Enable HDMI infoframe/audio support on the SOR for this mode, then
 * enable audio.  No-op for non-HDMI monitors.
 */
static void
nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
			      (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	/* NOTE(review): assumes hblank (htotal - hdisplay) > rekey + 18;
	 * otherwise this u32 arithmetic underflows — confirm modes are
	 * validated upstream.
	 */
	max_ac_packet = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv50_audio_mode_set(encoder, mode);
}
2060
/* Disable HDMI support on the SOR for this head (HDMI_PWR with
 * pwr.state left at 0).
 */
static void
nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
			      (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
2079
2080 /******************************************************************************
2081 * MST
2082 *****************************************************************************/
/* Per-SOR DisplayPort MST state. */
struct nv50_mstm {
	struct nouveau_encoder *outp; /* the SOR this manager belongs to */

	struct drm_dp_mst_topology_mgr mgr;
};
2088
/* Enable/disable MST on both the sink and the display core.  'dpcd' is
 * the sink's DPCD revision byte; for DP 1.2+ sinks (>= 0x12) the
 * DP_MST_EN bit in DP_MSTM_CTRL is updated first, then the core is told
 * via the DP_MST_LINK method.
 */
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp;
	int ret;

	if (dpcd >= 0x12) {
		/* read-modify-write DP_MSTM_CTRL; 'dpcd' is reused as the
		 * register scratch value here
		 */
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
		if (ret < 0)
			return ret;

		dpcd &= ~DP_MST_EN;
		if (state)
			dpcd |= DP_MST_EN;

		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
		if (ret < 0)
			return ret;
	}

	return nvif_mthd(disp, 0, &args, sizeof(args));
}
2123
/* Probe the sink's MST capability and, when 'allow' is set, enable MST
 * on both sink and hardware.  dpcd[0] must contain the DPCD revision;
 * dpcd[1] is filled here with DP_MSTM_CAP.  Returns the resulting MST
 * state (non-zero when MST is active) or a negative error code.
 */
int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	int ret, state = 0;

	/* Non-MST-capable encoders (pre-GF119 DP) have no mstm. */
	if (!mstm)
		return 0;

	if (dpcd[0] >= 0x12 && allow) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			return ret;

		state = dpcd[1] & DP_MST_CAP;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], state);
	if (ret)
		return ret;

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
	if (ret)
		/* Topology manager refused; roll hardware back to SST. */
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return mstm->mgr.mst_state;
}
2150
2151 static void
2152 nv50_mstm_del(struct nv50_mstm **pmstm)
2153 {
2154 struct nv50_mstm *mstm = *pmstm;
2155 if (mstm) {
2156 kfree(*pmstm);
2157 *pmstm = NULL;
2158 }
2159 }
2160
/* Allocate per-encoder MST state and initialise its topology manager.
 * On success *pmstm owns the allocation (released by nv50_mstm_del()).
 * Note *pmstm is also left set when topology-manager init fails, so
 * the caller's eventual nv50_mstm_del() still frees it.
 */
static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct nv50_mstm **pmstm)
{
	/* One MST payload per head this OR is able to drive. */
	const int max_payloads = hweight8(outp->dcb->heads);
	struct drm_device *dev = outp->base.base.dev;
	struct nv50_mstm *mstm;
	int ret;

	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
		return -ENOMEM;
	mstm->outp = outp;

	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev->dev, aux, aux_max,
					   max_payloads, conn_base_id);
	if (ret)
		return ret;

	return 0;
}
2181
2182 /******************************************************************************
2183 * SOR
2184 *****************************************************************************/
/* Legacy DPMS for SORs.  Two TMDS encoders can share one OR, so the OR
 * is only touched once no other active encoder is still using it.
 */
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	/* Separate method controlling DP link power specifically. */
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_pwr_v0 pwr;
	} link = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;

	nv_encoder->last_dpms = mode;

	/* Bail out if a TMDS partner sharing this OR is still on. */
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		/* For DP the SOR itself is kept powered; only the link
		 * power state follows the requested DPMS mode.
		 */
		args.pwr.state = 1;
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
		nvif_mthd(disp->disp, 0, &link, sizeof(link));
	} else {
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
	}
}
2237
/* Read-modify-write the SOR_CTRL method in the core evo channel: only
 * the bits selected by 'mask' are replaced with 'data'.  Nothing is
 * pushed when the result equals the cached nv_encoder->ctrl value.
 */
static void
nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data)
{
	struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
	u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;
	if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			/* Pre-GF119 core channel: 0x0600 + or * 0x40. */
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		} else {
			/* GF119+ core channel: 0x0200 + or * 0x20. */
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		}
		evo_kick(push, mast);
	}
}
2254
/* Detach the SOR from its head: clear the head's owner bit in
 * SOR_CTRL and shut down audio/HDMI.  Safe to call when no head is
 * currently attached (nv_encoder->crtc == NULL).
 */
static void
nv50_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		nv50_crtc_prepare(&nv_crtc->base);
		/* Mask: just this head's bit; data 0 drops ownership. */
		nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
		nv50_audio_disconnect(encoder, nv_crtc);
		nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
	}
}
2271
/* Intentionally a no-op: all SOR state is pushed in ->mode_set. */
static void
nv50_sor_commit(struct drm_encoder *encoder)
{
}
2276
/* Legacy mode_set for SORs: select the output protocol (TMDS/LVDS/DP),
 * run protocol-specific setup (HDMI infoframes, LVDS script, audio),
 * then program sync/depth state and SOR_CTRL for the bound head.
 * 'umode' is the user-requested mode, 'mode' the adjusted mode.
 */
static void
nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 mask, ctrl;
	u8 owner = 1 << nv_crtc->index;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 * - Need to (i.e. rate > 165MHz)
			 * - DCB says we can
			 * - Not an HDMI monitor, since there's no dual-link
			 * on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		/* Build LVDS script flags: 0x0100 selects dual-link,
		 * 0x0200 a 24-bit panel interface.
		 */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* SPWG panels flag dual-link in EDID byte 121. */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		/* Derive the DP bandwidth requirement from bpc. */
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			depth = 0x2;
		} else
		if (nv_connector->base.display_info.bpc == 8) {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			depth = 0x5;
		} else {
			nv_encoder->dp.datarate = mode->clock * 30 / 8;
			depth = 0x6;
		}

		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;
		nv50_audio_mode_set(encoder, mode);
		break;
	default:
		BUG_ON(1);
		break;
	}

	nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);

	if (nv50_vers(mast) >= GF110_DISP) {
		/* GF119+: sync polarity and depth go through a separate
		 * per-head method instead of SOR_CTRL bits.
		 */
		u32 *push = evo_wait(mast, 3);
		if (push) {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs | (depth << 6));
			evo_data(push, magic);
			evo_kick(push, mast);
		}

		ctrl = proto << 8;
		mask = 0x00000f00;
	} else {
		/* Pre-GF119: depth, protocol and sync polarity all live
		 * in SOR_CTRL.
		 */
		ctrl = (depth << 16) | (proto << 8);
		if (mode->flags & DRM_MODE_FLAG_NHSYNC)
			ctrl |= 0x00001000;
		if (mode->flags & DRM_MODE_FLAG_NVSYNC)
			ctrl |= 0x00002000;
		mask = 0x000f3f00;
	}

	nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
}
2416
2417 static void
2418 nv50_sor_destroy(struct drm_encoder *encoder)
2419 {
2420 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2421 nv50_mstm_del(&nv_encoder->dp.mstm);
2422 drm_encoder_cleanup(encoder);
2423 kfree(encoder);
2424 }
2425
/* Legacy (non-atomic) encoder helper callbacks for on-chip SORs. */
static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
	.dpms = nv50_sor_dpms,
	.mode_fixup = nv50_encoder_mode_fixup,
	.prepare = nv50_sor_disconnect,
	.commit = nv50_sor_commit,
	.mode_set = nv50_sor_mode_set,
	.disable = nv50_sor_disconnect,
	.get_crtc = nv50_display_crtc_get,
};
2435
/* Core encoder callbacks for SORs. */
static const struct drm_encoder_funcs nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
2439
/* Create an encoder for an on-chip SOR (TMDS/LVDS/DP) and attach it to
 * 'connector'.  DP outputs additionally get their AUX channel looked
 * up and, on GF119+ display classes, MST state allocated.
 * Returns 0 on success or a negative error code.
 */
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		/* DP talks over an AUX channel rather than an i2c bus. */
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			nv_encoder->i2c = &aux->i2c;
			nv_encoder->aux = aux;
		}

		/*TODO: Use DP Info Table to check for support. */
		if (nv50_disp(encoder->dev)->disp->oclass >= GF110_DISP) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
					    nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			if (ret)
				return ret;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return 0;
}
2500
2501 /******************************************************************************
2502 * PIOR
2503 *****************************************************************************/
2504
2505 static void
2506 nv50_pior_dpms(struct drm_encoder *encoder, int mode)
2507 {
2508 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2509 struct nv50_disp *disp = nv50_disp(encoder->dev);
2510 struct {
2511 struct nv50_disp_mthd_v1 base;
2512 struct nv50_disp_pior_pwr_v0 pwr;
2513 } args = {
2514 .base.version = 1,
2515 .base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
2516 .base.hasht = nv_encoder->dcb->hasht,
2517 .base.hashm = nv_encoder->dcb->hashm,
2518 .pwr.state = mode == DRM_MODE_DPMS_ON,
2519 .pwr.type = nv_encoder->dcb->type,
2520 };
2521
2522 nvif_mthd(disp->disp, 0, &args, sizeof(args));
2523 }
2524
2525 static bool
2526 nv50_pior_mode_fixup(struct drm_encoder *encoder,
2527 const struct drm_display_mode *mode,
2528 struct drm_display_mode *adjusted_mode)
2529 {
2530 if (!nv50_encoder_mode_fixup(encoder, mode, adjusted_mode))
2531 return false;
2532 adjusted_mode->clock *= 2;
2533 return true;
2534 }
2535
/* Intentionally a no-op: all PIOR state is pushed in ->mode_set. */
static void
nv50_pior_commit(struct drm_encoder *encoder)
{
}
2540
/* Legacy mode_set for PIORs: pick a depth code from the connector's
 * bpc and program the PIOR control method for the bound head.
 * NOTE(review): the control push only happens for pre-GF119 core
 * channels — presumably PIORs only exist on those chipsets; confirm.
 */
static void
nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		   struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	u8 owner = 1 << nv_crtc->index;
	u8 proto, depth;
	u32 *push;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_connector->base.display_info.bpc) {
	case 10: depth = 0x6; break;
	case 8: depth = 0x5; break;
	case 6: depth = 0x2; break;
	default: depth = 0x0; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		/* Unsupported DCB type; BUG_ON() halts before 'proto'
		 * would be read uninitialised below.
		 */
		BUG_ON(1);
		break;
	}

	nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 ctrl = (depth << 16) | (proto << 8) | owner;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				ctrl |= 0x00001000;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				ctrl |= 0x00002000;
			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
			evo_data(push, ctrl);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
2590
/* Detach the PIOR from its head: blank the CRTC it was driving, then
 * zero the PIOR control method.  Safe when no head is attached.
 */
static void
nv50_pior_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
2614
/* Unregister the PIOR encoder and free its containing object. */
static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
2621
/* Legacy (non-atomic) encoder helper callbacks for external PIORs. */
static const struct drm_encoder_helper_funcs nv50_pior_hfunc = {
	.dpms = nv50_pior_dpms,
	.mode_fixup = nv50_pior_mode_fixup,
	.prepare = nv50_pior_disconnect,
	.commit = nv50_pior_commit,
	.mode_set = nv50_pior_mode_set,
	.disable = nv50_pior_disconnect,
	.get_crtc = nv50_display_crtc_get,
};
2631
/* Core encoder callbacks for PIORs. */
static const struct drm_encoder_funcs nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
2635
/* Create an encoder for an off-chip (PIOR) TMDS or DP output and
 * attach it to 'connector'.  Both types present as TMDS-class DRM
 * encoders; DP differs only in using an external AUX channel.
 * Returns 0 on success, -ENODEV for unsupported DCB types, -ENOMEM on
 * allocation failure.
 */
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc = aux ? &aux->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
2681
2682 /******************************************************************************
2683 * Framebuffer
2684 *****************************************************************************/
2685
/* Destroy one framebuffer DMA object set: the per-head objects, the
 * core channel object, then unlink and free the tracking structure.
 */
static void
nv50_fbdma_fini(struct nv50_fbdma *fbdma)
{
	int i;
	for (i = 0; i < ARRAY_SIZE(fbdma->base); i++)
		nvif_object_fini(&fbdma->base[i]);
	nvif_object_fini(&fbdma->core);
	list_del(&fbdma->head);
	kfree(fbdma);
}
2696
2697 static int
2698 nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind)
2699 {
2700 struct nouveau_drm *drm = nouveau_drm(dev);
2701 struct nv50_disp *disp = nv50_disp(dev);
2702 struct nv50_mast *mast = nv50_mast(dev);
2703 struct __attribute__ ((packed)) {
2704 struct nv_dma_v0 base;
2705 union {
2706 struct nv50_dma_v0 nv50;
2707 struct gf100_dma_v0 gf100;
2708 struct gf119_dma_v0 gf119;
2709 };
2710 } args = {};
2711 struct nv50_fbdma *fbdma;
2712 struct drm_crtc *crtc;
2713 u32 size = sizeof(args.base);
2714 int ret;
2715
2716 list_for_each_entry(fbdma, &disp->fbdma, head) {
2717 if (fbdma->core.handle == name)
2718 return 0;
2719 }
2720
2721 fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL);
2722 if (!fbdma)
2723 return -ENOMEM;
2724 list_add(&fbdma->head, &disp->fbdma);
2725
2726 args.base.target = NV_DMA_V0_TARGET_VRAM;
2727 args.base.access = NV_DMA_V0_ACCESS_RDWR;
2728 args.base.start = offset;
2729 args.base.limit = offset + length - 1;
2730
2731 if (drm->device.info.chipset < 0x80) {
2732 args.nv50.part = NV50_DMA_V0_PART_256;
2733 size += sizeof(args.nv50);
2734 } else
2735 if (drm->device.info.chipset < 0xc0) {
2736 args.nv50.part = NV50_DMA_V0_PART_256;
2737 args.nv50.kind = kind;
2738 size += sizeof(args.nv50);
2739 } else
2740 if (drm->device.info.chipset < 0xd0) {
2741 args.gf100.kind = kind;
2742 size += sizeof(args.gf100);
2743 } else {
2744 args.gf119.page = GF119_DMA_V0_PAGE_LP;
2745 args.gf119.kind = kind;
2746 size += sizeof(args.gf119);
2747 }
2748
2749 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
2750 struct nv50_head *head = nv50_head(crtc);
2751 int ret = nvif_object_init(&head->sync.base.base.user, name,
2752 NV_DMA_IN_MEMORY, &args, size,
2753 &fbdma->base[head->base.index]);
2754 if (ret) {
2755 nv50_fbdma_fini(fbdma);
2756 return ret;
2757 }
2758 }
2759
2760 ret = nvif_object_init(&mast->base.base.user, name, NV_DMA_IN_MEMORY,
2761 &args, size, &fbdma->core);
2762 if (ret) {
2763 nv50_fbdma_fini(fbdma);
2764 return ret;
2765 }
2766
2767 return 0;
2768 }
2769
/* Intentionally a no-op: the DMA objects referenced by a framebuffer
 * stay cached on disp->fbdma until nv50_display_destroy().
 */
static void
nv50_fb_dtor(struct drm_framebuffer *fb)
{
}
2774
/* Fill in the framebuffer's EVO-side parameters (surface format code,
 * pitch encoding, DMA handle) and ensure a matching DMA object exists.
 * Returns 0 on success or -EINVAL for an unsupported depth.
 */
static int
nv50_fb_ctor(struct drm_framebuffer *fb)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nouveau_drm *drm = nouveau_drm(fb->dev);
	struct nouveau_bo *nvbo = nv_fb->nvbo;
	struct nv50_disp *disp = nv50_disp(fb->dev);
	/* High byte of the tile layout selects the memory "kind";
	 * non-zero means a tiled surface.
	 */
	u8 kind = nouveau_bo_tile_layout(nvbo) >> 8;
	u8 tile = nvbo->tile_mode;

	if (drm->device.info.chipset >= 0xc0)
		tile >>= 4; /* yep.. */

	/* Map DRM depth to the hardware surface format code. */
	switch (fb->depth) {
	case 8: nv_fb->r_format = 0x1e00; break;
	case 15: nv_fb->r_format = 0xe900; break;
	case 16: nv_fb->r_format = 0xe800; break;
	case 24:
	case 32: nv_fb->r_format = 0xcf00; break;
	case 30: nv_fb->r_format = 0xd100; break;
	default:
		NV_ERROR(drm, "unknown depth %d\n", fb->depth);
		return -EINVAL;
	}

	/* Pitch encoding: tiled surfaces use (pitch/4)<<4 | tile mode;
	 * linear surfaces OR in a "linear" flag whose bit position
	 * moved between display classes.
	 */
	if (disp->disp->oclass < G82_DISP) {
		nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
					(fb->pitches[0] | 0x00100000);
		nv_fb->r_format |= kind << 16;
	} else
	if (disp->disp->oclass < GF110_DISP) {
		nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
					(fb->pitches[0] | 0x00100000);
	} else {
		nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
					(fb->pitches[0] | 0x01000000);
	}
	nv_fb->r_handle = 0xffff0000 | kind;

	return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0,
			       drm->device.info.ram_user, kind);
}
2817
2818 /******************************************************************************
2819 * Init
2820 *****************************************************************************/
2821
/* Intentionally a no-op; nothing needs tearing down on fini here. */
void
nv50_display_fini(struct drm_device *dev)
{
}
2826
/* Bring the display up: reload each head's LUT, rewrite the per-head
 * EVO sync values into the shared notifier BO, and re-point the core
 * channel at its sync buffer (method 0x0088).  Returns 0 on success
 * or -EBUSY if the core channel has no push space.
 */
int
nv50_display_init(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_crtc *crtc;
	u32 *push;

	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nv50_sync *sync = nv50_sync(crtc);

		nv50_crtc_lut_load(crtc);
		nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data);
	}

	evo_mthd(push, 0x0088, 1);
	evo_data(push, nv50_mast(dev)->base.sync.handle);
	evo_kick(push, nv50_mast(dev));
	return 0;
}
2850
/* Tear down everything nv50_display_create() built: framebuffer DMA
 * objects, the core evo channel, the shared sync BO, and the disp
 * structure itself.
 */
void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_fbdma *fbdma, *fbtmp;

	list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) {
		nv50_fbdma_fini(fbdma);
	}

	nv50_dmac_destroy(&disp->mast.base, disp->disp);

	/* NOTE(review): unmap is called unconditionally while unpin is
	 * NULL-guarded — presumably nouveau_bo_unmap() tolerates NULL;
	 * confirm.
	 */
	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}
2871
/* One-time construction of the nv50+ display: shared disp state, the
 * sync/notifier BO, the core EVO channel, one CRTC per hardware head,
 * and encoders/connectors built from the VBIOS DCB table.  On any
 * fatal error, everything constructed so far is destroyed.
 */
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	INIT_LIST_HEAD(&disp->fbdma);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	nouveau_display(dev)->fb_ctor = nv50_fb_ctor;
	nouveau_display(dev)->fb_dtor = nv50_fb_dtor;
	disp->disp = &nouveau_display(dev)->disp;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
			       &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x022448);
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		/* Encoder failures are non-fatal: warn and continue. */
		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}