/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#include <linux/dma-mapping.h>
#include <linux/hdmi.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_edid.h>

#include <nvif/class.h>
#include <nvif/cl0002.h>
#include <nvif/cl5070.h>
#include <nvif/cl507a.h>
#include <nvif/cl507b.h>
#include <nvif/cl507c.h>
#include <nvif/cl507d.h>
#include <nvif/cl507e.h>
#include <nvif/event.h>

#include "nouveau_drv.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fence.h"
#include "nouveau_fbcon.h"
#include "nv50_display.h"
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
#define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20)
#define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30)
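/* A worked example of the layout these macros produce (illustrative only):
 * slot 0 holds the master channel notifier, and each base channel c gets a
 * 0x100-byte slot starting at (c + 1) * 0x100, e.g.
 *   EVO_MAST_NTFY     = EVO_SYNC(0, 0x00) = 0x0000
 *   EVO_FLIP_SEM0(0)  = EVO_SYNC(1, 0x00) = 0x0100
 *   EVO_FLIP_NTFY0(0) = EVO_SYNC(1, 0x20) = 0x0120
 *   EVO_FLIP_SEM0(1)  = EVO_SYNC(2, 0x00) = 0x0200
 */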
/******************************************************************************
 * Atomic state
 *****************************************************************************/
#define nv50_atom(p) container_of((p), struct nv50_atom, state)

struct nv50_atom {
	struct drm_atomic_state state;

	struct list_head outp;
	bool lock_core;
};

struct nv50_outp_atom {
	struct list_head head;

	struct drm_encoder *encoder;
	/* remaining members (flush/update flags) elided in this excerpt */
};

#define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)

struct nv50_head_atom {
	struct drm_crtc_state state;

	/* The view/lut/core/curs/base/ovly/dither/procamp members and the
	 * set/clr dirty-flag masks referenced throughout this file are
	 * elided in this excerpt.
	 */

	struct nv50_head_mode {
		bool interlace;
		u32 clock;
		struct {
			u16 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
		} h;
		struct {
			u16 active;
			u16 synce;
			u16 blanke;
			u16 blanks;
			u16 blank2s;
			u16 blank2e;
			u16 blankus;
		} v;
	} mode;
};

static inline struct nv50_head_atom *
nv50_head_atom_get(struct drm_atomic_state *state, struct drm_crtc *crtc)
{
	struct drm_crtc_state *statec = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(statec))
		return (void *)statec;
	return nv50_head_atom(statec);
}

#define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state)

struct nv50_wndw_atom {
	struct drm_plane_state state;
	u8 interval;

	struct drm_rect clip;

	/* The sema/ntfy/image/lut/point members and the set/clr dirty-flag
	 * masks referenced throughout this file are elided in this excerpt.
	 */
};
/******************************************************************************
 * EVO channel
 *****************************************************************************/

struct nv50_chan {
	struct nvif_object user;
	struct nvif_device *device;
};

static int
nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_chan *chan)
{
	struct nvif_sclass *sclass;
	int ret, i, n;

	chan->device = device;

	ret = n = nvif_object_sclass_get(disp, &sclass);
	if (ret < 0)
		return ret;

	while (oclass[0]) {
		for (i = 0; i < n; i++) {
			if (sclass[i].oclass == oclass[0]) {
				ret = nvif_object_init(disp, 0, oclass[0],
						       data, size, &chan->user);
				if (ret == 0)
					nvif_object_map(&chan->user);
				nvif_object_sclass_put(&sclass);
				return ret;
			}
		}
		oclass++;
	}

	nvif_object_sclass_put(&sclass);
	return -ENOSYS;
}

static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}
/******************************************************************************
 * PIO EVO channel
 *****************************************************************************/

struct nv50_pioc {
	struct nv50_chan base;
};

static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}

static int
nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_pioc *pioc)
{
	return nv50_chan_create(device, disp, oclass, head, data, size,
				&pioc->base);
}

/******************************************************************************
 * Overlay Immediate
 *****************************************************************************/

struct nv50_oimm {
	struct nv50_pioc base;
};

static int
nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_oimm *oimm)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY,
		GF110_DISP_OVERLAY,
		GT214_DISP_OVERLAY,
		G82_DISP_OVERLAY,
		NV50_DISP_OVERLAY,
		0
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}
/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

struct nv50_dmac_ctxdma {
	struct list_head head;
	struct nvif_object object;
};

struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;
	u32 *ptr;

	struct nvif_object sync;
	struct nvif_object vram;
	struct list_head ctxdma;

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};

static void
nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma *ctxdma)
{
	nvif_object_fini(&ctxdma->object);
	list_del(&ctxdma->head);
	kfree(ctxdma);
}

static struct nv50_dmac_ctxdma *
nv50_dmac_ctxdma_new(struct nv50_dmac *dmac, struct nouveau_framebuffer *fb)
{
	struct nouveau_drm *drm = nouveau_drm(fb->base.dev);
	struct nv50_dmac_ctxdma *ctxdma;
	const u8    kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;
	const u32 handle = 0xfb000000 | kind;
	struct {
		struct nv_dma_v0 base;
		union {
			struct nv50_dma_v0 nv50;
			struct gf100_dma_v0 gf100;
			struct gf119_dma_v0 gf119;
		};
	} args = {};
	u32 argc = sizeof(args.base);
	int ret;

	list_for_each_entry(ctxdma, &dmac->ctxdma, head) {
		if (ctxdma->object.handle == handle)
			return ctxdma;
	}

	if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
		return ERR_PTR(-ENOMEM);
	list_add(&ctxdma->head, &dmac->ctxdma);

	args.base.target = NV_DMA_V0_TARGET_VRAM;
	args.base.access = NV_DMA_V0_ACCESS_RDWR;
	args.base.start  = 0;
	args.base.limit  = drm->client.device.info.ram_user - 1;

	if (drm->client.device.info.chipset < 0x80) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xc0) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		args.nv50.kind = kind;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xd0) {
		args.gf100.kind = kind;
		argc += sizeof(args.gf100);
	} else {
		args.gf119.page = GF119_DMA_V0_PAGE_LP;
		args.gf119.kind = kind;
		argc += sizeof(args.gf119);
	}

	ret = nvif_object_init(&dmac->base.user, handle, NV_DMA_IN_MEMORY,
			       &args, argc, &ctxdma->object);
	if (ret) {
		nv50_dmac_ctxdma_del(ctxdma);
		return ERR_PTR(ret);
	}

	return ctxdma;
}

static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
	struct nvif_device *device = dmac->base.device;
	struct nv50_dmac_ctxdma *ctxdma, *ctxtmp;

	list_for_each_entry_safe(ctxdma, ctxtmp, &dmac->ctxdma, head) {
		nv50_dmac_ctxdma_del(ctxdma);
	}

	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	if (dmac->ptr) {
		struct device *dev = nvxx_device(device)->dev;
		dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}
}
static int
nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nv50_disp_core_channel_dma_v0 *args = data;
	struct nvif_object pushbuf;
	int ret;

	mutex_init(&dmac->lock);

	dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
				       &dmac->handle, GFP_KERNEL);
	if (!dmac->ptr)
		return -ENOMEM;

	ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_PCI_US,
					.access = NV_DMA_V0_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
			       }, sizeof(struct nv_dma_v0), &pushbuf);
	if (ret)
		return ret;

	args->pushbuf = nvif_handle(&pushbuf);

	ret = nv50_chan_create(device, disp, oclass, head, data, size,
			       &dmac->base);
	nvif_object_fini(&pushbuf);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = device->info.ram_user - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);
	if (ret)
		return ret;

	INIT_LIST_HEAD(&dmac->ctxdma);
	return ret;
}
/******************************************************************************
 * Core
 *****************************************************************************/

struct nv50_mast {
	struct nv50_dmac base;
};

static int
nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
		 u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,
	};
	static const s32 oclass[] = {
		GP102_DISP_CORE_CHANNEL_DMA,
		GP100_DISP_CORE_CHANNEL_DMA,
		GM200_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
				syncbuf, &core->base);
}

/******************************************************************************
 * Base
 *****************************************************************************/

struct nv50_sync {
	struct nv50_dmac base;
};

static int
nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}

/******************************************************************************
 * Overlay
 *****************************************************************************/

struct nv50_ovly {
	struct nv50_dmac base;
};

static int
nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}
struct nv50_head {
	struct nouveau_crtc base;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass

struct nv50_disp {
	struct nvif_object *disp;
	struct nv50_mast mast;

	struct nouveau_bo *sync;
	/* remaining members elided in this excerpt */
};

static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nv50_mast(d) (&nv50_disp(d)->mast)
/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	struct nvif_device *device = dmac->base.device;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

	mutex_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		dmac->ptr[put] = 0x20000000;

		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (nvif_msec(device, 2000,
			if (!nvif_rd32(&dmac->base.user, 0x0004))
				break;
		) < 0) {
			mutex_unlock(&dmac->lock);
			pr_err("nouveau: evo channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}

static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}

#define evo_mthd(p, m, s) do {						\
	const u32 _m = (m), _s = (s);					\
	if (drm_debug & DRM_UT_KMS)					\
		pr_err("%04x %d %s\n", _m, _s, __func__);		\
	*((p)++) = ((_s << 18) | _m);					\
} while(0)

#define evo_data(p, d) do {						\
	const u32 _d = (d);						\
	if (drm_debug & DRM_UT_KMS)					\
		pr_err("\t%08x\n", _d);					\
	*((p)++) = _d;							\
} while(0)
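/* A minimal sketch of the push-buffer protocol the helpers above implement,
 * mirroring the pattern used throughout this file: evo_wait() reserves space
 * and takes dmac->lock, evo_mthd()/evo_data() append a method header and its
 * arguments, and evo_kick() publishes the new PUT pointer and drops the lock.
 * This function and its use of method 0x0080 are illustrative only and are
 * not part of the original source, hence the #if 0 guard.
 */
#if 0	/* example only, not built */
static void
example_evo_push(struct nv50_dmac *dmac)
{
	u32 *push;

	if ((push = evo_wait(dmac, 2))) {	/* reserve 2 dwords */
		evo_mthd(push, 0x0080, 1);	/* method with 1 argument */
		evo_data(push, 0x00000000);	/* the argument */
		evo_kick(push, dmac);		/* submit and unlock */
	}
}
#endif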
/******************************************************************************
 * Plane
 *****************************************************************************/
#define nv50_wndw(p) container_of((p), struct nv50_wndw, plane)

struct nv50_wndw {
	const struct nv50_wndw_func *func;
	struct nv50_dmac *dmac;

	struct drm_plane plane;

	struct nvif_notify notify;
	u16 ntfy;
	u16 sema;
	u32 data;
};

struct nv50_wndw_func {
	void *(*dtor)(struct nv50_wndw *);
	int (*acquire)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
		       struct nv50_head_atom *asyh);
	void (*release)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
			struct nv50_head_atom *asyh);
	void (*prepare)(struct nv50_wndw *, struct nv50_head_atom *asyh,
			struct nv50_wndw_atom *asyw);

	void (*sema_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*sema_clr)(struct nv50_wndw *);
	void (*ntfy_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*ntfy_clr)(struct nv50_wndw *);
	int (*ntfy_wait_begun)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_clr)(struct nv50_wndw *);
	void (*lut)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*point)(struct nv50_wndw *, struct nv50_wndw_atom *);

	u32 (*update)(struct nv50_wndw *, u32 interlock);
};

static int
nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	if (asyw->set.ntfy)
		return wndw->func->ntfy_wait_begun(wndw, asyw);
	return 0;
}
static u32
nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 interlock, bool flush,
		    struct nv50_wndw_atom *asyw)
{
	if (asyw->clr.sema && (!asyw->set.sema || flush))
		wndw->func->sema_clr(wndw);
	if (asyw->clr.ntfy && (!asyw->set.ntfy || flush))
		wndw->func->ntfy_clr(wndw);
	if (asyw->clr.image && (!asyw->set.image || flush))
		wndw->func->image_clr(wndw);

	return flush ? wndw->func->update(wndw, interlock) : 0;
}

static u32
nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 interlock,
		    struct nv50_wndw_atom *asyw)
{
	if (interlock) {
		asyw->image.mode = 0;
		asyw->image.interval = 1;
	}

	if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
	if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
	if (asyw->set.image) wndw->func->image_set(wndw, asyw);
	if (asyw->set.lut  ) wndw->func->lut      (wndw, asyw);
	if (asyw->set.point) wndw->func->point    (wndw, asyw);

	return wndw->func->update(wndw, interlock);
}
static void
nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
	wndw->func->release(wndw, asyw, asyh);
	asyw->ntfy.handle = 0;
	asyw->sema.handle = 0;
}

static int
nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(asyw->state.fb);
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	int ret;

	NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);

	asyw->clip.x1 = 0;
	asyw->clip.y1 = 0;
	asyw->clip.x2 = asyh->state.mode.hdisplay;
	asyw->clip.y2 = asyh->state.mode.vdisplay;

	asyw->image.w = fb->base.width;
	asyw->image.h = fb->base.height;
	asyw->image.kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;

	if (asyh->state.pageflip_flags & DRM_MODE_PAGE_FLIP_ASYNC)
		asyw->interval = 0;
	else
		asyw->interval = 1;

	if (asyw->image.kind) {
		asyw->image.layout = 0;
		if (drm->client.device.info.chipset >= 0xc0)
			asyw->image.block = fb->nvbo->tile_mode >> 4;
		else
			asyw->image.block = fb->nvbo->tile_mode;
		asyw->image.pitch = (fb->base.pitches[0] / 4) << 4;
	} else {
		asyw->image.layout = 1;
		asyw->image.block  = 0;
		asyw->image.pitch  = fb->base.pitches[0];
	}

	ret = wndw->func->acquire(wndw, asyw, asyh);
	if (ret)
		return ret;

	if (asyw->set.image) {
		if (!(asyw->image.mode = asyw->interval ? 0 : 1))
			asyw->image.interval = asyw->interval;
		else
			asyw->image.interval = 0;
	}

	return 0;
}
static int
nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *harm = NULL, *asyh = NULL;
	bool varm = false, asyv = false, asym = false;
	int ret;

	NV_ATOMIC(drm, "%s atomic_check\n", plane->name);
	if (asyw->state.crtc) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);
		asym = drm_atomic_crtc_needs_modeset(&asyh->state);
		asyv = asyh->state.active;
	}

	if (armw->state.crtc) {
		harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
		if (IS_ERR(harm))
			return PTR_ERR(harm);
		varm = harm->state.crtc->state->active;
	}

	if (asyv) {
		asyw->point.x = asyw->state.crtc_x;
		asyw->point.y = asyw->state.crtc_y;
		if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
			asyw->set.point = true;

		ret = nv50_wndw_atomic_check_acquire(wndw, asyw, asyh);
		if (ret)
			return ret;
	} else
	if (varm) {
		nv50_wndw_atomic_check_release(wndw, asyw, harm);
	} else {
		return 0;
	}

	if (!varm || asym || armw->state.fb != asyw->state.fb) {
		asyw->clr.ntfy = armw->ntfy.handle != 0;
		asyw->clr.sema = armw->sema.handle != 0;
		if (wndw->func->image_clr)
			asyw->clr.image = armw->image.handle != 0;
		asyw->set.lut = wndw->func->lut && asyv;
	}

	return 0;
}
static void
nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(old_state->fb);
	struct nouveau_drm *drm = nouveau_drm(plane->dev);

	NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
	if (!old_state->fb)
		return;

	nouveau_bo_unpin(fb->nvbo);
}

static int
nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(state->fb);
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *asyh;
	struct nv50_dmac_ctxdma *ctxdma;
	int ret;

	NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, state->fb);
	if (!asyw->state.fb)
		return 0;

	ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM, true);
	if (ret)
		return ret;

	ctxdma = nv50_dmac_ctxdma_new(wndw->dmac, fb);
	if (IS_ERR(ctxdma)) {
		nouveau_bo_unpin(fb->nvbo);
		return PTR_ERR(ctxdma);
	}

	asyw->state.fence = reservation_object_get_excl_rcu(fb->nvbo->bo.resv);
	asyw->image.handle = ctxdma->object.handle;
	asyw->image.offset = fb->nvbo->bo.offset;

	if (wndw->func->prepare) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);

		wndw->func->prepare(wndw, asyh, asyw);
	}

	return 0;
}
static const struct drm_plane_helper_funcs
nv50_wndw_helper = {
	.prepare_fb = nv50_wndw_prepare_fb,
	.cleanup_fb = nv50_wndw_cleanup_fb,
	.atomic_check = nv50_wndw_atomic_check,
};

static void
nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
			       struct drm_plane_state *state)
{
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	__drm_atomic_helper_plane_destroy_state(&asyw->state);
	kfree(asyw);
}

static struct drm_plane_state *
nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
{
	struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
	struct nv50_wndw_atom *asyw;
	if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
	asyw->interval = 1;
	asyw->sema = armw->sema;
	asyw->ntfy = armw->ntfy;
	asyw->image = armw->image;
	asyw->point = armw->point;
	asyw->lut = armw->lut;
	asyw->clr.mask = 0;
	asyw->set.mask = 0;
	return &asyw->state;
}

static void
nv50_wndw_reset(struct drm_plane *plane)
{
	struct nv50_wndw_atom *asyw;

	if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
		return;

	if (plane->state)
		plane->funcs->atomic_destroy_state(plane, plane->state);
	plane->state = &asyw->state;
	plane->state->plane = plane;
	plane->state->rotation = DRM_MODE_ROTATE_0;
}

static void
nv50_wndw_destroy(struct drm_plane *plane)
{
	struct nv50_wndw *wndw = nv50_wndw(plane);
	void *data;
	nvif_notify_fini(&wndw->notify);
	data = wndw->func->dtor(wndw);
	drm_plane_cleanup(&wndw->plane);
	kfree(data);
}

static const struct drm_plane_funcs
nv50_wndw = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = nv50_wndw_destroy,
	.reset = nv50_wndw_reset,
	.set_property = drm_atomic_helper_plane_set_property,
	.atomic_duplicate_state = nv50_wndw_atomic_duplicate_state,
	.atomic_destroy_state = nv50_wndw_atomic_destroy_state,
};

static void
nv50_wndw_fini(struct nv50_wndw *wndw)
{
	nvif_notify_put(&wndw->notify);
}

static void
nv50_wndw_init(struct nv50_wndw *wndw)
{
	nvif_notify_get(&wndw->notify);
}

static int
nv50_wndw_ctor(const struct nv50_wndw_func *func, struct drm_device *dev,
	       enum drm_plane_type type, const char *name, int index,
	       struct nv50_dmac *dmac, const u32 *format, int nformat,
	       struct nv50_wndw *wndw)
{
	int ret;

	wndw->func = func;
	wndw->dmac = dmac;

	ret = drm_universal_plane_init(dev, &wndw->plane, 0, &nv50_wndw, format,
				       nformat, type, "%s-%d", name, index);
	if (ret)
		return ret;

	drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);
	return 0;
}
/******************************************************************************
 * Cursor plane
 *****************************************************************************/
#define nv50_curs(p) container_of((p), struct nv50_curs, wndw)

struct nv50_curs {
	struct nv50_wndw wndw;
	struct nvif_object chan;
};

static u32
nv50_curs_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0080, 0x00000000);
	return 0;
}

static void
nv50_curs_point(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0084, (asyw->point.y << 16) | asyw->point.x);
}

static void
nv50_curs_prepare(struct nv50_wndw *wndw, struct nv50_head_atom *asyh,
		  struct nv50_wndw_atom *asyw)
{
	u32 handle = nv50_disp(wndw->plane.dev)->mast.base.vram.handle;
	u32 offset = asyw->image.offset;
	if (asyh->curs.handle != handle || asyh->curs.offset != offset) {
		asyh->curs.handle = handle;
		asyh->curs.offset = offset;
		asyh->set.curs = asyh->curs.visible;
	}
}

static void
nv50_curs_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->curs.visible = false;
}

static int
nv50_curs_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	int ret;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   true, true);
	asyh->curs.visible = asyw->state.visible;
	if (ret || !asyh->curs.visible)
		return ret;

	switch (asyw->state.fb->width) {
	case 32: asyh->curs.layout = 0; break;
	case 64: asyh->curs.layout = 1; break;
	default:
		return -EINVAL;
	}

	if (asyw->state.fb->width != asyw->state.fb->height)
		return -EINVAL;

	switch (asyw->state.fb->format->format) {
	case DRM_FORMAT_ARGB8888: asyh->curs.format = 1; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	return 0;
}

static void *
nv50_curs_dtor(struct nv50_wndw *wndw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_object_fini(&curs->chan);
	return curs;
}

static const u32
nv50_curs_format[] = {
	DRM_FORMAT_ARGB8888,
};

static const struct nv50_wndw_func
nv50_curs = {
	.dtor = nv50_curs_dtor,
	.acquire = nv50_curs_acquire,
	.release = nv50_curs_release,
	.prepare = nv50_curs_prepare,
	.point = nv50_curs_point,
	.update = nv50_curs_update,
};

static int
nv50_curs_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_curs **pcurs)
{
	static const struct nvif_mclass curses[] = {
		{ GK104_DISP_CURSOR, 0 },
		{ GF110_DISP_CURSOR, 0 },
		{ GT214_DISP_CURSOR, 0 },
		{ G82_DISP_CURSOR, 0 },
		{ NV50_DISP_CURSOR, 0 },
		{}
	};
	struct nv50_disp_cursor_v0 args = {
		.head = head->base.index,
	};
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_curs *curs;
	int cid, ret;

	cid = nvif_mclass(disp->disp, curses);
	if (cid < 0) {
		NV_ERROR(drm, "No supported cursor immediate class\n");
		return cid;
	}

	if (!(curs = *pcurs = kzalloc(sizeof(*curs), GFP_KERNEL)))
		return -ENOMEM;

	ret = nv50_wndw_ctor(&nv50_curs, drm->dev, DRM_PLANE_TYPE_CURSOR,
			     "curs", head->base.index, &disp->mast.base,
			     nv50_curs_format, ARRAY_SIZE(nv50_curs_format),
			     &curs->wndw);
	if (ret) {
		kfree(curs);
		return ret;
	}

	ret = nvif_object_init(disp->disp, 0, curses[cid].oclass, &args,
			       sizeof(args), &curs->chan);
	if (ret) {
		NV_ERROR(drm, "curs%04x allocation failed: %d\n",
			 curses[cid].oclass, ret);
		return ret;
	}

	return 0;
}
1247 *****************************************************************************/
1248 #define nv50_base(p) container_of((p), struct nv50_base, wndw)
1251 struct nv50_wndw wndw
;
1252 struct nv50_sync chan
;
1257 nv50_base_notify(struct nvif_notify
*notify
)
1259 return NVIF_NOTIFY_KEEP
;
1263 nv50_base_lut(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
)
1265 struct nv50_base
*base
= nv50_base(wndw
);
1267 if ((push
= evo_wait(&base
->chan
, 2))) {
1268 evo_mthd(push
, 0x00e0, 1);
1269 evo_data(push
, asyw
->lut
.enable
<< 30);
1270 evo_kick(push
, &base
->chan
);
1275 nv50_base_image_clr(struct nv50_wndw
*wndw
)
1277 struct nv50_base
*base
= nv50_base(wndw
);
1279 if ((push
= evo_wait(&base
->chan
, 4))) {
1280 evo_mthd(push
, 0x0084, 1);
1281 evo_data(push
, 0x00000000);
1282 evo_mthd(push
, 0x00c0, 1);
1283 evo_data(push
, 0x00000000);
1284 evo_kick(push
, &base
->chan
);
1289 nv50_base_image_set(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
)
1291 struct nv50_base
*base
= nv50_base(wndw
);
1292 const s32 oclass
= base
->chan
.base
.base
.user
.oclass
;
1294 if ((push
= evo_wait(&base
->chan
, 10))) {
1295 evo_mthd(push
, 0x0084, 1);
1296 evo_data(push
, (asyw
->image
.mode
<< 8) |
1297 (asyw
->image
.interval
<< 4));
1298 evo_mthd(push
, 0x00c0, 1);
1299 evo_data(push
, asyw
->image
.handle
);
1300 if (oclass
< G82_DISP_BASE_CHANNEL_DMA
) {
1301 evo_mthd(push
, 0x0800, 5);
1302 evo_data(push
, asyw
->image
.offset
>> 8);
1303 evo_data(push
, 0x00000000);
1304 evo_data(push
, (asyw
->image
.h
<< 16) | asyw
->image
.w
);
1305 evo_data(push
, (asyw
->image
.layout
<< 20) |
1308 evo_data(push
, (asyw
->image
.kind
<< 16) |
1309 (asyw
->image
.format
<< 8));
1311 if (oclass
< GF110_DISP_BASE_CHANNEL_DMA
) {
1312 evo_mthd(push
, 0x0800, 5);
1313 evo_data(push
, asyw
->image
.offset
>> 8);
1314 evo_data(push
, 0x00000000);
1315 evo_data(push
, (asyw
->image
.h
<< 16) | asyw
->image
.w
);
1316 evo_data(push
, (asyw
->image
.layout
<< 20) |
1319 evo_data(push
, asyw
->image
.format
<< 8);
1321 evo_mthd(push
, 0x0400, 5);
1322 evo_data(push
, asyw
->image
.offset
>> 8);
1323 evo_data(push
, 0x00000000);
1324 evo_data(push
, (asyw
->image
.h
<< 16) | asyw
->image
.w
);
1325 evo_data(push
, (asyw
->image
.layout
<< 24) |
1328 evo_data(push
, asyw
->image
.format
<< 8);
1330 evo_kick(push
, &base
->chan
);
static void
nv50_base_ntfy_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x00a4, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_ntfy_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 3))) {
		evo_mthd(push, 0x00a0, 2);
		evo_data(push, (asyw->ntfy.awaken << 30) | asyw->ntfy.offset);
		evo_data(push, asyw->ntfy.handle);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_sema_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_sema_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 5))) {
		evo_mthd(push, 0x0088, 4);
		evo_data(push, asyw->sema.offset);
		evo_data(push, asyw->sema.acquire);
		evo_data(push, asyw->sema.release);
		evo_data(push, asyw->sema.handle);
		evo_kick(push, &base->chan);
	}
}

static u32
nv50_base_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;

	if (!(push = evo_wait(&base->chan, 2)))
		return 0;
	evo_mthd(push, 0x0080, 1);
	evo_data(push, interlock);
	evo_kick(push, &base->chan);

	if (base->chan.base.base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA)
		return interlock ? 2 << (base->id * 8) : 0;
	return interlock ? 2 << (base->id * 4) : 0;
}
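/* Illustrative arithmetic for the interlock values above: base channel 1 on
 * pre-GF110 hardware reports 2 << (1 * 8) = 0x200, while on GF110 and later
 * it reports 2 << (1 * 4) = 0x20 (worked example, not from the original
 * source).
 */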
static int
nv50_base_ntfy_wait_begun(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	if (nvif_msec(&drm->client.device, 2000ULL,
		u32 data = nouveau_bo_rd32(disp->sync, asyw->ntfy.offset / 4);
		if ((data & 0xc0000000) == 0x40000000)
			break;
		usleep_range(1, 2);
	) < 0)
		return -ETIMEDOUT;
	return 0;
}

static void
nv50_base_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->base.cpp = 0;
}
static int
nv50_base_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	const struct drm_framebuffer *fb = asyw->state.fb;
	int ret;

	if (!fb->format->depth)
		return -EINVAL;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   false, true);
	if (ret)
		return ret;

	asyh->base.depth = fb->format->depth;
	asyh->base.cpp = fb->format->cpp[0];
	asyh->base.x = asyw->state.src.x1 >> 16;
	asyh->base.y = asyw->state.src.y1 >> 16;
	asyh->base.w = asyw->state.fb->width;
	asyh->base.h = asyw->state.fb->height;

	switch (fb->format->format) {
	case DRM_FORMAT_C8         : asyw->image.format = 0x1e; break;
	case DRM_FORMAT_RGB565     : asyw->image.format = 0xe8; break;
	case DRM_FORMAT_XRGB1555   :
	case DRM_FORMAT_ARGB1555   : asyw->image.format = 0xe9; break;
	case DRM_FORMAT_XRGB8888   :
	case DRM_FORMAT_ARGB8888   : asyw->image.format = 0xcf; break;
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010: asyw->image.format = 0xd1; break;
	case DRM_FORMAT_XBGR8888   :
	case DRM_FORMAT_ABGR8888   : asyw->image.format = 0xd5; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	asyw->lut.enable = 1;
	asyw->set.image = true;
	return 0;
}

static void *
nv50_base_dtor(struct nv50_wndw *wndw)
{
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	struct nv50_base *base = nv50_base(wndw);
	nv50_dmac_destroy(&base->chan.base, disp->disp);
	return base;
}

static const u32
nv50_base_format[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ABGR8888,
};

static const struct nv50_wndw_func
nv50_base = {
	.dtor = nv50_base_dtor,
	.acquire = nv50_base_acquire,
	.release = nv50_base_release,
	.sema_set = nv50_base_sema_set,
	.sema_clr = nv50_base_sema_clr,
	.ntfy_set = nv50_base_ntfy_set,
	.ntfy_clr = nv50_base_ntfy_clr,
	.ntfy_wait_begun = nv50_base_ntfy_wait_begun,
	.image_set = nv50_base_image_set,
	.image_clr = nv50_base_image_clr,
	.lut = nv50_base_lut,
	.update = nv50_base_update,
};

static int
nv50_base_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_base **pbase)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_base *base;
	int ret;

	if (!(base = *pbase = kzalloc(sizeof(*base), GFP_KERNEL)))
		return -ENOMEM;

	base->id = head->base.index;
	base->wndw.ntfy = EVO_FLIP_NTFY0(base->id);
	base->wndw.sema = EVO_FLIP_SEM0(base->id);
	base->wndw.data = 0x00000000;

	ret = nv50_wndw_ctor(&nv50_base, drm->dev, DRM_PLANE_TYPE_PRIMARY,
			     "base", base->id, &base->chan.base,
			     nv50_base_format, ARRAY_SIZE(nv50_base_format),
			     &base->wndw);
	if (ret) {
		kfree(base);
		return ret;
	}

	ret = nv50_base_create(&drm->client.device, disp->disp, base->id,
			       disp->sync->bo.offset, &base->chan);
	if (ret)
		return ret;

	return nvif_notify_init(&base->chan.base.base.user, nv50_base_notify,
				false,
				NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT,
				&(struct nvif_notify_uevent_req) {},
				sizeof(struct nvif_notify_uevent_req),
				sizeof(struct nvif_notify_uevent_rep),
				&base->wndw.notify);
}
/******************************************************************************
 * Head
 *****************************************************************************/
static void
nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1);
		else
			evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1);
		evo_data(push, (asyh->procamp.sat.sin << 20) |
			       (asyh->procamp.sat.cos << 8));
		evo_kick(push, core);
	}
}

static void
nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1);
		else
		if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1);
		else
			evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1);
		evo_data(push, (asyh->dither.mode << 3) |
			       (asyh->dither.bits << 1) |
			        asyh->dither.enable);
		evo_kick(push, core);
	}
}

static void
nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0904 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}

static void
nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		case 1: bounds |= 0x00000000; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0900 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}
static void
nv50_head_curs_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 5))) {
		if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, asyh->curs.handle);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
						    (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, asyh->curs.handle);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_core_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, core);
	}
}
static void
nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 9))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.kind << 16 |
				       asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
			/* EVO will complain with INVALID_STATE if we have an
			 * active cursor and (re)specify HeadSetContextDmaIso
			 * without also updating HeadSetOffsetCursor.
			 */
			asyh->set.curs = asyh->curs.visible;
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		} else {
			evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 24 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_lut_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}

static void
nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 7))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, asyh->lut.handle);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, asyh->lut.handle);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	struct nv50_head_mode *m = &asyh->mode;
	u32 *push;
	if ((push = evo_wait(core, 14))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
			evo_data(push, 0x00800000 | m->clock);
			evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
			evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_data(push, asyh->mode.v.blankus);
			evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active  << 16) | m->h.active );
			evo_data(push, (m->v.synce   << 16) | m->h.synce  );
			evo_data(push, (m->v.blanke  << 16) | m->h.blanke );
			evo_data(push, (m->v.blanks  << 16) | m->h.blanks );
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
			evo_data(push, 0x00000000); /* ??? */
			evo_data(push, 0xffffff00);
			evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
			evo_data(push, m->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, m->clock * 1000);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 10))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		} else {
			evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y)
{
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_lut_clr(head);
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_core_clr(head);
	if (asyh->clr.curs && (!asyh->set.curs || y))
		nv50_head_curs_clr(head);
}

static void
nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	if (asyh->set.view   ) nv50_head_view    (head, asyh);
	if (asyh->set.mode   ) nv50_head_mode    (head, asyh);
	if (asyh->set.core   ) nv50_head_lut_set (head, asyh);
	if (asyh->set.core   ) nv50_head_core_set(head, asyh);
	if (asyh->set.curs   ) nv50_head_curs_set(head, asyh);
	if (asyh->set.base   ) nv50_head_base    (head, asyh);
	if (asyh->set.ovly   ) nv50_head_ovly    (head, asyh);
	if (asyh->set.dither ) nv50_head_dither  (head, asyh);
	if (asyh->set.procamp) nv50_head_procamp (head, asyh);
}
static void
nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
			       struct nv50_head_atom *asyh,
			       struct nouveau_conn_atom *asyc)
{
	const int vib = asyc->procamp.color_vibrance - 100;
	const int hue = asyc->procamp.vibrant_hue - 90;
	const int adj = (vib > 0) ? 50 : 0;
	asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
	asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
	asyh->set.procamp = true;
}
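/* Illustrative arithmetic for the 12-bit saturation factors above (worked
 * example, not from the original source): with color_vibrance = 120 and
 * vibrant_hue = 90, vib = 20, adj = 50, so
 * sat.cos = (20 * 2047 + 50) / 100 = 409 and sat.sin = 0.
 */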
static void
nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
			      struct nv50_head_atom *asyh,
			      struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	u32 mode = 0x00;

	if (asyc->dither.mode == DITHERING_MODE_AUTO) {
		if (asyh->base.depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = asyc->dither.mode;
	}

	if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= asyc->dither.depth;
	}

	asyh->dither.enable = mode;
	asyh->dither.bits = mode >> 1;
	asyh->dither.mode = mode >> 3;
	asyh->set.dither = true;
}
static void
nv50_head_atomic_check_view(struct nv50_head_atom *armh,
			    struct nv50_head_atom *asyh,
			    struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	struct drm_display_mode *omode = &asyh->state.adjusted_mode;
	struct drm_display_mode *umode = &asyh->state.mode;
	int mode = asyc->scaler.mode;
	struct edid *edid;
	int umode_vdisplay, omode_hdisplay, omode_vdisplay;

	if (connector->edid_blob_ptr)
		edid = (struct edid *)connector->edid_blob_ptr->data;
	else
		edid = NULL;

	if (!asyc->scaler.full) {
		if (mode == DRM_MODE_SCALE_NONE)
			omode = umode;
	} else {
		/* Non-EDID LVDS/eDP mode. */
		mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	/* For the user-specified mode, we must ignore doublescan and
	 * the like, but honor frame packing.
	 */
	umode_vdisplay = umode->vdisplay;
	if ((umode->flags & DRM_MODE_FLAG_3D_MASK) == DRM_MODE_FLAG_3D_FRAME_PACKING)
		umode_vdisplay += umode->vtotal;
	asyh->view.iW = umode->hdisplay;
	asyh->view.iH = umode_vdisplay;
	/* For the output mode, we can just use the stock helper. */
	drm_mode_get_hv_timing(omode, &omode_hdisplay, &omode_vdisplay);
	asyh->view.oW = omode_hdisplay;
	asyh->view.oH = omode_vdisplay;

	/* Add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
	    (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
	     drm_detect_hdmi_monitor(edid)))) {
		u32 bX = asyc->scaler.underscan.hborder;
		u32 bY = asyc->scaler.underscan.vborder;
		u32 r = (asyh->view.oH << 19) / asyh->view.oW;

		if (bX) {
			asyh->view.oW -= (bX * 2);
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		} else {
			asyh->view.oW -= (asyh->view.oW >> 4) + 32;
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
	}
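	/* The ratio above is .19 fixed point (illustrative worked example,
	 * not from the original source): for a 1920x1080 output,
	 * r = (1080 << 19) / 1920 = 294912, and ((oW * r) + (r / 2)) >> 19
	 * rounds the scaled height back to the nearest whole line.
	 */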
	/* Handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation.
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
		asyh->view.oH = min((u16)umode_vdisplay, asyh->view.oH);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (asyh->view.oH < asyh->view.oW) {
			u32 r = (asyh->view.iW << 19) / asyh->view.iH;
			asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
		} else {
			u32 r = (asyh->view.iH << 19) / asyh->view.iW;
			asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	asyh->set.view = true;
}
static void
nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	struct nv50_head_mode *m = &asyh->mode;
	u32 blankus;

	drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V | CRTC_STEREO_DOUBLE);

	/*
	 * DRM modes are defined in terms of a repeating interval
	 * starting with the active display area.  The hardware modes
	 * are defined in terms of a repeating interval starting one
	 * unit (pixel or line) into the sync pulse.  So, add bias.
	 */
	m->h.active = mode->crtc_htotal;
	m->h.synce  = mode->crtc_hsync_end - mode->crtc_hsync_start - 1;
	m->h.blanke = mode->crtc_hblank_end - mode->crtc_hsync_start - 1;
	m->h.blanks = m->h.blanke + mode->crtc_hdisplay;

	m->v.active = mode->crtc_vtotal;
	m->v.synce  = mode->crtc_vsync_end - mode->crtc_vsync_start - 1;
	m->v.blanke = mode->crtc_vblank_end - mode->crtc_vsync_start - 1;
	m->v.blanks = m->v.blanke + mode->crtc_vdisplay;
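	/* Worked example (illustrative, CEA-861 1080p60): crtc_htotal=2200,
	 * crtc_hsync_start=2008, crtc_hsync_end=2052, crtc_hblank_end=2200,
	 * crtc_hdisplay=1920 gives h.active=2200, h.synce=43, h.blanke=191,
	 * h.blanks=2111 -- all relative to one pixel into the hsync pulse.
	 */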
	/*XXX: Safe underestimate, even "0" works */
	blankus = (m->v.active - mode->crtc_vdisplay - 2) * m->h.active;
	blankus *= 1000;
	blankus /= mode->crtc_clock;
	m->v.blankus = blankus;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		m->v.blank2e =  m->v.active + m->v.blanke;
		m->v.blank2s =  m->v.blank2e + mode->crtc_vdisplay;
		m->v.active  = (m->v.active * 2) + 1;
		m->interlace = true;
	} else {
		m->v.blank2e = 0;
		m->v.blank2s = 1;
		m->interlace = false;
	}
	m->clock = mode->crtc_clock;

	asyh->set.mode = true;
}
static int
nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	struct nouveau_conn_atom *asyc = NULL;
	struct drm_connector_state *conns;
	struct drm_connector *conn;
	int i;

	NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
	if (asyh->state.active) {
		for_each_connector_in_state(asyh->state.state, conn, conns, i) {
			if (conns->crtc == crtc) {
				asyc = nouveau_conn_atom(conns);
				break;
			}
		}

		if (armh->state.active) {
			if (asyc) {
				if (asyh->state.mode_changed)
					asyc->set.scaler = true;
				if (armh->base.depth != asyh->base.depth)
					asyc->set.dither = true;
			}
		} else {
			if (asyc)
				asyc->set.mask = ~0;
			asyh->set.mask = ~0;
		}

		if (asyh->state.mode_changed)
			nv50_head_atomic_check_mode(head, asyh);

		if (asyc) {
			if (asyc->set.scaler)
				nv50_head_atomic_check_view(armh, asyh, asyc);
			if (asyc->set.dither)
				nv50_head_atomic_check_dither(armh, asyh, asyc);
			if (asyc->set.procamp)
				nv50_head_atomic_check_procamp(armh, asyh, asyc);
		}

		if ((asyh->core.visible = (asyh->base.cpp != 0))) {
			asyh->core.x = asyh->base.x;
			asyh->core.y = asyh->base.y;
			asyh->core.w = asyh->base.w;
			asyh->core.h = asyh->base.h;
		} else
		if ((asyh->core.visible = asyh->curs.visible)) {
			/*XXX: We need to either find some way of having the
			 *     primary base layer appear black, while still
			 *     being able to display the other layers, or we
			 *     need to allocate a dummy black surface here.
			 */
			asyh->core.x = 0;
			asyh->core.y = 0;
			asyh->core.w = asyh->state.mode.hdisplay;
			asyh->core.h = asyh->state.mode.vdisplay;
		}
		asyh->core.handle = disp->mast.base.vram.handle;
		asyh->core.offset = 0;
		asyh->core.format = 0xcf;
		asyh->core.kind = 0;
		asyh->core.layout = 1;
		asyh->core.block = 0;
		asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
		asyh->lut.handle = disp->mast.base.vram.handle;
		asyh->lut.offset = head->base.lut.nvbo->bo.offset;
		asyh->set.base = armh->base.cpp != asyh->base.cpp;
		asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
	} else {
		asyh->core.visible = false;
		asyh->curs.visible = false;
	}

	if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
		if (asyh->core.visible) {
			if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
				asyh->set.core = true;
		} else
		if (armh->core.visible) {
			asyh->clr.core = true;
		}

		if (asyh->curs.visible) {
			if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
				asyh->set.curs = true;
		} else
		if (armh->curs.visible) {
			asyh->clr.curs = true;
		}
	} else {
		asyh->clr.core = armh->core.visible;
		asyh->clr.curs = armh->curs.visible;
		asyh->set.core = asyh->core.visible;
		asyh->set.curs = asyh->curs.visible;
	}

	if (asyh->clr.mask || asyh->set.mask)
		nv50_atom(asyh->state.state)->lock_core = true;
	return 0;
}

static void
nv50_head_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		u16 r = nv_crtc->lut.r[i] >> 2;
		u16 g = nv_crtc->lut.g[i] >> 2;
		u16 b = nv_crtc->lut.b[i] >> 2;

		if (disp->disp->oclass < GF110_DISP) {
			writew(r + 0x0000, lut + (i * 0x08) + 0);
			writew(g + 0x0000, lut + (i * 0x08) + 2);
			writew(b + 0x0000, lut + (i * 0x08) + 4);
		} else {
			writew(r + 0x6000, lut + (i * 0x20) + 0);
			writew(g + 0x6000, lut + (i * 0x20) + 2);
			writew(b + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}
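
/* LUT layout implied by the offsets above (inferred from the code, not
 * from documentation): pre-GF110 cores use 8-byte entries with R/G/B
 * words at +0/+2/+4, so entry i=1 starts at 0x08; GF110+ cores use
 * 32-byte entries with a 0x6000 bias on each component, so entry i=1
 * starts at 0x20.
 */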

static const struct drm_crtc_helper_funcs
nv50_head_help = {
	.load_lut = nv50_head_lut_load,
	.atomic_check = nv50_head_atomic_check,
};

static int
nv50_head_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t size,
		    struct drm_modeset_acquire_ctx *ctx)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 i;

	for (i = 0; i < size; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nv50_head_lut_load(crtc);
	return 0;
}

static void
nv50_head_atomic_destroy_state(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	__drm_atomic_helper_crtc_destroy_state(&asyh->state);
	kfree(asyh);
}

static struct drm_crtc_state *
nv50_head_atomic_duplicate_state(struct drm_crtc *crtc)
{
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh;
	if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state);
	asyh->view = armh->view;
	asyh->mode = armh->mode;
	asyh->lut = armh->lut;
	asyh->core = armh->core;
	asyh->curs = armh->curs;
	asyh->base = armh->base;
	asyh->ovly = armh->ovly;
	asyh->dither = armh->dither;
	asyh->procamp = armh->procamp;
	asyh->clr.mask = 0;
	asyh->set.mask = 0;
	return &asyh->state;
}

static void
__drm_atomic_helper_crtc_reset(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	if (crtc->state)
		crtc->funcs->atomic_destroy_state(crtc, crtc->state);
	crtc->state = state;
	crtc->state->crtc = crtc;
}

static void
nv50_head_reset(struct drm_crtc *crtc)
{
	struct nv50_head_atom *asyh;

	if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL))))
		return;

	__drm_atomic_helper_crtc_reset(crtc, &asyh->state);
}

static void
nv50_head_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_funcs
nv50_head_func = {
	.reset = nv50_head_reset,
	.gamma_set = nv50_head_gamma_set,
	.destroy = nv50_head_destroy,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.set_property = drm_atomic_helper_crtc_set_property,
	.atomic_duplicate_state = nv50_head_atomic_duplicate_state,
	.atomic_destroy_state = nv50_head_atomic_destroy_state,
};

static int
nv50_head_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->client.device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct nv50_base *base;
	struct nv50_curs *curs;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	ret = nv50_base_new(drm, head, &base);
	if (ret == 0)
		ret = nv50_curs_new(drm, head, &curs);
	if (ret) {
		kfree(head);
		return ret;
	}

	crtc = &head->base.base;
	drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane,
				  &curs->wndw.plane, &nv50_head_func,
				  "head-%d", head->base.index);
	drm_crtc_helper_add(crtc, &nv50_head_help);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(&drm->client, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_head_destroy(crtc);
	return ret;
}

/******************************************************************************
 * Output path helpers
 *****************************************************************************/
static void
nv50_outp_release(struct nouveau_encoder *nv_encoder)
{
	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
	struct {
		struct nv50_disp_mthd_v1 base;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_RELEASE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv_encoder->or = -1;
	nv_encoder->link = 0;
}

static int
nv50_outp_acquire(struct nouveau_encoder *nv_encoder)
{
	struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_acquire_v0 info;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_ACQUIRE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret) {
		NV_ERROR(drm, "error acquiring output path: %d\n", ret);
		return ret;
	}

	nv_encoder->or = args.info.or;
	nv_encoder->link = args.info.link;
	return 0;
}
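
/* The acquire/release pair above shows the pattern used by every
 * NV50_DISP_MTHD_V1 call in this file: a versioned header whose
 * hasht/hashm fields identify the DCB output path, handed to
 * nvif_mthd() on the display object with method index 0.
 */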

static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);

	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	if (!native_mode)
		return 0;

	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Force use of scaler for non-EDID modes. */
			if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		mode = native_mode;
	}

	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}

	return 0;
}

static int
nv50_outp_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nouveau_connector *nv_connector =
		nouveau_connector(conn_state->connector);
	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   nv_connector->native_mode);
}

/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nv50_dac_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}

static void
nv50_dac_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u32 *push;

	nv50_outp_acquire(nv_encoder);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}
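
/* EVO addressing sketch for the DAC paths above: pre-GF110 the per-OR
 * control method sits at 0x0400 + or * 0x080, GF110+ moves it to
 * 0x0180 + or * 0x020, and head routing is expressed as a
 * (1 << nv_crtc->index) owner mask written as method data.
 */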

static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_dac_enable,
	.disable = nv50_dac_disable,
	.detect = nv50_dac_detect
};

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};

static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;

	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Audio
 *****************************************************************************/
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

static void
nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}
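
/* ELD note: the payload copied above is the connector's EDID-Like Data
 * block; sizing the method call as sizeof(args.base) +
 * drm_eld_size(args.data) sends only the valid portion of the buffer
 * rather than the whole array.
 */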

/******************************************************************************
 * HDMI
 *****************************************************************************/
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
		u8 infoframes[2 * 17]; /* two frames, up to 17 bytes each */
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;
	union hdmi_infoframe avi_frame;
	union hdmi_infoframe vendor_frame;
	int ret;
	int size;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi, mode);
	if (!ret) {
		/* We have an AVI InfoFrame, populate it to the display */
		args.pwr.avi_infoframe_length
			= hdmi_infoframe_pack(&avi_frame, args.infoframes, 17);
	}

	ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi, mode);
	if (!ret) {
		/* We have a Vendor InfoFrame, populate it to the display */
		args.pwr.vendor_infoframe_length
			= hdmi_infoframe_pack(&vendor_frame,
					      args.infoframes
					      + args.pwr.avi_infoframe_length,
					      17);
	}

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	size = sizeof(args.base)
		+ sizeof(args.pwr)
		+ args.pwr.avi_infoframe_length
		+ args.pwr.vendor_infoframe_length;
	nvif_mthd(disp->disp, 0, &args, size);
	nv50_audio_enable(encoder, mode);
}
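
/* Worked example of the max_ac_packet arithmetic above, assuming the
 * standard CEA 1920x1080@60 timing (htotal 2200, hdisplay 1920): the
 * 280-pixel blanking minus the 56-pixel rekey and the 18-pixel constant
 * leaves 206 pixels, so max_ac_packet = 206 / 32 = 6.
 */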

/******************************************************************************
 * MST
 *****************************************************************************/
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)

struct nv50_mstm {
	struct nouveau_encoder *outp;

	struct drm_dp_mst_topology_mgr mgr;
	struct nv50_msto *msto[4];

	bool modified;
	bool disabled;
	int links;
};

struct nv50_mstc {
	struct nv50_mstm *mstm;
	struct drm_dp_mst_port *port;
	struct drm_connector connector;

	struct drm_display_mode *native;
	struct edid *edid;

	int pbn;
};

struct nv50_msto {
	struct drm_encoder encoder;

	struct nv50_head *head;
	struct nv50_mstc *mstc;
	bool disabled;
};

static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;

	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}

	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}

	return NULL;
}

static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
		drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
	if (msto->disabled) {
		msto->mstc = NULL;
		msto->head = NULL;
		msto->disabled = false;
	}
}

static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht  = mstm->outp->dcb->hasht,
		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
			       (0x0100 << msto->head->base.index),
	};

	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}

	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);
	nvif_mthd(&drm->display->disp, 0, &args, sizeof(args));
}

static int
nv50_msto_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
	struct nv50_mstm *mstm = mstc->mstm;
	int bpp = conn_state->connector->display_info.bpc * 3;
	int slots;

	mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);

	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
	if (slots < 0)
		return slots;

	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   mstc->native);
}
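
/* PBN note: drm_dp_calc_pbn_mode() converts the pixel clock and bits
 * per pixel (3 components * bpc above) into DisplayPort MST Payload
 * Bandwidth Number units, which drm_dp_find_vcpi_slots() then maps to a
 * count of the 64 link time slots.
 */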

static void
nv50_msto_enable(struct drm_encoder *encoder)
{
	struct nv50_head *head = nv50_head(encoder->crtc);
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = NULL;
	struct nv50_mstm *mstm = NULL;
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	u8 proto, depth;
	int slots;
	bool r;

	drm_connector_list_iter_begin(encoder->dev, &conn_iter);
	drm_for_each_connector_iter(connector, &conn_iter) {
		if (connector->state->best_encoder == &msto->encoder) {
			mstc = nv50_mstc(connector);
			mstm = mstc->mstm;
			break;
		}
	}
	drm_connector_list_iter_end(&conn_iter);

	if (WARN_ON(!mstc))
		return;

	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, slots);
	WARN_ON(!r);

	if (!mstm->links++)
		nv50_outp_acquire(mstm->outp);

	if (mstm->outp->link & 1)
		proto = 0x8;
	else
		proto = 0x9;

	switch (mstc->connector.display_info.bpc) {
	case  6: depth = 0x2; break;
	case  8: depth = 0x5; break;
	case 10:
	default: depth = 0x6; break;
	}

	mstm->outp->update(mstm->outp, head->base.index,
			   &head->base.base.state->adjusted_mode, proto, depth);

	msto->head = head;
	msto->mstc = mstc;
	mstm->modified = true;
}
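
/* mstm->links above acts as a refcount of active streams on the SOR:
 * the first nv50_msto_enable() acquires the output path, and in
 * nv50_msto_disable() below the last stream going away marks the
 * topology disabled so nv50_mstm_prepare() can release it.
 */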

static void
nv50_msto_disable(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	if (mstc->port)
		drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);

	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
	mstm->modified = true;
	if (!--mstm->links)
		mstm->disabled = true;
	msto->disabled = true;
}

static const struct drm_encoder_helper_funcs
nv50_msto_help = {
	.disable = nv50_msto_disable,
	.enable = nv50_msto_enable,
	.atomic_check = nv50_msto_atomic_check,
};

static void
nv50_msto_destroy(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	drm_encoder_cleanup(&msto->encoder);
	kfree(msto);
}

static const struct drm_encoder_funcs
nv50_msto = {
	.destroy = nv50_msto_destroy,
};

static int
nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
	      struct nv50_msto **pmsto)
{
	struct nv50_msto *msto;
	int ret;

	if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
		return -ENOMEM;

	ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
			       DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
	if (ret) {
		kfree(*pmsto);
		*pmsto = NULL;
		return ret;
	}

	drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
	msto->encoder.possible_crtcs = heads;
	return 0;
}

static struct drm_encoder *
nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
			      struct drm_connector_state *connector_state)
{
	struct nv50_head *head = nv50_head(connector_state->crtc);
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (mstc->port) {
		struct nv50_mstm *mstm = mstc->mstm;
		return &mstm->msto[head->base.index]->encoder;
	}
	return NULL;
}

static struct drm_encoder *
nv50_mstc_best_encoder(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (mstc->port) {
		struct nv50_mstm *mstm = mstc->mstm;
		return &mstm->msto[0]->encoder;
	}
	return NULL;
}

static enum drm_mode_status
nv50_mstc_mode_valid(struct drm_connector *connector,
		     struct drm_display_mode *mode)
{
	return MODE_OK;
}

static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret = 0;

	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
	drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid);
	if (mstc->edid) {
		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
		drm_edid_to_eld(&mstc->connector, mstc->edid);
	}

	if (!mstc->connector.display_info.bpc)
		mstc->connector.display_info.bpc = 8;

	if (mstc->native)
		drm_mode_destroy(mstc->connector.dev, mstc->native);
	mstc->native = nouveau_conn_native_mode(&mstc->connector);
	return ret;
}

static const struct drm_connector_helper_funcs
nv50_mstc_help = {
	.get_modes = nv50_mstc_get_modes,
	.mode_valid = nv50_mstc_mode_valid,
	.best_encoder = nv50_mstc_best_encoder,
	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
};

static enum drm_connector_status
nv50_mstc_detect(struct drm_connector *connector, bool force)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (!mstc->port)
		return connector_status_disconnected;
	return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
}

static void
nv50_mstc_destroy(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	drm_connector_cleanup(&mstc->connector);
	kfree(mstc);
}

static const struct drm_connector_funcs
nv50_mstc = {
	.dpms = drm_atomic_helper_connector_dpms,
	.reset = nouveau_conn_reset,
	.detect = nv50_mstc_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = drm_atomic_helper_connector_set_property,
	.destroy = nv50_mstc_destroy,
	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
	.atomic_set_property = nouveau_conn_atomic_set_property,
	.atomic_get_property = nouveau_conn_atomic_get_property,
};

static int
nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
	      const char *path, struct nv50_mstc **pmstc)
{
	struct drm_device *dev = mstm->outp->base.base.dev;
	struct nv50_mstc *mstc;
	int ret, i;

	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
		return -ENOMEM;
	mstc->mstm = mstm;
	mstc->port = port;

	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		kfree(*pmstc);
		*pmstc = NULL;
		return ret;
	}

	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);

	mstc->connector.funcs->reset(&mstc->connector);
	nouveau_conn_attach_properties(&mstc->connector);

	for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto[i]; i++)
		drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);

	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
	drm_mode_connector_set_path_property(&mstc->connector, path);
	return 0;
}

static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
	ret = drm_dp_check_act_status(&mstm->mgr);

	ret = drm_dp_update_payload_part2(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_cleanup(msto);
		}
	}

	mstm->modified = false;
}

static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
	ret = drm_dp_update_payload_part1(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_prepare(msto);
		}
	}

	if (mstm->disabled) {
		if (!mstm->links)
			nv50_outp_release(mstm->outp);
		mstm->disabled = false;
	}
}

static void
nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
}

static void
nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
			    struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nv50_mstc *mstc = nv50_mstc(connector);

	drm_connector_unregister(&mstc->connector);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);
	mstc->port = NULL;
	drm_modeset_unlock_all(drm->dev);

	drm_connector_unreference(&mstc->connector);
}

static void
nv50_mstm_register_connector(struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);
	drm_modeset_unlock_all(drm->dev);

	drm_connector_register(connector);
}

static struct drm_connector *
nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
			struct drm_dp_mst_port *port, const char *path)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	struct nv50_mstc *mstc;
	int ret;

	ret = nv50_mstc_new(mstm, port, path, &mstc);
	if (ret) {
		if (mstc)
			mstc->connector.funcs->destroy(&mstc->connector);
		return NULL;
	}

	return &mstc->connector;
}

static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
	.add_connector = nv50_mstm_add_connector,
	.register_connector = nv50_mstm_register_connector,
	.destroy_connector = nv50_mstm_destroy_connector,
	.hotplug = nv50_mstm_hotplug,
};

void
nv50_mstm_service(struct nv50_mstm *mstm)
{
	struct drm_dp_aux *aux = mstm->mgr.aux;
	bool handled = true;
	int ret;
	u8 esi[8] = {};

	while (handled) {
		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (ret != 8) {
			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
			return;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
	}
}

void
nv50_mstm_remove(struct nv50_mstm *mstm)
{
	if (mstm)
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
}

static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp;
	int ret;

	if (dpcd >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
		if (ret < 0)
			return ret;

		dpcd &= ~DP_MST_EN;
		if (state)
			dpcd |= DP_MST_EN;

		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
		if (ret < 0)
			return ret;
	}

	return nvif_mthd(disp, 0, &args, sizeof(args));
}

int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	int ret, state = 0;

	if (!mstm)
		return 0;

	if (dpcd[0] >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			return ret;

		if (!(dpcd[1] & DP_MST_CAP))
			dpcd[0] = 0x11;
		else
			state = allow;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], state);
	if (ret)
		return ret;

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
	if (ret)
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return mstm->mgr.mst_state;
}

static void
nv50_mstm_fini(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
}

static void
nv50_mstm_init(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_resume(&mstm->mgr);
}

static void
nv50_mstm_del(struct nv50_mstm **pmstm)
{
	struct nv50_mstm *mstm = *pmstm;
	if (mstm) {
		kfree(*pmstm);
		*pmstm = NULL;
	}
}

static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct nv50_mstm **pmstm)
{
	const int max_payloads = hweight8(outp->dcb->heads);
	struct drm_device *dev = outp->base.base.dev;
	struct nv50_mstm *mstm;
	int ret, i;
	u8 dpcd;

	/* This is a workaround for some monitors not functioning
	 * correctly in MST mode on initial module load.  I think
	 * some bad interaction with the VBIOS may be responsible.
	 *
	 * A good ol' off and on again seems to work here ;)
	 */
	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
	if (ret >= 0 && dpcd >= 0x12)
		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);

	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
		return -ENOMEM;
	mstm->outp = outp;
	mstm->mgr.cbs = &nv50_mstm;

	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
					   max_payloads, conn_base_id);
	if (ret)
		return ret;

	for (i = 0; i < max_payloads; i++) {
		ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
				    i, &mstm->msto[i]);
		if (ret)
			return ret;
	}

	return 0;
}
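
/* One msto encoder is created per possible head (max_payloads is the
 * hweight8() of the DCB heads mask), which is what lets
 * nv50_mstc_atomic_best_encoder() hand each head its own MST stream
 * encoder.
 */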

/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
		struct drm_display_mode *mode, u8 proto, u8 depth)
{
	struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base;
	u32 *push;

	if (!mode) {
		nv_encoder->ctrl &= ~BIT(head);
		if (!(nv_encoder->ctrl & 0x0000000f))
			nv_encoder->ctrl = 0;
	} else {
		nv_encoder->ctrl |= proto << 8;
		nv_encoder->ctrl |= BIT(head);
	}

	if ((push = evo_wait(core, 6))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			if (mode) {
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					nv_encoder->ctrl |= 0x00001000;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					nv_encoder->ctrl |= 0x00002000;
				nv_encoder->ctrl |= depth << 16;
			}
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
		} else {
			if (mode) {
				u32 magic = 0x31ec6000 | (head << 25);
				u32 syncs = 0x00000001;
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					syncs |= 0x00000008;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					syncs |= 0x00000010;
				if (mode->flags & DRM_MODE_FLAG_INTERLACE)
					magic |= 0x00000001;

				evo_mthd(push, 0x0404 + (head * 0x300), 2);
				evo_data(push, syncs | (depth << 6));
				evo_data(push, magic);
			}
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		}
		evo_data(push, nv_encoder->ctrl);
		evo_kick(push, core);
	}
}
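
/* SOR control word layout as used above (inferred from the code, not
 * from documentation): bits 0-3 form a head-attachment mask (BIT(head)),
 * bits 8-11 carry the protocol, and on pre-GF110 cores sync polarity
 * lands in bits 12-13 with the depth folded in at bit 16.
 */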

static void
nv50_sor_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		struct nvkm_i2c_aux *aux = nv_encoder->aux;
		u8 pwr;

		if (aux) {
			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
			if (ret == 0) {
				pwr &= ~DP_SET_POWER_MASK;
				pwr |=  DP_SET_POWER_D3;
				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
			}
		}

		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
		nv50_audio_disable(encoder, nv_crtc);
		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
		nv50_outp_release(nv_encoder);
	}
}

static void
nv50_sor_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;
	nv50_outp_acquire(nv_encoder);

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 *  - Need to (i.e. rate > 165MHz)
			 *  - DCB says we can
			 *  - Not an HDMI monitor, since there's no dual-link
			 *    on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_enable(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		if (nv_connector->base.display_info.bpc == 6)
			depth = 0x2;
		else
		if (nv_connector->base.display_info.bpc == 8)
			depth = 0x5;
		else
			depth = 0x6;

		if (nv_encoder->link & 1)
			proto = 0x8;
		else
			proto = 0x9;

		nv50_audio_enable(encoder, mode);
		break;
	default:
		BUG();
		break;
	}

	nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth);
}

static const struct drm_encoder_helper_funcs
nv50_sor_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_sor_enable,
	.disable = nv50_sor_disable,
};

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	nv50_mstm_del(&nv_encoder->dp.mstm);
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};

static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->update = nv50_sor_update;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_help);

	drm_mode_connector_attach_encoder(connector, encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nv50_disp *disp = nv50_disp(encoder->dev);
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			if (disp->disp->oclass < GF110_DISP) {
				/* HW has no support for address-only
				 * transactions, so we're required to
				 * use custom I2C-over-AUX code.
				 */
				nv_encoder->i2c = &aux->i2c;
			} else {
				nv_encoder->i2c = &nv_connector->aux.ddc;
			}
			nv_encoder->aux = aux;
		}

		/*TODO: Use DP Info Table to check for support. */
		if (disp->disp->oclass >= GF110_DISP) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
					    nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			if (ret)
				return ret;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return 0;
}

/******************************************************************************
 * PIOR
 *****************************************************************************/
static int
nv50_pior_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
	if (ret)
		return ret;
	crtc_state->adjusted_mode.clock *= 2;
	return 0;
}

static void
nv50_pior_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}

static void
nv50_pior_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u8 owner = 1 << nv_crtc->index;
	u8 proto, depth;
	u32 *push;

	nv50_outp_acquire(nv_encoder);

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_connector->base.display_info.bpc) {
	case 10: depth = 0x6; break;
	case  8: depth = 0x5; break;
	case  6: depth = 0x2; break;
	default: depth = 0x0; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		BUG();
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 ctrl = (depth << 16) | (proto << 8) | owner;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				ctrl |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				ctrl |= 0x00000002;
			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
			evo_data(push, ctrl);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

static const struct drm_encoder_helper_funcs
nv50_pior_help = {
	.atomic_check = nv50_pior_atomic_check,
	.enable = nv50_pior_enable,
	.disable = nv50_pior_disable,
};

static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};

static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc  = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc  = aux ? &nv_connector->aux.ddc : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Atomic
 *****************************************************************************/

static void
nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_dmac *core = &disp->mast.base;
	struct nv50_mstm *mstm;
	struct drm_encoder *encoder;
	u32 *push;

	NV_ATOMIC(drm, "commit core %08x\n", interlock);

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_prepare(mstm);
		}
	}

	if ((push = evo_wait(core, 5))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, interlock);
		evo_data(push, 0x00000000);
		nouveau_bo_wr32(disp->sync, 0, 0x00000000);
		evo_kick(push, core);
		if (nvif_msec(&drm->client.device, 2000ULL,
			if (nouveau_bo_rd32(disp->sync, 0))
				break;
			usleep_range(1, 2);
		) < 0)
			NV_ERROR(drm, "EVO timeout\n");
	}

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_cleanup(mstm);
		}
	}
}
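
/* Completion handshake sketch: method 0x0084 appears to arm a notifier
 * at offset 0 of the shared sync buffer, the core update (0x0080) is
 * kicked with the channel interlock mask, and nvif_msec() then polls
 * that dword for up to two seconds before declaring an EVO timeout.
 */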

static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
	struct drm_device *dev = state->dev;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;
	u32 interlock_core = 0;
	u32 interlock_chan = 0;
	int i;

	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
	drm_atomic_helper_wait_for_fences(dev, state, false);
	drm_atomic_helper_wait_for_dependencies(state);
	drm_atomic_helper_update_legacy_modeset_state(dev, state);

	if (atom->lock_core)
		mutex_lock(&disp->mutex);

	/* Disable head(s). */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
			  asyh->clr.mask, asyh->set.mask);
		if (crtc_state->active && !asyh->state.active)
			drm_crtc_vblank_off(crtc);

		if (asyh->clr.mask) {
			nv50_head_flush_clr(head, asyh, atom->flush_disable);
			interlock_core |= 1;
		}
	}

	/* Disable plane(s). */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
			  asyw->clr.mask, asyw->set.mask);
		if (!asyw->clr.mask)
			continue;

		interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core,
						      atom->flush_disable,
						      asyw);
	}

	/* Disable output path(s). */
	list_for_each_entry(outp, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
			  outp->clr.mask, outp->set.mask);

		if (outp->clr.mask) {
			help->disable(encoder);
			interlock_core |= 1;
			if (outp->flush_disable) {
				nv50_disp_atomic_commit_core(drm, interlock_chan);
				interlock_core = 0;
				interlock_chan = 0;
			}
		}
	}

	/* Flush disable. */
	if (interlock_core) {
		if (atom->flush_disable) {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
			interlock_core = 0;
			interlock_chan = 0;
		}
	}

	/* Update output path(s). */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
			  outp->set.mask, outp->clr.mask);

		if (outp->set.mask) {
			help->enable(encoder);
			interlock_core = 1;
		}

		list_del(&outp->head);
		kfree(outp);
	}

	/* Update head(s). */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
			  asyh->set.mask, asyh->clr.mask);

		if (asyh->set.mask) {
			nv50_head_flush_set(head, asyh);
			interlock_core = 1;
		}

		if (asyh->state.active) {
			if (!crtc_state->active)
				drm_crtc_vblank_on(crtc);
			if (asyh->state.event)
				drm_crtc_vblank_get(crtc);
		}
	}

	/* Update plane(s). */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
			  asyw->set.mask, asyw->clr.mask);
		if ( !asyw->set.mask &&
		    (!asyw->clr.mask || atom->flush_disable))
			continue;

		interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw);
	}

	/* Flush update. */
	if (interlock_core) {
		if (!interlock_chan && atom->state.legacy_cursor_update) {
			u32 *push = evo_wait(&disp->mast, 2);
			if (push) {
				evo_mthd(push, 0x0080, 1);
				evo_data(push, 0x00000000);
				evo_kick(push, &disp->mast);
			}
		} else {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
		}
	}

	if (atom->lock_core)
		mutex_unlock(&disp->mutex);

	/* Wait for HW to signal completion. */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		int ret = nv50_wndw_wait_armed(wndw, asyw);
		if (ret)
			NV_ERROR(drm, "%s: timeout\n", plane->name);
	}

	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		if (crtc->state->event) {
			unsigned long flags;
			/* Get correct count/ts if racing with vblank irq */
			if (crtc->state->active)
				drm_accurate_vblank_count(crtc);
			spin_lock_irqsave(&crtc->dev->event_lock, flags);
			drm_crtc_send_vblank_event(crtc, crtc->state->event);
			spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
			crtc->state->event = NULL;
			if (crtc->state->active)
				drm_crtc_vblank_put(crtc);
		}
	}

	drm_atomic_helper_commit_hw_done(state);
	drm_atomic_helper_cleanup_planes(dev, state);
	drm_atomic_helper_commit_cleanup_done(state);
	drm_atomic_state_put(state);
}
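
/* Commit ordering in the tail above: all clears (heads, planes, output
 * paths) are flushed to hardware first, with an early core flush when
 * an MST path must complete its disable; only then are outputs, heads
 * and planes re-enabled and the final interlocked update kicked.
 */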

static void
nv50_disp_atomic_commit_work(struct work_struct *work)
{
	struct drm_atomic_state *state =
		container_of(work, typeof(*state), commit_work);
	nv50_disp_atomic_commit_tail(state);
}

static int
nv50_disp_atomic_commit(struct drm_device *dev,
			struct drm_atomic_state *state, bool nonblock)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	bool active = false;
	int ret, i;

	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0 && ret != -EACCES)
		return ret;

	ret = drm_atomic_helper_setup_commit(state, nonblock);
	if (ret)
		goto done;

	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);

	ret = drm_atomic_helper_prepare_planes(dev, state);
	if (ret)
		goto done;

	if (!nonblock) {
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto err_cleanup;
	}

	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (asyw->set.image) {
			asyw->ntfy.handle = wndw->dmac->sync.handle;
			asyw->ntfy.offset = wndw->ntfy;
			asyw->ntfy.awaken = false;
			asyw->set.ntfy = true;
			nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000);
			wndw->ntfy ^= 0x10;
		}
	}

	drm_atomic_helper_swap_state(state, true);
	drm_atomic_state_get(state);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

	drm_for_each_crtc(crtc, dev) {
		if (crtc->state->enable) {
			if (!drm->have_disp_power_ref) {
				drm->have_disp_power_ref = true;
				return 0;
			}
			active = true;
			break;
		}
	}

	if (!active && drm->have_disp_power_ref) {
		pm_runtime_put_autosuspend(dev->dev);
		drm->have_disp_power_ref = false;
	}

err_cleanup:
	if (ret)
		drm_atomic_helper_cleanup_planes(dev, state);
done:
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}

static struct nv50_outp_atom *
nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
{
	struct nv50_outp_atom *outp;

	list_for_each_entry(outp, &atom->outp, head) {
		if (outp->encoder == encoder)
			return outp;
	}

	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
	if (!outp)
		return ERR_PTR(-ENOMEM);

	list_add(&outp->head, &atom->outp);
	outp->encoder = encoder;
	return outp;
}

static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
				struct drm_connector *connector)
{
	struct drm_encoder *encoder = connector->state->best_encoder;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = connector->state->crtc))
		return 0;

	crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
	if (crtc->state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			outp->flush_disable = true;
			atom->flush_disable = true;
		}
		outp->clr.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}

static int
nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
				struct drm_connector_state *connector_state)
{
	struct drm_encoder *encoder = connector_state->best_encoder;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = connector_state->crtc))
		return 0;

	crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
	if (crtc_state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		outp->set.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}

static int
nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct drm_connector_state *connector_state;
	struct drm_connector *connector;
	int ret, i;

	ret = drm_atomic_helper_check(dev, state);
	if (ret)
		return ret;

	for_each_connector_in_state(state, connector, connector_state, i) {
		ret = nv50_disp_outp_atomic_check_clr(atom, connector);
		if (ret)
			return ret;

		ret = nv50_disp_outp_atomic_check_set(atom, connector_state);
		if (ret)
			return ret;
	}

	return 0;
}

static void
nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;

	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		list_del(&outp->head);
		kfree(outp);
	}

	drm_atomic_state_default_clear(state);
}

static void
nv50_disp_atomic_state_free(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	drm_atomic_state_default_release(&atom->state);
	kfree(atom);
}

static struct drm_atomic_state *
nv50_disp_atomic_state_alloc(struct drm_device *dev)
{
	struct nv50_atom *atom;
	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
	    drm_atomic_state_init(dev, &atom->state) < 0) {
		kfree(atom);
		return NULL;
	}
	INIT_LIST_HEAD(&atom->outp);
	return &atom->state;
}

static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};

/******************************************************************************
 * Init
 *****************************************************************************/

void
nv50_display_fini(struct drm_device *dev)
{
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_fini(wndw);
	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			nv_encoder = nouveau_encoder(encoder);
			nv50_mstm_fini(nv_encoder->dp.mstm);
		}
	}
}

int
nv50_display_init(struct drm_device *dev)
{
	struct drm_encoder *encoder;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	u32 *push;

	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;

	evo_mthd(push, 0x0088, 1);
	evo_data(push, nv50_mast(dev)->base.sync.handle);
	evo_kick(push, nv50_mast(dev));

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			struct nouveau_encoder *nv_encoder =
				nouveau_encoder(encoder);
			nv50_mstm_init(nv_encoder->dp.mstm);
		}
	}

	drm_for_each_crtc(crtc, dev) {
		nv50_head_lut_load(crtc);
	}

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_init(wndw);
	}

	return 0;
}

void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_dmac_destroy(&disp->mast.base, disp->disp);

	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}

MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
static int nouveau_atomic = 0;
module_param_named(atomic, nouveau_atomic, int, 0400);
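
/* The modparam above is consulted once in nv50_display_create() below:
 * DRIVER_ATOMIC is only advertised when atomic=1, keeping the atomic
 * ioctl opt-in while the legacy paths stay the default.
 */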

int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->client.device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	mutex_init(&disp->mutex);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	if (nouveau_atomic)
		dev->driver->driver_features |= DRIVER_ATOMIC;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
			       &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x022448);
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_head_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}