2 * Copyright 2011 Red Hat Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
25 #include <linux/dma-mapping.h>
26 #include <linux/hdmi.h>
29 #include <drm/drm_atomic.h>
30 #include <drm/drm_atomic_helper.h>
31 #include <drm/drm_crtc_helper.h>
32 #include <drm/drm_dp_helper.h>
33 #include <drm/drm_fb_helper.h>
34 #include <drm/drm_plane_helper.h>
35 #include <drm/drm_edid.h>
37 #include <nvif/class.h>
38 #include <nvif/cl0002.h>
39 #include <nvif/cl5070.h>
40 #include <nvif/cl507a.h>
41 #include <nvif/cl507b.h>
42 #include <nvif/cl507c.h>
43 #include <nvif/cl507d.h>
44 #include <nvif/cl507e.h>
45 #include <nvif/event.h>
47 #include "nouveau_drv.h"
48 #include "nouveau_dma.h"
49 #include "nouveau_gem.h"
50 #include "nouveau_connector.h"
51 #include "nouveau_encoder.h"
52 #include "nouveau_crtc.h"
53 #include "nouveau_fence.h"
54 #include "nouveau_fbcon.h"
55 #include "nv50_display.h"
59 #define EVO_MASTER (0x00)
60 #define EVO_FLIP(c) (0x01 + (c))
61 #define EVO_OVLY(c) (0x05 + (c))
62 #define EVO_OIMM(c) (0x09 + (c))
63 #define EVO_CURS(c) (0x0d + (c))
65 /* offsets in shared sync bo of various structures */
66 #define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
67 #define EVO_MAST_NTFY EVO_SYNC( 0, 0x00)
68 #define EVO_FLIP_SEM0(c) EVO_SYNC((c) + 1, 0x00)
69 #define EVO_FLIP_SEM1(c) EVO_SYNC((c) + 1, 0x10)
70 #define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20)
71 #define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30)
73 /******************************************************************************
75 *****************************************************************************/
76 #define nv50_atom(p) container_of((p), struct nv50_atom, state)
79 struct drm_atomic_state state
;
81 struct list_head outp
;
86 struct nv50_outp_atom
{
87 struct list_head head
;
89 struct drm_encoder
*encoder
;
107 #define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)
109 struct nv50_head_atom
{
110 struct drm_crtc_state state
;
119 struct nv50_head_mode
{
216 static inline struct nv50_head_atom
*
217 nv50_head_atom_get(struct drm_atomic_state
*state
, struct drm_crtc
*crtc
)
219 struct drm_crtc_state
*statec
= drm_atomic_get_crtc_state(state
, crtc
);
221 return (void *)statec
;
222 return nv50_head_atom(statec
);
225 #define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state)
227 struct nv50_wndw_atom
{
228 struct drm_plane_state state
;
231 struct drm_rect clip
;
292 /******************************************************************************
294 *****************************************************************************/
297 struct nvif_object user
;
298 struct nvif_device
*device
;
302 nv50_chan_create(struct nvif_device
*device
, struct nvif_object
*disp
,
303 const s32
*oclass
, u8 head
, void *data
, u32 size
,
304 struct nv50_chan
*chan
)
306 struct nvif_sclass
*sclass
;
309 chan
->device
= device
;
311 ret
= n
= nvif_object_sclass_get(disp
, &sclass
);
316 for (i
= 0; i
< n
; i
++) {
317 if (sclass
[i
].oclass
== oclass
[0]) {
318 ret
= nvif_object_init(disp
, 0, oclass
[0],
319 data
, size
, &chan
->user
);
321 nvif_object_map(&chan
->user
);
322 nvif_object_sclass_put(&sclass
);
329 nvif_object_sclass_put(&sclass
);
334 nv50_chan_destroy(struct nv50_chan
*chan
)
336 nvif_object_fini(&chan
->user
);
339 /******************************************************************************
341 *****************************************************************************/
344 struct nv50_chan base
;
348 nv50_pioc_destroy(struct nv50_pioc
*pioc
)
350 nv50_chan_destroy(&pioc
->base
);
354 nv50_pioc_create(struct nvif_device
*device
, struct nvif_object
*disp
,
355 const s32
*oclass
, u8 head
, void *data
, u32 size
,
356 struct nv50_pioc
*pioc
)
358 return nv50_chan_create(device
, disp
, oclass
, head
, data
, size
,
362 /******************************************************************************
364 *****************************************************************************/
367 struct nv50_pioc base
;
371 nv50_oimm_create(struct nvif_device
*device
, struct nvif_object
*disp
,
372 int head
, struct nv50_oimm
*oimm
)
374 struct nv50_disp_cursor_v0 args
= {
377 static const s32 oclass
[] = {
386 return nv50_pioc_create(device
, disp
, oclass
, head
, &args
, sizeof(args
),
390 /******************************************************************************
392 *****************************************************************************/
394 struct nv50_dmac_ctxdma
{
395 struct list_head head
;
396 struct nvif_object object
;
400 struct nv50_chan base
;
404 struct nvif_object sync
;
405 struct nvif_object vram
;
406 struct list_head ctxdma
;
408 /* Protects against concurrent pushbuf access to this channel, lock is
409 * grabbed by evo_wait (if the pushbuf reservation is successful) and
410 * dropped again by evo_kick. */
415 nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma
*ctxdma
)
417 nvif_object_fini(&ctxdma
->object
);
418 list_del(&ctxdma
->head
);
422 static struct nv50_dmac_ctxdma
*
423 nv50_dmac_ctxdma_new(struct nv50_dmac
*dmac
, struct nouveau_framebuffer
*fb
)
425 struct nouveau_drm
*drm
= nouveau_drm(fb
->base
.dev
);
426 struct nv50_dmac_ctxdma
*ctxdma
;
427 const u8 kind
= (fb
->nvbo
->tile_flags
& 0x0000ff00) >> 8;
428 const u32 handle
= 0xfb000000 | kind
;
430 struct nv_dma_v0 base
;
432 struct nv50_dma_v0 nv50
;
433 struct gf100_dma_v0 gf100
;
434 struct gf119_dma_v0 gf119
;
437 u32 argc
= sizeof(args
.base
);
440 list_for_each_entry(ctxdma
, &dmac
->ctxdma
, head
) {
441 if (ctxdma
->object
.handle
== handle
)
445 if (!(ctxdma
= kzalloc(sizeof(*ctxdma
), GFP_KERNEL
)))
446 return ERR_PTR(-ENOMEM
);
447 list_add(&ctxdma
->head
, &dmac
->ctxdma
);
449 args
.base
.target
= NV_DMA_V0_TARGET_VRAM
;
450 args
.base
.access
= NV_DMA_V0_ACCESS_RDWR
;
452 args
.base
.limit
= drm
->client
.device
.info
.ram_user
- 1;
454 if (drm
->client
.device
.info
.chipset
< 0x80) {
455 args
.nv50
.part
= NV50_DMA_V0_PART_256
;
456 argc
+= sizeof(args
.nv50
);
458 if (drm
->client
.device
.info
.chipset
< 0xc0) {
459 args
.nv50
.part
= NV50_DMA_V0_PART_256
;
460 args
.nv50
.kind
= kind
;
461 argc
+= sizeof(args
.nv50
);
463 if (drm
->client
.device
.info
.chipset
< 0xd0) {
464 args
.gf100
.kind
= kind
;
465 argc
+= sizeof(args
.gf100
);
467 args
.gf119
.page
= GF119_DMA_V0_PAGE_LP
;
468 args
.gf119
.kind
= kind
;
469 argc
+= sizeof(args
.gf119
);
472 ret
= nvif_object_init(&dmac
->base
.user
, handle
, NV_DMA_IN_MEMORY
,
473 &args
, argc
, &ctxdma
->object
);
475 nv50_dmac_ctxdma_del(ctxdma
);
483 nv50_dmac_destroy(struct nv50_dmac
*dmac
, struct nvif_object
*disp
)
485 struct nvif_device
*device
= dmac
->base
.device
;
486 struct nv50_dmac_ctxdma
*ctxdma
, *ctxtmp
;
488 list_for_each_entry_safe(ctxdma
, ctxtmp
, &dmac
->ctxdma
, head
) {
489 nv50_dmac_ctxdma_del(ctxdma
);
492 nvif_object_fini(&dmac
->vram
);
493 nvif_object_fini(&dmac
->sync
);
495 nv50_chan_destroy(&dmac
->base
);
498 struct device
*dev
= nvxx_device(device
)->dev
;
499 dma_free_coherent(dev
, PAGE_SIZE
, dmac
->ptr
, dmac
->handle
);
504 nv50_dmac_create(struct nvif_device
*device
, struct nvif_object
*disp
,
505 const s32
*oclass
, u8 head
, void *data
, u32 size
, u64 syncbuf
,
506 struct nv50_dmac
*dmac
)
508 struct nv50_disp_core_channel_dma_v0
*args
= data
;
509 struct nvif_object pushbuf
;
512 mutex_init(&dmac
->lock
);
514 dmac
->ptr
= dma_alloc_coherent(nvxx_device(device
)->dev
, PAGE_SIZE
,
515 &dmac
->handle
, GFP_KERNEL
);
519 ret
= nvif_object_init(&device
->object
, 0, NV_DMA_FROM_MEMORY
,
520 &(struct nv_dma_v0
) {
521 .target
= NV_DMA_V0_TARGET_PCI_US
,
522 .access
= NV_DMA_V0_ACCESS_RD
,
523 .start
= dmac
->handle
+ 0x0000,
524 .limit
= dmac
->handle
+ 0x0fff,
525 }, sizeof(struct nv_dma_v0
), &pushbuf
);
529 args
->pushbuf
= nvif_handle(&pushbuf
);
531 ret
= nv50_chan_create(device
, disp
, oclass
, head
, data
, size
,
533 nvif_object_fini(&pushbuf
);
537 ret
= nvif_object_init(&dmac
->base
.user
, 0xf0000000, NV_DMA_IN_MEMORY
,
538 &(struct nv_dma_v0
) {
539 .target
= NV_DMA_V0_TARGET_VRAM
,
540 .access
= NV_DMA_V0_ACCESS_RDWR
,
541 .start
= syncbuf
+ 0x0000,
542 .limit
= syncbuf
+ 0x0fff,
543 }, sizeof(struct nv_dma_v0
),
548 ret
= nvif_object_init(&dmac
->base
.user
, 0xf0000001, NV_DMA_IN_MEMORY
,
549 &(struct nv_dma_v0
) {
550 .target
= NV_DMA_V0_TARGET_VRAM
,
551 .access
= NV_DMA_V0_ACCESS_RDWR
,
553 .limit
= device
->info
.ram_user
- 1,
554 }, sizeof(struct nv_dma_v0
),
559 INIT_LIST_HEAD(&dmac
->ctxdma
);
563 /******************************************************************************
565 *****************************************************************************/
568 struct nv50_dmac base
;
572 nv50_core_create(struct nvif_device
*device
, struct nvif_object
*disp
,
573 u64 syncbuf
, struct nv50_mast
*core
)
575 struct nv50_disp_core_channel_dma_v0 args
= {
576 .pushbuf
= 0xb0007d00,
578 static const s32 oclass
[] = {
579 GP102_DISP_CORE_CHANNEL_DMA
,
580 GP100_DISP_CORE_CHANNEL_DMA
,
581 GM200_DISP_CORE_CHANNEL_DMA
,
582 GM107_DISP_CORE_CHANNEL_DMA
,
583 GK110_DISP_CORE_CHANNEL_DMA
,
584 GK104_DISP_CORE_CHANNEL_DMA
,
585 GF110_DISP_CORE_CHANNEL_DMA
,
586 GT214_DISP_CORE_CHANNEL_DMA
,
587 GT206_DISP_CORE_CHANNEL_DMA
,
588 GT200_DISP_CORE_CHANNEL_DMA
,
589 G82_DISP_CORE_CHANNEL_DMA
,
590 NV50_DISP_CORE_CHANNEL_DMA
,
594 return nv50_dmac_create(device
, disp
, oclass
, 0, &args
, sizeof(args
),
595 syncbuf
, &core
->base
);
598 /******************************************************************************
600 *****************************************************************************/
603 struct nv50_dmac base
;
609 nv50_base_create(struct nvif_device
*device
, struct nvif_object
*disp
,
610 int head
, u64 syncbuf
, struct nv50_sync
*base
)
612 struct nv50_disp_base_channel_dma_v0 args
= {
613 .pushbuf
= 0xb0007c00 | head
,
616 static const s32 oclass
[] = {
617 GK110_DISP_BASE_CHANNEL_DMA
,
618 GK104_DISP_BASE_CHANNEL_DMA
,
619 GF110_DISP_BASE_CHANNEL_DMA
,
620 GT214_DISP_BASE_CHANNEL_DMA
,
621 GT200_DISP_BASE_CHANNEL_DMA
,
622 G82_DISP_BASE_CHANNEL_DMA
,
623 NV50_DISP_BASE_CHANNEL_DMA
,
627 return nv50_dmac_create(device
, disp
, oclass
, head
, &args
, sizeof(args
),
628 syncbuf
, &base
->base
);
631 /******************************************************************************
633 *****************************************************************************/
636 struct nv50_dmac base
;
640 nv50_ovly_create(struct nvif_device
*device
, struct nvif_object
*disp
,
641 int head
, u64 syncbuf
, struct nv50_ovly
*ovly
)
643 struct nv50_disp_overlay_channel_dma_v0 args
= {
644 .pushbuf
= 0xb0007e00 | head
,
647 static const s32 oclass
[] = {
648 GK104_DISP_OVERLAY_CONTROL_DMA
,
649 GF110_DISP_OVERLAY_CONTROL_DMA
,
650 GT214_DISP_OVERLAY_CHANNEL_DMA
,
651 GT200_DISP_OVERLAY_CHANNEL_DMA
,
652 G82_DISP_OVERLAY_CHANNEL_DMA
,
653 NV50_DISP_OVERLAY_CHANNEL_DMA
,
657 return nv50_dmac_create(device
, disp
, oclass
, head
, &args
, sizeof(args
),
658 syncbuf
, &ovly
->base
);
662 struct nouveau_crtc base
;
663 struct nv50_ovly ovly
;
664 struct nv50_oimm oimm
;
667 #define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
668 #define nv50_ovly(c) (&nv50_head(c)->ovly)
669 #define nv50_oimm(c) (&nv50_head(c)->oimm)
670 #define nv50_chan(c) (&(c)->base.base)
671 #define nv50_vers(c) nv50_chan(c)->user.oclass
674 struct nvif_object
*disp
;
675 struct nv50_mast mast
;
677 struct nouveau_bo
*sync
;
682 static struct nv50_disp
*
683 nv50_disp(struct drm_device
*dev
)
685 return nouveau_display(dev
)->priv
;
688 #define nv50_mast(d) (&nv50_disp(d)->mast)
690 /******************************************************************************
691 * EVO channel helpers
692 *****************************************************************************/
694 evo_wait(void *evoc
, int nr
)
696 struct nv50_dmac
*dmac
= evoc
;
697 struct nvif_device
*device
= dmac
->base
.device
;
698 u32 put
= nvif_rd32(&dmac
->base
.user
, 0x0000) / 4;
700 mutex_lock(&dmac
->lock
);
701 if (put
+ nr
>= (PAGE_SIZE
/ 4) - 8) {
702 dmac
->ptr
[put
] = 0x20000000;
704 nvif_wr32(&dmac
->base
.user
, 0x0000, 0x00000000);
705 if (nvif_msec(device
, 2000,
706 if (!nvif_rd32(&dmac
->base
.user
, 0x0004))
709 mutex_unlock(&dmac
->lock
);
710 pr_err("nouveau: evo channel stalled\n");
717 return dmac
->ptr
+ put
;
721 evo_kick(u32
*push
, void *evoc
)
723 struct nv50_dmac
*dmac
= evoc
;
724 nvif_wr32(&dmac
->base
.user
, 0x0000, (push
- dmac
->ptr
) << 2);
725 mutex_unlock(&dmac
->lock
);
728 #define evo_mthd(p, m, s) do { \
729 const u32 _m = (m), _s = (s); \
730 if (drm_debug & DRM_UT_KMS) \
731 pr_err("%04x %d %s\n", _m, _s, __func__); \
732 *((p)++) = ((_s << 18) | _m); \
735 #define evo_data(p, d) do { \
736 const u32 _d = (d); \
737 if (drm_debug & DRM_UT_KMS) \
738 pr_err("\t%08x\n", _d); \
742 /******************************************************************************
744 *****************************************************************************/
745 #define nv50_wndw(p) container_of((p), struct nv50_wndw, plane)
748 const struct nv50_wndw_func
*func
;
749 struct nv50_dmac
*dmac
;
751 struct drm_plane plane
;
753 struct nvif_notify notify
;
759 struct nv50_wndw_func
{
760 void *(*dtor
)(struct nv50_wndw
*);
761 int (*acquire
)(struct nv50_wndw
*, struct nv50_wndw_atom
*asyw
,
762 struct nv50_head_atom
*asyh
);
763 void (*release
)(struct nv50_wndw
*, struct nv50_wndw_atom
*asyw
,
764 struct nv50_head_atom
*asyh
);
765 void (*prepare
)(struct nv50_wndw
*, struct nv50_head_atom
*asyh
,
766 struct nv50_wndw_atom
*asyw
);
768 void (*sema_set
)(struct nv50_wndw
*, struct nv50_wndw_atom
*);
769 void (*sema_clr
)(struct nv50_wndw
*);
770 void (*ntfy_set
)(struct nv50_wndw
*, struct nv50_wndw_atom
*);
771 void (*ntfy_clr
)(struct nv50_wndw
*);
772 int (*ntfy_wait_begun
)(struct nv50_wndw
*, struct nv50_wndw_atom
*);
773 void (*image_set
)(struct nv50_wndw
*, struct nv50_wndw_atom
*);
774 void (*image_clr
)(struct nv50_wndw
*);
775 void (*lut
)(struct nv50_wndw
*, struct nv50_wndw_atom
*);
776 void (*point
)(struct nv50_wndw
*, struct nv50_wndw_atom
*);
778 u32 (*update
)(struct nv50_wndw
*, u32 interlock
);
782 nv50_wndw_wait_armed(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
)
785 return wndw
->func
->ntfy_wait_begun(wndw
, asyw
);
790 nv50_wndw_flush_clr(struct nv50_wndw
*wndw
, u32 interlock
, bool flush
,
791 struct nv50_wndw_atom
*asyw
)
793 if (asyw
->clr
.sema
&& (!asyw
->set
.sema
|| flush
))
794 wndw
->func
->sema_clr(wndw
);
795 if (asyw
->clr
.ntfy
&& (!asyw
->set
.ntfy
|| flush
))
796 wndw
->func
->ntfy_clr(wndw
);
797 if (asyw
->clr
.image
&& (!asyw
->set
.image
|| flush
))
798 wndw
->func
->image_clr(wndw
);
800 return flush
? wndw
->func
->update(wndw
, interlock
) : 0;
804 nv50_wndw_flush_set(struct nv50_wndw
*wndw
, u32 interlock
,
805 struct nv50_wndw_atom
*asyw
)
808 asyw
->image
.mode
= 0;
809 asyw
->image
.interval
= 1;
812 if (asyw
->set
.sema
) wndw
->func
->sema_set (wndw
, asyw
);
813 if (asyw
->set
.ntfy
) wndw
->func
->ntfy_set (wndw
, asyw
);
814 if (asyw
->set
.image
) wndw
->func
->image_set(wndw
, asyw
);
815 if (asyw
->set
.lut
) wndw
->func
->lut (wndw
, asyw
);
816 if (asyw
->set
.point
) wndw
->func
->point (wndw
, asyw
);
818 return wndw
->func
->update(wndw
, interlock
);
822 nv50_wndw_atomic_check_release(struct nv50_wndw
*wndw
,
823 struct nv50_wndw_atom
*asyw
,
824 struct nv50_head_atom
*asyh
)
826 struct nouveau_drm
*drm
= nouveau_drm(wndw
->plane
.dev
);
827 NV_ATOMIC(drm
, "%s release\n", wndw
->plane
.name
);
828 wndw
->func
->release(wndw
, asyw
, asyh
);
829 asyw
->ntfy
.handle
= 0;
830 asyw
->sema
.handle
= 0;
834 nv50_wndw_atomic_check_acquire(struct nv50_wndw
*wndw
,
835 struct nv50_wndw_atom
*asyw
,
836 struct nv50_head_atom
*asyh
)
838 struct nouveau_framebuffer
*fb
= nouveau_framebuffer(asyw
->state
.fb
);
839 struct nouveau_drm
*drm
= nouveau_drm(wndw
->plane
.dev
);
842 NV_ATOMIC(drm
, "%s acquire\n", wndw
->plane
.name
);
845 asyw
->clip
.x2
= asyh
->state
.mode
.hdisplay
;
846 asyw
->clip
.y2
= asyh
->state
.mode
.vdisplay
;
848 asyw
->image
.w
= fb
->base
.width
;
849 asyw
->image
.h
= fb
->base
.height
;
850 asyw
->image
.kind
= (fb
->nvbo
->tile_flags
& 0x0000ff00) >> 8;
852 if (asyh
->state
.pageflip_flags
& DRM_MODE_PAGE_FLIP_ASYNC
)
857 if (asyw
->image
.kind
) {
858 asyw
->image
.layout
= 0;
859 if (drm
->client
.device
.info
.chipset
>= 0xc0)
860 asyw
->image
.block
= fb
->nvbo
->tile_mode
>> 4;
862 asyw
->image
.block
= fb
->nvbo
->tile_mode
;
863 asyw
->image
.pitch
= (fb
->base
.pitches
[0] / 4) << 4;
865 asyw
->image
.layout
= 1;
866 asyw
->image
.block
= 0;
867 asyw
->image
.pitch
= fb
->base
.pitches
[0];
870 ret
= wndw
->func
->acquire(wndw
, asyw
, asyh
);
874 if (asyw
->set
.image
) {
875 if (!(asyw
->image
.mode
= asyw
->interval
? 0 : 1))
876 asyw
->image
.interval
= asyw
->interval
;
878 asyw
->image
.interval
= 0;
885 nv50_wndw_atomic_check(struct drm_plane
*plane
, struct drm_plane_state
*state
)
887 struct nouveau_drm
*drm
= nouveau_drm(plane
->dev
);
888 struct nv50_wndw
*wndw
= nv50_wndw(plane
);
889 struct nv50_wndw_atom
*armw
= nv50_wndw_atom(wndw
->plane
.state
);
890 struct nv50_wndw_atom
*asyw
= nv50_wndw_atom(state
);
891 struct nv50_head_atom
*harm
= NULL
, *asyh
= NULL
;
892 bool varm
= false, asyv
= false, asym
= false;
895 NV_ATOMIC(drm
, "%s atomic_check\n", plane
->name
);
896 if (asyw
->state
.crtc
) {
897 asyh
= nv50_head_atom_get(asyw
->state
.state
, asyw
->state
.crtc
);
899 return PTR_ERR(asyh
);
900 asym
= drm_atomic_crtc_needs_modeset(&asyh
->state
);
901 asyv
= asyh
->state
.active
;
904 if (armw
->state
.crtc
) {
905 harm
= nv50_head_atom_get(asyw
->state
.state
, armw
->state
.crtc
);
907 return PTR_ERR(harm
);
908 varm
= harm
->state
.crtc
->state
->active
;
912 asyw
->point
.x
= asyw
->state
.crtc_x
;
913 asyw
->point
.y
= asyw
->state
.crtc_y
;
914 if (memcmp(&armw
->point
, &asyw
->point
, sizeof(asyw
->point
)))
915 asyw
->set
.point
= true;
917 ret
= nv50_wndw_atomic_check_acquire(wndw
, asyw
, asyh
);
922 nv50_wndw_atomic_check_release(wndw
, asyw
, harm
);
928 asyw
->clr
.ntfy
= armw
->ntfy
.handle
!= 0;
929 asyw
->clr
.sema
= armw
->sema
.handle
!= 0;
930 if (wndw
->func
->image_clr
)
931 asyw
->clr
.image
= armw
->image
.handle
!= 0;
932 asyw
->set
.lut
= wndw
->func
->lut
&& asyv
;
939 nv50_wndw_cleanup_fb(struct drm_plane
*plane
, struct drm_plane_state
*old_state
)
941 struct nouveau_framebuffer
*fb
= nouveau_framebuffer(old_state
->fb
);
942 struct nouveau_drm
*drm
= nouveau_drm(plane
->dev
);
944 NV_ATOMIC(drm
, "%s cleanup: %p\n", plane
->name
, old_state
->fb
);
948 nouveau_bo_unpin(fb
->nvbo
);
952 nv50_wndw_prepare_fb(struct drm_plane
*plane
, struct drm_plane_state
*state
)
954 struct nouveau_framebuffer
*fb
= nouveau_framebuffer(state
->fb
);
955 struct nouveau_drm
*drm
= nouveau_drm(plane
->dev
);
956 struct nv50_wndw
*wndw
= nv50_wndw(plane
);
957 struct nv50_wndw_atom
*asyw
= nv50_wndw_atom(state
);
958 struct nv50_head_atom
*asyh
;
959 struct nv50_dmac_ctxdma
*ctxdma
;
962 NV_ATOMIC(drm
, "%s prepare: %p\n", plane
->name
, state
->fb
);
966 ret
= nouveau_bo_pin(fb
->nvbo
, TTM_PL_FLAG_VRAM
, true);
970 ctxdma
= nv50_dmac_ctxdma_new(wndw
->dmac
, fb
);
971 if (IS_ERR(ctxdma
)) {
972 nouveau_bo_unpin(fb
->nvbo
);
973 return PTR_ERR(ctxdma
);
976 asyw
->state
.fence
= reservation_object_get_excl_rcu(fb
->nvbo
->bo
.resv
);
977 asyw
->image
.handle
= ctxdma
->object
.handle
;
978 asyw
->image
.offset
= fb
->nvbo
->bo
.offset
;
980 if (wndw
->func
->prepare
) {
981 asyh
= nv50_head_atom_get(asyw
->state
.state
, asyw
->state
.crtc
);
983 return PTR_ERR(asyh
);
985 wndw
->func
->prepare(wndw
, asyh
, asyw
);
991 static const struct drm_plane_helper_funcs
993 .prepare_fb
= nv50_wndw_prepare_fb
,
994 .cleanup_fb
= nv50_wndw_cleanup_fb
,
995 .atomic_check
= nv50_wndw_atomic_check
,
999 nv50_wndw_atomic_destroy_state(struct drm_plane
*plane
,
1000 struct drm_plane_state
*state
)
1002 struct nv50_wndw_atom
*asyw
= nv50_wndw_atom(state
);
1003 __drm_atomic_helper_plane_destroy_state(&asyw
->state
);
1007 static struct drm_plane_state
*
1008 nv50_wndw_atomic_duplicate_state(struct drm_plane
*plane
)
1010 struct nv50_wndw_atom
*armw
= nv50_wndw_atom(plane
->state
);
1011 struct nv50_wndw_atom
*asyw
;
1012 if (!(asyw
= kmalloc(sizeof(*asyw
), GFP_KERNEL
)))
1014 __drm_atomic_helper_plane_duplicate_state(plane
, &asyw
->state
);
1016 asyw
->sema
= armw
->sema
;
1017 asyw
->ntfy
= armw
->ntfy
;
1018 asyw
->image
= armw
->image
;
1019 asyw
->point
= armw
->point
;
1020 asyw
->lut
= armw
->lut
;
1023 return &asyw
->state
;
1027 nv50_wndw_reset(struct drm_plane
*plane
)
1029 struct nv50_wndw_atom
*asyw
;
1031 if (WARN_ON(!(asyw
= kzalloc(sizeof(*asyw
), GFP_KERNEL
))))
1035 plane
->funcs
->atomic_destroy_state(plane
, plane
->state
);
1036 plane
->state
= &asyw
->state
;
1037 plane
->state
->plane
= plane
;
1038 plane
->state
->rotation
= DRM_MODE_ROTATE_0
;
1042 nv50_wndw_destroy(struct drm_plane
*plane
)
1044 struct nv50_wndw
*wndw
= nv50_wndw(plane
);
1046 nvif_notify_fini(&wndw
->notify
);
1047 data
= wndw
->func
->dtor(wndw
);
1048 drm_plane_cleanup(&wndw
->plane
);
1052 static const struct drm_plane_funcs
1054 .update_plane
= drm_atomic_helper_update_plane
,
1055 .disable_plane
= drm_atomic_helper_disable_plane
,
1056 .destroy
= nv50_wndw_destroy
,
1057 .reset
= nv50_wndw_reset
,
1058 .set_property
= drm_atomic_helper_plane_set_property
,
1059 .atomic_duplicate_state
= nv50_wndw_atomic_duplicate_state
,
1060 .atomic_destroy_state
= nv50_wndw_atomic_destroy_state
,
1064 nv50_wndw_fini(struct nv50_wndw
*wndw
)
1066 nvif_notify_put(&wndw
->notify
);
1070 nv50_wndw_init(struct nv50_wndw
*wndw
)
1072 nvif_notify_get(&wndw
->notify
);
1076 nv50_wndw_ctor(const struct nv50_wndw_func
*func
, struct drm_device
*dev
,
1077 enum drm_plane_type type
, const char *name
, int index
,
1078 struct nv50_dmac
*dmac
, const u32
*format
, int nformat
,
1079 struct nv50_wndw
*wndw
)
1086 ret
= drm_universal_plane_init(dev
, &wndw
->plane
, 0, &nv50_wndw
,
1087 format
, nformat
, NULL
,
1088 type
, "%s-%d", name
, index
);
1092 drm_plane_helper_add(&wndw
->plane
, &nv50_wndw_helper
);
1096 /******************************************************************************
1098 *****************************************************************************/
1099 #define nv50_curs(p) container_of((p), struct nv50_curs, wndw)
1102 struct nv50_wndw wndw
;
1103 struct nvif_object chan
;
1107 nv50_curs_update(struct nv50_wndw
*wndw
, u32 interlock
)
1109 struct nv50_curs
*curs
= nv50_curs(wndw
);
1110 nvif_wr32(&curs
->chan
, 0x0080, 0x00000000);
1115 nv50_curs_point(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
)
1117 struct nv50_curs
*curs
= nv50_curs(wndw
);
1118 nvif_wr32(&curs
->chan
, 0x0084, (asyw
->point
.y
<< 16) | asyw
->point
.x
);
1122 nv50_curs_prepare(struct nv50_wndw
*wndw
, struct nv50_head_atom
*asyh
,
1123 struct nv50_wndw_atom
*asyw
)
1125 u32 handle
= nv50_disp(wndw
->plane
.dev
)->mast
.base
.vram
.handle
;
1126 u32 offset
= asyw
->image
.offset
;
1127 if (asyh
->curs
.handle
!= handle
|| asyh
->curs
.offset
!= offset
) {
1128 asyh
->curs
.handle
= handle
;
1129 asyh
->curs
.offset
= offset
;
1130 asyh
->set
.curs
= asyh
->curs
.visible
;
1135 nv50_curs_release(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
,
1136 struct nv50_head_atom
*asyh
)
1138 asyh
->curs
.visible
= false;
1142 nv50_curs_acquire(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
,
1143 struct nv50_head_atom
*asyh
)
1147 ret
= drm_plane_helper_check_state(&asyw
->state
, &asyw
->clip
,
1148 DRM_PLANE_HELPER_NO_SCALING
,
1149 DRM_PLANE_HELPER_NO_SCALING
,
1151 asyh
->curs
.visible
= asyw
->state
.visible
;
1152 if (ret
|| !asyh
->curs
.visible
)
1155 switch (asyw
->state
.fb
->width
) {
1156 case 32: asyh
->curs
.layout
= 0; break;
1157 case 64: asyh
->curs
.layout
= 1; break;
1162 if (asyw
->state
.fb
->width
!= asyw
->state
.fb
->height
)
1165 switch (asyw
->state
.fb
->format
->format
) {
1166 case DRM_FORMAT_ARGB8888
: asyh
->curs
.format
= 1; break;
1176 nv50_curs_dtor(struct nv50_wndw
*wndw
)
1178 struct nv50_curs
*curs
= nv50_curs(wndw
);
1179 nvif_object_fini(&curs
->chan
);
1184 nv50_curs_format
[] = {
1185 DRM_FORMAT_ARGB8888
,
1188 static const struct nv50_wndw_func
1190 .dtor
= nv50_curs_dtor
,
1191 .acquire
= nv50_curs_acquire
,
1192 .release
= nv50_curs_release
,
1193 .prepare
= nv50_curs_prepare
,
1194 .point
= nv50_curs_point
,
1195 .update
= nv50_curs_update
,
1199 nv50_curs_new(struct nouveau_drm
*drm
, struct nv50_head
*head
,
1200 struct nv50_curs
**pcurs
)
1202 static const struct nvif_mclass curses
[] = {
1203 { GK104_DISP_CURSOR
, 0 },
1204 { GF110_DISP_CURSOR
, 0 },
1205 { GT214_DISP_CURSOR
, 0 },
1206 { G82_DISP_CURSOR
, 0 },
1207 { NV50_DISP_CURSOR
, 0 },
1210 struct nv50_disp_cursor_v0 args
= {
1211 .head
= head
->base
.index
,
1213 struct nv50_disp
*disp
= nv50_disp(drm
->dev
);
1214 struct nv50_curs
*curs
;
1217 cid
= nvif_mclass(disp
->disp
, curses
);
1219 NV_ERROR(drm
, "No supported cursor immediate class\n");
1223 if (!(curs
= *pcurs
= kzalloc(sizeof(*curs
), GFP_KERNEL
)))
1226 ret
= nv50_wndw_ctor(&nv50_curs
, drm
->dev
, DRM_PLANE_TYPE_CURSOR
,
1227 "curs", head
->base
.index
, &disp
->mast
.base
,
1228 nv50_curs_format
, ARRAY_SIZE(nv50_curs_format
),
1235 ret
= nvif_object_init(disp
->disp
, 0, curses
[cid
].oclass
, &args
,
1236 sizeof(args
), &curs
->chan
);
1238 NV_ERROR(drm
, "curs%04x allocation failed: %d\n",
1239 curses
[cid
].oclass
, ret
);
1246 /******************************************************************************
1248 *****************************************************************************/
1249 #define nv50_base(p) container_of((p), struct nv50_base, wndw)
1252 struct nv50_wndw wndw
;
1253 struct nv50_sync chan
;
1258 nv50_base_notify(struct nvif_notify
*notify
)
1260 return NVIF_NOTIFY_KEEP
;
1264 nv50_base_lut(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
)
1266 struct nv50_base
*base
= nv50_base(wndw
);
1268 if ((push
= evo_wait(&base
->chan
, 2))) {
1269 evo_mthd(push
, 0x00e0, 1);
1270 evo_data(push
, asyw
->lut
.enable
<< 30);
1271 evo_kick(push
, &base
->chan
);
1276 nv50_base_image_clr(struct nv50_wndw
*wndw
)
1278 struct nv50_base
*base
= nv50_base(wndw
);
1280 if ((push
= evo_wait(&base
->chan
, 4))) {
1281 evo_mthd(push
, 0x0084, 1);
1282 evo_data(push
, 0x00000000);
1283 evo_mthd(push
, 0x00c0, 1);
1284 evo_data(push
, 0x00000000);
1285 evo_kick(push
, &base
->chan
);
1290 nv50_base_image_set(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
)
1292 struct nv50_base
*base
= nv50_base(wndw
);
1293 const s32 oclass
= base
->chan
.base
.base
.user
.oclass
;
1295 if ((push
= evo_wait(&base
->chan
, 10))) {
1296 evo_mthd(push
, 0x0084, 1);
1297 evo_data(push
, (asyw
->image
.mode
<< 8) |
1298 (asyw
->image
.interval
<< 4));
1299 evo_mthd(push
, 0x00c0, 1);
1300 evo_data(push
, asyw
->image
.handle
);
1301 if (oclass
< G82_DISP_BASE_CHANNEL_DMA
) {
1302 evo_mthd(push
, 0x0800, 5);
1303 evo_data(push
, asyw
->image
.offset
>> 8);
1304 evo_data(push
, 0x00000000);
1305 evo_data(push
, (asyw
->image
.h
<< 16) | asyw
->image
.w
);
1306 evo_data(push
, (asyw
->image
.layout
<< 20) |
1309 evo_data(push
, (asyw
->image
.kind
<< 16) |
1310 (asyw
->image
.format
<< 8));
1312 if (oclass
< GF110_DISP_BASE_CHANNEL_DMA
) {
1313 evo_mthd(push
, 0x0800, 5);
1314 evo_data(push
, asyw
->image
.offset
>> 8);
1315 evo_data(push
, 0x00000000);
1316 evo_data(push
, (asyw
->image
.h
<< 16) | asyw
->image
.w
);
1317 evo_data(push
, (asyw
->image
.layout
<< 20) |
1320 evo_data(push
, asyw
->image
.format
<< 8);
1322 evo_mthd(push
, 0x0400, 5);
1323 evo_data(push
, asyw
->image
.offset
>> 8);
1324 evo_data(push
, 0x00000000);
1325 evo_data(push
, (asyw
->image
.h
<< 16) | asyw
->image
.w
);
1326 evo_data(push
, (asyw
->image
.layout
<< 24) |
1329 evo_data(push
, asyw
->image
.format
<< 8);
1331 evo_kick(push
, &base
->chan
);
1336 nv50_base_ntfy_clr(struct nv50_wndw
*wndw
)
1338 struct nv50_base
*base
= nv50_base(wndw
);
1340 if ((push
= evo_wait(&base
->chan
, 2))) {
1341 evo_mthd(push
, 0x00a4, 1);
1342 evo_data(push
, 0x00000000);
1343 evo_kick(push
, &base
->chan
);
1348 nv50_base_ntfy_set(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
)
1350 struct nv50_base
*base
= nv50_base(wndw
);
1352 if ((push
= evo_wait(&base
->chan
, 3))) {
1353 evo_mthd(push
, 0x00a0, 2);
1354 evo_data(push
, (asyw
->ntfy
.awaken
<< 30) | asyw
->ntfy
.offset
);
1355 evo_data(push
, asyw
->ntfy
.handle
);
1356 evo_kick(push
, &base
->chan
);
1361 nv50_base_sema_clr(struct nv50_wndw
*wndw
)
1363 struct nv50_base
*base
= nv50_base(wndw
);
1365 if ((push
= evo_wait(&base
->chan
, 2))) {
1366 evo_mthd(push
, 0x0094, 1);
1367 evo_data(push
, 0x00000000);
1368 evo_kick(push
, &base
->chan
);
1373 nv50_base_sema_set(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
)
1375 struct nv50_base
*base
= nv50_base(wndw
);
1377 if ((push
= evo_wait(&base
->chan
, 5))) {
1378 evo_mthd(push
, 0x0088, 4);
1379 evo_data(push
, asyw
->sema
.offset
);
1380 evo_data(push
, asyw
->sema
.acquire
);
1381 evo_data(push
, asyw
->sema
.release
);
1382 evo_data(push
, asyw
->sema
.handle
);
1383 evo_kick(push
, &base
->chan
);
1388 nv50_base_update(struct nv50_wndw
*wndw
, u32 interlock
)
1390 struct nv50_base
*base
= nv50_base(wndw
);
1393 if (!(push
= evo_wait(&base
->chan
, 2)))
1395 evo_mthd(push
, 0x0080, 1);
1396 evo_data(push
, interlock
);
1397 evo_kick(push
, &base
->chan
);
1399 if (base
->chan
.base
.base
.user
.oclass
< GF110_DISP_BASE_CHANNEL_DMA
)
1400 return interlock
? 2 << (base
->id
* 8) : 0;
1401 return interlock
? 2 << (base
->id
* 4) : 0;
1405 nv50_base_ntfy_wait_begun(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
)
1407 struct nouveau_drm
*drm
= nouveau_drm(wndw
->plane
.dev
);
1408 struct nv50_disp
*disp
= nv50_disp(wndw
->plane
.dev
);
1409 if (nvif_msec(&drm
->client
.device
, 2000ULL,
1410 u32 data
= nouveau_bo_rd32(disp
->sync
, asyw
->ntfy
.offset
/ 4);
1411 if ((data
& 0xc0000000) == 0x40000000)
1420 nv50_base_release(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
,
1421 struct nv50_head_atom
*asyh
)
1427 nv50_base_acquire(struct nv50_wndw
*wndw
, struct nv50_wndw_atom
*asyw
,
1428 struct nv50_head_atom
*asyh
)
1430 const struct drm_framebuffer
*fb
= asyw
->state
.fb
;
1433 if (!fb
->format
->depth
)
1436 ret
= drm_plane_helper_check_state(&asyw
->state
, &asyw
->clip
,
1437 DRM_PLANE_HELPER_NO_SCALING
,
1438 DRM_PLANE_HELPER_NO_SCALING
,
1443 asyh
->base
.depth
= fb
->format
->depth
;
1444 asyh
->base
.cpp
= fb
->format
->cpp
[0];
1445 asyh
->base
.x
= asyw
->state
.src
.x1
>> 16;
1446 asyh
->base
.y
= asyw
->state
.src
.y1
>> 16;
1447 asyh
->base
.w
= asyw
->state
.fb
->width
;
1448 asyh
->base
.h
= asyw
->state
.fb
->height
;
1450 switch (fb
->format
->format
) {
1451 case DRM_FORMAT_C8
: asyw
->image
.format
= 0x1e; break;
1452 case DRM_FORMAT_RGB565
: asyw
->image
.format
= 0xe8; break;
1453 case DRM_FORMAT_XRGB1555
:
1454 case DRM_FORMAT_ARGB1555
: asyw
->image
.format
= 0xe9; break;
1455 case DRM_FORMAT_XRGB8888
:
1456 case DRM_FORMAT_ARGB8888
: asyw
->image
.format
= 0xcf; break;
1457 case DRM_FORMAT_XBGR2101010
:
1458 case DRM_FORMAT_ABGR2101010
: asyw
->image
.format
= 0xd1; break;
1459 case DRM_FORMAT_XBGR8888
:
1460 case DRM_FORMAT_ABGR8888
: asyw
->image
.format
= 0xd5; break;
1466 asyw
->lut
.enable
= 1;
1467 asyw
->set
.image
= true;
1472 nv50_base_dtor(struct nv50_wndw
*wndw
)
1474 struct nv50_disp
*disp
= nv50_disp(wndw
->plane
.dev
);
1475 struct nv50_base
*base
= nv50_base(wndw
);
1476 nv50_dmac_destroy(&base
->chan
.base
, disp
->disp
);
1481 nv50_base_format
[] = {
1484 DRM_FORMAT_XRGB1555
,
1485 DRM_FORMAT_ARGB1555
,
1486 DRM_FORMAT_XRGB8888
,
1487 DRM_FORMAT_ARGB8888
,
1488 DRM_FORMAT_XBGR2101010
,
1489 DRM_FORMAT_ABGR2101010
,
1490 DRM_FORMAT_XBGR8888
,
1491 DRM_FORMAT_ABGR8888
,
1494 static const struct nv50_wndw_func
1496 .dtor
= nv50_base_dtor
,
1497 .acquire
= nv50_base_acquire
,
1498 .release
= nv50_base_release
,
1499 .sema_set
= nv50_base_sema_set
,
1500 .sema_clr
= nv50_base_sema_clr
,
1501 .ntfy_set
= nv50_base_ntfy_set
,
1502 .ntfy_clr
= nv50_base_ntfy_clr
,
1503 .ntfy_wait_begun
= nv50_base_ntfy_wait_begun
,
1504 .image_set
= nv50_base_image_set
,
1505 .image_clr
= nv50_base_image_clr
,
1506 .lut
= nv50_base_lut
,
1507 .update
= nv50_base_update
,
1511 nv50_base_new(struct nouveau_drm
*drm
, struct nv50_head
*head
,
1512 struct nv50_base
**pbase
)
1514 struct nv50_disp
*disp
= nv50_disp(drm
->dev
);
1515 struct nv50_base
*base
;
1518 if (!(base
= *pbase
= kzalloc(sizeof(*base
), GFP_KERNEL
)))
1520 base
->id
= head
->base
.index
;
1521 base
->wndw
.ntfy
= EVO_FLIP_NTFY0(base
->id
);
1522 base
->wndw
.sema
= EVO_FLIP_SEM0(base
->id
);
1523 base
->wndw
.data
= 0x00000000;
1525 ret
= nv50_wndw_ctor(&nv50_base
, drm
->dev
, DRM_PLANE_TYPE_PRIMARY
,
1526 "base", base
->id
, &base
->chan
.base
,
1527 nv50_base_format
, ARRAY_SIZE(nv50_base_format
),
1534 ret
= nv50_base_create(&drm
->client
.device
, disp
->disp
, base
->id
,
1535 disp
->sync
->bo
.offset
, &base
->chan
);
1539 return nvif_notify_init(&base
->chan
.base
.base
.user
, nv50_base_notify
,
1541 NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT
,
1542 &(struct nvif_notify_uevent_req
) {},
1543 sizeof(struct nvif_notify_uevent_req
),
1544 sizeof(struct nvif_notify_uevent_rep
),
1545 &base
->wndw
.notify
);
1548 /******************************************************************************
1550 *****************************************************************************/
1552 nv50_head_procamp(struct nv50_head
*head
, struct nv50_head_atom
*asyh
)
1554 struct nv50_dmac
*core
= &nv50_disp(head
->base
.base
.dev
)->mast
.base
;
1556 if ((push
= evo_wait(core
, 2))) {
1557 if (core
->base
.user
.oclass
< GF110_DISP_CORE_CHANNEL_DMA
)
1558 evo_mthd(push
, 0x08a8 + (head
->base
.index
* 0x400), 1);
1560 evo_mthd(push
, 0x0498 + (head
->base
.index
* 0x300), 1);
1561 evo_data(push
, (asyh
->procamp
.sat
.sin
<< 20) |
1562 (asyh
->procamp
.sat
.cos
<< 8));
1563 evo_kick(push
, core
);
1568 nv50_head_dither(struct nv50_head
*head
, struct nv50_head_atom
*asyh
)
1570 struct nv50_dmac
*core
= &nv50_disp(head
->base
.base
.dev
)->mast
.base
;
1572 if ((push
= evo_wait(core
, 2))) {
1573 if (core
->base
.user
.oclass
< GF110_DISP_CORE_CHANNEL_DMA
)
1574 evo_mthd(push
, 0x08a0 + (head
->base
.index
* 0x0400), 1);
1576 if (core
->base
.user
.oclass
< GK104_DISP_CORE_CHANNEL_DMA
)
1577 evo_mthd(push
, 0x0490 + (head
->base
.index
* 0x0300), 1);
1579 evo_mthd(push
, 0x04a0 + (head
->base
.index
* 0x0300), 1);
1580 evo_data(push
, (asyh
->dither
.mode
<< 3) |
1581 (asyh
->dither
.bits
<< 1) |
1582 asyh
->dither
.enable
);
1583 evo_kick(push
, core
);
1588 nv50_head_ovly(struct nv50_head
*head
, struct nv50_head_atom
*asyh
)
1590 struct nv50_dmac
*core
= &nv50_disp(head
->base
.base
.dev
)->mast
.base
;
1594 if (asyh
->base
.cpp
) {
1595 switch (asyh
->base
.cpp
) {
1596 case 8: bounds
|= 0x00000500; break;
1597 case 4: bounds
|= 0x00000300; break;
1598 case 2: bounds
|= 0x00000100; break;
1603 bounds
|= 0x00000001;
1606 if ((push
= evo_wait(core
, 2))) {
1607 if (core
->base
.user
.oclass
< GF110_DISP_CORE_CHANNEL_DMA
)
1608 evo_mthd(push
, 0x0904 + head
->base
.index
* 0x400, 1);
1610 evo_mthd(push
, 0x04d4 + head
->base
.index
* 0x300, 1);
1611 evo_data(push
, bounds
);
1612 evo_kick(push
, core
);
1617 nv50_head_base(struct nv50_head
*head
, struct nv50_head_atom
*asyh
)
1619 struct nv50_dmac
*core
= &nv50_disp(head
->base
.base
.dev
)->mast
.base
;
1623 if (asyh
->base
.cpp
) {
1624 switch (asyh
->base
.cpp
) {
1625 case 8: bounds
|= 0x00000500; break;
1626 case 4: bounds
|= 0x00000300; break;
1627 case 2: bounds
|= 0x00000100; break;
1628 case 1: bounds
|= 0x00000000; break;
1633 bounds
|= 0x00000001;
1636 if ((push
= evo_wait(core
, 2))) {
1637 if (core
->base
.user
.oclass
< GF110_DISP_CORE_CHANNEL_DMA
)
1638 evo_mthd(push
, 0x0900 + head
->base
.index
* 0x400, 1);
1640 evo_mthd(push
, 0x04d0 + head
->base
.index
* 0x300, 1);
1641 evo_data(push
, bounds
);
1642 evo_kick(push
, core
);
1647 nv50_head_curs_clr(struct nv50_head
*head
)
1649 struct nv50_dmac
*core
= &nv50_disp(head
->base
.base
.dev
)->mast
.base
;
1651 if ((push
= evo_wait(core
, 4))) {
1652 if (core
->base
.user
.oclass
< G82_DISP_CORE_CHANNEL_DMA
) {
1653 evo_mthd(push
, 0x0880 + head
->base
.index
* 0x400, 1);
1654 evo_data(push
, 0x05000000);
1656 if (core
->base
.user
.oclass
< GF110_DISP_CORE_CHANNEL_DMA
) {
1657 evo_mthd(push
, 0x0880 + head
->base
.index
* 0x400, 1);
1658 evo_data(push
, 0x05000000);
1659 evo_mthd(push
, 0x089c + head
->base
.index
* 0x400, 1);
1660 evo_data(push
, 0x00000000);
1662 evo_mthd(push
, 0x0480 + head
->base
.index
* 0x300, 1);
1663 evo_data(push
, 0x05000000);
1664 evo_mthd(push
, 0x048c + head
->base
.index
* 0x300, 1);
1665 evo_data(push
, 0x00000000);
1667 evo_kick(push
, core
);
1672 nv50_head_curs_set(struct nv50_head
*head
, struct nv50_head_atom
*asyh
)
1674 struct nv50_dmac
*core
= &nv50_disp(head
->base
.base
.dev
)->mast
.base
;
1676 if ((push
= evo_wait(core
, 5))) {
1677 if (core
->base
.user
.oclass
< G82_DISP_BASE_CHANNEL_DMA
) {
1678 evo_mthd(push
, 0x0880 + head
->base
.index
* 0x400, 2);
1679 evo_data(push
, 0x80000000 | (asyh
->curs
.layout
<< 26) |
1680 (asyh
->curs
.format
<< 24));
1681 evo_data(push
, asyh
->curs
.offset
>> 8);
1683 if (core
->base
.user
.oclass
< GF110_DISP_BASE_CHANNEL_DMA
) {
1684 evo_mthd(push
, 0x0880 + head
->base
.index
* 0x400, 2);
1685 evo_data(push
, 0x80000000 | (asyh
->curs
.layout
<< 26) |
1686 (asyh
->curs
.format
<< 24));
1687 evo_data(push
, asyh
->curs
.offset
>> 8);
1688 evo_mthd(push
, 0x089c + head
->base
.index
* 0x400, 1);
1689 evo_data(push
, asyh
->curs
.handle
);
1691 evo_mthd(push
, 0x0480 + head
->base
.index
* 0x300, 2);
1692 evo_data(push
, 0x80000000 | (asyh
->curs
.layout
<< 26) |
1693 (asyh
->curs
.format
<< 24));
1694 evo_data(push
, asyh
->curs
.offset
>> 8);
1695 evo_mthd(push
, 0x048c + head
->base
.index
* 0x300, 1);
1696 evo_data(push
, asyh
->curs
.handle
);
1698 evo_kick(push
, core
);
1703 nv50_head_core_clr(struct nv50_head
*head
)
1705 struct nv50_dmac
*core
= &nv50_disp(head
->base
.base
.dev
)->mast
.base
;
1707 if ((push
= evo_wait(core
, 2))) {
1708 if (core
->base
.user
.oclass
< GF110_DISP_CORE_CHANNEL_DMA
)
1709 evo_mthd(push
, 0x0874 + head
->base
.index
* 0x400, 1);
1711 evo_mthd(push
, 0x0474 + head
->base
.index
* 0x300, 1);
1712 evo_data(push
, 0x00000000);
1713 evo_kick(push
, core
);
1718 nv50_head_core_set(struct nv50_head
*head
, struct nv50_head_atom
*asyh
)
1720 struct nv50_dmac
*core
= &nv50_disp(head
->base
.base
.dev
)->mast
.base
;
1722 if ((push
= evo_wait(core
, 9))) {
1723 if (core
->base
.user
.oclass
< G82_DISP_CORE_CHANNEL_DMA
) {
1724 evo_mthd(push
, 0x0860 + head
->base
.index
* 0x400, 1);
1725 evo_data(push
, asyh
->core
.offset
>> 8);
1726 evo_mthd(push
, 0x0868 + head
->base
.index
* 0x400, 4);
1727 evo_data(push
, (asyh
->core
.h
<< 16) | asyh
->core
.w
);
1728 evo_data(push
, asyh
->core
.layout
<< 20 |
1729 (asyh
->core
.pitch
>> 8) << 8 |
1731 evo_data(push
, asyh
->core
.kind
<< 16 |
1732 asyh
->core
.format
<< 8);
1733 evo_data(push
, asyh
->core
.handle
);
1734 evo_mthd(push
, 0x08c0 + head
->base
.index
* 0x400, 1);
1735 evo_data(push
, (asyh
->core
.y
<< 16) | asyh
->core
.x
);
1736 /* EVO will complain with INVALID_STATE if we have an
1737 * active cursor and (re)specify HeadSetContextDmaIso
1738 * without also updating HeadSetOffsetCursor.
1740 asyh
->set
.curs
= asyh
->curs
.visible
;
1742 if (core
->base
.user
.oclass
< GF110_DISP_CORE_CHANNEL_DMA
) {
1743 evo_mthd(push
, 0x0860 + head
->base
.index
* 0x400, 1);
1744 evo_data(push
, asyh
->core
.offset
>> 8);
1745 evo_mthd(push
, 0x0868 + head
->base
.index
* 0x400, 4);
1746 evo_data(push
, (asyh
->core
.h
<< 16) | asyh
->core
.w
);
1747 evo_data(push
, asyh
->core
.layout
<< 20 |
1748 (asyh
->core
.pitch
>> 8) << 8 |
1750 evo_data(push
, asyh
->core
.format
<< 8);
1751 evo_data(push
, asyh
->core
.handle
);
1752 evo_mthd(push
, 0x08c0 + head
->base
.index
* 0x400, 1);
1753 evo_data(push
, (asyh
->core
.y
<< 16) | asyh
->core
.x
);
1755 evo_mthd(push
, 0x0460 + head
->base
.index
* 0x300, 1);
1756 evo_data(push
, asyh
->core
.offset
>> 8);
1757 evo_mthd(push
, 0x0468 + head
->base
.index
* 0x300, 4);
1758 evo_data(push
, (asyh
->core
.h
<< 16) | asyh
->core
.w
);
1759 evo_data(push
, asyh
->core
.layout
<< 24 |
1760 (asyh
->core
.pitch
>> 8) << 8 |
1762 evo_data(push
, asyh
->core
.format
<< 8);
1763 evo_data(push
, asyh
->core
.handle
);
1764 evo_mthd(push
, 0x04b0 + head
->base
.index
* 0x300, 1);
1765 evo_data(push
, (asyh
->core
.y
<< 16) | asyh
->core
.x
);
1767 evo_kick(push
, core
);
1772 nv50_head_lut_clr(struct nv50_head
*head
)
1774 struct nv50_dmac
*core
= &nv50_disp(head
->base
.base
.dev
)->mast
.base
;
1776 if ((push
= evo_wait(core
, 4))) {
1777 if (core
->base
.user
.oclass
< G82_DISP_CORE_CHANNEL_DMA
) {
1778 evo_mthd(push
, 0x0840 + (head
->base
.index
* 0x400), 1);
1779 evo_data(push
, 0x40000000);
1781 if (core
->base
.user
.oclass
< GF110_DISP_CORE_CHANNEL_DMA
) {
1782 evo_mthd(push
, 0x0840 + (head
->base
.index
* 0x400), 1);
1783 evo_data(push
, 0x40000000);
1784 evo_mthd(push
, 0x085c + (head
->base
.index
* 0x400), 1);
1785 evo_data(push
, 0x00000000);
1787 evo_mthd(push
, 0x0440 + (head
->base
.index
* 0x300), 1);
1788 evo_data(push
, 0x03000000);
1789 evo_mthd(push
, 0x045c + (head
->base
.index
* 0x300), 1);
1790 evo_data(push
, 0x00000000);
1792 evo_kick(push
, core
);
1797 nv50_head_lut_set(struct nv50_head
*head
, struct nv50_head_atom
*asyh
)
1799 struct nv50_dmac
*core
= &nv50_disp(head
->base
.base
.dev
)->mast
.base
;
1801 if ((push
= evo_wait(core
, 7))) {
1802 if (core
->base
.user
.oclass
< G82_DISP_CORE_CHANNEL_DMA
) {
1803 evo_mthd(push
, 0x0840 + (head
->base
.index
* 0x400), 2);
1804 evo_data(push
, 0xc0000000);
1805 evo_data(push
, asyh
->lut
.offset
>> 8);
1807 if (core
->base
.user
.oclass
< GF110_DISP_CORE_CHANNEL_DMA
) {
1808 evo_mthd(push
, 0x0840 + (head
->base
.index
* 0x400), 2);
1809 evo_data(push
, 0xc0000000);
1810 evo_data(push
, asyh
->lut
.offset
>> 8);
1811 evo_mthd(push
, 0x085c + (head
->base
.index
* 0x400), 1);
1812 evo_data(push
, asyh
->lut
.handle
);
1814 evo_mthd(push
, 0x0440 + (head
->base
.index
* 0x300), 4);
1815 evo_data(push
, 0x83000000);
1816 evo_data(push
, asyh
->lut
.offset
>> 8);
1817 evo_data(push
, 0x00000000);
1818 evo_data(push
, 0x00000000);
1819 evo_mthd(push
, 0x045c + (head
->base
.index
* 0x300), 1);
1820 evo_data(push
, asyh
->lut
.handle
);
1822 evo_kick(push
, core
);
1827 nv50_head_mode(struct nv50_head
*head
, struct nv50_head_atom
*asyh
)
1829 struct nv50_dmac
*core
= &nv50_disp(head
->base
.base
.dev
)->mast
.base
;
1830 struct nv50_head_mode
*m
= &asyh
->mode
;
1832 if ((push
= evo_wait(core
, 14))) {
1833 if (core
->base
.user
.oclass
< GF110_DISP_CORE_CHANNEL_DMA
) {
1834 evo_mthd(push
, 0x0804 + (head
->base
.index
* 0x400), 2);
1835 evo_data(push
, 0x00800000 | m
->clock
);
1836 evo_data(push
, m
->interlace
? 0x00000002 : 0x00000000);
1837 evo_mthd(push
, 0x0810 + (head
->base
.index
* 0x400), 7);
1838 evo_data(push
, 0x00000000);
1839 evo_data(push
, (m
->v
.active
<< 16) | m
->h
.active
);
1840 evo_data(push
, (m
->v
.synce
<< 16) | m
->h
.synce
);
1841 evo_data(push
, (m
->v
.blanke
<< 16) | m
->h
.blanke
);
1842 evo_data(push
, (m
->v
.blanks
<< 16) | m
->h
.blanks
);
1843 evo_data(push
, (m
->v
.blank2e
<< 16) | m
->v
.blank2s
);
1844 evo_data(push
, asyh
->mode
.v
.blankus
);
1845 evo_mthd(push
, 0x082c + (head
->base
.index
* 0x400), 1);
1846 evo_data(push
, 0x00000000);
1848 evo_mthd(push
, 0x0410 + (head
->base
.index
* 0x300), 6);
1849 evo_data(push
, 0x00000000);
1850 evo_data(push
, (m
->v
.active
<< 16) | m
->h
.active
);
1851 evo_data(push
, (m
->v
.synce
<< 16) | m
->h
.synce
);
1852 evo_data(push
, (m
->v
.blanke
<< 16) | m
->h
.blanke
);
1853 evo_data(push
, (m
->v
.blanks
<< 16) | m
->h
.blanks
);
1854 evo_data(push
, (m
->v
.blank2e
<< 16) | m
->v
.blank2s
);
1855 evo_mthd(push
, 0x042c + (head
->base
.index
* 0x300), 2);
1856 evo_data(push
, 0x00000000); /* ??? */
1857 evo_data(push
, 0xffffff00);
1858 evo_mthd(push
, 0x0450 + (head
->base
.index
* 0x300), 3);
1859 evo_data(push
, m
->clock
* 1000);
1860 evo_data(push
, 0x00200000); /* ??? */
1861 evo_data(push
, m
->clock
* 1000);
1863 evo_kick(push
, core
);
1868 nv50_head_view(struct nv50_head
*head
, struct nv50_head_atom
*asyh
)
1870 struct nv50_dmac
*core
= &nv50_disp(head
->base
.base
.dev
)->mast
.base
;
1872 if ((push
= evo_wait(core
, 10))) {
1873 if (core
->base
.user
.oclass
< GF110_DISP_CORE_CHANNEL_DMA
) {
1874 evo_mthd(push
, 0x08a4 + (head
->base
.index
* 0x400), 1);
1875 evo_data(push
, 0x00000000);
1876 evo_mthd(push
, 0x08c8 + (head
->base
.index
* 0x400), 1);
1877 evo_data(push
, (asyh
->view
.iH
<< 16) | asyh
->view
.iW
);
1878 evo_mthd(push
, 0x08d8 + (head
->base
.index
* 0x400), 2);
1879 evo_data(push
, (asyh
->view
.oH
<< 16) | asyh
->view
.oW
);
1880 evo_data(push
, (asyh
->view
.oH
<< 16) | asyh
->view
.oW
);
1882 evo_mthd(push
, 0x0494 + (head
->base
.index
* 0x300), 1);
1883 evo_data(push
, 0x00000000);
1884 evo_mthd(push
, 0x04b8 + (head
->base
.index
* 0x300), 1);
1885 evo_data(push
, (asyh
->view
.iH
<< 16) | asyh
->view
.iW
);
1886 evo_mthd(push
, 0x04c0 + (head
->base
.index
* 0x300), 3);
1887 evo_data(push
, (asyh
->view
.oH
<< 16) | asyh
->view
.oW
);
1888 evo_data(push
, (asyh
->view
.oH
<< 16) | asyh
->view
.oW
);
1889 evo_data(push
, (asyh
->view
.oH
<< 16) | asyh
->view
.oW
);
1891 evo_kick(push
, core
);
1896 nv50_head_flush_clr(struct nv50_head
*head
, struct nv50_head_atom
*asyh
, bool y
)
1898 if (asyh
->clr
.core
&& (!asyh
->set
.core
|| y
))
1899 nv50_head_lut_clr(head
);
1900 if (asyh
->clr
.core
&& (!asyh
->set
.core
|| y
))
1901 nv50_head_core_clr(head
);
1902 if (asyh
->clr
.curs
&& (!asyh
->set
.curs
|| y
))
1903 nv50_head_curs_clr(head
);
1907 nv50_head_flush_set(struct nv50_head
*head
, struct nv50_head_atom
*asyh
)
1909 if (asyh
->set
.view
) nv50_head_view (head
, asyh
);
1910 if (asyh
->set
.mode
) nv50_head_mode (head
, asyh
);
1911 if (asyh
->set
.core
) nv50_head_lut_set (head
, asyh
);
1912 if (asyh
->set
.core
) nv50_head_core_set(head
, asyh
);
1913 if (asyh
->set
.curs
) nv50_head_curs_set(head
, asyh
);
1914 if (asyh
->set
.base
) nv50_head_base (head
, asyh
);
1915 if (asyh
->set
.ovly
) nv50_head_ovly (head
, asyh
);
1916 if (asyh
->set
.dither
) nv50_head_dither (head
, asyh
);
1917 if (asyh
->set
.procamp
) nv50_head_procamp (head
, asyh
);
1921 nv50_head_atomic_check_procamp(struct nv50_head_atom
*armh
,
1922 struct nv50_head_atom
*asyh
,
1923 struct nouveau_conn_atom
*asyc
)
1925 const int vib
= asyc
->procamp
.color_vibrance
- 100;
1926 const int hue
= asyc
->procamp
.vibrant_hue
- 90;
1927 const int adj
= (vib
> 0) ? 50 : 0;
1928 asyh
->procamp
.sat
.cos
= ((vib
* 2047 + adj
) / 100) & 0xfff;
1929 asyh
->procamp
.sat
.sin
= ((hue
* 2047) / 100) & 0xfff;
1930 asyh
->set
.procamp
= true;
1934 nv50_head_atomic_check_dither(struct nv50_head_atom
*armh
,
1935 struct nv50_head_atom
*asyh
,
1936 struct nouveau_conn_atom
*asyc
)
1938 struct drm_connector
*connector
= asyc
->state
.connector
;
1941 if (asyc
->dither
.mode
== DITHERING_MODE_AUTO
) {
1942 if (asyh
->base
.depth
> connector
->display_info
.bpc
* 3)
1943 mode
= DITHERING_MODE_DYNAMIC2X2
;
1945 mode
= asyc
->dither
.mode
;
1948 if (asyc
->dither
.depth
== DITHERING_DEPTH_AUTO
) {
1949 if (connector
->display_info
.bpc
>= 8)
1950 mode
|= DITHERING_DEPTH_8BPC
;
1952 mode
|= asyc
->dither
.depth
;
1955 asyh
->dither
.enable
= mode
;
1956 asyh
->dither
.bits
= mode
>> 1;
1957 asyh
->dither
.mode
= mode
>> 3;
1958 asyh
->set
.dither
= true;
1962 nv50_head_atomic_check_view(struct nv50_head_atom
*armh
,
1963 struct nv50_head_atom
*asyh
,
1964 struct nouveau_conn_atom
*asyc
)
1966 struct drm_connector
*connector
= asyc
->state
.connector
;
1967 struct drm_display_mode
*omode
= &asyh
->state
.adjusted_mode
;
1968 struct drm_display_mode
*umode
= &asyh
->state
.mode
;
1969 int mode
= asyc
->scaler
.mode
;
1971 int umode_vdisplay
, omode_hdisplay
, omode_vdisplay
;
1973 if (connector
->edid_blob_ptr
)
1974 edid
= (struct edid
*)connector
->edid_blob_ptr
->data
;
1978 if (!asyc
->scaler
.full
) {
1979 if (mode
== DRM_MODE_SCALE_NONE
)
1982 /* Non-EDID LVDS/eDP mode. */
1983 mode
= DRM_MODE_SCALE_FULLSCREEN
;
1986 /* For the user-specified mode, we must ignore doublescan and
1987 * the like, but honor frame packing.
1989 umode_vdisplay
= umode
->vdisplay
;
1990 if ((umode
->flags
& DRM_MODE_FLAG_3D_MASK
) == DRM_MODE_FLAG_3D_FRAME_PACKING
)
1991 umode_vdisplay
+= umode
->vtotal
;
1992 asyh
->view
.iW
= umode
->hdisplay
;
1993 asyh
->view
.iH
= umode_vdisplay
;
1994 /* For the output mode, we can just use the stock helper. */
1995 drm_mode_get_hv_timing(omode
, &omode_hdisplay
, &omode_vdisplay
);
1996 asyh
->view
.oW
= omode_hdisplay
;
1997 asyh
->view
.oH
= omode_vdisplay
;
1999 /* Add overscan compensation if necessary, will keep the aspect
2000 * ratio the same as the backend mode unless overridden by the
2001 * user setting both hborder and vborder properties.
2003 if ((asyc
->scaler
.underscan
.mode
== UNDERSCAN_ON
||
2004 (asyc
->scaler
.underscan
.mode
== UNDERSCAN_AUTO
&&
2005 drm_detect_hdmi_monitor(edid
)))) {
2006 u32 bX
= asyc
->scaler
.underscan
.hborder
;
2007 u32 bY
= asyc
->scaler
.underscan
.vborder
;
2008 u32 r
= (asyh
->view
.oH
<< 19) / asyh
->view
.oW
;
2011 asyh
->view
.oW
-= (bX
* 2);
2012 if (bY
) asyh
->view
.oH
-= (bY
* 2);
2013 else asyh
->view
.oH
= ((asyh
->view
.oW
* r
) + (r
/ 2)) >> 19;
2015 asyh
->view
.oW
-= (asyh
->view
.oW
>> 4) + 32;
2016 if (bY
) asyh
->view
.oH
-= (bY
* 2);
2017 else asyh
->view
.oH
= ((asyh
->view
.oW
* r
) + (r
/ 2)) >> 19;
2021 /* Handle CENTER/ASPECT scaling, taking into account the areas
2022 * removed already for overscan compensation.
2025 case DRM_MODE_SCALE_CENTER
:
2026 asyh
->view
.oW
= min((u16
)umode
->hdisplay
, asyh
->view
.oW
);
2027 asyh
->view
.oH
= min((u16
)umode_vdisplay
, asyh
->view
.oH
);
2029 case DRM_MODE_SCALE_ASPECT
:
2030 if (asyh
->view
.oH
< asyh
->view
.oW
) {
2031 u32 r
= (asyh
->view
.iW
<< 19) / asyh
->view
.iH
;
2032 asyh
->view
.oW
= ((asyh
->view
.oH
* r
) + (r
/ 2)) >> 19;
2034 u32 r
= (asyh
->view
.iH
<< 19) / asyh
->view
.iW
;
2035 asyh
->view
.oH
= ((asyh
->view
.oW
* r
) + (r
/ 2)) >> 19;
2042 asyh
->set
.view
= true;
2046 nv50_head_atomic_check_mode(struct nv50_head
*head
, struct nv50_head_atom
*asyh
)
2048 struct drm_display_mode
*mode
= &asyh
->state
.adjusted_mode
;
2049 struct nv50_head_mode
*m
= &asyh
->mode
;
2052 drm_mode_set_crtcinfo(mode
, CRTC_INTERLACE_HALVE_V
| CRTC_STEREO_DOUBLE
);
2055 * DRM modes are defined in terms of a repeating interval
2056 * starting with the active display area. The hardware modes
2057 * are defined in terms of a repeating interval starting one
2058 * unit (pixel or line) into the sync pulse. So, add bias.
2061 m
->h
.active
= mode
->crtc_htotal
;
2062 m
->h
.synce
= mode
->crtc_hsync_end
- mode
->crtc_hsync_start
- 1;
2063 m
->h
.blanke
= mode
->crtc_hblank_end
- mode
->crtc_hsync_start
- 1;
2064 m
->h
.blanks
= m
->h
.blanke
+ mode
->crtc_hdisplay
;
2066 m
->v
.active
= mode
->crtc_vtotal
;
2067 m
->v
.synce
= mode
->crtc_vsync_end
- mode
->crtc_vsync_start
- 1;
2068 m
->v
.blanke
= mode
->crtc_vblank_end
- mode
->crtc_vsync_start
- 1;
2069 m
->v
.blanks
= m
->v
.blanke
+ mode
->crtc_vdisplay
;
2071 /*XXX: Safe underestimate, even "0" works */
2072 blankus
= (m
->v
.active
- mode
->crtc_vdisplay
- 2) * m
->h
.active
;
2074 blankus
/= mode
->crtc_clock
;
2075 m
->v
.blankus
= blankus
;
2077 if (mode
->flags
& DRM_MODE_FLAG_INTERLACE
) {
2078 m
->v
.blank2e
= m
->v
.active
+ m
->v
.blanke
;
2079 m
->v
.blank2s
= m
->v
.blank2e
+ mode
->crtc_vdisplay
;
2080 m
->v
.active
= (m
->v
.active
* 2) + 1;
2081 m
->interlace
= true;
2085 m
->interlace
= false;
2087 m
->clock
= mode
->crtc_clock
;
2089 asyh
->set
.mode
= true;
2093 nv50_head_atomic_check(struct drm_crtc
*crtc
, struct drm_crtc_state
*state
)
2095 struct nouveau_drm
*drm
= nouveau_drm(crtc
->dev
);
2096 struct nv50_disp
*disp
= nv50_disp(crtc
->dev
);
2097 struct nv50_head
*head
= nv50_head(crtc
);
2098 struct nv50_head_atom
*armh
= nv50_head_atom(crtc
->state
);
2099 struct nv50_head_atom
*asyh
= nv50_head_atom(state
);
2100 struct nouveau_conn_atom
*asyc
= NULL
;
2101 struct drm_connector_state
*conns
;
2102 struct drm_connector
*conn
;
2105 NV_ATOMIC(drm
, "%s atomic_check %d\n", crtc
->name
, asyh
->state
.active
);
2106 if (asyh
->state
.active
) {
2107 for_each_new_connector_in_state(asyh
->state
.state
, conn
, conns
, i
) {
2108 if (conns
->crtc
== crtc
) {
2109 asyc
= nouveau_conn_atom(conns
);
2114 if (armh
->state
.active
) {
2116 if (asyh
->state
.mode_changed
)
2117 asyc
->set
.scaler
= true;
2118 if (armh
->base
.depth
!= asyh
->base
.depth
)
2119 asyc
->set
.dither
= true;
2123 asyc
->set
.mask
= ~0;
2124 asyh
->set
.mask
= ~0;
2127 if (asyh
->state
.mode_changed
)
2128 nv50_head_atomic_check_mode(head
, asyh
);
2131 if (asyc
->set
.scaler
)
2132 nv50_head_atomic_check_view(armh
, asyh
, asyc
);
2133 if (asyc
->set
.dither
)
2134 nv50_head_atomic_check_dither(armh
, asyh
, asyc
);
2135 if (asyc
->set
.procamp
)
2136 nv50_head_atomic_check_procamp(armh
, asyh
, asyc
);
2139 if ((asyh
->core
.visible
= (asyh
->base
.cpp
!= 0))) {
2140 asyh
->core
.x
= asyh
->base
.x
;
2141 asyh
->core
.y
= asyh
->base
.y
;
2142 asyh
->core
.w
= asyh
->base
.w
;
2143 asyh
->core
.h
= asyh
->base
.h
;
2145 if ((asyh
->core
.visible
= asyh
->curs
.visible
)) {
2146 /*XXX: We need to either find some way of having the
2147 * primary base layer appear black, while still
2148 * being able to display the other layers, or we
2149 * need to allocate a dummy black surface here.
2153 asyh
->core
.w
= asyh
->state
.mode
.hdisplay
;
2154 asyh
->core
.h
= asyh
->state
.mode
.vdisplay
;
2156 asyh
->core
.handle
= disp
->mast
.base
.vram
.handle
;
2157 asyh
->core
.offset
= 0;
2158 asyh
->core
.format
= 0xcf;
2159 asyh
->core
.kind
= 0;
2160 asyh
->core
.layout
= 1;
2161 asyh
->core
.block
= 0;
2162 asyh
->core
.pitch
= ALIGN(asyh
->core
.w
, 64) * 4;
2163 asyh
->lut
.handle
= disp
->mast
.base
.vram
.handle
;
2164 asyh
->lut
.offset
= head
->base
.lut
.nvbo
->bo
.offset
;
2165 asyh
->set
.base
= armh
->base
.cpp
!= asyh
->base
.cpp
;
2166 asyh
->set
.ovly
= armh
->ovly
.cpp
!= asyh
->ovly
.cpp
;
2168 asyh
->core
.visible
= false;
2169 asyh
->curs
.visible
= false;
2174 if (!drm_atomic_crtc_needs_modeset(&asyh
->state
)) {
2175 if (asyh
->core
.visible
) {
2176 if (memcmp(&armh
->core
, &asyh
->core
, sizeof(asyh
->core
)))
2177 asyh
->set
.core
= true;
2179 if (armh
->core
.visible
) {
2180 asyh
->clr
.core
= true;
2183 if (asyh
->curs
.visible
) {
2184 if (memcmp(&armh
->curs
, &asyh
->curs
, sizeof(asyh
->curs
)))
2185 asyh
->set
.curs
= true;
2187 if (armh
->curs
.visible
) {
2188 asyh
->clr
.curs
= true;
2191 asyh
->clr
.core
= armh
->core
.visible
;
2192 asyh
->clr
.curs
= armh
->curs
.visible
;
2193 asyh
->set
.core
= asyh
->core
.visible
;
2194 asyh
->set
.curs
= asyh
->curs
.visible
;
2197 if (asyh
->clr
.mask
|| asyh
->set
.mask
)
2198 nv50_atom(asyh
->state
.state
)->lock_core
= true;
static void
nv50_head_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	u16 *r, *g, *b;
	int i;

	r = crtc->gamma_store;
	g = r + crtc->gamma_size;
	b = g + crtc->gamma_size;

	for (i = 0; i < 256; i++) {
		if (disp->disp->oclass < GF110_DISP) {
			writew((*r++ >> 2) + 0x0000, lut + (i * 0x08) + 0);
			writew((*g++ >> 2) + 0x0000, lut + (i * 0x08) + 2);
			writew((*b++ >> 2) + 0x0000, lut + (i * 0x08) + 4);
		} else {
			/* 0x6000 interferes with the 14-bit color??? */
			writew((*r++ >> 2) + 0x6000, lut + (i * 0x20) + 0);
			writew((*g++ >> 2) + 0x6000, lut + (i * 0x20) + 2);
			writew((*b++ >> 2) + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}
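/* Worked example for the LUT layout above, assuming the usual 16-bit
 * gamma_store entries: pre-GF110 hardware packs one entry per 8 bytes
 * (r/g/b words at offsets 0/2/4), GF110+ one per 32 bytes.  The ">> 2"
 * narrows a 16-bit component to the 14-bit field the comment refers to,
 * e.g. 0xffff >> 2 = 0x3fff, with the 0x6000 bias then added on GF110+.
 */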
static const struct drm_crtc_helper_funcs
nv50_head_help = {
	.atomic_check = nv50_head_atomic_check,
};
static int
nv50_head_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t size,
		    struct drm_modeset_acquire_ctx *ctx)
{
	nv50_head_lut_load(crtc);
	return 0;
}
static void
nv50_head_atomic_destroy_state(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	__drm_atomic_helper_crtc_destroy_state(&asyh->state);
	kfree(asyh);
}
static struct drm_crtc_state *
nv50_head_atomic_duplicate_state(struct drm_crtc *crtc)
{
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh;
	if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state);
	asyh->view = armh->view;
	asyh->mode = armh->mode;
	asyh->lut = armh->lut;
	asyh->core = armh->core;
	asyh->curs = armh->curs;
	asyh->base = armh->base;
	asyh->ovly = armh->ovly;
	asyh->dither = armh->dither;
	asyh->procamp = armh->procamp;
	asyh->clr.mask = 0;
	asyh->set.mask = 0;
	return &asyh->state;
}
static void
__drm_atomic_helper_crtc_reset(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	if (crtc->state)
		crtc->funcs->atomic_destroy_state(crtc, crtc->state);
	crtc->state = state;
	crtc->state->crtc = crtc;
}
static void
nv50_head_reset(struct drm_crtc *crtc)
{
	struct nv50_head_atom *asyh;

	if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL))))
		return;

	__drm_atomic_helper_crtc_reset(crtc, &asyh->state);
}
static void
nv50_head_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
static const struct drm_crtc_funcs
nv50_head_func = {
	.reset = nv50_head_reset,
	.gamma_set = nv50_head_gamma_set,
	.destroy = nv50_head_destroy,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.atomic_duplicate_state = nv50_head_atomic_duplicate_state,
	.atomic_destroy_state = nv50_head_atomic_destroy_state,
};
static int
nv50_head_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->client.device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct nv50_base *base;
	struct nv50_curs *curs;
	struct drm_crtc *crtc;
	int ret;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	ret = nv50_base_new(drm, head, &base);
	if (ret == 0)
		ret = nv50_curs_new(drm, head, &curs);
	if (ret) {
		kfree(head);
		return ret;
	}

	crtc = &head->base.base;
	drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane,
				  &curs->wndw.plane, &nv50_head_func,
				  "head-%d", head->base.index);
	drm_crtc_helper_add(crtc, &nv50_head_help);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(&drm->client, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_head_destroy(crtc);
	return ret;
}
/******************************************************************************
 * Output path helpers
 *****************************************************************************/
static void
nv50_outp_release(struct nouveau_encoder *nv_encoder)
{
	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
	struct {
		struct nv50_disp_mthd_v1 base;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_RELEASE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv_encoder->or = -1;
	nv_encoder->link = 0;
}
static int
nv50_outp_acquire(struct nouveau_encoder *nv_encoder)
{
	struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_acquire_v0 info;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_ACQUIRE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret) {
		NV_ERROR(drm, "error acquiring output path: %d\n", ret);
		return ret;
	}

	nv_encoder->or = args.info.or;
	nv_encoder->link = args.info.link;
	return 0;
}
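/* Both helpers above speak the same nvif method protocol: a versioned
 * nv50_disp_mthd_v1 header whose hasht/hashm pair identifies the DCB output
 * path, followed by a method-specific payload.  Typical caller flow, as a
 * sketch with error handling elided:
 *
 *	ret = nv50_outp_acquire(nv_encoder);	// firmware picks an OR,
 *						// fills ->or and ->link
 *	...program the output...
 *	nv50_outp_release(nv_encoder);		// OR returned to the pool
 */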
static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);

	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	if (!native_mode)
		return 0;

	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Force use of scaler for non-EDID modes. */
			if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		mode = native_mode;
	}

	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}

	return 0;
}
static int
nv50_outp_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nouveau_connector *nv_connector =
		nouveau_connector(conn_state->connector);
	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   nv_connector->native_mode);
}
/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nv50_dac_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}
static void
nv50_dac_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u32 *push;

	nv50_outp_acquire(nv_encoder);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}
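/* DAC load detection drives a test level onto the output and samples the
 * sense lines; 340 above is only the fallback test value used when the
 * VBIOS provides no dactestval.  The caller simply maps the firmware's
 * answer onto connector_status_connected/disconnected, which is what lets
 * a CRT with no usable EDID still be detected.
 */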
static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_dac_enable,
	.disable = nv50_dac_disable,
	.detect = nv50_dac_detect
};

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;

	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
/******************************************************************************
 * Audio
 *****************************************************************************/
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
static void
nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}
/******************************************************************************
 * HDMI
 *****************************************************************************/
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
		u8 infoframes[2 * 17]; /* two frames, up to 17 bytes each */
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;
	union hdmi_infoframe avi_frame;
	union hdmi_infoframe vendor_frame;
	int ret;
	int size;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi, mode,
						       false);
	if (!ret) {
		/* We have an AVI InfoFrame, populate it to the display */
		args.pwr.avi_infoframe_length
			= hdmi_infoframe_pack(&avi_frame, args.infoframes, 17);
	}

	ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi, mode);
	if (!ret) {
		/* We have a Vendor InfoFrame, populate it to the display */
		args.pwr.vendor_infoframe_length
			= hdmi_infoframe_pack(&vendor_frame,
					      args.infoframes
					      + args.pwr.avi_infoframe_length,
					      17);
	}

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	size = sizeof(args.base)
	     + sizeof(args.pwr)
	     + args.pwr.avi_infoframe_length
	     + args.pwr.vendor_infoframe_length;
	nvif_mthd(disp->disp, 0, &args, size);
	nv50_audio_enable(encoder, mode);
}
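/* Worked example for the max_ac_packet arithmetic above, taking a CEA
 * 1920x1080@60 mode (hdisplay 1920, htotal 2200): horizontal blanking is
 * 2200 - 1920 = 280 pixels; subtracting the 56-pixel rekey window and the
 * 18-pixel constant leaves 206, giving max_ac_packet = 206 / 32 = 6
 * audio/auxiliary packets per hblank.
 */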
/******************************************************************************
 * MST
 *****************************************************************************/
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)

struct nv50_mstm {
	struct nouveau_encoder *outp;

	struct drm_dp_mst_topology_mgr mgr;
	struct nv50_msto *msto[4];

	bool modified;
	bool disabled;
	int links;
};

struct nv50_mstc {
	struct nv50_mstm *mstm;
	struct drm_dp_mst_port *port;
	struct drm_connector connector;

	struct drm_display_mode *native;
	struct edid *edid;

	int pbn;
};

struct nv50_msto {
	struct drm_encoder encoder;

	struct nv50_head *head;
	struct nv50_mstc *mstc;
	bool disabled;
};
static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;

	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}

	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}

	return NULL;
}
static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
		drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
	if (msto->disabled) {
		msto->mstc = NULL;
		msto->head = NULL;
		msto->disabled = false;
	}
}
static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht  = mstm->outp->dcb->hasht,
		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
			       (0x0100 << msto->head->base.index),
	};

	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}

	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);
	nvif_mthd(&drm->display->disp, 0, &args, sizeof(args));
}
static int
nv50_msto_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
	struct nv50_mstm *mstm = mstc->mstm;
	int bpp = conn_state->connector->display_info.bpc * 3;
	int slots;

	mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);

	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
	if (slots < 0)
		return slots;

	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   mstc->native);
}
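/* Rough numbers for the bandwidth check above: an 8bpc stream gives
 * bpp = 8 * 3 = 24, and drm_dp_calc_pbn_mode() converts pixel clock plus
 * bpp into DP Payload Bandwidth Number units; e.g. a 148500kHz, 24bpp mode
 * works out to roughly 530 PBN, which drm_dp_find_vcpi_slots() then
 * translates into the number of MST timeslots needed on the link.
 */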
static void
nv50_msto_enable(struct drm_encoder *encoder)
{
	struct nv50_head *head = nv50_head(encoder->crtc);
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = NULL;
	struct nv50_mstm *mstm = NULL;
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	u8 proto, depth;
	int slots;
	bool r;

	drm_connector_list_iter_begin(encoder->dev, &conn_iter);
	drm_for_each_connector_iter(connector, &conn_iter) {
		if (connector->state->best_encoder == &msto->encoder) {
			mstc = nv50_mstc(connector);
			mstm = mstc->mstm;
			break;
		}
	}
	drm_connector_list_iter_end(&conn_iter);

	if (WARN_ON(!mstc))
		return;

	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, slots);
	WARN_ON(!r);

	if (!mstm->links++)
		nv50_outp_acquire(mstm->outp);

	if (mstm->outp->link & 1)
		proto = 0x8;
	else
		proto = 0x9;

	switch (mstc->connector.display_info.bpc) {
	case  6: depth = 0x2; break;
	case  8: depth = 0x5; break;
	case 10:
	default: depth = 0x6; break;
	}

	mstm->outp->update(mstm->outp, head->base.index,
			   &head->base.base.state->adjusted_mode, proto, depth);

	msto->head = head;
	msto->mstc = mstc;
	mstm->modified = true;
}
static void
nv50_msto_disable(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	if (mstc->port)
		drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);

	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
	mstm->modified = true;
	if (!--mstm->links)
		mstm->disabled = true;
	msto->disabled = true;
}
static const struct drm_encoder_helper_funcs
nv50_msto_help = {
	.disable = nv50_msto_disable,
	.enable = nv50_msto_enable,
	.atomic_check = nv50_msto_atomic_check,
};

static void
nv50_msto_destroy(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	drm_encoder_cleanup(&msto->encoder);
	kfree(msto);
}

static const struct drm_encoder_funcs
nv50_msto = {
	.destroy = nv50_msto_destroy,
};
static int
nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
	      struct nv50_msto **pmsto)
{
	struct nv50_msto *msto;
	int ret;

	if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
		return -ENOMEM;

	ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
			       DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
	if (ret) {
		kfree(*pmsto);
		*pmsto = NULL;
		return ret;
	}

	drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
	msto->encoder.possible_crtcs = heads;
	return 0;
}
static struct drm_encoder *
nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
			      struct drm_connector_state *connector_state)
{
	struct nv50_head *head = nv50_head(connector_state->crtc);
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (mstc->port) {
		struct nv50_mstm *mstm = mstc->mstm;
		return &mstm->msto[head->base.index]->encoder;
	}
	return NULL;
}

static struct drm_encoder *
nv50_mstc_best_encoder(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (mstc->port) {
		struct nv50_mstm *mstm = mstc->mstm;
		return &mstm->msto[0]->encoder;
	}
	return NULL;
}
static enum drm_mode_status
nv50_mstc_mode_valid(struct drm_connector *connector,
		     struct drm_display_mode *mode)
{
	return MODE_OK;
}

static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret = 0;

	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
	drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid);
	if (mstc->edid) {
		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
		drm_edid_to_eld(&mstc->connector, mstc->edid);
	}

	if (!mstc->connector.display_info.bpc)
		mstc->connector.display_info.bpc = 8;

	if (mstc->native)
		drm_mode_destroy(mstc->connector.dev, mstc->native);
	mstc->native = nouveau_conn_native_mode(&mstc->connector);
	return ret;
}
static const struct drm_connector_helper_funcs
nv50_mstc_help = {
	.get_modes = nv50_mstc_get_modes,
	.mode_valid = nv50_mstc_mode_valid,
	.best_encoder = nv50_mstc_best_encoder,
	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
};
static enum drm_connector_status
nv50_mstc_detect(struct drm_connector *connector, bool force)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (!mstc->port)
		return connector_status_disconnected;
	return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
}

static void
nv50_mstc_destroy(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	drm_connector_cleanup(&mstc->connector);
	kfree(mstc);
}
static const struct drm_connector_funcs
nv50_mstc = {
	.dpms = drm_atomic_helper_connector_dpms,
	.reset = nouveau_conn_reset,
	.detect = nv50_mstc_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = drm_atomic_helper_connector_set_property,
	.destroy = nv50_mstc_destroy,
	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
	.atomic_set_property = nouveau_conn_atomic_set_property,
	.atomic_get_property = nouveau_conn_atomic_get_property,
};
static int
nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
	      const char *path, struct nv50_mstc **pmstc)
{
	struct drm_device *dev = mstm->outp->base.base.dev;
	struct nv50_mstc *mstc;
	int ret, i;

	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
		return -ENOMEM;
	mstc->mstm = mstm;
	mstc->port = port;

	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		kfree(*pmstc);
		*pmstc = NULL;
		return ret;
	}

	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);

	mstc->connector.funcs->reset(&mstc->connector);
	nouveau_conn_attach_properties(&mstc->connector);

	for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto[i]; i++)
		drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);

	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
	drm_mode_connector_set_path_property(&mstc->connector, path);
	return 0;
}
static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
	ret = drm_dp_check_act_status(&mstm->mgr);

	ret = drm_dp_update_payload_part2(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_cleanup(msto);
		}
	}

	mstm->modified = false;
}
static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
	ret = drm_dp_update_payload_part1(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_prepare(msto);
		}
	}

	if (mstm->disabled) {
		if (!mstm->links)
			nv50_outp_release(mstm->outp);
		mstm->disabled = false;
	}
}
static void
nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
}
static void
nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
			    struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nv50_mstc *mstc = nv50_mstc(connector);

	drm_connector_unregister(&mstc->connector);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);
	mstc->port = NULL;
	drm_modeset_unlock_all(drm->dev);

	drm_connector_unreference(&mstc->connector);
}

static void
nv50_mstm_register_connector(struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);
	drm_modeset_unlock_all(drm->dev);

	drm_connector_register(connector);
}
static struct drm_connector *
nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
			struct drm_dp_mst_port *port, const char *path)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	struct nv50_mstc *mstc;
	int ret;

	ret = nv50_mstc_new(mstm, port, path, &mstc);
	if (ret) {
		if (mstc)
			mstc->connector.funcs->destroy(&mstc->connector);
		return NULL;
	}

	return &mstc->connector;
}

static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
	.add_connector = nv50_mstm_add_connector,
	.register_connector = nv50_mstm_register_connector,
	.destroy_connector = nv50_mstm_destroy_connector,
	.hotplug = nv50_mstm_hotplug,
};
void
nv50_mstm_service(struct nv50_mstm *mstm)
{
	struct drm_dp_aux *aux = mstm->mgr.aux;
	bool handled = true;
	int ret;
	u8 esi[8] = {};

	while (handled) {
		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (ret != 8) {
			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
			return;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
	}
}
void
nv50_mstm_remove(struct nv50_mstm *mstm)
{
	if (mstm)
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
}
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp;
	int ret;

	if (dpcd >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
		if (ret < 0)
			return ret;

		dpcd &= ~DP_MST_EN;
		if (state)
			dpcd |= DP_MST_EN;

		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
		if (ret < 0)
			return ret;
	}

	return nvif_mthd(disp, 0, &args, sizeof(args));
}
int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	int ret, state = 0;

	if (!mstm)
		return 0;

	if (dpcd[0] >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			return ret;

		if (!(dpcd[1] & DP_MST_CAP))
			dpcd[0] = 0x11;
		else
			state = allow;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], state);
	if (ret)
		return ret;

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
	if (ret)
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return mstm->mgr.mst_state;
}
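/* The detect path above follows the DP 1.2 MST handshake: a DPCD revision
 * of at least 0x12 plus the DP_MST_CAP bit gates MST, nv50_mstm_enable()
 * mirrors the sink-side DP_MSTM_CTRL write with the firmware-side
 * SOR_DP_MST_LINK method so both ends agree on the link mode, and a
 * failure to start the topology manager drops the sink back to SST via a
 * second enable(.., 0) call.
 */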
static void
nv50_mstm_fini(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
}

static void
nv50_mstm_init(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_resume(&mstm->mgr);
}
static void
nv50_mstm_del(struct nv50_mstm **pmstm)
{
	struct nv50_mstm *mstm = *pmstm;
	if (mstm) {
		kfree(*pmstm);
		*pmstm = NULL;
	}
}
static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct nv50_mstm **pmstm)
{
	const int max_payloads = hweight8(outp->dcb->heads);
	struct drm_device *dev = outp->base.base.dev;
	struct nv50_mstm *mstm;
	int ret, i;
	u8 dpcd;

	/* This is a workaround for some monitors not functioning
	 * correctly in MST mode on initial module load.  I think
	 * some bad interaction with the VBIOS may be responsible.
	 *
	 * A good ol' off and on again seems to work here ;)
	 */
	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
	if (ret >= 0 && dpcd >= 0x12)
		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);

	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
		return -ENOMEM;
	mstm->outp = outp;
	mstm->mgr.cbs = &nv50_mstm;

	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
					   max_payloads, conn_base_id);
	if (ret)
		return ret;

	for (i = 0; i < max_payloads; i++) {
		ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
				    i, &mstm->msto[i]);
		if (ret)
			return ret;
	}

	return 0;
}
/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
		struct drm_display_mode *mode, u8 proto, u8 depth)
{
	struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base;
	u32 *push;

	if (!mode) {
		nv_encoder->ctrl &= ~BIT(head);
		if (!(nv_encoder->ctrl & 0x0000000f))
			nv_encoder->ctrl = 0;
	} else {
		nv_encoder->ctrl |= proto << 8;
		nv_encoder->ctrl |= BIT(head);
	}

	if ((push = evo_wait(core, 6))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			if (mode) {
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					nv_encoder->ctrl |= 0x00001000;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					nv_encoder->ctrl |= 0x00002000;
				nv_encoder->ctrl |= depth << 16;
			}
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
		} else {
			if (mode) {
				u32 magic = 0x31ec6000 | (head << 25);
				u32 syncs = 0x00000001;
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					syncs |= 0x00000008;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					syncs |= 0x00000010;
				if (mode->flags & DRM_MODE_FLAG_INTERLACE)
					magic |= 0x00000001;

				evo_mthd(push, 0x0404 + (head * 0x300), 2);
				evo_data(push, syncs | (depth << 6));
				evo_data(push, magic);
			}
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		}
		evo_data(push, nv_encoder->ctrl);
		evo_kick(push, core);
	}
}
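/* Going by the updates in nv50_sor_update(): bits 0-3 of ctrl select the
 * head(s) the SOR drives, the protocol sits at bits 8+, and pre-GF110 adds
 * sync polarity (bits 12-13) and depth (bits 16+) to the same word, while
 * GF110+ moves sync/depth into the per-head 0x0404 method instead.  E.g.
 * proto 0x8 on head 1 yields ctrl = (0x8 << 8) | BIT(1) = 0x0802.
 */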
static void
nv50_sor_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		struct nvkm_i2c_aux *aux = nv_encoder->aux;
		u8 pwr;

		if (aux) {
			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
			if (ret == 0) {
				pwr &= ~DP_SET_POWER_MASK;
				pwr |=  DP_SET_POWER_D3;
				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
			}
		}

		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
		nv50_audio_disable(encoder, nv_crtc);
		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
		nv50_outp_release(nv_encoder);
	}
}
static void
nv50_sor_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;
	nv50_outp_acquire(nv_encoder);

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 *  - Need to (i.e. rate > 165MHz)
			 *  - DCB says we can
			 *  - Not an HDMI monitor, since there's no dual-link
			 *    on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_enable(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		if (nv_connector->base.display_info.bpc == 6)
			depth = 0x2;
		else
		if (nv_connector->base.display_info.bpc == 8)
			depth = 0x5;
		else
			depth = 0x6;

		if (nv_encoder->link & 1)
			proto = 0x8;
		else
			proto = 0x9;

		nv50_audio_enable(encoder, mode);
		break;
	default:
		BUG();
		break;
	}

	nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth);
}
static const struct drm_encoder_helper_funcs
nv50_sor_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_sor_enable,
	.disable = nv50_sor_disable,
};

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	nv50_mstm_del(&nv_encoder->dp.mstm);
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->update = nv50_sor_update;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_help);

	drm_mode_connector_attach_encoder(connector, encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			nv_encoder->i2c = &nv_connector->aux.ddc;
			nv_encoder->aux = aux;
		}

		/*TODO: Use DP Info Table to check for support. */
		if (nv50_disp(encoder->dev)->disp->oclass >= GF110_DISP) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
					    nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			if (ret)
				return ret;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return 0;
}
/******************************************************************************
 * PIOR
 *****************************************************************************/
static int
nv50_pior_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
	if (ret)
		return ret;
	crtc_state->adjusted_mode.clock *= 2;
	return 0;
}
static void
nv50_pior_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}
static void
nv50_pior_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u8 owner = 1 << nv_crtc->index;
	u8 proto, depth;
	u32 *push;

	nv50_outp_acquire(nv_encoder);

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_connector->base.display_info.bpc) {
	case 10: depth = 0x6; break;
	case  8: depth = 0x5; break;
	case  6: depth = 0x2; break;
	default: depth = 0x0; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		BUG();
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 ctrl = (depth << 16) | (proto << 8) | owner;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				ctrl |= 0x00001000;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				ctrl |= 0x00002000;
			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
			evo_data(push, ctrl);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
static const struct drm_encoder_helper_funcs
nv50_pior_help = {
	.atomic_check = nv50_pior_atomic_check,
	.enable = nv50_pior_enable,
	.disable = nv50_pior_disable,
};

static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc  = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc  = aux ? &nv_connector->aux.ddc : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
/******************************************************************************
 * Atomic
 *****************************************************************************/

static void
nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_dmac *core = &disp->mast.base;
	struct nv50_mstm *mstm;
	struct drm_encoder *encoder;
	u32 *push;

	NV_ATOMIC(drm, "commit core %08x\n", interlock);

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_prepare(mstm);
		}
	}

	if ((push = evo_wait(core, 5))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, interlock);
		evo_data(push, 0x00000000);
		nouveau_bo_wr32(disp->sync, 0, 0x00000000);
		evo_kick(push, core);
		if (nvif_msec(&drm->client.device, 2000ULL,
			if (nouveau_bo_rd32(disp->sync, 0))
				break;
			usleep_range(1, 2);
		) < 0)
			NV_ERROR(drm, "EVO timeout\n");
	}

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_cleanup(mstm);
		}
	}
}
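/* The push sequence above is the core channel's update handshake as used
 * throughout this file: method 0x0084 arms a completion notifier, 0x0080
 * latches the new state together with an interlock mask naming the
 * satellite channels that must flip atomically with it, and the
 * nvif_msec() loop then polls the shared sync buffer (written back by the
 * hardware) for up to two seconds before declaring an EVO timeout.
 */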
static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
	struct drm_device *dev = state->dev;
	struct drm_crtc_state *new_crtc_state;
	struct drm_crtc *crtc;
	struct drm_plane_state *new_plane_state;
	struct drm_plane *plane;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;
	u32 interlock_core = 0;
	u32 interlock_chan = 0;
	int i;

	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
	drm_atomic_helper_wait_for_fences(dev, state, false);
	drm_atomic_helper_wait_for_dependencies(state);
	drm_atomic_helper_update_legacy_modeset_state(dev, state);

	if (atom->lock_core)
		mutex_lock(&disp->mutex);

	/* Disable head(s). */
	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
			  asyh->clr.mask, asyh->set.mask);

		if (asyh->clr.mask) {
			nv50_head_flush_clr(head, asyh, atom->flush_disable);
			interlock_core |= 1;
		}
	}

	/* Disable plane(s). */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
			  asyw->clr.mask, asyw->set.mask);
		if (!asyw->clr.mask)
			continue;

		interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core,
						      atom->flush_disable,
						      asyw);
	}

	/* Disable output path(s). */
	list_for_each_entry(outp, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
			  outp->clr.mask, outp->set.mask);

		if (outp->clr.mask) {
			help->disable(encoder);
			interlock_core |= 1;
			if (outp->flush_disable) {
				nv50_disp_atomic_commit_core(drm, interlock_chan);
				interlock_core = 0;
				interlock_chan = 0;
			}
		}
	}

	/* Flush disable. */
	if (interlock_core) {
		if (atom->flush_disable) {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
			interlock_core = 0;
			interlock_chan = 0;
		}
	}

	/* Update output path(s). */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
			  outp->set.mask, outp->clr.mask);

		if (outp->set.mask) {
			help->enable(encoder);
			interlock_core = 1;
		}

		list_del(&outp->head);
		kfree(outp);
	}

	/* Update head(s). */
	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
			  asyh->set.mask, asyh->clr.mask);

		if (asyh->set.mask) {
			nv50_head_flush_set(head, asyh);
			interlock_core = 1;
		}
	}

	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (new_crtc_state->event)
			drm_crtc_vblank_get(crtc);
	}

	/* Update plane(s). */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
			  asyw->set.mask, asyw->clr.mask);
		if ( !asyw->set.mask &&
		    (!asyw->clr.mask || atom->flush_disable))
			continue;

		interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw);
	}

	/* Flush update. */
	if (interlock_core) {
		if (!interlock_chan && atom->state.legacy_cursor_update) {
			u32 *push = evo_wait(&disp->mast, 2);
			if (push) {
				evo_mthd(push, 0x0080, 1);
				evo_data(push, 0x00000000);
				evo_kick(push, &disp->mast);
			}
		} else {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
		}
	}

	if (atom->lock_core)
		mutex_unlock(&disp->mutex);

	/* Wait for HW to signal completion. */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		int ret = nv50_wndw_wait_armed(wndw, asyw);
		if (ret)
			NV_ERROR(drm, "%s: timeout\n", plane->name);
	}

	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (new_crtc_state->event) {
			unsigned long flags;
			/* Get correct count/ts if racing with vblank irq */
			drm_crtc_accurate_vblank_count(crtc);
			spin_lock_irqsave(&crtc->dev->event_lock, flags);
			drm_crtc_send_vblank_event(crtc, new_crtc_state->event);
			spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
			new_crtc_state->event = NULL;
			drm_crtc_vblank_put(crtc);
		}
	}

	drm_atomic_helper_commit_hw_done(state);
	drm_atomic_helper_cleanup_planes(dev, state);
	drm_atomic_helper_commit_cleanup_done(state);
	drm_atomic_state_put(state);
}
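/* Ordering in the tail above is deliberate: all clears (heads, then
 * windows, then output paths) are flushed before any enables so a resource
 * is never owned by two consumers across the update, while interlock_core
 * and interlock_chan accumulate which channels the final core update has
 * to synchronize with.
 */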
static void
nv50_disp_atomic_commit_work(struct work_struct *work)
{
	struct drm_atomic_state *state =
		container_of(work, typeof(*state), commit_work);
	nv50_disp_atomic_commit_tail(state);
}
static int
nv50_disp_atomic_commit(struct drm_device *dev,
			struct drm_atomic_state *state, bool nonblock)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_plane_state *old_plane_state;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	bool active = false;
	int ret, i;

	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0 && ret != -EACCES)
		return ret;

	ret = drm_atomic_helper_setup_commit(state, nonblock);
	if (ret)
		goto done;

	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);

	ret = drm_atomic_helper_prepare_planes(dev, state);
	if (ret)
		goto done;

	if (!nonblock) {
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto err_cleanup;
	}

	ret = drm_atomic_helper_swap_state(state, true);
	if (ret)
		goto err_cleanup;

	for_each_old_plane_in_state(state, plane, old_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(old_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		if (asyw->set.image) {
			asyw->ntfy.handle = wndw->dmac->sync.handle;
			asyw->ntfy.offset = wndw->ntfy;
			asyw->ntfy.awaken = false;
			asyw->set.ntfy = true;
			nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000);
			wndw->ntfy ^= 0x10;
		}
	}

	drm_atomic_state_get(state);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

	drm_for_each_crtc(crtc, dev) {
		if (crtc->state->enable) {
			if (!drm->have_disp_power_ref) {
				drm->have_disp_power_ref = true;
				return 0;
			}
			active = true;
			break;
		}
	}

	if (!active && drm->have_disp_power_ref) {
		pm_runtime_put_autosuspend(dev->dev);
		drm->have_disp_power_ref = false;
	}

err_cleanup:
	if (ret)
		drm_atomic_helper_cleanup_planes(dev, state);
done:
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}
static struct nv50_outp_atom *
nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
{
	struct nv50_outp_atom *outp;

	list_for_each_entry(outp, &atom->outp, head) {
		if (outp->encoder == encoder)
			return outp;
	}

	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
	if (!outp)
		return ERR_PTR(-ENOMEM);

	list_add(&outp->head, &atom->outp);
	outp->encoder = encoder;
	return outp;
}
static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
				struct drm_connector_state *old_connector_state)
{
	struct drm_encoder *encoder = old_connector_state->best_encoder;
	struct drm_crtc_state *old_crtc_state, *new_crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = old_connector_state->crtc))
		return 0;

	old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
	if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			outp->flush_disable = true;
			atom->flush_disable = true;
		}
		outp->clr.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}
static int
nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
				struct drm_connector_state *connector_state)
{
	struct drm_encoder *encoder = connector_state->best_encoder;
	struct drm_crtc_state *new_crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = connector_state->crtc))
		return 0;

	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
	if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		outp->set.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}
static int
nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct drm_connector_state *old_connector_state, *new_connector_state;
	struct drm_connector *connector;
	int ret, i;

	ret = drm_atomic_helper_check(dev, state);
	if (ret)
		return ret;

	for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
		ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
		if (ret)
			return ret;

		ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
		if (ret)
			return ret;
	}

	return 0;
}
static void
nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;

	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		list_del(&outp->head);
		kfree(outp);
	}

	drm_atomic_state_default_clear(state);
	atom->lock_core = false;
	atom->flush_disable = false;
}
static void
nv50_disp_atomic_state_free(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	drm_atomic_state_default_release(&atom->state);
	kfree(atom);
}
static struct drm_atomic_state *
nv50_disp_atomic_state_alloc(struct drm_device *dev)
{
	struct nv50_atom *atom;
	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
	    drm_atomic_state_init(dev, &atom->state) < 0) {
		kfree(atom);
		return NULL;
	}
	INIT_LIST_HEAD(&atom->outp);
	return &atom->state;
}
static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};
/******************************************************************************
 * Init
 *****************************************************************************/

void
nv50_display_fini(struct drm_device *dev)
{
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_fini(wndw);
	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			nv_encoder = nouveau_encoder(encoder);
			nv50_mstm_fini(nv_encoder->dp.mstm);
		}
	}
}
int
nv50_display_init(struct drm_device *dev)
{
	struct drm_encoder *encoder;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	u32 *push;

	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;

	evo_mthd(push, 0x0088, 1);
	evo_data(push, nv50_mast(dev)->base.sync.handle);
	evo_kick(push, nv50_mast(dev));

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			struct nouveau_encoder *nv_encoder =
				nouveau_encoder(encoder);
			nv50_mstm_init(nv_encoder->dp.mstm);
		}
	}

	drm_for_each_crtc(crtc, dev) {
		nv50_head_lut_load(crtc);
	}

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_init(wndw);
	}

	return 0;
}
void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_dmac_destroy(&disp->mast.base, disp->disp);

	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}
MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
static int nouveau_atomic = 0;
module_param_named(atomic, nouveau_atomic, int, 0400);
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->client.device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	mutex_init(&disp->mutex);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	if (nouveau_atomic)
		dev->driver->driver_features |= DRIVER_ATOMIC;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
			       &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x022448);
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_head_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}