/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#include <linux/dma-mapping.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_plane_helper.h>

#include <nvif/class.h>
#include <nvif/cl0002.h>
#include <nvif/cl5070.h>
#include <nvif/cl507a.h>
#include <nvif/cl507b.h>
#include <nvif/cl507c.h>
#include <nvif/cl507d.h>
#include <nvif/cl507e.h>
#include <nvif/event.h>

#include "nouveau_drv.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fence.h"
#include "nouveau_fbcon.h"
#include "nv50_display.h"
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))
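/* For example (illustrative): with four heads, EVO_FLIP(0..3) map to channel
 * indices 0x01..0x04, EVO_OVLY(0..3) to 0x05..0x08, EVO_OIMM(0..3) to
 * 0x09..0x0c and EVO_CURS(0..3) to 0x0d..0x10, with the master (core)
 * channel at index 0x00.
 */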
/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o)    ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
#define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20)
#define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30)
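/* Worked example (illustrative): each channel owns a 0x100-byte slice of the
 * shared sync buffer, so EVO_FLIP_SEM0(0) = 0x100, EVO_FLIP_NTFY0(0) = 0x120
 * and EVO_FLIP_NTFY0(2) = 0x320, while EVO_MAST_NTFY sits at offset 0x00.
 */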
/******************************************************************************
 * Atomic state
 *****************************************************************************/
#define nv50_atom(p) container_of((p), struct nv50_atom, state)

struct nv50_atom {
	struct drm_atomic_state state;

	struct list_head outp;
	bool lock_core;
};

struct nv50_outp_atom {
	struct list_head head;

	struct drm_encoder *encoder;
	/* ... */
};

#define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)

struct nv50_head_atom {
	struct drm_crtc_state state;

	/* ... */

	struct nv50_head_mode {
		/* ... */
	} mode;

	/* ... */
};
static inline struct nv50_head_atom *
nv50_head_atom_get(struct drm_atomic_state *state, struct drm_crtc *crtc)
{
	struct drm_crtc_state *statec = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(statec))
		return (void *)statec;
	return nv50_head_atom(statec);
}
#define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state)

struct nv50_wndw_atom {
	struct drm_plane_state state;
	u8 interval;

	struct drm_rect clip;
	/* ... */
};
/******************************************************************************
 * EVO channel
 *****************************************************************************/

struct nv50_chan {
	struct nvif_object user;
	struct nvif_device *device;
};
static int
nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_chan *chan)
{
	struct nvif_sclass *sclass;
	int ret, i, n;

	chan->device = device;

	ret = n = nvif_object_sclass_get(disp, &sclass);
	if (ret < 0)
		return ret;

	while (oclass[0]) {
		for (i = 0; i < n; i++) {
			if (sclass[i].oclass == oclass[0]) {
				ret = nvif_object_init(disp, 0, oclass[0],
						       data, size, &chan->user);
				if (ret == 0)
					nvif_object_map(&chan->user);
				nvif_object_sclass_put(&sclass);
				return ret;
			}
		}
		oclass++;
	}

	nvif_object_sclass_put(&sclass);
	return -ENOSYS;
}

static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}
/******************************************************************************
 * PIO EVO channel
 *****************************************************************************/

struct nv50_pioc {
	struct nv50_chan base;
};

static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}

static int
nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_pioc *pioc)
{
	return nv50_chan_create(device, disp, oclass, head, data, size,
				&pioc->base);
}
/******************************************************************************
 * Overlay Immediate
 *****************************************************************************/

struct nv50_oimm {
	struct nv50_pioc base;
};

static int
nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_oimm *oimm)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	static const s32 oclass[] = {
		/* ... */
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}
/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

struct nv50_dmac_ctxdma {
	struct list_head head;
	struct nvif_object object;
};

struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;
	u32 *ptr;

	struct nvif_object sync;
	struct nvif_object vram;
	struct list_head ctxdma;

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};

static void
nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma *ctxdma)
{
	nvif_object_fini(&ctxdma->object);
	list_del(&ctxdma->head);
	kfree(ctxdma);
}
static struct nv50_dmac_ctxdma *
nv50_dmac_ctxdma_new(struct nv50_dmac *dmac, struct nouveau_framebuffer *fb)
{
	struct nouveau_drm *drm = nouveau_drm(fb->base.dev);
	struct nv50_dmac_ctxdma *ctxdma;
	const u8    kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;
	const u32 handle = 0xfb000000 | kind;
	struct {
		struct nv_dma_v0 base;
		union {
			struct nv50_dma_v0 nv50;
			struct gf100_dma_v0 gf100;
			struct gf119_dma_v0 gf119;
		};
	} args = {};
	u32 argc = sizeof(args.base);
	int ret;

	list_for_each_entry(ctxdma, &dmac->ctxdma, head) {
		if (ctxdma->object.handle == handle)
			return ctxdma;
	}

	if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
		return ERR_PTR(-ENOMEM);
	list_add(&ctxdma->head, &dmac->ctxdma);

	args.base.target = NV_DMA_V0_TARGET_VRAM;
	args.base.access = NV_DMA_V0_ACCESS_RDWR;
	args.base.start  = 0;
	args.base.limit  = drm->client.device.info.ram_user - 1;

	if (drm->client.device.info.chipset < 0x80) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xc0) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		args.nv50.kind = kind;
		argc += sizeof(args.nv50);
	} else
	if (drm->client.device.info.chipset < 0xd0) {
		args.gf100.kind = kind;
		argc += sizeof(args.gf100);
	} else {
		args.gf119.page = GF119_DMA_V0_PAGE_LP;
		args.gf119.kind = kind;
		argc += sizeof(args.gf119);
	}

	ret = nvif_object_init(&dmac->base.user, handle, NV_DMA_IN_MEMORY,
			       &args, argc, &ctxdma->object);
	if (ret) {
		nv50_dmac_ctxdma_del(ctxdma);
		return ERR_PTR(ret);
	}

	return ctxdma;
}
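/* The ctxdma object handle encodes the memory kind (0xfb000000 | kind), so
 * the list walk at the top of nv50_dmac_ctxdma_new() reuses one DMA object
 * per kind instead of creating a new object for every framebuffer.
 */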
static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
	struct nvif_device *device = dmac->base.device;
	struct nv50_dmac_ctxdma *ctxdma, *ctxtmp;

	list_for_each_entry_safe(ctxdma, ctxtmp, &dmac->ctxdma, head) {
		nv50_dmac_ctxdma_del(ctxdma);
	}

	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	if (dmac->ptr) {
		struct device *dev = nvxx_device(device)->dev;
		dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}
}
static int
nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nv50_disp_core_channel_dma_v0 *args = data;
	struct nvif_object pushbuf;
	int ret;

	mutex_init(&dmac->lock);

	dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
				       &dmac->handle, GFP_KERNEL);
	if (!dmac->ptr)
		return -ENOMEM;

	ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_PCI_US,
					.access = NV_DMA_V0_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
			       }, sizeof(struct nv_dma_v0), &pushbuf);
	if (ret)
		return ret;

	args->pushbuf = nvif_handle(&pushbuf);

	ret = nv50_chan_create(device, disp, oclass, head, data, size,
			       &dmac->base);
	nvif_object_fini(&pushbuf);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = device->info.ram_user - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);
	if (ret)
		return ret;

	INIT_LIST_HEAD(&dmac->ctxdma);
	return ret;
}
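/* The two fixed handles used above give every DMA channel a view of the
 * shared sync buffer (0xf0000000) and of all of VRAM (0xf0000001); later
 * code refers to these objects by handle when programming semaphores,
 * notifiers, the LUT and the core surface.
 */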
/******************************************************************************
 * Core
 *****************************************************************************/

struct nv50_mast {
	struct nv50_dmac base;
};

static int
nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
		 u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,
	};
	static const s32 oclass[] = {
		GP102_DISP_CORE_CHANNEL_DMA,
		GP100_DISP_CORE_CHANNEL_DMA,
		GM200_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
				syncbuf, &core->base);
}
/******************************************************************************
 * Base
 *****************************************************************************/

struct nv50_sync {
	struct nv50_dmac base;
	/* ... */
};

static int
nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}
/******************************************************************************
 * Overlay
 *****************************************************************************/

struct nv50_ovly {
	struct nv50_dmac base;
};

static int
nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,
		.head = head,
	};
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}

struct nv50_head {
	struct nouveau_crtc base;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass
struct nv50_disp {
	struct nvif_object *disp;
	struct nv50_mast mast;

	struct nouveau_bo *sync;
	/* ... */
};

static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nv50_mast(d) (&nv50_disp(d)->mast)
/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	struct nvif_device *device = dmac->base.device;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

	mutex_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		dmac->ptr[put] = 0x20000000;

		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (nvif_msec(device, 2000,
			if (!nvif_rd32(&dmac->base.user, 0x0004))
				break;
		) < 0) {
			mutex_unlock(&dmac->lock);
			printk(KERN_ERR "nouveau: evo channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}
static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}
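/* A minimal usage sketch of the two helpers above (illustrative only):
 *
 *	u32 *push = evo_wait(evoc, 2);		// reserve 2 dwords, takes lock
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);	// method header (macro below)
 *		evo_data(push, 0x00000000);	// one data word
 *		evo_kick(push, evoc);		// publish new PUT, drops lock
 *	}
 */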
#define evo_mthd(p,m,s) do {						\
	const u32 _m = (m), _s = (s);					\
	if (drm_debug & DRM_UT_KMS)					\
		printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__);	\
	*((p)++) = ((_s << 18) | _m);					\
} while(0)

#define evo_data(p,d) do {						\
	const u32 _d = (d);						\
	if (drm_debug & DRM_UT_KMS)					\
		printk(KERN_ERR "\t%08x\n", _d);			\
	*((p)++) = _d;							\
} while(0)
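/* The header word emitted by evo_mthd() packs the data-word count into bits
 * 31:18 and the method offset into the low bits; e.g. evo_mthd(push, 0x0080, 1)
 * writes (1 << 18) | 0x0080 = 0x00040080, followed by one evo_data() word.
 */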
/******************************************************************************
 * Plane
 *****************************************************************************/
#define nv50_wndw(p) container_of((p), struct nv50_wndw, plane)

struct nv50_wndw {
	const struct nv50_wndw_func *func;
	struct nv50_dmac *dmac;

	struct drm_plane plane;

	struct nvif_notify notify;
	u16 ntfy;
	u16 sema;
	u32 data;
};

struct nv50_wndw_func {
	void *(*dtor)(struct nv50_wndw *);
	int (*acquire)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
		       struct nv50_head_atom *asyh);
	void (*release)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
			struct nv50_head_atom *asyh);
	void (*prepare)(struct nv50_wndw *, struct nv50_head_atom *asyh,
			struct nv50_wndw_atom *asyw);

	void (*sema_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*sema_clr)(struct nv50_wndw *);
	void (*ntfy_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*ntfy_clr)(struct nv50_wndw *);
	int (*ntfy_wait_begun)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*image_clr)(struct nv50_wndw *);
	void (*lut)(struct nv50_wndw *, struct nv50_wndw_atom *);
	void (*point)(struct nv50_wndw *, struct nv50_wndw_atom *);

	u32 (*update)(struct nv50_wndw *, u32 interlock);
};
static int
nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	return wndw->func->ntfy_wait_begun(wndw, asyw);
}

static u32
nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 interlock, bool flush,
		    struct nv50_wndw_atom *asyw)
{
	if (asyw->clr.sema && (!asyw->set.sema || flush))
		wndw->func->sema_clr(wndw);
	if (asyw->clr.ntfy && (!asyw->set.ntfy || flush))
		wndw->func->ntfy_clr(wndw);
	if (asyw->clr.image && (!asyw->set.image || flush))
		wndw->func->image_clr(wndw);

	return flush ? wndw->func->update(wndw, interlock) : 0;
}
static u32
nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 interlock,
		    struct nv50_wndw_atom *asyw)
{
	if (interlock) {
		asyw->image.mode = 0;
		asyw->image.interval = 1;
	}

	if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
	if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
	if (asyw->set.image) wndw->func->image_set(wndw, asyw);
	if (asyw->set.lut  ) wndw->func->lut      (wndw, asyw);
	if (asyw->set.point) wndw->func->point    (wndw, asyw);

	return wndw->func->update(wndw, interlock);
}
static void
nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
	wndw->func->release(wndw, asyw, asyh);
	asyw->ntfy.handle = 0;
	asyw->sema.handle = 0;
}
static int
nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw,
			       struct nv50_wndw_atom *asyw,
			       struct nv50_head_atom *asyh)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(asyw->state.fb);
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	int ret;

	NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);
	asyw->clip.x1 = 0;
	asyw->clip.y1 = 0;
	asyw->clip.x2 = asyh->state.mode.hdisplay;
	asyw->clip.y2 = asyh->state.mode.vdisplay;

	asyw->image.w = fb->base.width;
	asyw->image.h = fb->base.height;
	asyw->image.kind = (fb->nvbo->tile_flags & 0x0000ff00) >> 8;
	if (asyw->image.kind) {
		asyw->image.layout = 0;
		if (drm->client.device.info.chipset >= 0xc0)
			asyw->image.block = fb->nvbo->tile_mode >> 4;
		else
			asyw->image.block = fb->nvbo->tile_mode;
		asyw->image.pitch = (fb->base.pitches[0] / 4) << 4;
	} else {
		asyw->image.layout = 1;
		asyw->image.block  = 0;
		asyw->image.pitch  = fb->base.pitches[0];
	}

	ret = wndw->func->acquire(wndw, asyw, asyh);
	if (ret)
		return ret;

	if (asyw->set.image) {
		if (!(asyw->image.mode = asyw->interval ? 0 : 1))
			asyw->image.interval = asyw->interval;
		else
			asyw->image.interval = 0;
	}

	return 0;
}
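/* As encoded above: image.mode 0 requests a vblank-synchronized flip with
 * image.interval frames between flips, while image.mode 1 (selected when the
 * atom's interval is 0) requests an immediate, non-synchronized flip.
 */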
static int
nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *harm = NULL, *asyh = NULL;
	bool varm = false, asyv = false, asym = false;
	int ret;

	NV_ATOMIC(drm, "%s atomic_check\n", plane->name);
	if (asyw->state.crtc) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);
		asym = drm_atomic_crtc_needs_modeset(&asyh->state);
		asyv = asyh->state.active;
	}

	if (armw->state.crtc) {
		harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
		if (IS_ERR(harm))
			return PTR_ERR(harm);
		varm = harm->state.crtc->state->active;
	}

	if (asyv) {
		asyw->point.x = asyw->state.crtc_x;
		asyw->point.y = asyw->state.crtc_y;
		if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
			asyw->set.point = true;

		if (!varm || asym || armw->state.fb != asyw->state.fb) {
			ret = nv50_wndw_atomic_check_acquire(wndw, asyw, asyh);
			if (ret)
				return ret;
		}
	} else
	if (varm) {
		nv50_wndw_atomic_check_release(wndw, asyw, harm);
	} else {
		return 0;
	}

	/* ... */
	asyw->clr.ntfy = armw->ntfy.handle != 0;
	asyw->clr.sema = armw->sema.handle != 0;
	if (wndw->func->image_clr)
		asyw->clr.image = armw->image.handle != 0;
	asyw->set.lut = wndw->func->lut && asyv;

	return 0;
}
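/* Naming convention used throughout these atomic paths: "armw"/"armh" are
 * the window/head state currently armed in hardware (the plane or CRTC's
 * existing state), while "asyw"/"asyh" are the asynchronous, not-yet-armed
 * state being checked and committed.
 */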
static void
nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(old_state->fb);
	struct nouveau_drm *drm = nouveau_drm(plane->dev);

	NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
	if (!old_state->fb)
		return;

	nouveau_bo_unpin(fb->nvbo);
}
static int
nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(state->fb);
	struct nouveau_drm *drm = nouveau_drm(plane->dev);
	struct nv50_wndw *wndw = nv50_wndw(plane);
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	struct nv50_head_atom *asyh;
	struct nv50_dmac_ctxdma *ctxdma;
	int ret;

	NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, state->fb);
	if (!asyw->state.fb)
		return 0;

	ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM, true);
	if (ret)
		return ret;

	ctxdma = nv50_dmac_ctxdma_new(wndw->dmac, fb);
	if (IS_ERR(ctxdma)) {
		nouveau_bo_unpin(fb->nvbo);
		return PTR_ERR(ctxdma);
	}

	asyw->state.fence = reservation_object_get_excl_rcu(fb->nvbo->bo.resv);
	asyw->image.handle = ctxdma->object.handle;
	asyw->image.offset = fb->nvbo->bo.offset;

	if (wndw->func->prepare) {
		asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
		if (IS_ERR(asyh))
			return PTR_ERR(asyh);

		wndw->func->prepare(wndw, asyh, asyw);
	}

	return 0;
}
static const struct drm_plane_helper_funcs
nv50_wndw_helper = {
	.prepare_fb = nv50_wndw_prepare_fb,
	.cleanup_fb = nv50_wndw_cleanup_fb,
	.atomic_check = nv50_wndw_atomic_check,
};

static void
nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
			       struct drm_plane_state *state)
{
	struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
	__drm_atomic_helper_plane_destroy_state(&asyw->state);
	kfree(asyw);
}
static struct drm_plane_state *
nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
{
	struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
	struct nv50_wndw_atom *asyw;
	if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
	asyw->interval = 1;
	asyw->sema = armw->sema;
	asyw->ntfy = armw->ntfy;
	asyw->image = armw->image;
	asyw->point = armw->point;
	asyw->lut = armw->lut;
	asyw->clr.mask = 0;
	asyw->set.mask = 0;
	return &asyw->state;
}
static void
nv50_wndw_reset(struct drm_plane *plane)
{
	struct nv50_wndw_atom *asyw;

	if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
		return;

	if (plane->state)
		plane->funcs->atomic_destroy_state(plane, plane->state);
	plane->state = &asyw->state;
	plane->state->plane = plane;
	plane->state->rotation = DRM_ROTATE_0;
}

static void
nv50_wndw_destroy(struct drm_plane *plane)
{
	struct nv50_wndw *wndw = nv50_wndw(plane);
	void *data;
	nvif_notify_fini(&wndw->notify);
	data = wndw->func->dtor(wndw);
	drm_plane_cleanup(&wndw->plane);
	kfree(data);
}
static const struct drm_plane_funcs
nv50_wndw = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = nv50_wndw_destroy,
	.reset = nv50_wndw_reset,
	.set_property = drm_atomic_helper_plane_set_property,
	.atomic_duplicate_state = nv50_wndw_atomic_duplicate_state,
	.atomic_destroy_state = nv50_wndw_atomic_destroy_state,
};

static void
nv50_wndw_fini(struct nv50_wndw *wndw)
{
	nvif_notify_put(&wndw->notify);
}

static void
nv50_wndw_init(struct nv50_wndw *wndw)
{
	nvif_notify_get(&wndw->notify);
}
static int
nv50_wndw_ctor(const struct nv50_wndw_func *func, struct drm_device *dev,
	       enum drm_plane_type type, const char *name, int index,
	       struct nv50_dmac *dmac, const u32 *format, int nformat,
	       struct nv50_wndw *wndw)
{
	int ret;

	wndw->func = func;
	wndw->dmac = dmac;

	ret = drm_universal_plane_init(dev, &wndw->plane, 0, &nv50_wndw, format,
				       nformat, type, "%s-%d", name, index);
	if (ret)
		return ret;

	drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);
	return 0;
}
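/* Both plane types below (cursor and primary/base) are built on this window
 * scaffolding: nv50_wndw_ctor() registers the DRM plane, and the plane's
 * nv50_wndw_func table routes the generic atomic hooks above to the channel
 * programming specific to each window type.
 */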
/******************************************************************************
 * Cursor plane
 *****************************************************************************/
#define nv50_curs(p) container_of((p), struct nv50_curs, wndw)

struct nv50_curs {
	struct nv50_wndw wndw;
	struct nvif_object chan;
};

static u32
nv50_curs_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0080, 0x00000000);
	return 0;
}
static void
nv50_curs_point(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_wr32(&curs->chan, 0x0084, (asyw->point.y << 16) | asyw->point.x);
}

static void
nv50_curs_prepare(struct nv50_wndw *wndw, struct nv50_head_atom *asyh,
		  struct nv50_wndw_atom *asyw)
{
	asyh->curs.handle = nv50_disp(wndw->plane.dev)->mast.base.vram.handle;
	asyh->curs.offset = asyw->image.offset;
	asyh->set.curs = asyh->curs.visible;
}

static void
nv50_curs_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->curs.visible = false;
}
static int
nv50_curs_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	int ret;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   true, true);
	asyh->curs.visible = asyw->state.visible;
	if (ret || !asyh->curs.visible)
		return ret;

	switch (asyw->state.fb->width) {
	case 32: asyh->curs.layout = 0; break;
	case 64: asyh->curs.layout = 1; break;
	default:
		return -EINVAL;
	}

	if (asyw->state.fb->width != asyw->state.fb->height)
		return -EINVAL;

	switch (asyw->state.fb->format->format) {
	case DRM_FORMAT_ARGB8888: asyh->curs.format = 1; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	return 0;
}

static void *
nv50_curs_dtor(struct nv50_wndw *wndw)
{
	struct nv50_curs *curs = nv50_curs(wndw);
	nvif_object_fini(&curs->chan);
	return curs;
}
static const u32
nv50_curs_format[] = {
	DRM_FORMAT_ARGB8888,
};

static const struct nv50_wndw_func
nv50_curs = {
	.dtor = nv50_curs_dtor,
	.acquire = nv50_curs_acquire,
	.release = nv50_curs_release,
	.prepare = nv50_curs_prepare,
	.point = nv50_curs_point,
	.update = nv50_curs_update,
};
static int
nv50_curs_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_curs **pcurs)
{
	static const struct nvif_mclass curses[] = {
		{ GK104_DISP_CURSOR, 0 },
		{ GF110_DISP_CURSOR, 0 },
		{ GT214_DISP_CURSOR, 0 },
		{ G82_DISP_CURSOR, 0 },
		{ NV50_DISP_CURSOR, 0 },
		{}
	};
	struct nv50_disp_cursor_v0 args = {
		.head = head->base.index,
	};
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_curs *curs;
	int cid, ret;

	cid = nvif_mclass(disp->disp, curses);
	if (cid < 0) {
		NV_ERROR(drm, "No supported cursor immediate class\n");
		return cid;
	}

	if (!(curs = *pcurs = kzalloc(sizeof(*curs), GFP_KERNEL)))
		return -ENOMEM;

	ret = nv50_wndw_ctor(&nv50_curs, drm->dev, DRM_PLANE_TYPE_CURSOR,
			     "curs", head->base.index, &disp->mast.base,
			     nv50_curs_format, ARRAY_SIZE(nv50_curs_format),
			     &curs->wndw);
	if (ret) {
		kfree(curs);
		return ret;
	}

	ret = nvif_object_init(disp->disp, 0, curses[cid].oclass, &args,
			       sizeof(args), &curs->chan);
	if (ret) {
		NV_ERROR(drm, "curs%04x allocation failed: %d\n",
			 curses[cid].oclass, ret);
		return ret;
	}

	return 0;
}
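/* nvif_mclass() returns the index of the first entry in the class list that
 * the display object actually implements; the list is ordered newest-first,
 * so a single probe path covers every cursor channel class from NV50 through
 * GK104.
 */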
/******************************************************************************
 * Primary plane
 *****************************************************************************/
#define nv50_base(p) container_of((p), struct nv50_base, wndw)

struct nv50_base {
	struct nv50_wndw wndw;
	struct nv50_sync chan;
	int id;
};

static int
nv50_base_notify(struct nvif_notify *notify)
{
	return NVIF_NOTIFY_KEEP;
}
static void
nv50_base_lut(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, asyw->lut.enable << 30);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_image_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 4))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}
static void
nv50_base_image_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	const s32 oclass = base->chan.base.base.user.oclass;
	u32 *push;
	if ((push = evo_wait(&base->chan, 10))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, (asyw->image.mode << 8) |
			       (asyw->image.interval << 4));
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, asyw->image.handle);
		if (oclass < G82_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20)
				       /* ... */);
			evo_data(push, (asyw->image.kind << 16) |
				       (asyw->image.format << 8));
		} else
		if (oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0800, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 20)
				       /* ... */);
			evo_data(push, asyw->image.format << 8);
		} else {
			evo_mthd(push, 0x0400, 5);
			evo_data(push, asyw->image.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, (asyw->image.h << 16) | asyw->image.w);
			evo_data(push, (asyw->image.layout << 24)
				       /* ... */);
			evo_data(push, asyw->image.format << 8);
		}
		evo_kick(push, &base->chan);
	}
}
static void
nv50_base_ntfy_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x00a4, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_ntfy_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 3))) {
		evo_mthd(push, 0x00a0, 2);
		evo_data(push, (asyw->ntfy.awaken << 30) | asyw->ntfy.offset);
		evo_data(push, asyw->ntfy.handle);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_sema_clr(struct nv50_wndw *wndw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 2))) {
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, &base->chan);
	}
}

static void
nv50_base_sema_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;
	if ((push = evo_wait(&base->chan, 5))) {
		evo_mthd(push, 0x0088, 4);
		evo_data(push, asyw->sema.offset);
		evo_data(push, asyw->sema.acquire);
		evo_data(push, asyw->sema.release);
		evo_data(push, asyw->sema.handle);
		evo_kick(push, &base->chan);
	}
}
static u32
nv50_base_update(struct nv50_wndw *wndw, u32 interlock)
{
	struct nv50_base *base = nv50_base(wndw);
	u32 *push;

	if (!(push = evo_wait(&base->chan, 2)))
		return 0;
	evo_mthd(push, 0x0080, 1);
	evo_data(push, interlock);
	evo_kick(push, &base->chan);

	if (base->chan.base.base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA)
		return interlock ? 2 << (base->id * 8) : 0;
	return interlock ? 2 << (base->id * 4) : 0;
}
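/* The value returned by update() is an interlock bitmask identifying this
 * base channel; the caller hands it to the core channel so the core update
 * can be synchronized against the flip, with pre-GF110 hardware using an
 * 8-bit field per channel and later hardware a 4-bit field.
 */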
static int
nv50_base_ntfy_wait_begun(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
{
	struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	if (nvif_msec(&drm->client.device, 2000ULL,
		u32 data = nouveau_bo_rd32(disp->sync, asyw->ntfy.offset / 4);
		if ((data & 0xc0000000) == 0x40000000)
			break;
		usleep_range(1, 2);
	) < 0)
		return -ETIMEDOUT;
	return 0;
}

static void
nv50_base_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	asyh->base.cpp = 0;
}
static int
nv50_base_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
		  struct nv50_head_atom *asyh)
{
	const struct drm_framebuffer *fb = asyw->state.fb;
	int ret;

	if (!fb->format->depth)
		return -EINVAL;

	ret = drm_plane_helper_check_state(&asyw->state, &asyw->clip,
					   DRM_PLANE_HELPER_NO_SCALING,
					   DRM_PLANE_HELPER_NO_SCALING,
					   false, true);
	if (ret)
		return ret;

	asyh->base.depth = fb->format->depth;
	asyh->base.cpp = fb->format->cpp[0];
	asyh->base.x = asyw->state.src.x1 >> 16;
	asyh->base.y = asyw->state.src.y1 >> 16;
	asyh->base.w = asyw->state.fb->width;
	asyh->base.h = asyw->state.fb->height;

	switch (fb->format->format) {
	case DRM_FORMAT_C8         : asyw->image.format = 0x1e; break;
	case DRM_FORMAT_RGB565     : asyw->image.format = 0xe8; break;
	case DRM_FORMAT_XRGB1555   :
	case DRM_FORMAT_ARGB1555   : asyw->image.format = 0xe9; break;
	case DRM_FORMAT_XRGB8888   :
	case DRM_FORMAT_ARGB8888   : asyw->image.format = 0xcf; break;
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_ABGR2101010: asyw->image.format = 0xd1; break;
	case DRM_FORMAT_XBGR8888   :
	case DRM_FORMAT_ABGR8888   : asyw->image.format = 0xd5; break;
	default:
		WARN_ON(1);
		return -EINVAL;
	}

	asyw->lut.enable = 1;
	asyw->set.image = true;
	return 0;
}
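/* The hex codes assigned to asyw->image.format above are EVO surface-format
 * enumerants consumed by the base channel (e.g. 0xcf for ARGB8888 and 0xe8
 * for RGB565, per the switch above), not DRM fourcc values, hence the
 * explicit translation.
 */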
static void *
nv50_base_dtor(struct nv50_wndw *wndw)
{
	struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
	struct nv50_base *base = nv50_base(wndw);
	nv50_dmac_destroy(&base->chan.base, disp->disp);
	return base;
}

static const u32
nv50_base_format[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_XBGR2101010,
	DRM_FORMAT_ABGR2101010,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ABGR8888,
};

static const struct nv50_wndw_func
nv50_base = {
	.dtor = nv50_base_dtor,
	.acquire = nv50_base_acquire,
	.release = nv50_base_release,
	.sema_set = nv50_base_sema_set,
	.sema_clr = nv50_base_sema_clr,
	.ntfy_set = nv50_base_ntfy_set,
	.ntfy_clr = nv50_base_ntfy_clr,
	.ntfy_wait_begun = nv50_base_ntfy_wait_begun,
	.image_set = nv50_base_image_set,
	.image_clr = nv50_base_image_clr,
	.lut = nv50_base_lut,
	.update = nv50_base_update,
};
static int
nv50_base_new(struct nouveau_drm *drm, struct nv50_head *head,
	      struct nv50_base **pbase)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_base *base;
	int ret;

	if (!(base = *pbase = kzalloc(sizeof(*base), GFP_KERNEL)))
		return -ENOMEM;
	base->id = head->base.index;
	base->wndw.ntfy = EVO_FLIP_NTFY0(base->id);
	base->wndw.sema = EVO_FLIP_SEM0(base->id);
	base->wndw.data = 0x00000000;

	ret = nv50_wndw_ctor(&nv50_base, drm->dev, DRM_PLANE_TYPE_PRIMARY,
			     "base", base->id, &base->chan.base,
			     nv50_base_format, ARRAY_SIZE(nv50_base_format),
			     &base->wndw);
	if (ret) {
		kfree(base);
		return ret;
	}

	ret = nv50_base_create(&drm->client.device, disp->disp, base->id,
			       disp->sync->bo.offset, &base->chan);
	if (ret)
		return ret;

	return nvif_notify_init(&base->chan.base.base.user, nv50_base_notify,
				false,
				NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT,
				&(struct nvif_notify_uevent_req) {},
				sizeof(struct nvif_notify_uevent_req),
				sizeof(struct nvif_notify_uevent_rep),
				&base->wndw.notify);
}
/******************************************************************************
 * Head
 *****************************************************************************/
static void
nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1);
		else
			evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1);
		evo_data(push, (asyh->procamp.sat.sin << 20) |
			       (asyh->procamp.sat.cos << 8));
		evo_kick(push, core);
	}
}
static void
nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1);
		else
		if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1);
		else
			evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1);
		evo_data(push, (asyh->dither.mode << 3) |
			       (asyh->dither.bits << 1) |
			       asyh->dither.enable);
		evo_kick(push, core);
	}
}
static void
nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0904 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}

static void
nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 bounds = 0;
	u32 *push;

	if (asyh->base.cpp) {
		switch (asyh->base.cpp) {
		case 8: bounds |= 0x00000500; break;
		case 4: bounds |= 0x00000300; break;
		case 2: bounds |= 0x00000100; break;
		case 1: bounds |= 0x00000000; break;
		default:
			WARN_ON(1);
			break;
		}
		bounds |= 0x00000001;
	}

	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0900 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1);
		evo_data(push, bounds);
		evo_kick(push, core);
	}
}
static void
nv50_head_curs_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 5))) {
		if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
				       (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
				       (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
			evo_data(push, asyh->curs.handle);
		} else {
			evo_mthd(push, 0x0480 + head->base.index * 0x300, 2);
			evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
				       (asyh->curs.format << 24));
			evo_data(push, asyh->curs.offset >> 8);
			evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
			evo_data(push, asyh->curs.handle);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_core_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 2))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
			evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
		else
			evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, core);
	}
}
static void
nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 9))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.kind << 16 |
				       asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
			/* EVO will complain with INVALID_STATE if we have an
			 * active cursor and (re)specify HeadSetContextDmaIso
			 * without also updating HeadSetOffsetCursor.
			 */
			asyh->set.curs = asyh->curs.visible;
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 20 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		} else {
			evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
			evo_data(push, asyh->core.offset >> 8);
			evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
			evo_data(push, (asyh->core.h << 16) | asyh->core.w);
			evo_data(push, asyh->core.layout << 24 |
				       (asyh->core.pitch >> 8) << 8 |
				       asyh->core.block);
			evo_data(push, asyh->core.format << 8);
			evo_data(push, asyh->core.handle);
			evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
			evo_data(push, (asyh->core.y << 16) | asyh->core.x);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_lut_clr(struct nv50_head *head)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 4))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 7))) {
		if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
		} else
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
			evo_data(push, asyh->lut.handle);
		} else {
			evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, asyh->lut.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
			evo_data(push, asyh->lut.handle);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	struct nv50_head_mode *m = &asyh->mode;
	u32 *push;
	if ((push = evo_wait(core, 14))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
			evo_data(push, 0x00800000 | m->clock);
			evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
			evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active << 16) | m->h.active);
			evo_data(push, (m->v.synce << 16) | m->h.synce);
			evo_data(push, (m->v.blanke << 16) | m->h.blanke);
			evo_data(push, (m->v.blanks << 16) | m->h.blanks);
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_data(push, asyh->mode.v.blankus);
			evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (m->v.active << 16) | m->h.active);
			evo_data(push, (m->v.synce << 16) | m->h.synce);
			evo_data(push, (m->v.blanke << 16) | m->h.blanke);
			evo_data(push, (m->v.blanks << 16) | m->h.blanks);
			evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
			evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
			evo_data(push, 0x00000000); /* ??? */
			evo_data(push, 0xffffff00);
			evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
			evo_data(push, m->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, m->clock * 1000);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
	u32 *push;
	if ((push = evo_wait(core, 10))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		} else {
			evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1);
			evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
			evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
			evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
		}
		evo_kick(push, core);
	}
}
static void
nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y)
{
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_lut_clr(head);
	if (asyh->clr.core && (!asyh->set.core || y))
		nv50_head_core_clr(head);
	if (asyh->clr.curs && (!asyh->set.curs || y))
		nv50_head_curs_clr(head);
}

static void
nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	if (asyh->set.view   ) nv50_head_view    (head, asyh);
	if (asyh->set.mode   ) nv50_head_mode    (head, asyh);
	if (asyh->set.core   ) nv50_head_lut_set (head, asyh);
	if (asyh->set.core   ) nv50_head_core_set(head, asyh);
	if (asyh->set.curs   ) nv50_head_curs_set(head, asyh);
	if (asyh->set.base   ) nv50_head_base    (head, asyh);
	if (asyh->set.ovly   ) nv50_head_ovly    (head, asyh);
	if (asyh->set.dither ) nv50_head_dither  (head, asyh);
	if (asyh->set.procamp) nv50_head_procamp (head, asyh);
}
static void
nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
			       struct nv50_head_atom *asyh,
			       struct nouveau_conn_atom *asyc)
{
	const int vib = asyc->procamp.color_vibrance - 100;
	const int hue = asyc->procamp.vibrant_hue - 90;
	const int adj = (vib > 0) ? 50 : 0;
	asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
	asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
	asyh->set.procamp = true;
}
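/* For example (illustrative): the neutral settings implied by the biases
 * above, color_vibrance = 100 and vibrant_hue = 90, give vib = 0 and hue = 0,
 * so both 12-bit fields (masked with 0xfff) are programmed to zero.
 */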
static void
nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
			      struct nv50_head_atom *asyh,
			      struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	u32 mode = 0x00;

	if (asyc->dither.mode == DITHERING_MODE_AUTO) {
		if (asyh->base.depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = asyc->dither.mode;
	}

	if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= asyc->dither.depth;
	}

	asyh->dither.enable = mode;
	asyh->dither.bits = mode >> 1;
	asyh->dither.mode = mode >> 3;
	asyh->set.dither = true;
}
static void
nv50_head_atomic_check_view(struct nv50_head_atom *armh,
			    struct nv50_head_atom *asyh,
			    struct nouveau_conn_atom *asyc)
{
	struct drm_connector *connector = asyc->state.connector;
	struct drm_display_mode *omode = &asyh->state.adjusted_mode;
	struct drm_display_mode *umode = &asyh->state.mode;
	int mode = asyc->scaler.mode;
	struct edid *edid;

	if (connector->edid_blob_ptr)
		edid = (struct edid *)connector->edid_blob_ptr->data;
	else
		edid = NULL;

	if (!asyc->scaler.full) {
		if (mode == DRM_MODE_SCALE_NONE)
			omode = umode;
	} else {
		/* Non-EDID LVDS/eDP mode. */
		mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	asyh->view.iW = umode->hdisplay;
	asyh->view.iH = umode->vdisplay;
	asyh->view.oW = omode->hdisplay;
	asyh->view.oH = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		asyh->view.oH *= 2;

	/* Add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
	    (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
	     drm_detect_hdmi_monitor(edid)))) {
		u32 bX = asyc->scaler.underscan.hborder;
		u32 bY = asyc->scaler.underscan.vborder;
		u32 r = (asyh->view.oH << 19) / asyh->view.oW;

		if (bX) {
			asyh->view.oW -= (bX * 2);
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		} else {
			asyh->view.oW -= (asyh->view.oW >> 4) + 32;
			if (bY) asyh->view.oH -= (bY * 2);
			else    asyh->view.oH  = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
	}

	/* Handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation.
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
		asyh->view.oH = min((u16)umode->vdisplay, asyh->view.oH);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (asyh->view.oH < asyh->view.oW) {
			u32 r = (asyh->view.iW << 19) / asyh->view.iH;
			asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
		} else {
			u32 r = (asyh->view.iH << 19) / asyh->view.iW;
			asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	asyh->set.view = true;
}
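/* The ratio math above works in fixed point with 19 fractional bits:
 * r = (iH << 19) / iW is the aspect ratio scaled by 2^19, and
 * ((oW * r) + (r / 2)) >> 19 multiplies by that ratio with round-to-nearest.
 * E.g. (illustrative) for a 1920x1080 input, r = (1080 << 19) / 1920 = 294912,
 * and an output width of 1280 maps to ((1280 * 294912) + 147456) >> 19 = 720.
 */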
static void
nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
{
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hbackp = mode->htotal - mode->hsync_end;
	u32 vbackp = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	u32 hfrontp = mode->hsync_start - mode->hdisplay;
	u32 vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	u32 blankus;
	struct nv50_head_mode *m = &asyh->mode;

	m->h.active = mode->htotal;
	m->h.synce = mode->hsync_end - mode->hsync_start - 1;
	m->h.blanke = m->h.synce + hbackp;
	m->h.blanks = mode->htotal - hfrontp - 1;

	m->v.active = mode->vtotal * vscan / ilace;
	m->v.synce = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	m->v.blanke = m->v.synce + vbackp;
	m->v.blanks = m->v.active - vfrontp - 1;

	/*XXX: Safe underestimate, even "0" works */
	blankus = (m->v.active - mode->vdisplay - 2) * m->h.active;
	blankus *= 1000;
	blankus /= mode->clock;
	m->v.blankus = blankus;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		m->v.blank2e = m->v.active + m->v.synce + vbackp;
		m->v.blank2s = m->v.blank2e + (mode->vdisplay * vscan / ilace);
		m->v.active = (m->v.active * 2) + 1;
		m->interlace = true;
	} else {
		m->v.blank2e = 0;
		m->v.blank2s = 1;
		m->interlace = false;
	}
	m->clock = mode->clock;

	drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V);
	asyh->set.mode = true;
}
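/* Worked example (illustrative): for a CEA 1920x1080@60 mode (htotal 2200,
 * vtotal 1125, clock 148500 kHz), blankus = (1125 - 1080 - 2) * 2200 * 1000
 * / 148500, roughly 637 microseconds of usable vblank time.
 */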
static int
nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	struct nouveau_conn_atom *asyc = NULL;
	struct drm_connector_state *conns;
	struct drm_connector *conn;
	int i;

	NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
	if (asyh->state.active) {
		for_each_connector_in_state(asyh->state.state, conn, conns, i) {
			if (conns->crtc == crtc) {
				asyc = nouveau_conn_atom(conns);
				break;
			}
		}

		if (armh->state.active) {
			if (asyc) {
				if (asyh->state.mode_changed)
					asyc->set.scaler = true;
				if (armh->base.depth != asyh->base.depth)
					asyc->set.dither = true;
			}
		} else {
			if (asyc)
				asyc->set.mask = ~0;
			asyh->set.mask = ~0;
		}

		if (asyh->state.mode_changed)
			nv50_head_atomic_check_mode(head, asyh);

		if (asyc) {
			if (asyc->set.scaler)
				nv50_head_atomic_check_view(armh, asyh, asyc);
			if (asyc->set.dither)
				nv50_head_atomic_check_dither(armh, asyh, asyc);
			if (asyc->set.procamp)
				nv50_head_atomic_check_procamp(armh, asyh, asyc);
		}

		if ((asyh->core.visible = (asyh->base.cpp != 0))) {
			asyh->core.x = asyh->base.x;
			asyh->core.y = asyh->base.y;
			asyh->core.w = asyh->base.w;
			asyh->core.h = asyh->base.h;
		} else
		if ((asyh->core.visible = asyh->curs.visible)) {
			/*XXX: We need to either find some way of having the
			 *     primary base layer appear black, while still
			 *     being able to display the other layers, or we
			 *     need to allocate a dummy black surface here.
			 */
			asyh->core.x = 0;
			asyh->core.y = 0;
			asyh->core.w = asyh->state.mode.hdisplay;
			asyh->core.h = asyh->state.mode.vdisplay;
		}
		asyh->core.handle = disp->mast.base.vram.handle;
		asyh->core.offset = 0;
		asyh->core.format = 0xcf;
		asyh->core.kind = 0;
		asyh->core.layout = 1;
		asyh->core.block = 0;
		asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
		asyh->lut.handle = disp->mast.base.vram.handle;
		asyh->lut.offset = head->base.lut.nvbo->bo.offset;
		asyh->set.base = armh->base.cpp != asyh->base.cpp;
		asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
	} else {
		asyh->core.visible = false;
		asyh->curs.visible = false;
		asyh->base.cpp = 0;
		asyh->ovly.cpp = 0;
	}

	if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
		if (asyh->core.visible) {
			if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
				asyh->set.core = true;
		} else
		if (armh->core.visible) {
			asyh->clr.core = true;
		}

		if (asyh->curs.visible) {
			if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
				asyh->set.curs = true;
		} else
		if (armh->curs.visible) {
			asyh->clr.curs = true;
		}
	} else {
		asyh->clr.core = armh->core.visible;
		asyh->clr.curs = armh->curs.visible;
		asyh->set.core = asyh->core.visible;
		asyh->set.curs = asyh->curs.visible;
	}

	if (asyh->clr.mask || asyh->set.mask)
		nv50_atom(asyh->state.state)->lock_core = true;
	return 0;
}
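/* lock_core flags the atomic state so that the commit path serializes on the
 * single core channel shared by all heads whenever this head has anything to
 * set or clear, since head state is ultimately programmed through that one
 * channel.
 */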
static void
nv50_head_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		u16 r = nv_crtc->lut.r[i] >> 2;
		u16 g = nv_crtc->lut.g[i] >> 2;
		u16 b = nv_crtc->lut.b[i] >> 2;

		if (disp->disp->oclass < GF110_DISP) {
			writew(r + 0x0000, lut + (i * 0x08) + 0);
			writew(g + 0x0000, lut + (i * 0x08) + 2);
			writew(b + 0x0000, lut + (i * 0x08) + 4);
		} else {
			writew(r + 0x6000, lut + (i * 0x20) + 0);
			writew(g + 0x6000, lut + (i * 0x20) + 2);
			writew(b + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}
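/* The two branches above reflect different LUT memory layouts: pre-GF110
 * hardware uses 8-byte entries of packed 14-bit components, while GF110 and
 * later use 32-byte entries with a +0x6000 bias applied to each component.
 */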
static int
nv50_head_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	WARN_ON(1);
	return 0;
}

static const struct drm_crtc_helper_funcs
nv50_head_help = {
	.mode_set_base_atomic = nv50_head_mode_set_base_atomic,
	.load_lut = nv50_head_lut_load,
	.atomic_check = nv50_head_atomic_check,
};

/* This is identical to the version in the atomic helpers, except that
 * it supports non-vblanked ("async") page flips.
 */
static int
nv50_head_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		    struct drm_pending_vblank_event *event, u32 flags)
{
	struct drm_plane *plane = crtc->primary;
	struct drm_atomic_state *state;
	struct drm_plane_state *plane_state;
	struct drm_crtc_state *crtc_state;
	int ret = 0;

	state = drm_atomic_state_alloc(plane->dev);
	if (!state)
		return -ENOMEM;

	state->acquire_ctx = drm_modeset_legacy_acquire_ctx(crtc);
retry:
	crtc_state = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(crtc_state)) {
		ret = PTR_ERR(crtc_state);
		goto fail;
	}
	crtc_state->event = event;

	plane_state = drm_atomic_get_plane_state(state, plane);
	if (IS_ERR(plane_state)) {
		ret = PTR_ERR(plane_state);
		goto fail;
	}

	ret = drm_atomic_set_crtc_for_plane(plane_state, crtc);
	if (ret != 0)
		goto fail;
	drm_atomic_set_fb_for_plane(plane_state, fb);

	/* Make sure we don't accidentally do a full modeset. */
	state->allow_modeset = false;
	if (!crtc_state->active) {
		DRM_DEBUG_ATOMIC("[CRTC:%d] disabled, rejecting legacy flip\n",
				 crtc->base.id);
		ret = -EINVAL;
		goto fail;
	}

	if (flags & DRM_MODE_PAGE_FLIP_ASYNC)
		nv50_wndw_atom(plane_state)->interval = 0;

	ret = drm_atomic_nonblocking_commit(state);
fail:
	if (ret == -EDEADLK)
		goto backoff;

	drm_atomic_state_put(state);
	return ret;

backoff:
	drm_atomic_state_clear(state);
	drm_atomic_legacy_backoff(state);

	/*
	 * Someone might have exchanged the framebuffer while we dropped locks
	 * in the backoff code. We need to fix up the fb refcount tracking the
	 * current state.
	 */
	plane->old_fb = plane->fb;

	goto retry;
}
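
/* Legacy gamma_set entry point: cache the ramp in the nouveau_crtc and
 * push it to the hardware immediately via nv50_head_lut_load().
 */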
static int
nv50_head_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 i;

	for (i = 0; i < size; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nv50_head_lut_load(crtc);
	return 0;
}

static void
nv50_head_atomic_destroy_state(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	struct nv50_head_atom *asyh = nv50_head_atom(state);
	__drm_atomic_helper_crtc_destroy_state(&asyh->state);
	kfree(asyh);
}
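
/* Duplicate the subclassed CRTC state.  Every hardware-specific block
 * (viewport, mode, lut, core/cursor/base/overlay channel state, dither,
 * procamp) is copied from the currently-armed state, and the set/clr
 * masks are zeroed so the new state starts with no pending updates.
 */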
static struct drm_crtc_state *
nv50_head_atomic_duplicate_state(struct drm_crtc *crtc)
{
	struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
	struct nv50_head_atom *asyh;
	if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL)))
		return NULL;
	__drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state);
	asyh->view = armh->view;
	asyh->mode = armh->mode;
	asyh->lut = armh->lut;
	asyh->core = armh->core;
	asyh->curs = armh->curs;
	asyh->base = armh->base;
	asyh->ovly = armh->ovly;
	asyh->dither = armh->dither;
	asyh->procamp = armh->procamp;
	asyh->clr.mask = 0;
	asyh->set.mask = 0;
	return &asyh->state;
}

static void
__drm_atomic_helper_crtc_reset(struct drm_crtc *crtc,
			       struct drm_crtc_state *state)
{
	if (crtc->state)
		crtc->funcs->atomic_destroy_state(crtc, crtc->state);
	crtc->state = state;
	crtc->state->crtc = crtc;
}

static void
nv50_head_reset(struct drm_crtc *crtc)
{
	struct nv50_head_atom *asyh;

	if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL))))
		return;

	__drm_atomic_helper_crtc_reset(crtc, &asyh->state);
}
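
/* Tear down a head: destroy its overlay DMA and immediate channels, then
 * release the gamma LUT buffer before cleaning up the DRM CRTC itself.
 */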
static void
nv50_head_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_funcs
nv50_head_func = {
	.reset = nv50_head_reset,
	.gamma_set = nv50_head_gamma_set,
	.destroy = nv50_head_destroy,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = nv50_head_page_flip,
	.set_property = drm_atomic_helper_crtc_set_property,
	.atomic_duplicate_state = nv50_head_atomic_duplicate_state,
	.atomic_destroy_state = nv50_head_atomic_destroy_state,
};
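
/* Create one CRTC per hardware head, with the base window as primary
 * plane and the cursor window as cursor plane.  Also allocates the
 * 8KiB gamma LUT buffer and the per-head overlay/immediate channels.
 */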
static int
nv50_head_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->client.device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct nv50_base *base;
	struct nv50_curs *curs;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	ret = nv50_base_new(drm, head, &base);
	if (ret == 0)
		ret = nv50_curs_new(drm, head, &curs);
	if (ret) {
		kfree(head);
		return ret;
	}

	crtc = &head->base.base;
	drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane,
				  &curs->wndw.plane, &nv50_head_func,
				  "head-%d", head->base.index);
	drm_crtc_helper_add(crtc, &nv50_head_help);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(&drm->client, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_head_destroy(crtc);
	return ret;
}

/******************************************************************************
 * Output path helpers
 *****************************************************************************/
static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);

	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	if (!native_mode)
		return 0;

	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Force use of scaler for non-EDID modes. */
			if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		mode = native_mode;
	}

	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}

	return 0;
}

static int
nv50_outp_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nouveau_connector *nv_connector =
		nouveau_connector(conn_state->connector);
	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   nv_connector->native_mode);
}

/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nv50_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = 1,
		.pwr.data  = 1,
		.pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
			      mode != DRM_MODE_DPMS_OFF),
		.pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
			      mode != DRM_MODE_DPMS_OFF),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

static void
nv50_dac_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}

static void
nv50_dac_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u32 *push;

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}

static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.dpms = nv50_dac_dpms,
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_dac_enable,
	.disable = nv50_dac_disable,
	.detect = nv50_dac_detect
};

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};

static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Audio
 *****************************************************************************/
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
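
/* Pass the connector's ELD (EDID-Like Data) to the firmware so the HDA
 * codec can see the sink's audio capabilities.  Only the valid portion
 * of the ELD (drm_eld_size()) is sent along with the method header.
 */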
static void
nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}

/******************************************************************************
 * HDMI
 *****************************************************************************/
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
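
/* Enable HDMI support on the SOR.  max_ac_packet is the horizontal
 * blanking budget left after the 56-symbol rekey window and an
 * 18-symbol margin (both constants shared with the binary driver and
 * tegra), expressed in 32-symbol units.
 */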
static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv50_audio_enable(encoder, mode);
}

/******************************************************************************
 * MST
 *****************************************************************************/
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)

struct nv50_mstm {
	struct nouveau_encoder *outp;

	struct drm_dp_mst_topology_mgr mgr;
	struct nv50_msto *msto[4];

	bool modified;
};

struct nv50_mstc {
	struct nv50_mstm *mstm;
	struct drm_dp_mst_port *port;
	struct drm_connector connector;

	struct drm_display_mode *native;
	struct edid *edid;

	int pbn;
};

struct nv50_msto {
	struct drm_encoder encoder;

	struct nv50_head *head;
	struct nv50_mstc *mstc;
	bool disabled;
};
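
/* Look up the topology manager payload that matches this output's VCPI
 * (Virtual Channel Payload Identifier).  Returns NULL when the VCPI has
 * no allocated timeslots, which callers use to decide whether the
 * payload still needs tearing down.
 */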
static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;

	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}

	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}

	return NULL;
}

static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
		drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
	if (msto->disabled) {
		msto->mstc = NULL;
		msto->head = NULL;
		msto->disabled = false;
	}
}

static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht  = mstm->outp->dcb->hasht,
		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
			       (0x0100 << msto->head->base.index),
	};

	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port && mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}

	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);
	nvif_mthd(&drm->display->disp, 0, &args, sizeof(args));
}
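
/* Validate an MST stream: convert the mode's pixel clock and the
 * connector bpc into a PBN (Payload Bandwidth Number), and check that
 * enough VC timeslots are available before falling through to the
 * common view (scaling) check.
 */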
static int
nv50_msto_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
	struct nv50_mstm *mstm = mstc->mstm;
	int bpp = conn_state->connector->display_info.bpc * 3;
	int slots;

	mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);

	slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
	if (slots < 0)
		return slots;

	return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					   mstc->native);
}

static void
nv50_msto_enable(struct drm_encoder *encoder)
{
	struct nv50_head *head = nv50_head(encoder->crtc);
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = NULL;
	struct nv50_mstm *mstm = NULL;
	struct drm_connector *connector;
	u8 proto, depth;
	int slots;
	bool r;

	drm_for_each_connector(connector, encoder->dev) {
		if (connector->state->best_encoder == &msto->encoder) {
			mstc = nv50_mstc(connector);
			mstm = mstc->mstm;
			break;
		}
	}

	if (WARN_ON(!mstc))
		return;

	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, &slots);
	WARN_ON(!r);

	if (mstm->outp->dcb->sorconf.link & 1)
		proto = 0x8;
	else
		proto = 0x9;

	switch (mstc->connector.display_info.bpc) {
	case  6: depth = 0x2; break;
	case  8: depth = 0x5; break;
	case 10:
	default: depth = 0x6; break;
	}

	mstm->outp->update(mstm->outp, head->base.index,
			   &head->base.base.state->adjusted_mode, proto, depth);

	msto->head = head;
	msto->mstc = mstc;
	mstm->modified = true;
}

static void
nv50_msto_disable(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	if (mstc->port)
		drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);

	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
	mstm->modified = true;
	msto->disabled = true;
}

static const struct drm_encoder_helper_funcs
nv50_msto_help = {
	.disable = nv50_msto_disable,
	.enable = nv50_msto_enable,
	.atomic_check = nv50_msto_atomic_check,
};

static void
nv50_msto_destroy(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	drm_encoder_cleanup(&msto->encoder);
	kfree(msto);
}

static const struct drm_encoder_funcs
nv50_msto = {
	.destroy = nv50_msto_destroy,
};

static int
nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
	      struct nv50_msto **pmsto)
{
	struct nv50_msto *msto;
	int ret;

	if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
		return -ENOMEM;

	ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
			       DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
	if (ret) {
		kfree(*pmsto);
		*pmsto = NULL;
		return ret;
	}

	drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
	msto->encoder.possible_crtcs = heads;
	return 0;
}
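
/* MST connectors don't have a fixed encoder; pick the pre-created MST
 * encoder that corresponds to the head this connector state is using.
 */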
static struct drm_encoder *
nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
			      struct drm_connector_state *connector_state)
{
	struct nv50_head *head = nv50_head(connector_state->crtc);
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (mstc->port) {
		struct nv50_mstm *mstm = mstc->mstm;
		return &mstm->msto[head->base.index]->encoder;
	}
	return NULL;
}

static struct drm_encoder *
nv50_mstc_best_encoder(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (mstc->port) {
		struct nv50_mstm *mstm = mstc->mstm;
		return &mstm->msto[0]->encoder;
	}
	return NULL;
}

static enum drm_mode_status
nv50_mstc_mode_valid(struct drm_connector *connector,
		     struct drm_display_mode *mode)
{
	return MODE_OK;
}

static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret = 0;

	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
	drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid);
	if (mstc->edid) {
		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
		drm_edid_to_eld(&mstc->connector, mstc->edid);
	}

	if (!mstc->connector.display_info.bpc)
		mstc->connector.display_info.bpc = 8;

	if (mstc->native)
		drm_mode_destroy(mstc->connector.dev, mstc->native);
	mstc->native = nouveau_conn_native_mode(&mstc->connector);
	return ret;
}

static const struct drm_connector_helper_funcs
nv50_mstc_help = {
	.get_modes = nv50_mstc_get_modes,
	.mode_valid = nv50_mstc_mode_valid,
	.best_encoder = nv50_mstc_best_encoder,
	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
};

static enum drm_connector_status
nv50_mstc_detect(struct drm_connector *connector, bool force)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	if (!mstc->port)
		return connector_status_disconnected;
	return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
}

static void
nv50_mstc_destroy(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	drm_connector_cleanup(&mstc->connector);
	kfree(mstc);
}

static const struct drm_connector_funcs
nv50_mstc = {
	.dpms = drm_atomic_helper_connector_dpms,
	.reset = nouveau_conn_reset,
	.detect = nv50_mstc_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = drm_atomic_helper_connector_set_property,
	.destroy = nv50_mstc_destroy,
	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
	.atomic_set_property = nouveau_conn_atomic_set_property,
	.atomic_get_property = nouveau_conn_atomic_get_property,
};

static int
nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
	      const char *path, struct nv50_mstc **pmstc)
{
	struct drm_device *dev = mstm->outp->base.base.dev;
	struct nv50_mstc *mstc;
	int ret, i;

	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
		return -ENOMEM;
	mstc->mstm = mstm;
	mstc->port = port;

	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		kfree(*pmstc);
		*pmstc = NULL;
		return ret;
	}

	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);

	mstc->connector.funcs->reset(&mstc->connector);
	nouveau_conn_attach_properties(&mstc->connector);

	for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto; i++)
		drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);

	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
	drm_mode_connector_set_path_property(&mstc->connector, path);
	return 0;
}

static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
	ret = drm_dp_check_act_status(&mstm->mgr);

	ret = drm_dp_update_payload_part2(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_cleanup(msto);
		}
	}

	mstm->modified = false;
}

static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
	ret = drm_dp_update_payload_part1(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_prepare(msto);
		}
	}
}

static void
nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
}

static void
nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
			    struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nv50_mstc *mstc = nv50_mstc(connector);

	drm_connector_unregister(&mstc->connector);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);
	mstc->port = NULL;
	drm_modeset_unlock_all(drm->dev);

	drm_connector_unreference(&mstc->connector);
}

static void
nv50_mstm_register_connector(struct drm_connector *connector)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);

	drm_modeset_lock_all(drm->dev);
	drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);
	drm_modeset_unlock_all(drm->dev);

	drm_connector_register(connector);
}

static struct drm_connector *
nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
			struct drm_dp_mst_port *port, const char *path)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	struct nv50_mstc *mstc;
	int ret;

	ret = nv50_mstc_new(mstm, port, path, &mstc);
	if (ret) {
		if (mstc)
			mstc->connector.funcs->destroy(&mstc->connector);
		return NULL;
	}

	return &mstc->connector;
}

static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
	.add_connector = nv50_mstm_add_connector,
	.register_connector = nv50_mstm_register_connector,
	.destroy_connector = nv50_mstm_destroy_connector,
	.hotplug = nv50_mstm_hotplug,
};

void
nv50_mstm_service(struct nv50_mstm *mstm)
{
	struct drm_dp_aux *aux = mstm->mgr.aux;
	bool handled = true;
	int ret;
	u8 esi[8] = {};

	while (handled) {
		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (ret != 8) {
			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
			return;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
	}
}

void
nv50_mstm_remove(struct nv50_mstm *mstm)
{
	if (mstm)
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
}
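
/* Flip the sink in or out of MST mode.  For DP 1.2+ sinks this updates
 * the DPCD MSTM_CTRL register first, then notifies the firmware of the
 * new link state via the SOR_DP_MST_LINK method.
 */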
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp;
	int ret;

	if (dpcd >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
		if (ret < 0)
			return ret;

		dpcd &= ~DP_MST_EN;
		if (state)
			dpcd |= DP_MST_EN;

		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
		if (ret < 0)
			return ret;
	}

	return nvif_mthd(disp, 0, &args, sizeof(args));
}

int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	int ret, state = 0;

	if (!mstm)
		return 0;

	if (dpcd[0] >= 0x12) {
		ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			return ret;

		if (!(dpcd[1] & DP_MST_CAP))
			dpcd[0] = 0x11;
		else
			state = allow;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], state);
	if (ret)
		return ret;

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
	if (ret)
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return mstm->mgr.mst_state;
}

static void
nv50_mstm_fini(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
}

static void
nv50_mstm_init(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_resume(&mstm->mgr);
}

static void
nv50_mstm_del(struct nv50_mstm **pmstm)
{
	struct nv50_mstm *mstm = *pmstm;
	if (mstm) {
		kfree(*pmstm);
		*pmstm = NULL;
	}
}

static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct nv50_mstm **pmstm)
{
	const int max_payloads = hweight8(outp->dcb->heads);
	struct drm_device *dev = outp->base.base.dev;
	struct nv50_mstm *mstm;
	int ret, i;
	u8 dpcd;

	/* This is a workaround for some monitors not functioning
	 * correctly in MST mode on initial module load.  I think
	 * some bad interaction with the VBIOS may be responsible.
	 *
	 * A good ol' off and on again seems to work here ;)
	 */
	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
	if (ret >= 0 && dpcd >= 0x12)
		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);

	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
		return -ENOMEM;
	mstm->outp = outp;
	mstm->mgr.cbs = &nv50_mstm;

	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
					   max_payloads, conn_base_id);
	if (ret)
		return ret;

	for (i = 0; i < max_payloads; i++) {
		ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
				    i, &mstm->msto[i]);
		if (ret)
			return ret;
	}

	return 0;
}

/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
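
/* Program the SOR control method for a head.  A NULL mode detaches the
 * head from the OR; otherwise the protocol and depth are merged into
 * nv_encoder->ctrl together with the head mask.  Pre-GF110 cores encode
 * sync polarity and depth in the control word itself; newer cores use a
 * separate per-head method (0x0404) for sync/depth.
 */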
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
		struct drm_display_mode *mode, u8 proto, u8 depth)
{
	struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base;
	u32 *push;

	if (!mode) {
		nv_encoder->ctrl &= ~BIT(head);
		if (!(nv_encoder->ctrl & 0x0000000f))
			nv_encoder->ctrl = 0;
	} else {
		nv_encoder->ctrl |= proto << 8;
		nv_encoder->ctrl |= BIT(head);
	}

	if ((push = evo_wait(core, 6))) {
		if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
			if (mode) {
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					nv_encoder->ctrl |= 0x00001000;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					nv_encoder->ctrl |= 0x00002000;
				nv_encoder->ctrl |= depth << 16;
			}
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
		} else {
			if (mode) {
				u32 magic = 0x31ec6000 | (head << 25);
				u32 syncs = 0x00000001;
				if (mode->flags & DRM_MODE_FLAG_NHSYNC)
					syncs |= 0x00000008;
				if (mode->flags & DRM_MODE_FLAG_NVSYNC)
					syncs |= 0x00000010;
				if (mode->flags & DRM_MODE_FLAG_INTERLACE)
					magic |= 0x00000001;

				evo_mthd(push, 0x0404 + (head * 0x300), 2);
				evo_data(push, syncs | (depth << 6));
				evo_data(push, magic);
			}
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		}
		evo_data(push, nv_encoder->ctrl);
		evo_kick(push, core);
	}
}

static void
nv50_sor_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		struct nvkm_i2c_aux *aux = nv_encoder->aux;
		u8 pwr;

		if (aux) {
			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
			if (ret == 0) {
				pwr &= ~DP_SET_POWER_MASK;
				pwr |=  DP_SET_POWER_D3;
				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
			}
		}

		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
		nv50_audio_disable(encoder, nv_crtc);
		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
	}
}

static void
nv50_sor_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 *  - Need to (i.e. rate > 165MHz)
			 *  - DCB says we can
			 *  - Not an HDMI monitor, since there's no dual-link
			 *    on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_enable(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		if (nv_connector->base.display_info.bpc == 6)
			depth = 0x2;
		else
		if (nv_connector->base.display_info.bpc == 8)
			depth = 0x5;
		else
			depth = 0x6;

		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;

		nv50_audio_enable(encoder, mode);
		break;
	default:
		BUG();
		break;
	}

	nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth);
}

static const struct drm_encoder_helper_funcs
nv50_sor_help = {
	.dpms = nv50_sor_dpms,
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_sor_enable,
	.disable = nv50_sor_disable,
};

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	nv50_mstm_del(&nv_encoder->dp.mstm);
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};

static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->update = nv50_sor_update;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_help);

	drm_mode_connector_attach_encoder(connector, encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			nv_encoder->i2c = &aux->i2c;
			nv_encoder->aux = aux;
		}

		/*TODO: Use DP Info Table to check for support. */
		if (nv50_disp(encoder->dev)->disp->oclass >= GF110_DISP) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
					    nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			if (ret)
				return ret;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return 0;
}

/******************************************************************************
 * PIOR
 *****************************************************************************/
static void
nv50_pior_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_pior_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
		.pwr.type = nv_encoder->dcb->type,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
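
/* PIOR paths drive external TMDS/DP encoders, and the external link
 * appears to run at twice the pixel clock, hence the adjusted_mode
 * clock doubling after the common output path checks.
 */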
static int
nv50_pior_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
	if (ret)
		return ret;
	crtc_state->adjusted_mode.clock *= 2;
	return 0;
}

static void
nv50_pior_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}

static void
nv50_pior_enable(struct drm_encoder *encoder)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
	u8 owner = 1 << nv_crtc->index;
	u8 proto, depth;
	u32 *push;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_connector->base.display_info.bpc) {
	case 10: depth = 0x6; break;
	case  8: depth = 0x5; break;
	case  6: depth = 0x2; break;
	default: depth = 0x0; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		BUG();
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 ctrl = (depth << 16) | (proto << 8) | owner;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				ctrl |= 0x00001000;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				ctrl |= 0x00002000;
			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
			evo_data(push, ctrl);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

static const struct drm_encoder_helper_funcs
nv50_pior_help = {
	.dpms = nv50_pior_dpms,
	.atomic_check = nv50_pior_atomic_check,
	.enable = nv50_pior_enable,
	.disable = nv50_pior_disable,
};

static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};

static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc  = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc  = aux ? &aux->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_help);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Atomic
 *****************************************************************************/

static void
nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock)
{
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_dmac *core = &disp->mast.base;
	struct nv50_mstm *mstm;
	struct drm_encoder *encoder;
	u32 *push;

	NV_ATOMIC(drm, "commit core %08x\n", interlock);

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_prepare(mstm);
		}
	}

	if ((push = evo_wait(core, 5))) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, interlock);
		evo_data(push, 0x00000000);
		nouveau_bo_wr32(disp->sync, 0, 0x00000000);
		evo_kick(push, core);
		if (nvif_msec(&drm->client.device, 2000ULL,
			if (nouveau_bo_rd32(disp->sync, 0))
				break;
		) < 0)
			NV_ERROR(drm, "EVO timeout\n");
	}

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_cleanup(mstm);
		}
	}
}
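
/* The main atomic commit sequence.  Order matters here: heads, planes
 * and output paths are disabled first (flushing through the core
 * channel where an interlock is required), output paths and heads are
 * then re-enabled, planes are updated, and finally the core update is
 * flushed and we wait for each window's notifier to signal completion.
 */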
static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
	struct drm_device *dev = state->dev;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;
	u32 interlock_core = 0;
	u32 interlock_chan = 0;
	int i;

	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
	drm_atomic_helper_wait_for_fences(dev, state, false);
	drm_atomic_helper_wait_for_dependencies(state);
	drm_atomic_helper_update_legacy_modeset_state(dev, state);

	if (atom->lock_core)
		mutex_lock(&disp->mutex);

	/* Disable head(s). */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
			  asyh->clr.mask, asyh->set.mask);

		if (asyh->clr.mask) {
			nv50_head_flush_clr(head, asyh, atom->flush_disable);
			interlock_core |= 1;
		}
	}

	/* Disable plane(s). */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
			  asyw->clr.mask, asyw->set.mask);
		if (!asyw->clr.mask)
			continue;

		interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core,
						      atom->flush_disable,
						      asyw);
	}

	/* Disable output path(s). */
	list_for_each_entry(outp, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
			  outp->clr.mask, outp->set.mask);

		if (outp->clr.mask) {
			help->disable(encoder);
			interlock_core |= 1;
			if (outp->flush_disable) {
				nv50_disp_atomic_commit_core(drm, interlock_chan);
				interlock_core = 0;
				interlock_chan = 0;
			}
		}
	}

	/* Flush disable. */
	if (interlock_core) {
		if (atom->flush_disable) {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
			interlock_core = 0;
			interlock_chan = 0;
		}
	}

	/* Update output path(s). */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
			  outp->set.mask, outp->clr.mask);

		if (outp->set.mask) {
			help->enable(encoder);
			interlock_core = 1;
		}

		list_del(&outp->head);
		kfree(outp);
	}

	/* Update head(s). */
	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(crtc->state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
			  asyh->set.mask, asyh->clr.mask);

		if (asyh->set.mask) {
			nv50_head_flush_set(head, asyh);
			interlock_core = 1;
		}
	}

	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		if (crtc->state->event)
			drm_crtc_vblank_get(crtc);
	}

	/* Update plane(s). */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
			  asyw->set.mask, asyw->clr.mask);
		if ( !asyw->set.mask &&
		    (!asyw->clr.mask || atom->flush_disable))
			continue;

		interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw);
	}

	/* Flush update. */
	if (interlock_core) {
		if (!interlock_chan && atom->state.legacy_cursor_update) {
			u32 *push = evo_wait(&disp->mast, 2);
			if (push) {
				evo_mthd(push, 0x0080, 1);
				evo_data(push, 0x00000000);
				evo_kick(push, &disp->mast);
			}
		} else {
			nv50_disp_atomic_commit_core(drm, interlock_chan);
		}
	}

	if (atom->lock_core)
		mutex_unlock(&disp->mutex);

	/* Wait for HW to signal completion. */
	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane->state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		int ret = nv50_wndw_wait_armed(wndw, asyw);
		if (ret)
			NV_ERROR(drm, "%s: timeout\n", plane->name);
	}

	for_each_crtc_in_state(state, crtc, crtc_state, i) {
		if (crtc->state->event) {
			unsigned long flags;
			/* Get correct count/ts if racing with vblank irq */
			drm_accurate_vblank_count(crtc);
			spin_lock_irqsave(&crtc->dev->event_lock, flags);
			drm_crtc_send_vblank_event(crtc, crtc->state->event);
			spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
			crtc->state->event = NULL;
			drm_crtc_vblank_put(crtc);
		}
	}

	drm_atomic_helper_commit_hw_done(state);
	drm_atomic_helper_cleanup_planes(dev, state);
	drm_atomic_helper_commit_cleanup_done(state);
	drm_atomic_state_put(state);
}

static void
nv50_disp_atomic_commit_work(struct work_struct *work)
{
	struct drm_atomic_state *state =
		container_of(work, typeof(*state), commit_work);
	nv50_disp_atomic_commit_tail(state);
}

static int
nv50_disp_atomic_commit(struct drm_device *dev,
			struct drm_atomic_state *state, bool nonblock)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	bool active = false;
	int ret, i;

	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0 && ret != -EACCES)
		return ret;

	ret = drm_atomic_helper_setup_commit(state, nonblock);
	if (ret)
		goto done;

	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);

	ret = drm_atomic_helper_prepare_planes(dev, state);
	if (ret)
		goto done;

	if (!nonblock) {
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto done;
	}

	for_each_plane_in_state(state, plane, plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (asyw->set.image) {
			asyw->ntfy.handle = wndw->dmac->sync.handle;
			asyw->ntfy.offset = wndw->ntfy;
			asyw->ntfy.awaken = false;
			asyw->set.ntfy = true;
			nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000);
			wndw->ntfy ^= 0x10;
		}
	}

	drm_atomic_helper_swap_state(state, true);
	drm_atomic_state_get(state);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

	drm_for_each_crtc(crtc, dev) {
		if (crtc->state->enable) {
			if (!drm->have_disp_power_ref) {
				drm->have_disp_power_ref = true;
				return 0;
			}
			active = true;
			break;
		}
	}

	if (!active && drm->have_disp_power_ref) {
		pm_runtime_put_autosuspend(dev->dev);
		drm->have_disp_power_ref = false;
	}

done:
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}
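
/* Find (or allocate and queue) the per-encoder bookkeeping structure
 * used to track which output paths need their control state set or
 * cleared during this atomic commit.
 */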
static struct nv50_outp_atom *
nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
{
	struct nv50_outp_atom *outp;

	list_for_each_entry(outp, &atom->outp, head) {
		if (outp->encoder == encoder)
			return outp;
	}

	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
	if (!outp)
		return ERR_PTR(-ENOMEM);

	list_add(&outp->head, &atom->outp);
	outp->encoder = encoder;
	return outp;
}

static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
				struct drm_connector *connector)
{
	struct drm_encoder *encoder = connector->state->best_encoder;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = connector->state->crtc))
		return 0;

	crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
	if (crtc->state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			outp->flush_disable = true;
			atom->flush_disable = true;
		}
		outp->clr.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}

static int
nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
				struct drm_connector_state *connector_state)
{
	struct drm_encoder *encoder = connector_state->best_encoder;
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = connector_state->crtc))
		return 0;

	crtc_state = drm_atomic_get_existing_crtc_state(&atom->state, crtc);
	if (crtc_state->active && drm_atomic_crtc_needs_modeset(crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		outp->set.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}

static int
nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct drm_connector_state *connector_state;
	struct drm_connector *connector;
	int ret, i;

	ret = drm_atomic_helper_check(dev, state);
	if (ret)
		return ret;

	for_each_connector_in_state(state, connector, connector_state, i) {
		ret = nv50_disp_outp_atomic_check_clr(atom, connector);
		if (ret)
			return ret;

		ret = nv50_disp_outp_atomic_check_set(atom, connector_state);
		if (ret)
			return ret;
	}

	return 0;
}

static void
nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;

	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		list_del(&outp->head);
		kfree(outp);
	}

	drm_atomic_state_default_clear(state);
	atom->lock_core = false;
	atom->flush_disable = false;
}

static void
nv50_disp_atomic_state_free(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	drm_atomic_state_default_release(&atom->state);
	kfree(atom);
}

static struct drm_atomic_state *
nv50_disp_atomic_state_alloc(struct drm_device *dev)
{
	struct nv50_atom *atom;
	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
	    drm_atomic_state_init(dev, &atom->state) < 0) {
		kfree(atom);
		return NULL;
	}
	INIT_LIST_HEAD(&atom->outp);
	return &atom->state;
}

static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};

/******************************************************************************
 * Init
 *****************************************************************************/

void
nv50_display_fini(struct drm_device *dev)
{
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_fini(wndw);
	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			nv_encoder = nouveau_encoder(encoder);
			nv50_mstm_fini(nv_encoder->dp.mstm);
		}
	}
}

int
nv50_display_init(struct drm_device *dev)
{
	struct drm_encoder *encoder;
	struct drm_plane *plane;
	struct drm_crtc *crtc;
	u32 *push;

	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;

	evo_mthd(push, 0x0088, 1);
	evo_data(push, nv50_mast(dev)->base.sync.handle);
	evo_kick(push, nv50_mast(dev));

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			const struct drm_encoder_helper_funcs *help;
			struct nouveau_encoder *nv_encoder;

			nv_encoder = nouveau_encoder(encoder);
			help = encoder->helper_private;
			if (help && help->dpms)
				help->dpms(encoder, DRM_MODE_DPMS_ON);

			nv50_mstm_init(nv_encoder->dp.mstm);
		}
	}

	drm_for_each_crtc(crtc, dev) {
		nv50_head_lut_load(crtc);
	}

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_init(wndw);
	}

	return 0;
}

void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_dmac_destroy(&disp->mast.base, disp->disp);

	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}

MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
static int nouveau_atomic = 0;
module_param_named(atomic, nouveau_atomic, int, 0400);
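
/* Main entry point for constructing the display: allocates the shared
 * sync/notifier buffer, creates the core EVO channel, one CRTC per
 * hardware head, and encoders/connectors from the VBIOS DCB table,
 * culling any connector that ends up with no encoders.
 */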
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->client.device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	mutex_init(&disp->mutex);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	if (nouveau_atomic)
		dev->driver->driver_features |= DRIVER_ATOMIC;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
			       &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x022448);
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_head_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}