1 /*
2 * Copyright 2011 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Ben Skeggs
23 */
24 #include "disp.h"
25 #include "atom.h"
26 #include "core.h"
27 #include "head.h"
28 #include "wndw.h"
29
30 #include <linux/dma-mapping.h>
31 #include <linux/hdmi.h>
32
33 #include <drm/drmP.h>
34 #include <drm/drm_atomic_helper.h>
35 #include <drm/drm_crtc_helper.h>
36 #include <drm/drm_dp_helper.h>
37 #include <drm/drm_fb_helper.h>
38 #include <drm/drm_plane_helper.h>
39 #include <drm/drm_edid.h>
40
41 #include <nvif/class.h>
42 #include <nvif/cl0002.h>
43 #include <nvif/cl5070.h>
44 #include <nvif/cl507d.h>
45 #include <nvif/event.h>
46
47 #include "nouveau_drv.h"
48 #include "nouveau_dma.h"
49 #include "nouveau_gem.h"
50 #include "nouveau_connector.h"
51 #include "nouveau_encoder.h"
52 #include "nouveau_fence.h"
53 #include "nouveau_fbcon.h"
54
55 #include <subdev/bios/dp.h>
56
57 /******************************************************************************
58 * Atomic state
59 *****************************************************************************/
60
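/* Per-encoder bookkeeping for an atomic commit: every output path whose
 * control state must change gets one of these queued on the atom's outp
 * list.  The set/clr masks mirror the head/window atoms (ctrl is currently
 * the only bit), and flush_disable requests that the disable be flushed to
 * hardware with a core update before any new state is programmed.
 */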
61 struct nv50_outp_atom {
62 struct list_head head;
63
64 struct drm_encoder *encoder;
65 bool flush_disable;
66
67 union nv50_outp_atom_mask {
68 struct {
69 bool ctrl:1;
70 };
71 u8 mask;
72 } set, clr;
73 };
74
75 /******************************************************************************
76 * EVO channel
77 *****************************************************************************/
78
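/* Walk the caller's class list in order of preference, match it against the
 * classes the display object exposes, and initialise (and map) the first
 * channel class the hardware supports.
 */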
79 static int
80 nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
81 const s32 *oclass, u8 head, void *data, u32 size,
82 struct nv50_chan *chan)
83 {
84 struct nvif_sclass *sclass;
85 int ret, i, n;
86
87 chan->device = device;
88
89 ret = n = nvif_object_sclass_get(disp, &sclass);
90 if (ret < 0)
91 return ret;
92
93 while (oclass[0]) {
94 for (i = 0; i < n; i++) {
95 if (sclass[i].oclass == oclass[0]) {
96 ret = nvif_object_init(disp, 0, oclass[0],
97 data, size, &chan->user);
98 if (ret == 0)
99 nvif_object_map(&chan->user, NULL, 0);
100 nvif_object_sclass_put(&sclass);
101 return ret;
102 }
103 }
104 oclass++;
105 }
106
107 nvif_object_sclass_put(&sclass);
108 return -ENOSYS;
109 }
110
111 static void
112 nv50_chan_destroy(struct nv50_chan *chan)
113 {
114 nvif_object_fini(&chan->user);
115 }
116
117 /******************************************************************************
118 * DMA EVO channel
119 *****************************************************************************/
120
121 void
122 nv50_dmac_destroy(struct nv50_dmac *dmac)
123 {
124 nvif_object_fini(&dmac->vram);
125 nvif_object_fini(&dmac->sync);
126
127 nv50_chan_destroy(&dmac->base);
128
129 nvif_mem_fini(&dmac->push);
130 }
131
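/* A DMA EVO channel owns a 4KiB coherent push buffer and, when a sync
 * buffer offset is supplied, two DMA objects its methods can reference:
 * handle 0xf0000000 covering the 4KiB sync area and 0xf0000001 covering
 * all of VRAM.
 */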
132 int
133 nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
134 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
135 struct nv50_dmac *dmac)
136 {
137 struct nouveau_cli *cli = (void *)device->object.client;
138 struct nv50_disp_core_channel_dma_v0 *args = data;
139 int ret;
140
141 mutex_init(&dmac->lock);
142
143 ret = nvif_mem_init_map(&cli->mmu, NVIF_MEM_COHERENT, 0x1000,
144 &dmac->push);
145 if (ret)
146 return ret;
147
148 dmac->ptr = dmac->push.object.map.ptr;
149
150 args->pushbuf = nvif_handle(&dmac->push.object);
151
152 ret = nv50_chan_create(device, disp, oclass, head, data, size,
153 &dmac->base);
154 if (ret)
155 return ret;
156
157 if (!syncbuf)
158 return 0;
159
160 ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
161 &(struct nv_dma_v0) {
162 .target = NV_DMA_V0_TARGET_VRAM,
163 .access = NV_DMA_V0_ACCESS_RDWR,
164 .start = syncbuf + 0x0000,
165 .limit = syncbuf + 0x0fff,
166 }, sizeof(struct nv_dma_v0),
167 &dmac->sync);
168 if (ret)
169 return ret;
170
171 ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
172 &(struct nv_dma_v0) {
173 .target = NV_DMA_V0_TARGET_VRAM,
174 .access = NV_DMA_V0_ACCESS_RDWR,
175 .start = 0,
176 .limit = device->info.ram_user - 1,
177 }, sizeof(struct nv_dma_v0),
178 &dmac->vram);
179 if (ret)
180 return ret;
181
182 return ret;
183 }
184
185 /******************************************************************************
186 * EVO channel helpers
187 *****************************************************************************/
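/* evo_wait()/evo_kick() bracket a push-buffer submission: evo_wait() takes
 * the channel lock and returns space for 'nr' dwords, wrapping back to the
 * start of the 4KiB buffer with a 0x20000000 jump when it would overflow,
 * and evo_kick() publishes the new PUT offset and drops the lock.
 *
 * Typical caller pattern (illustrative sketch only; the method and data
 * values are placeholders, real method streams live in the head/core/wndw
 * implementations, using the evo_mthd()/evo_data() helpers from disp.h):
 *
 *	u32 *push = evo_wait(dmac, 2);
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);
 *		evo_data(push, 0x00000000);
 *		evo_kick(push, dmac);
 *	}
 */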
188 u32 *
189 evo_wait(struct nv50_dmac *evoc, int nr)
190 {
191 struct nv50_dmac *dmac = evoc;
192 struct nvif_device *device = dmac->base.device;
193 u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
194
195 mutex_lock(&dmac->lock);
196 if (put + nr >= (PAGE_SIZE / 4) - 8) {
197 dmac->ptr[put] = 0x20000000;
198
199 nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
200 if (nvif_msec(device, 2000,
201 if (!nvif_rd32(&dmac->base.user, 0x0004))
202 break;
203 ) < 0) {
204 mutex_unlock(&dmac->lock);
205 pr_err("nouveau: evo channel stalled\n");
206 return NULL;
207 }
208
209 put = 0;
210 }
211
212 return dmac->ptr + put;
213 }
214
215 void
216 evo_kick(u32 *push, struct nv50_dmac *evoc)
217 {
218 struct nv50_dmac *dmac = evoc;
219 nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
220 mutex_unlock(&dmac->lock);
221 }
222
223 /******************************************************************************
224 * Output path helpers
225 *****************************************************************************/
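/* Output resources (ORs) are assigned by NVKM at runtime: nv50_outp_acquire()
 * asks the display for an OR to drive this DCB entry and records the assigned
 * OR index and link in the encoder, and nv50_outp_release() hands it back and
 * invalidates both fields.
 */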
226 static void
227 nv50_outp_release(struct nouveau_encoder *nv_encoder)
228 {
229 struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
230 struct {
231 struct nv50_disp_mthd_v1 base;
232 } args = {
233 .base.version = 1,
234 .base.method = NV50_DISP_MTHD_V1_RELEASE,
235 .base.hasht = nv_encoder->dcb->hasht,
236 .base.hashm = nv_encoder->dcb->hashm,
237 };
238
239 nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
240 nv_encoder->or = -1;
241 nv_encoder->link = 0;
242 }
243
244 static int
245 nv50_outp_acquire(struct nouveau_encoder *nv_encoder)
246 {
247 struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
248 struct nv50_disp *disp = nv50_disp(drm->dev);
249 struct {
250 struct nv50_disp_mthd_v1 base;
251 struct nv50_disp_acquire_v0 info;
252 } args = {
253 .base.version = 1,
254 .base.method = NV50_DISP_MTHD_V1_ACQUIRE,
255 .base.hasht = nv_encoder->dcb->hasht,
256 .base.hashm = nv_encoder->dcb->hashm,
257 };
258 int ret;
259
260 ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
261 if (ret) {
262 NV_ERROR(drm, "error acquiring output path: %d\n", ret);
263 return ret;
264 }
265
266 nv_encoder->or = args.info.or;
267 nv_encoder->link = args.info.link;
268 return 0;
269 }
270
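/* Decide which mode the head should actually scan out.  With scaling
 * disabled, LVDS/eDP panels are still forced to their native mode (with the
 * head scaler active) for anything that isn't an EDID-provided mode; with
 * any other scaling mode the native mode always wins.  Replacing
 * adjusted_mode marks the CRTC as needing a modeset.
 */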
271 static int
272 nv50_outp_atomic_check_view(struct drm_encoder *encoder,
273 struct drm_crtc_state *crtc_state,
274 struct drm_connector_state *conn_state,
275 struct drm_display_mode *native_mode)
276 {
277 struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
278 struct drm_display_mode *mode = &crtc_state->mode;
279 struct drm_connector *connector = conn_state->connector;
280 struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
281 struct nouveau_drm *drm = nouveau_drm(encoder->dev);
282
283 NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
284 asyc->scaler.full = false;
285 if (!native_mode)
286 return 0;
287
288 if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
289 switch (connector->connector_type) {
290 case DRM_MODE_CONNECTOR_LVDS:
291 case DRM_MODE_CONNECTOR_eDP:
292 /* Force use of scaler for non-EDID modes. */
293 if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
294 break;
295 mode = native_mode;
296 asyc->scaler.full = true;
297 break;
298 default:
299 break;
300 }
301 } else {
302 mode = native_mode;
303 }
304
305 if (!drm_mode_equal(adjusted_mode, mode)) {
306 drm_mode_copy(adjusted_mode, mode);
307 crtc_state->mode_changed = true;
308 }
309
310 return 0;
311 }
312
313 static int
314 nv50_outp_atomic_check(struct drm_encoder *encoder,
315 struct drm_crtc_state *crtc_state,
316 struct drm_connector_state *conn_state)
317 {
318 struct nouveau_connector *nv_connector =
319 nouveau_connector(conn_state->connector);
320 return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
321 nv_connector->native_mode);
322 }
323
324 /******************************************************************************
325 * DAC
326 *****************************************************************************/
327 static void
328 nv50_dac_disable(struct drm_encoder *encoder)
329 {
330 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
331 struct nv50_core *core = nv50_disp(encoder->dev)->core;
332 if (nv_encoder->crtc)
333 core->func->dac->ctrl(core, nv_encoder->or, 0x00000000, NULL);
334 nv_encoder->crtc = NULL;
335 nv50_outp_release(nv_encoder);
336 }
337
338 static void
339 nv50_dac_enable(struct drm_encoder *encoder)
340 {
341 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
342 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
343 struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
344 struct nv50_core *core = nv50_disp(encoder->dev)->core;
345
346 nv50_outp_acquire(nv_encoder);
347
348 core->func->dac->ctrl(core, nv_encoder->or, 1 << nv_crtc->index, asyh);
349 asyh->or.depth = 0;
350
351 nv_encoder->crtc = encoder->crtc;
352 }
353
354 static enum drm_connector_status
355 nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
356 {
357 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
358 struct nv50_disp *disp = nv50_disp(encoder->dev);
359 struct {
360 struct nv50_disp_mthd_v1 base;
361 struct nv50_disp_dac_load_v0 load;
362 } args = {
363 .base.version = 1,
364 .base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
365 .base.hasht = nv_encoder->dcb->hasht,
366 .base.hashm = nv_encoder->dcb->hashm,
367 };
368 int ret;
369
370 args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
371 if (args.load.data == 0)
372 args.load.data = 340;
373
374 ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
375 if (ret || !args.load.load)
376 return connector_status_disconnected;
377
378 return connector_status_connected;
379 }
380
381 static const struct drm_encoder_helper_funcs
382 nv50_dac_help = {
383 .atomic_check = nv50_outp_atomic_check,
384 .enable = nv50_dac_enable,
385 .disable = nv50_dac_disable,
386 .detect = nv50_dac_detect
387 };
388
389 static void
390 nv50_dac_destroy(struct drm_encoder *encoder)
391 {
392 drm_encoder_cleanup(encoder);
393 kfree(encoder);
394 }
395
396 static const struct drm_encoder_funcs
397 nv50_dac_func = {
398 .destroy = nv50_dac_destroy,
399 };
400
401 static int
402 nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
403 {
404 struct nouveau_drm *drm = nouveau_drm(connector->dev);
405 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
406 struct nvkm_i2c_bus *bus;
407 struct nouveau_encoder *nv_encoder;
408 struct drm_encoder *encoder;
409 int type = DRM_MODE_ENCODER_DAC;
410
411 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
412 if (!nv_encoder)
413 return -ENOMEM;
414 nv_encoder->dcb = dcbe;
415
416 bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
417 if (bus)
418 nv_encoder->i2c = &bus->i2c;
419
420 encoder = to_drm_encoder(nv_encoder);
421 encoder->possible_crtcs = dcbe->heads;
422 encoder->possible_clones = 0;
423 drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
424 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
425 drm_encoder_helper_add(encoder, &nv50_dac_help);
426
427 drm_connector_attach_encoder(connector, encoder);
428 return 0;
429 }
430
431 /******************************************************************************
432 * Audio
433 *****************************************************************************/
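/* HD-Audio over HDMI/DP: audio is enabled by writing the connector's ELD
 * through the SOR_HDA_ELD method (the head index is encoded into hashm),
 * and disabled by issuing the same method without any ELD data.
 */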
434 static void
435 nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
436 {
437 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
438 struct nv50_disp *disp = nv50_disp(encoder->dev);
439 struct {
440 struct nv50_disp_mthd_v1 base;
441 struct nv50_disp_sor_hda_eld_v0 eld;
442 } args = {
443 .base.version = 1,
444 .base.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
445 .base.hasht = nv_encoder->dcb->hasht,
446 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
447 (0x0100 << nv_crtc->index),
448 };
449
450 nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
451 }
452
453 static void
454 nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
455 {
456 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
457 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
458 struct nouveau_connector *nv_connector;
459 struct nv50_disp *disp = nv50_disp(encoder->dev);
460 struct __packed {
461 struct {
462 struct nv50_disp_mthd_v1 mthd;
463 struct nv50_disp_sor_hda_eld_v0 eld;
464 } base;
465 u8 data[sizeof(nv_connector->base.eld)];
466 } args = {
467 .base.mthd.version = 1,
468 .base.mthd.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
469 .base.mthd.hasht = nv_encoder->dcb->hasht,
470 .base.mthd.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
471 (0x0100 << nv_crtc->index),
472 };
473
474 nv_connector = nouveau_encoder_connector_get(nv_encoder);
475 if (!drm_detect_monitor_audio(nv_connector->edid))
476 return;
477
478 memcpy(args.data, nv_connector->base.eld, sizeof(args.data));
479
480 nvif_mthd(&disp->disp->object, 0, &args,
481 sizeof(args.base) + drm_eld_size(args.data));
482 }
483
484 /******************************************************************************
485 * HDMI
486 *****************************************************************************/
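/* HDMI enable packs the AVI and (optional) vendor infoframes directly after
 * the SOR_HDMI_PWR argument structure, and derives max_ac_packet from the
 * horizontal blanking budget: (htotal - hdisplay - rekey - 18) / 32.
 */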
487 static void
488 nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
489 {
490 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
491 struct nv50_disp *disp = nv50_disp(encoder->dev);
492 struct {
493 struct nv50_disp_mthd_v1 base;
494 struct nv50_disp_sor_hdmi_pwr_v0 pwr;
495 } args = {
496 .base.version = 1,
497 .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
498 .base.hasht = nv_encoder->dcb->hasht,
499 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
500 (0x0100 << nv_crtc->index),
501 };
502
503 nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
504 }
505
506 static void
507 nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
508 {
509 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
510 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
511 struct nv50_disp *disp = nv50_disp(encoder->dev);
512 struct {
513 struct nv50_disp_mthd_v1 base;
514 struct nv50_disp_sor_hdmi_pwr_v0 pwr;
515 u8 infoframes[2 * 17]; /* two frames, up to 17 bytes each */
516 } args = {
517 .base.version = 1,
518 .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
519 .base.hasht = nv_encoder->dcb->hasht,
520 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
521 (0x0100 << nv_crtc->index),
522 .pwr.state = 1,
523                 .pwr.rekey = 56, /* constant used by both the binary driver and Tegra */
524 };
525 struct nouveau_connector *nv_connector;
526 u32 max_ac_packet;
527 union hdmi_infoframe avi_frame;
528 union hdmi_infoframe vendor_frame;
529 int ret;
530 int size;
531
532 nv_connector = nouveau_encoder_connector_get(nv_encoder);
533 if (!drm_detect_hdmi_monitor(nv_connector->edid))
534 return;
535
536 ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi, mode,
537 false);
538 if (!ret) {
539                 /* We have an AVI InfoFrame; pack it and send it to the display. */
540 args.pwr.avi_infoframe_length
541 = hdmi_infoframe_pack(&avi_frame, args.infoframes, 17);
542 }
543
544 ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi,
545 &nv_connector->base, mode);
546 if (!ret) {
547                 /* We have a Vendor InfoFrame; pack it and send it to the display. */
548 args.pwr.vendor_infoframe_length
549 = hdmi_infoframe_pack(&vendor_frame,
550 args.infoframes
551 + args.pwr.avi_infoframe_length,
552 17);
553 }
554
555 max_ac_packet = mode->htotal - mode->hdisplay;
556 max_ac_packet -= args.pwr.rekey;
557 max_ac_packet -= 18; /* constant from tegra */
558 args.pwr.max_ac_packet = max_ac_packet / 32;
559
560 size = sizeof(args.base)
561 + sizeof(args.pwr)
562 + args.pwr.avi_infoframe_length
563 + args.pwr.vendor_infoframe_length;
564 nvif_mthd(&disp->disp->object, 0, &args, size);
565 nv50_audio_enable(encoder, mode);
566 }
567
568 /******************************************************************************
569 * MST
570 *****************************************************************************/
571 #define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
572 #define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
573 #define nv50_msto(p) container_of((p), struct nv50_msto, encoder)
574
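/* MST object relationships: nv50_mstm wraps the DP MST topology manager for
 * one physical SOR, nv50_mstc is the connector created for each branch-device
 * port, and nv50_msto is a fake encoder (one per head) that an mstc is routed
 * through so head<->port assignment works with the atomic core.
 */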
575 struct nv50_mstm {
576 struct nouveau_encoder *outp;
577
578 struct drm_dp_mst_topology_mgr mgr;
579 struct nv50_msto *msto[4];
580
581 bool modified;
582 bool disabled;
583 int links;
584 };
585
586 struct nv50_mstc {
587 struct nv50_mstm *mstm;
588 struct drm_dp_mst_port *port;
589 struct drm_connector connector;
590
591 struct drm_display_mode *native;
592 struct edid *edid;
593
594 int pbn;
595 };
596
597 struct nv50_msto {
598 struct drm_encoder encoder;
599
600 struct nv50_head *head;
601 struct nv50_mstc *mstc;
602 bool disabled;
603 };
604
605 static struct drm_dp_payload *
606 nv50_msto_payload(struct nv50_msto *msto)
607 {
608 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
609 struct nv50_mstc *mstc = msto->mstc;
610 struct nv50_mstm *mstm = mstc->mstm;
611 int vcpi = mstc->port->vcpi.vcpi, i;
612
613 NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
614 for (i = 0; i < mstm->mgr.max_payloads; i++) {
615 struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
616 NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
617 mstm->outp->base.base.name, i, payload->vcpi,
618 payload->start_slot, payload->num_slots);
619 }
620
621 for (i = 0; i < mstm->mgr.max_payloads; i++) {
622 struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
623 if (payload->vcpi == vcpi)
624 return payload;
625 }
626
627 return NULL;
628 }
629
630 static void
631 nv50_msto_cleanup(struct nv50_msto *msto)
632 {
633 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
634 struct nv50_mstc *mstc = msto->mstc;
635 struct nv50_mstm *mstm = mstc->mstm;
636
637 NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
638 if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
639 drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
640 if (msto->disabled) {
641 msto->mstc = NULL;
642 msto->head = NULL;
643 msto->disabled = false;
644 }
645 }
646
647 static void
648 nv50_msto_prepare(struct nv50_msto *msto)
649 {
650 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
651 struct nv50_mstc *mstc = msto->mstc;
652 struct nv50_mstm *mstm = mstc->mstm;
653 struct {
654 struct nv50_disp_mthd_v1 base;
655 struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
656 } args = {
657 .base.version = 1,
658 .base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
659 .base.hasht = mstm->outp->dcb->hasht,
660 .base.hashm = (0xf0ff & mstm->outp->dcb->hashm) |
661 (0x0100 << msto->head->base.index),
662 };
663
664 NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
665 if (mstc->port && mstc->port->vcpi.vcpi > 0) {
666 struct drm_dp_payload *payload = nv50_msto_payload(msto);
667 if (payload) {
668 args.vcpi.start_slot = payload->start_slot;
669 args.vcpi.num_slots = payload->num_slots;
670 args.vcpi.pbn = mstc->port->vcpi.pbn;
671 args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
672 }
673 }
674
675 NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
676 msto->encoder.name, msto->head->base.base.name,
677 args.vcpi.start_slot, args.vcpi.num_slots,
678 args.vcpi.pbn, args.vcpi.aligned_pbn);
679 nvif_mthd(&drm->display->disp.object, 0, &args, sizeof(args));
680 }
681
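/* MST bandwidth check: convert the adjusted mode's pixel clock and the
 * connector's bpc into a PBN value, and verify the topology can still
 * provide enough VCPI slots for it before accepting the state.
 */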
682 static int
683 nv50_msto_atomic_check(struct drm_encoder *encoder,
684 struct drm_crtc_state *crtc_state,
685 struct drm_connector_state *conn_state)
686 {
687 struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
688 struct nv50_mstm *mstm = mstc->mstm;
689 int bpp = conn_state->connector->display_info.bpc * 3;
690 int slots;
691
692 mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);
693
694 slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
695 if (slots < 0)
696 return slots;
697
698 return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
699 mstc->native);
700 }
701
702 static void
703 nv50_msto_enable(struct drm_encoder *encoder)
704 {
705 struct nv50_head *head = nv50_head(encoder->crtc);
706 struct nv50_msto *msto = nv50_msto(encoder);
707 struct nv50_mstc *mstc = NULL;
708 struct nv50_mstm *mstm = NULL;
709 struct drm_connector *connector;
710 struct drm_connector_list_iter conn_iter;
711 u8 proto, depth;
712 int slots;
713 bool r;
714
715 drm_connector_list_iter_begin(encoder->dev, &conn_iter);
716 drm_for_each_connector_iter(connector, &conn_iter) {
717 if (connector->state->best_encoder == &msto->encoder) {
718 mstc = nv50_mstc(connector);
719 mstm = mstc->mstm;
720 break;
721 }
722 }
723 drm_connector_list_iter_end(&conn_iter);
724
725 if (WARN_ON(!mstc))
726 return;
727
728 slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
729 r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, slots);
730 WARN_ON(!r);
731
732 if (!mstm->links++)
733 nv50_outp_acquire(mstm->outp);
734
735 if (mstm->outp->link & 1)
736 proto = 0x8;
737 else
738 proto = 0x9;
739
740 switch (mstc->connector.display_info.bpc) {
741 case 6: depth = 0x2; break;
742 case 8: depth = 0x5; break;
743 case 10:
744 default: depth = 0x6; break;
745 }
746
747 mstm->outp->update(mstm->outp, head->base.index,
748 nv50_head_atom(head->base.base.state), proto, depth);
749
750 msto->head = head;
751 msto->mstc = mstc;
752 mstm->modified = true;
753 }
754
755 static void
756 nv50_msto_disable(struct drm_encoder *encoder)
757 {
758 struct nv50_msto *msto = nv50_msto(encoder);
759 struct nv50_mstc *mstc = msto->mstc;
760 struct nv50_mstm *mstm = mstc->mstm;
761
762 if (mstc->port)
763 drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);
764
765 mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
766 mstm->modified = true;
767 if (!--mstm->links)
768 mstm->disabled = true;
769 msto->disabled = true;
770 }
771
772 static const struct drm_encoder_helper_funcs
773 nv50_msto_help = {
774 .disable = nv50_msto_disable,
775 .enable = nv50_msto_enable,
776 .atomic_check = nv50_msto_atomic_check,
777 };
778
779 static void
780 nv50_msto_destroy(struct drm_encoder *encoder)
781 {
782 struct nv50_msto *msto = nv50_msto(encoder);
783 drm_encoder_cleanup(&msto->encoder);
784 kfree(msto);
785 }
786
787 static const struct drm_encoder_funcs
788 nv50_msto = {
789 .destroy = nv50_msto_destroy,
790 };
791
792 static int
793 nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
794 struct nv50_msto **pmsto)
795 {
796 struct nv50_msto *msto;
797 int ret;
798
799 if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
800 return -ENOMEM;
801
802 ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
803 DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
804 if (ret) {
805 kfree(*pmsto);
806 *pmsto = NULL;
807 return ret;
808 }
809
810 drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
811 msto->encoder.possible_crtcs = heads;
812 return 0;
813 }
814
815 static struct drm_encoder *
816 nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
817 struct drm_connector_state *connector_state)
818 {
819 struct nv50_head *head = nv50_head(connector_state->crtc);
820 struct nv50_mstc *mstc = nv50_mstc(connector);
821 if (mstc->port) {
822 struct nv50_mstm *mstm = mstc->mstm;
823 return &mstm->msto[head->base.index]->encoder;
824 }
825 return NULL;
826 }
827
828 static struct drm_encoder *
829 nv50_mstc_best_encoder(struct drm_connector *connector)
830 {
831 struct nv50_mstc *mstc = nv50_mstc(connector);
832 if (mstc->port) {
833 struct nv50_mstm *mstm = mstc->mstm;
834 return &mstm->msto[0]->encoder;
835 }
836 return NULL;
837 }
838
839 static enum drm_mode_status
840 nv50_mstc_mode_valid(struct drm_connector *connector,
841 struct drm_display_mode *mode)
842 {
843 return MODE_OK;
844 }
845
846 static int
847 nv50_mstc_get_modes(struct drm_connector *connector)
848 {
849 struct nv50_mstc *mstc = nv50_mstc(connector);
850 int ret = 0;
851
852 mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
853 drm_connector_update_edid_property(&mstc->connector, mstc->edid);
854 if (mstc->edid)
855 ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
856
857 if (!mstc->connector.display_info.bpc)
858 mstc->connector.display_info.bpc = 8;
859
860 if (mstc->native)
861 drm_mode_destroy(mstc->connector.dev, mstc->native);
862 mstc->native = nouveau_conn_native_mode(&mstc->connector);
863 return ret;
864 }
865
866 static const struct drm_connector_helper_funcs
867 nv50_mstc_help = {
868 .get_modes = nv50_mstc_get_modes,
869 .mode_valid = nv50_mstc_mode_valid,
870 .best_encoder = nv50_mstc_best_encoder,
871 .atomic_best_encoder = nv50_mstc_atomic_best_encoder,
872 };
873
874 static enum drm_connector_status
875 nv50_mstc_detect(struct drm_connector *connector, bool force)
876 {
877 struct nv50_mstc *mstc = nv50_mstc(connector);
878 if (!mstc->port)
879 return connector_status_disconnected;
880 return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
881 }
882
883 static void
884 nv50_mstc_destroy(struct drm_connector *connector)
885 {
886 struct nv50_mstc *mstc = nv50_mstc(connector);
887 drm_connector_cleanup(&mstc->connector);
888 kfree(mstc);
889 }
890
891 static const struct drm_connector_funcs
892 nv50_mstc = {
893 .reset = nouveau_conn_reset,
894 .detect = nv50_mstc_detect,
895 .fill_modes = drm_helper_probe_single_connector_modes,
896 .destroy = nv50_mstc_destroy,
897 .atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
898 .atomic_destroy_state = nouveau_conn_atomic_destroy_state,
899 .atomic_set_property = nouveau_conn_atomic_set_property,
900 .atomic_get_property = nouveau_conn_atomic_get_property,
901 };
902
903 static int
904 nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
905 const char *path, struct nv50_mstc **pmstc)
906 {
907 struct drm_device *dev = mstm->outp->base.base.dev;
908 struct nv50_mstc *mstc;
909 int ret, i;
910
911 if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
912 return -ENOMEM;
913 mstc->mstm = mstm;
914 mstc->port = port;
915
916 ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
917 DRM_MODE_CONNECTOR_DisplayPort);
918 if (ret) {
919 kfree(*pmstc);
920 *pmstc = NULL;
921 return ret;
922 }
923
924 drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);
925
926 mstc->connector.funcs->reset(&mstc->connector);
927 nouveau_conn_attach_properties(&mstc->connector);
928
929 for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto[i]; i++)
930 drm_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);
931
932 drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
933 drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
934 drm_mode_connector_set_path_property(&mstc->connector, path);
935 return 0;
936 }
937
938 static void
939 nv50_mstm_cleanup(struct nv50_mstm *mstm)
940 {
941 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
942 struct drm_encoder *encoder;
943 int ret;
944
945 NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
946 ret = drm_dp_check_act_status(&mstm->mgr);
947
948 ret = drm_dp_update_payload_part2(&mstm->mgr);
949
950 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
951 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
952 struct nv50_msto *msto = nv50_msto(encoder);
953 struct nv50_mstc *mstc = msto->mstc;
954 if (mstc && mstc->mstm == mstm)
955 nv50_msto_cleanup(msto);
956 }
957 }
958
959 mstm->modified = false;
960 }
961
962 static void
963 nv50_mstm_prepare(struct nv50_mstm *mstm)
964 {
965 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
966 struct drm_encoder *encoder;
967 int ret;
968
969 NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
970 ret = drm_dp_update_payload_part1(&mstm->mgr);
971
972 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
973 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
974 struct nv50_msto *msto = nv50_msto(encoder);
975 struct nv50_mstc *mstc = msto->mstc;
976 if (mstc && mstc->mstm == mstm)
977 nv50_msto_prepare(msto);
978 }
979 }
980
981 if (mstm->disabled) {
982 if (!mstm->links)
983 nv50_outp_release(mstm->outp);
984 mstm->disabled = false;
985 }
986 }
987
988 static void
989 nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
990 {
991 struct nv50_mstm *mstm = nv50_mstm(mgr);
992 drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
993 }
994
995 static void
996 nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
997 struct drm_connector *connector)
998 {
999 struct nouveau_drm *drm = nouveau_drm(connector->dev);
1000 struct nv50_mstc *mstc = nv50_mstc(connector);
1001
1002 drm_connector_unregister(&mstc->connector);
1003
1004 drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);
1005
1006 drm_modeset_lock(&drm->dev->mode_config.connection_mutex, NULL);
1007 mstc->port = NULL;
1008 drm_modeset_unlock(&drm->dev->mode_config.connection_mutex);
1009
1010 drm_connector_unreference(&mstc->connector);
1011 }
1012
1013 static void
1014 nv50_mstm_register_connector(struct drm_connector *connector)
1015 {
1016 struct nouveau_drm *drm = nouveau_drm(connector->dev);
1017
1018 drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);
1019
1020 drm_connector_register(connector);
1021 }
1022
1023 static struct drm_connector *
1024 nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
1025 struct drm_dp_mst_port *port, const char *path)
1026 {
1027 struct nv50_mstm *mstm = nv50_mstm(mgr);
1028 struct nv50_mstc *mstc;
1029 int ret;
1030
1031 ret = nv50_mstc_new(mstm, port, path, &mstc);
1032 if (ret) {
1033 if (mstc)
1034 mstc->connector.funcs->destroy(&mstc->connector);
1035 return NULL;
1036 }
1037
1038 return &mstc->connector;
1039 }
1040
1041 static const struct drm_dp_mst_topology_cbs
1042 nv50_mstm = {
1043 .add_connector = nv50_mstm_add_connector,
1044 .register_connector = nv50_mstm_register_connector,
1045 .destroy_connector = nv50_mstm_destroy_connector,
1046 .hotplug = nv50_mstm_hotplug,
1047 };
1048
1049 void
1050 nv50_mstm_service(struct nv50_mstm *mstm)
1051 {
1052 struct drm_dp_aux *aux = mstm ? mstm->mgr.aux : NULL;
1053 bool handled = true;
1054 int ret;
1055 u8 esi[8] = {};
1056
1057 if (!aux)
1058 return;
1059
1060 while (handled) {
1061 ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
1062 if (ret != 8) {
1063 drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
1064 return;
1065 }
1066
1067 drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
1068 if (!handled)
1069 break;
1070
1071 drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
1072 }
1073 }
1074
1075 void
1076 nv50_mstm_remove(struct nv50_mstm *mstm)
1077 {
1078 if (mstm)
1079 drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
1080 }
1081
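/* Flip MST on/off: for DPCD 1.2+ sinks the DP_MST_EN bit in DP_MSTM_CTRL is
 * updated first, then NVKM is told to (re)configure the SOR link through the
 * SOR_DP_MST_LINK method.
 */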
1082 static int
1083 nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
1084 {
1085 struct nouveau_encoder *outp = mstm->outp;
1086 struct {
1087 struct nv50_disp_mthd_v1 base;
1088 struct nv50_disp_sor_dp_mst_link_v0 mst;
1089 } args = {
1090 .base.version = 1,
1091 .base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
1092 .base.hasht = outp->dcb->hasht,
1093 .base.hashm = outp->dcb->hashm,
1094 .mst.state = state,
1095 };
1096 struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
1097 struct nvif_object *disp = &drm->display->disp.object;
1098 int ret;
1099
1100 if (dpcd >= 0x12) {
1101 ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
1102 if (ret < 0)
1103 return ret;
1104
1105 dpcd &= ~DP_MST_EN;
1106 if (state)
1107 dpcd |= DP_MST_EN;
1108
1109 ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
1110 if (ret < 0)
1111 return ret;
1112 }
1113
1114 return nvif_mthd(disp, 0, &args, sizeof(args));
1115 }
1116
1117 int
1118 nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
1119 {
1120 int ret, state = 0;
1121
1122 if (!mstm)
1123 return 0;
1124
1125 if (dpcd[0] >= 0x12) {
1126 ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
1127 if (ret < 0)
1128 return ret;
1129
1130 if (!(dpcd[1] & DP_MST_CAP))
1131 dpcd[0] = 0x11;
1132 else
1133 state = allow;
1134 }
1135
1136 ret = nv50_mstm_enable(mstm, dpcd[0], state);
1137 if (ret)
1138 return ret;
1139
1140 ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
1141 if (ret)
1142 return nv50_mstm_enable(mstm, dpcd[0], 0);
1143
1144 return mstm->mgr.mst_state;
1145 }
1146
1147 static void
1148 nv50_mstm_fini(struct nv50_mstm *mstm)
1149 {
1150 if (mstm && mstm->mgr.mst_state)
1151 drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
1152 }
1153
1154 static void
1155 nv50_mstm_init(struct nv50_mstm *mstm)
1156 {
1157 if (mstm && mstm->mgr.mst_state)
1158 drm_dp_mst_topology_mgr_resume(&mstm->mgr);
1159 }
1160
1161 static void
1162 nv50_mstm_del(struct nv50_mstm **pmstm)
1163 {
1164 struct nv50_mstm *mstm = *pmstm;
1165 if (mstm) {
1166 kfree(*pmstm);
1167 *pmstm = NULL;
1168 }
1169 }
1170
1171 static int
1172 nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
1173 int conn_base_id, struct nv50_mstm **pmstm)
1174 {
1175 const int max_payloads = hweight8(outp->dcb->heads);
1176 struct drm_device *dev = outp->base.base.dev;
1177 struct nv50_mstm *mstm;
1178 int ret, i;
1179 u8 dpcd;
1180
1181 /* This is a workaround for some monitors not functioning
1182 * correctly in MST mode on initial module load. I think
1183 * some bad interaction with the VBIOS may be responsible.
1184 *
1185 * A good ol' off and on again seems to work here ;)
1186 */
1187 ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
1188 if (ret >= 0 && dpcd >= 0x12)
1189 drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);
1190
1191 if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
1192 return -ENOMEM;
1193 mstm->outp = outp;
1194 mstm->mgr.cbs = &nv50_mstm;
1195
1196 ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
1197 max_payloads, conn_base_id);
1198 if (ret)
1199 return ret;
1200
1201 for (i = 0; i < max_payloads; i++) {
1202 ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
1203 i, &mstm->msto[i]);
1204 if (ret)
1205 return ret;
1206 }
1207
1208 return 0;
1209 }
1210
1211 /******************************************************************************
1212 * SOR
1213 *****************************************************************************/
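/* nv_encoder->ctrl caches the SOR control word pushed through the core
 * channel: the low bits form a mask of heads currently driving the SOR and
 * bits 8+ hold the protocol; clearing the last head bit zeroes the whole
 * word, turning the output off.
 */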
1214 static void
1215 nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
1216 struct nv50_head_atom *asyh, u8 proto, u8 depth)
1217 {
1218 struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
1219 struct nv50_core *core = disp->core;
1220
1221 if (!asyh) {
1222 nv_encoder->ctrl &= ~BIT(head);
1223 if (!(nv_encoder->ctrl & 0x0000000f))
1224 nv_encoder->ctrl = 0;
1225 } else {
1226 nv_encoder->ctrl |= proto << 8;
1227 nv_encoder->ctrl |= BIT(head);
1228 asyh->or.depth = depth;
1229 }
1230
1231 core->func->sor->ctrl(core, nv_encoder->or, nv_encoder->ctrl, asyh);
1232 }
1233
1234 static void
1235 nv50_sor_disable(struct drm_encoder *encoder)
1236 {
1237 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1238 struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1239
1240 nv_encoder->crtc = NULL;
1241
1242 if (nv_crtc) {
1243 struct nvkm_i2c_aux *aux = nv_encoder->aux;
1244 u8 pwr;
1245
1246 if (aux) {
1247 int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
1248 if (ret == 0) {
1249 pwr &= ~DP_SET_POWER_MASK;
1250 pwr |= DP_SET_POWER_D3;
1251 nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
1252 }
1253 }
1254
1255 nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
1256 nv50_audio_disable(encoder, nv_crtc);
1257 nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
1258 nv50_outp_release(nv_encoder);
1259 }
1260 }
1261
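/* The proto/depth magic numbers used below follow the encoding consumed by
 * the core channel throughout this file: proto 0x0 = LVDS, 0x1/0x2 = TMDS
 * link A/B (|0x4 for dual-link), 0x8/0x9 = DP link A/B; depth 0x2/0x5/0x6
 * correspond to 6/8/10 bpc panels.
 */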
1262 static void
1263 nv50_sor_enable(struct drm_encoder *encoder)
1264 {
1265 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1266 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1267 struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
1268 struct drm_display_mode *mode = &asyh->state.adjusted_mode;
1269 struct {
1270 struct nv50_disp_mthd_v1 base;
1271 struct nv50_disp_sor_lvds_script_v0 lvds;
1272 } lvds = {
1273 .base.version = 1,
1274 .base.method = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
1275 .base.hasht = nv_encoder->dcb->hasht,
1276 .base.hashm = nv_encoder->dcb->hashm,
1277 };
1278 struct nv50_disp *disp = nv50_disp(encoder->dev);
1279 struct drm_device *dev = encoder->dev;
1280 struct nouveau_drm *drm = nouveau_drm(dev);
1281 struct nouveau_connector *nv_connector;
1282 struct nvbios *bios = &drm->vbios;
1283 u8 proto = 0xf;
1284 u8 depth = 0x0;
1285
1286 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1287 nv_encoder->crtc = encoder->crtc;
1288 nv50_outp_acquire(nv_encoder);
1289
1290 switch (nv_encoder->dcb->type) {
1291 case DCB_OUTPUT_TMDS:
1292 if (nv_encoder->link & 1) {
1293 proto = 0x1;
1294 /* Only enable dual-link if:
1295                          * - The mode needs it (i.e. pixel clock > 165 MHz)
1296 * - DCB says we can
1297 * - Not an HDMI monitor, since there's no dual-link
1298 * on HDMI.
1299 */
1300 if (mode->clock >= 165000 &&
1301 nv_encoder->dcb->duallink_possible &&
1302 !drm_detect_hdmi_monitor(nv_connector->edid))
1303 proto |= 0x4;
1304 } else {
1305 proto = 0x2;
1306 }
1307
1308 nv50_hdmi_enable(&nv_encoder->base.base, mode);
1309 break;
1310 case DCB_OUTPUT_LVDS:
1311 proto = 0x0;
1312
1313 if (bios->fp_no_ddc) {
1314 if (bios->fp.dual_link)
1315 lvds.lvds.script |= 0x0100;
1316 if (bios->fp.if_is_24bit)
1317 lvds.lvds.script |= 0x0200;
1318 } else {
1319 if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1320 if (((u8 *)nv_connector->edid)[121] == 2)
1321 lvds.lvds.script |= 0x0100;
1322 } else
1323 if (mode->clock >= bios->fp.duallink_transition_clk) {
1324 lvds.lvds.script |= 0x0100;
1325 }
1326
1327 if (lvds.lvds.script & 0x0100) {
1328 if (bios->fp.strapless_is_24bit & 2)
1329 lvds.lvds.script |= 0x0200;
1330 } else {
1331 if (bios->fp.strapless_is_24bit & 1)
1332 lvds.lvds.script |= 0x0200;
1333 }
1334
1335 if (nv_connector->base.display_info.bpc == 8)
1336 lvds.lvds.script |= 0x0200;
1337 }
1338
1339 nvif_mthd(&disp->disp->object, 0, &lvds, sizeof(lvds));
1340 break;
1341 case DCB_OUTPUT_DP:
1342 if (nv_connector->base.display_info.bpc == 6)
1343 depth = 0x2;
1344 else
1345 if (nv_connector->base.display_info.bpc == 8)
1346 depth = 0x5;
1347 else
1348 depth = 0x6;
1349
1350 if (nv_encoder->link & 1)
1351 proto = 0x8;
1352 else
1353 proto = 0x9;
1354
1355 nv50_audio_enable(encoder, mode);
1356 break;
1357 default:
1358 BUG();
1359 break;
1360 }
1361
1362 nv_encoder->update(nv_encoder, nv_crtc->index, asyh, proto, depth);
1363 }
1364
1365 static const struct drm_encoder_helper_funcs
1366 nv50_sor_help = {
1367 .atomic_check = nv50_outp_atomic_check,
1368 .enable = nv50_sor_enable,
1369 .disable = nv50_sor_disable,
1370 };
1371
1372 static void
1373 nv50_sor_destroy(struct drm_encoder *encoder)
1374 {
1375 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1376 nv50_mstm_del(&nv_encoder->dp.mstm);
1377 drm_encoder_cleanup(encoder);
1378 kfree(encoder);
1379 }
1380
1381 static const struct drm_encoder_funcs
1382 nv50_sor_func = {
1383 .destroy = nv50_sor_destroy,
1384 };
1385
1386 static int
1387 nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
1388 {
1389 struct nouveau_connector *nv_connector = nouveau_connector(connector);
1390 struct nouveau_drm *drm = nouveau_drm(connector->dev);
1391 struct nvkm_bios *bios = nvxx_bios(&drm->client.device);
1392 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
1393 struct nouveau_encoder *nv_encoder;
1394 struct drm_encoder *encoder;
1395 u8 ver, hdr, cnt, len;
1396 u32 data;
1397 int type, ret;
1398
1399 switch (dcbe->type) {
1400 case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
1401 case DCB_OUTPUT_TMDS:
1402 case DCB_OUTPUT_DP:
1403 default:
1404 type = DRM_MODE_ENCODER_TMDS;
1405 break;
1406 }
1407
1408 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1409 if (!nv_encoder)
1410 return -ENOMEM;
1411 nv_encoder->dcb = dcbe;
1412 nv_encoder->update = nv50_sor_update;
1413
1414 encoder = to_drm_encoder(nv_encoder);
1415 encoder->possible_crtcs = dcbe->heads;
1416 encoder->possible_clones = 0;
1417 drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
1418 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
1419 drm_encoder_helper_add(encoder, &nv50_sor_help);
1420
1421 drm_connector_attach_encoder(connector, encoder);
1422
1423 if (dcbe->type == DCB_OUTPUT_DP) {
1424 struct nv50_disp *disp = nv50_disp(encoder->dev);
1425 struct nvkm_i2c_aux *aux =
1426 nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
1427 if (aux) {
1428 if (disp->disp->object.oclass < GF110_DISP) {
1429 /* HW has no support for address-only
1430 * transactions, so we're required to
1431 * use custom I2C-over-AUX code.
1432 */
1433 nv_encoder->i2c = &aux->i2c;
1434 } else {
1435 nv_encoder->i2c = &nv_connector->aux.ddc;
1436 }
1437 nv_encoder->aux = aux;
1438 }
1439
1440 if ((data = nvbios_dp_table(bios, &ver, &hdr, &cnt, &len)) &&
1441 ver >= 0x40 && (nvbios_rd08(bios, data + 0x08) & 0x04)) {
1442 ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
1443 nv_connector->base.base.id,
1444 &nv_encoder->dp.mstm);
1445 if (ret)
1446 return ret;
1447 }
1448 } else {
1449 struct nvkm_i2c_bus *bus =
1450 nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
1451 if (bus)
1452 nv_encoder->i2c = &bus->i2c;
1453 }
1454
1455 return 0;
1456 }
1457
1458 /******************************************************************************
1459 * PIOR
1460 *****************************************************************************/
1461 static int
1462 nv50_pior_atomic_check(struct drm_encoder *encoder,
1463 struct drm_crtc_state *crtc_state,
1464 struct drm_connector_state *conn_state)
1465 {
1466 int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
1467 if (ret)
1468 return ret;
1469 crtc_state->adjusted_mode.clock *= 2;
1470 return 0;
1471 }
1472
1473 static void
1474 nv50_pior_disable(struct drm_encoder *encoder)
1475 {
1476 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1477 struct nv50_core *core = nv50_disp(encoder->dev)->core;
1478 if (nv_encoder->crtc)
1479 core->func->pior->ctrl(core, nv_encoder->or, 0x00000000, NULL);
1480 nv_encoder->crtc = NULL;
1481 nv50_outp_release(nv_encoder);
1482 }
1483
1484 static void
1485 nv50_pior_enable(struct drm_encoder *encoder)
1486 {
1487 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1488 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1489 struct nouveau_connector *nv_connector;
1490 struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
1491 struct nv50_core *core = nv50_disp(encoder->dev)->core;
1492 u8 owner = 1 << nv_crtc->index;
1493 u8 proto;
1494
1495 nv50_outp_acquire(nv_encoder);
1496
1497 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1498 switch (nv_connector->base.display_info.bpc) {
1499 case 10: asyh->or.depth = 0x6; break;
1500 case 8: asyh->or.depth = 0x5; break;
1501 case 6: asyh->or.depth = 0x2; break;
1502 default: asyh->or.depth = 0x0; break;
1503 }
1504
1505 switch (nv_encoder->dcb->type) {
1506 case DCB_OUTPUT_TMDS:
1507 case DCB_OUTPUT_DP:
1508 proto = 0x0;
1509 break;
1510 default:
1511 BUG();
1512 break;
1513 }
1514
1515 core->func->pior->ctrl(core, nv_encoder->or, (proto << 8) | owner, asyh);
1516 nv_encoder->crtc = encoder->crtc;
1517 }
1518
1519 static const struct drm_encoder_helper_funcs
1520 nv50_pior_help = {
1521 .atomic_check = nv50_pior_atomic_check,
1522 .enable = nv50_pior_enable,
1523 .disable = nv50_pior_disable,
1524 };
1525
1526 static void
1527 nv50_pior_destroy(struct drm_encoder *encoder)
1528 {
1529 drm_encoder_cleanup(encoder);
1530 kfree(encoder);
1531 }
1532
1533 static const struct drm_encoder_funcs
1534 nv50_pior_func = {
1535 .destroy = nv50_pior_destroy,
1536 };
1537
1538 static int
1539 nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
1540 {
1541 struct nouveau_drm *drm = nouveau_drm(connector->dev);
1542 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
1543 struct nvkm_i2c_bus *bus = NULL;
1544 struct nvkm_i2c_aux *aux = NULL;
1545 struct i2c_adapter *ddc;
1546 struct nouveau_encoder *nv_encoder;
1547 struct drm_encoder *encoder;
1548 int type;
1549
1550 switch (dcbe->type) {
1551 case DCB_OUTPUT_TMDS:
1552 bus = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
1553 ddc = bus ? &bus->i2c : NULL;
1554 type = DRM_MODE_ENCODER_TMDS;
1555 break;
1556 case DCB_OUTPUT_DP:
1557 aux = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
1558 ddc = aux ? &aux->i2c : NULL;
1559 type = DRM_MODE_ENCODER_TMDS;
1560 break;
1561 default:
1562 return -ENODEV;
1563 }
1564
1565 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1566 if (!nv_encoder)
1567 return -ENOMEM;
1568 nv_encoder->dcb = dcbe;
1569 nv_encoder->i2c = ddc;
1570 nv_encoder->aux = aux;
1571
1572 encoder = to_drm_encoder(nv_encoder);
1573 encoder->possible_crtcs = dcbe->heads;
1574 encoder->possible_clones = 0;
1575 drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
1576 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
1577 drm_encoder_helper_add(encoder, &nv50_pior_help);
1578
1579 drm_connector_attach_encoder(connector, encoder);
1580 return 0;
1581 }
1582
1583 /******************************************************************************
1584 * Atomic
1585 *****************************************************************************/
1586
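/* Push all accumulated state to hardware with a single core channel update:
 * MST payloads are programmed (part 1) beforehand and finalised (part 2)
 * afterwards, and completion is detected through the core notifier rather
 * than by polling the channel.
 */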
1587 static void
1588 nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 *interlock)
1589 {
1590 struct nv50_disp *disp = nv50_disp(drm->dev);
1591 struct nv50_core *core = disp->core;
1592 struct nv50_mstm *mstm;
1593 struct drm_encoder *encoder;
1594
1595 NV_ATOMIC(drm, "commit core %08x\n", interlock[NV50_DISP_INTERLOCK_BASE]);
1596
1597 drm_for_each_encoder(encoder, drm->dev) {
1598 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
1599 mstm = nouveau_encoder(encoder)->dp.mstm;
1600 if (mstm && mstm->modified)
1601 nv50_mstm_prepare(mstm);
1602 }
1603 }
1604
1605 core->func->ntfy_init(disp->sync, NV50_DISP_CORE_NTFY);
1606 core->func->update(core, interlock, true);
1607 if (core->func->ntfy_wait_done(disp->sync, NV50_DISP_CORE_NTFY,
1608 disp->core->chan.base.device))
1609 NV_ERROR(drm, "core notifier timeout\n");
1610
1611 drm_for_each_encoder(encoder, drm->dev) {
1612 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
1613 mstm = nouveau_encoder(encoder)->dp.mstm;
1614 if (mstm && mstm->modified)
1615 nv50_mstm_cleanup(mstm);
1616 }
1617 }
1618 }
1619
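/* Commit tail ordering: disable heads, planes and output paths first (with
 * an optional flush so e.g. MST teardown reaches the hardware before new
 * state is programmed), then enable output paths, heads and planes, and
 * finally fire one interlocked update.  interlock[] accumulates, per channel
 * type, the masks that the final update must synchronise against.
 */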
1620 static void
1621 nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
1622 {
1623 struct drm_device *dev = state->dev;
1624 struct drm_crtc_state *new_crtc_state, *old_crtc_state;
1625 struct drm_crtc *crtc;
1626 struct drm_plane_state *new_plane_state;
1627 struct drm_plane *plane;
1628 struct nouveau_drm *drm = nouveau_drm(dev);
1629 struct nv50_disp *disp = nv50_disp(dev);
1630 struct nv50_atom *atom = nv50_atom(state);
1631 struct nv50_outp_atom *outp, *outt;
1632 u32 interlock[NV50_DISP_INTERLOCK__SIZE] = {};
1633 int i;
1634
1635 NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
1636 drm_atomic_helper_wait_for_fences(dev, state, false);
1637 drm_atomic_helper_wait_for_dependencies(state);
1638 drm_atomic_helper_update_legacy_modeset_state(dev, state);
1639
1640 if (atom->lock_core)
1641 mutex_lock(&disp->mutex);
1642
1643 /* Disable head(s). */
1644 for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
1645 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
1646 struct nv50_head *head = nv50_head(crtc);
1647
1648 NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
1649 asyh->clr.mask, asyh->set.mask);
1650 if (old_crtc_state->active && !new_crtc_state->active)
1651 drm_crtc_vblank_off(crtc);
1652
1653 if (asyh->clr.mask) {
1654 nv50_head_flush_clr(head, asyh, atom->flush_disable);
1655 interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
1656 }
1657 }
1658
1659 /* Disable plane(s). */
1660 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
1661 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
1662 struct nv50_wndw *wndw = nv50_wndw(plane);
1663
1664 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
1665 asyw->clr.mask, asyw->set.mask);
1666 if (!asyw->clr.mask)
1667 continue;
1668
1669 nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw);
1670 }
1671
1672 /* Disable output path(s). */
1673 list_for_each_entry(outp, &atom->outp, head) {
1674 const struct drm_encoder_helper_funcs *help;
1675 struct drm_encoder *encoder;
1676
1677 encoder = outp->encoder;
1678 help = encoder->helper_private;
1679
1680 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
1681 outp->clr.mask, outp->set.mask);
1682
1683 if (outp->clr.mask) {
1684 help->disable(encoder);
1685 interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
1686 if (outp->flush_disable) {
1687 nv50_disp_atomic_commit_core(drm, interlock);
1688 memset(interlock, 0x00, sizeof(interlock));
1689 }
1690 }
1691 }
1692
1693 /* Flush disable. */
1694 if (interlock[NV50_DISP_INTERLOCK_CORE]) {
1695 if (atom->flush_disable) {
1696 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
1697 struct nv50_wndw *wndw = nv50_wndw(plane);
1698 if (interlock[wndw->interlock.type] & wndw->interlock.data) {
1699 if (wndw->func->update)
1700 wndw->func->update(wndw, interlock);
1701 }
1702 }
1703
1704 nv50_disp_atomic_commit_core(drm, interlock);
1705 memset(interlock, 0x00, sizeof(interlock));
1706 }
1707 }
1708
1709 /* Update output path(s). */
1710 list_for_each_entry_safe(outp, outt, &atom->outp, head) {
1711 const struct drm_encoder_helper_funcs *help;
1712 struct drm_encoder *encoder;
1713
1714 encoder = outp->encoder;
1715 help = encoder->helper_private;
1716
1717 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
1718 outp->set.mask, outp->clr.mask);
1719
1720 if (outp->set.mask) {
1721 help->enable(encoder);
1722 interlock[NV50_DISP_INTERLOCK_CORE] = 1;
1723 }
1724
1725 list_del(&outp->head);
1726 kfree(outp);
1727 }
1728
1729 /* Update head(s). */
1730 for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
1731 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
1732 struct nv50_head *head = nv50_head(crtc);
1733
1734 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
1735 asyh->set.mask, asyh->clr.mask);
1736
1737 if (asyh->set.mask) {
1738 nv50_head_flush_set(head, asyh);
1739 interlock[NV50_DISP_INTERLOCK_CORE] = 1;
1740 }
1741
1742 if (new_crtc_state->active) {
1743 if (!old_crtc_state->active)
1744 drm_crtc_vblank_on(crtc);
1745 if (new_crtc_state->event)
1746 drm_crtc_vblank_get(crtc);
1747 }
1748 }
1749
1750 /* Update plane(s). */
1751 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
1752 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
1753 struct nv50_wndw *wndw = nv50_wndw(plane);
1754
1755 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
1756 asyw->set.mask, asyw->clr.mask);
1757 if ( !asyw->set.mask &&
1758 (!asyw->clr.mask || atom->flush_disable))
1759 continue;
1760
1761 nv50_wndw_flush_set(wndw, interlock, asyw);
1762 }
1763
1764 /* Flush update. */
1765 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
1766 struct nv50_wndw *wndw = nv50_wndw(plane);
1767 if (interlock[wndw->interlock.type] & wndw->interlock.data) {
1768 if (wndw->func->update)
1769 wndw->func->update(wndw, interlock);
1770 }
1771 }
1772
1773 if (interlock[NV50_DISP_INTERLOCK_CORE]) {
1774 if (interlock[NV50_DISP_INTERLOCK_BASE] ||
1775 !atom->state.legacy_cursor_update)
1776 nv50_disp_atomic_commit_core(drm, interlock);
1777 else
1778 disp->core->func->update(disp->core, interlock, false);
1779 }
1780
1781 if (atom->lock_core)
1782 mutex_unlock(&disp->mutex);
1783
1784 /* Wait for HW to signal completion. */
1785 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
1786 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
1787 struct nv50_wndw *wndw = nv50_wndw(plane);
1788 int ret = nv50_wndw_wait_armed(wndw, asyw);
1789 if (ret)
1790 NV_ERROR(drm, "%s: timeout\n", plane->name);
1791 }
1792
1793 for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
1794 if (new_crtc_state->event) {
1795 unsigned long flags;
1796 /* Get correct count/ts if racing with vblank irq */
1797 if (new_crtc_state->active)
1798 drm_crtc_accurate_vblank_count(crtc);
1799 spin_lock_irqsave(&crtc->dev->event_lock, flags);
1800 drm_crtc_send_vblank_event(crtc, new_crtc_state->event);
1801 spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
1802
1803 new_crtc_state->event = NULL;
1804 if (new_crtc_state->active)
1805 drm_crtc_vblank_put(crtc);
1806 }
1807 }
1808
1809 drm_atomic_helper_commit_hw_done(state);
1810 drm_atomic_helper_cleanup_planes(dev, state);
1811 drm_atomic_helper_commit_cleanup_done(state);
1812 drm_atomic_state_put(state);
1813 }
1814
1815 static void
1816 nv50_disp_atomic_commit_work(struct work_struct *work)
1817 {
1818 struct drm_atomic_state *state =
1819 container_of(work, typeof(*state), commit_work);
1820 nv50_disp_atomic_commit_tail(state);
1821 }
1822
1823 static int
1824 nv50_disp_atomic_commit(struct drm_device *dev,
1825 struct drm_atomic_state *state, bool nonblock)
1826 {
1827 struct nouveau_drm *drm = nouveau_drm(dev);
1828 struct drm_plane_state *new_plane_state;
1829 struct drm_plane *plane;
1830 struct drm_crtc *crtc;
1831 bool active = false;
1832 int ret, i;
1833
1834 ret = pm_runtime_get_sync(dev->dev);
1835 if (ret < 0 && ret != -EACCES)
1836 return ret;
1837
1838 ret = drm_atomic_helper_setup_commit(state, nonblock);
1839 if (ret)
1840 goto done;
1841
1842 INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);
1843
1844 ret = drm_atomic_helper_prepare_planes(dev, state);
1845 if (ret)
1846 goto done;
1847
1848 if (!nonblock) {
1849 ret = drm_atomic_helper_wait_for_fences(dev, state, true);
1850 if (ret)
1851 goto err_cleanup;
1852 }
1853
1854 ret = drm_atomic_helper_swap_state(state, true);
1855 if (ret)
1856 goto err_cleanup;
1857
1858 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
1859 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
1860 struct nv50_wndw *wndw = nv50_wndw(plane);
1861
1862 if (asyw->set.image)
1863 nv50_wndw_ntfy_enable(wndw, asyw);
1864 }
1865
1866 drm_atomic_state_get(state);
1867
1868 if (nonblock)
1869 queue_work(system_unbound_wq, &state->commit_work);
1870 else
1871 nv50_disp_atomic_commit_tail(state);
1872
1873 drm_for_each_crtc(crtc, dev) {
1874 if (crtc->state->enable) {
1875 if (!drm->have_disp_power_ref) {
1876 drm->have_disp_power_ref = true;
1877 return 0;
1878 }
1879 active = true;
1880 break;
1881 }
1882 }
1883
1884 if (!active && drm->have_disp_power_ref) {
1885 pm_runtime_put_autosuspend(dev->dev);
1886 drm->have_disp_power_ref = false;
1887 }
1888
1889 err_cleanup:
1890 if (ret)
1891 drm_atomic_helper_cleanup_planes(dev, state);
1892 done:
1893 pm_runtime_put_autosuspend(dev->dev);
1894 return ret;
1895 }
1896
1897 static struct nv50_outp_atom *
1898 nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
1899 {
1900 struct nv50_outp_atom *outp;
1901
1902 list_for_each_entry(outp, &atom->outp, head) {
1903 if (outp->encoder == encoder)
1904 return outp;
1905 }
1906
1907 outp = kzalloc(sizeof(*outp), GFP_KERNEL);
1908 if (!outp)
1909 return ERR_PTR(-ENOMEM);
1910
1911 list_add(&outp->head, &atom->outp);
1912 outp->encoder = encoder;
1913 return outp;
1914 }
1915
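/*
 * If the connector's old CRTC was active and the new state requires a
 * full modeset, flag the output's control state for clearing before the
 * new state is programmed.  MST encoders additionally force a
 * flush/disable pass, and either case requires taking the core channel
 * lock during the commit.
 */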
1916 static int
1917 nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
1918 struct drm_connector_state *old_connector_state)
1919 {
1920 struct drm_encoder *encoder = old_connector_state->best_encoder;
1921 struct drm_crtc_state *old_crtc_state, *new_crtc_state;
1922 struct drm_crtc *crtc;
1923 struct nv50_outp_atom *outp;
1924
1925 if (!(crtc = old_connector_state->crtc))
1926 return 0;
1927
1928 old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
1929 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
1930 if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
1931 outp = nv50_disp_outp_atomic_add(atom, encoder);
1932 if (IS_ERR(outp))
1933 return PTR_ERR(outp);
1934
1935 if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
1936 outp->flush_disable = true;
1937 atom->flush_disable = true;
1938 }
1939 outp->clr.ctrl = true;
1940 atom->lock_core = true;
1941 }
1942
1943 return 0;
1944 }
1945
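/*
 * Counterpart to the _clr path for the new connector state: when the
 * target CRTC is active and a modeset is required, flag the output's
 * control state for (re)programming under the core channel lock.
 */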
1946 static int
1947 nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
1948 struct drm_connector_state *connector_state)
1949 {
1950 struct drm_encoder *encoder = connector_state->best_encoder;
1951 struct drm_crtc_state *new_crtc_state;
1952 struct drm_crtc *crtc;
1953 struct nv50_outp_atom *outp;
1954
1955 if (!(crtc = connector_state->crtc))
1956 return 0;
1957
1958 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
1959 if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
1960 outp = nv50_disp_outp_atomic_add(atom, encoder);
1961 if (IS_ERR(outp))
1962 return PTR_ERR(outp);
1963
1964 outp->set.ctrl = true;
1965 atom->lock_core = true;
1966 }
1967
1968 return 0;
1969 }
1970
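/*
 * Driver atomic_check hook: colour management is handled per-plane on
 * this hardware, so pull all planes on a CRTC into the state when its
 * colour management changed, run the generic helper checks, and then
 * derive the per-output set/clr flags from the old and new connector
 * states.
 */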
1971 static int
1972 nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
1973 {
1974 struct nv50_atom *atom = nv50_atom(state);
1975 struct drm_connector_state *old_connector_state, *new_connector_state;
1976 struct drm_connector *connector;
1977 struct drm_crtc_state *new_crtc_state;
1978 struct drm_crtc *crtc;
1979 int ret, i;
1980
1981 /* We need to handle colour management on a per-plane basis. */
1982 for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
1983 if (new_crtc_state->color_mgmt_changed) {
1984 ret = drm_atomic_add_affected_planes(state, crtc);
1985 if (ret)
1986 return ret;
1987 }
1988 }
1989
1990 ret = drm_atomic_helper_check(dev, state);
1991 if (ret)
1992 return ret;
1993
1994 for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
1995 ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
1996 if (ret)
1997 return ret;
1998
1999 ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
2000 if (ret)
2001 return ret;
2002 }
2003
2004 return 0;
2005 }
2006
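/*
 * nv50 subclasses drm_atomic_state (struct nv50_atom) to carry the list
 * of per-output atoms built up during atomic_check, so the clear hook
 * must free that list before handing over to the default helpers.
 */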
2007 static void
2008 nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
2009 {
2010 struct nv50_atom *atom = nv50_atom(state);
2011 struct nv50_outp_atom *outp, *outt;
2012
2013 list_for_each_entry_safe(outp, outt, &atom->outp, head) {
2014 list_del(&outp->head);
2015 kfree(outp);
2016 }
2017
2018 drm_atomic_state_default_clear(state);
2019 }
2020
2021 static void
2022 nv50_disp_atomic_state_free(struct drm_atomic_state *state)
2023 {
2024 struct nv50_atom *atom = nv50_atom(state);
2025 drm_atomic_state_default_release(&atom->state);
2026 kfree(atom);
2027 }
2028
2029 static struct drm_atomic_state *
2030 nv50_disp_atomic_state_alloc(struct drm_device *dev)
2031 {
2032 struct nv50_atom *atom;
2033 if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
2034 drm_atomic_state_init(dev, &atom->state) < 0) {
2035 kfree(atom);
2036 return NULL;
2037 }
2038 INIT_LIST_HEAD(&atom->outp);
2039 return &atom->state;
2040 }
2041
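/*
 * Mode config hooks wiring the custom atomic state handling and the
 * check/commit paths above into the DRM core.
 */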
2042 static const struct drm_mode_config_funcs
2043 nv50_disp_func = {
2044 .fb_create = nouveau_user_framebuffer_create,
2045 .output_poll_changed = drm_fb_helper_output_poll_changed,
2046 .atomic_check = nv50_disp_atomic_check,
2047 .atomic_commit = nv50_disp_atomic_commit,
2048 .atomic_state_alloc = nv50_disp_atomic_state_alloc,
2049 .atomic_state_clear = nv50_disp_atomic_state_clear,
2050 .atomic_state_free = nv50_disp_atomic_state_free,
2051 };
2052
2053 /******************************************************************************
2054 * Init
2055 *****************************************************************************/
2056
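/*
 * Quiesce the display for suspend/unload: nv50_wndw_fini() halts each
 * window plane, and nv50_mstm_fini() shuts down the MST state hanging
 * off the physical (non-MST) encoders.
 */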
2057 void
2058 nv50_display_fini(struct drm_device *dev)
2059 {
2060 struct nouveau_encoder *nv_encoder;
2061 struct drm_encoder *encoder;
2062 struct drm_plane *plane;
2063
2064 drm_for_each_plane(plane, dev) {
2065 struct nv50_wndw *wndw = nv50_wndw(plane);
2066 if (plane->funcs != &nv50_wndw)
2067 continue;
2068 nv50_wndw_fini(wndw);
2069 }
2070
2071 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2072 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
2073 nv_encoder = nouveau_encoder(encoder);
2074 nv50_mstm_fini(nv_encoder->dp.mstm);
2075 }
2076 }
2077 }
2078
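/*
 * Bring the display back up: reinitialise the core channel, resume MST
 * state on the physical encoders via nv50_mstm_init(), and restart each
 * window plane with nv50_wndw_init().
 */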
2079 int
2080 nv50_display_init(struct drm_device *dev)
2081 {
2082 struct nv50_core *core = nv50_disp(dev)->core;
2083 struct drm_encoder *encoder;
2084 struct drm_plane *plane;
2085
2086 core->func->init(core);
2087
2088 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2089 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
2090 struct nouveau_encoder *nv_encoder =
2091 nouveau_encoder(encoder);
2092 nv50_mstm_init(nv_encoder->dp.mstm);
2093 }
2094 }
2095
2096 drm_for_each_plane(plane, dev) {
2097 struct nv50_wndw *wndw = nv50_wndw(plane);
2098 if (plane->funcs != &nv50_wndw)
2099 continue;
2100 nv50_wndw_init(wndw);
2101 }
2102
2103 return 0;
2104 }
2105
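/*
 * Tear down the core channel and release the shared sync buffer
 * (unmap, unpin, drop the reference) before freeing the nv50_disp.
 */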
2106 void
2107 nv50_display_destroy(struct drm_device *dev)
2108 {
2109 struct nv50_disp *disp = nv50_disp(dev);
2110
2111 nv50_core_del(&disp->core);
2112
2113 nouveau_bo_unmap(disp->sync);
2114 if (disp->sync)
2115 nouveau_bo_unpin(disp->sync);
2116 nouveau_bo_ref(NULL, &disp->sync);
2117
2118 nouveau_display(dev)->priv = NULL;
2119 kfree(disp);
2120 }
2121
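/*
 * The atomic ioctl is opt-in for nouveau: it is only exposed when the
 * "atomic" module parameter is set, presumably by booting with
 * nouveau.atomic=1 or loading the module with "modprobe nouveau atomic=1".
 */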
2122 MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
2123 static int nouveau_atomic = 0;
2124 module_param_named(atomic, nouveau_atomic, int, 0400);
2125
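/*
 * Construct the nv50 display: allocate struct nv50_disp and hook it into
 * nouveau_display, allocate and map a small VRAM buffer used for
 * notifiers and semaphores, create the core (master) EVO channel, create
 * a DRM CRTC for each bit in the detected head mask (fixed at 0x3 on
 * older chips), and instantiate encoders/connectors from the VBIOS DCB
 * table (SOR for TMDS/LVDS/DP, DAC for analog, PIOR for off-chip
 * outputs).  Connectors that end up with no usable encoder are removed
 * again at the end.
 */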
2126 int
2127 nv50_display_create(struct drm_device *dev)
2128 {
2129 struct nvif_device *device = &nouveau_drm(dev)->client.device;
2130 struct nouveau_drm *drm = nouveau_drm(dev);
2131 struct dcb_table *dcb = &drm->vbios.dcb;
2132 struct drm_connector *connector, *tmp;
2133 struct nv50_disp *disp;
2134 struct dcb_output *dcbe;
2135 int crtcs, ret, i;
2136
2137 disp = kzalloc(sizeof(*disp), GFP_KERNEL);
2138 if (!disp)
2139 return -ENOMEM;
2140
2141 mutex_init(&disp->mutex);
2142
2143 nouveau_display(dev)->priv = disp;
2144 nouveau_display(dev)->dtor = nv50_display_destroy;
2145 nouveau_display(dev)->init = nv50_display_init;
2146 nouveau_display(dev)->fini = nv50_display_fini;
2147 disp->disp = &nouveau_display(dev)->disp;
2148 dev->mode_config.funcs = &nv50_disp_func;
2149 dev->driver->driver_features |= DRIVER_PREFER_XBGR_30BPP;
2150 if (nouveau_atomic)
2151 dev->driver->driver_features |= DRIVER_ATOMIC;
2152
2153 /* small shared memory area we use for notifiers and semaphores */
2154 ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
2155 0, 0x0000, NULL, NULL, &disp->sync);
2156 if (!ret) {
2157 ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
2158 if (!ret) {
2159 ret = nouveau_bo_map(disp->sync);
2160 if (ret)
2161 nouveau_bo_unpin(disp->sync);
2162 }
2163 if (ret)
2164 nouveau_bo_ref(NULL, &disp->sync);
2165 }
2166
2167 if (ret)
2168 goto out;
2169
2170 /* allocate master evo channel */
2171 ret = nv50_core_new(drm, &disp->core);
2172 if (ret)
2173 goto out;
2174
2175 /* create crtc objects to represent the hw heads */
2176 if (disp->disp->object.oclass >= GV100_DISP)
2177 crtcs = nvif_rd32(&device->object, 0x610060) & 0xff;
2178 else
2179 if (disp->disp->object.oclass >= GF110_DISP)
2180 crtcs = nvif_rd32(&device->object, 0x612004) & 0xf;
2181 else
2182 crtcs = 0x3;
2183
2184 for (i = 0; i < fls(crtcs); i++) {
2185 if (!(crtcs & (1 << i)))
2186 continue;
2187 ret = nv50_head_create(dev, i);
2188 if (ret)
2189 goto out;
2190 }
2191
2192 /* create encoder/connector objects based on VBIOS DCB table */
2193 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
2194 connector = nouveau_connector_create(dev, dcbe->connector);
2195 if (IS_ERR(connector))
2196 continue;
2197
2198 if (dcbe->location == DCB_LOC_ON_CHIP) {
2199 switch (dcbe->type) {
2200 case DCB_OUTPUT_TMDS:
2201 case DCB_OUTPUT_LVDS:
2202 case DCB_OUTPUT_DP:
2203 ret = nv50_sor_create(connector, dcbe);
2204 break;
2205 case DCB_OUTPUT_ANALOG:
2206 ret = nv50_dac_create(connector, dcbe);
2207 break;
2208 default:
2209 ret = -ENODEV;
2210 break;
2211 }
2212 } else {
2213 ret = nv50_pior_create(connector, dcbe);
2214 }
2215
2216 if (ret) {
2217 NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
2218 dcbe->location, dcbe->type,
2219 ffs(dcbe->or) - 1, ret);
2220 ret = 0;
2221 }
2222 }
2223
2224 /* cull any connectors we created that don't have an encoder */
2225 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
2226 if (connector->encoder_ids[0])
2227 continue;
2228
2229 NV_WARN(drm, "%s has no encoders, removing\n",
2230 connector->name);
2231 connector->funcs->destroy(connector);
2232 }
2233
2234 out:
2235 if (ret)
2236 nv50_display_destroy(dev);
2237 return ret;
2238 }