/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include "drmP.h"
#include "drm_crtc_helper.h"

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_fb.h"

#define MEM_SYNC 0xe0000001
#define MEM_VRAM 0xe0010000

struct nvd0_display {
	struct nouveau_gpuobj *mem;
	struct {
		dma_addr_t handle;
		u32 *ptr;
	} evo[1];
};

static struct nvd0_display *
nvd0_display(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	return dev_priv->engine.display.priv;
}

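/* Submit a single method/data pair to an EVO channel through what appears to
 * be the channel's immediate-command/debug interface (0x6107xx), rather than
 * the push buffer.  Interpretation of these registers is a best guess; the
 * helper is not referenced elsewhere in this file yet.
 */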
static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}

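/* Return a pointer to space for 'nr' dwords in channel 'id's push buffer.
 * If the request would run past the end of the page, a 0x20000000 word is
 * written at the current position (which looks like a jump-back-to-start
 * token), PUT is reset and we wait for GET to return to zero before handing
 * out offset 0 again.
 */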
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	return disp->evo[id].ptr + put;
}

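/* Submit everything queued since the last evo_wait() by advancing the
 * channel's PUT pointer to the current push-buffer position.
 */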
static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);
	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}

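/* EVO method headers: the method count lives in the upper bits (18+) and the
 * method offset in the low bits; each header is followed by 'count' data words.
 */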
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)

static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * CRTC
 *****************************************************************************/
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push, mode;

	mode = 0x00000000;
	if (on) {
		/* 0x11: 6bpc dynamic 2x2
		 * 0x13: 8bpc dynamic 2x2
		 * 0x19: 6bpc static 2x2
		 * 0x1b: 8bpc static 2x2
		 * 0x21: 6bpc temporal
		 * 0x23: 8bpc temporal
		 */
		mode = 0x00000011;
	}

	push = evo_wait(dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push;

	/*XXX: actually handle scaling */

	push = evo_wait(dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		evo_kick(push, fb->dev, 0);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}

static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);
	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, MEM_VRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, dev, 0);
	}
}

static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 8);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}

static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, MEM_VRAM);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}

static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int
nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	if (old_fb) {
		nvfb = nouveau_framebuffer(old_fb);
		nouveau_bo_unpin(nvfb->nvbo);
	}

	return 0;
}

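/* Program the head's timings and clocks from the DRM mode.  The derived
 * values appear to be sync widths (minus one), and the distances from sync
 * start to blanking end (hss2be/vss2be) and from sync start to display end
 * (hss2de/vss2de); the 0x042c/0x0450/0x0408 methods are still unidentified.
 */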
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;
	u32 vss2de = vtotal - vfrntp;
	u32 hstart = 0;
	u32 vstart = 0;
	u32 *push;
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, (vstart << 16) | hstart);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0408 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x31ec6000); /* ??? */
		evo_kick(push, crtc->dev, 0);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
	nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}

static int
nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
	return 0;
}

static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
	}
}

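/* Replace the hardware cursor: copy the caller's 64x64 32-bit image word by
 * word into the CRTC's own cursor buffer, then toggle visibility through the
 * core channel if it changed.
 */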
static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}

static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	const u32 data = (y << 16) | x;

	nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
	nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
	return 0;
}

static void
nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = min(start + size, (u32)256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nvd0_crtc_lut_load(crtc);
}

static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};

static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	ret = nouveau_bo_new(dev, 4096, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}

/******************************************************************************
 * DAC
 *****************************************************************************/

/******************************************************************************
 * SOR
 *****************************************************************************/
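/* If another TMDS encoder shares this OR and is still lit, only record our own
 * DPMS state and leave the hardware alone; otherwise write the new state to
 * what appears to be the SOR's power-control register (0x61c004) and wait for
 * the transition to settle.
 */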
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}

static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push;

	if (nv_encoder->dcb->sorconf.link & 1) {
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100;
		else
			mode_ctrl |= 0x00000500;
	} else {
		mode_ctrl |= 0x00000200;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}

static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};

static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * IRQ
 *****************************************************************************/
static void
nvd0_display_unk1_handler(struct drm_device *dev)
{
	NV_INFO(dev, "PDISP: 1 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk2_handler(struct drm_device *dev)
{
	NV_INFO(dev, "PDISP: 2 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk4_handler(struct drm_device *dev)
{
	NV_INFO(dev, "PDISP: 4 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

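/* Top-level PDISP interrupt handler: log and ack EVO channel errors, dispatch
 * the three handlers above for what look like the supervisor-style interrupts
 * reported in 0x6100ac, ack the remaining status registers, and complain
 * about anything left over.
 */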
static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));

			if (stat & 0x00000001)
				nvd0_display_unk1_handler(dev);
			if (stat & 0x00000002)
				nvd0_display_unk2_handler(dev);
			if (stat & 0x00000004)
				nvd0_display_unk4_handler(dev);
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}

/******************************************************************************
 * Init
 *****************************************************************************/
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}

int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);

	return 0;
}

void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}

int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

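	/* The layout below appears to be: hash entries at 0x0000+ pairing a
	 * handle with a context word that encodes the DMA object's offset
	 * (<< 9) plus a valid bit, and the DMA objects themselves at 0x1000+
	 * describing the sync area and VRAM apertures.
	 */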
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}