/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include "drmP.h"
#include "drm_crtc_helper.h"

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fb.h"
#include "nv50_display.h"

#define MEM_SYNC 0xe0000001
#define MEM_VRAM 0xe0010000
#include "nouveau_dma.h"

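/* MEM_SYNC and MEM_VRAM are the EVO object handles this file hangs off the
 * display hash table in nvd0_display_create(); the first appears to name the
 * DMA object covering the channel sync area, the second the one covering all
 * of VRAM.
 */
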
struct nvd0_display {
	struct nouveau_gpuobj *mem;
	struct {
		dma_addr_t handle;
		u32 *ptr;
	} evo[1];
	struct {
		struct dcb_entry *dis;
		struct dcb_entry *ena;
		int crtc;
		int pclk;
		u16 script;
	} irq;
};

static struct nvd0_display *
nvd0_display(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	return dev_priv->engine.display.priv;
}

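/* Channel access helpers.  evo_icmd() pushes a single method/data pair
 * through the PIO interface at 0x610700, while evo_wait()/evo_kick() drive
 * the normal DMA push buffer: make room for 'nr' words (writing what looks
 * like a jump-to-start command, 0x20000000, when wrapping) and then bump
 * the PUT register at 0x640000 to submit.
 */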
static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}

static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	return disp->evo[id].ptr + put;
}

static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);
	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}

#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d) *((p)++) = (d)

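/* Typical submission pattern for the core channel (id 0), used throughout
 * this file.  The header word encodes (count << 18) | method, followed by
 * 'count' data words; method 0x0080 with zero data is what the code below
 * uses to commit (update) pending state:
 *
 *	push = evo_wait(dev, 0, 2);
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);
 *		evo_data(push, 0x00000000);
 *		evo_kick(push, dev, 0);
 *	}
 */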
static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * CRTC
 *****************************************************************************/
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push, mode;

	mode = 0x00000000;
	if (on) {
		/* 0x11: 6bpc dynamic 2x2
		 * 0x13: 8bpc dynamic 2x2
		 * 0x19: 6bpc static 2x2
		 * 0x1b: 8bpc static 2x2
		 * 0x21: 6bpc temporal
		 * 0x23: 8bpc temporal
		 */
		mode = 0x00000011;
	}

	push = evo_wait(dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

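/* Scaler setup.  The ratios below are native/requested in 0.19 fixed point;
 * DRM_MODE_SCALE_ASPECT scales both axes by the smaller of the two so the
 * image fits within the native mode without changing aspect, while
 * FULLSCREEN stretches to the full native resolution.  The 0x04c0/0x04b8
 * methods presumably set the scaler output and input sizes (meanings per
 * nouveau's reverse engineering).
 */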
static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_connector *nv_connector;
	u32 *push, outX, outY;

	outX = mode->hdisplay;
	outY = mode->vdisplay;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode) {
		struct drm_display_mode *native = nv_connector->native_mode;
		u32 xratio = (native->hdisplay << 19) / mode->hdisplay;
		u32 yratio = (native->vdisplay << 19) / mode->vdisplay;

		switch (type) {
		case DRM_MODE_SCALE_ASPECT:
			if (xratio > yratio) {
				outX = (mode->hdisplay * yratio) >> 19;
				outY = (mode->vdisplay * yratio) >> 19;
			} else {
				outX = (mode->hdisplay * xratio) >> 19;
				outY = (mode->vdisplay * xratio) >> 19;
			}
			break;
		case DRM_MODE_SCALE_FULLSCREEN:
			outX = native->hdisplay;
			outY = native->vdisplay;
			break;
		default:
			break;
		}
	}

	push = evo_wait(dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (outY << 16) | outX);
		evo_data(push, (outY << 16) | outX);
		evo_data(push, (outY << 16) | outX);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

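/* Point the head at a new framebuffer: offset in 256-byte units, size,
 * pitch, format and the DMA object nouveau_framebuffer set up (r_dma).
 * The trailing 0x0080 method commits the change immediately when 'update'
 * is set; otherwise it is left for a later update.
 */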
static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, fb->dev, 0);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}

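/* Show or hide the hardware cursor.  0x85000000 vs 0x05000000 appears to
 * toggle the cursor enable bit for a 64x64 image; when showing, the cursor
 * bo offset and the VRAM DMA object are supplied as well.
 */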
static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);
	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, MEM_VRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, dev, 0);
	}
}

static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 6);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}

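/* prepare() above disconnects the head's fb and sync objects and hides the
 * cursor before a modeset; commit() below re-arms everything once the new
 * state has been pushed, restoring the fb ctxdma, LUT and cursor, and then
 * triggers an update.
 */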
static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, MEM_VRAM);
		evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0xffffff00);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}

static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int
nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	if (old_fb) {
		nvfb = nouveau_framebuffer(old_fb);
		nouveau_bo_unpin(nvfb->nvbo);
	}

	return 0;
}

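/* Timings handed to the hardware below are derived from the DRM mode:
 * sync widths are programmed minus one, and the remaining values appear to
 * be positions measured from the start of sync (hss2be/vss2be = sync width
 * plus back porch, i.e. sync start to blanking end; hss2de/vss2de = total
 * minus front porch, i.e. sync start to display end).
 */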
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;
	u32 vss2de = vtotal - vfrntp;
	u32 hstart = 0;
	u32 vstart = 0;
	u32 *push;
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, (vstart << 16) | hstart);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0408 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x31ec6000); /* ??? */
		evo_kick(push, crtc->dev, 0);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
	nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}

static int
nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
	return 0;
}

static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

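/* LUT entries live in a VRAM buffer mapped below: each of the 256 entries
 * is 0x20 bytes, with 16-bit R/G/B words at offsets 0, 2 and 4.  The values
 * written are the top 14 bits of the DRM gamma value plus a 0x6000 bias,
 * per nouveau's reverse-engineered format for these displays.
 */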
static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
		writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
		writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
	}
}

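/* Only 64x64 cursors are accepted.  The user's bo is copied word-by-word
 * into the per-head cursor buffer allocated in nvd0_crtc_create(), so the
 * GEM object doesn't need to stay pinned after this returns.
 */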
static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}

static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	const u32 data = (y << 16) | x;

	nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
	nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
	return 0;
}

static void
nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = min(start + size, (u32)256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nvd0_crtc_lut_load(crtc);
}

static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};

static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}

/******************************************************************************
 * DAC
 *****************************************************************************/
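/* DAC power management.  Bits 0 and 2 of the control word are set for the
 * sync-off DPMS states, presumably hsync/vsync disables as on nv50; bit 31
 * appears to latch the update, and the register is polled before and after
 * so we never race a previous change that is still in flight.
 */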
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}

static bool
nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
		evo_data(push, 1 << nv_crtc->index);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
	}
}

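/* No analog load detection yet; the DAC is simply reported as disconnected
 * when asked.
 */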
static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	return connector_status_disconnected;
}

static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};

static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};

static int
nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * SOR
 *****************************************************************************/
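/* SOR power management.  A single OR can back more than one DCB encoder, so
 * before powering it down we walk the other TMDS encoders sharing this OR
 * and bail out if one of them is still on.
 */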
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}

static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}

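/* SOR mode control: the low bits carry the head mask, and the extra bits
 * appear to select the TMDS link configuration based on the DCB sorconf:
 * 0x100 for single-link on link A below a 165MHz pixel clock, 0x500 (what
 * looks like dual-link) above that, and 0x200 when only link B is wired.
 */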
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push;

	if (nv_encoder->dcb->sorconf.link & 1) {
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100;
		else
			mode_ctrl |= 0x00000500;
	} else {
		mode_ctrl |= 0x00000200;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}

static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};

static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * IRQ
 *****************************************************************************/
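/* Display "supervisor" handling.  A modeset raises three interrupts in
 * sequence (bits 0, 1 and 2 of 0x6100ac): the first stage records which
 * output is going away and which is being enabled and runs the VBIOS
 * power-down script, the second sets the pixel clock and runs the scripts
 * for the new output, and the third runs the final script.  The handlers
 * keep their unk1/unk2/unk4 names because the exact hardware semantics are
 * only known from reverse engineering.
 */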
static struct dcb_entry *
lookup_dcb(struct drm_device *dev, int id, u32 mc)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int type, or, i;

	if (id < 4) {
		type = OUTPUT_ANALOG;
		or = id;
	} else {
		type = OUTPUT_TMDS;
		or = id - 4;
	}

	for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
		struct dcb_entry *dcb = &dev_priv->vbios.dcb.entry[i];
		if (dcb->type == type && (dcb->or & (1 << or)))
			return dcb;
	}

	NV_INFO(dev, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
	return NULL;
}

static void
nvd0_display_unk1_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	u32 unkn, crtc = 0;
	int i;

	NV_INFO(dev, "PDISP: 1 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	unkn = nv_rd32(dev, 0x6101d4);
	if (!unkn) {
		unkn = nv_rd32(dev, 0x6109d4);
		crtc = 1;
	}

	disp->irq.ena = NULL;
	disp->irq.dis = NULL;
	disp->irq.crtc = crtc;
	disp->irq.pclk = nv_rd32(dev, 0x660450 + (disp->irq.crtc * 0x300));
	disp->irq.pclk /= 1000;

	for (i = 0; i < 8; i++) {
		u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
		u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));

		if (mcc & (1 << crtc))
			disp->irq.dis = lookup_dcb(dev, i, mcc);

		if (mcp & (1 << crtc)) {
			disp->irq.ena = lookup_dcb(dev, i, mcp);
			switch (disp->irq.ena->type) {
			case OUTPUT_ANALOG:
				disp->irq.script = 0x00ff;
				break;
			case OUTPUT_TMDS:
				disp->irq.script = (mcp & 0x00000f00) >> 8;
				if (disp->irq.pclk >= 165000)
					disp->irq.script |= 0x0100;
				break;
			default:
				disp->irq.script = 0xbeef;
				break;
			}
		}
	}

	dcb = disp->irq.dis;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk2_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;
	int or;
	u32 tmp;

	NV_INFO(dev, "PDISP: 2 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.dis;
	disp->irq.dis = NULL;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);

	nv50_crtc_set_clock(dev, crtc, pclk);

	dcb = disp->irq.ena;
	if (!dcb)
		goto ack;
	or = ffs(dcb->or) - 1;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

	nv_wr32(dev, 0x612200 + (crtc * 0x800), 0x00000000);
	switch (dcb->type) {
	case OUTPUT_ANALOG:
		nv_wr32(dev, 0x612280 + (or * 0x800), 0x00000000);
		break;
	case OUTPUT_TMDS:
		if (disp->irq.pclk >= 165000)
			tmp = 0x00000101;
		else
			tmp = 0x00000000;

		nv_mask(dev, 0x612300 + (or * 0x800), 0x00000707, tmp);
		break;
	default:
		break;
	}

ack:
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk4_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;

	NV_INFO(dev, "PDISP: 4 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.ena;
	disp->irq.ena = NULL;
	if (!dcb)
		goto ack;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

ack:
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

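/* Top-level PDISP interrupt.  Bit 1 of 0x610088 reports an EVO channel
 * exception, which is logged and acknowledged; bit 20 covers the supervisor
 * stages handled above; bits 24 and 25 look like per-head status and are
 * currently just acknowledged.  Anything else is reported as unknown.
 */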
static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));

			if (stat & 0x00000001)
				nvd0_display_unk1_handler(dev);
			if (stat & 0x00000002)
				nvd0_display_unk2_handler(dev);
			if (stat & 0x00000004)
				nvd0_display_unk4_handler(dev);
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}

/******************************************************************************
 * Init
 *****************************************************************************/
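/* Channel bring-up/teardown.  The per-channel control registers live at
 * 0x610490 + (i * 0x10): index 0 is the core (master) channel driven by the
 * push buffer above, indices 13 and 14 are the two cursor channels, and the
 * per-channel bits in 0x610090/0x6100a0 appear to be interrupt/status
 * enables.
 */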
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}

int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);

	return 0;
}

void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}

int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

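	/* disp->mem holds both the EVO hash table (at offset 0) and the DMA
	 * objects it references (from offset 0x1000 on, 0x20 bytes apart).
	 * Each hash entry pairs a handle with a second word that appears to
	 * encode the object's offset within this gpuobj (<< 9) plus a valid
	 * bit; the objects themselves describe the channel sync area inside
	 * disp->mem and various views of VRAM used for framebuffers.
	 */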
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}