2 * Copyright 2011 Red Hat Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
25 #include <linux/dma-mapping.h>
28 #include <drm/drm_crtc_helper.h>
30 #include "nouveau_drm.h"
31 #include "nouveau_dma.h"
32 #include "nouveau_gem.h"
33 #include "nouveau_connector.h"
34 #include "nouveau_encoder.h"
35 #include "nouveau_crtc.h"
36 #include "nouveau_fence.h"
37 #include "nv50_display.h"
39 #include <core/gpuobj.h>
41 #include <subdev/timer.h>
42 #include <subdev/bar.h>
43 #include <subdev/fb.h>
/* EVO channel indices: one master channel, then per-head base (flip),
 * overlay, overlay-immediate and cursor channels, c = head index.
 */
47 #define EVO_MASTER (0x00)
48 #define EVO_FLIP(c) (0x01 + (c))
49 #define EVO_OVLY(c) (0x05 + (c))
50 #define EVO_OIMM(c) (0x09 + (c))
51 #define EVO_CURS(c) (0x0d + (c))
53 /* offsets in shared sync bo of various structures */
54 #define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
/* master-channel notifier slot, and the two per-channel flip semaphores */
55 #define EVO_MAST_NTFY EVO_SYNC( 0, 0x00)
56 #define EVO_FLIP_SEM0(c) EVO_SYNC((c), 0x00)
57 #define EVO_FLIP_SEM1(c) EVO_SYNC((c), 0x10)
70 struct nouveau_gpuobj *mem;
71 struct nouveau_bo *sync;
74 struct tasklet_struct tasklet;
/* Fetch the driver-private display state hung off the generic nouveau
 * display structure.
 */
78 static struct nvd0_display *
79 nvd0_display(struct drm_device *dev)
81 return nouveau_display(dev)->priv;
/* Return the crtc this encoder is currently driving (NULL when the
 * encoder has been disconnected — see nvd0_dac_disconnect).
 */
84 static struct drm_crtc *
85 nvd0_display_crtc_get(struct drm_encoder *encoder)
87 return nouveau_encoder(encoder)->crtc;
90 /******************************************************************************
92 *****************************************************************************/
/* Submit a single method/data pair to EVO channel 'id' through the
 * immediate (0x6107xx) register interface rather than the push buffer.
 */
94 evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
96 struct nouveau_device *device = nouveau_dev(dev);
/* enable immediate mode, write data then method (busy bit 0x80000000
 * set), poll for the busy bit to clear, then disable immediate mode
 */
98 nv_mask(device, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
99 nv_wr32(device, 0x610704 + (id * 0x10), data);
100 nv_mask(device, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
101 if (!nv_wait(device, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
103 nv_mask(device, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
/* Reserve room for 'nr' dwords in EVO channel 'id's page-sized push
 * buffer and return a pointer to the write position.  If the request
 * would run past the end of the page, emit a jump-to-start command and
 * reset the hardware PUT pointer first.
 */
108 evo_wait(struct drm_device *dev, int id, int nr)
110 struct nouveau_device *device = nouveau_dev(dev);
111 struct nouveau_drm *drm = nouveau_drm(dev);
112 struct nvd0_display *disp = nvd0_display(dev);
/* current PUT pointer (0x640000 is in bytes, convert to dwords) */
113 u32 put = nv_rd32(device, 0x640000 + (id * 0x1000)) / 4;
115 if (put + nr >= (PAGE_SIZE / 4)) {
/* 0x20000000 jumps the channel back to the buffer start; then wait
 * for GET (0x640004) to catch up to zero.
 * NOTE(review): the reset of the local 'put' on the wrap path (and the
 * error return) appear to have been lost in this extraction — confirm
 * against the upstream file before editing.
 */
116 disp->evo[id].ptr[put] = 0x20000000;
118 nv_wr32(device, 0x640000 + (id * 0x1000), 0x00000000);
119 if (!nv_wait(device, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
120 NV_ERROR(drm, "evo %d dma stalled\n", id);
127 return disp->evo[id].ptr + put;
/* Publish commands written since evo_wait() by advancing the hardware
 * PUT pointer to the current write position (dwords -> bytes).
 */
131 evo_kick(u32 *push, struct drm_device *dev, int id)
133 struct nouveau_device *device = nouveau_dev(dev);
134 struct nvd0_display *disp = nvd0_display(dev);
136 nv_wr32(device, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
/* Append a method header (count in bits 18+, method in low bits) or a
 * data word to the push buffer, advancing the write pointer.
 */
139 #define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
140 #define evo_data(p,d) *((p)++) = (d)
/* Bring EVO channel 'ch' up in push-buffer (DMA) mode: program the
 * buffer handle, reset pointers, kick the channel and wait for it to
 * report ready, then unmask its interrupt bits.
 */
143 evo_init_dma(struct drm_device *dev, int ch)
145 struct nouveau_device *device = nouveau_dev(dev);
146 struct nouveau_drm *drm = nouveau_drm(dev);
147 struct nvd0_display *disp = nvd0_display(dev);
/* NOTE(review): 'flags' used below is set on a line dropped from this
 * extraction (extra mode bit for the master channel) — confirm upstream.
 */
151 if (ch == EVO_MASTER)
/* push-buffer object handle (256-byte units) plus enable bits */
154 nv_wr32(device, 0x610494 + (ch * 0x0010), (disp->evo[ch].handle >> 8) | 3);
155 nv_wr32(device, 0x610498 + (ch * 0x0010), 0x00010000);
156 nv_wr32(device, 0x61049c + (ch * 0x0010), 0x00000001);
157 nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000010, 0x00000010);
158 nv_wr32(device, 0x640000 + (ch * 0x1000), 0x00000000);
159 nv_wr32(device, 0x610490 + (ch * 0x0010), 0x00000013 | flags);
/* wait for the channel's busy bit to drop */
160 if (!nv_wait(device, 0x610490 + (ch * 0x0010), 0x80000000, 0x00000000)) {
161 NV_ERROR(drm, "PDISP: ch%d 0x%08x\n", ch,
162 nv_rd32(device, 0x610490 + (ch * 0x0010)));
/* enable this channel's bits in the PDISP interrupt masks */
166 nv_mask(device, 0x610090, (1 << ch), (1 << ch));
167 nv_mask(device, 0x6100a0, (1 << ch), (1 << ch));
/* Tear down a DMA-mode channel: skip if it was never enabled, otherwise
 * clear the enable bits, wait for idle, and mask its interrupts.
 */
172 evo_fini_dma(struct drm_device *dev, int ch)
174 struct nouveau_device *device = nouveau_dev(dev);
176 if (!(nv_rd32(device, 0x610490 + (ch * 0x0010)) & 0x00000010))
179 nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000010, 0x00000000);
180 nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000003, 0x00000000);
181 nv_wait(device, 0x610490 + (ch * 0x0010), 0x80000000, 0x00000000);
182 nv_mask(device, 0x610090, (1 << ch), 0x00000000);
183 nv_mask(device, 0x6100a0, (1 << ch), 0x00000000);
/* PIO-mode method submission: write the data directly to the channel's
 * method aperture (no push buffer involved).
 */
187 evo_piow(struct drm_device *dev, int ch, u16 mthd, u32 data)
189 struct nouveau_device *device = nouveau_dev(dev);
190 nv_wr32(device, 0x640000 + (ch * 0x1000) + mthd, data);
/* Bring channel 'ch' up in PIO mode and wait for it to signal ready,
 * then unmask its interrupt bits.
 */
194 evo_init_pio(struct drm_device *dev, int ch)
196 struct nouveau_device *device = nouveau_dev(dev);
197 struct nouveau_drm *drm = nouveau_drm(dev);
199 nv_wr32(device, 0x610490 + (ch * 0x0010), 0x00000001);
200 if (!nv_wait(device, 0x610490 + (ch * 0x0010), 0x00010000, 0x00010000)) {
201 NV_ERROR(drm, "PDISP: ch%d 0x%08x\n", ch,
202 nv_rd32(device, 0x610490 + (ch * 0x0010)));
206 nv_mask(device, 0x610090, (1 << ch), (1 << ch));
207 nv_mask(device, 0x6100a0, (1 << ch), (1 << ch));
/* Tear down a PIO-mode channel (mirror of evo_init_pio): bail if never
 * enabled, otherwise disable, wait for idle, mask interrupts.
 */
212 evo_fini_pio(struct drm_device *dev, int ch)
214 struct nouveau_device *device = nouveau_dev(dev);
216 if (!(nv_rd32(device, 0x610490 + (ch * 0x0010)) & 0x00000001))
219 nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000010, 0x00000010);
220 nv_mask(device, 0x610490 + (ch * 0x0010), 0x00000001, 0x00000000);
221 nv_wait(device, 0x610490 + (ch * 0x0010), 0x00010000, 0x00000000);
222 nv_mask(device, 0x610090, (1 << ch), 0x00000000);
223 nv_mask(device, 0x6100a0, (1 << ch), 0x00000000);
/* nv_wait_cb() callback: true once the master notifier dword in the
 * shared sync bo has been written non-zero by the hardware.
 */
227 evo_sync_wait(void *data)
229 return nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000;
/* Drain channel 'ch': clear the notifier, queue a notify request
 * (method 0x0084) followed by an update (0x0080), then busy-wait for
 * the notifier to be signalled.
 */
233 evo_sync(struct drm_device *dev, int ch)
235 struct nouveau_device *device = nouveau_dev(dev);
236 struct nvd0_display *disp = nvd0_display(dev);
237 u32 *push = evo_wait(dev, ch, 8);
239 nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
240 evo_mthd(push, 0x0084, 1);
/* 0x80000000 requests a notifier write at dword EVO_MAST_NTFY */
241 evo_data(push, 0x80000000 | EVO_MAST_NTFY);
242 evo_mthd(push, 0x0080, 2);
243 evo_data(push, 0x00000000);
244 evo_data(push, 0x00000000);
245 evo_kick(push, dev, ch);
246 if (nv_wait_cb(device, evo_sync_wait, disp->sync))
253 /******************************************************************************
254 * Page flipping channel
255 *****************************************************************************/
/* Expose the shared sync bo used for cross-channel flip semaphores. */
257 nvd0_display_crtc_sema(struct drm_device *dev, int crtc)
259 return nvd0_display(dev)->sync;
/* Cancel any pending page-flip on this crtc's base (flip) channel by
 * zeroing its notifier, semaphore and framebuffer-dma methods, then
 * committing with an update (0x0080).
 */
263 nvd0_display_flip_stop(struct drm_crtc *crtc)
265 struct nvd0_display *disp = nvd0_display(crtc->dev);
266 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
267 struct evo *evo = &disp->evo[EVO_FLIP(nv_crtc->index)];
270 push = evo_wait(crtc->dev, evo->idx, 8);
272 evo_mthd(push, 0x0084, 1);
273 evo_data(push, 0x00000000);
274 evo_mthd(push, 0x0094, 1);
275 evo_data(push, 0x00000000);
276 evo_mthd(push, 0x00c0, 1);
277 evo_data(push, 0x00000000);
278 evo_mthd(push, 0x0080, 1);
279 evo_data(push, 0x00000000);
280 evo_kick(push, crtc->dev, evo->idx);
/* Queue a page-flip of 'fb' on this crtc's base channel.  When 'chan'
 * (a rendering channel) is given, a semaphore handshake through the
 * shared sync bo orders the flip after rendering; otherwise the
 * semaphore is released directly from the CPU.
 */
285 nvd0_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
286 struct nouveau_channel *chan, u32 swap_interval)
288 struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
289 struct nvd0_display *disp = nvd0_display(crtc->dev);
290 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
291 struct evo *evo = &disp->evo[EVO_FLIP(nv_crtc->index)];
/* bit 0x100 appears to select "don't wait for vblank" — used both here
 * for interval 0 and tested again before emitting method 0x00e0 below
 */
297 if (swap_interval == 0)
298 swap_interval |= 0x100;
300 push = evo_wait(crtc->dev, evo->idx, 128);
301 if (unlikely(push == NULL))
304 /* synchronise with the rendering channel, if necessary */
306 ret = RING_SPACE(chan, 10);
/* acquire-equal 0xf00d0000|value at the current semaphore slot, then
 * release 0x74b1e000 into the *other* slot (offset ^ 0x10)
 */
311 offset = nvc0_fence_crtc(chan, nv_crtc->index);
312 offset += evo->sem.offset;
314 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
315 OUT_RING (chan, upper_32_bits(offset));
316 OUT_RING (chan, lower_32_bits(offset));
317 OUT_RING (chan, 0xf00d0000 | evo->sem.value);
318 OUT_RING (chan, 0x1002);
319 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
320 OUT_RING (chan, upper_32_bits(offset));
321 OUT_RING (chan, lower_32_bits(offset ^ 0x10));
322 OUT_RING (chan, 0x74b1e000);
323 OUT_RING (chan, 0x1001);
/* no rendering channel: release the semaphore from the CPU and make
 * sure the master channel has seen it
 */
326 nouveau_bo_wr32(disp->sync, evo->sem.offset / 4,
327 0xf00d0000 | evo->sem.value);
328 evo_sync(crtc->dev, EVO_MASTER);
/* queue the flip itself on the base channel */
332 evo_mthd(push, 0x0100, 1);
333 evo_data(push, 0xfffe0000);
334 evo_mthd(push, 0x0084, 1);
335 evo_data(push, swap_interval);
336 if (!(swap_interval & 0x00000100)) {
337 evo_mthd(push, 0x00e0, 1);
338 evo_data(push, 0x40000000);
/* semaphore offset/acquire/release values and the sync dma object */
340 evo_mthd(push, 0x0088, 4);
341 evo_data(push, evo->sem.offset);
342 evo_data(push, 0xf00d0000 | evo->sem.value);
343 evo_data(push, 0x74b1e000);
344 evo_data(push, NvEvoSync);
345 evo_mthd(push, 0x00a0, 2);
346 evo_data(push, 0x00000000);
347 evo_data(push, 0x00000000);
348 evo_mthd(push, 0x00c0, 1);
349 evo_data(push, nv_fb->r_dma);
350 evo_mthd(push, 0x0110, 2);
351 evo_data(push, 0x00000000);
352 evo_data(push, 0x00000000);
/* framebuffer surface: offset (256B units), size, pitch, format */
353 evo_mthd(push, 0x0400, 5);
354 evo_data(push, nv_fb->nvbo->bo.offset >> 8);
356 evo_data(push, (fb->height << 16) | fb->width);
357 evo_data(push, nv_fb->r_pitch);
358 evo_data(push, nv_fb->r_format);
359 evo_mthd(push, 0x0080, 1);
360 evo_data(push, 0x00000000);
361 evo_kick(push, crtc->dev, evo->idx);
/* flip-flop between the two 16-byte semaphore slots for next time */
363 evo->sem.offset ^= 0x10;
368 /******************************************************************************
370 *****************************************************************************/
/* Program the crtc's dithering mode/depth from connector properties.
 * AUTO mode enables dynamic 2x2 dithering when the framebuffer has more
 * depth than the monitor can display; AUTO depth picks 8bpc for >= 8bpc
 * panels.  'update' commits immediately with an update method.
 */
372 nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
374 struct nouveau_drm *drm = nouveau_drm(nv_crtc->base.dev);
375 struct drm_device *dev = nv_crtc->base.dev;
376 struct nouveau_connector *nv_connector;
377 struct drm_connector *connector;
378 u32 *push, mode = 0x00;
381 nv_connector = nouveau_crtc_connector_get(nv_crtc);
382 connector = &nv_connector->base;
383 if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
384 if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
385 mode = DITHERING_MODE_DYNAMIC2X2;
387 mode = nv_connector->dithering_mode;
390 if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
391 if (connector->display_info.bpc >= 8)
392 mode |= DITHERING_DEPTH_8BPC;
394 mode |= nv_connector->dithering_depth;
/* dither method moved from 0x0490 to 0x04a0 on NVE0+ heads */
397 if (nv_device(drm->device)->card_type < NV_E0)
398 mthd = 0x0490 + (nv_crtc->index * 0x0300);
400 mthd = 0x04a0 + (nv_crtc->index * 0x0300);
402 push = evo_wait(dev, EVO_MASTER, 4);
404 evo_mthd(push, mthd, 1);
405 evo_data(push, mode);
407 evo_mthd(push, 0x0080, 1);
408 evo_data(push, 0x00000000);
410 evo_kick(push, dev, EVO_MASTER);
/* Program the crtc scaler: derive the output (viewport) size from the
 * connector's scaling mode, apply underscan borders, then handle
 * CENTER/ASPECT modes, and finally push the three scaler sizes plus the
 * user-mode size to the hardware.
 */
419 nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
421 struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
422 struct drm_device *dev = nv_crtc->base.dev;
423 struct drm_crtc *crtc = &nv_crtc->base;
424 struct nouveau_connector *nv_connector;
425 int mode = DRM_MODE_SCALE_NONE;
426 /* start off at the resolution we programmed the crtc for, this
427 * effectively handles NONE/FULL scaling
429 nv_connector = nouveau_crtc_connector_get(nv_crtc);
430 if (nv_connector && nv_connector->native_mode)
431 mode = nv_connector->scaling_mode;
433 if (mode != DRM_MODE_SCALE_NONE)
434 omode = nv_connector->native_mode;
438 oX = omode->hdisplay;
439 oY = omode->vdisplay;
440 if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
443 /* add overscan compensation if necessary, will keep the aspect
444 * ratio the same as the backend mode unless overridden by the
445 * user setting both hborder and vborder properties.
447 if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
448 (nv_connector->underscan == UNDERSCAN_AUTO &&
449 nv_connector->edid &&
450 drm_detect_hdmi_monitor(nv_connector->edid)))) {
451 u32 bX = nv_connector->underscan_hborder;
452 u32 bY = nv_connector->underscan_vborder;
/* 19.13-ish fixed-point aspect ratio of the output mode */
453 u32 aspect = (oY << 19) / oX;
457 if (bY) oY -= (bY * 2);
458 else oY = ((oX * aspect) + (aspect / 2)) >> 19;
/* no explicit borders: shrink width ~6% + 32px, keep aspect */
460 oX -= (oX >> 4) + 32;
461 if (bY) oY -= (bY * 2);
462 else oY = ((oX * aspect) + (aspect / 2)) >> 19;
466 /* handle CENTER/ASPECT scaling, taking into account the areas
467 * removed already for overscan compensation
470 case DRM_MODE_SCALE_CENTER:
471 oX = min((u32)umode->hdisplay, oX);
472 oY = min((u32)umode->vdisplay, oY);
474 case DRM_MODE_SCALE_ASPECT:
476 u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
477 oX = ((oY * aspect) + (aspect / 2)) >> 19;
479 u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
480 oY = ((oX * aspect) + (aspect / 2)) >> 19;
487 push = evo_wait(dev, EVO_MASTER, 8);
/* three viewport sizes, then scaler control and the input size */
489 evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
490 evo_data(push, (oY << 16) | oX);
491 evo_data(push, (oY << 16) | oX);
492 evo_data(push, (oY << 16) | oX);
493 evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
494 evo_data(push, 0x00000000);
495 evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
496 evo_data(push, (umode->vdisplay << 16) | umode->hdisplay);
497 evo_kick(push, dev, EVO_MASTER);
/* restart flipping so the base channel picks up the new geometry */
499 nvd0_display_flip_stop(crtc);
500 nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
/* Point the crtc at framebuffer 'fb' with panning offset (x, y) via the
 * master channel; 'update' commits immediately.  Also caches the fb's
 * dma/tile value for nvd0_crtc_commit.
 */
508 nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
509 int x, int y, bool update)
511 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
514 push = evo_wait(fb->dev, EVO_MASTER, 16);
/* surface offset (256-byte units), size, pitch, format, dma object */
516 evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
517 evo_data(push, nvfb->nvbo->bo.offset >> 8);
518 evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
519 evo_data(push, (fb->height << 16) | fb->width);
520 evo_data(push, nvfb->r_pitch);
521 evo_data(push, nvfb->r_format);
522 evo_data(push, nvfb->r_dma);
523 evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
524 evo_data(push, (y << 16) | x);
526 evo_mthd(push, 0x0080, 1);
527 evo_data(push, 0x00000000);
529 evo_kick(push, fb->dev, EVO_MASTER);
532 nv_crtc->fb.tile_flags = nvfb->r_dma;
/* Show or hide the hardware cursor on this head.  Showing programs the
 * cursor surface offset and VRAM dma object; hiding clears them.
 * 'update' commits with an update method.
 */
537 nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
539 struct drm_device *dev = nv_crtc->base.dev;
540 u32 *push = evo_wait(dev, EVO_MASTER, 16);
/* 0x85000000 = cursor enabled (format/control bits), plus offset */
543 evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
544 evo_data(push, 0x85000000);
545 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
546 evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
547 evo_data(push, NvEvoVRAM);
/* hide: 0x05000000 and a null dma object */
549 evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
550 evo_data(push, 0x05000000);
551 evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
552 evo_data(push, 0x00000000);
556 evo_mthd(push, 0x0080, 1);
557 evo_data(push, 0x00000000);
560 evo_kick(push, dev, EVO_MASTER);
/* dpms hook: intentionally a no-op on this hardware generation. */
565 nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
/* Helper-prepare hook: stop pending flips and blank the head (null fb
 * dma object, 0x03000000 control value, null lut dma) before modeset.
 */
570 nvd0_crtc_prepare(struct drm_crtc *crtc)
572 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
575 nvd0_display_flip_stop(crtc);
577 push = evo_wait(crtc->dev, EVO_MASTER, 2);
579 evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
580 evo_data(push, 0x00000000);
581 evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
582 evo_data(push, 0x03000000);
583 evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
584 evo_data(push, 0x00000000);
585 evo_kick(push, crtc->dev, EVO_MASTER);
/* hide the cursor while the head is down */
588 nvd0_crtc_cursor_show(nv_crtc, false, false);
/* Helper-commit hook: re-enable the head after modeset — restore the fb
 * dma value cached by nvd0_crtc_set_image, point the head at the lut
 * buffer (0x83000000 = lut enabled), then restore cursor visibility and
 * restart flipping.
 */
592 nvd0_crtc_commit(struct drm_crtc *crtc)
594 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
597 push = evo_wait(crtc->dev, EVO_MASTER, 32);
599 evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
600 evo_data(push, nv_crtc->fb.tile_flags);
601 evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
602 evo_data(push, 0x83000000);
603 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
604 evo_data(push, 0x00000000);
605 evo_data(push, 0x00000000);
606 evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
607 evo_data(push, NvEvoVRAM);
608 evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
609 evo_data(push, 0xffffff00);
610 evo_kick(push, crtc->dev, EVO_MASTER);
613 nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
614 nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
/* mode_fixup hook: no adjustments needed at the crtc level. */
618 nvd0_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
619 struct drm_display_mode *adjusted_mode)
/* Pin the new framebuffer's bo into VRAM and unpin the previous one
 * (if any) so scanout memory stays resident across the switch.
 */
625 nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
627 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
630 ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
635 nvfb = nouveau_framebuffer(old_fb);
636 nouveau_bo_unpin(nvfb->nvbo);
/* Full modeset: compute horizontal/vertical timing parameters from the
 * adjusted mode (sync, back/front porch, blanking, interlace second
 * field), pin the new fb, program the head timings and pixel clock,
 * then refresh dither/scale/image state.
 */
643 nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
644 struct drm_display_mode *mode, int x, int y,
645 struct drm_framebuffer *old_fb)
647 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
648 struct nouveau_connector *nv_connector;
/* interlace halves vertical counts, doublescan doubles them */
649 u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
650 u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
651 u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
652 u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
653 u32 vblan2e = 0, vblan2s = 1;
657 hactive = mode->htotal;
658 hsynce = mode->hsync_end - mode->hsync_start - 1;
659 hbackp = mode->htotal - mode->hsync_end;
660 hblanke = hsynce + hbackp;
661 hfrontp = mode->hsync_start - mode->hdisplay;
662 hblanks = mode->htotal - hfrontp - 1;
664 vactive = mode->vtotal * vscan / ilace;
665 vsynce = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
666 vbackp = (mode->vtotal - mode->vsync_end) * vscan / ilace;
667 vblanke = vsynce + vbackp;
668 vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
669 vblanks = vactive - vfrontp - 1;
/* interlaced: second-field blanking window, odd total line count */
670 if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
671 vblan2e = vactive + vsynce + vbackp;
672 vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
673 vactive = (vactive * 2) + 1;
676 ret = nvd0_crtc_swap_fbs(crtc, old_fb);
680 push = evo_wait(crtc->dev, EVO_MASTER, 64);
682 evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
683 evo_data(push, 0x00000000);
684 evo_data(push, (vactive << 16) | hactive);
685 evo_data(push, ( vsynce << 16) | hsynce);
686 evo_data(push, (vblanke << 16) | hblanke);
687 evo_data(push, (vblanks << 16) | hblanks);
688 evo_data(push, (vblan2e << 16) | vblan2s);
689 evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
690 evo_data(push, 0x00000000); /* ??? */
/* pixel clock in Hz, written to both clock methods */
691 evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
692 evo_data(push, mode->clock * 1000);
693 evo_data(push, 0x00200000); /* ??? */
694 evo_data(push, mode->clock * 1000);
695 evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
696 evo_data(push, 0x00000311);
697 evo_data(push, 0x00000100);
698 evo_kick(push, crtc->dev, EVO_MASTER);
701 nv_connector = nouveau_crtc_connector_get(nv_crtc);
702 nvd0_crtc_set_dither(nv_crtc, false);
703 nvd0_crtc_set_scale(nv_crtc, false);
704 nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
/* Pan/flip to a new framebuffer without a full modeset: swap bo pins,
 * stop pending flips, program the new image and restart flipping.
 */
709 nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
710 struct drm_framebuffer *old_fb)
712 struct nouveau_drm *drm = nouveau_drm(crtc->dev);
713 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
717 NV_DEBUG(drm, "No FB bound\n");
721 ret = nvd0_crtc_swap_fbs(crtc, old_fb);
725 nvd0_display_flip_stop(crtc);
726 nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
727 nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
/* Atomic (kgdb/panic-path) variant: no pinning, no flip re-queue. */
732 nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
733 struct drm_framebuffer *fb, int x, int y,
734 enum mode_set_atomic state)
736 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
737 nvd0_display_flip_stop(crtc);
738 nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
/* Upload the crtc's 256-entry software gamma ramp into the mapped lut
 * buffer.  Each entry occupies 0x20 bytes (r/g/b 16-bit words at +0/+2/
 * +4); values are converted to the hardware's 14-bit-with-bias format.
 */
743 nvd0_crtc_lut_load(struct drm_crtc *crtc)
745 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
746 void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
749 for (i = 0; i < 256; i++) {
750 writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
751 writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
752 writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
/* drm cursor_set hook: copy a 64x64 ARGB cursor image from the
 * user-supplied gem object into the crtc's cursor bo, and toggle
 * cursor visibility when it changed (handle == 0 hides it).
 */
757 nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
758 uint32_t handle, uint32_t width, uint32_t height)
760 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
761 struct drm_device *dev = crtc->dev;
762 struct drm_gem_object *gem;
763 struct nouveau_bo *nvbo;
764 bool visible = (handle != 0);
/* only 64x64 cursors are supported by this hardware path */
768 if (width != 64 || height != 64)
771 gem = drm_gem_object_lookup(dev, file_priv, handle);
774 nvbo = nouveau_gem_object(gem);
776 ret = nouveau_bo_map(nvbo);
/* dword-wise copy of the 64x64 image into the cursor bo */
778 for (i = 0; i < 64 * 64; i++) {
779 u32 v = nouveau_bo_rd32(nvbo, i);
780 nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
782 nouveau_bo_unmap(nvbo);
785 drm_gem_object_unreference_unlocked(gem);
788 if (visible != nv_crtc->cursor.visible) {
789 nvd0_crtc_cursor_show(nv_crtc, visible, true);
790 nv_crtc->cursor.visible = visible;
/* drm cursor_move hook: position via the head's cursor PIO channel. */
797 nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
799 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
800 int ch = EVO_CURS(nv_crtc->index);
802 evo_piow(crtc->dev, ch, 0x0084, (y << 16) | (x & 0xffff));
803 evo_piow(crtc->dev, ch, 0x0080, 0x00000000);
808 nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
809 uint32_t start, uint32_t size)
811 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
812 u32 end = max(start + size, (u32)256);
815 for (i = start; i < end; i++) {
816 nv_crtc->lut.r[i] = r[i];
817 nv_crtc->lut.g[i] = g[i];
818 nv_crtc->lut.b[i] = b[i];
821 nvd0_crtc_lut_load(crtc);
/* Free crtc resources: unmap/release the cursor and lut bos created in
 * nvd0_crtc_create, then run the generic drm cleanup and free the
 * wrapper struct.
 */
825 nvd0_crtc_destroy(struct drm_crtc *crtc)
827 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
828 nouveau_bo_unmap(nv_crtc->cursor.nvbo);
829 nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
830 nouveau_bo_unmap(nv_crtc->lut.nvbo);
831 nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
832 drm_crtc_cleanup(crtc);
/* drm helper vtable for the crtc (modeset-time callbacks) */
836 static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
837 .dpms = nvd0_crtc_dpms,
838 .prepare = nvd0_crtc_prepare,
839 .commit = nvd0_crtc_commit,
840 .mode_fixup = nvd0_crtc_mode_fixup,
841 .mode_set = nvd0_crtc_mode_set,
842 .mode_set_base = nvd0_crtc_mode_set_base,
843 .mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
844 .load_lut = nvd0_crtc_lut_load,
/* drm core vtable for the crtc (ioctl-facing callbacks) */
847 static const struct drm_crtc_funcs nvd0_crtc_func = {
848 .cursor_set = nvd0_crtc_cursor_set,
849 .cursor_move = nvd0_crtc_cursor_move,
850 .gamma_set = nvd0_crtc_gamma_set,
851 .set_config = drm_crtc_helper_set_config,
852 .destroy = nvd0_crtc_destroy,
853 .page_flip = nouveau_crtc_page_flip,
/* Cursor pos/offset hooks: intentionally empty — cursor programming on
 * this generation goes through the PIO channel (nvd0_crtc_cursor_move).
 */
857 nvd0_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
862 nvd0_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
/* Create and register one crtc for head 'index': wire up callbacks,
 * seed an identity gamma ramp, allocate/pin/map the 64x64x4 cursor bo
 * and the 8KiB lut bo in VRAM, and upload the initial lut.
 */
867 nvd0_crtc_create(struct drm_device *dev, int index)
869 struct nouveau_crtc *nv_crtc;
870 struct drm_crtc *crtc;
873 nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
877 nv_crtc->index = index;
878 nv_crtc->set_dither = nvd0_crtc_set_dither;
879 nv_crtc->set_scale = nvd0_crtc_set_scale;
880 nv_crtc->cursor.set_offset = nvd0_cursor_set_offset;
881 nv_crtc->cursor.set_pos = nvd0_cursor_set_pos;
/* identity ramp: 8-bit index scaled to 16-bit component */
882 for (i = 0; i < 256; i++) {
883 nv_crtc->lut.r[i] = i << 8;
884 nv_crtc->lut.g[i] = i << 8;
885 nv_crtc->lut.b[i] = i << 8;
888 crtc = &nv_crtc->base;
889 drm_crtc_init(dev, crtc, &nvd0_crtc_func);
890 drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
891 drm_mode_crtc_set_gamma_size(crtc, 256);
893 ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
894 0, 0x0000, NULL, &nv_crtc->cursor.nvbo);
896 ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
898 ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
900 nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
906 ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
907 0, 0x0000, NULL, &nv_crtc->lut.nvbo);
909 ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
911 ret = nouveau_bo_map(nv_crtc->lut.nvbo);
913 nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
919 nvd0_crtc_lut_load(crtc);
/* error path: destroy tears down whatever was created */
923 nvd0_crtc_destroy(crtc);
927 /******************************************************************************
929 *****************************************************************************/
/* DAC dpms hook: build a power-control word (bit0 = hsync off, bit2 =
 * vsync off for the respective dpms states) and write it to the DAC's
 * control register, waiting for busy before and after.
 */
931 nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
933 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
934 struct drm_device *dev = encoder->dev;
935 struct nouveau_device *device = nouveau_dev(dev);
936 int or = nv_encoder->or;
939 dpms_ctrl = 0x80000000;
940 if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
941 dpms_ctrl |= 0x00000001;
942 if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
943 dpms_ctrl |= 0x00000004;
945 nv_wait(device, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
946 nv_mask(device, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
947 nv_wait(device, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
/* DAC mode_fixup hook: when the connector is scaled, substitute its
 * native mode for the adjusted mode (preserving the drm base id so the
 * mode object identity is unchanged).
 */
951 nvd0_dac_mode_fixup(struct drm_encoder *encoder,
952 const struct drm_display_mode *mode,
953 struct drm_display_mode *adjusted_mode)
955 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
956 struct nouveau_connector *nv_connector;
958 nv_connector = nouveau_encoder_connector_get(nv_encoder);
959 if (nv_connector && nv_connector->native_mode) {
960 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
961 int id = adjusted_mode->base.id;
962 *adjusted_mode = *nv_connector->native_mode;
963 adjusted_mode->base.id = id;
/* DAC commit hook: nothing to do here — programming happens in mode_set. */
971 nvd0_dac_commit(struct drm_encoder *encoder)
/* Program the DAC for the given mode: build sync-polarity and per-head
 * "magic" control words, power the DAC on, then attach it to the crtc
 * via the master channel.
 */
976 nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
977 struct drm_display_mode *adjusted_mode)
979 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
980 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
981 u32 syncs, magic, *push;
/* NOTE(review): the initialisation of 'syncs' and the flag values
 * or'ed in for NHSYNC/NVSYNC/INTERLACE were lost in this extraction —
 * confirm against the upstream file.
 */
984 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
986 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
989 magic = 0x31ec6000 | (nv_crtc->index << 25);
990 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
993 nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);
995 push = evo_wait(encoder->dev, EVO_MASTER, 8);
997 evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
998 evo_data(push, syncs);
999 evo_data(push, magic);
/* bind OR to head: one bit per crtc in the first data word */
1000 evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 2);
1001 evo_data(push, 1 << nv_crtc->index);
1002 evo_data(push, 0x00ff);
1003 evo_kick(push, encoder->dev, EVO_MASTER);
/* remember which crtc we're driving (cleared on disconnect) */
1006 nv_encoder->crtc = encoder->crtc;
/* Detach the DAC from its crtc: blank the head, clear the OR's crtc
 * binding in the master channel, and forget the crtc pointer.
 */
1010 nvd0_dac_disconnect(struct drm_encoder *encoder)
1012 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1013 struct drm_device *dev = encoder->dev;
1016 if (nv_encoder->crtc) {
1017 nvd0_crtc_prepare(nv_encoder->crtc);
1019 push = evo_wait(dev, EVO_MASTER, 4);
1021 evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
1022 evo_data(push, 0x00000000);
1023 evo_mthd(push, 0x0080, 1);
1024 evo_data(push, 0x00000000);
1025 evo_kick(push, dev, EVO_MASTER);
1028 nv_encoder->crtc = NULL;
/* Analog load detection: kick the DAC's load-detect circuit and report
 * connected when all three (RGB) sense bits (0x38000000) come back set.
 */
1032 static enum drm_connector_status
1033 nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
1035 enum drm_connector_status status = connector_status_disconnected;
1036 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1037 struct drm_device *dev = encoder->dev;
1038 struct nouveau_device *device = nouveau_dev(dev);
1039 int or = nv_encoder->or;
1042 nv_wr32(device, 0x61a00c + (or * 0x800), 0x00100000);
1044 nv_wr32(device, 0x61a00c + (or * 0x800), 0x80000000);
1046 load = nv_rd32(device, 0x61a00c + (or * 0x800));
1047 if ((load & 0x38000000) == 0x38000000)
1048 status = connector_status_connected;
/* switch load detection back off */
1050 nv_wr32(device, 0x61a00c + (or * 0x800), 0x00000000);
/* Encoder destructor: generic cleanup plus freeing the wrapper. */
1055 nvd0_dac_destroy(struct drm_encoder *encoder)
1057 drm_encoder_cleanup(encoder);
/* Helper and core vtables for the DAC encoder.  Note prepare/disable
 * both map to nvd0_dac_disconnect.
 */
1061 static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
1062 .dpms = nvd0_dac_dpms,
1063 .mode_fixup = nvd0_dac_mode_fixup,
1064 .prepare = nvd0_dac_disconnect,
1065 .commit = nvd0_dac_commit,
1066 .mode_set = nvd0_dac_mode_set,
1067 .disable = nvd0_dac_disconnect,
1068 .get_crtc = nvd0_display_crtc_get,
1069 .detect = nvd0_dac_detect
1072 static const struct drm_encoder_funcs nvd0_dac_func = {
1073 .destroy = nvd0_dac_destroy,
/* Create a DAC encoder for DCB entry 'dcbe' and attach it to the
 * connector.  The OR index comes from the entry's 'or' bitmask.
 */
1077 nvd0_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
1079 struct drm_device *dev = connector->dev;
1080 struct nouveau_encoder *nv_encoder;
1081 struct drm_encoder *encoder;
1083 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1086 nv_encoder->dcb = dcbe;
1087 nv_encoder->or = ffs(dcbe->or) - 1;
1089 encoder = to_drm_encoder(nv_encoder);
1090 encoder->possible_crtcs = dcbe->heads;
1091 encoder->possible_clones = 0;
1092 drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
1093 drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);
1095 drm_mode_connector_attach_encoder(connector, encoder);
1099 /******************************************************************************
1101 *****************************************************************************/
/* Enable HDA audio on this OR when the monitor advertises audio
 * support: power up the codec path, upload the connector's ELD byte by
 * byte (zero-padding to 0x60), then flag the ELD as valid.
 */
1103 nvd0_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1105 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1106 struct nouveau_connector *nv_connector;
1107 struct drm_device *dev = encoder->dev;
1108 struct nouveau_device *device = nouveau_dev(dev);
1109 int i, or = nv_encoder->or * 0x30;
1111 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1112 if (!drm_detect_monitor_audio(nv_connector->edid))
1115 nv_mask(device, 0x10ec10 + or, 0x80000003, 0x80000001);
1117 drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
1118 if (nv_connector->base.eld[0]) {
1119 u8 *eld = nv_connector->base.eld;
/* eld[2] is the ELD size in 4-byte units */
1121 for (i = 0; i < eld[2] * 4; i++)
1122 nv_wr32(device, 0x10ec00 + or, (i << 8) | eld[i]);
1123 for (i = eld[2] * 4; i < 0x60; i++)
1124 nv_wr32(device, 0x10ec00 + or, (i << 8) | 0x00);
1126 nv_mask(device, 0x10ec10 + or, 0x80000002, 0x80000002);
/* Disable audio on this OR (clears the enable/valid bits). */
1131 nvd0_audio_disconnect(struct drm_encoder *encoder)
1133 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1134 struct drm_device *dev = encoder->dev;
1135 struct nouveau_device *device = nouveau_dev(dev);
1136 int or = nv_encoder->or * 0x30;
1138 nv_mask(device, 0x10ec10 + or, 0x80000003, 0x80000000);
1141 /******************************************************************************
1143 *****************************************************************************/
/* Enable HDMI on this head when the monitor is HDMI-capable: compute
 * the max audio-packet budget from the hblank width, program the AVI
 * infoframe registers, enable HDMI with the rekey/packet settings, then
 * bring up audio.
 */
1145 nvd0_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1147 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1148 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1149 struct nouveau_connector *nv_connector;
1150 struct drm_device *dev = encoder->dev;
1151 struct nouveau_device *device = nouveau_dev(dev);
1152 int head = nv_crtc->index * 0x800;
1153 u32 rekey = 56; /* binary driver, and tegra constant */
1156 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1157 if (!drm_detect_hdmi_monitor(nv_connector->edid))
/* audio packets fit in hblank minus rekey and an 18-cycle constant,
 * in 32-cycle units
 */
1160 max_ac_packet = mode->htotal - mode->hdisplay;
1161 max_ac_packet -= rekey;
1162 max_ac_packet -= 18; /* constant from tegra */
1163 max_ac_packet /= 32;
/* disable, program, re-enable the AVI infoframe block */
1166 nv_mask(device, 0x616714 + head, 0x00000001, 0x00000000);
1167 nv_wr32(device, 0x61671c + head, 0x000d0282);
1168 nv_wr32(device, 0x616720 + head, 0x0000006f);
1169 nv_wr32(device, 0x616724 + head, 0x00000000);
1170 nv_wr32(device, 0x616728 + head, 0x00000000);
1171 nv_wr32(device, 0x61672c + head, 0x00000000);
1172 nv_mask(device, 0x616714 + head, 0x00000001, 0x00000001);
1174 /* ??? InfoFrame? */
1175 nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000000);
1176 nv_wr32(device, 0x6167ac + head, 0x00000010);
1177 nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000001);
/* HDMI enable (0x40000000) with rekey and packet budget */
1180 nv_mask(device, 0x616798 + head, 0x401f007f, 0x40000000 | rekey |
1181 max_ac_packet << 16);
1183 /* NFI, audio doesn't work without it though.. */
1184 nv_mask(device, 0x616548 + head, 0x00000070, 0x00000000);
1186 nvd0_audio_mode_set(encoder, mode);
/* Undo nvd0_hdmi_mode_set(): shut down audio first, then clear the HDMI
 * master-enable bit and the two infoframe(?) enable bits for this head. */
1190 nvd0_hdmi_disconnect(struct drm_encoder *encoder)
1192 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1193 struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1194 struct drm_device *dev = encoder->dev;
1195 struct nouveau_device *device = nouveau_dev(dev);
1196 int head = nv_crtc->index * 0x800;
1198 nvd0_audio_disconnect(encoder);
/* Clear enables in reverse order of the mode_set path. */
1200 nv_mask(device, 0x616798 + head, 0x40000000, 0x00000000);
1201 nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000000);
1202 nv_mask(device, 0x616714 + head, 0x00000001, 0x00000000);
1205 /******************************************************************************
1207 *****************************************************************************/
/* Map a DP lane index (0-3) to its bit-shift position inside the per-link
 * drive/preemphasis registers (callers use the value as a shift, and >>3 as
 * a lane bit index). NOTE(review): the return statement is not visible in
 * this chunk; only the lookup table is shown. */
1209 nvd0_sor_dp_lane_map(struct drm_device *dev, struct dcb_output *dcb, u8 lane)
1211 static const u8 nvd0[] = { 16, 8, 0, 24 };
/* Broadcast a DP link-training pattern to all four lanes: the pattern byte
 * is replicated into each byte lane of the 0x61c110 register for this
 * OR/link (stride 0x800 per OR, 0x80 per sublink). */
1216 nvd0_sor_dp_train_set(struct drm_device *dev, struct dcb_output *dcb, u8 pattern)
1218 struct nouveau_device *device = nouveau_dev(dev);
/* dcb->or is a bitmask; ffs-1 recovers the OR index. link is 0 for
 * sublink B, 1 for sublink A (note the inversion of sorconf.link bit 0). */
1219 const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1220 const u32 loff = (or * 0x800) + (link * 0x80);
1221 nv_mask(device, 0x61c110 + loff, 0x0f0f0f0f, 0x01010101 * pattern);
/* Apply voltage-swing / pre-emphasis drive settings for one DP lane during
 * link training. The drive values come from the VBIOS DP table
 * (nouveau_dp_bios_data); table revisions 0x30 and 0x40 index the config
 * entry differently. Errors out if the table revision is unsupported.
 * NOTE(review): missing lines in this chunk (switch header on swing(?),
 * braces, the second-level indexing) — documented as shown. */
1225 nvd0_sor_dp_train_adj(struct drm_device *dev, struct dcb_output *dcb,
1226 u8 lane, u8 swing, u8 preem)
1228 struct nouveau_device *device = nouveau_dev(dev);
1229 struct nouveau_drm *drm = nouveau_drm(dev);
1230 const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1231 const u32 loff = (or * 0x800) + (link * 0x80);
/* Each lane occupies one byte of the drive registers; shift selects it. */
1232 u32 shift = nvd0_sor_dp_lane_map(dev, dcb, lane);
1233 u32 mask = 0x000000ff << shift;
1234 u8 *table, *entry, *config = NULL;
/* Fold swing level into a combined row index for the BIOS table
 * (presumably switching on swing — the switch header is not visible). */
1237 case 0: preem += 0; break;
1238 case 1: preem += 4; break;
1239 case 2: preem += 7; break;
1240 case 3: preem += 9; break;
1243 table = nouveau_dp_bios_data(dev, dcb, &entry);
/* Rev 0x30: config rows hang off the entry pointer. */
1245 if (table[0] == 0x30) {
1246 config = entry + table[4];
1247 config += table[5] * preem;
/* Rev 0x40: config rows are addressed from the table base. */
1249 if (table[0] == 0x40) {
1250 config = table + table[1];
1251 config += table[2] * table[3];
1252 config += table[6] * preem;
1257 NV_ERROR(drm, "PDISP: unsupported DP table for chipset\n");
/* config[1..3]: per-lane drive parameters; final write looks like a
 * trigger/flush (mask and value both zero). */
1261 nv_mask(device, 0x61c118 + loff, mask, config[1] << shift);
1262 nv_mask(device, 0x61c120 + loff, mask, config[2] << shift);
1263 nv_mask(device, 0x61c130 + loff, 0x0000ff00, config[3] << 8);
1264 nv_mask(device, 0x61c13c + loff, 0x00000000, 0x00000000);
/* Configure the DP link for a given lane count / bandwidth: runs the VBIOS
 * init script matching the requested link_bw, then programs the clock
 * source and DP control registers and the active-lane mask.
 * NOTE(review): missing lines here include the entry-walk loop around the
 * `entry[0] >= link_bw` test and the enhframe handling around the
 * 0x00004000 set — documented as shown. */
1268 nvd0_sor_dp_link_set(struct drm_device *dev, struct dcb_output *dcb, int crtc,
1269 int link_nr, u32 link_bw, bool enhframe)
1271 struct nouveau_device *device = nouveau_dev(dev);
1272 const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1273 const u32 loff = (or * 0x800) + (link * 0x80);
1274 const u32 soff = (or * 0x800);
/* Read-modify-write bases: clear the fields we are about to set. */
1275 u32 dpctrl = nv_rd32(device, 0x61c10c + loff) & ~0x001f4000;
1276 u32 clksor = nv_rd32(device, 0x612300 + soff) & ~0x007c0000;
1277 u32 script = 0x0000, lane_mask = 0;
/* Locate the per-link-rate script list in the VBIOS DP table. */
1283 table = nouveau_dp_bios_data(dev, dcb, &entry);
1285 if (table[0] == 0x30) entry = ROMPTR(dev, entry[10]);
1286 else if (table[0] == 0x40) entry = ROMPTR(dev, entry[9]);
/* Pick the first entry whose rate covers the requested bandwidth. */
1290 if (entry[0] >= link_bw)
1295 nouveau_bios_run_init_table(dev, script, dcb, crtc);
/* link_bw into clock-source bits 18-22; lane-count as a low-bit mask in
 * dpctrl bits 16+; 0x00004000 presumably the enhanced-framing enable. */
1298 clksor |= link_bw << 18;
1299 dpctrl |= ((1 << link_nr) - 1) << 16;
1301 dpctrl |= 0x00004000;
1303 for (i = 0; i < link_nr; i++)
1304 lane_mask |= 1 << (nvd0_sor_dp_lane_map(dev, dcb, i) >> 3);
1306 nv_wr32(device, 0x612300 + soff, clksor);
1307 nv_wr32(device, 0x61c10c + loff, dpctrl);
1308 nv_mask(device, 0x61c130 + loff, 0x0000000f, lane_mask);
/* Read back the currently-programmed DP link configuration (inverse of
 * nvd0_sor_dp_link_set): lane count from the dpctrl lane-mask field,
 * bandwidth from clock-source bits 18-22.
 * NOTE(review): the 1-lane fallback branch is not visible in this chunk. */
1312 nvd0_sor_dp_link_get(struct drm_device *dev, struct dcb_output *dcb,
1313 u32 *link_nr, u32 *link_bw)
1315 struct nouveau_device *device = nouveau_dev(dev);
1316 const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1317 const u32 loff = (or * 0x800) + (link * 0x80);
1318 const u32 soff = (or * 0x800);
1319 u32 dpctrl = nv_rd32(device, 0x61c10c + loff) & 0x000f0000;
1320 u32 clksor = nv_rd32(device, 0x612300 + soff);
/* Lane mask 0xf -> 4 lanes, 0x3 -> 2 lanes (else presumably 1). */
1322 if (dpctrl > 0x00030000) *link_nr = 4;
1323 else if (dpctrl > 0x00010000) *link_nr = 2;
1326 *link_bw = (clksor & 0x007c0000) >> 18;
/* Compute and program the DP transfer-unit (TU) fill parameters for a head:
 * ratio of stream datarate to total link symbol rate, scaled into the TU
 * register at 0x616610 (stride 0x800 per head).
 * NOTE(review): missing lines include the TU constant, the ratio
 * initialisation from datarate, and intermediate field packing — the
 * do_div() chain is documented only as shown. */
1331 nvd0_sor_dp_calc_tu(struct drm_device *dev, struct dcb_output *dcb,
1332 u32 crtc, u32 datarate)
1334 struct nouveau_device *device = nouveau_dev(dev);
1335 const u32 symbol = 100000;
1337 u32 link_nr, link_bw;
/* Use the link configuration currently programmed in hardware. */
1340 nvd0_sor_dp_link_get(dev, dcb, &link_nr, &link_bw);
1344 do_div(ratio, link_nr * link_bw);
1346 value = (symbol - ratio) * TU;
1348 do_div(value, symbol);
1349 do_div(value, symbol);
/* Bit 27: presumably an enable/valid flag for the TU value. */
1352 value |= 0x08000000;
1354 nv_wr32(device, 0x616610 + (crtc * 0x800), value);
/* DPMS entry point for SOR encoders. Skips the hardware poke if another
 * encoder sharing the same OR is still ON, otherwise writes the power state
 * (with busy bit 31) into 0x61c004 bracketed by busy-waits, and for DP
 * outputs runs the full link-training state machine via nouveau_dp_dpms().
 * NOTE(review): missing lines include the early-return when another
 * TMDS partner on the OR is active — documented as shown. */
1358 nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
1360 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1361 struct drm_device *dev = encoder->dev;
1362 struct nouveau_device *device = nouveau_dev(dev);
1363 struct drm_encoder *partner;
1364 int or = nv_encoder->or;
1367 nv_encoder->last_dpms = mode;
/* Two encoders can share one OR (e.g. TMDS+DP); don't power it down while
 * a partner on the same OR is still lit. */
1369 list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
1370 struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
1372 if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
1375 if (nv_partner != nv_encoder &&
1376 nv_partner->dcb->or == nv_encoder->dcb->or) {
1377 if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
/* Bit 0 = powered, bit 31 = update pending; wait for not-busy before and
 * after the write, then for the OR to report idle in 0x61c030. */
1383 dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
1384 dpms_ctrl |= 0x80000000;
1386 nv_wait(device, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
1387 nv_mask(device, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
1388 nv_wait(device, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
1389 nv_wait(device, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
/* DP needs link (re)training on power transitions; hand our chip-specific
 * callbacks to the common DP code. */
1391 if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
1392 struct dp_train_func func = {
1393 .link_set = nvd0_sor_dp_link_set,
1394 .train_set = nvd0_sor_dp_train_set,
1395 .train_adj = nvd0_sor_dp_train_adj
1398 nouveau_dp_dpms(encoder, mode, nv_encoder->dp.datarate, &func);
/* mode_fixup hook: when the connector has a native mode and scaling is
 * enabled, substitute the native mode for the requested one (preserving the
 * mode object's id) so the panel is always driven at its native timing. */
1403 nvd0_sor_mode_fixup(struct drm_encoder *encoder,
1404 const struct drm_display_mode *mode,
1405 struct drm_display_mode *adjusted_mode)
1407 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1408 struct nouveau_connector *nv_connector;
1410 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1411 if (nv_connector && nv_connector->native_mode) {
1412 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
/* Keep the DRM object id of the mode we overwrite. */
1413 int id = adjusted_mode->base.id;
1414 *adjusted_mode = *nv_connector->native_mode;
1415 adjusted_mode->base.id = id;
/* Detach this SOR from its CRTC: push a NULL mode-ctrl method plus an
 * update (0x0080) through the master EVO channel, tear down HDMI state, and
 * mark the encoder idle. No-op when not bound to a crtc. */
1423 nvd0_sor_disconnect(struct drm_encoder *encoder)
1425 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1426 struct drm_device *dev = encoder->dev;
1429 if (nv_encoder->crtc) {
1430 nvd0_crtc_prepare(nv_encoder->crtc);
/* 4 dwords: SOR_MODE_CTRL(or)=0 followed by UPDATE. */
1432 push = evo_wait(dev, EVO_MASTER, 4);
1434 evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
1435 evo_data(push, 0x00000000);
1436 evo_mthd(push, 0x0080, 1);
1437 evo_data(push, 0x00000000);
1438 evo_kick(push, dev, EVO_MASTER);
1441 nvd0_hdmi_disconnect(encoder);
1443 nv_encoder->crtc = NULL;
1444 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
/* prepare hook: disconnect the encoder before a modeset; DP outputs also
 * wait for the master EVO channel to drain so the disconnect has landed
 * before link training starts. */
1449 nvd0_sor_prepare(struct drm_encoder *encoder)
1451 nvd0_sor_disconnect(encoder);
1452 if (nouveau_encoder(encoder)->dcb->type == DCB_OUTPUT_DP)
1453 evo_sync(encoder->dev, EVO_MASTER);
/* commit hook: intentionally empty — all work happens in mode_set/dpms. */
1457 nvd0_sor_commit(struct drm_encoder *encoder)
/* Main SOR modeset: computes the per-head sync polarity word, the "magic"
 * head config word, and the per-output-type mode_ctrl / or_config values
 * (TMDS single vs dual link, LVDS link/bpp from VBIOS, DP lane config and
 * datarate), powers the output on, then pushes the result through the
 * master EVO channel.
 * NOTE(review): missing lines in this chunk include the syncs initialiser,
 * several else branches and case labels — documented as shown. */
1462 nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
1463 struct drm_display_mode *mode)
1465 struct drm_device *dev = encoder->dev;
1466 struct nouveau_drm *drm = nouveau_drm(dev);
1467 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1468 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1469 struct nouveau_connector *nv_connector;
1470 struct nvbios *bios = &drm->vbios;
/* Bit per head this output drives. */
1471 u32 mode_ctrl = (1 << nv_crtc->index);
1472 u32 syncs, magic, *push;
/* Negative-going sync flags map to bits 3 and 4 of the syncs word. */
1476 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1477 syncs |= 0x00000008;
1478 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1479 syncs |= 0x00000010;
/* Head index in bits 25+, bit 0 flags interlace. */
1481 magic = 0x31ec6000 | (nv_crtc->index << 25);
1482 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1483 magic |= 0x00000001;
1485 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1486 switch (nv_encoder->dcb->type) {
1487 case DCB_OUTPUT_TMDS:
/* Sublink A: single link below 165MHz, dual link (0x500) above;
 * sublink B always 0x200. */
1488 if (nv_encoder->dcb->sorconf.link & 1) {
1489 if (mode->clock < 165000)
1490 mode_ctrl |= 0x00000100;
1492 mode_ctrl |= 0x00000500;
1494 mode_ctrl |= 0x00000200;
1497 or_config = (mode_ctrl & 0x00000f00) >> 8;
1498 if (mode->clock >= 165000)
1499 or_config |= 0x0100;
1501 nvd0_hdmi_mode_set(encoder, mode);
1503 case DCB_OUTPUT_LVDS:
1504 or_config = (mode_ctrl & 0x00000f00) >> 8;
/* Panel strap path: take dual-link / 24-bit straight from VBIOS. */
1505 if (bios->fp_no_ddc) {
1506 if (bios->fp.dual_link)
1507 or_config |= 0x0100;
1508 if (bios->fp.if_is_24bit)
1509 or_config |= 0x0200;
/* SPWG panels encode dual-link in EDID byte 121. */
1511 if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1512 if (((u8 *)nv_connector->edid)[121] == 2)
1513 or_config |= 0x0100;
1515 if (mode->clock >= bios->fp.duallink_transition_clk) {
1516 or_config |= 0x0100;
/* 24-bit strap differs for dual- vs single-link panels. */
1519 if (or_config & 0x0100) {
1520 if (bios->fp.strapless_is_24bit & 2)
1521 or_config |= 0x0200;
1523 if (bios->fp.strapless_is_24bit & 1)
1524 or_config |= 0x0200;
1527 if (nv_connector->base.display_info.bpc == 8)
1528 or_config |= 0x0200;
/* DP: datarate = pixel clock * bits-per-pixel / 8; sync word carries the
 * bpc code in bits 6+. */
1533 if (nv_connector->base.display_info.bpc == 6) {
1534 nv_encoder->dp.datarate = mode->clock * 18 / 8;
1535 syncs |= 0x00000002 << 6;
1537 nv_encoder->dp.datarate = mode->clock * 24 / 8;
1538 syncs |= 0x00000005 << 6;
1541 if (nv_encoder->dcb->sorconf.link & 1)
1542 mode_ctrl |= 0x00000800;
1544 mode_ctrl |= 0x00000900;
1546 or_config = (mode_ctrl & 0x00000f00) >> 8;
1553 nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);
/* DP transfer-unit parameters depend on the freshly trained link. */
1555 if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
1556 nvd0_sor_dp_calc_tu(dev, nv_encoder->dcb, nv_crtc->index,
1557 nv_encoder->dp.datarate);
/* 8 dwords: head syncs/magic then SOR mode_ctrl/or_config. */
1560 push = evo_wait(dev, EVO_MASTER, 8);
1562 evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1563 evo_data(push, syncs);
1564 evo_data(push, magic);
1565 evo_mthd(push, 0x0200 + (nv_encoder->or * 0x020), 2);
1566 evo_data(push, mode_ctrl);
1567 evo_data(push, or_config);
1568 evo_kick(push, dev, EVO_MASTER);
1571 nv_encoder->crtc = encoder->crtc;
/* destroy hook: release DRM encoder state (the containing nouveau_encoder
 * free is presumably on a line not visible in this chunk). */
1575 nvd0_sor_destroy(struct drm_encoder *encoder)
1577 drm_encoder_cleanup(encoder);
/* Helper vtable wiring the SOR encoder into the DRM CRTC helper layer. */
1581 static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
1582 .dpms = nvd0_sor_dpms,
1583 .mode_fixup = nvd0_sor_mode_fixup,
1584 .prepare = nvd0_sor_prepare,
1585 .commit = nvd0_sor_commit,
1586 .mode_set = nvd0_sor_mode_set,
1587 .disable = nvd0_sor_disconnect,
1588 .get_crtc = nvd0_display_crtc_get,
/* Base encoder ops: only destruction is needed here. */
1591 static const struct drm_encoder_funcs nvd0_sor_func = {
1592 .destroy = nvd0_sor_destroy,
/* Allocate and register a SOR encoder for one DCB entry, attaching it to
 * the given connector. Registered as TMDS with DRM regardless of actual
 * output type; the real type lives in nv_encoder->dcb.
 * NOTE(review): the kzalloc NULL-check / return lines are not visible in
 * this chunk. */
1596 nvd0_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
1598 struct drm_device *dev = connector->dev;
1599 struct nouveau_encoder *nv_encoder;
1600 struct drm_encoder *encoder;
1602 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1605 nv_encoder->dcb = dcbe;
1606 nv_encoder->or = ffs(dcbe->or) - 1;
1607 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1609 encoder = to_drm_encoder(nv_encoder);
/* DCB tells us which heads this OR can drive. */
1610 encoder->possible_crtcs = dcbe->heads;
1611 encoder->possible_clones = 0;
1612 drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
1613 drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
1615 drm_mode_connector_attach_encoder(connector, encoder);
1619 /******************************************************************************
1621 *****************************************************************************/
/* Translate a hardware (OR id, mode-ctrl value) pair back into its VBIOS
 * DCB entry: decodes output type and sublink from the mc word, then scans
 * the DCB table for a matching type/or/link. Logs and (presumably, on
 * lines not visible here) returns NULL on no match.
 * NOTE(review): the DAC branch header, the `or` derivation, and return
 * statements are missing from this chunk. */
1622 static struct dcb_output *
1623 lookup_dcb(struct drm_device *dev, int id, u32 mc)
1625 struct nouveau_drm *drm = nouveau_drm(dev);
1626 int type, or, i, link = -1;
/* ids 0-3 are DACs (analog); SORs decode their type from mc bits 8-11. */
1629 type = DCB_OUTPUT_ANALOG;
1632 switch (mc & 0x00000f00) {
1633 case 0x00000000: link = 0; type = DCB_OUTPUT_LVDS; break;
1634 case 0x00000100: link = 0; type = DCB_OUTPUT_TMDS; break;
1635 case 0x00000200: link = 1; type = DCB_OUTPUT_TMDS; break;
1636 case 0x00000500: link = 0; type = DCB_OUTPUT_TMDS; break;
1637 case 0x00000800: link = 0; type = DCB_OUTPUT_DP; break;
1638 case 0x00000900: link = 1; type = DCB_OUTPUT_DP; break;
1640 NV_ERROR(drm, "PDISP: unknown SOR mc 0x%08x\n", mc);
/* Match type, OR bit, and (for SORs) the decoded sublink. */
1647 for (i = 0; i < drm->vbios.dcb.entries; i++) {
1648 struct dcb_output *dcb = &drm->vbios.dcb.entry[i];
1649 if (dcb->type == type && (dcb->or & (1 << or)) &&
1650 (link < 0 || link == !(dcb->sorconf.link & 1)))
1654 NV_ERROR(drm, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
/* Supervisor-interrupt stage 1: for every output currently bound to the
 * changing crtc, run VBIOS display script 0x0000 (disable), then ack the
 * supervisor state and let the hardware continue (0x6101d0 bit 31). */
1659 nvd0_display_unk1_handler(struct drm_device *dev, u32 crtc, u32 mask)
1661 struct nouveau_device *device = nouveau_dev(dev);
1662 struct dcb_output *dcb;
/* 0x640180: current mode-ctrl per OR (stride 0x20); skip ORs not on
 * this crtc. */
1665 for (i = 0; mask && i < 8; i++) {
1666 u32 mcc = nv_rd32(device, 0x640180 + (i * 0x20));
1667 if (!(mcc & (1 << crtc)))
1670 dcb = lookup_dcb(dev, i, mcc);
1674 nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);
/* Ack both supervisor status registers, then kick the next stage. */
1677 nv_wr32(device, 0x6101d4, 0x00000000);
1678 nv_wr32(device, 0x6109d4, 0x00000000);
1679 nv_wr32(device, 0x6101d0, 0x80000000);
/* Supervisor-interrupt stage 2: runs the pre-clock scripts for outputs on
 * the old configuration, reprograms the crtc pixel clock, then runs the
 * mode scripts for the new configuration (0x660180 holds the pending
 * mode-ctrl, vs 0x640180 the current one) and sets the OR clock-source
 * register. Ends by acking the supervisor and continuing.
 * NOTE(review): missing lines include some branch bodies (tmp setup for
 * the 0x612300 write, the DP/default cases) — documented as shown. */
1683 nvd0_display_unk2_handler(struct drm_device *dev, u32 crtc, u32 mask)
1685 struct nouveau_device *device = nouveau_dev(dev);
1686 struct nouveau_drm *drm = nouveau_drm(dev);
1687 struct dcb_output *dcb;
/* Scripts for outputs still bound under the *current* config. */
1691 for (i = 0; mask && i < 8; i++) {
1692 u32 mcc = nv_rd32(device, 0x640180 + (i * 0x20));
1693 if (!(mcc & (1 << crtc)))
1696 dcb = lookup_dcb(dev, i, mcc);
1700 nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);
/* New pixel clock for this head, in kHz (0x660450 reads Hz?, /1000). */
1703 pclk = nv_rd32(device, 0x660450 + (crtc * 0x300)) / 1000;
1704 NV_DEBUG(drm, "PDISP: crtc %d pclk %d mask 0x%08x\n",
1706 if (pclk && (mask & 0x00010000)) {
1707 nv50_crtc_set_clock(dev, crtc, pclk);
/* Scripts + clock source for outputs under the *pending* config. */
1710 for (i = 0; mask && i < 8; i++) {
1711 u32 mcp = nv_rd32(device, 0x660180 + (i * 0x20));
1712 u32 cfg = nv_rd32(device, 0x660184 + (i * 0x20));
1713 if (!(mcp & (1 << crtc)))
1716 dcb = lookup_dcb(dev, i, mcp);
1719 or = ffs(dcb->or) - 1;
1721 nouveau_bios_run_display_table(dev, cfg, pclk, dcb, crtc);
1723 nv_wr32(device, 0x612200 + (crtc * 0x800), 0x00000000);
1724 switch (dcb->type) {
1725 case DCB_OUTPUT_ANALOG:
1726 nv_wr32(device, 0x612280 + (or * 0x800), 0x00000000);
1728 case DCB_OUTPUT_TMDS:
1729 case DCB_OUTPUT_LVDS:
1731 if (cfg & 0x00000100)
1736 nv_mask(device, 0x612300 + (or * 0x800), 0x00000707, tmp);
1745 nv_wr32(device, 0x6101d4, 0x00000000);
1746 nv_wr32(device, 0x6109d4, 0x00000000);
1747 nv_wr32(device, 0x6101d0, 0x80000000);
/* Supervisor-interrupt stage 4 (final): run the post-modeset VBIOS scripts
 * (negative pclk distinguishes this phase) for each output now bound to
 * the crtc, then ack the supervisor and resume. */
1751 nvd0_display_unk4_handler(struct drm_device *dev, u32 crtc, u32 mask)
1753 struct nouveau_device *device = nouveau_dev(dev);
1754 struct dcb_output *dcb;
1757 pclk = nv_rd32(device, 0x660450 + (crtc * 0x300)) / 1000;
1759 for (i = 0; mask && i < 8; i++) {
1760 u32 mcp = nv_rd32(device, 0x660180 + (i * 0x20));
1761 u32 cfg = nv_rd32(device, 0x660184 + (i * 0x20));
1762 if (!(mcp & (1 << crtc)))
1765 dcb = lookup_dcb(dev, i, mcp);
1769 nouveau_bios_run_display_table(dev, cfg, -pclk, dcb, crtc);
1772 nv_wr32(device, 0x6101d4, 0x00000000);
1773 nv_wr32(device, 0x6109d4, 0x00000000);
1774 nv_wr32(device, 0x6101d0, 0x80000000);
/* Tasklet bottom half for supervisor interrupts: optionally dumps state
 * when DRM debugging is on, finds the first crtc with a pending status
 * mask in 0x6101d4, and dispatches to the stage handler(s) recorded in
 * disp->modeset by the interrupt handler. */
1778 nvd0_display_bh(unsigned long data)
1780 struct drm_device *dev = (struct drm_device *)data;
1781 struct nouveau_device *device = nouveau_dev(dev);
1782 struct nouveau_drm *drm = nouveau_drm(dev);
1783 struct nvd0_display *disp = nvd0_display(dev);
1784 u32 mask = 0, crtc = ~0;
1787 if (drm_debug & (DRM_UT_DRIVER | DRM_UT_KMS)) {
1788 NV_INFO(drm, "PDISP: modeset req %d\n", disp->modeset);
1789 NV_INFO(drm, " STAT: 0x%08x 0x%08x 0x%08x\n",
1790 nv_rd32(device, 0x6101d0),
1791 nv_rd32(device, 0x6101d4), nv_rd32(device, 0x6109d4));
1792 for (i = 0; i < 8; i++) {
1793 NV_INFO(drm, " %s%d: 0x%08x 0x%08x\n",
1794 i < 4 ? "DAC" : "SOR", i,
1795 nv_rd32(device, 0x640180 + (i * 0x20)),
1796 nv_rd32(device, 0x660180 + (i * 0x20)));
/* crtc starts at ~0 so the pre-increment probes crtc 0 first. */
1800 while (!mask && ++crtc < dev->mode_config.num_crtc)
1801 mask = nv_rd32(device, 0x6101d4 + (crtc * 0x800));
/* disp->modeset bits 0/1/2 select supervisor stages 1/2/4. */
1803 if (disp->modeset & 0x00000001)
1804 nvd0_display_unk1_handler(dev, crtc, mask);
1805 if (disp->modeset & 0x00000002)
1806 nvd0_display_unk2_handler(dev, crtc, mask);
1807 if (disp->modeset & 0x00000004)
1808 nvd0_display_unk4_handler(dev, crtc, mask);
/* Top-level PDISP interrupt handler. Dispatches on 0x610088 status bits:
 * bit 0 (acked and ignored), bit 1 EVO channel errors (logged with the
 * offending method/data, then the channel is reset via 0x6101f0), bit 20
 * supervisor events (stashed in disp->modeset and deferred to the
 * tasklet), bits 24-27 vblank (handled by core). Anything left over is
 * logged as unknown.
 * NOTE(review): missing lines include the `if (chid >= 0)` guard around
 * the per-channel error handling — documented as shown. */
1812 nvd0_display_intr(struct drm_device *dev)
1814 struct nvd0_display *disp = nvd0_display(dev);
1815 struct nouveau_device *device = nouveau_dev(dev);
1816 struct nouveau_drm *drm = nouveau_drm(dev);
1817 u32 intr = nv_rd32(device, 0x610088);
1819 if (intr & 0x00000001) {
1820 u32 stat = nv_rd32(device, 0x61008c);
1821 nv_wr32(device, 0x61008c, stat);
1822 intr &= ~0x00000001;
/* EVO channel exception: 0x61009c has one bit per channel. */
1825 if (intr & 0x00000002) {
1826 u32 stat = nv_rd32(device, 0x61009c);
1827 int chid = ffs(stat) - 1;
1829 u32 mthd = nv_rd32(device, 0x6101f0 + (chid * 12));
1830 u32 data = nv_rd32(device, 0x6101f4 + (chid * 12));
1831 u32 unkn = nv_rd32(device, 0x6101f8 + (chid * 12));
1833 NV_INFO(drm, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
1835 chid, (mthd & 0x0000ffc), data, mthd, unkn);
/* Ack the channel and re-arm it (0x90000000 into its ctrl slot). */
1836 nv_wr32(device, 0x61009c, (1 << chid));
1837 nv_wr32(device, 0x6101f0 + (chid * 12), 0x90000000);
1840 intr &= ~0x00000002;
/* Supervisor: defer stages 1/2/4 to the tasklet, ack immediately. */
1843 if (intr & 0x00100000) {
1844 u32 stat = nv_rd32(device, 0x6100ac);
1846 if (stat & 0x00000007) {
1847 disp->modeset = stat;
1848 tasklet_schedule(&disp->tasklet);
1850 nv_wr32(device, 0x6100ac, (stat & 0x00000007));
1851 stat &= ~0x00000007;
1855 NV_INFO(drm, "PDISP: unknown intr24 0x%08x\n", stat);
1856 nv_wr32(device, 0x6100ac, stat);
1859 intr &= ~0x00100000;
1862 intr &= ~0x0f000000; /* vblank, handled in core */
1864 NV_INFO(drm, "PDISP: unknown intr 0x%08x\n", intr);
1867 /******************************************************************************
1869 *****************************************************************************/
/* Shut down the display engine channels in reverse of nvd0_display_init():
 * per-head cursor/overlay-immediate PIO channels and overlay/flip DMA
 * channels first, master channel last. */
1871 nvd0_display_fini(struct drm_device *dev)
1875 /* fini cursors + overlays + flips */
1876 for (i = 1; i >= 0; i--) {
1877 evo_fini_pio(dev, EVO_CURS(i));
1878 evo_fini_pio(dev, EVO_OIMM(i));
1879 evo_fini_dma(dev, EVO_OVLY(i));
1880 evo_fini_dma(dev, EVO_FLIP(i));
1884 evo_fini_dma(dev, EVO_MASTER);
/* Bring up the display engine: take PDISP out of VBIOS/pre-OS ownership if
 * needed, mirror current DAC/SOR/head state into the 0x6101xx shadow
 * registers (required for SOR_MODE_CTRL to work), point the engine at our
 * hash table / DMA objects, enable interrupts, initialise all EVO
 * channels, and push initial state (sync object binding) through the
 * master channel. On channel-init failure, tears everything down via
 * nvd0_display_fini().
 * NOTE(review): some error-return and brace lines are missing from this
 * chunk — flow documented as shown. */
1888 nvd0_display_init(struct drm_device *dev)
1890 struct nvd0_display *disp = nvd0_display(dev);
1891 struct nouveau_device *device = nouveau_dev(dev);
1892 struct nouveau_drm *drm = nouveau_drm(dev);
/* Bit 8 of 0x6100ac set means firmware still owns the display; release
 * it and wait for the handover to complete. */
1896 if (nv_rd32(device, 0x6100ac) & 0x00000100) {
1897 nv_wr32(device, 0x6100ac, 0x00000100);
1898 nv_mask(device, 0x6194e8, 0x00000001, 0x00000000);
1899 if (!nv_wait(device, 0x6194e8, 0x00000002, 0x00000000)) {
1900 NV_ERROR(drm, "PDISP: 0x6194e8 0x%08x\n",
1901 nv_rd32(device, 0x6194e8));
1906 /* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
1907 * work at all unless you do the SOR part below.
1909 for (i = 0; i < 3; i++) {
1910 u32 dac = nv_rd32(device, 0x61a000 + (i * 0x800));
1911 nv_wr32(device, 0x6101c0 + (i * 0x800), dac);
1914 for (i = 0; i < 4; i++) {
1915 u32 sor = nv_rd32(device, 0x61c000 + (i * 0x800));
1916 nv_wr32(device, 0x6301c4 + (i * 0x800), sor);
1919 for (i = 0; i < dev->mode_config.num_crtc; i++) {
1920 u32 crtc0 = nv_rd32(device, 0x616104 + (i * 0x800));
1921 u32 crtc1 = nv_rd32(device, 0x616108 + (i * 0x800));
1922 u32 crtc2 = nv_rd32(device, 0x61610c + (i * 0x800));
1923 nv_wr32(device, 0x6101b4 + (i * 0x800), crtc0);
1924 nv_wr32(device, 0x6101b8 + (i * 0x800), crtc1);
1925 nv_wr32(device, 0x6101bc + (i * 0x800), crtc2);
1928 /* point at our hash table / objects, enable interrupts */
1929 nv_wr32(device, 0x610010, (disp->mem->addr >> 8) | 9);
1930 nv_mask(device, 0x6100b0, 0x00000307, 0x00000307);
1933 ret = evo_init_dma(dev, EVO_MASTER);
1937 /* init flips + overlays + cursors */
1938 for (i = 0; i < dev->mode_config.num_crtc; i++) {
1939 if ((ret = evo_init_dma(dev, EVO_FLIP(i))) ||
1940 (ret = evo_init_dma(dev, EVO_OVLY(i))) ||
1941 (ret = evo_init_pio(dev, EVO_OIMM(i))) ||
1942 (ret = evo_init_pio(dev, EVO_CURS(i))))
/* Bind the shared sync/notifier object and prime the core channel. */
1946 push = evo_wait(dev, EVO_MASTER, 32);
1951 evo_mthd(push, 0x0088, 1);
1952 evo_data(push, NvEvoSync);
1953 evo_mthd(push, 0x0084, 1);
1954 evo_data(push, 0x00000000);
1955 evo_mthd(push, 0x0084, 1);
1956 evo_data(push, 0x80000000);
1957 evo_mthd(push, 0x008c, 1);
1958 evo_data(push, 0x00000000);
1959 evo_kick(push, dev, EVO_MASTER);
/* Error path: unwind any channels brought up so far. */
1963 nvd0_display_fini(dev);
/* Free everything nvd0_display_create() allocated: per-channel DMA push
 * buffers, the gpuobj hash-table/DMA-object memory, and the shared sync
 * buffer object; finally detach this priv from the display core.
 * (The kfree of disp itself is presumably on a line not visible here.) */
1968 nvd0_display_destroy(struct drm_device *dev)
1970 struct nvd0_display *disp = nvd0_display(dev);
1971 struct pci_dev *pdev = dev->pdev;
1974 for (i = 0; i < EVO_DMA_NR; i++) {
1975 struct evo *evo = &disp->evo[i];
1976 pci_free_consistent(pdev, PAGE_SIZE, evo->ptr, evo->handle);
1979 nouveau_gpuobj_ref(NULL, &disp->mem);
/* Unmap before dropping the last reference. */
1980 nouveau_bo_unmap(disp->sync);
1981 nouveau_bo_ref(NULL, &disp->sync);
1983 nouveau_display(dev)->priv = NULL;
1988 nvd0_display_create(struct drm_device *dev)
1990 struct nouveau_device *device = nouveau_dev(dev);
1991 struct nouveau_drm *drm = nouveau_drm(dev);
1992 struct nouveau_bar *bar = nouveau_bar(device);
1993 struct nouveau_fb *pfb = nouveau_fb(device);
1994 struct dcb_table *dcb = &drm->vbios.dcb;
1995 struct drm_connector *connector, *tmp;
1996 struct pci_dev *pdev = dev->pdev;
1997 struct nvd0_display *disp;
1998 struct dcb_output *dcbe;
2001 disp = kzalloc(sizeof(*disp), GFP_KERNEL);
2005 nouveau_display(dev)->priv = disp;
2006 nouveau_display(dev)->dtor = nvd0_display_destroy;
2007 nouveau_display(dev)->init = nvd0_display_init;
2008 nouveau_display(dev)->fini = nvd0_display_fini;
2010 /* create crtc objects to represent the hw heads */
2011 crtcs = nv_rd32(device, 0x022448);
2012 for (i = 0; i < crtcs; i++) {
2013 ret = nvd0_crtc_create(dev, i);
2018 /* create encoder/connector objects based on VBIOS DCB table */
2019 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
2020 connector = nouveau_connector_create(dev, dcbe->connector);
2021 if (IS_ERR(connector))
2024 if (dcbe->location != DCB_LOC_ON_CHIP) {
2025 NV_WARN(drm, "skipping off-chip encoder %d/%d\n",
2026 dcbe->type, ffs(dcbe->or) - 1);
2030 switch (dcbe->type) {
2031 case DCB_OUTPUT_TMDS:
2032 case DCB_OUTPUT_LVDS:
2034 nvd0_sor_create(connector, dcbe);
2036 case DCB_OUTPUT_ANALOG:
2037 nvd0_dac_create(connector, dcbe);
2040 NV_WARN(drm, "skipping unsupported encoder %d/%d\n",
2041 dcbe->type, ffs(dcbe->or) - 1);
2046 /* cull any connectors we created that don't have an encoder */
2047 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
2048 if (connector->encoder_ids[0])
2051 NV_WARN(drm, "%s has no encoders, removing\n",
2052 drm_get_connector_name(connector));
2053 connector->funcs->destroy(connector);
2056 /* setup interrupt handling */
2057 tasklet_init(&disp->tasklet, nvd0_display_bh, (unsigned long)dev);
2059 /* small shared memory area we use for notifiers and semaphores */
2060 ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
2061 0, 0x0000, NULL, &disp->sync);
2063 ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
2065 ret = nouveau_bo_map(disp->sync);
2067 nouveau_bo_ref(NULL, &disp->sync);
2073 /* hash table and dma objects for the memory areas we care about */
2074 ret = nouveau_gpuobj_new(nv_object(device), NULL, 0x4000, 0x10000,
2075 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
2079 /* create evo dma channels */
2080 for (i = 0; i < EVO_DMA_NR; i++) {
2081 struct evo *evo = &disp->evo[i];
2082 u64 offset = disp->sync->bo.offset;
2083 u32 dmao = 0x1000 + (i * 0x100);
2084 u32 hash = 0x0000 + (i * 0x040);
2087 evo->sem.offset = EVO_SYNC(evo->idx, 0x00);
2088 evo->ptr = pci_alloc_consistent(pdev, PAGE_SIZE, &evo->handle);
2094 nv_wo32(disp->mem, dmao + 0x00, 0x00000049);
2095 nv_wo32(disp->mem, dmao + 0x04, (offset + 0x0000) >> 8);
2096 nv_wo32(disp->mem, dmao + 0x08, (offset + 0x0fff) >> 8);
2097 nv_wo32(disp->mem, dmao + 0x0c, 0x00000000);
2098 nv_wo32(disp->mem, dmao + 0x10, 0x00000000);
2099 nv_wo32(disp->mem, dmao + 0x14, 0x00000000);
2100 nv_wo32(disp->mem, hash + 0x00, NvEvoSync);
2101 nv_wo32(disp->mem, hash + 0x04, 0x00000001 | (i << 27) |
2102 ((dmao + 0x00) << 9));
2104 nv_wo32(disp->mem, dmao + 0x20, 0x00000049);
2105 nv_wo32(disp->mem, dmao + 0x24, 0x00000000);
2106 nv_wo32(disp->mem, dmao + 0x28, (pfb->ram.size - 1) >> 8);
2107 nv_wo32(disp->mem, dmao + 0x2c, 0x00000000);
2108 nv_wo32(disp->mem, dmao + 0x30, 0x00000000);
2109 nv_wo32(disp->mem, dmao + 0x34, 0x00000000);
2110 nv_wo32(disp->mem, hash + 0x08, NvEvoVRAM);
2111 nv_wo32(disp->mem, hash + 0x0c, 0x00000001 | (i << 27) |
2112 ((dmao + 0x20) << 9));
2114 nv_wo32(disp->mem, dmao + 0x40, 0x00000009);
2115 nv_wo32(disp->mem, dmao + 0x44, 0x00000000);
2116 nv_wo32(disp->mem, dmao + 0x48, (pfb->ram.size - 1) >> 8);
2117 nv_wo32(disp->mem, dmao + 0x4c, 0x00000000);
2118 nv_wo32(disp->mem, dmao + 0x50, 0x00000000);
2119 nv_wo32(disp->mem, dmao + 0x54, 0x00000000);
2120 nv_wo32(disp->mem, hash + 0x10, NvEvoVRAM_LP);
2121 nv_wo32(disp->mem, hash + 0x14, 0x00000001 | (i << 27) |
2122 ((dmao + 0x40) << 9));
2124 nv_wo32(disp->mem, dmao + 0x60, 0x0fe00009);
2125 nv_wo32(disp->mem, dmao + 0x64, 0x00000000);
2126 nv_wo32(disp->mem, dmao + 0x68, (pfb->ram.size - 1) >> 8);
2127 nv_wo32(disp->mem, dmao + 0x6c, 0x00000000);
2128 nv_wo32(disp->mem, dmao + 0x70, 0x00000000);
2129 nv_wo32(disp->mem, dmao + 0x74, 0x00000000);
2130 nv_wo32(disp->mem, hash + 0x18, NvEvoFB32);
2131 nv_wo32(disp->mem, hash + 0x1c, 0x00000001 | (i << 27) |
2132 ((dmao + 0x60) << 9));
2139 nvd0_display_destroy(dev);