drivers/gpu/drm/gma500/cdv_intel_dp.c
1 /*
2 * Copyright © 2012 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 * Keith Packard <keithp@keithp.com>
25 *
26 */
27
28 #include <linux/i2c.h>
29 #include <linux/slab.h>
30 #include <linux/module.h>
31 #include <drm/drmP.h>
32 #include <drm/drm_crtc.h>
33 #include <drm/drm_crtc_helper.h>
34 #include "psb_drv.h"
35 #include "psb_intel_drv.h"
36 #include "psb_intel_reg.h"
37 #include "gma_display.h"
38 #include <drm/drm_dp_helper.h>
39
40 /**
41 * struct i2c_algo_dp_aux_data - driver interface structure for i2c over dp
42 * aux algorithm
43 * @running: set by the algo indicating whether an i2c transfer is ongoing or
44 * whether the i2c bus is quiescent
45 * @address: i2c target address for the currently ongoing transfer
46 * @aux_ch: driver callback to transfer a single byte of the i2c payload
47 */
48 struct i2c_algo_dp_aux_data {
49 bool running;
50 u16 address;
51 int (*aux_ch) (struct i2c_adapter *adapter,
52 int mode, uint8_t write_byte,
53 uint8_t *read_byte);
54 };
55
56 /* Run a single AUX_CH I2C transaction, writing/reading data as necessary */
57 static int
58 i2c_algo_dp_aux_transaction(struct i2c_adapter *adapter, int mode,
59 uint8_t write_byte, uint8_t *read_byte)
60 {
61 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
62 int ret;
63
64 ret = (*algo_data->aux_ch)(adapter, mode,
65 write_byte, read_byte);
66 return ret;
67 }
68
69 /*
70 * I2C over AUX CH
71 */
72
73 /*
74 * Send the address. If the I2C link is running, this 'restarts'
75 * the connection with the new address; this is used for doing
76 * a write followed by a read (as needed for DDC)
77 */
78 static int
79 i2c_algo_dp_aux_address(struct i2c_adapter *adapter, u16 address, bool reading)
80 {
81 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
82 int mode = MODE_I2C_START;
83 int ret;
84
85 if (reading)
86 mode |= MODE_I2C_READ;
87 else
88 mode |= MODE_I2C_WRITE;
89 algo_data->address = address;
90 algo_data->running = true;
91 ret = i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
92 return ret;
93 }
94
95 /*
96 * Stop the I2C transaction. This closes out the link, sending
97 * a bare address packet with the MOT bit turned off
98 */
99 static void
100 i2c_algo_dp_aux_stop(struct i2c_adapter *adapter, bool reading)
101 {
102 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
103 int mode = MODE_I2C_STOP;
104
105 if (reading)
106 mode |= MODE_I2C_READ;
107 else
108 mode |= MODE_I2C_WRITE;
109 if (algo_data->running) {
110 (void) i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
111 algo_data->running = false;
112 }
113 }
114
115 /*
116 * Write a single byte to the current I2C address. The
117 * I2C link must be running or this returns -EIO
118 */
119 static int
120 i2c_algo_dp_aux_put_byte(struct i2c_adapter *adapter, u8 byte)
121 {
122 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
123 int ret;
124
125 if (!algo_data->running)
126 return -EIO;
127
128 ret = i2c_algo_dp_aux_transaction(adapter, MODE_I2C_WRITE, byte, NULL);
129 return ret;
130 }
131
132 /*
133 * Read a single byte from the current I2C address. The
134 * I2C link must be running or this returns -EIO
135 */
136 static int
137 i2c_algo_dp_aux_get_byte(struct i2c_adapter *adapter, u8 *byte_ret)
138 {
139 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
140 int ret;
141
142 if (!algo_data->running)
143 return -EIO;
144
145 ret = i2c_algo_dp_aux_transaction(adapter, MODE_I2C_READ, 0, byte_ret);
146 return ret;
147 }
148
149 static int
150 i2c_algo_dp_aux_xfer(struct i2c_adapter *adapter,
151 struct i2c_msg *msgs,
152 int num)
153 {
154 int ret = 0;
155 bool reading = false;
156 int m;
157 int b;
158
159 for (m = 0; m < num; m++) {
160 u16 len = msgs[m].len;
161 u8 *buf = msgs[m].buf;
162 reading = (msgs[m].flags & I2C_M_RD) != 0;
163 ret = i2c_algo_dp_aux_address(adapter, msgs[m].addr, reading);
164 if (ret < 0)
165 break;
166 if (reading) {
167 for (b = 0; b < len; b++) {
168 ret = i2c_algo_dp_aux_get_byte(adapter, &buf[b]);
169 if (ret < 0)
170 break;
171 }
172 } else {
173 for (b = 0; b < len; b++) {
174 ret = i2c_algo_dp_aux_put_byte(adapter, buf[b]);
175 if (ret < 0)
176 break;
177 }
178 }
179 if (ret < 0)
180 break;
181 }
182 if (ret >= 0)
183 ret = num;
184 i2c_algo_dp_aux_stop(adapter, reading);
185 DRM_DEBUG_KMS("dp_aux_xfer return %d\n", ret);
186 return ret;
187 }
188
189 static u32
190 i2c_algo_dp_aux_functionality(struct i2c_adapter *adapter)
191 {
192 return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL |
193 I2C_FUNC_SMBUS_READ_BLOCK_DATA |
194 I2C_FUNC_SMBUS_BLOCK_PROC_CALL |
195 I2C_FUNC_10BIT_ADDR;
196 }
197
198 static const struct i2c_algorithm i2c_dp_aux_algo = {
199 .master_xfer = i2c_algo_dp_aux_xfer,
200 .functionality = i2c_algo_dp_aux_functionality,
201 };
202
203 static void
204 i2c_dp_aux_reset_bus(struct i2c_adapter *adapter)
205 {
206 (void) i2c_algo_dp_aux_address(adapter, 0, false);
207 (void) i2c_algo_dp_aux_stop(adapter, false);
208 }
209
210 static int
211 i2c_dp_aux_prepare_bus(struct i2c_adapter *adapter)
212 {
213 adapter->algo = &i2c_dp_aux_algo;
214 adapter->retries = 3;
215 i2c_dp_aux_reset_bus(adapter);
216 return 0;
217 }
218
219 /*
220 * FIXME: This is the old dp aux helper, gma500 is the last driver that needs to
221 * be ported over to the new helper code in drm_dp_helper.c like i915 or radeon.
222 */
223 static int __deprecated
224 i2c_dp_aux_add_bus(struct i2c_adapter *adapter)
225 {
226 int error;
227
228 error = i2c_dp_aux_prepare_bus(adapter);
229 if (error)
230 return error;
231 error = i2c_add_adapter(adapter);
232 return error;
233 }
234
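/*
 * Poll COND every W milliseconds until it becomes true or MS milliseconds
 * have elapsed; the macro evaluates to 0 on success or -ETIMEDOUT on
 * timeout. A W of 0 busy-waits, and the sleep is skipped while in the
 * kgdb master. wait_for() below is the common 1ms-interval variant.
 */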
235 #define _wait_for(COND, MS, W) ({ \
236 unsigned long timeout__ = jiffies + msecs_to_jiffies(MS); \
237 int ret__ = 0; \
238 while (! (COND)) { \
239 if (time_after(jiffies, timeout__)) { \
240 ret__ = -ETIMEDOUT; \
241 break; \
242 } \
243 if (W && !in_dbg_master()) msleep(W); \
244 } \
245 ret__; \
246 })
247
248 #define wait_for(COND, MS) _wait_for(COND, MS, 1)
249
250 #define DP_LINK_CHECK_TIMEOUT (10 * 1000)
251
252 #define DP_LINK_CONFIGURATION_SIZE 9
253
254 #define CDV_FAST_LINK_TRAIN 1
255
256 struct cdv_intel_dp {
257 uint32_t output_reg;
258 uint32_t DP;
259 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
260 bool has_audio;
261 int force_audio;
262 uint32_t color_range;
263 uint8_t link_bw;
264 uint8_t lane_count;
265 uint8_t dpcd[4];
266 struct gma_encoder *encoder;
267 struct i2c_adapter adapter;
268 struct i2c_algo_dp_aux_data algo;
269 uint8_t train_set[4];
270 uint8_t link_status[DP_LINK_STATUS_SIZE];
271 int panel_power_up_delay;
272 int panel_power_down_delay;
273 int panel_power_cycle_delay;
274 int backlight_on_delay;
275 int backlight_off_delay;
276 struct drm_display_mode *panel_fixed_mode; /* for eDP */
277 bool panel_on;
278 };
279
280 struct ddi_regoff {
281 uint32_t PreEmph1;
282 uint32_t PreEmph2;
283 uint32_t VSwing1;
284 uint32_t VSwing2;
285 uint32_t VSwing3;
286 uint32_t VSwing4;
287 uint32_t VSwing5;
288 };
289
290 static struct ddi_regoff ddi_DP_train_table[] = {
291 {.PreEmph1 = 0x812c, .PreEmph2 = 0x8124, .VSwing1 = 0x8154,
292 .VSwing2 = 0x8148, .VSwing3 = 0x814C, .VSwing4 = 0x8150,
293 .VSwing5 = 0x8158,},
294 {.PreEmph1 = 0x822c, .PreEmph2 = 0x8224, .VSwing1 = 0x8254,
295 .VSwing2 = 0x8248, .VSwing3 = 0x824C, .VSwing4 = 0x8250,
296 .VSwing5 = 0x8258,},
297 };
298
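/*
 * Sideband register values used by cdv_intel_dp_set_vswing_premph() below.
 * The entries appear to be (VSwing2 value, PreEmph2 value) pairs indexed by
 * the combined swing/pre-emphasis level; this pairing is inferred from the
 * indexing in that function rather than from documentation.
 */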
299 static uint32_t dp_vswing_premph_table[] = {
300 0x55338954, 0x4000,
301 0x554d8954, 0x2000,
302 0x55668954, 0,
303 0x559ac0d4, 0x6000,
304 };
305 /**
306 * is_edp - is the given encoder driving an eDP panel
307 * @encoder: the gma_encoder to check
308 *
309 * Returns true if the encoder is configured as an eDP output,
310 * false otherwise.
311 */
312 static bool is_edp(struct gma_encoder *encoder)
313 {
314 return encoder->type == INTEL_OUTPUT_EDP;
315 }
316
317
318 static void cdv_intel_dp_start_link_train(struct gma_encoder *encoder);
319 static void cdv_intel_dp_complete_link_train(struct gma_encoder *encoder);
320 static void cdv_intel_dp_link_down(struct gma_encoder *encoder);
321
322 static int
323 cdv_intel_dp_max_lane_count(struct gma_encoder *encoder)
324 {
325 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
326 int max_lane_count = 4;
327
328 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
329 max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
330 switch (max_lane_count) {
331 case 1: case 2: case 4:
332 break;
333 default:
334 max_lane_count = 4;
335 }
336 }
337 return max_lane_count;
338 }
339
340 static int
341 cdv_intel_dp_max_link_bw(struct gma_encoder *encoder)
342 {
343 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
344 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
345
346 switch (max_link_bw) {
347 case DP_LINK_BW_1_62:
348 case DP_LINK_BW_2_7:
349 break;
350 default:
351 max_link_bw = DP_LINK_BW_1_62;
352 break;
353 }
354 return max_link_bw;
355 }
356
357 static int
358 cdv_intel_dp_link_clock(uint8_t link_bw)
359 {
360 if (link_bw == DP_LINK_BW_2_7)
361 return 270000;
362 else
363 return 162000;
364 }
365
366 static int
367 cdv_intel_dp_link_required(int pixel_clock, int bpp)
368 {
369 return (pixel_clock * bpp + 7) / 8;
370 }
371
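/*
 * Maximum payload the link can carry, in the same kB/s units that
 * cdv_intel_dp_link_required() produces (link clock is in kHz and each
 * lane carries roughly one data byte per symbol); the 19/20 factor
 * appears to leave ~5% headroom rather than assume a perfect link.
 */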
372 static int
373 cdv_intel_dp_max_data_rate(int max_link_clock, int max_lanes)
374 {
375 return (max_link_clock * max_lanes * 19) / 20;
376 }
377
378 static void cdv_intel_edp_panel_vdd_on(struct gma_encoder *intel_encoder)
379 {
380 struct drm_device *dev = intel_encoder->base.dev;
381 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
382 u32 pp;
383
384 if (intel_dp->panel_on) {
385 DRM_DEBUG_KMS("Skip VDD on because of panel on\n");
386 return;
387 }
388 DRM_DEBUG_KMS("\n");
389
390 pp = REG_READ(PP_CONTROL);
391
392 pp |= EDP_FORCE_VDD;
393 REG_WRITE(PP_CONTROL, pp);
394 REG_READ(PP_CONTROL);
395 msleep(intel_dp->panel_power_up_delay);
396 }
397
398 static void cdv_intel_edp_panel_vdd_off(struct gma_encoder *intel_encoder)
399 {
400 struct drm_device *dev = intel_encoder->base.dev;
401 u32 pp;
402
403 DRM_DEBUG_KMS("\n");
404 pp = REG_READ(PP_CONTROL);
405
406 pp &= ~EDP_FORCE_VDD;
407 REG_WRITE(PP_CONTROL, pp);
408 REG_READ(PP_CONTROL);
409
410 }
411
412 /* Returns true if the panel was already on when called */
413 static bool cdv_intel_edp_panel_on(struct gma_encoder *intel_encoder)
414 {
415 struct drm_device *dev = intel_encoder->base.dev;
416 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
417 u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_NONE;
418
419 if (intel_dp->panel_on)
420 return true;
421
422 DRM_DEBUG_KMS("\n");
423 pp = REG_READ(PP_CONTROL);
424 pp &= ~PANEL_UNLOCK_MASK;
425
426 pp |= (PANEL_UNLOCK_REGS | POWER_TARGET_ON);
427 REG_WRITE(PP_CONTROL, pp);
428 REG_READ(PP_CONTROL);
429
430 if (wait_for(((REG_READ(PP_STATUS) & idle_on_mask) == idle_on_mask), 1000)) {
431 DRM_DEBUG_KMS("Error in Powering up eDP panel, status %x\n", REG_READ(PP_STATUS));
432 intel_dp->panel_on = false;
433 } else
434 intel_dp->panel_on = true;
435 msleep(intel_dp->panel_power_up_delay);
436
437 return false;
438 }
439
440 static void cdv_intel_edp_panel_off (struct gma_encoder *intel_encoder)
441 {
442 struct drm_device *dev = intel_encoder->base.dev;
443 u32 pp, idle_off_mask = PP_ON ;
444 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
445
446 DRM_DEBUG_KMS("\n");
447
448 pp = REG_READ(PP_CONTROL);
449
450 if ((pp & POWER_TARGET_ON) == 0)
451 return;
452
453 intel_dp->panel_on = false;
454 pp &= ~PANEL_UNLOCK_MASK;
455 /* ILK workaround: disable reset around power sequence */
456
457 pp &= ~POWER_TARGET_ON;
458 pp &= ~EDP_FORCE_VDD;
459 pp &= ~EDP_BLC_ENABLE;
460 REG_WRITE(PP_CONTROL, pp);
461 REG_READ(PP_CONTROL);
462 DRM_DEBUG_KMS("PP_STATUS %x\n", REG_READ(PP_STATUS));
463
464 if (wait_for((REG_READ(PP_STATUS) & idle_off_mask) == 0, 1000)) {
465 DRM_DEBUG_KMS("Error in turning off Panel\n");
466 }
467
468 msleep(intel_dp->panel_power_cycle_delay);
469 DRM_DEBUG_KMS("Over\n");
470 }
471
472 static void cdv_intel_edp_backlight_on (struct gma_encoder *intel_encoder)
473 {
474 struct drm_device *dev = intel_encoder->base.dev;
475 u32 pp;
476
477 DRM_DEBUG_KMS("\n");
478 /*
479 * If we enable the backlight right away following a panel power
480 * on, we may see slight flicker as the panel syncs with the eDP
481 * link. So delay a bit to make sure the image is solid before
482 * allowing it to appear.
483 */
484 msleep(300);
485 pp = REG_READ(PP_CONTROL);
486
487 pp |= EDP_BLC_ENABLE;
488 REG_WRITE(PP_CONTROL, pp);
489 gma_backlight_enable(dev);
490 }
491
492 static void cdv_intel_edp_backlight_off (struct gma_encoder *intel_encoder)
493 {
494 struct drm_device *dev = intel_encoder->base.dev;
495 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
496 u32 pp;
497
498 DRM_DEBUG_KMS("\n");
499 gma_backlight_disable(dev);
500 msleep(10);
501 pp = REG_READ(PP_CONTROL);
502
503 pp &= ~EDP_BLC_ENABLE;
504 REG_WRITE(PP_CONTROL, pp);
505 msleep(intel_dp->backlight_off_delay);
506 }
507
508 static int
509 cdv_intel_dp_mode_valid(struct drm_connector *connector,
510 struct drm_display_mode *mode)
511 {
512 struct gma_encoder *encoder = gma_attached_encoder(connector);
513 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
514 int max_link_clock = cdv_intel_dp_link_clock(cdv_intel_dp_max_link_bw(encoder));
515 int max_lanes = cdv_intel_dp_max_lane_count(encoder);
516 struct drm_psb_private *dev_priv = connector->dev->dev_private;
517
518 if (is_edp(encoder) && intel_dp->panel_fixed_mode) {
519 if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
520 return MODE_PANEL;
521 if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
522 return MODE_PANEL;
523 }
524
525 /* only refuse the mode on non-eDP since we have seen some weird eDP panels
526 which are outside spec tolerances but somehow work by magic */
527 if (!is_edp(encoder) &&
528 (cdv_intel_dp_link_required(mode->clock, dev_priv->edp.bpp)
529 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes)))
530 return MODE_CLOCK_HIGH;
531
532 if (is_edp(encoder)) {
533 if (cdv_intel_dp_link_required(mode->clock, 24)
534 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes))
535 return MODE_CLOCK_HIGH;
536
537 }
538 if (mode->clock < 10000)
539 return MODE_CLOCK_LOW;
540
541 return MODE_OK;
542 }
543
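/*
 * pack_aux()/unpack_aux() marshal up to four message bytes to and from a
 * 32-bit AUX data register, most-significant byte first (src[0] lands in
 * bits 31:24), which is the order the AUX data registers expect.
 */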
544 static uint32_t
545 pack_aux(uint8_t *src, int src_bytes)
546 {
547 int i;
548 uint32_t v = 0;
549
550 if (src_bytes > 4)
551 src_bytes = 4;
552 for (i = 0; i < src_bytes; i++)
553 v |= ((uint32_t) src[i]) << ((3-i) * 8);
554 return v;
555 }
556
557 static void
558 unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
559 {
560 int i;
561 if (dst_bytes > 4)
562 dst_bytes = 4;
563 for (i = 0; i < dst_bytes; i++)
564 dst[i] = src >> ((3-i) * 8);
565 }
566
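/*
 * Perform one raw AUX channel transaction: load the request into the data
 * registers, kick off the transfer, busy-wait for SEND_BUSY to clear, then
 * check the DONE/timeout/receive-error bits and unpack any reply bytes.
 * Returns the number of reply bytes, or a negative errno on failure.
 */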
567 static int
568 cdv_intel_dp_aux_ch(struct gma_encoder *encoder,
569 uint8_t *send, int send_bytes,
570 uint8_t *recv, int recv_size)
571 {
572 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
573 uint32_t output_reg = intel_dp->output_reg;
574 struct drm_device *dev = encoder->base.dev;
575 uint32_t ch_ctl = output_reg + 0x10;
576 uint32_t ch_data = ch_ctl + 4;
577 int i;
578 int recv_bytes;
579 uint32_t status;
580 uint32_t aux_clock_divider;
581 int try, precharge;
582
583 /* The clock divider is based off the hrawclk,
584 * and we want it to run at 2MHz. So, take the
585 * hrawclk value (in MHz) and divide by 2 and use that.
586 * On the CDV platform the hrawclk is 200MHz.
587 *
588 */
589 aux_clock_divider = 200 / 2;
590
591 precharge = 4;
592 if (is_edp(encoder))
593 precharge = 10;
594
595 if (REG_READ(ch_ctl) & DP_AUX_CH_CTL_SEND_BUSY) {
596 DRM_ERROR("dp_aux_ch not started status 0x%08x\n",
597 REG_READ(ch_ctl));
598 return -EBUSY;
599 }
600
601 /* Must try at least 3 times according to DP spec; we allow up to 5 */
602 for (try = 0; try < 5; try++) {
603 /* Load the send data into the aux channel data registers */
604 for (i = 0; i < send_bytes; i += 4)
605 REG_WRITE(ch_data + i,
606 pack_aux(send + i, send_bytes - i));
607
608 /* Send the command and wait for it to complete */
609 REG_WRITE(ch_ctl,
610 DP_AUX_CH_CTL_SEND_BUSY |
611 DP_AUX_CH_CTL_TIME_OUT_400us |
612 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
613 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
614 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
615 DP_AUX_CH_CTL_DONE |
616 DP_AUX_CH_CTL_TIME_OUT_ERROR |
617 DP_AUX_CH_CTL_RECEIVE_ERROR);
618 for (;;) {
619 status = REG_READ(ch_ctl);
620 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
621 break;
622 udelay(100);
623 }
624
625 /* Clear done status and any errors */
626 REG_WRITE(ch_ctl,
627 status |
628 DP_AUX_CH_CTL_DONE |
629 DP_AUX_CH_CTL_TIME_OUT_ERROR |
630 DP_AUX_CH_CTL_RECEIVE_ERROR);
631 if (status & DP_AUX_CH_CTL_DONE)
632 break;
633 }
634
635 if ((status & DP_AUX_CH_CTL_DONE) == 0) {
636 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
637 return -EBUSY;
638 }
639
640 /* Check for timeout or receive error.
641 * Timeouts occur when the sink is not connected
642 */
643 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
644 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
645 return -EIO;
646 }
647
648 /* Timeouts occur when the device isn't connected, so they're
649 * "normal" -- don't fill the kernel log with these */
650 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
651 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
652 return -ETIMEDOUT;
653 }
654
655 /* Unload any bytes sent back from the other side */
656 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
657 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
658 if (recv_bytes > recv_size)
659 recv_bytes = recv_size;
660
661 for (i = 0; i < recv_bytes; i += 4)
662 unpack_aux(REG_READ(ch_data + i),
663 recv + i, recv_bytes - i);
664
665 return recv_bytes;
666 }
667
668 /* Write data to the aux channel in native mode */
669 static int
670 cdv_intel_dp_aux_native_write(struct gma_encoder *encoder,
671 uint16_t address, uint8_t *send, int send_bytes)
672 {
673 int ret;
674 uint8_t msg[20];
675 int msg_bytes;
676 uint8_t ack;
677
678 if (send_bytes > 16)
679 return -1;
680 msg[0] = DP_AUX_NATIVE_WRITE << 4;
681 msg[1] = address >> 8;
682 msg[2] = address & 0xff;
683 msg[3] = send_bytes - 1;
684 memcpy(&msg[4], send, send_bytes);
685 msg_bytes = send_bytes + 4;
686 for (;;) {
687 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, &ack, 1);
688 if (ret < 0)
689 return ret;
690 ack >>= 4;
691 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK)
692 break;
693 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
694 udelay(100);
695 else
696 return -EIO;
697 }
698 return send_bytes;
699 }
700
701 /* Write a single byte to the aux channel in native mode */
702 static int
703 cdv_intel_dp_aux_native_write_1(struct gma_encoder *encoder,
704 uint16_t address, uint8_t byte)
705 {
706 return cdv_intel_dp_aux_native_write(encoder, address, &byte, 1);
707 }
708
709 /* read bytes from a native aux channel */
710 static int
711 cdv_intel_dp_aux_native_read(struct gma_encoder *encoder,
712 uint16_t address, uint8_t *recv, int recv_bytes)
713 {
714 uint8_t msg[4];
715 int msg_bytes;
716 uint8_t reply[20];
717 int reply_bytes;
718 uint8_t ack;
719 int ret;
720
721 msg[0] = DP_AUX_NATIVE_READ << 4;
722 msg[1] = address >> 8;
723 msg[2] = address & 0xff;
724 msg[3] = recv_bytes - 1;
725
726 msg_bytes = 4;
727 reply_bytes = recv_bytes + 1;
728
729 for (;;) {
730 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes,
731 reply, reply_bytes);
732 if (ret == 0)
733 return -EPROTO;
734 if (ret < 0)
735 return ret;
736 ack = reply[0] >> 4;
737 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) {
738 memcpy(recv, reply + 1, ret - 1);
739 return ret - 1;
740 }
741 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
742 udelay(100);
743 else
744 return -EIO;
745 }
746 }
747
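/*
 * Single-byte I2C-over-AUX callback used by the legacy i2c algorithm above:
 * build an AUX I2C read/write request (keeping MOT set except for the STOP
 * phase), send it, and retry on native or I2C DEFER replies before giving up.
 */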
748 static int
749 cdv_intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
750 uint8_t write_byte, uint8_t *read_byte)
751 {
752 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
753 struct cdv_intel_dp *intel_dp = container_of(adapter,
754 struct cdv_intel_dp,
755 adapter);
756 struct gma_encoder *encoder = intel_dp->encoder;
757 uint16_t address = algo_data->address;
758 uint8_t msg[5];
759 uint8_t reply[2];
760 unsigned retry;
761 int msg_bytes;
762 int reply_bytes;
763 int ret;
764
765 /* Set up the command byte */
766 if (mode & MODE_I2C_READ)
767 msg[0] = DP_AUX_I2C_READ << 4;
768 else
769 msg[0] = DP_AUX_I2C_WRITE << 4;
770
771 if (!(mode & MODE_I2C_STOP))
772 msg[0] |= DP_AUX_I2C_MOT << 4;
773
774 msg[1] = address >> 8;
775 msg[2] = address;
776
777 switch (mode) {
778 case MODE_I2C_WRITE:
779 msg[3] = 0;
780 msg[4] = write_byte;
781 msg_bytes = 5;
782 reply_bytes = 1;
783 break;
784 case MODE_I2C_READ:
785 msg[3] = 0;
786 msg_bytes = 4;
787 reply_bytes = 2;
788 break;
789 default:
790 msg_bytes = 3;
791 reply_bytes = 1;
792 break;
793 }
794
795 for (retry = 0; retry < 5; retry++) {
796 ret = cdv_intel_dp_aux_ch(encoder,
797 msg, msg_bytes,
798 reply, reply_bytes);
799 if (ret < 0) {
800 DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
801 return ret;
802 }
803
804 switch ((reply[0] >> 4) & DP_AUX_NATIVE_REPLY_MASK) {
805 case DP_AUX_NATIVE_REPLY_ACK:
806 /* I2C-over-AUX Reply field is only valid
807 * when paired with AUX ACK.
808 */
809 break;
810 case DP_AUX_NATIVE_REPLY_NACK:
811 DRM_DEBUG_KMS("aux_ch native nack\n");
812 return -EREMOTEIO;
813 case DP_AUX_NATIVE_REPLY_DEFER:
814 udelay(100);
815 continue;
816 default:
817 DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
818 reply[0]);
819 return -EREMOTEIO;
820 }
821
822 switch ((reply[0] >> 4) & DP_AUX_I2C_REPLY_MASK) {
823 case DP_AUX_I2C_REPLY_ACK:
824 if (mode == MODE_I2C_READ) {
825 *read_byte = reply[1];
826 }
827 return reply_bytes - 1;
828 case DP_AUX_I2C_REPLY_NACK:
829 DRM_DEBUG_KMS("aux_i2c nack\n");
830 return -EREMOTEIO;
831 case DP_AUX_I2C_REPLY_DEFER:
832 DRM_DEBUG_KMS("aux_i2c defer\n");
833 udelay(100);
834 break;
835 default:
836 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
837 return -EREMOTEIO;
838 }
839 }
840
841 DRM_ERROR("too many retries, giving up\n");
842 return -EREMOTEIO;
843 }
844
845 static int
846 cdv_intel_dp_i2c_init(struct gma_connector *connector,
847 struct gma_encoder *encoder, const char *name)
848 {
849 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
850 int ret;
851
852 DRM_DEBUG_KMS("i2c_init %s\n", name);
853
854 intel_dp->algo.running = false;
855 intel_dp->algo.address = 0;
856 intel_dp->algo.aux_ch = cdv_intel_dp_i2c_aux_ch;
857
858 memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter));
859 intel_dp->adapter.owner = THIS_MODULE;
860 intel_dp->adapter.class = I2C_CLASS_DDC;
861 strncpy (intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
862 intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
863 intel_dp->adapter.algo_data = &intel_dp->algo;
864 intel_dp->adapter.dev.parent = connector->base.kdev;
865
866 if (is_edp(encoder))
867 cdv_intel_edp_panel_vdd_on(encoder);
868 ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
869 if (is_edp(encoder))
870 cdv_intel_edp_panel_vdd_off(encoder);
871
872 return ret;
873 }
874
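/*
 * Force the adjusted mode to the panel's fixed (native) timings; used for
 * eDP, where the panel only runs at its native mode.
 */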
875 static void cdv_intel_fixed_panel_mode(struct drm_display_mode *fixed_mode,
876 struct drm_display_mode *adjusted_mode)
877 {
878 adjusted_mode->hdisplay = fixed_mode->hdisplay;
879 adjusted_mode->hsync_start = fixed_mode->hsync_start;
880 adjusted_mode->hsync_end = fixed_mode->hsync_end;
881 adjusted_mode->htotal = fixed_mode->htotal;
882
883 adjusted_mode->vdisplay = fixed_mode->vdisplay;
884 adjusted_mode->vsync_start = fixed_mode->vsync_start;
885 adjusted_mode->vsync_end = fixed_mode->vsync_end;
886 adjusted_mode->vtotal = fixed_mode->vtotal;
887
888 adjusted_mode->clock = fixed_mode->clock;
889
890 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
891 }
892
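/*
 * Pick link parameters for the mode: walk the lane counts from fewest to
 * most and, within each, the link rates from fastest to slowest, keeping
 * the first combination whose available data rate can carry the requested
 * pixel clock at the chosen bpp. For eDP we fall back to the maximum
 * configuration rather than reject the mode.
 */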
893 static bool
894 cdv_intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode,
895 struct drm_display_mode *adjusted_mode)
896 {
897 struct drm_psb_private *dev_priv = encoder->dev->dev_private;
898 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
899 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
900 int lane_count, clock;
901 int max_lane_count = cdv_intel_dp_max_lane_count(intel_encoder);
902 int max_clock = cdv_intel_dp_max_link_bw(intel_encoder) == DP_LINK_BW_2_7 ? 1 : 0;
903 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
904 int refclock = mode->clock;
905 int bpp = 24;
906
907 if (is_edp(intel_encoder) && intel_dp->panel_fixed_mode) {
908 cdv_intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
909 refclock = intel_dp->panel_fixed_mode->clock;
910 bpp = dev_priv->edp.bpp;
911 }
912
913 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
914 for (clock = max_clock; clock >= 0; clock--) {
915 int link_avail = cdv_intel_dp_max_data_rate(cdv_intel_dp_link_clock(bws[clock]), lane_count);
916
917 if (cdv_intel_dp_link_required(refclock, bpp) <= link_avail) {
918 intel_dp->link_bw = bws[clock];
919 intel_dp->lane_count = lane_count;
920 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
921 DRM_DEBUG_KMS("Display port link bw %02x lane "
922 "count %d clock %d\n",
923 intel_dp->link_bw, intel_dp->lane_count,
924 adjusted_mode->clock);
925 return true;
926 }
927 }
928 }
929 if (is_edp(intel_encoder)) {
930 /* okay we failed just pick the highest */
931 intel_dp->lane_count = max_lane_count;
932 intel_dp->link_bw = bws[max_clock];
933 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
934 DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
935 "count %d clock %d\n",
936 intel_dp->link_bw, intel_dp->lane_count,
937 adjusted_mode->clock);
938
939 return true;
940 }
941 return false;
942 }
943
944 struct cdv_intel_dp_m_n {
945 uint32_t tu;
946 uint32_t gmch_m;
947 uint32_t gmch_n;
948 uint32_t link_m;
949 uint32_t link_n;
950 };
951
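/*
 * Scale a num/den ratio so that the denominator becomes 0x800000 (2^23),
 * keeping both values within what appear to be 24-bit M/N register fields
 * (hence the 0xffffff cap in the disabled variant inside the function).
 * The data M/N pair computed below is (pixel byte rate) / (link byte rate);
 * the link M/N pair is pixel clock / link clock.
 */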
952 static void
953 cdv_intel_reduce_ratio(uint32_t *num, uint32_t *den)
954 {
955 /*
956 while (*num > 0xffffff || *den > 0xffffff) {
957 *num >>= 1;
958 *den >>= 1;
959 }*/
960 uint64_t value, m;
961 m = *num;
962 value = m * (0x800000);
963 m = do_div(value, *den);
964 *num = value;
965 *den = 0x800000;
966 }
967
968 static void
969 cdv_intel_dp_compute_m_n(int bpp,
970 int nlanes,
971 int pixel_clock,
972 int link_clock,
973 struct cdv_intel_dp_m_n *m_n)
974 {
975 m_n->tu = 64;
976 m_n->gmch_m = (pixel_clock * bpp + 7) >> 3;
977 m_n->gmch_n = link_clock * nlanes;
978 cdv_intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
979 m_n->link_m = pixel_clock;
980 m_n->link_n = link_clock;
981 cdv_intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
982 }
983
984 void
985 cdv_intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
986 struct drm_display_mode *adjusted_mode)
987 {
988 struct drm_device *dev = crtc->dev;
989 struct drm_psb_private *dev_priv = dev->dev_private;
990 struct drm_mode_config *mode_config = &dev->mode_config;
991 struct drm_encoder *encoder;
992 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
993 int lane_count = 4, bpp = 24;
994 struct cdv_intel_dp_m_n m_n;
995 int pipe = gma_crtc->pipe;
996
997 /*
998 * Find the lane count in the intel_encoder private
999 */
1000 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
1001 struct gma_encoder *intel_encoder;
1002 struct cdv_intel_dp *intel_dp;
1003
1004 if (encoder->crtc != crtc)
1005 continue;
1006
1007 intel_encoder = to_gma_encoder(encoder);
1008 intel_dp = intel_encoder->dev_priv;
1009 if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) {
1010 lane_count = intel_dp->lane_count;
1011 break;
1012 } else if (is_edp(intel_encoder)) {
1013 lane_count = intel_dp->lane_count;
1014 bpp = dev_priv->edp.bpp;
1015 break;
1016 }
1017 }
1018
1019 /*
1020 * Compute the GMCH and Link ratios. The default bpp of 24 used here
1021 * is the number of bits per pixel post-LUT, which we always
1022 * set up for 8-bits of R/G/B, or 3 bytes total.
1023 */
1024 cdv_intel_dp_compute_m_n(bpp, lane_count,
1025 mode->clock, adjusted_mode->clock, &m_n);
1026
1027 {
1028 REG_WRITE(PIPE_GMCH_DATA_M(pipe),
1029 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
1030 m_n.gmch_m);
1031 REG_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
1032 REG_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
1033 REG_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
1034 }
1035 }
1036
1037 static void
1038 cdv_intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1039 struct drm_display_mode *adjusted_mode)
1040 {
1041 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1042 struct drm_crtc *crtc = encoder->crtc;
1043 struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
1044 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1045 struct drm_device *dev = encoder->dev;
1046
1047 intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
1048 intel_dp->DP |= intel_dp->color_range;
1049
1050 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
1051 intel_dp->DP |= DP_SYNC_HS_HIGH;
1052 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
1053 intel_dp->DP |= DP_SYNC_VS_HIGH;
1054
1055 intel_dp->DP |= DP_LINK_TRAIN_OFF;
1056
1057 switch (intel_dp->lane_count) {
1058 case 1:
1059 intel_dp->DP |= DP_PORT_WIDTH_1;
1060 break;
1061 case 2:
1062 intel_dp->DP |= DP_PORT_WIDTH_2;
1063 break;
1064 case 4:
1065 intel_dp->DP |= DP_PORT_WIDTH_4;
1066 break;
1067 }
1068 if (intel_dp->has_audio)
1069 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
1070
1071 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
1072 intel_dp->link_configuration[0] = intel_dp->link_bw;
1073 intel_dp->link_configuration[1] = intel_dp->lane_count;
1074
1075 /*
1076 * Check for DPCD version > 1.1 and enhanced framing support
1077 */
1078 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
1079 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
1080 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
1081 intel_dp->DP |= DP_ENHANCED_FRAMING;
1082 }
1083
1084 /* Pipe select: set the DP_PIPEB_SELECT bit in the port register when driving pipe B */
1085 if (gma_crtc->pipe == 1)
1086 intel_dp->DP |= DP_PIPEB_SELECT;
1087
1088 REG_WRITE(intel_dp->output_reg, (intel_dp->DP | DP_PORT_EN));
1089 DRM_DEBUG_KMS("DP expected reg is %x\n", intel_dp->DP);
1090 if (is_edp(intel_encoder)) {
1091 uint32_t pfit_control;
1092 cdv_intel_edp_panel_on(intel_encoder);
1093
1094 if (mode->hdisplay != adjusted_mode->hdisplay ||
1095 mode->vdisplay != adjusted_mode->vdisplay)
1096 pfit_control = PFIT_ENABLE;
1097 else
1098 pfit_control = 0;
1099
1100 pfit_control |= gma_crtc->pipe << PFIT_PIPE_SHIFT;
1101
1102 REG_WRITE(PFIT_CONTROL, pfit_control);
1103 }
1104 }
1105
1106
1107 /* If the sink supports it, try to set the power state appropriately */
1108 static void cdv_intel_dp_sink_dpms(struct gma_encoder *encoder, int mode)
1109 {
1110 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1111 int ret, i;
1112
1113 /* Should have a valid DPCD by this point */
1114 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
1115 return;
1116
1117 if (mode != DRM_MODE_DPMS_ON) {
1118 ret = cdv_intel_dp_aux_native_write_1(encoder, DP_SET_POWER,
1119 DP_SET_POWER_D3);
1120 if (ret != 1)
1121 DRM_DEBUG_DRIVER("failed to write sink power state\n");
1122 } else {
1123 /*
1124 * When turning on, we need to retry for 1ms to give the sink
1125 * time to wake up.
1126 */
1127 for (i = 0; i < 3; i++) {
1128 ret = cdv_intel_dp_aux_native_write_1(encoder,
1129 DP_SET_POWER,
1130 DP_SET_POWER_D0);
1131 if (ret == 1)
1132 break;
1133 udelay(1000);
1134 }
1135 }
1136 }
1137
1138 static void cdv_intel_dp_prepare(struct drm_encoder *encoder)
1139 {
1140 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1141 int edp = is_edp(intel_encoder);
1142
1143 if (edp) {
1144 cdv_intel_edp_backlight_off(intel_encoder);
1145 cdv_intel_edp_panel_off(intel_encoder);
1146 cdv_intel_edp_panel_vdd_on(intel_encoder);
1147 }
1148 /* Wake up the sink first */
1149 cdv_intel_dp_sink_dpms(intel_encoder, DRM_MODE_DPMS_ON);
1150 cdv_intel_dp_link_down(intel_encoder);
1151 if (edp)
1152 cdv_intel_edp_panel_vdd_off(intel_encoder);
1153 }
1154
1155 static void cdv_intel_dp_commit(struct drm_encoder *encoder)
1156 {
1157 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1158 int edp = is_edp(intel_encoder);
1159
1160 if (edp)
1161 cdv_intel_edp_panel_on(intel_encoder);
1162 cdv_intel_dp_start_link_train(intel_encoder);
1163 cdv_intel_dp_complete_link_train(intel_encoder);
1164 if (edp)
1165 cdv_intel_edp_backlight_on(intel_encoder);
1166 }
1167
1168 static void
1169 cdv_intel_dp_dpms(struct drm_encoder *encoder, int mode)
1170 {
1171 struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
1172 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1173 struct drm_device *dev = encoder->dev;
1174 uint32_t dp_reg = REG_READ(intel_dp->output_reg);
1175 int edp = is_edp(intel_encoder);
1176
1177 if (mode != DRM_MODE_DPMS_ON) {
1178 if (edp) {
1179 cdv_intel_edp_backlight_off(intel_encoder);
1180 cdv_intel_edp_panel_vdd_on(intel_encoder);
1181 }
1182 cdv_intel_dp_sink_dpms(intel_encoder, mode);
1183 cdv_intel_dp_link_down(intel_encoder);
1184 if (edp) {
1185 cdv_intel_edp_panel_vdd_off(intel_encoder);
1186 cdv_intel_edp_panel_off(intel_encoder);
1187 }
1188 } else {
1189 if (edp)
1190 cdv_intel_edp_panel_on(intel_encoder);
1191 cdv_intel_dp_sink_dpms(intel_encoder, mode);
1192 if (!(dp_reg & DP_PORT_EN)) {
1193 cdv_intel_dp_start_link_train(intel_encoder);
1194 cdv_intel_dp_complete_link_train(intel_encoder);
1195 }
1196 if (edp)
1197 cdv_intel_edp_backlight_on(intel_encoder);
1198 }
1199 }
1200
1201 /*
1202 * Native read with retry for link status and receiver capability reads for
1203 * cases where the sink may still be asleep.
1204 */
1205 static bool
1206 cdv_intel_dp_aux_native_read_retry(struct gma_encoder *encoder, uint16_t address,
1207 uint8_t *recv, int recv_bytes)
1208 {
1209 int ret, i;
1210
1211 /*
1212 * Sinks are *supposed* to come up within 1ms from an off state,
1213 * but we're also supposed to retry 3 times per the spec.
1214 */
1215 for (i = 0; i < 3; i++) {
1216 ret = cdv_intel_dp_aux_native_read(encoder, address, recv,
1217 recv_bytes);
1218 if (ret == recv_bytes)
1219 return true;
1220 udelay(1000);
1221 }
1222
1223 return false;
1224 }
1225
1226 /*
1227 * Fetch AUX CH registers 0x202 - 0x207 which contain
1228 * link status information
1229 */
1230 static bool
1231 cdv_intel_dp_get_link_status(struct gma_encoder *encoder)
1232 {
1233 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1234 return cdv_intel_dp_aux_native_read_retry(encoder,
1235 DP_LANE0_1_STATUS,
1236 intel_dp->link_status,
1237 DP_LINK_STATUS_SIZE);
1238 }
1239
1240 static uint8_t
1241 cdv_intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1242 int r)
1243 {
1244 return link_status[r - DP_LANE0_1_STATUS];
1245 }
1246
1247 static uint8_t
1248 cdv_intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
1249 int lane)
1250 {
1251 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1252 int s = ((lane & 1) ?
1253 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
1254 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
1255 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1256
1257 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
1258 }
1259
1260 static uint8_t
1261 cdv_intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
1262 int lane)
1263 {
1264 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1265 int s = ((lane & 1) ?
1266 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
1267 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
1268 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1269
1270 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
1271 }
1272
1273
1274 #if 0
1275 static char *voltage_names[] = {
1276 "0.4V", "0.6V", "0.8V", "1.2V"
1277 };
1278 static char *pre_emph_names[] = {
1279 "0dB", "3.5dB", "6dB", "9.5dB"
1280 };
1281 static char *link_train_names[] = {
1282 "pattern 1", "pattern 2", "idle", "off"
1283 };
1284 #endif
1285
1286 #define CDV_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3
1287 /*
1288 static uint8_t
1289 cdv_intel_dp_pre_emphasis_max(uint8_t voltage_swing)
1290 {
1291 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
1292 case DP_TRAIN_VOLTAGE_SWING_400:
1293 return DP_TRAIN_PRE_EMPHASIS_6;
1294 case DP_TRAIN_VOLTAGE_SWING_600:
1295 return DP_TRAIN_PRE_EMPHASIS_6;
1296 case DP_TRAIN_VOLTAGE_SWING_800:
1297 return DP_TRAIN_PRE_EMPHASIS_3_5;
1298 case DP_TRAIN_VOLTAGE_SWING_1200:
1299 default:
1300 return DP_TRAIN_PRE_EMPHASIS_0;
1301 }
1302 }
1303 */
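/*
 * Take the per-lane voltage swing and pre-emphasis adjustments requested in
 * the last link status read, pick the highest of each across all active
 * lanes, clamp at the hardware maximum, and apply that value to every lane.
 */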
1304 static void
1305 cdv_intel_get_adjust_train(struct gma_encoder *encoder)
1306 {
1307 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1308 uint8_t v = 0;
1309 uint8_t p = 0;
1310 int lane;
1311
1312 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1313 uint8_t this_v = cdv_intel_get_adjust_request_voltage(intel_dp->link_status, lane);
1314 uint8_t this_p = cdv_intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);
1315
1316 if (this_v > v)
1317 v = this_v;
1318 if (this_p > p)
1319 p = this_p;
1320 }
1321
1322 if (v >= CDV_DP_VOLTAGE_MAX)
1323 v = CDV_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
1324
1325 if (p == DP_TRAIN_PRE_EMPHASIS_MASK)
1326 p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
1327
1328 for (lane = 0; lane < 4; lane++)
1329 intel_dp->train_set[lane] = v | p;
1330 }
1331
1332
1333 static uint8_t
1334 cdv_intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1335 int lane)
1336 {
1337 int i = DP_LANE0_1_STATUS + (lane >> 1);
1338 int s = (lane & 1) * 4;
1339 uint8_t l = cdv_intel_dp_link_status(link_status, i);
1340
1341 return (l >> s) & 0xf;
1342 }
1343
1344 /* Check for clock recovery is done on all channels */
1345 static bool
1346 cdv_intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1347 {
1348 int lane;
1349 uint8_t lane_status;
1350
1351 for (lane = 0; lane < lane_count; lane++) {
1352 lane_status = cdv_intel_get_lane_status(link_status, lane);
1353 if ((lane_status & DP_LANE_CR_DONE) == 0)
1354 return false;
1355 }
1356 return true;
1357 }
1358
1359 /* Check to see if channel eq is done on all channels */
1360 #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
1361 DP_LANE_CHANNEL_EQ_DONE|\
1362 DP_LANE_SYMBOL_LOCKED)
1363 static bool
1364 cdv_intel_channel_eq_ok(struct gma_encoder *encoder)
1365 {
1366 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1367 uint8_t lane_align;
1368 uint8_t lane_status;
1369 int lane;
1370
1371 lane_align = cdv_intel_dp_link_status(intel_dp->link_status,
1372 DP_LANE_ALIGN_STATUS_UPDATED);
1373 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
1374 return false;
1375 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1376 lane_status = cdv_intel_get_lane_status(intel_dp->link_status, lane);
1377 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
1378 return false;
1379 }
1380 return true;
1381 }
1382
1383 static bool
1384 cdv_intel_dp_set_link_train(struct gma_encoder *encoder,
1385 uint32_t dp_reg_value,
1386 uint8_t dp_train_pat)
1387 {
1388
1389 struct drm_device *dev = encoder->base.dev;
1390 int ret;
1391 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1392
1393 REG_WRITE(intel_dp->output_reg, dp_reg_value);
1394 REG_READ(intel_dp->output_reg);
1395
1396 ret = cdv_intel_dp_aux_native_write_1(encoder,
1397 DP_TRAINING_PATTERN_SET,
1398 dp_train_pat);
1399
1400 if (ret != 1) {
1401 DRM_DEBUG_KMS("Failure in setting link pattern %x\n",
1402 dp_train_pat);
1403 return false;
1404 }
1405
1406 return true;
1407 }
1408
1409
1410 static bool
1411 cdv_intel_dplink_set_level(struct gma_encoder *encoder,
1412 uint8_t dp_train_pat)
1413 {
1414
1415 int ret;
1416 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1417
1418 ret = cdv_intel_dp_aux_native_write(encoder,
1419 DP_TRAINING_LANE0_SET,
1420 intel_dp->train_set,
1421 intel_dp->lane_count);
1422
1423 if (ret != intel_dp->lane_count) {
1424 DRM_DEBUG_KMS("Failure in setting level %d, lane_cnt= %d\n",
1425 intel_dp->train_set[0], intel_dp->lane_count);
1426 return false;
1427 }
1428 return true;
1429 }
1430
1431 static void
1432 cdv_intel_dp_set_vswing_premph(struct gma_encoder *encoder, uint8_t signal_level)
1433 {
1434 struct drm_device *dev = encoder->base.dev;
1435 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1436 struct ddi_regoff *ddi_reg;
1437 int vswing, premph, index;
1438
1439 if (intel_dp->output_reg == DP_B)
1440 ddi_reg = &ddi_DP_train_table[0];
1441 else
1442 ddi_reg = &ddi_DP_train_table[1];
1443
1444 vswing = (signal_level & DP_TRAIN_VOLTAGE_SWING_MASK);
1445 premph = ((signal_level & DP_TRAIN_PRE_EMPHASIS_MASK)) >>
1446 DP_TRAIN_PRE_EMPHASIS_SHIFT;
1447
1448 if (vswing + premph > 3)
1449 return;
1450 #ifdef CDV_FAST_LINK_TRAIN
1451 return;
1452 #endif
1453 DRM_DEBUG_KMS("Test2\n");
1454 //return ;
1455 cdv_sb_reset(dev);
1456 /* ;Swing voltage programming
1457 ;gfx_dpio_set_reg(0xc058, 0x0505313A) */
1458 cdv_sb_write(dev, ddi_reg->VSwing5, 0x0505313A);
1459
1460 /* ;gfx_dpio_set_reg(0x8154, 0x43406055) */
1461 cdv_sb_write(dev, ddi_reg->VSwing1, 0x43406055);
1462
1463 /* ;gfx_dpio_set_reg(0x8148, 0x55338954)
1464 * The VSwing_PreEmph table is also considered based on the vswing/premph
1465 */
1466 index = (vswing + premph) * 2;
1467 if (premph == 1 && vswing == 1) {
1468 cdv_sb_write(dev, ddi_reg->VSwing2, 0x055738954);
1469 } else
1470 cdv_sb_write(dev, ddi_reg->VSwing2, dp_vswing_premph_table[index]);
1471
1472 /* ;gfx_dpio_set_reg(0x814c, 0x40802040) */
1473 if ((vswing + premph) == DP_TRAIN_VOLTAGE_SWING_LEVEL_3)
1474 cdv_sb_write(dev, ddi_reg->VSwing3, 0x70802040);
1475 else
1476 cdv_sb_write(dev, ddi_reg->VSwing3, 0x40802040);
1477
1478 /* ;gfx_dpio_set_reg(0x8150, 0x2b405555) */
1479 /* cdv_sb_write(dev, ddi_reg->VSwing4, 0x2b405555); */
1480
1481 /* ;gfx_dpio_set_reg(0x8154, 0xc3406055) */
1482 cdv_sb_write(dev, ddi_reg->VSwing1, 0xc3406055);
1483
1484 /* ;Pre emphasis programming
1485 * ;gfx_dpio_set_reg(0xc02c, 0x1f030040)
1486 */
1487 cdv_sb_write(dev, ddi_reg->PreEmph1, 0x1f030040);
1488
1489 /* ;gfx_dpio_set_reg(0x8124, 0x00004000) */
1490 index = 2 * premph + 1;
1491 cdv_sb_write(dev, ddi_reg->PreEmph2, dp_vswing_premph_table[index]);
1492 return;
1493 }
1494
1495
1496 /* Enable corresponding port and start training pattern 1 */
1497 static void
1498 cdv_intel_dp_start_link_train(struct gma_encoder *encoder)
1499 {
1500 struct drm_device *dev = encoder->base.dev;
1501 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1502 int i;
1503 uint8_t voltage;
1504 bool clock_recovery = false;
1505 int tries;
1506 u32 reg;
1507 uint32_t DP = intel_dp->DP;
1508
1509 DP |= DP_PORT_EN;
1510 DP &= ~DP_LINK_TRAIN_MASK;
1511
1512 reg = DP;
1513 reg |= DP_LINK_TRAIN_PAT_1;
1514 /* Enable output, wait for it to become active */
1515 REG_WRITE(intel_dp->output_reg, reg);
1516 REG_READ(intel_dp->output_reg);
1517 gma_wait_for_vblank(dev);
1518
1519 DRM_DEBUG_KMS("Link config\n");
1520 /* Write the link configuration data */
1521 cdv_intel_dp_aux_native_write(encoder, DP_LINK_BW_SET,
1522 intel_dp->link_configuration,
1523 2);
1524
1525 memset(intel_dp->train_set, 0, 4);
1526 voltage = 0;
1527 tries = 0;
1528 clock_recovery = false;
1529
1530 DRM_DEBUG_KMS("Start train\n");
1531 reg = DP | DP_LINK_TRAIN_PAT_1;
1532
1533
1534 for (;;) {
1535 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1536 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
1537 intel_dp->train_set[0],
1538 intel_dp->link_configuration[0],
1539 intel_dp->link_configuration[1]);
1540
1541 if (!cdv_intel_dp_set_link_train(encoder, reg, DP_TRAINING_PATTERN_1)) {
1542 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 1\n");
1543 }
1544 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
1545 /* Set training pattern 1 */
1546
1547 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_1);
1548
1549 udelay(200);
1550 if (!cdv_intel_dp_get_link_status(encoder))
1551 break;
1552
1553 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
1554 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
1555 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
1556
1557 if (cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1558 DRM_DEBUG_KMS("PT1 train is done\n");
1559 clock_recovery = true;
1560 break;
1561 }
1562
1563 /* Check to see if we've tried the max voltage */
1564 for (i = 0; i < intel_dp->lane_count; i++)
1565 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
1566 break;
1567 if (i == intel_dp->lane_count)
1568 break;
1569
1570 /* Check to see if we've tried the same voltage 5 times */
1571 if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
1572 ++tries;
1573 if (tries == 5)
1574 break;
1575 } else
1576 tries = 0;
1577 voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
1578
1579 /* Compute new intel_dp->train_set as requested by target */
1580 cdv_intel_get_adjust_train(encoder);
1581
1582 }
1583
1584 if (!clock_recovery) {
1585 DRM_DEBUG_KMS("failure in DP patter 1 training, train set %x\n", intel_dp->train_set[0]);
1586 }
1587
1588 intel_dp->DP = DP;
1589 }
1590
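/*
 * Channel equalization phase: transmit training pattern 2 and keep
 * adjusting drive levels until every lane reports CR, EQ and symbol lock;
 * if clock recovery is lost, or too many attempts fail, fall back to
 * restarting link training from pattern 1.
 */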
1591 static void
1592 cdv_intel_dp_complete_link_train(struct gma_encoder *encoder)
1593 {
1594 struct drm_device *dev = encoder->base.dev;
1595 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1596 bool channel_eq = false;
1597 int tries, cr_tries;
1598 u32 reg;
1599 uint32_t DP = intel_dp->DP;
1600
1601 /* channel equalization */
1602 tries = 0;
1603 cr_tries = 0;
1604 channel_eq = false;
1605
1606 DRM_DEBUG_KMS("\n");
1607 reg = DP | DP_LINK_TRAIN_PAT_2;
1608
1609 for (;;) {
1610
1611 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
1612 intel_dp->train_set[0],
1613 intel_dp->link_configuration[0],
1614 intel_dp->link_configuration[1]);
1615 /* channel eq pattern */
1616
1617 if (!cdv_intel_dp_set_link_train(encoder, reg,
1618 DP_TRAINING_PATTERN_2)) {
1619 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 2\n");
1620 }
1621 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1622
1623 if (cr_tries > 5) {
1624 DRM_ERROR("failed to train DP, aborting\n");
1625 cdv_intel_dp_link_down(encoder);
1626 break;
1627 }
1628
1629 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
1630
1631 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_2);
1632
1633 udelay(1000);
1634 if (!cdv_intel_dp_get_link_status(encoder))
1635 break;
1636
1637 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
1638 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
1639 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
1640
1641 /* Make sure clock is still ok */
1642 if (!cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1643 cdv_intel_dp_start_link_train(encoder);
1644 cr_tries++;
1645 continue;
1646 }
1647
1648 if (cdv_intel_channel_eq_ok(encoder)) {
1649 DRM_DEBUG_KMS("PT2 train is done\n");
1650 channel_eq = true;
1651 break;
1652 }
1653
1654 /* Try 5 times, then try clock recovery if that fails */
1655 if (tries > 5) {
1656 cdv_intel_dp_link_down(encoder);
1657 cdv_intel_dp_start_link_train(encoder);
1658 tries = 0;
1659 cr_tries++;
1660 continue;
1661 }
1662
1663 /* Compute new intel_dp->train_set as requested by target */
1664 cdv_intel_get_adjust_train(encoder);
1665 ++tries;
1666
1667 }
1668
1669 reg = DP | DP_LINK_TRAIN_OFF;
1670
1671 REG_WRITE(intel_dp->output_reg, reg);
1672 REG_READ(intel_dp->output_reg);
1673 cdv_intel_dp_aux_native_write_1(encoder,
1674 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
1675 }
1676
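/*
 * Turn the DP port off: drop to the idle training pattern, wait ~17ms
 * (about one 60Hz frame) for the pipe to settle, then clear DP_PORT_EN.
 */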
1677 static void
1678 cdv_intel_dp_link_down(struct gma_encoder *encoder)
1679 {
1680 struct drm_device *dev = encoder->base.dev;
1681 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1682 uint32_t DP = intel_dp->DP;
1683
1684 if ((REG_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
1685 return;
1686
1687 DRM_DEBUG_KMS("\n");
1688
1689
1690 {
1691 DP &= ~DP_LINK_TRAIN_MASK;
1692 REG_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
1693 }
1694 REG_READ(intel_dp->output_reg);
1695
1696 msleep(17);
1697
1698 REG_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
1699 REG_READ(intel_dp->output_reg);
1700 }
1701
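/*
 * Probe for a sink by reading the start of its DPCD over AUX; a successful
 * read with a non-zero DPCD revision is treated as "connected".
 */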
1702 static enum drm_connector_status cdv_dp_detect(struct gma_encoder *encoder)
1703 {
1704 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1705 enum drm_connector_status status;
1706
1707 status = connector_status_disconnected;
1708 if (cdv_intel_dp_aux_native_read(encoder, 0x000, intel_dp->dpcd,
1709 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd))
1710 {
1711 if (intel_dp->dpcd[DP_DPCD_REV] != 0)
1712 status = connector_status_connected;
1713 }
1714 if (status == connector_status_connected)
1715 DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
1716 intel_dp->dpcd[0], intel_dp->dpcd[1],
1717 intel_dp->dpcd[2], intel_dp->dpcd[3]);
1718 return status;
1719 }
1720
1721 /**
1722 * Detect whether a DP/eDP sink is present by reading its DPCD over the AUX channel.
1723 *
1724 * \return connector_status_connected if a sink responded with a valid DPCD.
1725 * \return connector_status_disconnected otherwise.
1726 */
1727 static enum drm_connector_status
1728 cdv_intel_dp_detect(struct drm_connector *connector, bool force)
1729 {
1730 struct gma_encoder *encoder = gma_attached_encoder(connector);
1731 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1732 enum drm_connector_status status;
1733 struct edid *edid = NULL;
1734 int edp = is_edp(encoder);
1735
1736 intel_dp->has_audio = false;
1737
1738 if (edp)
1739 cdv_intel_edp_panel_vdd_on(encoder);
1740 status = cdv_dp_detect(encoder);
1741 if (status != connector_status_connected) {
1742 if (edp)
1743 cdv_intel_edp_panel_vdd_off(encoder);
1744 return status;
1745 }
1746
1747 if (intel_dp->force_audio) {
1748 intel_dp->has_audio = intel_dp->force_audio > 0;
1749 } else {
1750 edid = drm_get_edid(connector, &intel_dp->adapter);
1751 if (edid) {
1752 intel_dp->has_audio = drm_detect_monitor_audio(edid);
1753 kfree(edid);
1754 }
1755 }
1756 if (edp)
1757 cdv_intel_edp_panel_vdd_off(encoder);
1758
1759 return connector_status_connected;
1760 }
1761
1762 static int cdv_intel_dp_get_modes(struct drm_connector *connector)
1763 {
1764 struct gma_encoder *intel_encoder = gma_attached_encoder(connector);
1765 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
1766 struct edid *edid = NULL;
1767 int ret = 0;
1768 int edp = is_edp(intel_encoder);
1769
1770
1771 edid = drm_get_edid(connector, &intel_dp->adapter);
1772 if (edid) {
1773 drm_mode_connector_update_edid_property(connector, edid);
1774 ret = drm_add_edid_modes(connector, edid);
1775 kfree(edid);
1776 }
1777
1778 if (is_edp(intel_encoder)) {
1779 struct drm_device *dev = connector->dev;
1780 struct drm_psb_private *dev_priv = dev->dev_private;
1781
1782 cdv_intel_edp_panel_vdd_off(intel_encoder);
1783 if (ret) {
1784 if (edp && !intel_dp->panel_fixed_mode) {
1785 struct drm_display_mode *newmode;
1786 list_for_each_entry(newmode, &connector->probed_modes,
1787 head) {
1788 if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
1789 intel_dp->panel_fixed_mode =
1790 drm_mode_duplicate(dev, newmode);
1791 break;
1792 }
1793 }
1794 }
1795
1796 return ret;
1797 }
1798 if (!intel_dp->panel_fixed_mode && dev_priv->lfp_lvds_vbt_mode) {
1799 intel_dp->panel_fixed_mode =
1800 drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
1801 if (intel_dp->panel_fixed_mode) {
1802 intel_dp->panel_fixed_mode->type |=
1803 DRM_MODE_TYPE_PREFERRED;
1804 }
1805 }
1806 if (intel_dp->panel_fixed_mode != NULL) {
1807 struct drm_display_mode *mode;
1808 mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
1809 drm_mode_probed_add(connector, mode);
1810 return 1;
1811 }
1812 }
1813
1814 return ret;
1815 }
1816
1817 static bool
1818 cdv_intel_dp_detect_audio(struct drm_connector *connector)
1819 {
1820 struct gma_encoder *encoder = gma_attached_encoder(connector);
1821 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1822 struct edid *edid;
1823 bool has_audio = false;
1824 int edp = is_edp(encoder);
1825
1826 if (edp)
1827 cdv_intel_edp_panel_vdd_on(encoder);
1828
1829 edid = drm_get_edid(connector, &intel_dp->adapter);
1830 if (edid) {
1831 has_audio = drm_detect_monitor_audio(edid);
1832 kfree(edid);
1833 }
1834 if (edp)
1835 cdv_intel_edp_panel_vdd_off(encoder);
1836
1837 return has_audio;
1838 }
1839
1840 static int
1841 cdv_intel_dp_set_property(struct drm_connector *connector,
1842 struct drm_property *property,
1843 uint64_t val)
1844 {
1845 struct drm_psb_private *dev_priv = connector->dev->dev_private;
1846 struct gma_encoder *encoder = gma_attached_encoder(connector);
1847 struct cdv_intel_dp *intel_dp = encoder->dev_priv;
1848 int ret;
1849
1850 ret = drm_object_property_set_value(&connector->base, property, val);
1851 if (ret)
1852 return ret;
1853
1854 if (property == dev_priv->force_audio_property) {
1855 int i = val;
1856 bool has_audio;
1857
1858 if (i == intel_dp->force_audio)
1859 return 0;
1860
1861 intel_dp->force_audio = i;
1862
1863 if (i == 0)
1864 has_audio = cdv_intel_dp_detect_audio(connector);
1865 else
1866 has_audio = i > 0;
1867
1868 if (has_audio == intel_dp->has_audio)
1869 return 0;
1870
1871 intel_dp->has_audio = has_audio;
1872 goto done;
1873 }
1874
1875 if (property == dev_priv->broadcast_rgb_property) {
1876 if (val == !!intel_dp->color_range)
1877 return 0;
1878
1879 intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
1880 goto done;
1881 }
1882
1883 return -EINVAL;
1884
1885 done:
1886 if (encoder->base.crtc) {
1887 struct drm_crtc *crtc = encoder->base.crtc;
1888 drm_crtc_helper_set_mode(crtc, &crtc->mode,
1889 crtc->x, crtc->y,
1890 crtc->primary->fb);
1891 }
1892
1893 return 0;
1894 }
1895
1896 static void
1897 cdv_intel_dp_destroy(struct drm_connector *connector)
1898 {
1899 struct gma_encoder *gma_encoder = gma_attached_encoder(connector);
1900 struct cdv_intel_dp *intel_dp = gma_encoder->dev_priv;
1901
1902 if (is_edp(gma_encoder)) {
1903 /* cdv_intel_panel_destroy_backlight(connector->dev); */
1904 if (intel_dp->panel_fixed_mode) {
1905 kfree(intel_dp->panel_fixed_mode);
1906 intel_dp->panel_fixed_mode = NULL;
1907 }
1908 }
1909 i2c_del_adapter(&intel_dp->adapter);
1910 drm_connector_unregister(connector);
1911 drm_connector_cleanup(connector);
1912 kfree(connector);
1913 }
1914
1915 static void cdv_intel_dp_encoder_destroy(struct drm_encoder *encoder)
1916 {
1917 drm_encoder_cleanup(encoder);
1918 }
1919
1920 static const struct drm_encoder_helper_funcs cdv_intel_dp_helper_funcs = {
1921 .dpms = cdv_intel_dp_dpms,
1922 .mode_fixup = cdv_intel_dp_mode_fixup,
1923 .prepare = cdv_intel_dp_prepare,
1924 .mode_set = cdv_intel_dp_mode_set,
1925 .commit = cdv_intel_dp_commit,
1926 };
1927
1928 static const struct drm_connector_funcs cdv_intel_dp_connector_funcs = {
1929 .dpms = drm_helper_connector_dpms,
1930 .detect = cdv_intel_dp_detect,
1931 .fill_modes = drm_helper_probe_single_connector_modes,
1932 .set_property = cdv_intel_dp_set_property,
1933 .destroy = cdv_intel_dp_destroy,
1934 };
1935
1936 static const struct drm_connector_helper_funcs cdv_intel_dp_connector_helper_funcs = {
1937 .get_modes = cdv_intel_dp_get_modes,
1938 .mode_valid = cdv_intel_dp_mode_valid,
1939 .best_encoder = gma_best_encoder,
1940 };
1941
1942 static const struct drm_encoder_funcs cdv_intel_dp_enc_funcs = {
1943 .destroy = cdv_intel_dp_encoder_destroy,
1944 };
1945
1946
1947 static void cdv_intel_dp_add_properties(struct drm_connector *connector)
1948 {
1949 cdv_intel_attach_force_audio_property(connector);
1950 cdv_intel_attach_broadcast_rgb_property(connector);
1951 }
1952
1953 /* check the VBT to see whether the eDP is on the DP-C port */
1954 static bool cdv_intel_dpc_is_edp(struct drm_device *dev)
1955 {
1956 struct drm_psb_private *dev_priv = dev->dev_private;
1957 struct child_device_config *p_child;
1958 int i;
1959
1960 if (!dev_priv->child_dev_num)
1961 return false;
1962
1963 for (i = 0; i < dev_priv->child_dev_num; i++) {
1964 p_child = dev_priv->child_dev + i;
1965
1966 if (p_child->dvo_port == PORT_IDPC &&
1967 p_child->device_type == DEVICE_TYPE_eDP)
1968 return true;
1969 }
1970 return false;
1971 }
1972
1973 /* Cedarview display clock gating
1974 
1975 We need this disabled to get correct behaviour while enabling
1976 DP/eDP. TODO - investigate if we can turn it back to normality
1977 after enabling */
1978 static void cdv_disable_intel_clock_gating(struct drm_device *dev)
1979 {
1980 u32 reg_value;
1981 reg_value = REG_READ(DSPCLK_GATE_D);
1982
1983 reg_value |= (DPUNIT_PIPEB_GATE_DISABLE |
1984 DPUNIT_PIPEA_GATE_DISABLE |
1985 DPCUNIT_CLOCK_GATE_DISABLE |
1986 DPLSUNIT_CLOCK_GATE_DISABLE |
1987 DPOUNIT_CLOCK_GATE_DISABLE |
1988 DPIOUNIT_CLOCK_GATE_DISABLE);
1989
1990 REG_WRITE(DSPCLK_GATE_D, reg_value);
1991
1992 udelay(500);
1993 }
1994
1995 void
1996 cdv_intel_dp_init(struct drm_device *dev, struct psb_intel_mode_device *mode_dev, int output_reg)
1997 {
1998 struct gma_encoder *gma_encoder;
1999 struct gma_connector *gma_connector;
2000 struct drm_connector *connector;
2001 struct drm_encoder *encoder;
2002 struct cdv_intel_dp *intel_dp;
2003 const char *name = NULL;
2004 int type = DRM_MODE_CONNECTOR_DisplayPort;
2005
2006 gma_encoder = kzalloc(sizeof(struct gma_encoder), GFP_KERNEL);
2007 if (!gma_encoder)
2008 return;
2009 gma_connector = kzalloc(sizeof(struct gma_connector), GFP_KERNEL);
2010 if (!gma_connector)
2011 goto err_connector;
2012 intel_dp = kzalloc(sizeof(struct cdv_intel_dp), GFP_KERNEL);
2013 if (!intel_dp)
2014 goto err_priv;
2015
2016 if ((output_reg == DP_C) && cdv_intel_dpc_is_edp(dev))
2017 type = DRM_MODE_CONNECTOR_eDP;
2018
2019 connector = &gma_connector->base;
2020 encoder = &gma_encoder->base;
2021
2022 drm_connector_init(dev, connector, &cdv_intel_dp_connector_funcs, type);
2023 drm_encoder_init(dev, encoder, &cdv_intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS);
2024
2025 gma_connector_attach_encoder(gma_connector, gma_encoder);
2026
2027 if (type == DRM_MODE_CONNECTOR_DisplayPort)
2028 gma_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
2029 else
2030 gma_encoder->type = INTEL_OUTPUT_EDP;
2031
2032
2033 gma_encoder->dev_priv=intel_dp;
2034 intel_dp->encoder = gma_encoder;
2035 intel_dp->output_reg = output_reg;
2036
2037 drm_encoder_helper_add(encoder, &cdv_intel_dp_helper_funcs);
2038 drm_connector_helper_add(connector, &cdv_intel_dp_connector_helper_funcs);
2039
2040 connector->polled = DRM_CONNECTOR_POLL_HPD;
2041 connector->interlace_allowed = false;
2042 connector->doublescan_allowed = false;
2043
2044 drm_connector_register(connector);
2045
2046 /* Set up the DDC bus. */
2047 switch (output_reg) {
2048 case DP_B:
2049 name = "DPDDC-B";
2050 gma_encoder->ddi_select = (DP_MASK | DDI0_SELECT);
2051 break;
2052 case DP_C:
2053 name = "DPDDC-C";
2054 gma_encoder->ddi_select = (DP_MASK | DDI1_SELECT);
2055 break;
2056 }
2057
2058 cdv_disable_intel_clock_gating(dev);
2059
2060 cdv_intel_dp_i2c_init(gma_connector, gma_encoder, name);
2061 /* FIXME:fail check */
2062 cdv_intel_dp_add_properties(connector);
2063
2064 if (is_edp(gma_encoder)) {
2065 int ret;
2066 struct edp_power_seq cur;
2067 u32 pp_on, pp_off, pp_div;
2068 u32 pwm_ctrl;
2069
2070 pp_on = REG_READ(PP_CONTROL);
2071 pp_on &= ~PANEL_UNLOCK_MASK;
2072 pp_on |= PANEL_UNLOCK_REGS;
2073
2074 REG_WRITE(PP_CONTROL, pp_on);
2075
2076 pwm_ctrl = REG_READ(BLC_PWM_CTL2);
2077 pwm_ctrl |= PWM_PIPE_B;
2078 REG_WRITE(BLC_PWM_CTL2, pwm_ctrl);
2079
2080 pp_on = REG_READ(PP_ON_DELAYS);
2081 pp_off = REG_READ(PP_OFF_DELAYS);
2082 pp_div = REG_READ(PP_DIVISOR);
2083
2084 /* Pull timing values out of registers */
2085 cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
2086 PANEL_POWER_UP_DELAY_SHIFT;
2087
2088 cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
2089 PANEL_LIGHT_ON_DELAY_SHIFT;
2090
2091 cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
2092 PANEL_LIGHT_OFF_DELAY_SHIFT;
2093
2094 cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
2095 PANEL_POWER_DOWN_DELAY_SHIFT;
2096
2097 cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
2098 PANEL_POWER_CYCLE_DELAY_SHIFT);
2099
2100 DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
2101 cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);
2102
2103
2104 intel_dp->panel_power_up_delay = cur.t1_t3 / 10;
2105 intel_dp->backlight_on_delay = cur.t8 / 10;
2106 intel_dp->backlight_off_delay = cur.t9 / 10;
2107 intel_dp->panel_power_down_delay = cur.t10 / 10;
2108 intel_dp->panel_power_cycle_delay = (cur.t11_t12 - 1) * 100;
2109
2110 DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
2111 intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
2112 intel_dp->panel_power_cycle_delay);
2113
2114 DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
2115 intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
2116
2117
2118 cdv_intel_edp_panel_vdd_on(gma_encoder);
2119 ret = cdv_intel_dp_aux_native_read(gma_encoder, DP_DPCD_REV,
2120 intel_dp->dpcd,
2121 sizeof(intel_dp->dpcd));
2122 cdv_intel_edp_panel_vdd_off(gma_encoder);
2123 if (ret == 0) {
2124 /* if this fails, presume the device is a ghost */
2125 DRM_INFO("failed to retrieve link info, disabling eDP\n");
2126 cdv_intel_dp_encoder_destroy(encoder);
2127 cdv_intel_dp_destroy(connector);
2128 goto err_priv;
2129 } else {
2130 DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
2131 intel_dp->dpcd[0], intel_dp->dpcd[1],
2132 intel_dp->dpcd[2], intel_dp->dpcd[3]);
2133
2134 }
2135 /* The CDV reference driver moves panel backlight setup into the displays that
2136 have a backlight: this is a good idea and one we should probably adopt; however,
2137 we need to migrate all the drivers before we can do that */
2138 /*cdv_intel_panel_setup_backlight(dev); */
2139 }
2140 return;
2141
2142 err_priv:
2143 kfree(gma_connector);
2144 err_connector:
2145 kfree(gma_encoder);
2146 }