drivers/gpu/drm/i915/intel_dp.c
1 /*
2 * Copyright © 2008 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 * Keith Packard <keithp@keithp.com>
25 *
26 */
27
28 #include <linux/i2c.h>
29 #include <linux/slab.h>
30 #include "drmP.h"
31 #include "drm.h"
32 #include "drm_crtc.h"
33 #include "drm_crtc_helper.h"
34 #include "intel_drv.h"
35 #include "i915_drm.h"
36 #include "i915_drv.h"
37 #include "drm_dp_helper.h"
38
39
40 #define DP_LINK_STATUS_SIZE 6
41 #define DP_LINK_CHECK_TIMEOUT (10 * 1000)
42
43 #define DP_LINK_CONFIGURATION_SIZE 9
44
45 struct intel_dp {
46 struct intel_encoder base;
47 uint32_t output_reg;
48 uint32_t DP;
49 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
50 bool has_audio;
51 int force_audio;
52 uint32_t color_range;
53 int dpms_mode;
54 uint8_t link_bw;
55 uint8_t lane_count;
56 uint8_t dpcd[4];
57 struct i2c_adapter adapter;
58 struct i2c_algo_dp_aux_data algo;
59 bool is_pch_edp;
60 uint8_t train_set[4];
61 uint8_t link_status[DP_LINK_STATUS_SIZE];
62
63 struct drm_property *force_audio_property;
64 };
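/*
 * dpcd[] caches the first four receiver capability bytes read over AUX
 * from DPCD address 0x000, as used below: dpcd[0] is the DPCD revision
 * (0x11 == 1.1), dpcd[1] the max link rate, dpcd[2] the max lane count
 * in bits 4:0 plus the DP_ENHANCED_FRAME_CAP bit, and dpcd[3] is checked
 * for DP_NO_AUX_HANDSHAKE_LINK_TRAINING.
 */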
65
66 /**
67 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
68 * @intel_dp: DP struct
69 *
70 * Returns true if a CPU or PCH DP output is attached to an eDP panel,
71 * false otherwise.
72 */
73 static bool is_edp(struct intel_dp *intel_dp)
74 {
75 return intel_dp->base.type == INTEL_OUTPUT_EDP;
76 }
77
78 /**
79 * is_pch_edp - is the port on the PCH and attached to an eDP panel?
80 * @intel_dp: DP struct
81 *
82 * Returns true if the given DP struct corresponds to a PCH DP port attached
83 * to an eDP panel, false otherwise. Helpful for determining whether we
84 * may need FDI resources for a given DP output or not.
85 */
86 static bool is_pch_edp(struct intel_dp *intel_dp)
87 {
88 return intel_dp->is_pch_edp;
89 }
90
91 static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
92 {
93 return container_of(encoder, struct intel_dp, base.base);
94 }
95
96 static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
97 {
98 return container_of(intel_attached_encoder(connector),
99 struct intel_dp, base);
100 }
101
102 /**
103 * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP?
104 * @encoder: DRM encoder
105 *
106 * Return true if @encoder corresponds to a PCH attached eDP panel. Needed
107 * by intel_display.c.
108 */
109 bool intel_encoder_is_pch_edp(struct drm_encoder *encoder)
110 {
111 struct intel_dp *intel_dp;
112
113 if (!encoder)
114 return false;
115
116 intel_dp = enc_to_intel_dp(encoder);
117
118 return is_pch_edp(intel_dp);
119 }
120
121 static void intel_dp_start_link_train(struct intel_dp *intel_dp);
122 static void intel_dp_complete_link_train(struct intel_dp *intel_dp);
123 static void intel_dp_link_down(struct intel_dp *intel_dp);
124
125 void
126 intel_edp_link_config (struct intel_encoder *intel_encoder,
127 int *lane_num, int *link_bw)
128 {
129 struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
130
131 *lane_num = intel_dp->lane_count;
132 if (intel_dp->link_bw == DP_LINK_BW_1_62)
133 *link_bw = 162000;
134 else if (intel_dp->link_bw == DP_LINK_BW_2_7)
135 *link_bw = 270000;
136 }
137
138 static int
139 intel_dp_max_lane_count(struct intel_dp *intel_dp)
140 {
141 int max_lane_count = 4;
142
143 if (intel_dp->dpcd[0] >= 0x11) {
144 max_lane_count = intel_dp->dpcd[2] & 0x1f;
145 switch (max_lane_count) {
146 case 1: case 2: case 4:
147 break;
148 default:
149 max_lane_count = 4;
150 }
151 }
152 return max_lane_count;
153 }
154
155 static int
156 intel_dp_max_link_bw(struct intel_dp *intel_dp)
157 {
158 int max_link_bw = intel_dp->dpcd[1];
159
160 switch (max_link_bw) {
161 case DP_LINK_BW_1_62:
162 case DP_LINK_BW_2_7:
163 break;
164 default:
165 max_link_bw = DP_LINK_BW_1_62;
166 break;
167 }
168 return max_link_bw;
169 }
170
171 static int
172 intel_dp_link_clock(uint8_t link_bw)
173 {
174 if (link_bw == DP_LINK_BW_2_7)
175 return 270000;
176 else
177 return 162000;
178 }
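/*
 * Per the DP spec the raw DPCD value is the per-lane link rate in units
 * of 0.27 Gbps, so the two rates handled above are DP_LINK_BW_1_62 (0x06)
 * and DP_LINK_BW_2_7 (0x0a); the driver works with the corresponding link
 * symbol clock in kHz, i.e. 162000 and 270000.
 */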
179
180 /* I think this is a fiction */
181 static int
182 intel_dp_link_required(struct drm_device *dev, struct intel_dp *intel_dp, int pixel_clock)
183 {
184 struct drm_i915_private *dev_priv = dev->dev_private;
185
186 if (is_edp(intel_dp))
187 return (pixel_clock * dev_priv->edp.bpp + 7) / 8;
188 else
189 return pixel_clock * 3;
190 }
191
192 static int
193 intel_dp_max_data_rate(int max_link_clock, int max_lanes)
194 {
195 return (max_link_clock * max_lanes * 8) / 10;
196 }
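/*
 * Rough worked example: a 1920x1200@60 mode has a pixel clock of about
 * 154000 kHz, so intel_dp_link_required() asks for 154000 * 3 = 462000
 * on a non-eDP link, while a 4-lane 2.7 Gbps link offers
 * intel_dp_max_data_rate(270000, 4) = 270000 * 4 * 8 / 10 = 864000 after
 * the 8b/10b coding overhead, so the mode fits.
 */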
197
198 static int
199 intel_dp_mode_valid(struct drm_connector *connector,
200 struct drm_display_mode *mode)
201 {
202 struct intel_dp *intel_dp = intel_attached_dp(connector);
203 struct drm_device *dev = connector->dev;
204 struct drm_i915_private *dev_priv = dev->dev_private;
205 int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
206 int max_lanes = intel_dp_max_lane_count(intel_dp);
207
208 if (is_edp(intel_dp) && dev_priv->panel_fixed_mode) {
209 if (mode->hdisplay > dev_priv->panel_fixed_mode->hdisplay)
210 return MODE_PANEL;
211
212 if (mode->vdisplay > dev_priv->panel_fixed_mode->vdisplay)
213 return MODE_PANEL;
214 }
215
216 /* only refuse the mode on non-eDP since we have seen some weird eDP panels
217 which are outside spec tolerances but somehow work by magic */
218 if (!is_edp(intel_dp) &&
219 (intel_dp_link_required(connector->dev, intel_dp, mode->clock)
220 > intel_dp_max_data_rate(max_link_clock, max_lanes)))
221 return MODE_CLOCK_HIGH;
222
223 if (mode->clock < 10000)
224 return MODE_CLOCK_LOW;
225
226 return MODE_OK;
227 }
228
229 static uint32_t
230 pack_aux(uint8_t *src, int src_bytes)
231 {
232 int i;
233 uint32_t v = 0;
234
235 if (src_bytes > 4)
236 src_bytes = 4;
237 for (i = 0; i < src_bytes; i++)
238 v |= ((uint32_t) src[i]) << ((3-i) * 8);
239 return v;
240 }
241
242 static void
243 unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
244 {
245 int i;
246 if (dst_bytes > 4)
247 dst_bytes = 4;
248 for (i = 0; i < dst_bytes; i++)
249 dst[i] = src >> ((3-i) * 8);
250 }
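/*
 * The AUX channel data registers hold the message most-significant byte
 * first: packing the bytes { 0x10, 0x02, 0x01 } with pack_aux() yields
 * 0x10020100, and unpack_aux(0x10020100, dst, 3) recovers the same three
 * bytes.
 */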
251
252 /* hrawclock is 1/4 the FSB frequency */
253 static int
254 intel_hrawclk(struct drm_device *dev)
255 {
256 struct drm_i915_private *dev_priv = dev->dev_private;
257 uint32_t clkcfg;
258
259 clkcfg = I915_READ(CLKCFG);
260 switch (clkcfg & CLKCFG_FSB_MASK) {
261 case CLKCFG_FSB_400:
262 return 100;
263 case CLKCFG_FSB_533:
264 return 133;
265 case CLKCFG_FSB_667:
266 return 166;
267 case CLKCFG_FSB_800:
268 return 200;
269 case CLKCFG_FSB_1067:
270 return 266;
271 case CLKCFG_FSB_1333:
272 return 333;
273 /* these two are just a guess; one of them might be right */
274 case CLKCFG_FSB_1600:
275 case CLKCFG_FSB_1600_ALT:
276 return 400;
277 default:
278 return 133;
279 }
280 }
281
282 static int
283 intel_dp_aux_ch(struct intel_dp *intel_dp,
284 uint8_t *send, int send_bytes,
285 uint8_t *recv, int recv_size)
286 {
287 uint32_t output_reg = intel_dp->output_reg;
288 struct drm_device *dev = intel_dp->base.base.dev;
289 struct drm_i915_private *dev_priv = dev->dev_private;
290 uint32_t ch_ctl = output_reg + 0x10;
291 uint32_t ch_data = ch_ctl + 4;
292 int i;
293 int recv_bytes;
294 uint32_t status;
295 uint32_t aux_clock_divider;
296 int try, precharge;
297
298 /* The AUX clock divider is based on hrawclk, and the AUX channel
299 * wants to run at roughly 2MHz, so take the hrawclk value (in MHz)
300 * and divide by 2: e.g. a 200MHz hrawclk gives a divider of 100.
301 *
302 * Note that PCH attached eDP panels should use a 125MHz input
303 * clock divider.
304 */
305 if (is_edp(intel_dp) && !is_pch_edp(intel_dp)) {
306 if (IS_GEN6(dev))
307 aux_clock_divider = 200; /* SNB eDP input clock at 400MHz */
308 else
309 aux_clock_divider = 225; /* eDP input clock at 450MHz */
310 } else if (HAS_PCH_SPLIT(dev))
311 aux_clock_divider = 62; /* ILK input clock fixed at 125MHz */
312 else
313 aux_clock_divider = intel_hrawclk(dev) / 2;
314
315 if (IS_GEN6(dev))
316 precharge = 3;
317 else
318 precharge = 5;
319
320 if (I915_READ(ch_ctl) & DP_AUX_CH_CTL_SEND_BUSY) {
321 DRM_ERROR("dp_aux_ch not started status 0x%08x\n",
322 I915_READ(ch_ctl));
323 return -EBUSY;
324 }
325
326 /* Must try at least 3 times according to DP spec */
327 for (try = 0; try < 5; try++) {
328 /* Load the send data into the aux channel data registers */
329 for (i = 0; i < send_bytes; i += 4)
330 I915_WRITE(ch_data + i,
331 pack_aux(send + i, send_bytes - i));
332
333 /* Send the command and wait for it to complete */
334 I915_WRITE(ch_ctl,
335 DP_AUX_CH_CTL_SEND_BUSY |
336 DP_AUX_CH_CTL_TIME_OUT_400us |
337 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
338 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
339 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
340 DP_AUX_CH_CTL_DONE |
341 DP_AUX_CH_CTL_TIME_OUT_ERROR |
342 DP_AUX_CH_CTL_RECEIVE_ERROR);
343 for (;;) {
344 status = I915_READ(ch_ctl);
345 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
346 break;
347 udelay(100);
348 }
349
350 /* Clear done status and any errors */
351 I915_WRITE(ch_ctl,
352 status |
353 DP_AUX_CH_CTL_DONE |
354 DP_AUX_CH_CTL_TIME_OUT_ERROR |
355 DP_AUX_CH_CTL_RECEIVE_ERROR);
356 if (status & DP_AUX_CH_CTL_DONE)
357 break;
358 }
359
360 if ((status & DP_AUX_CH_CTL_DONE) == 0) {
361 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
362 return -EBUSY;
363 }
364
365 /* Check for timeout or receive error.
366 * Timeouts occur when the sink is not connected
367 */
368 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
369 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
370 return -EIO;
371 }
372
373 /* Timeouts occur when the device isn't connected, so they're
374 * "normal" -- don't fill the kernel log with these */
375 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
376 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
377 return -ETIMEDOUT;
378 }
379
380 /* Unload any bytes sent back from the other side */
381 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
382 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
383 if (recv_bytes > recv_size)
384 recv_bytes = recv_size;
385
386 for (i = 0; i < recv_bytes; i += 4)
387 unpack_aux(I915_READ(ch_data + i),
388 recv + i, recv_bytes - i);
389
390 return recv_bytes;
391 }
392
393 /* Write data to the aux channel in native mode */
394 static int
395 intel_dp_aux_native_write(struct intel_dp *intel_dp,
396 uint16_t address, uint8_t *send, int send_bytes)
397 {
398 int ret;
399 uint8_t msg[20];
400 int msg_bytes;
401 uint8_t ack;
402
403 if (send_bytes > 16)
404 return -1;
405 msg[0] = AUX_NATIVE_WRITE << 4;
406 msg[1] = address >> 8;
407 msg[2] = address & 0xff;
408 msg[3] = send_bytes - 1;
409 memcpy(&msg[4], send, send_bytes);
410 msg_bytes = send_bytes + 4;
411 for (;;) {
412 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
413 if (ret < 0)
414 return ret;
415 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
416 break;
417 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
418 udelay(100);
419 else
420 return -EIO;
421 }
422 return send_bytes;
423 }
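/*
 * Illustrative message layout (assuming the DP spec's native request
 * encoding, where AUX_NATIVE_WRITE is 0x8): writing the single byte 0x01
 * to DP_TRAINING_PATTERN_SET (DPCD 0x102) builds
 *
 *	msg[] = { 0x80, 0x01, 0x02, 0x00, 0x01 }
 *
 * i.e. command << 4, address high byte, address low byte, length - 1,
 * then the payload.
 */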
424
425 /* Write a single byte to the aux channel in native mode */
426 static int
427 intel_dp_aux_native_write_1(struct intel_dp *intel_dp,
428 uint16_t address, uint8_t byte)
429 {
430 return intel_dp_aux_native_write(intel_dp, address, &byte, 1);
431 }
432
433 /* read bytes from a native aux channel */
434 static int
435 intel_dp_aux_native_read(struct intel_dp *intel_dp,
436 uint16_t address, uint8_t *recv, int recv_bytes)
437 {
438 uint8_t msg[4];
439 int msg_bytes;
440 uint8_t reply[20];
441 int reply_bytes;
442 uint8_t ack;
443 int ret;
444
445 msg[0] = AUX_NATIVE_READ << 4;
446 msg[1] = address >> 8;
447 msg[2] = address & 0xff;
448 msg[3] = recv_bytes - 1;
449
450 msg_bytes = 4;
451 reply_bytes = recv_bytes + 1;
452
453 for (;;) {
454 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes,
455 reply, reply_bytes);
456 if (ret == 0)
457 return -EPROTO;
458 if (ret < 0)
459 return ret;
460 ack = reply[0];
461 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) {
462 memcpy(recv, reply + 1, ret - 1);
463 return ret - 1;
464 }
465 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
466 udelay(100);
467 else
468 return -EIO;
469 }
470 }
471
472 static int
473 intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
474 uint8_t write_byte, uint8_t *read_byte)
475 {
476 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
477 struct intel_dp *intel_dp = container_of(adapter,
478 struct intel_dp,
479 adapter);
480 uint16_t address = algo_data->address;
481 uint8_t msg[5];
482 uint8_t reply[2];
483 unsigned retry;
484 int msg_bytes;
485 int reply_bytes;
486 int ret;
487
488 /* Set up the command byte */
489 if (mode & MODE_I2C_READ)
490 msg[0] = AUX_I2C_READ << 4;
491 else
492 msg[0] = AUX_I2C_WRITE << 4;
493
494 if (!(mode & MODE_I2C_STOP))
495 msg[0] |= AUX_I2C_MOT << 4;
496
497 msg[1] = address >> 8;
498 msg[2] = address;
499
500 switch (mode) {
501 case MODE_I2C_WRITE:
502 msg[3] = 0;
503 msg[4] = write_byte;
504 msg_bytes = 5;
505 reply_bytes = 1;
506 break;
507 case MODE_I2C_READ:
508 msg[3] = 0;
509 msg_bytes = 4;
510 reply_bytes = 2;
511 break;
512 default:
513 msg_bytes = 3;
514 reply_bytes = 1;
515 break;
516 }
517
518 for (retry = 0; retry < 5; retry++) {
519 ret = intel_dp_aux_ch(intel_dp,
520 msg, msg_bytes,
521 reply, reply_bytes);
522 if (ret < 0) {
523 DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
524 return ret;
525 }
526
527 switch (reply[0] & AUX_NATIVE_REPLY_MASK) {
528 case AUX_NATIVE_REPLY_ACK:
529 /* I2C-over-AUX Reply field is only valid
530 * when paired with AUX ACK.
531 */
532 break;
533 case AUX_NATIVE_REPLY_NACK:
534 DRM_DEBUG_KMS("aux_ch native nack\n");
535 return -EREMOTEIO;
536 case AUX_NATIVE_REPLY_DEFER:
537 udelay(100);
538 continue;
539 default:
540 DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
541 reply[0]);
542 return -EREMOTEIO;
543 }
544
545 switch (reply[0] & AUX_I2C_REPLY_MASK) {
546 case AUX_I2C_REPLY_ACK:
547 if (mode == MODE_I2C_READ) {
548 *read_byte = reply[1];
549 }
550 return reply_bytes - 1;
551 case AUX_I2C_REPLY_NACK:
552 DRM_DEBUG_KMS("aux_i2c nack\n");
553 return -EREMOTEIO;
554 case AUX_I2C_REPLY_DEFER:
555 DRM_DEBUG_KMS("aux_i2c defer\n");
556 udelay(100);
557 break;
558 default:
559 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
560 return -EREMOTEIO;
561 }
562 }
563
564 DRM_ERROR("too many retries, giving up\n");
565 return -EREMOTEIO;
566 }
567
568 static int
569 intel_dp_i2c_init(struct intel_dp *intel_dp,
570 struct intel_connector *intel_connector, const char *name)
571 {
572 DRM_DEBUG_KMS("i2c_init %s\n", name);
573 intel_dp->algo.running = false;
574 intel_dp->algo.address = 0;
575 intel_dp->algo.aux_ch = intel_dp_i2c_aux_ch;
576
577 memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter));
578 intel_dp->adapter.owner = THIS_MODULE;
579 intel_dp->adapter.class = I2C_CLASS_DDC;
580 strncpy (intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
581 intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
582 intel_dp->adapter.algo_data = &intel_dp->algo;
583 intel_dp->adapter.dev.parent = &intel_connector->base.kdev;
584
585 return i2c_dp_aux_add_bus(&intel_dp->adapter);
586 }
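/*
 * Once the adapter is registered, generic i2c users work unmodified; the
 * EDID fetches later in this file, for example, simply call
 *
 *	edid = drm_get_edid(connector, &intel_dp->adapter);
 *
 * and the i2c-over-AUX algorithm turns that into a series of
 * intel_dp_i2c_aux_ch() calls, one byte per AUX transaction, with the
 * MOT bit kept set until the closing STOP.
 */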
587
588 static bool
589 intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
590 struct drm_display_mode *adjusted_mode)
591 {
592 struct drm_device *dev = encoder->dev;
593 struct drm_i915_private *dev_priv = dev->dev_private;
594 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
595 int lane_count, clock;
596 int max_lane_count = intel_dp_max_lane_count(intel_dp);
597 int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
598 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
599
600 if (is_edp(intel_dp) && dev_priv->panel_fixed_mode) {
601 intel_fixed_panel_mode(dev_priv->panel_fixed_mode, adjusted_mode);
602 intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN,
603 mode, adjusted_mode);
604 /*
605 * mode->clock is used to calculate the Data & Link M/N values
606 * for the pipe. For eDP the fixed panel clock should be used.
607 */
608 mode->clock = dev_priv->panel_fixed_mode->clock;
609 }
610
611 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
612 for (clock = 0; clock <= max_clock; clock++) {
613 int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count);
614
615 if (intel_dp_link_required(encoder->dev, intel_dp, mode->clock)
616 <= link_avail) {
617 intel_dp->link_bw = bws[clock];
618 intel_dp->lane_count = lane_count;
619 adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
620 DRM_DEBUG_KMS("Display port link bw %02x lane "
621 "count %d clock %d\n",
622 intel_dp->link_bw, intel_dp->lane_count,
623 adjusted_mode->clock);
624 return true;
625 }
626 }
627 }
628
629 if (is_edp(intel_dp)) {
630 /* okay, we failed; just pick the highest settings */
631 intel_dp->lane_count = max_lane_count;
632 intel_dp->link_bw = bws[max_clock];
633 adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
634 DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
635 "count %d clock %d\n",
636 intel_dp->link_bw, intel_dp->lane_count,
637 adjusted_mode->clock);
638
639 return true;
640 }
641
642 return false;
643 }
644
645 struct intel_dp_m_n {
646 uint32_t tu;
647 uint32_t gmch_m;
648 uint32_t gmch_n;
649 uint32_t link_m;
650 uint32_t link_n;
651 };
652
653 static void
654 intel_reduce_ratio(uint32_t *num, uint32_t *den)
655 {
656 while (*num > 0xffffff || *den > 0xffffff) {
657 *num >>= 1;
658 *den >>= 1;
659 }
660 }
661
662 static void
663 intel_dp_compute_m_n(int bpp,
664 int nlanes,
665 int pixel_clock,
666 int link_clock,
667 struct intel_dp_m_n *m_n)
668 {
669 m_n->tu = 64;
670 m_n->gmch_m = (pixel_clock * bpp) >> 3;
671 m_n->gmch_n = link_clock * nlanes;
672 intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
673 m_n->link_m = pixel_clock;
674 m_n->link_n = link_clock;
675 intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
676 }
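/*
 * Worked example (values only for illustration): with bpp = 24, 4 lanes,
 * a 154000 kHz pixel clock and a 270000 kHz link clock this produces
 * gmch_m = 154000 * 24 / 8 = 462000, gmch_n = 270000 * 4 = 1080000,
 * link_m = 154000 and link_n = 270000; all fit in 24 bits, so
 * intel_reduce_ratio() leaves them untouched.
 */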
677
678 void
679 intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
680 struct drm_display_mode *adjusted_mode)
681 {
682 struct drm_device *dev = crtc->dev;
683 struct drm_mode_config *mode_config = &dev->mode_config;
684 struct drm_encoder *encoder;
685 struct drm_i915_private *dev_priv = dev->dev_private;
686 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
687 int lane_count = 4, bpp = 24;
688 struct intel_dp_m_n m_n;
689 int pipe = intel_crtc->pipe;
690
691 /*
692 * Find the lane count in the intel_encoder private
693 */
694 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
695 struct intel_dp *intel_dp;
696
697 if (encoder->crtc != crtc)
698 continue;
699
700 intel_dp = enc_to_intel_dp(encoder);
701 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT) {
702 lane_count = intel_dp->lane_count;
703 break;
704 } else if (is_edp(intel_dp)) {
705 lane_count = dev_priv->edp.lanes;
706 bpp = dev_priv->edp.bpp;
707 break;
708 }
709 }
710
711 /*
712 * Compute the GMCH and Link ratios. The default bpp of 24 reflects
713 * the number of bytes per pixel post-LUT, which we always set up
714 * for 8 bits each of R/G/B, or 3 bytes total.
715 */
716 intel_dp_compute_m_n(bpp, lane_count,
717 mode->clock, adjusted_mode->clock, &m_n);
718
719 if (HAS_PCH_SPLIT(dev)) {
720 I915_WRITE(TRANSDATA_M1(pipe),
721 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
722 m_n.gmch_m);
723 I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n);
724 I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m);
725 I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n);
726 } else {
727 I915_WRITE(PIPE_GMCH_DATA_M(pipe),
728 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
729 m_n.gmch_m);
730 I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
731 I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
732 I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
733 }
734 }
735
736 static void
737 intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
738 struct drm_display_mode *adjusted_mode)
739 {
740 struct drm_device *dev = encoder->dev;
741 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
742 struct drm_crtc *crtc = intel_dp->base.base.crtc;
743 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
744
745 intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
746 intel_dp->DP |= intel_dp->color_range;
747
748 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
749 intel_dp->DP |= DP_SYNC_HS_HIGH;
750 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
751 intel_dp->DP |= DP_SYNC_VS_HIGH;
752
753 if (HAS_PCH_CPT(dev) && !is_edp(intel_dp))
754 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
755 else
756 intel_dp->DP |= DP_LINK_TRAIN_OFF;
757
758 switch (intel_dp->lane_count) {
759 case 1:
760 intel_dp->DP |= DP_PORT_WIDTH_1;
761 break;
762 case 2:
763 intel_dp->DP |= DP_PORT_WIDTH_2;
764 break;
765 case 4:
766 intel_dp->DP |= DP_PORT_WIDTH_4;
767 break;
768 }
769 if (intel_dp->has_audio)
770 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
771
772 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
773 intel_dp->link_configuration[0] = intel_dp->link_bw;
774 intel_dp->link_configuration[1] = intel_dp->lane_count;
775
776 /*
777 * Check for DPCD version >= 1.1 and enhanced framing support
778 */
779 if (intel_dp->dpcd[0] >= 0x11 && (intel_dp->dpcd[2] & DP_ENHANCED_FRAME_CAP)) {
780 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
781 intel_dp->DP |= DP_ENHANCED_FRAMING;
782 }
783
784 /* CPT DP's pipe select is decided in TRANS_DP_CTL */
785 if (intel_crtc->pipe == 1 && !HAS_PCH_CPT(dev))
786 intel_dp->DP |= DP_PIPEB_SELECT;
787
788 if (is_edp(intel_dp) && !is_pch_edp(intel_dp)) {
789 /* don't forget the PLL settings required for CPU eDP */
790 intel_dp->DP |= DP_PLL_ENABLE;
791 if (adjusted_mode->clock < 200000)
792 intel_dp->DP |= DP_PLL_FREQ_160MHZ;
793 else
794 intel_dp->DP |= DP_PLL_FREQ_270MHZ;
795 }
796 }
797
798 static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
799 {
800 struct drm_device *dev = intel_dp->base.base.dev;
801 struct drm_i915_private *dev_priv = dev->dev_private;
802 u32 pp;
803
804 /*
805 * If the panel wasn't on, make sure there's not a currently
806 * active PP sequence before enabling AUX VDD.
807 */
808 if (!(I915_READ(PCH_PP_STATUS) & PP_ON))
809 msleep(dev_priv->panel_t3);
810
811 pp = I915_READ(PCH_PP_CONTROL);
812 pp |= EDP_FORCE_VDD;
813 I915_WRITE(PCH_PP_CONTROL, pp);
814 POSTING_READ(PCH_PP_CONTROL);
815 }
816
817 static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp)
818 {
819 struct drm_device *dev = intel_dp->base.base.dev;
820 struct drm_i915_private *dev_priv = dev->dev_private;
821 u32 pp;
822
823 pp = I915_READ(PCH_PP_CONTROL);
824 pp &= ~EDP_FORCE_VDD;
825 I915_WRITE(PCH_PP_CONTROL, pp);
826 POSTING_READ(PCH_PP_CONTROL);
827
828 /* Make sure sequencer is idle before allowing subsequent activity */
829 msleep(dev_priv->panel_t12);
830 }
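/*
 * panel_t3 and panel_t12 above are parsed from PCH_PP_ON_DELAYS and
 * PCH_PP_DIVISOR in intel_dp_init() below: t3 is used as the power-up
 * wait before forcing VDD on, and t12 as the idle wait after releasing
 * it (VESA eDP timing names, per the comment there).
 */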
831
832 /* Returns true if the panel was already on when called */
833 static bool ironlake_edp_panel_on (struct intel_dp *intel_dp)
834 {
835 struct drm_device *dev = intel_dp->base.base.dev;
836 struct drm_i915_private *dev_priv = dev->dev_private;
837 u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_STATE_ON_IDLE;
838
839 if (I915_READ(PCH_PP_STATUS) & PP_ON)
840 return true;
841
842 pp = I915_READ(PCH_PP_CONTROL);
843
844 /* ILK workaround: disable reset around power sequence */
845 pp &= ~PANEL_POWER_RESET;
846 I915_WRITE(PCH_PP_CONTROL, pp);
847 POSTING_READ(PCH_PP_CONTROL);
848
849 pp |= PANEL_UNLOCK_REGS | POWER_TARGET_ON;
850 I915_WRITE(PCH_PP_CONTROL, pp);
851 POSTING_READ(PCH_PP_CONTROL);
852
853 if (wait_for((I915_READ(PCH_PP_STATUS) & idle_on_mask) == idle_on_mask,
854 5000))
855 DRM_ERROR("panel on wait timed out: 0x%08x\n",
856 I915_READ(PCH_PP_STATUS));
857
858 pp |= PANEL_POWER_RESET; /* restore panel reset bit */
859 I915_WRITE(PCH_PP_CONTROL, pp);
860 POSTING_READ(PCH_PP_CONTROL);
861
862 return false;
863 }
864
865 static void ironlake_edp_panel_off (struct drm_device *dev)
866 {
867 struct drm_i915_private *dev_priv = dev->dev_private;
868 u32 pp, idle_off_mask = PP_ON | PP_SEQUENCE_MASK |
869 PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK;
870
871 pp = I915_READ(PCH_PP_CONTROL);
872
873 /* ILK workaround: disable reset around power sequence */
874 pp &= ~PANEL_POWER_RESET;
875 I915_WRITE(PCH_PP_CONTROL, pp);
876 POSTING_READ(PCH_PP_CONTROL);
877
878 pp &= ~POWER_TARGET_ON;
879 I915_WRITE(PCH_PP_CONTROL, pp);
880 POSTING_READ(PCH_PP_CONTROL);
881
882 if (wait_for((I915_READ(PCH_PP_STATUS) & idle_off_mask) == 0, 5000))
883 DRM_ERROR("panel off wait timed out: 0x%08x\n",
884 I915_READ(PCH_PP_STATUS));
885
886 pp |= PANEL_POWER_RESET; /* restore panel reset bit */
887 I915_WRITE(PCH_PP_CONTROL, pp);
888 POSTING_READ(PCH_PP_CONTROL);
889 }
890
891 static void ironlake_edp_backlight_on (struct drm_device *dev)
892 {
893 struct drm_i915_private *dev_priv = dev->dev_private;
894 u32 pp;
895
896 DRM_DEBUG_KMS("\n");
897 /*
898 * If we enable the backlight right away following a panel power
899 * on, we may see slight flicker as the panel syncs with the eDP
900 * link. So delay a bit to make sure the image is solid before
901 * allowing it to appear.
902 */
903 msleep(300);
904 pp = I915_READ(PCH_PP_CONTROL);
905 pp |= EDP_BLC_ENABLE;
906 I915_WRITE(PCH_PP_CONTROL, pp);
907 }
908
909 static void ironlake_edp_backlight_off (struct drm_device *dev)
910 {
911 struct drm_i915_private *dev_priv = dev->dev_private;
912 u32 pp;
913
914 DRM_DEBUG_KMS("\n");
915 pp = I915_READ(PCH_PP_CONTROL);
916 pp &= ~EDP_BLC_ENABLE;
917 I915_WRITE(PCH_PP_CONTROL, pp);
918 }
919
920 static void ironlake_edp_pll_on(struct drm_encoder *encoder)
921 {
922 struct drm_device *dev = encoder->dev;
923 struct drm_i915_private *dev_priv = dev->dev_private;
924 u32 dpa_ctl;
925
926 DRM_DEBUG_KMS("\n");
927 dpa_ctl = I915_READ(DP_A);
928 dpa_ctl |= DP_PLL_ENABLE;
929 I915_WRITE(DP_A, dpa_ctl);
930 POSTING_READ(DP_A);
931 udelay(200);
932 }
933
934 static void ironlake_edp_pll_off(struct drm_encoder *encoder)
935 {
936 struct drm_device *dev = encoder->dev;
937 struct drm_i915_private *dev_priv = dev->dev_private;
938 u32 dpa_ctl;
939
940 dpa_ctl = I915_READ(DP_A);
941 dpa_ctl &= ~DP_PLL_ENABLE;
942 I915_WRITE(DP_A, dpa_ctl);
943 POSTING_READ(DP_A);
944 udelay(200);
945 }
946
947 static void intel_dp_prepare(struct drm_encoder *encoder)
948 {
949 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
950 struct drm_device *dev = encoder->dev;
951
952 if (is_edp(intel_dp)) {
953 ironlake_edp_backlight_off(dev);
954 ironlake_edp_panel_off(dev);
955 if (!is_pch_edp(intel_dp))
956 ironlake_edp_pll_on(encoder);
957 else
958 ironlake_edp_pll_off(encoder);
959 }
960 intel_dp_link_down(intel_dp);
961 }
962
963 static void intel_dp_commit(struct drm_encoder *encoder)
964 {
965 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
966 struct drm_device *dev = encoder->dev;
967
968 if (is_edp(intel_dp))
969 ironlake_edp_panel_vdd_on(intel_dp);
970
971 intel_dp_start_link_train(intel_dp);
972
973 if (is_edp(intel_dp)) {
974 ironlake_edp_panel_on(intel_dp);
975 ironlake_edp_panel_vdd_off(intel_dp);
976 }
977
978 intel_dp_complete_link_train(intel_dp);
979
980 if (is_edp(intel_dp))
981 ironlake_edp_backlight_on(dev);
982 }
983
984 static void
985 intel_dp_dpms(struct drm_encoder *encoder, int mode)
986 {
987 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
988 struct drm_device *dev = encoder->dev;
989 struct drm_i915_private *dev_priv = dev->dev_private;
990 uint32_t dp_reg = I915_READ(intel_dp->output_reg);
991
992 if (mode != DRM_MODE_DPMS_ON) {
993 if (is_edp(intel_dp))
994 ironlake_edp_backlight_off(dev);
995 intel_dp_link_down(intel_dp);
996 if (is_edp(intel_dp))
997 ironlake_edp_panel_off(dev);
998 if (is_edp(intel_dp) && !is_pch_edp(intel_dp))
999 ironlake_edp_pll_off(encoder);
1000 } else {
1001 if (is_edp(intel_dp))
1002 ironlake_edp_panel_vdd_on(intel_dp);
1003 if (!(dp_reg & DP_PORT_EN)) {
1004 intel_dp_start_link_train(intel_dp);
1005 if (is_edp(intel_dp)) {
1006 ironlake_edp_panel_on(intel_dp);
1007 ironlake_edp_panel_vdd_off(intel_dp);
1008 }
1009 intel_dp_complete_link_train(intel_dp);
1010 }
1011 if (is_edp(intel_dp))
1012 ironlake_edp_backlight_on(dev);
1013 }
1014 intel_dp->dpms_mode = mode;
1015 }
1016
1017 /*
1018 * Fetch DPCD registers 0x202 - 0x207 over the AUX channel; they
1019 * contain the link status information
1020 */
1021 static bool
1022 intel_dp_get_link_status(struct intel_dp *intel_dp)
1023 {
1024 int ret;
1025
1026 ret = intel_dp_aux_native_read(intel_dp,
1027 DP_LANE0_1_STATUS,
1028 intel_dp->link_status, DP_LINK_STATUS_SIZE);
1029 if (ret != DP_LINK_STATUS_SIZE)
1030 return false;
1031 return true;
1032 }
1033
1034 static uint8_t
1035 intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1036 int r)
1037 {
1038 return link_status[r - DP_LANE0_1_STATUS];
1039 }
1040
1041 static uint8_t
1042 intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
1043 int lane)
1044 {
1045 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1046 int s = ((lane & 1) ?
1047 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
1048 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
1049 uint8_t l = intel_dp_link_status(link_status, i);
1050
1051 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
1052 }
1053
1054 static uint8_t
1055 intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
1056 int lane)
1057 {
1058 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
1059 int s = ((lane & 1) ?
1060 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
1061 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
1062 uint8_t l = intel_dp_link_status(link_status, i);
1063
1064 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
1065 }
1066
1067
1068 #if 0
1069 static char *voltage_names[] = {
1070 "0.4V", "0.6V", "0.8V", "1.2V"
1071 };
1072 static char *pre_emph_names[] = {
1073 "0dB", "3.5dB", "6dB", "9.5dB"
1074 };
1075 static char *link_train_names[] = {
1076 "pattern 1", "pattern 2", "idle", "off"
1077 };
1078 #endif
1079
1080 /*
1081 * These are source-specific values; current Intel hardware supports
1082 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
1083 */
1084 #define I830_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_800
1085
1086 static uint8_t
1087 intel_dp_pre_emphasis_max(uint8_t voltage_swing)
1088 {
1089 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
1090 case DP_TRAIN_VOLTAGE_SWING_400:
1091 return DP_TRAIN_PRE_EMPHASIS_6;
1092 case DP_TRAIN_VOLTAGE_SWING_600:
1093 return DP_TRAIN_PRE_EMPHASIS_6;
1094 case DP_TRAIN_VOLTAGE_SWING_800:
1095 return DP_TRAIN_PRE_EMPHASIS_3_5;
1096 case DP_TRAIN_VOLTAGE_SWING_1200:
1097 default:
1098 return DP_TRAIN_PRE_EMPHASIS_0;
1099 }
1100 }
1101
1102 static void
1103 intel_get_adjust_train(struct intel_dp *intel_dp)
1104 {
1105 uint8_t v = 0;
1106 uint8_t p = 0;
1107 int lane;
1108
1109 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1110 uint8_t this_v = intel_get_adjust_request_voltage(intel_dp->link_status, lane);
1111 uint8_t this_p = intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);
1112
1113 if (this_v > v)
1114 v = this_v;
1115 if (this_p > p)
1116 p = this_p;
1117 }
1118
1119 if (v >= I830_DP_VOLTAGE_MAX)
1120 v = I830_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
1121
1122 if (p >= intel_dp_pre_emphasis_max(v))
1123 p = intel_dp_pre_emphasis_max(v) | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
1124
1125 for (lane = 0; lane < 4; lane++)
1126 intel_dp->train_set[lane] = v | p;
1127 }
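/*
 * For example, if the sink's adjust requests peak at 600mV and 3.5dB
 * across the lanes, every train_set[] byte becomes
 * DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5; the MAX_*
 * "reached" flags are only OR'd in once the source-side limits above
 * are hit.
 */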
1128
1129 static uint32_t
1130 intel_dp_signal_levels(uint8_t train_set, int lane_count)
1131 {
1132 uint32_t signal_levels = 0;
1133
1134 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
1135 case DP_TRAIN_VOLTAGE_SWING_400:
1136 default:
1137 signal_levels |= DP_VOLTAGE_0_4;
1138 break;
1139 case DP_TRAIN_VOLTAGE_SWING_600:
1140 signal_levels |= DP_VOLTAGE_0_6;
1141 break;
1142 case DP_TRAIN_VOLTAGE_SWING_800:
1143 signal_levels |= DP_VOLTAGE_0_8;
1144 break;
1145 case DP_TRAIN_VOLTAGE_SWING_1200:
1146 signal_levels |= DP_VOLTAGE_1_2;
1147 break;
1148 }
1149 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
1150 case DP_TRAIN_PRE_EMPHASIS_0:
1151 default:
1152 signal_levels |= DP_PRE_EMPHASIS_0;
1153 break;
1154 case DP_TRAIN_PRE_EMPHASIS_3_5:
1155 signal_levels |= DP_PRE_EMPHASIS_3_5;
1156 break;
1157 case DP_TRAIN_PRE_EMPHASIS_6:
1158 signal_levels |= DP_PRE_EMPHASIS_6;
1159 break;
1160 case DP_TRAIN_PRE_EMPHASIS_9_5:
1161 signal_levels |= DP_PRE_EMPHASIS_9_5;
1162 break;
1163 }
1164 return signal_levels;
1165 }
1166
1167 /* Gen6's DP voltage swing and pre-emphasis control */
1168 static uint32_t
1169 intel_gen6_edp_signal_levels(uint8_t train_set)
1170 {
1171 int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
1172 DP_TRAIN_PRE_EMPHASIS_MASK);
1173 switch (signal_levels) {
1174 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
1175 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
1176 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
1177 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
1178 return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
1179 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
1180 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6:
1181 return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
1182 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
1183 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
1184 return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
1185 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
1186 case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0:
1187 return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
1188 default:
1189 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
1190 "0x%x\n", signal_levels);
1191 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
1192 }
1193 }
1194
1195 static uint8_t
1196 intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1197 int lane)
1198 {
1199 int i = DP_LANE0_1_STATUS + (lane >> 1);
1200 int s = (lane & 1) * 4;
1201 uint8_t l = intel_dp_link_status(link_status, i);
1202
1203 return (l >> s) & 0xf;
1204 }
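/*
 * The cached link status mirrors DPCD 0x202-0x207: 0x202 holds lane 0
 * in bits 3:0 and lane 1 in bits 7:4, 0x203 the same for lanes 2 and 3,
 * so e.g. intel_get_lane_status(link_status, 2) returns the low nibble
 * of the byte read from DPCD 0x203.
 */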
1205
1206 /* Check whether clock recovery is done on all lanes */
1207 static bool
1208 intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1209 {
1210 int lane;
1211 uint8_t lane_status;
1212
1213 for (lane = 0; lane < lane_count; lane++) {
1214 lane_status = intel_get_lane_status(link_status, lane);
1215 if ((lane_status & DP_LANE_CR_DONE) == 0)
1216 return false;
1217 }
1218 return true;
1219 }
1220
1221 /* Check whether channel equalization is done on all lanes */
1222 #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
1223 DP_LANE_CHANNEL_EQ_DONE|\
1224 DP_LANE_SYMBOL_LOCKED)
1225 static bool
1226 intel_channel_eq_ok(struct intel_dp *intel_dp)
1227 {
1228 uint8_t lane_align;
1229 uint8_t lane_status;
1230 int lane;
1231
1232 lane_align = intel_dp_link_status(intel_dp->link_status,
1233 DP_LANE_ALIGN_STATUS_UPDATED);
1234 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
1235 return false;
1236 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1237 lane_status = intel_get_lane_status(intel_dp->link_status, lane);
1238 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
1239 return false;
1240 }
1241 return true;
1242 }
1243
1244 static bool
1245 intel_dp_set_link_train(struct intel_dp *intel_dp,
1246 uint32_t dp_reg_value,
1247 uint8_t dp_train_pat)
1248 {
1249 struct drm_device *dev = intel_dp->base.base.dev;
1250 struct drm_i915_private *dev_priv = dev->dev_private;
1251 int ret;
1252
1253 I915_WRITE(intel_dp->output_reg, dp_reg_value);
1254 POSTING_READ(intel_dp->output_reg);
1255
1256 intel_dp_aux_native_write_1(intel_dp,
1257 DP_TRAINING_PATTERN_SET,
1258 dp_train_pat);
1259
1260 ret = intel_dp_aux_native_write(intel_dp,
1261 DP_TRAINING_LANE0_SET,
1262 intel_dp->train_set, 4);
1263 if (ret != 4)
1264 return false;
1265
1266 return true;
1267 }
1268
1269 /* Enable corresponding port and start training pattern 1 */
1270 static void
1271 intel_dp_start_link_train(struct intel_dp *intel_dp)
1272 {
1273 struct drm_device *dev = intel_dp->base.base.dev;
1274 struct drm_i915_private *dev_priv = dev->dev_private;
1275 struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);
1276 int i;
1277 uint8_t voltage;
1278 bool clock_recovery = false;
1279 int tries;
1280 u32 reg;
1281 uint32_t DP = intel_dp->DP;
1282
1283 /* Enable output, wait for it to become active */
1284 I915_WRITE(intel_dp->output_reg, intel_dp->DP);
1285 POSTING_READ(intel_dp->output_reg);
1286 intel_wait_for_vblank(dev, intel_crtc->pipe);
1287
1288 /* Write the link configuration data */
1289 intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
1290 intel_dp->link_configuration,
1291 DP_LINK_CONFIGURATION_SIZE);
1292
1293 DP |= DP_PORT_EN;
1294 if (HAS_PCH_CPT(dev) && !is_edp(intel_dp))
1295 DP &= ~DP_LINK_TRAIN_MASK_CPT;
1296 else
1297 DP &= ~DP_LINK_TRAIN_MASK;
1298 memset(intel_dp->train_set, 0, 4);
1299 voltage = 0xff;
1300 tries = 0;
1301 clock_recovery = false;
1302 for (;;) {
1303 /* Use intel_dp->train_set[0] to set the voltage and pre-emphasis values */
1304 uint32_t signal_levels;
1305 if (IS_GEN6(dev) && is_edp(intel_dp)) {
1306 signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
1307 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
1308 } else {
1309 signal_levels = intel_dp_signal_levels(intel_dp->train_set[0], intel_dp->lane_count);
1310 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
1311 }
1312
1313 if (HAS_PCH_CPT(dev) && !is_edp(intel_dp))
1314 reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
1315 else
1316 reg = DP | DP_LINK_TRAIN_PAT_1;
1317
1318 if (!intel_dp_set_link_train(intel_dp, reg,
1319 DP_TRAINING_PATTERN_1))
1320 break;
1321 /* Set training pattern 1 */
1322
1323 udelay(100);
1324 if (!intel_dp_get_link_status(intel_dp))
1325 break;
1326
1327 if (intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1328 clock_recovery = true;
1329 break;
1330 }
1331
1332 /* Check to see if we've tried the max voltage */
1333 for (i = 0; i < intel_dp->lane_count; i++)
1334 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
1335 break;
1336 if (i == intel_dp->lane_count)
1337 break;
1338
1339 /* Check to see if we've tried the same voltage 5 times */
1340 if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
1341 ++tries;
1342 if (tries == 5)
1343 break;
1344 } else
1345 tries = 0;
1346 voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
1347
1348 /* Compute new intel_dp->train_set as requested by target */
1349 intel_get_adjust_train(intel_dp);
1350 }
1351
1352 intel_dp->DP = DP;
1353 }
1354
1355 static void
1356 intel_dp_complete_link_train(struct intel_dp *intel_dp)
1357 {
1358 struct drm_device *dev = intel_dp->base.base.dev;
1359 struct drm_i915_private *dev_priv = dev->dev_private;
1360 bool channel_eq = false;
1361 int tries, cr_tries;
1362 u32 reg;
1363 uint32_t DP = intel_dp->DP;
1364
1365 /* channel equalization */
1366 tries = 0;
1367 cr_tries = 0;
1368 channel_eq = false;
1369 for (;;) {
1370 /* Use intel_dp->train_set[0] to set the voltage and pre-emphasis values */
1371 uint32_t signal_levels;
1372
1373 if (cr_tries > 5) {
1374 DRM_ERROR("failed to train DP, aborting\n");
1375 intel_dp_link_down(intel_dp);
1376 break;
1377 }
1378
1379 if (IS_GEN6(dev) && is_edp(intel_dp)) {
1380 signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
1381 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
1382 } else {
1383 signal_levels = intel_dp_signal_levels(intel_dp->train_set[0], intel_dp->lane_count);
1384 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
1385 }
1386
1387 if (HAS_PCH_CPT(dev) && !is_edp(intel_dp))
1388 reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
1389 else
1390 reg = DP | DP_LINK_TRAIN_PAT_2;
1391
1392 /* channel eq pattern */
1393 if (!intel_dp_set_link_train(intel_dp, reg,
1394 DP_TRAINING_PATTERN_2))
1395 break;
1396
1397 udelay(400);
1398 if (!intel_dp_get_link_status(intel_dp))
1399 break;
1400
1401 /* Make sure clock is still ok */
1402 if (!intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
1403 intel_dp_start_link_train(intel_dp);
1404 cr_tries++;
1405 continue;
1406 }
1407
1408 if (intel_channel_eq_ok(intel_dp)) {
1409 channel_eq = true;
1410 break;
1411 }
1412
1413 /* Try 5 times, then try clock recovery if that fails */
1414 if (tries > 5) {
1415 intel_dp_link_down(intel_dp);
1416 intel_dp_start_link_train(intel_dp);
1417 tries = 0;
1418 cr_tries++;
1419 continue;
1420 }
1421
1422 /* Compute new intel_dp->train_set as requested by target */
1423 intel_get_adjust_train(intel_dp);
1424 ++tries;
1425 }
1426
1427 if (HAS_PCH_CPT(dev) && !is_edp(intel_dp))
1428 reg = DP | DP_LINK_TRAIN_OFF_CPT;
1429 else
1430 reg = DP | DP_LINK_TRAIN_OFF;
1431
1432 I915_WRITE(intel_dp->output_reg, reg);
1433 POSTING_READ(intel_dp->output_reg);
1434 intel_dp_aux_native_write_1(intel_dp,
1435 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
1436 }
1437
1438 static void
1439 intel_dp_link_down(struct intel_dp *intel_dp)
1440 {
1441 struct drm_device *dev = intel_dp->base.base.dev;
1442 struct drm_i915_private *dev_priv = dev->dev_private;
1443 uint32_t DP = intel_dp->DP;
1444
1445 if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
1446 return;
1447
1448 DRM_DEBUG_KMS("\n");
1449
1450 if (is_edp(intel_dp)) {
1451 DP &= ~DP_PLL_ENABLE;
1452 I915_WRITE(intel_dp->output_reg, DP);
1453 POSTING_READ(intel_dp->output_reg);
1454 udelay(100);
1455 }
1456
1457 if (HAS_PCH_CPT(dev) && !is_edp(intel_dp)) {
1458 DP &= ~DP_LINK_TRAIN_MASK_CPT;
1459 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
1460 } else {
1461 DP &= ~DP_LINK_TRAIN_MASK;
1462 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
1463 }
1464 POSTING_READ(intel_dp->output_reg);
1465
1466 msleep(17);
1467
1468 if (is_edp(intel_dp))
1469 DP |= DP_LINK_TRAIN_OFF;
1470
1471 if (!HAS_PCH_CPT(dev) &&
1472 I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
1473 struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);
1474 /* Hardware workaround: leaving our transcoder select
1475 * set to transcoder B while it's off will prevent the
1476 * corresponding HDMI output on transcoder A.
1477 *
1478 * Combine this with another hardware workaround:
1479 * transcoder select bit can only be cleared while the
1480 * port is enabled.
1481 */
1482 DP &= ~DP_PIPEB_SELECT;
1483 I915_WRITE(intel_dp->output_reg, DP);
1484
1485 /* Changes to the port enable and transcoder select bits take
1486 * effect on the vblank after they are written.
1487 */
1488 intel_wait_for_vblank(dev, intel_crtc->pipe);
1489 }
1490
1491 I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
1492 POSTING_READ(intel_dp->output_reg);
1493 }
1494
1495 /*
1496 * According to DP spec
1497 * 5.1.2:
1498 * 1. Read DPCD
1499 * 2. Configure link according to Receiver Capabilities
1500 * 3. Use Link Training from 2.5.3.3 and 3.5.1.3
1501 * 4. Check link status on receipt of hot-plug interrupt
1502 */
1503
1504 static void
1505 intel_dp_check_link_status(struct intel_dp *intel_dp)
1506 {
1507 if (!intel_dp->base.base.crtc)
1508 return;
1509
1510 if (!intel_dp_get_link_status(intel_dp)) {
1511 intel_dp_link_down(intel_dp);
1512 return;
1513 }
1514
1515 if (!intel_channel_eq_ok(intel_dp)) {
1516 intel_dp_start_link_train(intel_dp);
1517 intel_dp_complete_link_train(intel_dp);
1518 }
1519 }
1520
1521 static enum drm_connector_status
1522 ironlake_dp_detect(struct intel_dp *intel_dp)
1523 {
1524 enum drm_connector_status status;
1525
1526 /* Can't disconnect eDP, but you can close the lid... */
1527 if (is_edp(intel_dp)) {
1528 status = intel_panel_detect(intel_dp->base.base.dev);
1529 if (status == connector_status_unknown)
1530 status = connector_status_connected;
1531 return status;
1532 }
1533
1534 status = connector_status_disconnected;
1535 if (intel_dp_aux_native_read(intel_dp,
1536 0x000, intel_dp->dpcd,
1537 sizeof (intel_dp->dpcd))
1538 == sizeof(intel_dp->dpcd)) {
1539 if (intel_dp->dpcd[0] != 0)
1540 status = connector_status_connected;
1541 }
1542 DRM_DEBUG_KMS("DPCD: %hx%hx%hx%hx\n", intel_dp->dpcd[0],
1543 intel_dp->dpcd[1], intel_dp->dpcd[2], intel_dp->dpcd[3]);
1544 return status;
1545 }
1546
1547 static enum drm_connector_status
1548 g4x_dp_detect(struct intel_dp *intel_dp)
1549 {
1550 struct drm_device *dev = intel_dp->base.base.dev;
1551 struct drm_i915_private *dev_priv = dev->dev_private;
1552 enum drm_connector_status status;
1553 uint32_t temp, bit;
1554
1555 switch (intel_dp->output_reg) {
1556 case DP_B:
1557 bit = DPB_HOTPLUG_INT_STATUS;
1558 break;
1559 case DP_C:
1560 bit = DPC_HOTPLUG_INT_STATUS;
1561 break;
1562 case DP_D:
1563 bit = DPD_HOTPLUG_INT_STATUS;
1564 break;
1565 default:
1566 return connector_status_unknown;
1567 }
1568
1569 temp = I915_READ(PORT_HOTPLUG_STAT);
1570
1571 if ((temp & bit) == 0)
1572 return connector_status_disconnected;
1573
1574 status = connector_status_disconnected;
1575 if (intel_dp_aux_native_read(intel_dp, 0x000, intel_dp->dpcd,
1576 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd))
1577 {
1578 if (intel_dp->dpcd[0] != 0)
1579 status = connector_status_connected;
1580 }
1581
1582 return status;
1583 }
1584
1585 /**
1586 * intel_dp_detect - probe the DP port for an attached sink.
1587 *
1588 * \return connector_status_connected if a sink responds (its audio capability is also cached).
1589 * \return connector_status_disconnected otherwise.
1590 */
1591 static enum drm_connector_status
1592 intel_dp_detect(struct drm_connector *connector, bool force)
1593 {
1594 struct intel_dp *intel_dp = intel_attached_dp(connector);
1595 struct drm_device *dev = intel_dp->base.base.dev;
1596 enum drm_connector_status status;
1597 struct edid *edid = NULL;
1598
1599 intel_dp->has_audio = false;
1600
1601 if (HAS_PCH_SPLIT(dev))
1602 status = ironlake_dp_detect(intel_dp);
1603 else
1604 status = g4x_dp_detect(intel_dp);
1605 if (status != connector_status_connected)
1606 return status;
1607
1608 if (intel_dp->force_audio) {
1609 intel_dp->has_audio = intel_dp->force_audio > 0;
1610 } else {
1611 edid = drm_get_edid(connector, &intel_dp->adapter);
1612 if (edid) {
1613 intel_dp->has_audio = drm_detect_monitor_audio(edid);
1614 connector->display_info.raw_edid = NULL;
1615 kfree(edid);
1616 }
1617 }
1618
1619 return connector_status_connected;
1620 }
1621
1622 static int intel_dp_get_modes(struct drm_connector *connector)
1623 {
1624 struct intel_dp *intel_dp = intel_attached_dp(connector);
1625 struct drm_device *dev = intel_dp->base.base.dev;
1626 struct drm_i915_private *dev_priv = dev->dev_private;
1627 int ret;
1628
1629 /* We should parse the EDID data and find out if it has an audio sink
1630 */
1631
1632 ret = intel_ddc_get_modes(connector, &intel_dp->adapter);
1633 if (ret) {
1634 if (is_edp(intel_dp) && !dev_priv->panel_fixed_mode) {
1635 struct drm_display_mode *newmode;
1636 list_for_each_entry(newmode, &connector->probed_modes,
1637 head) {
1638 if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
1639 dev_priv->panel_fixed_mode =
1640 drm_mode_duplicate(dev, newmode);
1641 break;
1642 }
1643 }
1644 }
1645
1646 return ret;
1647 }
1648
1649 /* if eDP has no EDID, try to use fixed panel mode from VBT */
1650 if (is_edp(intel_dp)) {
1651 if (dev_priv->panel_fixed_mode != NULL) {
1652 struct drm_display_mode *mode;
1653 mode = drm_mode_duplicate(dev, dev_priv->panel_fixed_mode);
1654 drm_mode_probed_add(connector, mode);
1655 return 1;
1656 }
1657 }
1658 return 0;
1659 }
1660
1661 static bool
1662 intel_dp_detect_audio(struct drm_connector *connector)
1663 {
1664 struct intel_dp *intel_dp = intel_attached_dp(connector);
1665 struct edid *edid;
1666 bool has_audio = false;
1667
1668 edid = drm_get_edid(connector, &intel_dp->adapter);
1669 if (edid) {
1670 has_audio = drm_detect_monitor_audio(edid);
1671
1672 connector->display_info.raw_edid = NULL;
1673 kfree(edid);
1674 }
1675
1676 return has_audio;
1677 }
1678
1679 static int
1680 intel_dp_set_property(struct drm_connector *connector,
1681 struct drm_property *property,
1682 uint64_t val)
1683 {
1684 struct drm_i915_private *dev_priv = connector->dev->dev_private;
1685 struct intel_dp *intel_dp = intel_attached_dp(connector);
1686 int ret;
1687
1688 ret = drm_connector_property_set_value(connector, property, val);
1689 if (ret)
1690 return ret;
1691
1692 if (property == intel_dp->force_audio_property) {
1693 int i = val;
1694 bool has_audio;
1695
1696 if (i == intel_dp->force_audio)
1697 return 0;
1698
1699 intel_dp->force_audio = i;
1700
1701 if (i == 0)
1702 has_audio = intel_dp_detect_audio(connector);
1703 else
1704 has_audio = i > 0;
1705
1706 if (has_audio == intel_dp->has_audio)
1707 return 0;
1708
1709 intel_dp->has_audio = has_audio;
1710 goto done;
1711 }
1712
1713 if (property == dev_priv->broadcast_rgb_property) {
1714 if (val == !!intel_dp->color_range)
1715 return 0;
1716
1717 intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
1718 goto done;
1719 }
1720
1721 return -EINVAL;
1722
1723 done:
1724 if (intel_dp->base.base.crtc) {
1725 struct drm_crtc *crtc = intel_dp->base.base.crtc;
1726 drm_crtc_helper_set_mode(crtc, &crtc->mode,
1727 crtc->x, crtc->y,
1728 crtc->fb);
1729 }
1730
1731 return 0;
1732 }
1733
1734 static void
1735 intel_dp_destroy (struct drm_connector *connector)
1736 {
1737 drm_sysfs_connector_remove(connector);
1738 drm_connector_cleanup(connector);
1739 kfree(connector);
1740 }
1741
1742 static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
1743 {
1744 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1745
1746 i2c_del_adapter(&intel_dp->adapter);
1747 drm_encoder_cleanup(encoder);
1748 kfree(intel_dp);
1749 }
1750
1751 static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
1752 .dpms = intel_dp_dpms,
1753 .mode_fixup = intel_dp_mode_fixup,
1754 .prepare = intel_dp_prepare,
1755 .mode_set = intel_dp_mode_set,
1756 .commit = intel_dp_commit,
1757 };
1758
1759 static const struct drm_connector_funcs intel_dp_connector_funcs = {
1760 .dpms = drm_helper_connector_dpms,
1761 .detect = intel_dp_detect,
1762 .fill_modes = drm_helper_probe_single_connector_modes,
1763 .set_property = intel_dp_set_property,
1764 .destroy = intel_dp_destroy,
1765 };
1766
1767 static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
1768 .get_modes = intel_dp_get_modes,
1769 .mode_valid = intel_dp_mode_valid,
1770 .best_encoder = intel_best_encoder,
1771 };
1772
1773 static const struct drm_encoder_funcs intel_dp_enc_funcs = {
1774 .destroy = intel_dp_encoder_destroy,
1775 };
1776
1777 static void
1778 intel_dp_hot_plug(struct intel_encoder *intel_encoder)
1779 {
1780 struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
1781
1782 if (intel_dp->dpms_mode == DRM_MODE_DPMS_ON)
1783 intel_dp_check_link_status(intel_dp);
1784 }
1785
1786 /* Return which DP Port should be selected for Transcoder DP control */
1787 int
1788 intel_trans_dp_port_sel (struct drm_crtc *crtc)
1789 {
1790 struct drm_device *dev = crtc->dev;
1791 struct drm_mode_config *mode_config = &dev->mode_config;
1792 struct drm_encoder *encoder;
1793
1794 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
1795 struct intel_dp *intel_dp;
1796
1797 if (encoder->crtc != crtc)
1798 continue;
1799
1800 intel_dp = enc_to_intel_dp(encoder);
1801 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT)
1802 return intel_dp->output_reg;
1803 }
1804
1805 return -1;
1806 }
1807
1808 /* check the VBT to see whether the eDP is on DP-D port */
1809 bool intel_dpd_is_edp(struct drm_device *dev)
1810 {
1811 struct drm_i915_private *dev_priv = dev->dev_private;
1812 struct child_device_config *p_child;
1813 int i;
1814
1815 if (!dev_priv->child_dev_num)
1816 return false;
1817
1818 for (i = 0; i < dev_priv->child_dev_num; i++) {
1819 p_child = dev_priv->child_dev + i;
1820
1821 if (p_child->dvo_port == PORT_IDPD &&
1822 p_child->device_type == DEVICE_TYPE_eDP)
1823 return true;
1824 }
1825 return false;
1826 }
1827
1828 static void
1829 intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
1830 {
1831 struct drm_device *dev = connector->dev;
1832
1833 intel_dp->force_audio_property =
1834 drm_property_create(dev, DRM_MODE_PROP_RANGE, "force_audio", 2);
1835 if (intel_dp->force_audio_property) {
1836 intel_dp->force_audio_property->values[0] = -1;
1837 intel_dp->force_audio_property->values[1] = 1;
1838 drm_connector_attach_property(connector, intel_dp->force_audio_property, 0);
1839 }
1840
1841 intel_attach_broadcast_rgb_property(connector);
1842 }
1843
1844 void
1845 intel_dp_init(struct drm_device *dev, int output_reg)
1846 {
1847 struct drm_i915_private *dev_priv = dev->dev_private;
1848 struct drm_connector *connector;
1849 struct intel_dp *intel_dp;
1850 struct intel_encoder *intel_encoder;
1851 struct intel_connector *intel_connector;
1852 const char *name = NULL;
1853 int type;
1854
1855 intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL);
1856 if (!intel_dp)
1857 return;
1858
1859 intel_dp->output_reg = output_reg;
1860 intel_dp->dpms_mode = -1;
1861
1862 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
1863 if (!intel_connector) {
1864 kfree(intel_dp);
1865 return;
1866 }
1867 intel_encoder = &intel_dp->base;
1868
1869 if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
1870 if (intel_dpd_is_edp(dev))
1871 intel_dp->is_pch_edp = true;
1872
1873 if (output_reg == DP_A || is_pch_edp(intel_dp)) {
1874 type = DRM_MODE_CONNECTOR_eDP;
1875 intel_encoder->type = INTEL_OUTPUT_EDP;
1876 } else {
1877 type = DRM_MODE_CONNECTOR_DisplayPort;
1878 intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
1879 }
1880
1881 connector = &intel_connector->base;
1882 drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
1883 drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);
1884
1885 connector->polled = DRM_CONNECTOR_POLL_HPD;
1886
1887 if (output_reg == DP_B || output_reg == PCH_DP_B)
1888 intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
1889 else if (output_reg == DP_C || output_reg == PCH_DP_C)
1890 intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
1891 else if (output_reg == DP_D || output_reg == PCH_DP_D)
1892 intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);
1893
1894 if (is_edp(intel_dp))
1895 intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
1896
1897 intel_encoder->crtc_mask = (1 << 0) | (1 << 1);
1898 connector->interlace_allowed = true;
1899 connector->doublescan_allowed = 0;
1900
1901 drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
1902 DRM_MODE_ENCODER_TMDS);
1903 drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);
1904
1905 intel_connector_attach_encoder(intel_connector, intel_encoder);
1906 drm_sysfs_connector_add(connector);
1907
1908 /* Set up the DDC bus. */
1909 switch (output_reg) {
1910 case DP_A:
1911 name = "DPDDC-A";
1912 break;
1913 case DP_B:
1914 case PCH_DP_B:
1915 dev_priv->hotplug_supported_mask |=
1916 HDMIB_HOTPLUG_INT_STATUS;
1917 name = "DPDDC-B";
1918 break;
1919 case DP_C:
1920 case PCH_DP_C:
1921 dev_priv->hotplug_supported_mask |=
1922 HDMIC_HOTPLUG_INT_STATUS;
1923 name = "DPDDC-C";
1924 break;
1925 case DP_D:
1926 case PCH_DP_D:
1927 dev_priv->hotplug_supported_mask |=
1928 HDMID_HOTPLUG_INT_STATUS;
1929 name = "DPDDC-D";
1930 break;
1931 }
1932
1933 intel_dp_i2c_init(intel_dp, intel_connector, name);
1934
1935 /* Cache some DPCD data in the eDP case */
1936 if (is_edp(intel_dp)) {
1937 int ret;
1938 u32 pp_on, pp_div;
1939
1940 pp_on = I915_READ(PCH_PP_ON_DELAYS);
1941 pp_div = I915_READ(PCH_PP_DIVISOR);
1942
1943 /* Get T3 & T12 values (note: VESA not bspec terminology) */
1944 dev_priv->panel_t3 = (pp_on & 0x1fff0000) >> 16;
1945 dev_priv->panel_t3 /= 10; /* t3 in 100us units */
1946 dev_priv->panel_t12 = pp_div & 0xf;
1947 dev_priv->panel_t12 *= 100; /* t12 in 100ms units */
1948
1949 ironlake_edp_panel_vdd_on(intel_dp);
1950 ret = intel_dp_aux_native_read(intel_dp, DP_DPCD_REV,
1951 intel_dp->dpcd,
1952 sizeof(intel_dp->dpcd));
1953 ironlake_edp_panel_vdd_off(intel_dp);
1954 if (ret == sizeof(intel_dp->dpcd)) {
1955 if (intel_dp->dpcd[0] >= 0x11)
1956 dev_priv->no_aux_handshake = intel_dp->dpcd[3] &
1957 DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
1958 } else {
1959 /* if this fails, presume the device is a ghost */
1960 DRM_ERROR("failed to retrieve link info\n");
1961 intel_dp_destroy(&intel_connector->base);
1962 intel_dp_encoder_destroy(&intel_dp->base.base);
1963 return;
1964 }
1965 }
1966
1967 intel_encoder->hot_plug = intel_dp_hot_plug;
1968
1969 if (is_edp(intel_dp)) {
1970 /* initialize panel mode from VBT if available for eDP */
1971 if (dev_priv->lfp_lvds_vbt_mode) {
1972 dev_priv->panel_fixed_mode =
1973 drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
1974 if (dev_priv->panel_fixed_mode) {
1975 dev_priv->panel_fixed_mode->type |=
1976 DRM_MODE_TYPE_PREFERRED;
1977 }
1978 }
1979 }
1980
1981 intel_dp_add_properties(intel_dp, connector);
1982
1983 /* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
1984 * 0xd. Failure to do so will result in spurious interrupts being
1985 * generated on the port when a cable is not attached.
1986 */
1987 if (IS_G4X(dev) && !IS_GM45(dev)) {
1988 u32 temp = I915_READ(PEG_BAND_GAP_DATA);
1989 I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
1990 }
1991 }