2 * Copyright © 2008 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
24 * Keith Packard <keithp@keithp.com>
28 #include <linux/i2c.h>
29 #include <linux/slab.h>
33 #include "drm_crtc_helper.h"
34 #include "intel_drv.h"
37 #include "drm_dp_helper.h"
/*
 * DP driver-local constants and (excerpted) per-encoder state.
 * NOTE(review): this excerpt is missing lines; struct intel_dp's
 * opening declaration and several members are not visible here.
 */
/* DPCD link-status block is 6 bytes (registers 0x202-0x207). */
40 #define DP_LINK_STATUS_SIZE 6
41 #define DP_LINK_CHECK_TIMEOUT (10 * 1000)
43 #define DP_LINK_CONFIGURATION_SIZE 9
/* eDP on the CPU (DP_A) vs. eDP hanging off the PCH. */
45 #define IS_eDP(i) ((i)->base.type == INTEL_OUTPUT_EDP)
46 #define IS_PCH_eDP(i) ((i)->is_pch_edp)
49 struct intel_encoder base;
52 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
/* i2c-over-AUX adapter used for DDC/EDID transfers. */
58 struct i2c_adapter adapter;
59 struct i2c_algo_dp_aux_data algo;
/* Map a drm_encoder back to its containing intel_dp. */
63 static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
65 return container_of(enc_to_intel_encoder(encoder), struct intel_dp, base);
/* Forward declarations: training/teardown are defined later in the file. */
68 static void intel_dp_link_train(struct intel_dp *intel_dp);
69 static void intel_dp_link_down(struct intel_dp *intel_dp);
/*
 * Report the currently-configured eDP lane count and link bandwidth
 * so PLL/M-N programming elsewhere can match the trained link.
 */
72 intel_edp_link_config (struct intel_encoder *intel_encoder,
73 int *lane_num, int *link_bw)
75 struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
77 *lane_num = intel_dp->lane_count;
78 if (intel_dp->link_bw == DP_LINK_BW_1_62)
/* NOTE(review): the *link_bw assignments fall on lines missing from
 * this excerpt; presumably 162000 / 270000 kHz — confirm. */
80 else if (intel_dp->link_bw == DP_LINK_BW_2_7)
/*
 * Maximum lane count for the sink.  DPCD rev >= 1.1 advertises it in
 * dpcd[2] (low 5 bits); only 1, 2 or 4 are legal lane counts, anything
 * else falls back to the default of 4.
 */
85 intel_dp_max_lane_count(struct intel_dp *intel_dp)
87 int max_lane_count = 4;
89 if (intel_dp->dpcd[0] >= 0x11) {
90 max_lane_count = intel_dp->dpcd[2] & 0x1f;
91 switch (max_lane_count) {
92 case 1: case 2: case 4:
98 return max_lane_count;
/*
 * Maximum link bandwidth code advertised in dpcd[1]; unknown codes
 * are clamped down to the lowest rate (1.62 GHz).
 */
102 intel_dp_max_link_bw(struct intel_dp *intel_dp)
104 int max_link_bw = intel_dp->dpcd[1];
106 switch (max_link_bw) {
107 case DP_LINK_BW_1_62:
111 max_link_bw = DP_LINK_BW_1_62;
/* Translate a DP_LINK_BW_* code into a link clock value.
 * NOTE(review): the returned constants are on lines missing from this
 * excerpt — presumably 270000 / 162000 (kHz); confirm. */
118 intel_dp_link_clock(uint8_t link_bw)
120 if (link_bw == DP_LINK_BW_2_7)
126 /* I think this is a fiction */
/*
 * Bandwidth required to drive pixel_clock, in the same units
 * intel_dp_max_data_rate() produces.  eDP uses the BIOS-reported
 * bits-per-pixel (edp_bpp / 8 bytes); external DP assumes 3 bytes
 * (24bpp) per pixel.
 */
128 intel_dp_link_required(struct drm_device *dev, struct intel_dp *intel_dp, int pixel_clock)
130 struct drm_i915_private *dev_priv = dev->dev_private;
132 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp))
133 return (pixel_clock * dev_priv->edp_bpp) / 8;
135 return pixel_clock * 3;
/*
 * Maximum payload data rate of a link: DP uses 8b/10b encoding, so
 * each 10-bit symbol per lane carries 8 bits of pixel data.
 *
 * @max_link_clock: link symbol clock (kHz)
 * @max_lanes: number of lanes
 * Returns the usable data rate in the units intel_dp_link_required()
 * compares against.
 *
 * (Restored: the excerpt had dropped the return-type line and braces.)
 */
static int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	return (max_link_clock * max_lanes * 8) / 10;
}
/*
 * Connector mode_valid hook: reject modes larger than the fixed panel
 * timings (eDP), modes exceeding the link's maximum data rate
 * (external DP only), and dotclocks below 10 MHz.
 */
145 intel_dp_mode_valid(struct drm_connector *connector,
146 struct drm_display_mode *mode)
148 struct drm_encoder *encoder = intel_attached_encoder(connector);
149 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
150 struct drm_device *dev = connector->dev;
151 struct drm_i915_private *dev_priv = dev->dev_private;
152 int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
153 int max_lanes = intel_dp_max_lane_count(intel_dp);
155 if ((IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) &&
156 dev_priv->panel_fixed_mode) {
157 if (mode->hdisplay > dev_priv->panel_fixed_mode->hdisplay)
160 if (mode->vdisplay > dev_priv->panel_fixed_mode->vdisplay)
164 /* only refuse the mode on non eDP since we have seen some weird eDP panels
165 which are outside spec tolerances but somehow work by magic */
166 if (!IS_eDP(intel_dp) &&
167 (intel_dp_link_required(connector->dev, intel_dp, mode->clock)
168 > intel_dp_max_data_rate(max_link_clock, max_lanes)))
169 return MODE_CLOCK_HIGH;
171 if (mode->clock < 10000)
172 return MODE_CLOCK_LOW;
/*
 * Pack up to four bytes from @src, MSB first, into one 32-bit AUX
 * channel data word.  Bytes beyond the fourth are ignored; missing
 * bytes pack as zero.
 *
 * (Restored: the excerpt had dropped the accumulator init, the
 * 4-byte clamp and the return statement.)
 */
static uint32_t
pack_aux(uint8_t *src, int src_bytes)
{
	int i;
	uint32_t v = 0;

	if (src_bytes > 4)
		src_bytes = 4;
	for (i = 0; i < src_bytes; i++)
		v |= ((uint32_t) src[i]) << ((3-i) * 8);
	return v;
}
/*
 * Unpack a 32-bit AUX channel data word into up to four bytes of
 * @dst, MSB first.  At most four bytes are written regardless of
 * @dst_bytes.
 *
 * (Restored: the excerpt had dropped the 4-byte clamp and braces.)
 */
static void
unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
{
	int i;

	if (dst_bytes > 4)
		dst_bytes = 4;
	for (i = 0; i < dst_bytes; i++)
		dst[i] = src >> ((3-i) * 8);
}
200 /* hrawclock is 1/4 the FSB frequency */
/*
 * Derive the raw hardware clock from the FSB frequency strap in
 * CLKCFG; used below to pick an AUX channel clock divider.
 * NOTE(review): the per-case return values are on lines missing from
 * this excerpt.
 */
202 intel_hrawclk(struct drm_device *dev)
204 struct drm_i915_private *dev_priv = dev->dev_private;
207 clkcfg = I915_READ(CLKCFG);
208 switch (clkcfg & CLKCFG_FSB_MASK) {
217 case CLKCFG_FSB_1067:
219 case CLKCFG_FSB_1333:
221 /* these two are just a guess; one of them might be right */
222 case CLKCFG_FSB_1600:
223 case CLKCFG_FSB_1600_ALT:
/*
 * Perform one raw AUX channel transaction: load @send into the AUX
 * data registers, start the transfer, busy-wait for completion, then
 * unpack any reply into @recv (clamped to @recv_size).  Retried up to
 * five times since the DP spec mandates at least three attempts.
 * NOTE(review): the return statements fall on lines missing from this
 * excerpt; presumably recv_bytes on success, negative on error.
 */
231 intel_dp_aux_ch(struct intel_dp *intel_dp,
232 uint8_t *send, int send_bytes,
233 uint8_t *recv, int recv_size)
235 uint32_t output_reg = intel_dp->output_reg;
236 struct drm_device *dev = intel_dp->base.enc.dev;
237 struct drm_i915_private *dev_priv = dev->dev_private;
/* AUX control/data registers sit at fixed offsets from the port register. */
238 uint32_t ch_ctl = output_reg + 0x10;
239 uint32_t ch_data = ch_ctl + 4;
244 uint32_t aux_clock_divider;
247 /* The clock divider is based off the hrawclk,
248 * and would like to run at 2MHz. So, take the
249 * hrawclk value and divide by 2 and use that
251 if (IS_eDP(intel_dp)) {
253 aux_clock_divider = 200; /* SNB eDP input clock at 400Mhz */
255 aux_clock_divider = 225; /* eDP input clock at 450Mhz */
256 } else if (HAS_PCH_SPLIT(dev))
257 aux_clock_divider = 62; /* IRL input clock fixed at 125Mhz */
259 aux_clock_divider = intel_hrawclk(dev) / 2;
266 /* Must try at least 3 times according to DP spec */
267 for (try = 0; try < 5; try++) {
268 /* Load the send data into the aux channel data registers */
269 for (i = 0; i < send_bytes; i += 4) {
270 uint32_t d = pack_aux(send + i, send_bytes - i);
272 I915_WRITE(ch_data + i, d);
/* SEND_BUSY both reports completion and, when written, kicks off the send. */
275 ctl = (DP_AUX_CH_CTL_SEND_BUSY |
276 DP_AUX_CH_CTL_TIME_OUT_400us |
277 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
278 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
279 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
281 DP_AUX_CH_CTL_TIME_OUT_ERROR |
282 DP_AUX_CH_CTL_RECEIVE_ERROR);
284 /* Send the command and wait for it to complete */
285 I915_WRITE(ch_ctl, ctl);
286 (void) I915_READ(ch_ctl);
289 status = I915_READ(ch_ctl);
290 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
294 /* Clear done status and any errors */
295 I915_WRITE(ch_ctl, (status |
297 DP_AUX_CH_CTL_TIME_OUT_ERROR |
298 DP_AUX_CH_CTL_RECEIVE_ERROR));
299 (void) I915_READ(ch_ctl);
300 if ((status & DP_AUX_CH_CTL_TIME_OUT_ERROR) == 0)
304 if ((status & DP_AUX_CH_CTL_DONE) == 0) {
305 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
309 /* Check for timeout or receive error.
310 * Timeouts occur when the sink is not connected
312 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
313 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
317 /* Timeouts occur when the device isn't connected, so they're
318 * "normal" -- don't fill the kernel log with these */
319 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
320 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
324 /* Unload any bytes sent back from the other side */
325 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
326 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
328 if (recv_bytes > recv_size)
329 recv_bytes = recv_size;
331 for (i = 0; i < recv_bytes; i += 4) {
332 uint32_t d = I915_READ(ch_data + i);
334 unpack_aux(d, recv + i, recv_bytes - i);
340 /* Write data to the aux channel in native mode */
/*
 * Native AUX write: a 4-byte header (command nibble, 16 bits of the
 * address, payload length - 1) followed by @send_bytes of payload.
 * Retries while the sink replies DEFER; the reply ACK is read back
 * into a single status byte.
 */
342 intel_dp_aux_native_write(struct intel_dp *intel_dp,
343 uint16_t address, uint8_t *send, int send_bytes)
352 msg[0] = AUX_NATIVE_WRITE << 4;
353 msg[1] = address >> 8;
354 msg[2] = address & 0xff;
355 msg[3] = send_bytes - 1;
356 memcpy(&msg[4], send, send_bytes);
357 msg_bytes = send_bytes + 4;
359 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
362 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
364 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
372 /* Write a single byte to the aux channel in native mode */
/* Convenience wrapper around intel_dp_aux_native_write() for one byte. */
374 intel_dp_aux_native_write_1(struct intel_dp *intel_dp,
375 uint16_t address, uint8_t byte)
377 return intel_dp_aux_native_write(intel_dp, address, &byte, 1);
380 /* read bytes from a native aux channel */
/*
 * Native AUX read: send a 4-byte read request, then copy the reply
 * payload (which follows a one-byte ACK/NACK/DEFER status) into
 * @recv.  Retries while the sink replies DEFER.
 */
382 intel_dp_aux_native_read(struct intel_dp *intel_dp,
383 uint16_t address, uint8_t *recv, int recv_bytes)
392 msg[0] = AUX_NATIVE_READ << 4;
393 msg[1] = address >> 8;
394 msg[2] = address & 0xff;
395 msg[3] = recv_bytes - 1;
/* Reply carries one status byte ahead of the data. */
398 reply_bytes = recv_bytes + 1;
401 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes,
408 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) {
409 memcpy(recv, reply + 1, ret - 1);
412 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
/*
 * i2c-over-AUX bit-channel callback used by the i2c_algo_dp_aux
 * algorithm: translate one i2c byte read/write (plus start/stop
 * state) into an AUX transaction.  MOT (middle-of-transaction) is
 * set on every message except the final STOP.
 */
420 intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
421 uint8_t write_byte, uint8_t *read_byte)
423 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
424 struct intel_dp *intel_dp = container_of(adapter,
427 uint16_t address = algo_data->address;
434 /* Set up the command byte */
435 if (mode & MODE_I2C_READ)
436 msg[0] = AUX_I2C_READ << 4;
438 msg[0] = AUX_I2C_WRITE << 4;
440 if (!(mode & MODE_I2C_STOP))
441 msg[0] |= AUX_I2C_MOT << 4;
443 msg[1] = address >> 8;
465 ret = intel_dp_aux_ch(intel_dp,
469 DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
472 switch (reply[0] & AUX_I2C_REPLY_MASK) {
473 case AUX_I2C_REPLY_ACK:
474 if (mode == MODE_I2C_READ) {
475 *read_byte = reply[1];
477 return reply_bytes - 1;
478 case AUX_I2C_REPLY_NACK:
479 DRM_DEBUG_KMS("aux_ch nack\n");
481 case AUX_I2C_REPLY_DEFER:
482 DRM_DEBUG_KMS("aux_ch defer\n");
486 DRM_ERROR("aux_ch invalid reply 0x%02x\n", reply[0]);
/*
 * Register the i2c-over-AUX adapter for this port so EDID/DDC reads
 * go through intel_dp_i2c_aux_ch().  The adapter name is copied with
 * explicit NUL-termination.
 */
493 intel_dp_i2c_init(struct intel_dp *intel_dp,
494 struct intel_connector *intel_connector, const char *name)
496 DRM_DEBUG_KMS("i2c_init %s\n", name);
497 intel_dp->algo.running = false;
498 intel_dp->algo.address = 0;
499 intel_dp->algo.aux_ch = intel_dp_i2c_aux_ch;
501 memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter));
502 intel_dp->adapter.owner = THIS_MODULE;
503 intel_dp->adapter.class = I2C_CLASS_DDC;
/* strncpy alone may not terminate; the next line guarantees it. */
504 strncpy (intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
505 intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
506 intel_dp->adapter.algo_data = &intel_dp->algo;
507 intel_dp->adapter.dev.parent = &intel_connector->base.kdev;
509 return i2c_dp_aux_add_bus(&intel_dp->adapter);
/*
 * Encoder mode_fixup hook: for eDP, substitute the fixed panel mode;
 * then search lane-count/link-bandwidth combinations (smallest first)
 * for the cheapest link that can carry the mode.  If nothing fits and
 * this is eDP, force the maximum configuration rather than fail.
 */
513 intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
514 struct drm_display_mode *adjusted_mode)
516 struct drm_device *dev = encoder->dev;
517 struct drm_i915_private *dev_priv = dev->dev_private;
518 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
519 int lane_count, clock;
520 int max_lane_count = intel_dp_max_lane_count(intel_dp);
521 int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
522 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
524 if ((IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) &&
525 dev_priv->panel_fixed_mode) {
526 intel_fixed_panel_mode(dev_priv->panel_fixed_mode, adjusted_mode);
527 intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN,
528 mode, adjusted_mode);
530 * the mode->clock is used to calculate the Data&Link M/N
531 * of the pipe. For the eDP the fixed clock should be used.
533 mode->clock = dev_priv->panel_fixed_mode->clock;
/* Lane count walks 1, 2, 4; clock index walks the bws[] table. */
536 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
537 for (clock = 0; clock <= max_clock; clock++) {
538 int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count);
540 if (intel_dp_link_required(encoder->dev, intel_dp, mode->clock)
542 intel_dp->link_bw = bws[clock];
543 intel_dp->lane_count = lane_count;
544 adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
545 DRM_DEBUG_KMS("Display port link bw %02x lane "
546 "count %d clock %d\n",
547 intel_dp->link_bw, intel_dp->lane_count,
548 adjusted_mode->clock);
554 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) {
555 /* okay we failed just pick the highest */
556 intel_dp->lane_count = max_lane_count;
557 intel_dp->link_bw = bws[max_clock];
558 adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
559 DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
560 "count %d clock %d\n",
561 intel_dp->link_bw, intel_dp->lane_count,
562 adjusted_mode->clock);
570 struct intel_dp_m_n {
/*
 * Halve both terms of a ratio until each fits in the 24 bits the
 * hardware M/N registers provide.  The ratio's value is preserved
 * (modulo truncation); values already in range are untouched.
 *
 * (Restored: the excerpt had dropped the shift statements and braces.)
 */
static void
intel_reduce_ratio(uint32_t *num, uint32_t *den)
{
	while (*num > 0xffffff || *den > 0xffffff) {
		*num >>= 1;
		*den >>= 1;
	}
}
/*
 * Compute the GMCH data M/N (bytes of pixel data per link cycle:
 * pixel_clock * bpp / 8 over link_clock * nlanes) and the link M/N
 * (pixel_clock over link_clock), each reduced to fit the 24-bit
 * hardware registers.
 */
588 intel_dp_compute_m_n(int bpp,
592 struct intel_dp_m_n *m_n)
595 m_n->gmch_m = (pixel_clock * bpp) >> 3;
596 m_n->gmch_n = link_clock * nlanes;
597 intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
598 m_n->link_m = pixel_clock;
599 m_n->link_n = link_clock;
600 intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
/*
 * Whether the DP encoder driving @crtc is a PCH-attached eDP panel.
 * Scans the encoder list for the one bound to this crtc.
 */
603 bool intel_pch_has_edp(struct drm_crtc *crtc)
605 struct drm_device *dev = crtc->dev;
606 struct drm_mode_config *mode_config = &dev->mode_config;
607 struct drm_encoder *encoder;
609 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
610 struct intel_dp *intel_dp;
612 if (encoder->crtc != crtc)
615 intel_dp = enc_to_intel_dp(encoder);
616 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT)
617 return intel_dp->is_pch_edp;
/*
 * Program the pipe's data and link M/N registers for DP.  Finds the
 * DP encoder on this crtc to learn the lane count (and, for PCH eDP,
 * the panel bpp), computes the ratios, then writes either the
 * transcoder registers (PCH split) or the pipe GMCH registers.
 */
623 intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
624 struct drm_display_mode *adjusted_mode)
626 struct drm_device *dev = crtc->dev;
627 struct drm_mode_config *mode_config = &dev->mode_config;
628 struct drm_encoder *encoder;
629 struct drm_i915_private *dev_priv = dev->dev_private;
630 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
631 int lane_count = 4, bpp = 24;
632 struct intel_dp_m_n m_n;
635 * Find the lane count in the intel_encoder private
637 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
638 struct intel_dp *intel_dp;
640 if (encoder->crtc != crtc)
643 intel_dp = enc_to_intel_dp(encoder);
644 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT) {
645 lane_count = intel_dp->lane_count;
646 if (IS_PCH_eDP(intel_dp))
647 bpp = dev_priv->edp_bpp;
653 * Compute the GMCH and Link ratios. The '3' here is
654 * the number of bytes_per_pixel post-LUT, which we always
655 * set up for 8-bits of R/G/B, or 3 bytes total.
657 intel_dp_compute_m_n(bpp, lane_count,
658 mode->clock, adjusted_mode->clock, &m_n);
660 if (HAS_PCH_SPLIT(dev)) {
661 if (intel_crtc->pipe == 0) {
662 I915_WRITE(TRANSA_DATA_M1,
663 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
665 I915_WRITE(TRANSA_DATA_N1, m_n.gmch_n);
666 I915_WRITE(TRANSA_DP_LINK_M1, m_n.link_m);
667 I915_WRITE(TRANSA_DP_LINK_N1, m_n.link_n);
669 I915_WRITE(TRANSB_DATA_M1,
670 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
672 I915_WRITE(TRANSB_DATA_N1, m_n.gmch_n);
673 I915_WRITE(TRANSB_DP_LINK_M1, m_n.link_m);
674 I915_WRITE(TRANSB_DP_LINK_N1, m_n.link_n);
677 if (intel_crtc->pipe == 0) {
678 I915_WRITE(PIPEA_GMCH_DATA_M,
679 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
681 I915_WRITE(PIPEA_GMCH_DATA_N,
683 I915_WRITE(PIPEA_DP_LINK_M, m_n.link_m);
684 I915_WRITE(PIPEA_DP_LINK_N, m_n.link_n);
686 I915_WRITE(PIPEB_GMCH_DATA_M,
687 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
689 I915_WRITE(PIPEB_GMCH_DATA_N,
691 I915_WRITE(PIPEB_DP_LINK_M, m_n.link_m);
692 I915_WRITE(PIPEB_DP_LINK_N, m_n.link_n);
/*
 * Encoder mode_set hook: build the DP port register value (sync
 * polarity, lane width, audio, enhanced framing, pipe select, eDP
 * PLL config) in intel_dp->DP, and prime link_configuration[] with
 * the DPCD values that link training will write to the sink.
 */
698 intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
699 struct drm_display_mode *adjusted_mode)
701 struct drm_device *dev = encoder->dev;
702 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
703 struct drm_crtc *crtc = intel_dp->base.enc.crtc;
704 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
706 intel_dp->DP = (DP_VOLTAGE_0_4 |
709 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
710 intel_dp->DP |= DP_SYNC_HS_HIGH;
711 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
712 intel_dp->DP |= DP_SYNC_VS_HIGH;
714 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_dp))
715 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
717 intel_dp->DP |= DP_LINK_TRAIN_OFF;
719 switch (intel_dp->lane_count) {
721 intel_dp->DP |= DP_PORT_WIDTH_1;
724 intel_dp->DP |= DP_PORT_WIDTH_2;
727 intel_dp->DP |= DP_PORT_WIDTH_4;
730 if (intel_dp->has_audio)
731 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
733 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
734 intel_dp->link_configuration[0] = intel_dp->link_bw;
735 intel_dp->link_configuration[1] = intel_dp->lane_count;
738 * Check for DPCD version > 1.1 and enhanced framing support
740 if (intel_dp->dpcd[0] >= 0x11 && (intel_dp->dpcd[2] & DP_ENHANCED_FRAME_CAP)) {
741 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
742 intel_dp->DP |= DP_ENHANCED_FRAMING;
745 /* CPT DP's pipe select is decided in TRANS_DP_CTL */
746 if (intel_crtc->pipe == 1 && !HAS_PCH_CPT(dev))
747 intel_dp->DP |= DP_PIPEB_SELECT;
749 if (IS_eDP(intel_dp)) {
750 /* don't miss out required setting for eDP */
751 intel_dp->DP |= DP_PLL_ENABLE;
752 if (adjusted_mode->clock < 200000)
753 intel_dp->DP |= DP_PLL_FREQ_160MHZ;
755 intel_dp->DP |= DP_PLL_FREQ_270MHZ;
/*
 * Power the eDP panel up via the PCH panel-power sequencer; no-op if
 * already on.  Panel reset is masked around the sequence (ILK
 * workaround) and restored afterwards.
 */
759 static void ironlake_edp_panel_on (struct drm_device *dev)
761 struct drm_i915_private *dev_priv = dev->dev_private;
764 if (I915_READ(PCH_PP_STATUS) & PP_ON)
767 pp = I915_READ(PCH_PP_CONTROL);
769 /* ILK workaround: disable reset around power sequence */
770 pp &= ~PANEL_POWER_RESET;
771 I915_WRITE(PCH_PP_CONTROL, pp);
772 POSTING_READ(PCH_PP_CONTROL);
774 pp |= PANEL_UNLOCK_REGS | POWER_TARGET_ON;
775 I915_WRITE(PCH_PP_CONTROL, pp);
/* Up to 5 seconds for the power sequencer to report PP_ON. */
777 if (wait_for(I915_READ(PCH_PP_STATUS) & PP_ON, 5000, 10))
778 DRM_ERROR("panel on wait timed out: 0x%08x\n",
779 I915_READ(PCH_PP_STATUS));
781 pp &= ~(PANEL_UNLOCK_REGS | EDP_FORCE_VDD);
782 pp |= PANEL_POWER_RESET; /* restore panel reset bit */
783 I915_WRITE(PCH_PP_CONTROL, pp);
784 POSTING_READ(PCH_PP_CONTROL);
/*
 * Power the eDP panel down via the PCH panel-power sequencer, keeping
 * VDD forced on afterwards so AUX channel transactions still work.
 */
787 static void ironlake_edp_panel_off (struct drm_device *dev)
789 struct drm_i915_private *dev_priv = dev->dev_private;
792 pp = I915_READ(PCH_PP_CONTROL);
794 /* ILK workaround: disable reset around power sequence */
795 pp &= ~PANEL_POWER_RESET;
796 I915_WRITE(PCH_PP_CONTROL, pp);
797 POSTING_READ(PCH_PP_CONTROL);
799 pp &= ~POWER_TARGET_ON;
800 I915_WRITE(PCH_PP_CONTROL, pp);
802 if (wait_for((I915_READ(PCH_PP_STATUS) & PP_ON) == 0, 5000, 10))
803 DRM_ERROR("panel off wait timed out: 0x%08x\n",
804 I915_READ(PCH_PP_STATUS));
806 /* Make sure VDD is enabled so DP AUX will work */
807 pp |= EDP_FORCE_VDD | PANEL_POWER_RESET; /* restore panel reset bit */
808 I915_WRITE(PCH_PP_CONTROL, pp);
809 POSTING_READ(PCH_PP_CONTROL);
/* Enable the eDP backlight via the PCH panel-power control register. */
812 static void ironlake_edp_backlight_on (struct drm_device *dev)
814 struct drm_i915_private *dev_priv = dev->dev_private;
818 pp = I915_READ(PCH_PP_CONTROL);
819 pp |= EDP_BLC_ENABLE;
820 I915_WRITE(PCH_PP_CONTROL, pp);
/* Disable the eDP backlight (inverse of the above). */
823 static void ironlake_edp_backlight_off (struct drm_device *dev)
825 struct drm_i915_private *dev_priv = dev->dev_private;
829 pp = I915_READ(PCH_PP_CONTROL);
830 pp &= ~EDP_BLC_ENABLE;
831 I915_WRITE(PCH_PP_CONTROL, pp);
/*
 * eDP PLL control via DP_A.
 * NOTE(review): pll_on *clears* DP_PLL_ENABLE while pll_off *sets*
 * it, which looks inverted relative to the function names — this may
 * be intentional hardware behavior for the DP_A override bit, or a
 * long-standing quirk; verify against the PRM before changing.
 */
834 static void ironlake_edp_pll_on(struct drm_encoder *encoder)
836 struct drm_device *dev = encoder->dev;
837 struct drm_i915_private *dev_priv = dev->dev_private;
841 dpa_ctl = I915_READ(DP_A);
842 dpa_ctl &= ~DP_PLL_ENABLE;
843 I915_WRITE(DP_A, dpa_ctl);
846 static void ironlake_edp_pll_off(struct drm_encoder *encoder)
848 struct drm_device *dev = encoder->dev;
849 struct drm_i915_private *dev_priv = dev->dev_private;
852 dpa_ctl = I915_READ(DP_A);
853 dpa_ctl |= DP_PLL_ENABLE;
854 I915_WRITE(DP_A, dpa_ctl);
/*
 * Encoder prepare hook (pre-modeset): for eDP, kill the backlight and
 * bring up panel power and the PLL; then take the link down if the
 * port is currently enabled so mode_set/commit can retrain cleanly.
 */
858 static void intel_dp_prepare(struct drm_encoder *encoder)
860 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
861 struct drm_device *dev = encoder->dev;
862 struct drm_i915_private *dev_priv = dev->dev_private;
863 uint32_t dp_reg = I915_READ(intel_dp->output_reg);
865 if (IS_eDP(intel_dp)) {
866 ironlake_edp_backlight_off(dev);
867 ironlake_edp_panel_on(dev);
868 ironlake_edp_pll_on(encoder);
870 if (dp_reg & DP_PORT_EN)
871 intel_dp_link_down(intel_dp);
/*
 * Encoder commit hook (post-modeset): train the link if the port is
 * not yet enabled, then restore the eDP backlight.
 */
874 static void intel_dp_commit(struct drm_encoder *encoder)
876 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
877 struct drm_device *dev = encoder->dev;
878 struct drm_i915_private *dev_priv = dev->dev_private;
879 uint32_t dp_reg = I915_READ(intel_dp->output_reg);
881 if (!(dp_reg & DP_PORT_EN)) {
882 intel_dp_link_train(intel_dp);
884 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp))
885 ironlake_edp_backlight_on(dev);
/*
 * DPMS hook: on power-down, sequence backlight -> panel -> link ->
 * PLL off; on power-up (only if the port is disabled), panel on ->
 * link training -> backlight on.  Records the requested mode.
 */
889 intel_dp_dpms(struct drm_encoder *encoder, int mode)
891 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
892 struct drm_device *dev = encoder->dev;
893 struct drm_i915_private *dev_priv = dev->dev_private;
894 uint32_t dp_reg = I915_READ(intel_dp->output_reg);
896 if (mode != DRM_MODE_DPMS_ON) {
897 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) {
898 ironlake_edp_backlight_off(dev);
899 ironlake_edp_panel_off(dev);
901 if (dp_reg & DP_PORT_EN)
902 intel_dp_link_down(intel_dp);
903 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp))
904 ironlake_edp_pll_off(encoder);
906 if (!(dp_reg & DP_PORT_EN)) {
907 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp))
908 ironlake_edp_panel_on(dev);
909 intel_dp_link_train(intel_dp);
910 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp))
911 ironlake_edp_backlight_on(dev);
914 intel_dp->dpms_mode = mode;
918 * Fetch AUX CH registers 0x202 - 0x207 which contain
919 * link status information
/* Read the 6-byte DPCD link-status block; success only if all 6
 * bytes arrived. */
922 intel_dp_get_link_status(struct intel_dp *intel_dp,
923 uint8_t link_status[DP_LINK_STATUS_SIZE])
927 ret = intel_dp_aux_native_read(intel_dp,
929 link_status, DP_LINK_STATUS_SIZE);
930 if (ret != DP_LINK_STATUS_SIZE)
/* Index the cached status block by DPCD register address. */
936 intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
939 return link_status[r - DP_LANE0_1_STATUS];
/*
 * Extract the sink's requested voltage swing for @lane from the
 * ADJUST_REQUEST registers (two lanes packed per byte; odd lanes use
 * the high nibble), shifted into DP_TRAIN_VOLTAGE_SWING_* position.
 */
943 intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
946 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
947 int s = ((lane & 1) ?
948 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
949 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
950 uint8_t l = intel_dp_link_status(link_status, i);
952 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
/* Same extraction for the requested pre-emphasis level. */
956 intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
959 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
960 int s = ((lane & 1) ?
961 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
962 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
963 uint8_t l = intel_dp_link_status(link_status, i);
965 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
/* Human-readable names for training parameters (debug output only). */
970 static char *voltage_names[] = {
971 "0.4V", "0.6V", "0.8V", "1.2V"
973 static char *pre_emph_names[] = {
974 "0dB", "3.5dB", "6dB", "9.5dB"
976 static char *link_train_names[] = {
977 "pattern 1", "pattern 2", "idle", "off"
982 * These are source-specific values; current Intel hardware supports
983 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
/* Source-side cap applied in intel_get_adjust_train(). */
985 #define I830_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_800
/*
 * Maximum pre-emphasis the source can deliver at a given voltage
 * swing: higher swings allow less pre-emphasis headroom (6dB at
 * 400/600mV, 3.5dB at 800mV, none at 1.2V).
 */
988 intel_dp_pre_emphasis_max(uint8_t voltage_swing)
990 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
991 case DP_TRAIN_VOLTAGE_SWING_400:
992 return DP_TRAIN_PRE_EMPHASIS_6;
993 case DP_TRAIN_VOLTAGE_SWING_600:
994 return DP_TRAIN_PRE_EMPHASIS_6;
995 case DP_TRAIN_VOLTAGE_SWING_800:
996 return DP_TRAIN_PRE_EMPHASIS_3_5;
997 case DP_TRAIN_VOLTAGE_SWING_1200:
999 return DP_TRAIN_PRE_EMPHASIS_0;
/*
 * Build the next training values: take the highest voltage/
 * pre-emphasis requested across all active lanes, clamp to the
 * source's limits (setting the MAX_*_REACHED flags), and apply the
 * same value to all four train_set entries.
 */
1004 intel_get_adjust_train(struct intel_dp *intel_dp,
1005 uint8_t link_status[DP_LINK_STATUS_SIZE],
1007 uint8_t train_set[4])
1013 for (lane = 0; lane < lane_count; lane++) {
1014 uint8_t this_v = intel_get_adjust_request_voltage(link_status, lane);
1015 uint8_t this_p = intel_get_adjust_request_pre_emphasis(link_status, lane);
1023 if (v >= I830_DP_VOLTAGE_MAX)
1024 v = I830_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
1026 if (p >= intel_dp_pre_emphasis_max(v))
1027 p = intel_dp_pre_emphasis_max(v) | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
1029 for (lane = 0; lane < 4; lane++)
1030 train_set[lane] = v | p;
/*
 * Translate DPCD training values (voltage swing + pre-emphasis) into
 * the pre-SNB DP port register bits.
 */
1034 intel_dp_signal_levels(uint8_t train_set, int lane_count)
1036 uint32_t signal_levels = 0;
1038 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
1039 case DP_TRAIN_VOLTAGE_SWING_400:
1041 signal_levels |= DP_VOLTAGE_0_4;
1043 case DP_TRAIN_VOLTAGE_SWING_600:
1044 signal_levels |= DP_VOLTAGE_0_6;
1046 case DP_TRAIN_VOLTAGE_SWING_800:
1047 signal_levels |= DP_VOLTAGE_0_8;
1049 case DP_TRAIN_VOLTAGE_SWING_1200:
1050 signal_levels |= DP_VOLTAGE_1_2;
1053 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
1054 case DP_TRAIN_PRE_EMPHASIS_0:
1056 signal_levels |= DP_PRE_EMPHASIS_0;
1058 case DP_TRAIN_PRE_EMPHASIS_3_5:
1059 signal_levels |= DP_PRE_EMPHASIS_3_5;
1061 case DP_TRAIN_PRE_EMPHASIS_6:
1062 signal_levels |= DP_PRE_EMPHASIS_6;
1064 case DP_TRAIN_PRE_EMPHASIS_9_5:
1065 signal_levels |= DP_PRE_EMPHASIS_9_5;
1068 return signal_levels;
1071 /* Gen6's DP voltage swing and pre-emphasis control */
/*
 * SNB eDP encodes voltage swing and pre-emphasis together; only the
 * combinations listed here are supported, everything else falls back
 * to 400mV/0dB with a debug message.
 */
1073 intel_gen6_edp_signal_levels(uint8_t train_set)
1075 switch (train_set & (DP_TRAIN_VOLTAGE_SWING_MASK|DP_TRAIN_PRE_EMPHASIS_MASK)) {
1076 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
1077 return EDP_LINK_TRAIN_400MV_0DB_SNB_B;
1078 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
1079 return EDP_LINK_TRAIN_400MV_6DB_SNB_B;
1080 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
1081 return EDP_LINK_TRAIN_600MV_3_5DB_SNB_B;
1082 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
1083 return EDP_LINK_TRAIN_800MV_0DB_SNB_B;
1085 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level\n");
1086 return EDP_LINK_TRAIN_400MV_0DB_SNB_B;
/*
 * Extract one lane's 4-bit status nibble from the link-status block
 * (two lanes per byte; odd lanes in the high nibble).
 */
1091 intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1094 int i = DP_LANE0_1_STATUS + (lane >> 1);
1095 int s = (lane & 1) * 4;
1096 uint8_t l = intel_dp_link_status(link_status, i);
1098 return (l >> s) & 0xf;
1101 /* Check for clock recovery is done on all channels */
/* True only when every active lane reports DP_LANE_CR_DONE. */
1103 intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1106 uint8_t lane_status;
1108 for (lane = 0; lane < lane_count; lane++) {
1109 lane_status = intel_get_lane_status(link_status, lane);
1110 if ((lane_status & DP_LANE_CR_DONE) == 0)
1116 /* Check to see if channel eq is done on all channels */
1117 #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
1118 DP_LANE_CHANNEL_EQ_DONE|\
1119 DP_LANE_SYMBOL_LOCKED)
/*
 * True when inter-lane alignment is done AND every active lane has
 * clock recovery, channel equalization and symbol lock.
 */
1121 intel_channel_eq_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1124 uint8_t lane_status;
1127 lane_align = intel_dp_link_status(link_status,
1128 DP_LANE_ALIGN_STATUS_UPDATED);
1129 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
1131 for (lane = 0; lane < lane_count; lane++) {
1132 lane_status = intel_get_lane_status(link_status, lane);
1133 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
/*
 * Program one training step: write the port register (pattern + drive
 * levels), wait a vblank for it to take effect, then tell the sink
 * which pattern to expect and push the per-lane drive settings over
 * AUX.
 */
1140 intel_dp_set_link_train(struct intel_dp *intel_dp,
1141 uint32_t dp_reg_value,
1142 uint8_t dp_train_pat,
1143 uint8_t train_set[4],
1146 struct drm_device *dev = intel_dp->base.enc.dev;
1147 struct drm_i915_private *dev_priv = dev->dev_private;
1148 struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.enc.crtc);
1151 I915_WRITE(intel_dp->output_reg, dp_reg_value);
1152 POSTING_READ(intel_dp->output_reg);
1154 intel_wait_for_vblank(dev, intel_crtc->pipe);
1156 intel_dp_aux_native_write_1(intel_dp,
1157 DP_TRAINING_PATTERN_SET,
1160 ret = intel_dp_aux_native_write(intel_dp,
1161 DP_TRAINING_LANE0_SET, train_set, 4);
/*
 * Full DP link training: write the link configuration to the sink,
 * run the clock-recovery loop with training pattern 1 (adjusting
 * drive levels per the sink's requests, bailing after max voltage or
 * 5 tries at the same voltage), then the channel-equalization loop
 * with pattern 2, and finally switch training off on both ends.
 */
1169 intel_dp_link_train(struct intel_dp *intel_dp)
1171 struct drm_device *dev = intel_dp->base.enc.dev;
1172 struct drm_i915_private *dev_priv = dev->dev_private;
1173 uint8_t train_set[4];
1174 uint8_t link_status[DP_LINK_STATUS_SIZE];
1177 bool clock_recovery = false;
1178 bool channel_eq = false;
1182 uint32_t DP = intel_dp->DP;
1184 /* Write the link configuration data */
1185 intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
1186 intel_dp->link_configuration,
1187 DP_LINK_CONFIGURATION_SIZE);
1190 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_dp))
1191 DP &= ~DP_LINK_TRAIN_MASK_CPT;
1193 DP &= ~DP_LINK_TRAIN_MASK;
1194 memset(train_set, 0, 4);
/* --- Phase 1: clock recovery (training pattern 1) --- */
1197 clock_recovery = false;
1199 /* Use train_set[0] to set the voltage and pre emphasis values */
1200 uint32_t signal_levels;
1201 if (IS_GEN6(dev) && IS_eDP(intel_dp)) {
1202 signal_levels = intel_gen6_edp_signal_levels(train_set[0]);
1203 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
1205 signal_levels = intel_dp_signal_levels(train_set[0], intel_dp->lane_count);
1206 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
1209 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_dp))
1210 reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
1212 reg = DP | DP_LINK_TRAIN_PAT_1;
1214 if (!intel_dp_set_link_train(intel_dp, reg,
1215 DP_TRAINING_PATTERN_1, train_set, first))
1218 /* Set training pattern 1 */
1221 if (!intel_dp_get_link_status(intel_dp, link_status))
1224 if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
1225 clock_recovery = true;
1229 /* Check to see if we've tried the max voltage */
1230 for (i = 0; i < intel_dp->lane_count; i++)
1231 if ((train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
1233 if (i == intel_dp->lane_count)
1236 /* Check to see if we've tried the same voltage 5 times */
1237 if ((train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
1243 voltage = train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
1245 /* Compute new train_set as requested by target */
1246 intel_get_adjust_train(intel_dp, link_status, intel_dp->lane_count, train_set);
1249 /* channel equalization */
/* --- Phase 2: channel equalization (training pattern 2) --- */
1253 /* Use train_set[0] to set the voltage and pre emphasis values */
1254 uint32_t signal_levels;
1256 if (IS_GEN6(dev) && IS_eDP(intel_dp)) {
1257 signal_levels = intel_gen6_edp_signal_levels(train_set[0]);
1258 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
1260 signal_levels = intel_dp_signal_levels(train_set[0], intel_dp->lane_count);
1261 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
1264 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_dp))
1265 reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
1267 reg = DP | DP_LINK_TRAIN_PAT_2;
1269 /* channel eq pattern */
1270 if (!intel_dp_set_link_train(intel_dp, reg,
1271 DP_TRAINING_PATTERN_2, train_set,
1276 if (!intel_dp_get_link_status(intel_dp, link_status))
1279 if (intel_channel_eq_ok(link_status, intel_dp->lane_count)) {
1288 /* Compute new train_set as requested by target */
1289 intel_get_adjust_train(intel_dp, link_status, intel_dp->lane_count, train_set);
/* Training finished: turn the training pattern off on both ends. */
1293 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_dp))
1294 reg = DP | DP_LINK_TRAIN_OFF_CPT;
1296 reg = DP | DP_LINK_TRAIN_OFF;
1298 I915_WRITE(intel_dp->output_reg, reg);
1299 POSTING_READ(intel_dp->output_reg);
1300 intel_dp_aux_native_write_1(intel_dp,
1301 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
/*
 * Take the link down: drop the eDP PLL enable, switch the port to the
 * idle training pattern, then disable the port entirely (with
 * TRAIN_OFF for eDP).
 */
1305 intel_dp_link_down(struct intel_dp *intel_dp)
1307 struct drm_device *dev = intel_dp->base.enc.dev;
1308 struct drm_i915_private *dev_priv = dev->dev_private;
1309 uint32_t DP = intel_dp->DP;
1311 DRM_DEBUG_KMS("\n");
1313 if (IS_eDP(intel_dp)) {
1314 DP &= ~DP_PLL_ENABLE;
1315 I915_WRITE(intel_dp->output_reg, DP);
1316 POSTING_READ(intel_dp->output_reg);
1320 if (HAS_PCH_CPT(dev) && !IS_eDP(intel_dp)) {
1321 DP &= ~DP_LINK_TRAIN_MASK_CPT;
1322 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
1323 POSTING_READ(intel_dp->output_reg);
1325 DP &= ~DP_LINK_TRAIN_MASK;
1326 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
1327 POSTING_READ(intel_dp->output_reg);
1332 if (IS_eDP(intel_dp))
1333 DP |= DP_LINK_TRAIN_OFF;
1334 I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
1335 POSTING_READ(intel_dp->output_reg);
1339 * According to DP spec
1342 * 2. Configure link according to Receiver Capabilities
1343 * 3. Use Link Training from 2.5.3.3 and 3.5.1.3
1344 * 4. Check link status on receipt of hot-plug interrupt
/*
 * Hot-plug handler: if the link status can't be read, drop the link;
 * if channel-eq has been lost, retrain.  Does nothing when the
 * encoder has no active crtc.
 */
1348 intel_dp_check_link_status(struct intel_dp *intel_dp)
1350 uint8_t link_status[DP_LINK_STATUS_SIZE];
1352 if (!intel_dp->base.enc.crtc)
1355 if (!intel_dp_get_link_status(intel_dp, link_status)) {
1356 intel_dp_link_down(intel_dp);
1360 if (!intel_channel_eq_ok(link_status, intel_dp->lane_count))
1361 intel_dp_link_train(intel_dp);
/*
 * Ironlake/PCH connector detect: probe the sink by reading the start of
 * the DPCD (receiver capability field at address 0x000) over the AUX
 * channel.  A full-length read whose first byte (DPCD revision) is
 * non-zero means a sink is present.
 */
1364 static enum drm_connector_status
1365 ironlake_dp_detect(struct drm_connector *connector)
1367 struct drm_encoder *encoder = intel_attached_encoder(connector);
1368 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1369 enum drm_connector_status status;
1371 status = connector_status_disconnected;
1372 if (intel_dp_aux_native_read(intel_dp,
1373 0x000, intel_dp->dpcd,
1374 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd))
1376 if (intel_dp->dpcd[0] != 0)
1377 status = connector_status_connected;
1379 DRM_DEBUG_KMS("DPCD: %hx%hx%hx%hx\n", intel_dp->dpcd[0],
1380 intel_dp->dpcd[1], intel_dp->dpcd[2], intel_dp->dpcd[3]);
1385 * Uses CRT_HOTPLUG_EN and CRT_HOTPLUG_STAT to detect DP connection.
1387 * \return connector_status_connected if the DP port is connected.
1388 * \return connector_status_disconnected if the DP port is disconnected.
/*
 * Connector detect for non-PCH (pre-Ironlake) hardware.  First checks
 * the port's hot-plug status bit in PORT_HOTPLUG_STAT; if the bit is
 * set, confirms the sink by reading the DPCD receiver-capability field
 * over AUX, as in ironlake_dp_detect().
 */
1390 static enum drm_connector_status
1391 intel_dp_detect(struct drm_connector *connector)
1393 struct drm_encoder *encoder = intel_attached_encoder(connector);
1394 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1395 struct drm_device *dev = intel_dp->base.enc.dev;
1396 struct drm_i915_private *dev_priv = dev->dev_private;
1398 enum drm_connector_status status;
/* Reset; has_audio may be re-derived later from EDID (see get_modes). */
1400 intel_dp->has_audio = false;
/* PCH-split platforms use the Ironlake detect path instead. */
1402 if (HAS_PCH_SPLIT(dev))
1403 return ironlake_dp_detect(connector);
/* Map the output register to its hot-plug interrupt status bit. */
1405 switch (intel_dp->output_reg) {
1407 bit = DPB_HOTPLUG_INT_STATUS;
1410 bit = DPC_HOTPLUG_INT_STATUS;
1413 bit = DPD_HOTPLUG_INT_STATUS;
/* Unrecognized output register: cannot determine the status bit. */
1416 return connector_status_unknown;
1419 temp = I915_READ(PORT_HOTPLUG_STAT);
/* Hot-plug bit clear -> nothing plugged in; skip the AUX probe. */
1421 if ((temp & bit) == 0)
1422 return connector_status_disconnected;
/* Hot-plug says present: verify via DPCD read (revision byte != 0). */
1424 status = connector_status_disconnected;
1425 if (intel_dp_aux_native_read(intel_dp,
1426 0x000, intel_dp->dpcd,
1427 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd))
1429 if (intel_dp->dpcd[0] != 0)
1430 status = connector_status_connected;
/*
 * Fill the connector's mode list.  Prefers EDID modes from the DDC bus;
 * for eDP panels with no cached panel_fixed_mode, the EDID's preferred
 * mode is latched as the fixed panel mode.  If the eDP panel supplies
 * no EDID at all, falls back to the VBT-provided fixed mode.
 */
1435 static int intel_dp_get_modes(struct drm_connector *connector)
1437 struct drm_encoder *encoder = intel_attached_encoder(connector);
1438 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1439 struct drm_device *dev = intel_dp->base.enc.dev;
1440 struct drm_i915_private *dev_priv = dev->dev_private;
1443 /* We should parse the EDID data and find out if it has an audio sink
1446 ret = intel_ddc_get_modes(connector, intel_dp->base.ddc_bus);
/*
 * eDP with EDID but no recorded fixed mode yet: remember the EDID's
 * preferred mode so later modesets can pin the panel timing to it.
 */
1448 if ((IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) &&
1449 !dev_priv->panel_fixed_mode) {
1450 struct drm_display_mode *newmode;
1451 list_for_each_entry(newmode, &connector->probed_modes,
1453 if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
1454 dev_priv->panel_fixed_mode =
1455 drm_mode_duplicate(dev, newmode);
1464 /* if eDP has no EDID, try to use fixed panel mode from VBT */
1465 if (IS_eDP(intel_dp) || IS_PCH_eDP(intel_dp)) {
1466 if (dev_priv->panel_fixed_mode != NULL) {
1467 struct drm_display_mode *mode;
/* Duplicate: the connector's probed list owns its own copy. */
1468 mode = drm_mode_duplicate(dev, dev_priv->panel_fixed_mode);
1469 drm_mode_probed_add(connector, mode);
/*
 * Connector destroy callback: remove the sysfs entry and release the
 * DRM connector state.
 */
1477 intel_dp_destroy (struct drm_connector *connector)
1479 drm_sysfs_connector_remove(connector);
1480 drm_connector_cleanup(connector);
/* Encoder helper vtable: modeset hooks invoked by the DRM CRTC helper. */
1484 static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
1485 .dpms = intel_dp_dpms,
1486 .mode_fixup = intel_dp_mode_fixup,
1487 .prepare = intel_dp_prepare,
1488 .mode_set = intel_dp_mode_set,
1489 .commit = intel_dp_commit,
/* Connector vtable: detection, probing and teardown entry points. */
1492 static const struct drm_connector_funcs intel_dp_connector_funcs = {
1493 .dpms = drm_helper_connector_dpms,
1494 .detect = intel_dp_detect,
1495 .fill_modes = drm_helper_probe_single_connector_modes,
1496 .destroy = intel_dp_destroy,
/* Connector helper vtable: mode enumeration/validation and encoder lookup. */
1499 static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
1500 .get_modes = intel_dp_get_modes,
1501 .mode_valid = intel_dp_mode_valid,
1502 .best_encoder = intel_attached_encoder,
/* Encoder vtable: only teardown is needed; shared intel cleanup is used. */
1505 static const struct drm_encoder_funcs intel_dp_enc_funcs = {
1506 .destroy = intel_encoder_destroy,
/*
 * Hot-plug IRQ callback: only re-check the link when the output is
 * actively on, to avoid retraining a powered-down port.
 */
1510 intel_dp_hot_plug(struct intel_encoder *intel_encoder)
1512 struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
1514 if (intel_dp->dpms_mode == DRM_MODE_DPMS_ON)
1515 intel_dp_check_link_status(intel_dp);
1518 /* Return which DP Port should be selected for Transcoder DP control */
/*
 * Walk every encoder attached to @crtc and return the output register
 * of the first DisplayPort encoder found (eDP is excluded by the type
 * check below).
 */
1520 intel_trans_dp_port_sel (struct drm_crtc *crtc)
1522 struct drm_device *dev = crtc->dev;
1523 struct drm_mode_config *mode_config = &dev->mode_config;
1524 struct drm_encoder *encoder;
1526 list_for_each_entry(encoder, &mode_config->encoder_list, head) {
1527 struct intel_dp *intel_dp;
/* Skip encoders driving other (or no) CRTCs. */
1529 if (encoder->crtc != crtc)
1532 intel_dp = enc_to_intel_dp(encoder);
1533 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT)
1534 return intel_dp->output_reg;
1540 /* check the VBT to see whether the eDP is on DP-D port */
/*
 * Scan the BIOS VBT child-device table for an eDP device on the DP-D
 * port; used to decide whether PCH_DP_D should be treated as eDP.
 */
1541 bool intel_dpd_is_edp(struct drm_device *dev)
1543 struct drm_i915_private *dev_priv = dev->dev_private;
1544 struct child_device_config *p_child;
/* No VBT child-device data available at all. */
1547 if (!dev_priv->child_dev_num)
1550 for (i = 0; i < dev_priv->child_dev_num; i++) {
1551 p_child = dev_priv->child_dev + i;
1553 if (p_child->dvo_port == PORT_IDPD &&
1554 p_child->device_type == DEVICE_TYPE_eDP)
1561 intel_dp_init(struct drm_device *dev, int output_reg)
1563 struct drm_i915_private *dev_priv = dev->dev_private;
1564 struct drm_connector *connector;
1565 struct intel_dp *intel_dp;
1566 struct intel_encoder *intel_encoder;
1567 struct intel_connector *intel_connector;
1568 const char *name = NULL;
1571 intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL);
1575 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
1576 if (!intel_connector) {
1580 intel_encoder = &intel_dp->base;
1582 if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
1583 if (intel_dpd_is_edp(dev))
1584 intel_dp->is_pch_edp = true;
1586 if (output_reg == DP_A || IS_PCH_eDP(intel_dp)) {
1587 type = DRM_MODE_CONNECTOR_eDP;
1588 intel_encoder->type = INTEL_OUTPUT_EDP;
1590 type = DRM_MODE_CONNECTOR_DisplayPort;
1591 intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
1594 connector = &intel_connector->base;
1595 drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
1596 drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);
1598 connector->polled = DRM_CONNECTOR_POLL_HPD;
1600 if (output_reg == DP_B || output_reg == PCH_DP_B)
1601 intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
1602 else if (output_reg == DP_C || output_reg == PCH_DP_C)
1603 intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
1604 else if (output_reg == DP_D || output_reg == PCH_DP_D)
1605 intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);
1607 if (IS_eDP(intel_dp))
1608 intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
1610 intel_encoder->crtc_mask = (1 << 0) | (1 << 1);
1611 connector->interlace_allowed = true;
1612 connector->doublescan_allowed = 0;
1614 intel_dp->output_reg = output_reg;
1615 intel_dp->has_audio = false;
1616 intel_dp->dpms_mode = DRM_MODE_DPMS_ON;
1618 drm_encoder_init(dev, &intel_encoder->enc, &intel_dp_enc_funcs,
1619 DRM_MODE_ENCODER_TMDS);
1620 drm_encoder_helper_add(&intel_encoder->enc, &intel_dp_helper_funcs);
1622 drm_mode_connector_attach_encoder(&intel_connector->base,
1623 &intel_encoder->enc);
1624 drm_sysfs_connector_add(connector);
1626 /* Set up the DDC bus. */
1627 switch (output_reg) {
1633 dev_priv->hotplug_supported_mask |=
1634 HDMIB_HOTPLUG_INT_STATUS;
1639 dev_priv->hotplug_supported_mask |=
1640 HDMIC_HOTPLUG_INT_STATUS;
1645 dev_priv->hotplug_supported_mask |=
1646 HDMID_HOTPLUG_INT_STATUS;
1651 intel_dp_i2c_init(intel_dp, intel_connector, name);
1653 intel_encoder->ddc_bus = &intel_dp->adapter;
1654 intel_encoder->hot_plug = intel_dp_hot_plug;
1656 if (output_reg == DP_A || IS_PCH_eDP(intel_dp)) {
1657 /* initialize panel mode from VBT if available for eDP */
1658 if (dev_priv->lfp_lvds_vbt_mode) {
1659 dev_priv->panel_fixed_mode =
1660 drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
1661 if (dev_priv->panel_fixed_mode) {
1662 dev_priv->panel_fixed_mode->type |=
1663 DRM_MODE_TYPE_PREFERRED;
1668 /* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
1669 * 0xd. Failure to do so will result in spurious interrupts being
1670 * generated on the port when a cable is not attached.
1672 if (IS_G4X(dev) && !IS_GM45(dev)) {
1673 u32 temp = I915_READ(PEG_BAND_GAP_DATA);
1674 I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);