Subversion Repositories: Kolibri OS

Compare Revisions: Rev 3242 → Rev 3243

/drivers/video/drm/i915/intel_dp.c
36,8 → 36,6
#include <drm/i915_drm.h>
#include "i915_drv.h"
 
#define DP_RECEIVER_CAP_SIZE 0xf
#define DP_LINK_STATUS_SIZE 6
#define DP_LINK_CHECK_TIMEOUT (10 * 1000)
 
/**
49,7 → 47,9
*/
static bool is_edp(struct intel_dp *intel_dp)
{
return intel_dp->base.type == INTEL_OUTPUT_EDP;
struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 
return intel_dig_port->base.type == INTEL_OUTPUT_EDP;
}
 
/**
76,15 → 76,16
return is_edp(intel_dp) && !is_pch_edp(intel_dp);
}
 
static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
static struct drm_device *intel_dp_to_dev(struct intel_dp *intel_dp)
{
return container_of(encoder, struct intel_dp, base.base);
struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
 
return intel_dig_port->base.base.dev;
}
 
static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
{
return container_of(intel_attached_encoder(connector),
struct intel_dp, base);
return enc_to_intel_dp(&intel_attached_encoder(connector)->base);
}
 
/**
106,8 → 107,6
return is_pch_edp(intel_dp);
}
 
static void intel_dp_start_link_train(struct intel_dp *intel_dp);
static void intel_dp_complete_link_train(struct intel_dp *intel_dp);
static void intel_dp_link_down(struct intel_dp *intel_dp);
 
void
114,13 → 113,10
intel_edp_link_config(struct intel_encoder *intel_encoder,
int *lane_num, int *link_bw)
{
struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base);
 
*lane_num = intel_dp->lane_count;
if (intel_dp->link_bw == DP_LINK_BW_1_62)
*link_bw = 162000;
else if (intel_dp->link_bw == DP_LINK_BW_2_7)
*link_bw = 270000;
*link_bw = drm_dp_bw_code_to_link_rate(intel_dp->link_bw);
}
 
int
127,28 → 123,16
intel_edp_target_clock(struct intel_encoder *intel_encoder,
struct drm_display_mode *mode)
{
struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base);
struct intel_connector *intel_connector = intel_dp->attached_connector;
 
if (intel_dp->panel_fixed_mode)
return intel_dp->panel_fixed_mode->clock;
if (intel_connector->panel.fixed_mode)
return intel_connector->panel.fixed_mode->clock;
else
return mode->clock;
}
 
static int
intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
switch (max_lane_count) {
case 1: case 2: case 4:
break;
default:
max_lane_count = 4;
}
return max_lane_count;
}
 
static int
intel_dp_max_link_bw(struct intel_dp *intel_dp)
{
int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
208,7 → 192,7
bool adjust_mode)
{
int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
int max_lanes = intel_dp_max_lane_count(intel_dp);
int max_lanes = drm_dp_max_lane_count(intel_dp->dpcd);
int max_rate, mode_rate;
 
mode_rate = intel_dp_link_required(mode->clock, 24);
234,12 → 218,14
struct drm_display_mode *mode)
{
struct intel_dp *intel_dp = intel_attached_dp(connector);
struct intel_connector *intel_connector = to_intel_connector(connector);
struct drm_display_mode *fixed_mode = intel_connector->panel.fixed_mode;
 
if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
if (is_edp(intel_dp) && fixed_mode) {
if (mode->hdisplay > fixed_mode->hdisplay)
return MODE_PANEL;
 
if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
if (mode->vdisplay > fixed_mode->vdisplay)
return MODE_PANEL;
}
 
285,6 → 271,10
struct drm_i915_private *dev_priv = dev->dev_private;
uint32_t clkcfg;
 
/* There is no CLKCFG reg in Valleyview. VLV hrawclk is 200 MHz */
if (IS_VALLEYVIEW(dev))
return 200;
 
clkcfg = I915_READ(CLKCFG);
switch (clkcfg & CLKCFG_FSB_MASK) {
case CLKCFG_FSB_400:
310,7 → 300,7
 
static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
struct drm_i915_private *dev_priv = dev->dev_private;
 
return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0;
318,7 → 308,7
 
static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
struct drm_i915_private *dev_priv = dev->dev_private;
 
return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0;
327,7 → 317,7
static void
intel_dp_check_edp(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
struct drm_i915_private *dev_priv = dev->dev_private;
 
if (!is_edp(intel_dp))
346,7 → 336,8
uint8_t *recv, int recv_size)
{
uint32_t output_reg = intel_dp->output_reg;
struct drm_device *dev = intel_dp->base.base.dev;
struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
struct drm_device *dev = intel_dig_port->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
uint32_t ch_ctl = output_reg + 0x10;
uint32_t ch_data = ch_ctl + 4;
356,6 → 347,29
uint32_t aux_clock_divider;
int try, precharge;
 
if (IS_HASWELL(dev)) {
switch (intel_dig_port->port) {
case PORT_A:
ch_ctl = DPA_AUX_CH_CTL;
ch_data = DPA_AUX_CH_DATA1;
break;
case PORT_B:
ch_ctl = PCH_DPB_AUX_CH_CTL;
ch_data = PCH_DPB_AUX_CH_DATA1;
break;
case PORT_C:
ch_ctl = PCH_DPC_AUX_CH_CTL;
ch_data = PCH_DPC_AUX_CH_DATA1;
break;
case PORT_D:
ch_ctl = PCH_DPD_AUX_CH_CTL;
ch_data = PCH_DPD_AUX_CH_DATA1;
break;
default:
BUG();
}
}
 
intel_dp_check_edp(intel_dp);
/* The clock divider is based off the hrawclk,
* and would like to run at 2MHz. So, take the
365,12 → 379,16
* clock divider.
*/
if (is_cpu_edp(intel_dp)) {
if (IS_GEN6(dev) || IS_GEN7(dev))
if (IS_HASWELL(dev))
aux_clock_divider = intel_ddi_get_cdclk_freq(dev_priv) >> 1;
else if (IS_VALLEYVIEW(dev))
aux_clock_divider = 100;
else if (IS_GEN6(dev) || IS_GEN7(dev))
aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */
else
aux_clock_divider = 225; /* eDP input clock at 450Mhz */
} else if (HAS_PCH_SPLIT(dev))
aux_clock_divider = 63; /* IRL input clock fixed at 125Mhz */
aux_clock_divider = DIV_ROUND_UP(intel_pch_rawclk(dev), 2);
else
aux_clock_divider = intel_hrawclk(dev) / 2;
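Every branch above reduces to the rule in the comment: the AUX bit clock should land near 2 MHz, so the divider is the source clock in MHz halved (the Haswell cdclk >> 1 plays the same role). A minimal sketch of that relationship, with a hypothetical helper name that is not part of the driver:

/* Hypothetical illustration only: divider = source clock in MHz, halved,
 * so that source_clock / divider comes out near the 2 MHz AUX target.
 * DIV_ROUND_UP comes from <linux/kernel.h>, as used above. */
static inline int example_aux_clock_divider(int src_clock_mhz)
{
	/* 400 MHz -> 200 (SNB/IVB eDP), 450 MHz -> 225,
	 * 200 MHz VLV hrawclk -> 100, 125 MHz PCH rawclk -> 63 (rounded up). */
	return DIV_ROUND_UP(src_clock_mhz, 2);
}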
 
642,9 → 660,6
return -EREMOTEIO;
}
 
static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp);
static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync);
 
static int
intel_dp_i2c_init(struct intel_dp *intel_dp,
struct intel_connector *intel_connector, const char *name)
670,7 → 685,7
return ret;
}
 
static bool
bool
intel_dp_mode_fixup(struct drm_encoder *encoder,
const struct drm_display_mode *mode,
struct drm_display_mode *adjusted_mode)
677,15 → 692,18
{
struct drm_device *dev = encoder->dev;
struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
struct intel_connector *intel_connector = intel_dp->attached_connector;
int lane_count, clock;
int max_lane_count = intel_dp_max_lane_count(intel_dp);
int max_lane_count = drm_dp_max_lane_count(intel_dp->dpcd);
int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
int bpp, mode_rate;
static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
 
if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN,
if (is_edp(intel_dp) && intel_connector->panel.fixed_mode) {
intel_fixed_panel_mode(intel_connector->panel.fixed_mode,
adjusted_mode);
intel_pch_panel_fitting(dev,
intel_connector->panel.fitting_mode,
mode, adjusted_mode);
}
 
762,21 → 780,23
struct drm_display_mode *adjusted_mode)
{
struct drm_device *dev = crtc->dev;
struct intel_encoder *encoder;
struct intel_encoder *intel_encoder;
struct intel_dp *intel_dp;
struct drm_i915_private *dev_priv = dev->dev_private;
struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
int lane_count = 4;
struct intel_dp_m_n m_n;
int pipe = intel_crtc->pipe;
enum transcoder cpu_transcoder = intel_crtc->cpu_transcoder;
 
/*
* Find the lane count in the intel_encoder private
*/
for_each_encoder_on_crtc(dev, crtc, encoder) {
struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
for_each_encoder_on_crtc(dev, crtc, intel_encoder) {
intel_dp = enc_to_intel_dp(&intel_encoder->base);
 
if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
intel_dp->base.type == INTEL_OUTPUT_EDP)
if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT ||
intel_encoder->type == INTEL_OUTPUT_EDP)
{
lane_count = intel_dp->lane_count;
break;
791,17 → 811,25
intel_dp_compute_m_n(intel_crtc->bpp, lane_count,
mode->clock, adjusted_mode->clock, &m_n);
 
if (HAS_PCH_SPLIT(dev)) {
I915_WRITE(TRANSDATA_M1(pipe),
((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
m_n.gmch_m);
if (IS_HASWELL(dev)) {
I915_WRITE(PIPE_DATA_M1(cpu_transcoder),
TU_SIZE(m_n.tu) | m_n.gmch_m);
I915_WRITE(PIPE_DATA_N1(cpu_transcoder), m_n.gmch_n);
I915_WRITE(PIPE_LINK_M1(cpu_transcoder), m_n.link_m);
I915_WRITE(PIPE_LINK_N1(cpu_transcoder), m_n.link_n);
} else if (HAS_PCH_SPLIT(dev)) {
I915_WRITE(TRANSDATA_M1(pipe), TU_SIZE(m_n.tu) | m_n.gmch_m);
I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n);
I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m);
I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n);
} else if (IS_VALLEYVIEW(dev)) {
I915_WRITE(PIPE_DATA_M1(pipe), TU_SIZE(m_n.tu) | m_n.gmch_m);
I915_WRITE(PIPE_DATA_N1(pipe), m_n.gmch_n);
I915_WRITE(PIPE_LINK_M1(pipe), m_n.link_m);
I915_WRITE(PIPE_LINK_N1(pipe), m_n.link_n);
} else {
I915_WRITE(PIPE_GMCH_DATA_M(pipe),
((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
m_n.gmch_m);
TU_SIZE(m_n.tu) | m_n.gmch_m);
I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
808,6 → 836,21
}
}
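The m_n values programmed above encode two ratios: data M/N, roughly the pixel data rate in bytes over the link's byte rate (link clock times lane count), and link M/N, the pixel clock over the link symbol clock. A hedged, self-contained sketch of that arithmetic with made-up numbers, not the driver's implementation:

/* Illustrative only: the ratios behind the M/N registers, using hypothetical
 * values for a 1920x1080@60 mode on a 4-lane 2.7 GHz link at 24 bpp. */
static void example_dp_m_n_ratios(void)
{
	unsigned int pixel_clock = 148500;	/* kHz */
	unsigned int link_clock  = 270000;	/* kHz, DP_LINK_BW_2_7 */
	unsigned int bpp = 24, lanes = 4;

	/* data M/N ~ bytes of pixel data per second / link byte rate */
	unsigned int data_m = pixel_clock * bpp / 8;	/* 445500 */
	unsigned int data_n = link_clock * lanes;	/* 1080000 */

	/* link M/N ~ pixel clock / link symbol clock */
	unsigned int link_m = pixel_clock;
	unsigned int link_n = link_clock;

	(void)data_m; (void)data_n; (void)link_m; (void)link_n;
}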
 
void intel_dp_init_link_config(struct intel_dp *intel_dp)
{
memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
intel_dp->link_configuration[0] = intel_dp->link_bw;
intel_dp->link_configuration[1] = intel_dp->lane_count;
intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B;
/*
* Check for DPCD version > 1.1 and enhanced framing support
*/
if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
(intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
}
}
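For orientation, the nine bytes filled in here mirror a contiguous block of sink DPCD registers; intel_dp_start_link_train() later writes the whole buffer in one AUX transfer starting at DP_LINK_BW_SET (0x100 in the DisplayPort spec), so the indices map as sketched below.

/* link_configuration[] index -> sink DPCD register (offsets per the DP spec):
 *   [0] -> 0x100 LINK_BW_SET              (link_bw code, e.g. 0x0a = 2.7 GHz)
 *   [1] -> 0x101 LANE_COUNT_SET           (lane count | ENHANCED_FRAME_EN)
 *   [8] -> 0x108 MAIN_LINK_CHANNEL_CODING (DP_SET_ANSI_8B10B)
 * The bytes in between are left zero by the memset above. */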
 
static void
intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
struct drm_display_mode *adjusted_mode)
815,7 → 858,7
struct drm_device *dev = encoder->dev;
struct drm_i915_private *dev_priv = dev->dev_private;
struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
struct drm_crtc *crtc = intel_dp->base.base.crtc;
struct drm_crtc *crtc = encoder->crtc;
struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
 
/*
860,21 → 903,12
intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
intel_write_eld(encoder, adjusted_mode);
}
memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
intel_dp->link_configuration[0] = intel_dp->link_bw;
intel_dp->link_configuration[1] = intel_dp->lane_count;
intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B;
/*
* Check for DPCD version > 1.1 and enhanced framing support
*/
if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
(intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
}
 
intel_dp_init_link_config(intel_dp);
 
/* Split out the IBX/CPU vs CPT settings */
 
if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) {
if (is_cpu_edp(intel_dp) && IS_GEN7(dev) && !IS_VALLEYVIEW(dev)) {
if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
intel_dp->DP |= DP_SYNC_HS_HIGH;
if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
931,7 → 965,7
u32 mask,
u32 value)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
struct drm_i915_private *dev_priv = dev->dev_private;
 
DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
978,9 → 1012,9
return control;
}
 
static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
struct drm_i915_private *dev_priv = dev->dev_private;
u32 pp;
 
1019,7 → 1053,7
 
static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
struct drm_i915_private *dev_priv = dev->dev_private;
u32 pp;
 
1037,8 → 1071,18
}
}
 
static void ironlake_panel_vdd_work(struct work_struct *__work)
{
// struct intel_dp *intel_dp = container_of(to_delayed_work(__work),
// struct intel_dp, panel_vdd_work);
// struct drm_device *dev = intel_dp_to_dev(intel_dp);
//
// mutex_lock(&dev->mode_config.mutex);
// ironlake_panel_vdd_off_sync(intel_dp);
// mutex_unlock(&dev->mode_config.mutex);
}
 
static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
{
if (!is_edp(intel_dp))
return;
1061,9 → 1105,9
}
}
 
static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
void ironlake_edp_panel_on(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
struct drm_i915_private *dev_priv = dev->dev_private;
u32 pp;
 
1103,9 → 1147,9
}
}
 
static void ironlake_edp_panel_off(struct intel_dp *intel_dp)
void ironlake_edp_panel_off(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
struct drm_i915_private *dev_priv = dev->dev_private;
u32 pp;
 
1128,10 → 1172,12
ironlake_wait_panel_off(intel_dp);
}
 
static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
struct drm_device *dev = intel_dig_port->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
int pipe = to_intel_crtc(intel_dig_port->base.base.crtc)->pipe;
u32 pp;
 
if (!is_edp(intel_dp))
1149,11 → 1195,13
pp |= EDP_BLC_ENABLE;
I915_WRITE(PCH_PP_CONTROL, pp);
POSTING_READ(PCH_PP_CONTROL);
 
intel_panel_enable_backlight(dev, pipe);
}
 
static void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
struct drm_i915_private *dev_priv = dev->dev_private;
u32 pp;
 
1160,6 → 1208,8
if (!is_edp(intel_dp))
return;
 
intel_panel_disable_backlight(dev);
 
DRM_DEBUG_KMS("\n");
pp = ironlake_get_pp_control(dev_priv);
pp &= ~EDP_BLC_ENABLE;
1170,8 → 1220,9
 
static void ironlake_edp_pll_on(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_crtc *crtc = intel_dp->base.base.crtc;
struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
struct drm_crtc *crtc = intel_dig_port->base.base.crtc;
struct drm_device *dev = crtc->dev;
struct drm_i915_private *dev_priv = dev->dev_private;
u32 dpa_ctl;
 
1195,8 → 1246,9
 
static void ironlake_edp_pll_off(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_crtc *crtc = intel_dp->base.base.crtc;
struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
struct drm_crtc *crtc = intel_dig_port->base.base.crtc;
struct drm_device *dev = crtc->dev;
struct drm_i915_private *dev_priv = dev->dev_private;
u32 dpa_ctl;
 
1218,7 → 1270,7
}
 
/* If the sink supports it, try to set the power state appropriately */
static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
{
int ret, i;
 
1288,10 → 1340,11
return true;
}
}
 
DRM_DEBUG_KMS("No pipe for dp port 0x%x found\n",
intel_dp->output_reg);
}
 
DRM_DEBUG_KMS("No pipe for dp port 0x%x found\n", intel_dp->output_reg);
 
return true;
}
 
1386,38 → 1439,6
DP_LINK_STATUS_SIZE);
}
 
static uint8_t
intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
int r)
{
return link_status[r - DP_LANE0_1_STATUS];
}
 
static uint8_t
intel_get_adjust_request_voltage(uint8_t adjust_request[2],
int lane)
{
int s = ((lane & 1) ?
DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
uint8_t l = adjust_request[lane>>1];
 
return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}
 
static uint8_t
intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2],
int lane)
{
int s = ((lane & 1) ?
DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
uint8_t l = adjust_request[lane>>1];
 
return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}
 
 
#if 0
static char *voltage_names[] = {
"0.4V", "0.6V", "0.8V", "1.2V"
1438,7 → 1459,7
static uint8_t
intel_dp_voltage_max(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
 
if (IS_GEN7(dev) && is_cpu_edp(intel_dp))
return DP_TRAIN_VOLTAGE_SWING_800;
1451,12 → 1472,24
static uint8_t
intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
 
if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
if (IS_HASWELL(dev)) {
switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
case DP_TRAIN_VOLTAGE_SWING_400:
return DP_TRAIN_PRE_EMPHASIS_9_5;
case DP_TRAIN_VOLTAGE_SWING_600:
return DP_TRAIN_PRE_EMPHASIS_6;
case DP_TRAIN_VOLTAGE_SWING_800:
return DP_TRAIN_PRE_EMPHASIS_3_5;
case DP_TRAIN_VOLTAGE_SWING_1200:
default:
return DP_TRAIN_PRE_EMPHASIS_0;
}
} else if (IS_GEN7(dev) && is_cpu_edp(intel_dp) && !IS_VALLEYVIEW(dev)) {
switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
case DP_TRAIN_VOLTAGE_SWING_400:
return DP_TRAIN_PRE_EMPHASIS_6;
case DP_TRAIN_VOLTAGE_SWING_600:
case DP_TRAIN_VOLTAGE_SWING_800:
return DP_TRAIN_PRE_EMPHASIS_3_5;
1484,13 → 1517,12
uint8_t v = 0;
uint8_t p = 0;
int lane;
uint8_t *adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS);
uint8_t voltage_max;
uint8_t preemph_max;
 
for (lane = 0; lane < intel_dp->lane_count; lane++) {
uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane);
uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane);
uint8_t this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
uint8_t this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
 
if (this_v > v)
v = this_v;
1607,64 → 1639,88
}
}
 
static uint8_t
intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
int lane)
/* Gen7.5's (HSW) DP voltage swing and pre-emphasis control */
static uint32_t
intel_dp_signal_levels_hsw(uint8_t train_set)
{
int s = (lane & 1) * 4;
uint8_t l = link_status[lane>>1];
int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
DP_TRAIN_PRE_EMPHASIS_MASK);
switch (signal_levels) {
case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
return DDI_BUF_EMP_400MV_0DB_HSW;
case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
return DDI_BUF_EMP_400MV_3_5DB_HSW;
case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
return DDI_BUF_EMP_400MV_6DB_HSW;
case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_9_5:
return DDI_BUF_EMP_400MV_9_5DB_HSW;
 
return (l >> s) & 0xf;
}
case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
return DDI_BUF_EMP_600MV_0DB_HSW;
case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
return DDI_BUF_EMP_600MV_3_5DB_HSW;
case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6:
return DDI_BUF_EMP_600MV_6DB_HSW;
 
/* Check for clock recovery is done on all channels */
static bool
intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
{
int lane;
uint8_t lane_status;
 
for (lane = 0; lane < lane_count; lane++) {
lane_status = intel_get_lane_status(link_status, lane);
if ((lane_status & DP_LANE_CR_DONE) == 0)
return false;
case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
return DDI_BUF_EMP_800MV_0DB_HSW;
case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
return DDI_BUF_EMP_800MV_3_5DB_HSW;
default:
DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
"0x%x\n", signal_levels);
return DDI_BUF_EMP_400MV_0DB_HSW;
}
return true;
}
 
/* Check to see if channel eq is done on all channels */
#define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
DP_LANE_CHANNEL_EQ_DONE|\
DP_LANE_SYMBOL_LOCKED)
static bool
intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
uint8_t lane_align;
uint8_t lane_status;
int lane;
 
lane_align = intel_dp_link_status(link_status,
DP_LANE_ALIGN_STATUS_UPDATED);
if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
return false;
for (lane = 0; lane < intel_dp->lane_count; lane++) {
lane_status = intel_get_lane_status(link_status, lane);
if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
return false;
}
return true;
}
 
static bool
intel_dp_set_link_train(struct intel_dp *intel_dp,
uint32_t dp_reg_value,
uint8_t dp_train_pat)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
struct drm_device *dev = intel_dig_port->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
enum port port = intel_dig_port->port;
int ret;
uint32_t temp;
 
if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
if (IS_HASWELL(dev)) {
temp = I915_READ(DP_TP_CTL(port));
 
if (dp_train_pat & DP_LINK_SCRAMBLING_DISABLE)
temp |= DP_TP_CTL_SCRAMBLE_DISABLE;
else
temp &= ~DP_TP_CTL_SCRAMBLE_DISABLE;
 
temp &= ~DP_TP_CTL_LINK_TRAIN_MASK;
switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
case DP_TRAINING_PATTERN_DISABLE:
temp |= DP_TP_CTL_LINK_TRAIN_IDLE;
I915_WRITE(DP_TP_CTL(port), temp);
 
if (wait_for((I915_READ(DP_TP_STATUS(port)) &
DP_TP_STATUS_IDLE_DONE), 1))
DRM_ERROR("Timed out waiting for DP idle patterns\n");
 
temp &= ~DP_TP_CTL_LINK_TRAIN_MASK;
temp |= DP_TP_CTL_LINK_TRAIN_NORMAL;
 
break;
case DP_TRAINING_PATTERN_1:
temp |= DP_TP_CTL_LINK_TRAIN_PAT1;
break;
case DP_TRAINING_PATTERN_2:
temp |= DP_TP_CTL_LINK_TRAIN_PAT2;
break;
case DP_TRAINING_PATTERN_3:
temp |= DP_TP_CTL_LINK_TRAIN_PAT3;
break;
}
I915_WRITE(DP_TP_CTL(port), temp);
 
} else if (HAS_PCH_CPT(dev) &&
(IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
dp_reg_value &= ~DP_LINK_TRAIN_MASK_CPT;
 
switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
1724,10 → 1780,11
}
 
/* Enable corresponding port and start training pattern 1 */
static void
void
intel_dp_start_link_train(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_encoder *encoder = &dp_to_dig_port(intel_dp)->base.base;
struct drm_device *dev = encoder->dev;
int i;
uint8_t voltage;
bool clock_recovery = false;
1734,6 → 1791,9
int voltage_tries, loop_tries;
uint32_t DP = intel_dp->DP;
 
if (IS_HASWELL(dev))
intel_ddi_prepare_link_retrain(encoder);
 
/* Write the link configuration data */
intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
intel_dp->link_configuration,
1751,8 → 1811,11
uint8_t link_status[DP_LINK_STATUS_SIZE];
uint32_t signal_levels;
 
 
if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
if (IS_HASWELL(dev)) {
signal_levels = intel_dp_signal_levels_hsw(
intel_dp->train_set[0]);
DP = (DP & ~DDI_BUF_EMP_MASK) | signal_levels;
} else if (IS_GEN7(dev) && is_cpu_edp(intel_dp) && !IS_VALLEYVIEW(dev)) {
signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
1760,23 → 1823,24
DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
} else {
signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels);
DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
}
DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n",
signal_levels);
 
/* Set training pattern 1 */
if (!intel_dp_set_link_train(intel_dp, DP,
DP_TRAINING_PATTERN_1 |
DP_LINK_SCRAMBLING_DISABLE))
break;
/* Set training pattern 1 */
 
udelay(100);
drm_dp_link_train_clock_recovery_delay(intel_dp->dpcd);
if (!intel_dp_get_link_status(intel_dp, link_status)) {
DRM_ERROR("failed to get link status\n");
break;
}
 
if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
if (drm_dp_clock_recovery_ok(link_status, intel_dp->lane_count)) {
DRM_DEBUG_KMS("clock recovery OK\n");
clock_recovery = true;
break;
1815,10 → 1879,10
intel_dp->DP = DP;
}
 
static void
void
intel_dp_complete_link_train(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
bool channel_eq = false;
int tries, cr_tries;
uint32_t DP = intel_dp->DP;
1838,7 → 1902,10
break;
}
 
if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
if (IS_HASWELL(dev)) {
signal_levels = intel_dp_signal_levels_hsw(intel_dp->train_set[0]);
DP = (DP & ~DDI_BUF_EMP_MASK) | signal_levels;
} else if (IS_GEN7(dev) && is_cpu_edp(intel_dp) && !IS_VALLEYVIEW(dev)) {
signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
1855,18 → 1922,18
DP_LINK_SCRAMBLING_DISABLE))
break;
 
udelay(400);
drm_dp_link_train_channel_eq_delay(intel_dp->dpcd);
if (!intel_dp_get_link_status(intel_dp, link_status))
break;
 
/* Make sure clock is still ok */
if (!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
if (!drm_dp_clock_recovery_ok(link_status, intel_dp->lane_count)) {
intel_dp_start_link_train(intel_dp);
cr_tries++;
continue;
}
 
if (intel_channel_eq_ok(intel_dp, link_status)) {
if (drm_dp_channel_eq_ok(link_status, intel_dp->lane_count)) {
channel_eq = true;
break;
}
1885,6 → 1952,9
++tries;
}
 
if (channel_eq)
DRM_DEBUG_KMS("Channel EQ done. DP Training successfull\n");
 
intel_dp_set_link_train(intel_dp, DP, DP_TRAINING_PATTERN_DISABLE);
}
 
1891,10 → 1961,29
static void
intel_dp_link_down(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
struct drm_device *dev = intel_dig_port->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
uint32_t DP = intel_dp->DP;
 
/*
* DDI code has a strict mode set sequence and we should try to respect
* it, otherwise we might hang the machine in many different ways. So we
* really should be disabling the port only on a complete crtc_disable
* sequence. This function is just called under two conditions on DDI
* code:
* - Link train failed while doing crtc_enable, and on this case we
* really should respect the mode set sequence and wait for a
* crtc_disable.
* - Someone turned the monitor off and intel_dp_check_link_status
* called us. We don't need to disable the whole port on this case, so
* when someone turns the monitor on again,
* intel_ddi_prepare_link_retrain will take care of redoing the link
* train.
*/
if (IS_HASWELL(dev))
return;
 
if (WARN_ON((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0))
return;
 
1913,7 → 2002,7
 
if (HAS_PCH_IBX(dev) &&
I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
struct drm_crtc *crtc = intel_dp->base.base.crtc;
struct drm_crtc *crtc = intel_dig_port->base.base.crtc;
 
/* Hardware workaround: leaving our transcoder select
* set to transcoder B while it's off will prevent the
2014,7 → 2103,7
intel_dp_handle_test_request(struct intel_dp *intel_dp)
{
/* NAK by default */
intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_ACK);
intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_NAK);
}
 
/*
2026,16 → 2115,17
* 4. Check link status on receipt of hot-plug interrupt
*/
 
static void
void
intel_dp_check_link_status(struct intel_dp *intel_dp)
{
struct intel_encoder *intel_encoder = &dp_to_dig_port(intel_dp)->base;
u8 sink_irq_vector;
u8 link_status[DP_LINK_STATUS_SIZE];
 
if (!intel_dp->base.connectors_active)
if (!intel_encoder->connectors_active)
return;
 
if (WARN_ON(!intel_dp->base.base.crtc))
if (WARN_ON(!intel_encoder->base.crtc))
return;
 
/* Try to read receiver status if the link appears to be up */
2064,9 → 2154,9
DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
}
 
if (!intel_channel_eq_ok(intel_dp, link_status)) {
if (!drm_dp_channel_eq_ok(link_status, intel_dp->lane_count)) {
DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
drm_get_encoder_name(&intel_dp->base.base));
drm_get_encoder_name(&intel_encoder->base));
intel_dp_start_link_train(intel_dp);
intel_dp_complete_link_train(intel_dp);
}
2115,11 → 2205,12
static enum drm_connector_status
ironlake_dp_detect(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp_to_dev(intel_dp);
enum drm_connector_status status;
 
/* Can't disconnect eDP, but you can close the lid... */
if (is_edp(intel_dp)) {
status = intel_panel_detect(intel_dp->base.base.dev);
status = intel_panel_detect(dev);
if (status == connector_status_unknown)
status = connector_status_connected;
return status;
2131,7 → 2222,7
static enum drm_connector_status
g4x_dp_detect(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_device *dev = intel_dp_to_dev(intel_dp);
struct drm_i915_private *dev_priv = dev->dev_private;
uint32_t bit;
 
2158,44 → 2249,45
static struct edid *
intel_dp_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter)
{
struct intel_dp *intel_dp = intel_attached_dp(connector);
struct intel_connector *intel_connector = to_intel_connector(connector);
 
/* use cached edid if we have one */
if (intel_connector->edid) {
struct edid *edid;
int size;
 
if (is_edp(intel_dp)) {
if (!intel_dp->edid)
/* invalid edid */
if (IS_ERR(intel_connector->edid))
return NULL;
 
size = (intel_dp->edid->extensions + 1) * EDID_LENGTH;
size = (intel_connector->edid->extensions + 1) * EDID_LENGTH;
edid = kmalloc(size, GFP_KERNEL);
if (!edid)
return NULL;
 
memcpy(edid, intel_dp->edid, size);
memcpy(edid, intel_connector->edid, size);
return edid;
}
 
edid = drm_get_edid(connector, adapter);
return edid;
return drm_get_edid(connector, adapter);
}
 
static int
intel_dp_get_edid_modes(struct drm_connector *connector, struct i2c_adapter *adapter)
{
struct intel_dp *intel_dp = intel_attached_dp(connector);
int ret;
struct intel_connector *intel_connector = to_intel_connector(connector);
 
if (is_edp(intel_dp)) {
drm_mode_connector_update_edid_property(connector,
intel_dp->edid);
ret = drm_add_edid_modes(connector, intel_dp->edid);
drm_edid_to_eld(connector,
intel_dp->edid);
return intel_dp->edid_mode_count;
/* use cached edid if we have one */
if (intel_connector->edid) {
/* invalid edid */
if (IS_ERR(intel_connector->edid))
return 0;
 
return intel_connector_update_modes(connector,
intel_connector->edid);
}
 
ret = intel_ddc_get_modes(connector, adapter);
return ret;
return intel_ddc_get_modes(connector, adapter);
}
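Both helpers above lean on the same caching convention for intel_connector->edid, which intel_dp_init_connector() sets up further down for eDP panels; summarized here for reference:

/* Cached-EDID states of intel_connector->edid (set in intel_dp_init_connector):
 *   NULL             - nothing cached, probe the DDC/AUX adapter normally
 *   ERR_PTR(-ENOENT) - eDP panel returned no EDID at init time; don't re-probe
 *   ERR_PTR(-EINVAL) - an EDID was read but drm_add_edid_modes() found no modes
 *   valid pointer    - cached copy: duplicated for callers, fed to update_modes
 */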
 
 
2209,9 → 2301,12
intel_dp_detect(struct drm_connector *connector, bool force)
{
struct intel_dp *intel_dp = intel_attached_dp(connector);
struct drm_device *dev = intel_dp->base.base.dev;
struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
struct intel_encoder *intel_encoder = &intel_dig_port->base;
struct drm_device *dev = connector->dev;
enum drm_connector_status status;
struct edid *edid = NULL;
char dpcd_hex_dump[sizeof(intel_dp->dpcd) * 3];
 
intel_dp->has_audio = false;
 
2220,10 → 2315,9
else
status = g4x_dp_detect(intel_dp);
 
DRM_DEBUG_KMS("DPCD: %02hx%02hx%02hx%02hx%02hx%02hx%02hx%02hx\n",
intel_dp->dpcd[0], intel_dp->dpcd[1], intel_dp->dpcd[2],
intel_dp->dpcd[3], intel_dp->dpcd[4], intel_dp->dpcd[5],
intel_dp->dpcd[6], intel_dp->dpcd[7]);
// hex_dump_to_buffer(intel_dp->dpcd, sizeof(intel_dp->dpcd),
// 32, 1, dpcd_hex_dump, sizeof(dpcd_hex_dump), false);
// DRM_DEBUG_KMS("DPCD: %s\n", dpcd_hex_dump);
 
if (status != connector_status_connected)
return status;
2230,9 → 2324,8
 
intel_dp_probe_oui(intel_dp);
 
/*
if (intel_dp->force_audio) {
intel_dp->has_audio = intel_dp->force_audio > 0;
if (intel_dp->force_audio != HDMI_AUDIO_AUTO) {
intel_dp->has_audio = (intel_dp->force_audio == HDMI_AUDIO_ON);
} else {
edid = intel_dp_get_edid(connector, &intel_dp->adapter);
if (edid) {
2240,7 → 2333,9
kfree(edid);
}
}
*/
 
if (intel_encoder->type != INTEL_OUTPUT_EDP)
intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
return connector_status_connected;
}
 
2247,8 → 2342,8
static int intel_dp_get_modes(struct drm_connector *connector)
{
struct intel_dp *intel_dp = intel_attached_dp(connector);
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
struct intel_connector *intel_connector = to_intel_connector(connector);
struct drm_device *dev = connector->dev;
int ret;
 
/* We should parse the EDID data and find out if it has an audio sink
2255,35 → 2350,15
*/
 
ret = intel_dp_get_edid_modes(connector, &intel_dp->adapter);
if (ret) {
if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) {
struct drm_display_mode *newmode;
list_for_each_entry(newmode, &connector->probed_modes,
head) {
if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) {
intel_dp->panel_fixed_mode =
drm_mode_duplicate(dev, newmode);
break;
}
}
}
if (ret)
return ret;
}
 
/* if eDP has no EDID, try to use fixed panel mode from VBT */
if (is_edp(intel_dp)) {
/* initialize panel mode from VBT if available for eDP */
if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) {
intel_dp->panel_fixed_mode =
drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
if (intel_dp->panel_fixed_mode) {
intel_dp->panel_fixed_mode->type |=
DRM_MODE_TYPE_PREFERRED;
}
}
if (intel_dp->panel_fixed_mode) {
/* if eDP has no EDID, fall back to fixed mode */
if (is_edp(intel_dp) && intel_connector->panel.fixed_mode) {
struct drm_display_mode *mode;
mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
mode = drm_mode_duplicate(dev,
intel_connector->panel.fixed_mode);
if (mode) {
drm_mode_probed_add(connector, mode);
return 1;
}
2291,10 → 2366,22
return 0;
}
 
static bool
intel_dp_detect_audio(struct drm_connector *connector)
{
struct intel_dp *intel_dp = intel_attached_dp(connector);
struct edid *edid;
bool has_audio = false;
 
edid = intel_dp_get_edid(connector, &intel_dp->adapter);
if (edid) {
has_audio = drm_detect_monitor_audio(edid);
kfree(edid);
}
 
return has_audio;
}
 
 
static int
intel_dp_set_property(struct drm_connector *connector,
struct drm_property *property,
2301,10 → 2388,12
uint64_t val)
{
struct drm_i915_private *dev_priv = connector->dev->dev_private;
struct intel_dp *intel_dp = intel_attached_dp(connector);
struct intel_connector *intel_connector = to_intel_connector(connector);
struct intel_encoder *intel_encoder = intel_attached_encoder(connector);
struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base);
int ret;
 
ret = drm_connector_property_set_value(connector, property, val);
ret = drm_object_property_set_value(&connector->base, property, val);
if (ret)
return ret;
#if 0
2338,11 → 2427,27
}
#endif
 
if (is_edp(intel_dp) &&
property == connector->dev->mode_config.scaling_mode_property) {
if (val == DRM_MODE_SCALE_NONE) {
DRM_DEBUG_KMS("no scaling not supported\n");
return -EINVAL;
}
 
if (intel_connector->panel.fitting_mode == val) {
/* the eDP scaling property is not changed */
return 0;
}
intel_connector->panel.fitting_mode = val;
 
goto done;
}
 
return -EINVAL;
 
done:
if (intel_dp->base.base.crtc) {
struct drm_crtc *crtc = intel_dp->base.base.crtc;
if (intel_encoder->base.crtc) {
struct drm_crtc *crtc = intel_encoder->base.crtc;
intel_set_mode(crtc, &crtc->mode,
crtc->x, crtc->y, crtc->fb);
}
2355,9 → 2460,15
{
struct drm_device *dev = connector->dev;
struct intel_dp *intel_dp = intel_attached_dp(connector);
struct intel_connector *intel_connector = to_intel_connector(connector);
 
if (is_edp(intel_dp))
if (!IS_ERR_OR_NULL(intel_connector->edid))
kfree(intel_connector->edid);
 
if (is_edp(intel_dp)) {
intel_panel_destroy_backlight(dev);
intel_panel_fini(&intel_connector->panel);
}
 
drm_sysfs_connector_remove(connector);
drm_connector_cleanup(connector);
2364,17 → 2475,18
kfree(connector);
}
 
static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
struct intel_digital_port *intel_dig_port = enc_to_dig_port(encoder);
struct intel_dp *intel_dp = &intel_dig_port->dp;
 
// i2c_del_adapter(&intel_dp->adapter);
i2c_del_adapter(&intel_dp->adapter);
drm_encoder_cleanup(encoder);
if (is_edp(intel_dp)) {
// cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
ironlake_panel_vdd_off_sync(intel_dp);
}
kfree(intel_dp);
kfree(intel_dig_port);
}
 
static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
2404,7 → 2516,7
static void
intel_dp_hot_plug(struct intel_encoder *intel_encoder)
{
struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base);
 
intel_dp_check_link_status(intel_dp);
}
2414,13 → 2526,14
intel_trans_dp_port_sel(struct drm_crtc *crtc)
{
struct drm_device *dev = crtc->dev;
struct intel_encoder *encoder;
struct intel_encoder *intel_encoder;
struct intel_dp *intel_dp;
 
for_each_encoder_on_crtc(dev, crtc, encoder) {
struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
for_each_encoder_on_crtc(dev, crtc, intel_encoder) {
intel_dp = enc_to_intel_dp(&intel_encoder->base);
 
if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
intel_dp->base.type == INTEL_OUTPUT_EDP)
if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT ||
intel_encoder->type == INTEL_OUTPUT_EDP)
return intel_dp->output_reg;
}
 
2450,79 → 2563,205
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
struct intel_connector *intel_connector = to_intel_connector(connector);
 
intel_attach_force_audio_property(connector);
intel_attach_broadcast_rgb_property(connector);
 
if (is_edp(intel_dp)) {
drm_mode_create_scaling_mode_property(connector->dev);
drm_object_attach_property(
&connector->base,
connector->dev->mode_config.scaling_mode_property,
DRM_MODE_SCALE_ASPECT);
intel_connector->panel.fitting_mode = DRM_MODE_SCALE_ASPECT;
}
}
 
static void
intel_dp_init_panel_power_sequencer(struct drm_device *dev,
struct intel_dp *intel_dp,
struct edp_power_seq *out)
{
struct drm_i915_private *dev_priv = dev->dev_private;
struct edp_power_seq cur, vbt, spec, final;
u32 pp_on, pp_off, pp_div, pp;
 
/* Workaround: Need to write PP_CONTROL with the unlock key as
* the very first thing. */
pp = ironlake_get_pp_control(dev_priv);
I915_WRITE(PCH_PP_CONTROL, pp);
 
pp_on = I915_READ(PCH_PP_ON_DELAYS);
pp_off = I915_READ(PCH_PP_OFF_DELAYS);
pp_div = I915_READ(PCH_PP_DIVISOR);
 
/* Pull timing values out of registers */
cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
PANEL_POWER_UP_DELAY_SHIFT;
 
cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
PANEL_LIGHT_ON_DELAY_SHIFT;
 
cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
PANEL_LIGHT_OFF_DELAY_SHIFT;
 
cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
PANEL_POWER_DOWN_DELAY_SHIFT;
 
cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;
 
DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);
 
vbt = dev_priv->edp.pps;
 
/* Upper limits from eDP 1.3 spec. Note that we use the clunky units of
* our hw here, which are all in 100usec. */
spec.t1_t3 = 210 * 10;
spec.t8 = 50 * 10; /* no limit for t8, use t7 instead */
spec.t9 = 50 * 10; /* no limit for t9, make it symmetric with t8 */
spec.t10 = 500 * 10;
/* This one is special and actually in units of 100ms, but zero
* based in the hw (so we need to add 100 ms). But the sw vbt
* table multiplies it with 1000 to make it in units of 100usec,
* too. */
spec.t11_t12 = (510 + 100) * 10;
 
DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);
 
/* Use the max of the register settings and vbt. If both are
* unset, fall back to the spec limits. */
#define assign_final(field) final.field = (max(cur.field, vbt.field) == 0 ? \
spec.field : \
max(cur.field, vbt.field))
assign_final(t1_t3);
assign_final(t8);
assign_final(t9);
assign_final(t10);
assign_final(t11_t12);
#undef assign_final
 
#define get_delay(field) (DIV_ROUND_UP(final.field, 10))
intel_dp->panel_power_up_delay = get_delay(t1_t3);
intel_dp->backlight_on_delay = get_delay(t8);
intel_dp->backlight_off_delay = get_delay(t9);
intel_dp->panel_power_down_delay = get_delay(t10);
intel_dp->panel_power_cycle_delay = get_delay(t11_t12);
#undef get_delay
 
DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
intel_dp->panel_power_cycle_delay);
 
DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
 
if (out)
*out = final;
}
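All of the intermediate bookkeeping above stays in the hardware's 100 µs units (the PP_DIVISOR cycle field is in 100 ms steps, hence the * 1000 when reading it back); only get_delay() converts to milliseconds for the intel_dp fields. Plugging in the spec fallbacks gives, purely for illustration:

/* Worked example with the spec fallback values above (arithmetic only):
 *   t1_t3   = 210 * 10         = 2100 (x 100us) -> DIV_ROUND_UP(2100, 10) = 210 ms
 *   t10     = 500 * 10         = 5000 (x 100us) -> 500 ms
 *   t11_t12 = (510 + 100) * 10 = 6100 (x 100us) -> 610 ms power cycle delay
 */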
 
static void
intel_dp_init_panel_power_sequencer_registers(struct drm_device *dev,
struct intel_dp *intel_dp,
struct edp_power_seq *seq)
{
struct drm_i915_private *dev_priv = dev->dev_private;
u32 pp_on, pp_off, pp_div;
 
/* And finally store the new values in the power sequencer. */
pp_on = (seq->t1_t3 << PANEL_POWER_UP_DELAY_SHIFT) |
(seq->t8 << PANEL_LIGHT_ON_DELAY_SHIFT);
pp_off = (seq->t9 << PANEL_LIGHT_OFF_DELAY_SHIFT) |
(seq->t10 << PANEL_POWER_DOWN_DELAY_SHIFT);
/* Compute the divisor for the pp clock, simply match the Bspec
* formula. */
pp_div = ((100 * intel_pch_rawclk(dev))/2 - 1)
<< PP_REFERENCE_DIVIDER_SHIFT;
pp_div |= (DIV_ROUND_UP(seq->t11_t12, 1000)
<< PANEL_POWER_CYCLE_DELAY_SHIFT);
 
/* Haswell doesn't have any port selection bits for the panel
* power sequencer any more. */
if (HAS_PCH_IBX(dev) || HAS_PCH_CPT(dev)) {
if (is_cpu_edp(intel_dp))
pp_on |= PANEL_POWER_PORT_DP_A;
else
pp_on |= PANEL_POWER_PORT_DP_D;
}
 
I915_WRITE(PCH_PP_ON_DELAYS, pp_on);
I915_WRITE(PCH_PP_OFF_DELAYS, pp_off);
I915_WRITE(PCH_PP_DIVISOR, pp_div);
 
DRM_DEBUG_KMS("panel power sequencer register settings: PP_ON %#x, PP_OFF %#x, PP_DIV %#x\n",
I915_READ(PCH_PP_ON_DELAYS),
I915_READ(PCH_PP_OFF_DELAYS),
I915_READ(PCH_PP_DIVISOR));
}
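To make the Bspec divisor arithmetic concrete, assume a 125 MHz PCH rawclk (a typical value, used here purely as an illustration) and the 610 ms power cycle delay from the example above:

/* Illustrative numbers only, assuming intel_pch_rawclk(dev) == 125 (MHz):
 *   reference divider field = (100 * 125) / 2 - 1      = 6249
 *   cycle delay field       = DIV_ROUND_UP(6100, 1000) = 7
 *                             (610 ms rounded up to 100 ms hardware units)
 */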
 
void
intel_dp_init(struct drm_device *dev, int output_reg, enum port port)
intel_dp_init_connector(struct intel_digital_port *intel_dig_port,
struct intel_connector *intel_connector)
{
struct drm_connector *connector = &intel_connector->base;
struct intel_dp *intel_dp = &intel_dig_port->dp;
struct intel_encoder *intel_encoder = &intel_dig_port->base;
struct drm_device *dev = intel_encoder->base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
struct drm_connector *connector;
struct intel_dp *intel_dp;
struct intel_encoder *intel_encoder;
struct intel_connector *intel_connector;
struct drm_display_mode *fixed_mode = NULL;
struct edp_power_seq power_seq = { 0 };
enum port port = intel_dig_port->port;
const char *name = NULL;
int type;
 
intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL);
if (!intel_dp)
return;
 
intel_dp->output_reg = output_reg;
intel_dp->port = port;
/* Preserve the current hw state. */
intel_dp->DP = I915_READ(intel_dp->output_reg);
intel_dp->attached_connector = intel_connector;
 
intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
if (!intel_connector) {
kfree(intel_dp);
return;
}
intel_encoder = &intel_dp->base;
 
if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
if (HAS_PCH_SPLIT(dev) && port == PORT_D)
if (intel_dpd_is_edp(dev))
intel_dp->is_pch_edp = true;
 
if (output_reg == DP_A || is_pch_edp(intel_dp)) {
/*
* FIXME : We need to initialize built-in panels before external panels.
* For X0, DP_C is fixed as eDP. Revisit this as part of VLV eDP cleanup
*/
if (IS_VALLEYVIEW(dev) && port == PORT_C) {
type = DRM_MODE_CONNECTOR_eDP;
intel_encoder->type = INTEL_OUTPUT_EDP;
} else if (port == PORT_A || is_pch_edp(intel_dp)) {
type = DRM_MODE_CONNECTOR_eDP;
intel_encoder->type = INTEL_OUTPUT_EDP;
} else {
/* The intel_encoder->type value may be INTEL_OUTPUT_UNKNOWN for
* DDI or INTEL_OUTPUT_DISPLAYPORT for the older gens, so don't
* rewrite it.
*/
type = DRM_MODE_CONNECTOR_DisplayPort;
intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
}
 
connector = &intel_connector->base;
drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);
 
connector->polled = DRM_CONNECTOR_POLL_HPD;
 
intel_encoder->cloneable = false;
 
// INIT_DELAYED_WORK(&intel_dp->panel_vdd_work,
// ironlake_panel_vdd_work);
 
intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
 
connector->interlace_allowed = true;
connector->doublescan_allowed = 0;
 
drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
DRM_MODE_ENCODER_TMDS);
drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);
INIT_DELAYED_WORK(&intel_dp->panel_vdd_work,
ironlake_panel_vdd_work);
 
intel_connector_attach_encoder(intel_connector, intel_encoder);
drm_sysfs_connector_add(connector);
 
intel_encoder->enable = intel_enable_dp;
intel_encoder->pre_enable = intel_pre_enable_dp;
intel_encoder->disable = intel_disable_dp;
intel_encoder->post_disable = intel_post_disable_dp;
intel_encoder->get_hw_state = intel_dp_get_hw_state;
if (IS_HASWELL(dev))
intel_connector->get_hw_state = intel_ddi_connector_get_hw_state;
else
intel_connector->get_hw_state = intel_connector_get_hw_state;
 
 
/* Set up the DDC bus. */
switch (port) {
case PORT_A:
2545,66 → 2784,15
break;
}
 
/* Cache some DPCD data in the eDP case */
if (is_edp(intel_dp)) {
struct edp_power_seq cur, vbt;
u32 pp_on, pp_off, pp_div;
if (is_edp(intel_dp))
intel_dp_init_panel_power_sequencer(dev, intel_dp, &power_seq);
 
pp_on = I915_READ(PCH_PP_ON_DELAYS);
pp_off = I915_READ(PCH_PP_OFF_DELAYS);
pp_div = I915_READ(PCH_PP_DIVISOR);
 
if (!pp_on || !pp_off || !pp_div) {
DRM_INFO("bad panel power sequencing delays, disabling panel\n");
intel_dp_encoder_destroy(&intel_dp->base.base);
intel_dp_destroy(&intel_connector->base);
return;
}
 
/* Pull timing values out of registers */
cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
PANEL_POWER_UP_DELAY_SHIFT;
 
cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
PANEL_LIGHT_ON_DELAY_SHIFT;
 
cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
PANEL_LIGHT_OFF_DELAY_SHIFT;
 
cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
PANEL_POWER_DOWN_DELAY_SHIFT;
 
cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;
 
DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);
 
vbt = dev_priv->edp.pps;
 
DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);
 
#define get_delay(field) ((max(cur.field, vbt.field) + 9) / 10)
 
intel_dp->panel_power_up_delay = get_delay(t1_t3);
intel_dp->backlight_on_delay = get_delay(t8);
intel_dp->backlight_off_delay = get_delay(t9);
intel_dp->panel_power_down_delay = get_delay(t10);
intel_dp->panel_power_cycle_delay = get_delay(t11_t12);
 
DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
intel_dp->panel_power_cycle_delay);
 
DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
}
 
intel_dp_i2c_init(intel_dp, intel_connector, name);
 
/* Cache DPCD and EDID for edp. */
if (is_edp(intel_dp)) {
bool ret;
struct drm_display_mode *scan;
struct edid *edid;
 
ironlake_edp_panel_vdd_on(intel_dp);
2619,29 → 2807,51
} else {
/* if this fails, presume the device is a ghost */
DRM_INFO("failed to retrieve link info, disabling eDP\n");
intel_dp_encoder_destroy(&intel_dp->base.base);
intel_dp_destroy(&intel_connector->base);
intel_dp_encoder_destroy(&intel_encoder->base);
intel_dp_destroy(connector);
return;
}
 
/* We now know it's not a ghost, init power sequence regs. */
intel_dp_init_panel_power_sequencer_registers(dev, intel_dp,
&power_seq);
 
ironlake_edp_panel_vdd_on(intel_dp);
edid = drm_get_edid(connector, &intel_dp->adapter);
if (edid) {
drm_mode_connector_update_edid_property(connector,
edid);
intel_dp->edid_mode_count =
drm_add_edid_modes(connector, edid);
if (drm_add_edid_modes(connector, edid)) {
drm_mode_connector_update_edid_property(connector, edid);
drm_edid_to_eld(connector, edid);
intel_dp->edid = edid;
} else {
kfree(edid);
edid = ERR_PTR(-EINVAL);
}
} else {
edid = ERR_PTR(-ENOENT);
}
intel_connector->edid = edid;
 
/* prefer fixed mode from EDID if available */
list_for_each_entry(scan, &connector->probed_modes, head) {
if ((scan->type & DRM_MODE_TYPE_PREFERRED)) {
fixed_mode = drm_mode_duplicate(dev, scan);
break;
}
}
 
/* fallback to VBT if available for eDP */
if (!fixed_mode && dev_priv->lfp_lvds_vbt_mode) {
fixed_mode = drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
if (fixed_mode)
fixed_mode->type |= DRM_MODE_TYPE_PREFERRED;
}
 
ironlake_edp_panel_vdd_off(intel_dp, false);
}
 
intel_encoder->hot_plug = intel_dp_hot_plug;
 
if (is_edp(intel_dp)) {
dev_priv->int_edp_connector = connector;
intel_panel_setup_backlight(dev);
intel_panel_init(&intel_connector->panel, fixed_mode);
intel_panel_setup_backlight(connector);
}
 
intel_dp_add_properties(intel_dp, connector);
2655,3 → 2865,45
I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
}
}
 
void
intel_dp_init(struct drm_device *dev, int output_reg, enum port port)
{
struct intel_digital_port *intel_dig_port;
struct intel_encoder *intel_encoder;
struct drm_encoder *encoder;
struct intel_connector *intel_connector;
 
intel_dig_port = kzalloc(sizeof(struct intel_digital_port), GFP_KERNEL);
if (!intel_dig_port)
return;
 
intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
if (!intel_connector) {
kfree(intel_dig_port);
return;
}
 
intel_encoder = &intel_dig_port->base;
encoder = &intel_encoder->base;
 
drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
DRM_MODE_ENCODER_TMDS);
drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);
 
intel_encoder->enable = intel_enable_dp;
intel_encoder->pre_enable = intel_pre_enable_dp;
intel_encoder->disable = intel_disable_dp;
intel_encoder->post_disable = intel_post_disable_dp;
intel_encoder->get_hw_state = intel_dp_get_hw_state;
 
intel_dig_port->port = port;
intel_dig_port->dp.output_reg = output_reg;
 
intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
intel_encoder->cloneable = false;
intel_encoder->hot_plug = intel_dp_hot_plug;
 
intel_dp_init_connector(intel_dig_port, intel_connector);
}