Subversion Repositories: KolibriOS
Compare Revisions: Rev 2341 → Rev 2342

/drivers/video/drm/i915/intel_dp.c
36,7 → 36,7
#include "i915_drv.h"
#include "drm_dp_helper.h"
 
 
#define DP_RECEIVER_CAP_SIZE 0xf
#define DP_LINK_STATUS_SIZE 6
#define DP_LINK_CHECK_TIMEOUT (10 * 1000)
 
53,12 → 53,18
int dpms_mode;
uint8_t link_bw;
uint8_t lane_count;
uint8_t dpcd[8];
uint8_t dpcd[DP_RECEIVER_CAP_SIZE];
struct i2c_adapter adapter;
struct i2c_algo_dp_aux_data algo;
bool is_pch_edp;
uint8_t train_set[4];
uint8_t link_status[DP_LINK_STATUS_SIZE];
int panel_power_up_delay;
int panel_power_down_delay;
int panel_power_cycle_delay;
int backlight_on_delay;
int backlight_off_delay;
struct drm_display_mode *panel_fixed_mode; /* for eDP */
bool want_panel_vdd;
};
 
/**
86,6 → 92,17
return intel_dp->is_pch_edp;
}
 
/**
* is_cpu_edp - is the port on the CPU and attached to an eDP panel?
* @intel_dp: DP struct
*
* Returns true if the given DP struct corresponds to a CPU eDP port.
*/
static bool is_cpu_edp(struct intel_dp *intel_dp)
{
return is_edp(intel_dp) && !is_pch_edp(intel_dp);
}
 
static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
{
return container_of(encoder, struct intel_dp, base.base);
136,10 → 153,7
static int
intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
int max_lane_count = 4;
 
if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
switch (max_lane_count) {
case 1: case 2: case 4:
break;
146,7 → 160,6
default:
max_lane_count = 4;
}
}
return max_lane_count;
}
 
175,18 → 188,36
return 162000;
}
 
/* I think this is a fiction */
/*
* The units on the numbers in the next two are... bizarre. Examples will
* make it clearer; this one parallels an example in the eDP spec.
*
* intel_dp_max_data_rate for one lane of 2.7GHz evaluates as:
*
* 270000 * 1 * 8 / 10 == 216000
*
* The actual data capacity of that configuration is 2.16Gbit/s, so the
* units are decakilobits. ->clock in a drm_display_mode is in kilohertz -
* or equivalently, kilopixels per second - so for 1680x1050R it'd be
* 119000. At 18bpp that's 2142000 kilobits per second.
*
* Thus the strange-looking division by 10 in intel_dp_link_required, to
* get the result in decakilobits instead of kilobits.
*/
 
static int
intel_dp_link_required(struct drm_device *dev, struct intel_dp *intel_dp, int pixel_clock)
intel_dp_link_required(struct intel_dp *intel_dp, int pixel_clock, int check_bpp)
{
struct drm_crtc *crtc = intel_dp->base.base.crtc;
struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
int bpp = 24;
 
if (intel_crtc)
if (check_bpp)
bpp = check_bpp;
else if (intel_crtc)
bpp = intel_crtc->bpp;
 
return (pixel_clock * bpp + 7) / 8;
return (pixel_clock * bpp + 9) / 10;
}
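
/*
 * Editor's sketch (not part of the driver): standalone arithmetic for the
 * "decakilobit" bookkeeping described in the comment above.  Function names
 * are hypothetical; the constants are the ones used in that comment.
 */
#include <stdio.h>

/* Link capacity in decakilobits/s: 8b/10b coding carries 8 data bits per 10 link bits. */
static int example_max_data_rate(int link_clock_khz, int lanes)
{
    return (link_clock_khz * lanes * 8) / 10;
}

/* Bandwidth a mode needs, in the same units; the "+ 9" rounds the division up. */
static int example_link_required(int pixel_clock_khz, int bpp)
{
    return (pixel_clock_khz * bpp + 9) / 10;
}

int main(void)
{
    /* One lane at 2.7 GHz: 270000 * 1 * 8 / 10 == 216000, i.e. 2.16 Gbit/s. */
    int avail = example_max_data_rate(270000, 1);
    /* 1680x1050 reduced blanking: 119000 kHz pixel clock at 18 bpp. */
    int need = example_link_required(119000, 18);

    printf("available %d, required %d -> %s\n",
           avail, need, need <= avail ? "fits" : "does not fit");
    return 0;
}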
 
static int
200,25 → 231,29
struct drm_display_mode *mode)
{
struct intel_dp *intel_dp = intel_attached_dp(connector);
struct drm_device *dev = connector->dev;
struct drm_i915_private *dev_priv = dev->dev_private;
int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
int max_lanes = intel_dp_max_lane_count(intel_dp);
int max_rate, mode_rate;
 
if (is_edp(intel_dp) && dev_priv->panel_fixed_mode) {
if (mode->hdisplay > dev_priv->panel_fixed_mode->hdisplay)
if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
return MODE_PANEL;
 
if (mode->vdisplay > dev_priv->panel_fixed_mode->vdisplay)
if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
return MODE_PANEL;
}
 
/* only refuse the mode on non eDP since we have seen some weird eDP panels
which are outside spec tolerances but somehow work by magic */
if (!is_edp(intel_dp) &&
(intel_dp_link_required(connector->dev, intel_dp, mode->clock)
> intel_dp_max_data_rate(max_link_clock, max_lanes)))
mode_rate = intel_dp_link_required(intel_dp, mode->clock, 0);
max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);
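/*
 * Editor's note: the block below is the new fallback path.  If the mode
 * does not fit at the default 24 bpp, the bandwidth check is retried at
 * 18 bpp (6 bpc); when that fits, the mode is kept and tagged with
 * INTEL_MODE_DP_FORCE_6BPC instead of being rejected.
 */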
 
if (mode_rate > max_rate) {
mode_rate = intel_dp_link_required(intel_dp,
mode->clock, 18);
if (mode_rate > max_rate)
return MODE_CLOCK_HIGH;
else
mode->private_flags |= INTEL_MODE_DP_FORCE_6BPC;
}
 
if (mode->clock < 10000)
return MODE_CLOCK_LOW;
279,6 → 314,38
}
}
 
static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
 
return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0;
}
 
static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
 
return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0;
}
 
static void
intel_dp_check_edp(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
 
if (!is_edp(intel_dp))
return;
if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
WARN(1, "eDP powered off while attempting aux channel communication.\n");
DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
I915_READ(PCH_PP_STATUS),
I915_READ(PCH_PP_CONTROL));
}
}
 
static int
intel_dp_aux_ch(struct intel_dp *intel_dp,
uint8_t *send, int send_bytes,
295,6 → 362,7
uint32_t aux_clock_divider;
int try, precharge;
 
intel_dp_check_edp(intel_dp);
/* The clock divider is based off the hrawclk,
* and would like to run at 2MHz. So, take the
* hrawclk value and divide by 2 and use that
302,9 → 370,9
* Note that PCH attached eDP panels should use a 125MHz input
* clock divider.
*/
if (is_edp(intel_dp) && !is_pch_edp(intel_dp)) {
if (IS_GEN6(dev))
aux_clock_divider = 200; /* SNB eDP input clock at 400Mhz */
if (is_cpu_edp(intel_dp)) {
if (IS_GEN6(dev) || IS_GEN7(dev))
aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */
else
aux_clock_divider = 225; /* eDP input clock at 450Mhz */
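/* Editor's note: both dividers target the ~2 MHz AUX clock mentioned
 * above: 400 MHz / 200 = 450 MHz / 225 = 2 MHz. */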
} else if (HAS_PCH_SPLIT(dev))
408,6 → 476,7
int msg_bytes;
uint8_t ack;
 
intel_dp_check_edp(intel_dp);
if (send_bytes > 16)
return -1;
msg[0] = AUX_NATIVE_WRITE << 4;
450,6 → 519,7
uint8_t ack;
int ret;
 
intel_dp_check_edp(intel_dp);
msg[0] = AUX_NATIVE_READ << 4;
msg[1] = address >> 8;
msg[2] = address & 0xff;
493,6 → 563,7
int reply_bytes;
int ret;
 
intel_dp_check_edp(intel_dp);
/* Set up the command byte */
if (mode & MODE_I2C_READ)
msg[0] = AUX_I2C_READ << 4;
573,10 → 644,15
return -EREMOTEIO;
}
 
static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp);
static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync);
 
static int
intel_dp_i2c_init(struct intel_dp *intel_dp,
struct intel_connector *intel_connector, const char *name)
{
int ret;
 
DRM_DEBUG_KMS("i2c_init %s\n", name);
intel_dp->algo.running = false;
intel_dp->algo.address = 0;
590,7 → 666,10
intel_dp->adapter.algo_data = &intel_dp->algo;
intel_dp->adapter.dev.parent = &intel_connector->base.kdev;
 
return i2c_dp_aux_add_bus(&intel_dp->adapter);
ironlake_edp_panel_vdd_on(intel_dp);
ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
ironlake_edp_panel_vdd_off(intel_dp, false);
return ret;
}
 
static bool
598,15 → 677,15
struct drm_display_mode *adjusted_mode)
{
struct drm_device *dev = encoder->dev;
struct drm_i915_private *dev_priv = dev->dev_private;
struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
int lane_count, clock;
int max_lane_count = intel_dp_max_lane_count(intel_dp);
int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
int bpp = mode->private_flags & INTEL_MODE_DP_FORCE_6BPC ? 18 : 0;
static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
 
if (is_edp(intel_dp) && dev_priv->panel_fixed_mode) {
intel_fixed_panel_mode(dev_priv->panel_fixed_mode, adjusted_mode);
if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN,
mode, adjusted_mode);
/*
613,7 → 692,7
* the mode->clock is used to calculate the Data&Link M/N
* of the pipe. For the eDP the fixed clock should be used.
*/
mode->clock = dev_priv->panel_fixed_mode->clock;
mode->clock = intel_dp->panel_fixed_mode->clock;
}
 
for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
620,7 → 699,7
for (clock = 0; clock <= max_clock; clock++) {
int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count);
 
if (intel_dp_link_required(encoder->dev, intel_dp, mode->clock)
if (intel_dp_link_required(intel_dp, mode->clock, bpp)
<= link_avail) {
intel_dp->link_bw = bws[clock];
intel_dp->lane_count = lane_count;
634,19 → 713,6
}
}
 
if (is_edp(intel_dp)) {
/* okay we failed just pick the highest */
intel_dp->lane_count = max_lane_count;
intel_dp->link_bw = bws[max_clock];
adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
"count %d clock %d\n",
intel_dp->link_bw, intel_dp->lane_count,
adjusted_mode->clock);
 
return true;
}
 
return false;
}
 
706,12 → 772,11
continue;
 
intel_dp = enc_to_intel_dp(encoder);
if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT) {
if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
intel_dp->base.type == INTEL_OUTPUT_EDP)
{
lane_count = intel_dp->lane_count;
break;
} else if (is_edp(intel_dp)) {
lane_count = dev_priv->edp.lanes;
break;
}
}
 
740,28 → 805,54
}
}
 
static void ironlake_edp_pll_on(struct drm_encoder *encoder);
static void ironlake_edp_pll_off(struct drm_encoder *encoder);
 
static void
intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
struct drm_display_mode *adjusted_mode)
{
struct drm_device *dev = encoder->dev;
struct drm_i915_private *dev_priv = dev->dev_private;
struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
struct drm_crtc *crtc = intel_dp->base.base.crtc;
struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
 
intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
intel_dp->DP |= intel_dp->color_range;
/* Turn on the eDP PLL if needed */
if (is_edp(intel_dp)) {
if (!is_pch_edp(intel_dp))
ironlake_edp_pll_on(encoder);
else
ironlake_edp_pll_off(encoder);
}
 
if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
intel_dp->DP |= DP_SYNC_HS_HIGH;
if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
intel_dp->DP |= DP_SYNC_VS_HIGH;
/*
* There are four kinds of DP registers:
*
* IBX PCH
* SNB CPU
* IVB CPU
* CPT PCH
*
* IBX PCH and CPU are the same for almost everything,
* except that the CPU DP PLL is configured in this
* register
*
* CPT PCH is quite different, having many bits moved
* to the TRANS_DP_CTL register instead. That
* configuration happens (oddly) in ironlake_pch_enable
*/
 
if (HAS_PCH_CPT(dev) && !is_edp(intel_dp))
intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
else
intel_dp->DP |= DP_LINK_TRAIN_OFF;
/* Preserve the BIOS-computed detected bit. This is
* supposed to be read-only.
*/
intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;
intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
 
/* Handle DP bits in common between all three register formats */
 
intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
 
switch (intel_dp->lane_count) {
case 1:
intel_dp->DP |= DP_PORT_WIDTH_1;
773,14 → 864,16
intel_dp->DP |= DP_PORT_WIDTH_4;
break;
}
if (intel_dp->has_audio)
if (intel_dp->has_audio) {
DRM_DEBUG_DRIVER("Enabling DP audio on pipe %c\n",
pipe_name(intel_crtc->pipe));
intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
 
intel_write_eld(encoder, adjusted_mode);
}
memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
intel_dp->link_configuration[0] = intel_dp->link_bw;
intel_dp->link_configuration[1] = intel_dp->lane_count;
intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B;
 
/*
* Check for DPCD version > 1.1 and enhanced framing support
*/
787,14 → 880,44
if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
(intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
intel_dp->DP |= DP_ENHANCED_FRAMING;
}
 
/* CPT DP's pipe select is decided in TRANS_DP_CTL */
if (intel_crtc->pipe == 1 && !HAS_PCH_CPT(dev))
/* Split out the IBX/CPU vs CPT settings */
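/* Editor's note: the three branches below match the register families
 * listed in the comment above: GEN7 (IVB) CPU eDP first, then IBX PCH
 * ports and SNB CPU eDP, and finally CPT PCH ports. */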
 
if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) {
if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
intel_dp->DP |= DP_SYNC_HS_HIGH;
if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
intel_dp->DP |= DP_SYNC_VS_HIGH;
intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
 
if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
intel_dp->DP |= DP_ENHANCED_FRAMING;
 
intel_dp->DP |= intel_crtc->pipe << 29;
 
/* don't miss out required setting for eDP */
intel_dp->DP |= DP_PLL_ENABLE;
if (adjusted_mode->clock < 200000)
intel_dp->DP |= DP_PLL_FREQ_160MHZ;
else
intel_dp->DP |= DP_PLL_FREQ_270MHZ;
} else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) {
intel_dp->DP |= intel_dp->color_range;
 
if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
intel_dp->DP |= DP_SYNC_HS_HIGH;
if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
intel_dp->DP |= DP_SYNC_VS_HIGH;
intel_dp->DP |= DP_LINK_TRAIN_OFF;
 
if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
intel_dp->DP |= DP_ENHANCED_FRAMING;
 
if (intel_crtc->pipe == 1)
intel_dp->DP |= DP_PIPEB_SELECT;
 
if (is_edp(intel_dp) && !is_pch_edp(intel_dp)) {
if (is_cpu_edp(intel_dp)) {
/* don't miss out required setting for eDP */
intel_dp->DP |= DP_PLL_ENABLE;
if (adjusted_mode->clock < 200000)
802,8 → 925,71
else
intel_dp->DP |= DP_PLL_FREQ_270MHZ;
}
} else {
intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
}
}
 
#define IDLE_ON_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
#define IDLE_ON_VALUE (PP_ON | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_ON_IDLE)
 
#define IDLE_OFF_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK)
#define IDLE_OFF_VALUE (0 | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)
 
#define IDLE_CYCLE_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
#define IDLE_CYCLE_VALUE (0 | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE)
 
static void ironlake_wait_panel_status(struct intel_dp *intel_dp,
u32 mask,
u32 value)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
 
DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
mask, value,
I915_READ(PCH_PP_STATUS),
I915_READ(PCH_PP_CONTROL));
 
if (_wait_for((I915_READ(PCH_PP_STATUS) & mask) == value, 5000, 10)) {
DRM_ERROR("Panel status timeout: status %08x control %08x\n",
I915_READ(PCH_PP_STATUS),
I915_READ(PCH_PP_CONTROL));
}
}
 
static void ironlake_wait_panel_on(struct intel_dp *intel_dp)
{
DRM_DEBUG_KMS("Wait for panel power on\n");
ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
}
 
static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
{
DRM_DEBUG_KMS("Wait for panel power off time\n");
ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
}
 
static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp)
{
DRM_DEBUG_KMS("Wait for panel power cycle\n");
ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
}
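
/*
 * Editor's sketch (not driver code): the generic poll-with-timeout pattern
 * that _wait_for()/ironlake_wait_panel_status() implement (the driver polls
 * every 10 ms for up to 5000 ms).  All names and parameters here are
 * hypothetical.
 */
#include <stdbool.h>
#include <stdint.h>
#include <unistd.h>

static bool example_wait_for_status(uint32_t (*read_status)(void),
                                    uint32_t mask, uint32_t value,
                                    unsigned int timeout_ms,
                                    unsigned int poll_ms)
{
    unsigned int waited = 0;

    while ((read_status() & mask) != value) {
        if (waited >= timeout_ms)
            return false;           /* caller reports the timeout */
        usleep(poll_ms * 1000);     /* sleep between polls */
        waited += poll_ms;
    }
    return true;                    /* status reached the wanted state */
}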
 
 
/* Read the current pp_control value, unlocking the register if it
* is locked
*/
 
static u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv)
{
u32 control = I915_READ(PCH_PP_CONTROL);
 
control &= ~PANEL_UNLOCK_MASK;
control |= PANEL_UNLOCK_REGS;
return control;
}
 
static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
810,98 → 996,155
struct drm_i915_private *dev_priv = dev->dev_private;
u32 pp;
 
/*
* If the panel wasn't on, make sure there's not a currently
* active PP sequence before enabling AUX VDD.
*/
if (!(I915_READ(PCH_PP_STATUS) & PP_ON))
msleep(dev_priv->panel_t3);
if (!is_edp(intel_dp))
return;
DRM_DEBUG_KMS("Turn eDP VDD on\n");
 
pp = I915_READ(PCH_PP_CONTROL);
WARN(intel_dp->want_panel_vdd,
"eDP VDD already requested on\n");
 
intel_dp->want_panel_vdd = true;
 
if (ironlake_edp_have_panel_vdd(intel_dp)) {
DRM_DEBUG_KMS("eDP VDD already on\n");
return;
}
 
if (!ironlake_edp_have_panel_power(intel_dp))
ironlake_wait_panel_power_cycle(intel_dp);
 
pp = ironlake_get_pp_control(dev_priv);
pp |= EDP_FORCE_VDD;
I915_WRITE(PCH_PP_CONTROL, pp);
POSTING_READ(PCH_PP_CONTROL);
DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));
 
/*
* If the panel wasn't on, delay before accessing aux channel
*/
if (!ironlake_edp_have_panel_power(intel_dp)) {
DRM_DEBUG_KMS("eDP was not running\n");
msleep(intel_dp->panel_power_up_delay);
}
}
 
static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp)
static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
u32 pp;
 
pp = I915_READ(PCH_PP_CONTROL);
if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
pp = ironlake_get_pp_control(dev_priv);
pp &= ~EDP_FORCE_VDD;
I915_WRITE(PCH_PP_CONTROL, pp);
POSTING_READ(PCH_PP_CONTROL);
 
/* Make sure sequencer is idle before allowing subsequent activity */
msleep(dev_priv->panel_t12);
DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));
 
msleep(intel_dp->panel_power_down_delay);
}
}
 
/* Returns true if the panel was already on when called */
static bool ironlake_edp_panel_on (struct intel_dp *intel_dp)
 
static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
{
if (!is_edp(intel_dp))
return;
 
DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd);
WARN(!intel_dp->want_panel_vdd, "eDP VDD not forced on");
 
intel_dp->want_panel_vdd = false;
 
if (sync) {
ironlake_panel_vdd_off_sync(intel_dp);
} else {
/*
* Queue the timer to fire a long
* time from now (relative to the power down delay)
* to keep the panel power up across a sequence of operations
*/
// schedule_delayed_work(&intel_dp->panel_vdd_work,
// msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5));
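/*
 * Editor's note: with the delayed work commented out in this port, the
 * FORCE_VDD bit appears to stay set until a later synchronous vdd-off
 * (the dpms/commit paths or encoder cleanup) reaches
 * ironlake_panel_vdd_off_sync().
 */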
}
}
 
static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_STATE_ON_IDLE;
u32 pp;
 
if (I915_READ(PCH_PP_STATUS) & PP_ON)
return true;
if (!is_edp(intel_dp))
return;
 
pp = I915_READ(PCH_PP_CONTROL);
DRM_DEBUG_KMS("Turn eDP power on\n");
 
if (ironlake_edp_have_panel_power(intel_dp)) {
DRM_DEBUG_KMS("eDP power already on\n");
return;
}
 
ironlake_wait_panel_power_cycle(intel_dp);
 
pp = ironlake_get_pp_control(dev_priv);
if (IS_GEN5(dev)) {
/* ILK workaround: disable reset around power sequence */
pp &= ~PANEL_POWER_RESET;
I915_WRITE(PCH_PP_CONTROL, pp);
POSTING_READ(PCH_PP_CONTROL);
}
 
pp |= PANEL_UNLOCK_REGS | POWER_TARGET_ON;
pp |= POWER_TARGET_ON;
if (!IS_GEN5(dev))
pp |= PANEL_POWER_RESET;
 
I915_WRITE(PCH_PP_CONTROL, pp);
POSTING_READ(PCH_PP_CONTROL);
 
if (wait_for((I915_READ(PCH_PP_STATUS) & idle_on_mask) == idle_on_mask,
5000))
DRM_ERROR("panel on wait timed out: 0x%08x\n",
I915_READ(PCH_PP_STATUS));
ironlake_wait_panel_on(intel_dp);
 
if (IS_GEN5(dev)) {
pp |= PANEL_POWER_RESET; /* restore panel reset bit */
I915_WRITE(PCH_PP_CONTROL, pp);
POSTING_READ(PCH_PP_CONTROL);
 
return false;
}
}
 
static void ironlake_edp_panel_off (struct drm_device *dev)
static void ironlake_edp_panel_off(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
u32 pp, idle_off_mask = PP_ON | PP_SEQUENCE_MASK |
PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK;
u32 pp;
 
pp = I915_READ(PCH_PP_CONTROL);
if (!is_edp(intel_dp))
return;
 
/* ILK workaround: disable reset around power sequence */
pp &= ~PANEL_POWER_RESET;
I915_WRITE(PCH_PP_CONTROL, pp);
POSTING_READ(PCH_PP_CONTROL);
DRM_DEBUG_KMS("Turn eDP power off\n");
 
pp &= ~POWER_TARGET_ON;
WARN(intel_dp->want_panel_vdd, "Cannot turn power off while VDD is on\n");
 
pp = ironlake_get_pp_control(dev_priv);
pp &= ~(POWER_TARGET_ON | EDP_FORCE_VDD | PANEL_POWER_RESET | EDP_BLC_ENABLE);
I915_WRITE(PCH_PP_CONTROL, pp);
POSTING_READ(PCH_PP_CONTROL);
 
if (wait_for((I915_READ(PCH_PP_STATUS) & idle_off_mask) == 0, 5000))
DRM_ERROR("panel off wait timed out: 0x%08x\n",
I915_READ(PCH_PP_STATUS));
 
pp |= PANEL_POWER_RESET; /* restore panel reset bit */
I915_WRITE(PCH_PP_CONTROL, pp);
POSTING_READ(PCH_PP_CONTROL);
ironlake_wait_panel_off(intel_dp);
}
 
static void ironlake_edp_backlight_on (struct drm_device *dev)
static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
u32 pp;
 
if (!is_edp(intel_dp))
return;
 
DRM_DEBUG_KMS("\n");
/*
* If we enable the backlight right away following a panel power
909,21 → 1152,28
* link. So delay a bit to make sure the image is solid before
* allowing it to appear.
*/
msleep(300);
pp = I915_READ(PCH_PP_CONTROL);
msleep(intel_dp->backlight_on_delay);
pp = ironlake_get_pp_control(dev_priv);
pp |= EDP_BLC_ENABLE;
I915_WRITE(PCH_PP_CONTROL, pp);
POSTING_READ(PCH_PP_CONTROL);
}
 
static void ironlake_edp_backlight_off (struct drm_device *dev)
static void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
struct drm_i915_private *dev_priv = dev->dev_private;
u32 pp;
 
if (!is_edp(intel_dp))
return;
 
DRM_DEBUG_KMS("\n");
pp = I915_READ(PCH_PP_CONTROL);
pp = ironlake_get_pp_control(dev_priv);
pp &= ~EDP_BLC_ENABLE;
I915_WRITE(PCH_PP_CONTROL, pp);
POSTING_READ(PCH_PP_CONTROL);
msleep(intel_dp->backlight_off_delay);
}
 
static void ironlake_edp_pll_on(struct drm_encoder *encoder)
986,43 → 1236,39
static void intel_dp_prepare(struct drm_encoder *encoder)
{
struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
struct drm_device *dev = encoder->dev;
 
ironlake_edp_backlight_off(intel_dp);
ironlake_edp_panel_off(intel_dp);
 
/* Wake up the sink first */
ironlake_edp_panel_vdd_on(intel_dp);
intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
intel_dp_link_down(intel_dp);
ironlake_edp_panel_vdd_off(intel_dp, false);
 
if (is_edp(intel_dp)) {
ironlake_edp_backlight_off(dev);
ironlake_edp_panel_off(dev);
if (!is_pch_edp(intel_dp))
ironlake_edp_pll_on(encoder);
else
ironlake_edp_pll_off(encoder);
/* Make sure the panel is off before trying to
* change the mode
*/
}
intel_dp_link_down(intel_dp);
}
 
static void intel_dp_commit(struct drm_encoder *encoder)
{
struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
struct drm_device *dev = encoder->dev;
struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);
 
if (is_edp(intel_dp))
ironlake_edp_panel_vdd_on(intel_dp);
 
intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
intel_dp_start_link_train(intel_dp);
 
if (is_edp(intel_dp)) {
ironlake_edp_panel_on(intel_dp);
ironlake_edp_panel_vdd_off(intel_dp);
}
 
ironlake_edp_panel_vdd_off(intel_dp, true);
intel_dp_complete_link_train(intel_dp);
ironlake_edp_backlight_on(intel_dp);
 
if (is_edp(intel_dp))
ironlake_edp_backlight_on(dev);
intel_dp->dpms_mode = DRM_MODE_DPMS_ON;
 
intel_dp->dpms_mode = DRM_MODE_DPMS_ON;
if (HAS_PCH_CPT(dev))
intel_cpt_verify_modeset(dev, intel_crtc->pipe);
}
 
static void
1034,29 → 1280,31
uint32_t dp_reg = I915_READ(intel_dp->output_reg);
 
if (mode != DRM_MODE_DPMS_ON) {
if (is_edp(intel_dp))
ironlake_edp_backlight_off(dev);
ironlake_edp_backlight_off(intel_dp);
ironlake_edp_panel_off(intel_dp);
 
ironlake_edp_panel_vdd_on(intel_dp);
intel_dp_sink_dpms(intel_dp, mode);
intel_dp_link_down(intel_dp);
if (is_edp(intel_dp))
ironlake_edp_panel_off(dev);
if (is_edp(intel_dp) && !is_pch_edp(intel_dp))
ironlake_edp_panel_vdd_off(intel_dp, false);
 
if (is_cpu_edp(intel_dp))
ironlake_edp_pll_off(encoder);
} else {
if (is_edp(intel_dp))
if (is_cpu_edp(intel_dp))
ironlake_edp_pll_on(encoder);
 
ironlake_edp_panel_vdd_on(intel_dp);
intel_dp_sink_dpms(intel_dp, mode);
if (!(dp_reg & DP_PORT_EN)) {
intel_dp_start_link_train(intel_dp);
if (is_edp(intel_dp)) {
ironlake_edp_panel_on(intel_dp);
ironlake_edp_panel_vdd_off(intel_dp);
}
ironlake_edp_panel_vdd_off(intel_dp, true);
intel_dp_complete_link_train(intel_dp);
} else
ironlake_edp_panel_vdd_off(intel_dp, false);
ironlake_edp_backlight_on(intel_dp);
}
if (is_edp(intel_dp))
ironlake_edp_backlight_on(dev);
}
intel_dp->dpms_mode = mode;
}
 
1090,11 → 1338,11
* link status information
*/
static bool
intel_dp_get_link_status(struct intel_dp *intel_dp)
intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
return intel_dp_aux_native_read_retry(intel_dp,
DP_LANE0_1_STATUS,
intel_dp->link_status,
link_status,
DP_LINK_STATUS_SIZE);
}
 
1106,27 → 1354,25
}
 
static uint8_t
intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
intel_get_adjust_request_voltage(uint8_t adjust_request[2],
int lane)
{
int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
int s = ((lane & 1) ?
DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
uint8_t l = intel_dp_link_status(link_status, i);
uint8_t l = adjust_request[lane>>1];
 
return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}
 
static uint8_t
intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2],
int lane)
{
int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
int s = ((lane & 1) ?
DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
uint8_t l = intel_dp_link_status(link_status, i);
uint8_t l = adjust_request[lane>>1];
 
return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}
1148,16 → 1394,41
* These are source-specific values; current Intel hardware supports
* a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
*/
#define I830_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_800
 
static uint8_t
intel_dp_pre_emphasis_max(uint8_t voltage_swing)
intel_dp_voltage_max(struct intel_dp *intel_dp)
{
struct drm_device *dev = intel_dp->base.base.dev;
 
if (IS_GEN7(dev) && is_cpu_edp(intel_dp))
return DP_TRAIN_VOLTAGE_SWING_800;
else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
return DP_TRAIN_VOLTAGE_SWING_1200;
else
return DP_TRAIN_VOLTAGE_SWING_800;
}
 
static uint8_t
intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
{
struct drm_device *dev = intel_dp->base.base.dev;
 
if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
case DP_TRAIN_VOLTAGE_SWING_400:
return DP_TRAIN_PRE_EMPHASIS_6;
case DP_TRAIN_VOLTAGE_SWING_600:
case DP_TRAIN_VOLTAGE_SWING_800:
return DP_TRAIN_PRE_EMPHASIS_3_5;
default:
return DP_TRAIN_PRE_EMPHASIS_0;
}
} else {
switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
case DP_TRAIN_VOLTAGE_SWING_400:
return DP_TRAIN_PRE_EMPHASIS_6;
case DP_TRAIN_VOLTAGE_SWING_600:
return DP_TRAIN_PRE_EMPHASIS_6;
case DP_TRAIN_VOLTAGE_SWING_800:
return DP_TRAIN_PRE_EMPHASIS_3_5;
case DP_TRAIN_VOLTAGE_SWING_1200:
1165,17 → 1436,21
return DP_TRAIN_PRE_EMPHASIS_0;
}
}
}
 
static void
intel_get_adjust_train(struct intel_dp *intel_dp)
intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
uint8_t v = 0;
uint8_t p = 0;
int lane;
uint8_t *adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS);
uint8_t voltage_max;
uint8_t preemph_max;
 
for (lane = 0; lane < intel_dp->lane_count; lane++) {
uint8_t this_v = intel_get_adjust_request_voltage(intel_dp->link_status, lane);
uint8_t this_p = intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);
uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane);
uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane);
 
if (this_v > v)
v = this_v;
1183,11 → 1458,13
p = this_p;
}
 
if (v >= I830_DP_VOLTAGE_MAX)
v = I830_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
voltage_max = intel_dp_voltage_max(intel_dp);
if (v >= voltage_max)
v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;
 
if (p >= intel_dp_pre_emphasis_max(v))
p = intel_dp_pre_emphasis_max(v) | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
preemph_max = intel_dp_pre_emphasis_max(intel_dp, v);
if (p >= preemph_max)
p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
 
for (lane = 0; lane < 4; lane++)
intel_dp->train_set[lane] = v | p;
1194,7 → 1471,7
}
 
static uint32_t
intel_dp_signal_levels(uint8_t train_set, int lane_count)
intel_dp_signal_levels(uint8_t train_set)
{
uint32_t signal_levels = 0;
 
1259,13 → 1536,43
}
}
 
/* Gen7's DP voltage swing and pre-emphasis control */
static uint32_t
intel_gen7_edp_signal_levels(uint8_t train_set)
{
int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
DP_TRAIN_PRE_EMPHASIS_MASK);
switch (signal_levels) {
case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
return EDP_LINK_TRAIN_400MV_0DB_IVB;
case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
return EDP_LINK_TRAIN_400MV_6DB_IVB;
 
case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
return EDP_LINK_TRAIN_600MV_0DB_IVB;
case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
return EDP_LINK_TRAIN_600MV_3_5DB_IVB;
 
case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
return EDP_LINK_TRAIN_800MV_0DB_IVB;
case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
return EDP_LINK_TRAIN_800MV_3_5DB_IVB;
 
default:
DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
"0x%x\n", signal_levels);
return EDP_LINK_TRAIN_500MV_0DB_IVB;
}
}
 
static uint8_t
intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
int lane)
{
int i = DP_LANE0_1_STATUS + (lane >> 1);
int s = (lane & 1) * 4;
uint8_t l = intel_dp_link_status(link_status, i);
uint8_t l = link_status[lane>>1];
 
return (l >> s) & 0xf;
}
1290,18 → 1597,18
DP_LANE_CHANNEL_EQ_DONE|\
DP_LANE_SYMBOL_LOCKED)
static bool
intel_channel_eq_ok(struct intel_dp *intel_dp)
intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
uint8_t lane_align;
uint8_t lane_status;
int lane;
 
lane_align = intel_dp_link_status(intel_dp->link_status,
lane_align = intel_dp_link_status(link_status,
DP_LANE_ALIGN_STATUS_UPDATED);
if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
return false;
for (lane = 0; lane < intel_dp->lane_count; lane++) {
lane_status = intel_get_lane_status(intel_dp->link_status, lane);
lane_status = intel_get_lane_status(link_status, lane);
if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
return false;
}
1326,8 → 1633,9
 
ret = intel_dp_aux_native_write(intel_dp,
DP_TRAINING_LANE0_SET,
intel_dp->train_set, 4);
if (ret != 4)
intel_dp->train_set,
intel_dp->lane_count);
if (ret != intel_dp->lane_count)
return false;
 
return true;
1343,7 → 1651,7
int i;
uint8_t voltage;
bool clock_recovery = false;
int tries;
int voltage_tries, loop_tries;
u32 reg;
uint32_t DP = intel_dp->DP;
 
1364,26 → 1672,35
DP_LINK_CONFIGURATION_SIZE);
 
DP |= DP_PORT_EN;
if (HAS_PCH_CPT(dev) && !is_edp(intel_dp))
 
if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
DP &= ~DP_LINK_TRAIN_MASK_CPT;
else
DP &= ~DP_LINK_TRAIN_MASK;
memset(intel_dp->train_set, 0, 4);
voltage = 0xff;
tries = 0;
voltage_tries = 0;
loop_tries = 0;
clock_recovery = false;
for (;;) {
/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
uint8_t link_status[DP_LINK_STATUS_SIZE];
uint32_t signal_levels;
if (IS_GEN6(dev) && is_edp(intel_dp)) {
 
 
if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
} else {
signal_levels = intel_dp_signal_levels(intel_dp->train_set[0], intel_dp->lane_count);
signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels);
DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
}
 
if (HAS_PCH_CPT(dev) && !is_edp(intel_dp))
if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
else
reg = DP | DP_LINK_TRAIN_PAT_1;
1395,10 → 1712,13
/* Set training pattern 1 */
 
udelay(100);
if (!intel_dp_get_link_status(intel_dp))
if (!intel_dp_get_link_status(intel_dp, link_status)) {
DRM_ERROR("failed to get link status\n");
break;
}
 
if (intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
DRM_DEBUG_KMS("clock recovery OK\n");
clock_recovery = true;
break;
}
1407,20 → 1727,30
for (i = 0; i < intel_dp->lane_count; i++)
if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
break;
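/* Editor's note: if every lane has already hit maximum voltage swing
 * without achieving clock recovery, the new code below clears the train
 * set and restarts, allowing up to five full restarts before giving up. */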
if (i == intel_dp->lane_count)
if (i == intel_dp->lane_count) {
++loop_tries;
if (loop_tries == 5) {
DRM_DEBUG_KMS("too many full retries, give up\n");
break;
}
memset(intel_dp->train_set, 0, 4);
voltage_tries = 0;
continue;
}
 
/* Check to see if we've tried the same voltage 5 times */
if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
++tries;
if (tries == 5)
++voltage_tries;
if (voltage_tries == 5) {
DRM_DEBUG_KMS("too many voltage retries, give up\n");
break;
}
} else
tries = 0;
voltage_tries = 0;
voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
 
/* Compute new intel_dp->train_set as requested by target */
intel_get_adjust_train(intel_dp);
intel_get_adjust_train(intel_dp, link_status);
}
 
intel_dp->DP = DP;
1443,6 → 1773,7
for (;;) {
/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
uint32_t signal_levels;
uint8_t link_status[DP_LINK_STATUS_SIZE];
 
if (cr_tries > 5) {
DRM_ERROR("failed to train DP, aborting\n");
1450,15 → 1781,18
break;
}
 
if (IS_GEN6(dev) && is_edp(intel_dp)) {
if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
} else {
signal_levels = intel_dp_signal_levels(intel_dp->train_set[0], intel_dp->lane_count);
signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
}
 
if (HAS_PCH_CPT(dev) && !is_edp(intel_dp))
if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
else
reg = DP | DP_LINK_TRAIN_PAT_2;
1470,17 → 1804,17
break;
 
udelay(400);
if (!intel_dp_get_link_status(intel_dp))
if (!intel_dp_get_link_status(intel_dp, link_status))
break;
 
/* Make sure clock is still ok */
if (!intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
if (!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
intel_dp_start_link_train(intel_dp);
cr_tries++;
continue;
}
 
if (intel_channel_eq_ok(intel_dp)) {
if (intel_channel_eq_ok(intel_dp, link_status)) {
channel_eq = true;
break;
}
1495,11 → 1829,11
}
 
/* Compute new intel_dp->train_set as requested by target */
intel_get_adjust_train(intel_dp);
intel_get_adjust_train(intel_dp, link_status);
++tries;
}
 
if (HAS_PCH_CPT(dev) && !is_edp(intel_dp))
if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
reg = DP | DP_LINK_TRAIN_OFF_CPT;
else
reg = DP | DP_LINK_TRAIN_OFF;
1529,7 → 1863,7
udelay(100);
}
 
if (HAS_PCH_CPT(dev) && !is_edp(intel_dp)) {
if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
DP &= ~DP_LINK_TRAIN_MASK_CPT;
I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
} else {
1540,8 → 1874,12
 
msleep(17);
 
if (is_edp(intel_dp))
if (is_edp(intel_dp)) {
if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
DP |= DP_LINK_TRAIN_OFF_CPT;
else
DP |= DP_LINK_TRAIN_OFF;
}
 
if (!HAS_PCH_CPT(dev) &&
I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
1576,8 → 1914,10
intel_wait_for_vblank(dev, to_intel_crtc(crtc)->pipe);
}
 
DP &= ~DP_AUDIO_OUTPUT_ENABLE;
I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
POSTING_READ(intel_dp->output_reg);
msleep(intel_dp->panel_power_down_delay);
}
 
static bool
1592,6 → 1932,27
return false;
}
 
static bool
intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector)
{
int ret;
 
ret = intel_dp_aux_native_read_retry(intel_dp,
DP_DEVICE_SERVICE_IRQ_VECTOR,
sink_irq_vector, 1);
if (!ret)
return false;
 
return true;
}
 
static void
intel_dp_handle_test_request(struct intel_dp *intel_dp)
{
/* NAK by default */
intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_ACK);
}
 
/*
* According to DP spec
* 5.1.2:
1604,6 → 1965,9
static void
intel_dp_check_link_status(struct intel_dp *intel_dp)
{
u8 sink_irq_vector;
u8 link_status[DP_LINK_STATUS_SIZE];
 
if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON)
return;
 
1611,7 → 1975,7
return;
 
/* Try to read receiver status if the link appears to be up */
if (!intel_dp_get_link_status(intel_dp)) {
if (!intel_dp_get_link_status(intel_dp, link_status)) {
intel_dp_link_down(intel_dp);
return;
}
1622,7 → 1986,21
return;
}
 
if (!intel_channel_eq_ok(intel_dp)) {
/* Try to read the source of the interrupt */
if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) {
/* Clear interrupt source */
intel_dp_aux_native_write_1(intel_dp,
DP_DEVICE_SERVICE_IRQ_VECTOR,
sink_irq_vector);
 
if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
intel_dp_handle_test_request(intel_dp);
if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
}
 
if (!intel_channel_eq_ok(intel_dp, link_status)) {
DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
drm_get_encoder_name(&intel_dp->base.base));
intel_dp_start_link_train(intel_dp);
1683,6 → 2061,31
return intel_dp_detect_dpcd(intel_dp);
}
 
static struct edid *
intel_dp_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter)
{
struct intel_dp *intel_dp = intel_attached_dp(connector);
struct edid *edid;
 
ironlake_edp_panel_vdd_on(intel_dp);
edid = drm_get_edid(connector, adapter);
ironlake_edp_panel_vdd_off(intel_dp, false);
return edid;
}
 
static int
intel_dp_get_edid_modes(struct drm_connector *connector, struct i2c_adapter *adapter)
{
struct intel_dp *intel_dp = intel_attached_dp(connector);
int ret;
 
ironlake_edp_panel_vdd_on(intel_dp);
ret = intel_ddc_get_modes(connector, adapter);
ironlake_edp_panel_vdd_off(intel_dp, false);
return ret;
}
 
 
/**
* Uses CRT_HOTPLUG_EN and CRT_HOTPLUG_STAT to detect DP connection.
*
1736,28 → 2139,36
/* We should parse the EDID data and find out if it has an audio sink
*/
 
ret = intel_ddc_get_modes(connector, &intel_dp->adapter);
ret = intel_dp_get_edid_modes(connector, &intel_dp->adapter);
if (ret) {
if (is_edp(intel_dp) && !dev_priv->panel_fixed_mode) {
if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) {
struct drm_display_mode *newmode;
list_for_each_entry(newmode, &connector->probed_modes,
head) {
if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
dev_priv->panel_fixed_mode =
if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) {
intel_dp->panel_fixed_mode =
drm_mode_duplicate(dev, newmode);
break;
}
}
}
 
return ret;
}
 
/* if eDP has no EDID, try to use fixed panel mode from VBT */
if (is_edp(intel_dp)) {
if (dev_priv->panel_fixed_mode != NULL) {
/* initialize panel mode from VBT if available for eDP */
if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) {
intel_dp->panel_fixed_mode =
drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
if (intel_dp->panel_fixed_mode) {
intel_dp->panel_fixed_mode->type |=
DRM_MODE_TYPE_PREFERRED;
}
}
if (intel_dp->panel_fixed_mode) {
struct drm_display_mode *mode;
mode = drm_mode_duplicate(dev, dev_priv->panel_fixed_mode);
mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
drm_mode_probed_add(connector, mode);
return 1;
}
1844,6 → 2255,10
 
// i2c_del_adapter(&intel_dp->adapter);
drm_encoder_cleanup(encoder);
if (is_edp(intel_dp)) {
// cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
ironlake_panel_vdd_off_sync(intel_dp);
}
kfree(intel_dp);
}
 
1896,7 → 2311,8
continue;
 
intel_dp = enc_to_intel_dp(encoder);
if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT)
if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
intel_dp->base.type == INTEL_OUTPUT_EDP)
return intel_dp->output_reg;
}
 
1980,10 → 2396,13
else if (output_reg == DP_D || output_reg == PCH_DP_D)
intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);
 
if (is_edp(intel_dp))
if (is_edp(intel_dp)) {
intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
// INIT_DELAYED_WORK(&intel_dp->panel_vdd_work,
// ironlake_panel_vdd_work);
}
 
intel_encoder->crtc_mask = (1 << 0) | (1 << 1);
intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
connector->interlace_allowed = true;
connector->doublescan_allowed = 0;
 
2019,25 → 2438,59
break;
}
 
intel_dp_i2c_init(intel_dp, intel_connector, name);
 
/* Cache some DPCD data in the eDP case */
if (is_edp(intel_dp)) {
bool ret;
u32 pp_on, pp_div;
struct edp_power_seq cur, vbt;
u32 pp_on, pp_off, pp_div;
 
pp_on = I915_READ(PCH_PP_ON_DELAYS);
pp_off = I915_READ(PCH_PP_OFF_DELAYS);
pp_div = I915_READ(PCH_PP_DIVISOR);
 
/* Get T3 & T12 values (note: VESA not bspec terminology) */
dev_priv->panel_t3 = (pp_on & 0x1fff0000) >> 16;
dev_priv->panel_t3 /= 10; /* t3 in 100us units */
dev_priv->panel_t12 = pp_div & 0xf;
dev_priv->panel_t12 *= 100; /* t12 in 100ms units */
/* Pull timing values out of registers */
cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
PANEL_POWER_UP_DELAY_SHIFT;
 
cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
PANEL_LIGHT_ON_DELAY_SHIFT;
 
cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
PANEL_LIGHT_OFF_DELAY_SHIFT;
 
cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
PANEL_POWER_DOWN_DELAY_SHIFT;
 
cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;
 
DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);
 
vbt = dev_priv->edp.pps;
 
DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);
 
#define get_delay(field) ((max(cur.field, vbt.field) + 9) / 10)
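/* Editor's note: the fields compared above are in 100 us units (t11_t12
 * was scaled to match), so (x + 9) / 10 converts to whole milliseconds,
 * rounding up; e.g. a raw 2100 (210000 us) becomes 210 ms. */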
 
intel_dp->panel_power_up_delay = get_delay(t1_t3);
intel_dp->backlight_on_delay = get_delay(t8);
intel_dp->backlight_off_delay = get_delay(t9);
intel_dp->panel_power_down_delay = get_delay(t10);
intel_dp->panel_power_cycle_delay = get_delay(t11_t12);
 
DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
intel_dp->panel_power_cycle_delay);
 
DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
 
ironlake_edp_panel_vdd_on(intel_dp);
ret = intel_dp_get_dpcd(intel_dp);
ironlake_edp_panel_vdd_off(intel_dp);
ironlake_edp_panel_vdd_off(intel_dp, false);
 
if (ret) {
if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
dev_priv->no_aux_handshake =
2052,18 → 2505,11
}
}
 
intel_dp_i2c_init(intel_dp, intel_connector, name);
 
intel_encoder->hot_plug = intel_dp_hot_plug;
 
if (is_edp(intel_dp)) {
/* initialize panel mode from VBT if available for eDP */
if (dev_priv->lfp_lvds_vbt_mode) {
dev_priv->panel_fixed_mode =
drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
if (dev_priv->panel_fixed_mode) {
dev_priv->panel_fixed_mode->type |=
DRM_MODE_TYPE_PREFERRED;
}
}
dev_priv->int_edp_connector = connector;
intel_panel_setup_backlight(dev);
}