Subversion Repositories Kolibri OS

Rev

Rev 4539 | Rev 5060 | Go to most recent revision | Show entire file | Regard whitespace | Details | Blame | Last modification | View Log | RSS feed

Rev 4539 Rev 4560
Line 36... Line 36...
36
#include 
36
#include 
37
#include "i915_drv.h"
37
#include "i915_drv.h"
Line 38... Line 38...
38
 
38
 
Line -... Line 39...
-
 
39
#define DP_LINK_CHECK_TIMEOUT   (10 * 1000)
-
 
40
 
-
 
41
struct dp_link_dpll {
-
 
42
	int link_bw;
-
 
43
	struct dpll dpll;
-
 
44
};
-
 
45
 
-
 
46
static const struct dp_link_dpll gen4_dpll[] = {
-
 
47
	{ DP_LINK_BW_1_62,
-
 
48
		{ .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8 } },
-
 
49
	{ DP_LINK_BW_2_7,
-
 
50
		{ .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2 } }
-
 
51
};
-
 
52
 
-
 
53
static const struct dp_link_dpll pch_dpll[] = {
-
 
54
	{ DP_LINK_BW_1_62,
-
 
55
		{ .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9 } },
-
 
56
	{ DP_LINK_BW_2_7,
-
 
57
		{ .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8 } }
-
 
58
};
-
 
59
 
-
 
60
static const struct dp_link_dpll vlv_dpll[] = {
-
 
61
	{ DP_LINK_BW_1_62,
-
 
62
		{ .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81 } },
-
 
63
	{ DP_LINK_BW_2_7,
-
 
64
		{ .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27 } }
39
#define DP_LINK_CHECK_TIMEOUT   (10 * 1000)
65
};
40
 
66
 
41
/**
67
/**
42
 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
68
 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
43
 * @intel_dp: DP struct
69
 * @intel_dp: DP struct
Line 114... Line 140...
114
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
140
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
115
{
141
{
116
	return (max_link_clock * max_lanes * 8) / 10;
142
	return (max_link_clock * max_lanes * 8) / 10;
117
}
143
}
Line 118... Line 144...
118
 
144
 
119
static int
145
static enum drm_mode_status
120
intel_dp_mode_valid(struct drm_connector *connector,
146
intel_dp_mode_valid(struct drm_connector *connector,
121
		    struct drm_display_mode *mode)
147
		    struct drm_display_mode *mode)
122
{
148
{
123
	struct intel_dp *intel_dp = intel_attached_dp(connector);
149
	struct intel_dp *intel_dp = intel_attached_dp(connector);
Line 209... Line 235...
209
	default:
235
	default:
210
		return 133;
236
		return 133;
211
	}
237
	}
212
}
238
}
Line -... Line 239...
-
 
239
 
-
 
240
static void
-
 
241
intel_dp_init_panel_power_sequencer(struct drm_device *dev,
-
 
242
				    struct intel_dp *intel_dp,
-
 
243
				    struct edp_power_seq *out);
-
 
244
static void
-
 
245
intel_dp_init_panel_power_sequencer_registers(struct drm_device *dev,
-
 
246
					      struct intel_dp *intel_dp,
-
 
247
					      struct edp_power_seq *out);
-
 
248
 
-
 
249
static enum pipe
-
 
250
vlv_power_sequencer_pipe(struct intel_dp *intel_dp)
-
 
251
{
-
 
252
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
-
 
253
	struct drm_crtc *crtc = intel_dig_port->base.base.crtc;
-
 
254
	struct drm_device *dev = intel_dig_port->base.base.dev;
-
 
255
	struct drm_i915_private *dev_priv = dev->dev_private;
-
 
256
	enum port port = intel_dig_port->port;
-
 
257
	enum pipe pipe;
-
 
258
 
-
 
259
	/* modeset should have pipe */
-
 
260
	if (crtc)
-
 
261
		return to_intel_crtc(crtc)->pipe;
-
 
262
 
-
 
263
	/* init time, try to find a pipe with this port selected */
-
 
264
	for (pipe = PIPE_A; pipe <= PIPE_B; pipe++) {
-
 
265
		u32 port_sel = I915_READ(VLV_PIPE_PP_ON_DELAYS(pipe)) &
-
 
266
			PANEL_PORT_SELECT_MASK;
-
 
267
		if (port_sel == PANEL_PORT_SELECT_DPB_VLV && port == PORT_B)
-
 
268
			return pipe;
-
 
269
		if (port_sel == PANEL_PORT_SELECT_DPC_VLV && port == PORT_C)
-
 
270
			return pipe;
-
 
271
	}
-
 
272
 
-
 
273
	/* shrug */
-
 
274
	return PIPE_A;
-
 
275
}
-
 
276
 
-
 
277
static u32 _pp_ctrl_reg(struct intel_dp *intel_dp)
-
 
278
{
-
 
279
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
-
 
280
 
-
 
281
	if (HAS_PCH_SPLIT(dev))
-
 
282
		return PCH_PP_CONTROL;
-
 
283
	else
-
 
284
		return VLV_PIPE_PP_CONTROL(vlv_power_sequencer_pipe(intel_dp));
-
 
285
}
-
 
286
 
-
 
287
static u32 _pp_stat_reg(struct intel_dp *intel_dp)
-
 
288
{
-
 
289
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
-
 
290
 
-
 
291
	if (HAS_PCH_SPLIT(dev))
-
 
292
		return PCH_PP_STATUS;
-
 
293
	else
-
 
294
		return VLV_PIPE_PP_STATUS(vlv_power_sequencer_pipe(intel_dp));
-
 
295
}
213
 
296
 
214
static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
297
static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
215
{
298
{
216
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
299
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
217
	struct drm_i915_private *dev_priv = dev->dev_private;
-
 
Line 218... Line -...
218
	u32 pp_stat_reg;
-
 
219
 
300
	struct drm_i915_private *dev_priv = dev->dev_private;
220
	pp_stat_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_STATUS : PCH_PP_STATUS;
301
 
Line 221... Line 302...
221
	return (I915_READ(pp_stat_reg) & PP_ON) != 0;
302
	return (I915_READ(_pp_stat_reg(intel_dp)) & PP_ON) != 0;
222
}
303
}
223
 
304
 
224
static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
305
static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
225
{
-
 
Line 226... Line -...
226
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
-
 
227
	struct drm_i915_private *dev_priv = dev->dev_private;
306
{
228
	u32 pp_ctrl_reg;
307
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
Line 229... Line 308...
229
 
308
	struct drm_i915_private *dev_priv = dev->dev_private;
230
	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
309
 
231
	return (I915_READ(pp_ctrl_reg) & EDP_FORCE_VDD) != 0;
310
	return (I915_READ(_pp_ctrl_reg(intel_dp)) & EDP_FORCE_VDD) != 0;
232
}
311
}
233
 
312
 
234
static void
-
 
Line 235... Line 313...
235
intel_dp_check_edp(struct intel_dp *intel_dp)
313
static void
236
{
314
intel_dp_check_edp(struct intel_dp *intel_dp)
Line 237... Line -...
237
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
-
 
238
	struct drm_i915_private *dev_priv = dev->dev_private;
-
 
239
	u32 pp_stat_reg, pp_ctrl_reg;
-
 
240
 
315
{
241
	if (!is_edp(intel_dp))
316
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
242
		return;
317
	struct drm_i915_private *dev_priv = dev->dev_private;
243
 
318
 
244
	pp_stat_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_STATUS : PCH_PP_STATUS;
319
	if (!is_edp(intel_dp))
245
	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
320
		return;
246
 
321
 
Line 247... Line 322...
247
	if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
322
	if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
248
		WARN(1, "eDP powered off while attempting aux channel communication.\n");
323
		WARN(1, "eDP powered off while attempting aux channel communication.\n");
Line 327... Line 402...
327
	uint32_t ch_data = ch_ctl + 4;
402
	uint32_t ch_data = ch_ctl + 4;
328
	uint32_t aux_clock_divider;
403
	uint32_t aux_clock_divider;
329
	int i, ret, recv_bytes;
404
	int i, ret, recv_bytes;
330
	uint32_t status;
405
	uint32_t status;
331
	int try, precharge, clock = 0;
406
	int try, precharge, clock = 0;
332
	bool has_aux_irq = INTEL_INFO(dev)->gen >= 5 && !IS_VALLEYVIEW(dev);
407
	bool has_aux_irq = true;
-
 
408
	uint32_t timeout;
Line 333... Line 409...
333
 
409
 
334
	/* dp aux is extremely sensitive to irq latency, hence request the
410
	/* dp aux is extremely sensitive to irq latency, hence request the
335
	 * lowest possible wakeup latency and so prevent the cpu from going into
411
	 * lowest possible wakeup latency and so prevent the cpu from going into
336
	 * deep sleep states.
412
	 * deep sleep states.
Line 342... Line 418...
342
	if (IS_GEN6(dev))
418
	if (IS_GEN6(dev))
343
		precharge = 3;
419
		precharge = 3;
344
	else
420
	else
345
		precharge = 5;
421
		precharge = 5;
Line -... Line 422...
-
 
422
 
-
 
423
	if (IS_BROADWELL(dev) && ch_ctl == DPA_AUX_CH_CTL)
-
 
424
		timeout = DP_AUX_CH_CTL_TIME_OUT_600us;
-
 
425
	else
-
 
426
		timeout = DP_AUX_CH_CTL_TIME_OUT_400us;
346
 
427
 
Line 347... Line 428...
347
	intel_aux_display_runtime_get(dev_priv);
428
	intel_aux_display_runtime_get(dev_priv);
348
 
429
 
349
	/* Try to wait for any previous AUX channel activity */
430
	/* Try to wait for any previous AUX channel activity */
Line 359... Line 440...
359
		     I915_READ(ch_ctl));
440
		     I915_READ(ch_ctl));
360
		ret = -EBUSY;
441
		ret = -EBUSY;
361
		goto out;
442
		goto out;
362
	}
443
	}
Line -... Line 444...
-
 
444
 
-
 
445
	/* Only 5 data registers! */
-
 
446
	if (WARN_ON(send_bytes > 20 || recv_size > 20)) {
-
 
447
		ret = -E2BIG;
-
 
448
		goto out;
-
 
449
	}
363
 
450
 
364
	while ((aux_clock_divider = get_aux_clock_divider(intel_dp, clock++))) {
451
	while ((aux_clock_divider = get_aux_clock_divider(intel_dp, clock++))) {
365
	/* Must try at least 3 times according to DP spec */
452
	/* Must try at least 3 times according to DP spec */
366
	for (try = 0; try < 5; try++) {
453
	for (try = 0; try < 5; try++) {
367
		/* Load the send data into the aux channel data registers */
454
		/* Load the send data into the aux channel data registers */
Line 371... Line 458...
371
 
458
 
372
		/* Send the command and wait for it to complete */
459
		/* Send the command and wait for it to complete */
373
		I915_WRITE(ch_ctl,
460
		I915_WRITE(ch_ctl,
374
			   DP_AUX_CH_CTL_SEND_BUSY |
461
			   DP_AUX_CH_CTL_SEND_BUSY |
375
			   (has_aux_irq ? DP_AUX_CH_CTL_INTERRUPT : 0) |
462
			   (has_aux_irq ? DP_AUX_CH_CTL_INTERRUPT : 0) |
376
			   DP_AUX_CH_CTL_TIME_OUT_400us |
463
				   timeout |
377
			   (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
464
			   (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
378
			   (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
465
			   (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
379
			   (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
466
			   (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
380
			   DP_AUX_CH_CTL_DONE |
467
			   DP_AUX_CH_CTL_DONE |
Line 449... Line 536...
449
	int ret;
536
	int ret;
450
	uint8_t	msg[20];
537
	uint8_t	msg[20];
451
	int msg_bytes;
538
	int msg_bytes;
452
	uint8_t	ack;
539
	uint8_t	ack;
Line -... Line 540...
-
 
540
 
-
 
541
	if (WARN_ON(send_bytes > 16))
-
 
542
		return -E2BIG;
453
 
543
 
454
	intel_dp_check_edp(intel_dp);
-
 
455
	if (send_bytes > 16)
-
 
456
		return -1;
544
	intel_dp_check_edp(intel_dp);
457
	msg[0] = AUX_NATIVE_WRITE << 4;
545
	msg[0] = DP_AUX_NATIVE_WRITE << 4;
458
	msg[1] = address >> 8;
546
	msg[1] = address >> 8;
459
	msg[2] = address & 0xff;
547
	msg[2] = address & 0xff;
460
	msg[3] = send_bytes - 1;
548
	msg[3] = send_bytes - 1;
461
	memcpy(&msg[4], send, send_bytes);
549
	memcpy(&msg[4], send, send_bytes);
462
	msg_bytes = send_bytes + 4;
550
	msg_bytes = send_bytes + 4;
463
	for (;;) {
551
	for (;;) {
464
		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
552
		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
465
		if (ret < 0)
553
		if (ret < 0)
-
 
554
			return ret;
466
			return ret;
555
		ack >>= 4;
467
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
556
		if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK)
468
			break;
557
			break;
469
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
558
		else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
470
			udelay(100);
559
			udelay(100);
471
		else
560
		else
472
			return -EIO;
561
			return -EIO;
473
	}
562
	}
Line 492... Line 581...
492
	uint8_t reply[20];
581
	uint8_t reply[20];
493
	int reply_bytes;
582
	int reply_bytes;
494
	uint8_t ack;
583
	uint8_t ack;
495
	int ret;
584
	int ret;
Line -... Line 585...
-
 
585
 
-
 
586
	if (WARN_ON(recv_bytes > 19))
-
 
587
		return -E2BIG;
496
 
588
 
497
	intel_dp_check_edp(intel_dp);
589
	intel_dp_check_edp(intel_dp);
498
	msg[0] = AUX_NATIVE_READ << 4;
590
	msg[0] = DP_AUX_NATIVE_READ << 4;
499
	msg[1] = address >> 8;
591
	msg[1] = address >> 8;
500
	msg[2] = address & 0xff;
592
	msg[2] = address & 0xff;
Line 501... Line 593...
501
	msg[3] = recv_bytes - 1;
593
	msg[3] = recv_bytes - 1;
Line 508... Line 600...
508
				      reply, reply_bytes);
600
				      reply, reply_bytes);
509
		if (ret == 0)
601
		if (ret == 0)
510
			return -EPROTO;
602
			return -EPROTO;
511
		if (ret < 0)
603
		if (ret < 0)
512
			return ret;
604
			return ret;
513
		ack = reply[0];
605
		ack = reply[0] >> 4;
514
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) {
606
		if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) {
515
			memcpy(recv, reply + 1, ret - 1);
607
			memcpy(recv, reply + 1, ret - 1);
516
			return ret - 1;
608
			return ret - 1;
517
		}
609
		}
518
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
610
		else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
519
			udelay(100);
611
			udelay(100);
520
		else
612
		else
521
			return -EIO;
613
			return -EIO;
522
	}
614
	}
523
}
615
}
Line 536... Line 628...
536
	unsigned retry;
628
	unsigned retry;
537
	int msg_bytes;
629
	int msg_bytes;
538
	int reply_bytes;
630
	int reply_bytes;
539
	int ret;
631
	int ret;
Line -... Line 632...
-
 
632
 
540
 
633
	ironlake_edp_panel_vdd_on(intel_dp);
541
	intel_dp_check_edp(intel_dp);
634
	intel_dp_check_edp(intel_dp);
542
	/* Set up the command byte */
635
	/* Set up the command byte */
543
	if (mode & MODE_I2C_READ)
636
	if (mode & MODE_I2C_READ)
544
		msg[0] = AUX_I2C_READ << 4;
637
		msg[0] = DP_AUX_I2C_READ << 4;
545
	else
638
	else
Line 546... Line 639...
546
		msg[0] = AUX_I2C_WRITE << 4;
639
		msg[0] = DP_AUX_I2C_WRITE << 4;
547
 
640
 
Line 548... Line 641...
548
	if (!(mode & MODE_I2C_STOP))
641
	if (!(mode & MODE_I2C_STOP))
549
		msg[0] |= AUX_I2C_MOT << 4;
642
		msg[0] |= DP_AUX_I2C_MOT << 4;
Line 550... Line 643...
550
 
643
 
Line 567... Line 660...
567
		msg_bytes = 3;
660
		msg_bytes = 3;
568
		reply_bytes = 1;
661
		reply_bytes = 1;
569
		break;
662
		break;
570
	}
663
	}
Line -... Line 664...
-
 
664
 
-
 
665
	/*
-
 
666
	 * DP1.2 sections 2.7.7.1.5.6.1 and 2.7.7.1.6.6.1: A DP Source device is
-
 
667
	 * required to retry at least seven times upon receiving AUX_DEFER
-
 
668
	 * before giving up the AUX transaction.
571
 
669
	 */
572
	for (retry = 0; retry < 5; retry++) {
670
	for (retry = 0; retry < 7; retry++) {
573
		ret = intel_dp_aux_ch(intel_dp,
671
		ret = intel_dp_aux_ch(intel_dp,
574
				      msg, msg_bytes,
672
				      msg, msg_bytes,
575
				      reply, reply_bytes);
673
				      reply, reply_bytes);
576
		if (ret < 0) {
674
		if (ret < 0) {
577
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
675
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
578
			return ret;
676
			goto out;
Line 579... Line 677...
579
		}
677
		}
580
 
678
 
581
		switch (reply[0] & AUX_NATIVE_REPLY_MASK) {
679
		switch ((reply[0] >> 4) & DP_AUX_NATIVE_REPLY_MASK) {
582
		case AUX_NATIVE_REPLY_ACK:
680
		case DP_AUX_NATIVE_REPLY_ACK:
583
			/* I2C-over-AUX Reply field is only valid
681
			/* I2C-over-AUX Reply field is only valid
584
			 * when paired with AUX ACK.
682
			 * when paired with AUX ACK.
585
			 */
683
			 */
586
			break;
684
			break;
587
		case AUX_NATIVE_REPLY_NACK:
685
		case DP_AUX_NATIVE_REPLY_NACK:
-
 
686
			DRM_DEBUG_KMS("aux_ch native nack\n");
588
			DRM_DEBUG_KMS("aux_ch native nack\n");
687
			ret = -EREMOTEIO;
-
 
688
			goto out;
-
 
689
		case DP_AUX_NATIVE_REPLY_DEFER:
-
 
690
			/*
-
 
691
			 * For now, just give more slack to branch devices. We
-
 
692
			 * could check the DPCD for I2C bit rate capabilities,
-
 
693
			 * and if available, adjust the interval. We could also
-
 
694
			 * be more careful with DP-to-Legacy adapters where a
589
			return -EREMOTEIO;
695
			 * long legacy cable may force very low I2C bit rates.
590
		case AUX_NATIVE_REPLY_DEFER:
696
			 */
591
			udelay(500);
697
            udelay(400);
592
			continue;
698
			continue;
593
		default:
699
		default:
594
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
700
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
-
 
701
				  reply[0]);
595
				  reply[0]);
702
			ret = -EREMOTEIO;
Line 596... Line 703...
596
			return -EREMOTEIO;
703
			goto out;
597
		}
704
		}
598
 
705
 
599
		switch (reply[0] & AUX_I2C_REPLY_MASK) {
706
		switch ((reply[0] >> 4) & DP_AUX_I2C_REPLY_MASK) {
600
		case AUX_I2C_REPLY_ACK:
707
		case DP_AUX_I2C_REPLY_ACK:
601
			if (mode == MODE_I2C_READ) {
708
			if (mode == MODE_I2C_READ) {
-
 
709
				*read_byte = reply[1];
602
				*read_byte = reply[1];
710
			}
603
			}
711
			ret = reply_bytes - 1;
604
			return reply_bytes - 1;
712
			goto out;
-
 
713
		case DP_AUX_I2C_REPLY_NACK:
605
		case AUX_I2C_REPLY_NACK:
714
			DRM_DEBUG_KMS("aux_i2c nack\n");
606
			DRM_DEBUG_KMS("aux_i2c nack\n");
715
			ret = -EREMOTEIO;
607
			return -EREMOTEIO;
716
			goto out;
608
		case AUX_I2C_REPLY_DEFER:
717
		case DP_AUX_I2C_REPLY_DEFER:
609
			DRM_DEBUG_KMS("aux_i2c defer\n");
718
			DRM_DEBUG_KMS("aux_i2c defer\n");
610
			udelay(100);
719
			udelay(100);
611
			break;
720
			break;
-
 
721
		default:
612
		default:
722
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
613
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
723
			ret = -EREMOTEIO;
Line 614... Line 724...
614
			return -EREMOTEIO;
724
			goto out;
615
		}
725
		}
-
 
726
	}
-
 
727
 
-
 
728
	DRM_ERROR("too many retries, giving up\n");
-
 
729
	ret = -EREMOTEIO;
616
	}
730
 
Line 617... Line 731...
617
 
731
out:
618
	DRM_ERROR("too many retries, giving up\n");
732
	ironlake_edp_panel_vdd_off(intel_dp, false);
619
	return -EREMOTEIO;
733
	return ret;
Line 634... Line 748...
634
	intel_dp->adapter.owner = THIS_MODULE;
748
	intel_dp->adapter.owner = THIS_MODULE;
635
	intel_dp->adapter.class = I2C_CLASS_DDC;
749
	intel_dp->adapter.class = I2C_CLASS_DDC;
636
	strncpy(intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
750
	strncpy(intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
637
	intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
751
	intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
638
	intel_dp->adapter.algo_data = &intel_dp->algo;
752
	intel_dp->adapter.algo_data = &intel_dp->algo;
639
	intel_dp->adapter.dev.parent = &intel_connector->base.kdev;
753
	intel_dp->adapter.dev.parent = intel_connector->base.kdev;
Line 640... Line -...
640
 
-
 
641
	ironlake_edp_panel_vdd_on(intel_dp);
754
 
642
	ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
-
 
643
	ironlake_edp_panel_vdd_off(intel_dp, false);
755
	ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
644
	return ret;
756
	return ret;
Line 645... Line 757...
645
}
757
}
646
 
758
 
647
static void
759
static void
648
intel_dp_set_clock(struct intel_encoder *encoder,
760
intel_dp_set_clock(struct intel_encoder *encoder,
649
		   struct intel_crtc_config *pipe_config, int link_bw)
761
		   struct intel_crtc_config *pipe_config, int link_bw)
-
 
762
{
-
 
763
	struct drm_device *dev = encoder->base.dev;
Line 650... Line 764...
650
{
764
	const struct dp_link_dpll *divisor = NULL;
651
	struct drm_device *dev = encoder->base.dev;
-
 
652
 
-
 
653
	if (IS_G4X(dev)) {
-
 
654
		if (link_bw == DP_LINK_BW_1_62) {
765
	int i, count = 0;
655
			pipe_config->dpll.p1 = 2;
-
 
656
			pipe_config->dpll.p2 = 10;
-
 
657
			pipe_config->dpll.n = 2;
-
 
658
			pipe_config->dpll.m1 = 23;
-
 
659
			pipe_config->dpll.m2 = 8;
-
 
660
		} else {
-
 
661
			pipe_config->dpll.p1 = 1;
-
 
662
			pipe_config->dpll.p2 = 10;
-
 
663
			pipe_config->dpll.n = 1;
-
 
664
			pipe_config->dpll.m1 = 14;
766
 
665
			pipe_config->dpll.m2 = 2;
767
	if (IS_G4X(dev)) {
666
		}
768
		divisor = gen4_dpll;
667
		pipe_config->clock_set = true;
769
		count = ARRAY_SIZE(gen4_dpll);
668
	} else if (IS_HASWELL(dev)) {
-
 
669
		/* Haswell has special-purpose DP DDI clocks. */
770
	} else if (IS_HASWELL(dev)) {
670
	} else if (HAS_PCH_SPLIT(dev)) {
-
 
671
		if (link_bw == DP_LINK_BW_1_62) {
771
		/* Haswell has special-purpose DP DDI clocks. */
672
			pipe_config->dpll.n = 1;
772
	} else if (HAS_PCH_SPLIT(dev)) {
673
			pipe_config->dpll.p1 = 2;
-
 
674
			pipe_config->dpll.p2 = 10;
-
 
675
			pipe_config->dpll.m1 = 12;
773
		divisor = pch_dpll;
676
			pipe_config->dpll.m2 = 9;
774
		count = ARRAY_SIZE(pch_dpll);
677
		} else {
-
 
678
			pipe_config->dpll.n = 2;
-
 
679
			pipe_config->dpll.p1 = 1;
-
 
680
			pipe_config->dpll.p2 = 10;
775
	} else if (IS_VALLEYVIEW(dev)) {
-
 
776
		divisor = vlv_dpll;
-
 
777
		count = ARRAY_SIZE(vlv_dpll);
-
 
778
		}
-
 
779
 
-
 
780
	if (divisor && count) {
681
			pipe_config->dpll.m1 = 14;
781
		for (i = 0; i < count; i++) {
682
			pipe_config->dpll.m2 = 8;
782
			if (link_bw == divisor[i].link_bw) {
683
		}
-
 
-
 
783
				pipe_config->dpll = divisor[i].dpll;
-
 
784
		pipe_config->clock_set = true;
684
		pipe_config->clock_set = true;
785
				break;
685
	} else if (IS_VALLEYVIEW(dev)) {
786
			}
Line 686... Line 787...
686
		/* FIXME: Need to figure out optimized DP clocks for vlv. */
787
		}
687
	}
788
	}
Line 724... Line 825...
724
	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLCLK)
825
	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLCLK)
725
		return false;
826
		return false;
Line 726... Line 827...
726
 
827
 
727
	DRM_DEBUG_KMS("DP link computation with max lane count %i "
828
	DRM_DEBUG_KMS("DP link computation with max lane count %i "
728
		      "max bw %02x pixel clock %iKHz\n",
829
		      "max bw %02x pixel clock %iKHz\n",
-
 
830
		      max_lane_count, bws[max_clock],
Line 729... Line 831...
729
		      max_lane_count, bws[max_clock], adjusted_mode->clock);
831
		      adjusted_mode->crtc_clock);
730
 
832
 
731
	/* Walk through all bpp values. Luckily they're all nicely spaced with 2
833
	/* Walk through all bpp values. Luckily they're all nicely spaced with 2
732
	 * bpc in between. */
834
	 * bpc in between. */
-
 
835
	bpp = pipe_config->pipe_bpp;
733
	bpp = pipe_config->pipe_bpp;
836
	if (is_edp(intel_dp) && dev_priv->vbt.edp_bpp &&
734
	if (is_edp(intel_dp) && dev_priv->vbt.edp_bpp) {
837
	    dev_priv->vbt.edp_bpp < bpp) {
735
		DRM_DEBUG_KMS("clamping bpp for eDP panel to BIOS-provided %i\n",
838
		DRM_DEBUG_KMS("clamping bpp for eDP panel to BIOS-provided %i\n",
736
			      dev_priv->vbt.edp_bpp);
839
			      dev_priv->vbt.edp_bpp);
Line 737... Line 840...
737
		bpp = min_t(int, bpp, dev_priv->vbt.edp_bpp);
840
		bpp = dev_priv->vbt.edp_bpp;
738
	}
841
	}
-
 
842
 
Line 739... Line 843...
739
 
843
	for (; bpp >= 6*3; bpp -= 2*3) {
740
	for (; bpp >= 6*3; bpp -= 2*3) {
844
		mode_rate = intel_dp_link_required(adjusted_mode->crtc_clock,
741
		mode_rate = intel_dp_link_required(adjusted_mode->clock, bpp);
845
						   bpp);
742
 
846
 
Line 781... Line 885...
781
		      pipe_config->port_clock, bpp);
885
		      pipe_config->port_clock, bpp);
782
				DRM_DEBUG_KMS("DP link bw required %i available %i\n",
886
				DRM_DEBUG_KMS("DP link bw required %i available %i\n",
783
					      mode_rate, link_avail);
887
					      mode_rate, link_avail);
Line 784... Line 888...
784
 
888
 
785
	intel_link_compute_m_n(bpp, lane_count,
889
	intel_link_compute_m_n(bpp, lane_count,
-
 
890
			       adjusted_mode->crtc_clock,
786
			       adjusted_mode->clock, pipe_config->port_clock,
891
			       pipe_config->port_clock,
Line 787... Line 892...
787
			       &pipe_config->dp_m_n);
892
			       &pipe_config->dp_m_n);
Line 788... Line 893...
788
 
893
 
789
	intel_dp_set_clock(encoder, pipe_config, intel_dp->link_bw);
894
	intel_dp_set_clock(encoder, pipe_config, intel_dp->link_bw);
Line 790... Line -...
790
 
-
 
791
	return true;
-
 
792
}
-
 
793
 
-
 
794
void intel_dp_init_link_config(struct intel_dp *intel_dp)
-
 
795
{
-
 
796
	memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
-
 
797
	intel_dp->link_configuration[0] = intel_dp->link_bw;
-
 
798
	intel_dp->link_configuration[1] = intel_dp->lane_count;
-
 
799
	intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B;
-
 
800
	/*
-
 
801
	 * Check for DPCD version > 1.1 and enhanced framing support
-
 
802
	 */
-
 
803
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
-
 
804
	    (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
-
 
805
		intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
895
 
806
	}
896
	return true;
807
}
897
}
808
 
898
 
809
static void ironlake_set_pll_cpu_edp(struct intel_dp *intel_dp)
899
static void ironlake_set_pll_cpu_edp(struct intel_dp *intel_dp)
Line 876... Line 966...
876
				 pipe_name(crtc->pipe));
966
				 pipe_name(crtc->pipe));
877
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
967
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
878
		intel_write_eld(&encoder->base, adjusted_mode);
968
		intel_write_eld(&encoder->base, adjusted_mode);
879
	}
969
	}
Line 880... Line -...
880
 
-
 
881
	intel_dp_init_link_config(intel_dp);
-
 
882
 
970
 
Line 883... Line 971...
883
	/* Split out the IBX/CPU vs CPT settings */
971
	/* Split out the IBX/CPU vs CPT settings */
884
 
972
 
885
	if (port == PORT_A && IS_GEN7(dev) && !IS_VALLEYVIEW(dev)) {
973
	if (port == PORT_A && IS_GEN7(dev) && !IS_VALLEYVIEW(dev)) {
886
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
974
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
887
			intel_dp->DP |= DP_SYNC_HS_HIGH;
975
			intel_dp->DP |= DP_SYNC_HS_HIGH;
888
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
976
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
Line 889... Line 977...
889
			intel_dp->DP |= DP_SYNC_VS_HIGH;
977
			intel_dp->DP |= DP_SYNC_VS_HIGH;
890
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
978
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
Line 891... Line 979...
891
 
979
 
892
		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
980
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
893
			intel_dp->DP |= DP_ENHANCED_FRAMING;
981
			intel_dp->DP |= DP_ENHANCED_FRAMING;
Line 901... Line 989...
901
			intel_dp->DP |= DP_SYNC_HS_HIGH;
989
			intel_dp->DP |= DP_SYNC_HS_HIGH;
902
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
990
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
903
			intel_dp->DP |= DP_SYNC_VS_HIGH;
991
			intel_dp->DP |= DP_SYNC_VS_HIGH;
904
		intel_dp->DP |= DP_LINK_TRAIN_OFF;
992
		intel_dp->DP |= DP_LINK_TRAIN_OFF;
Line 905... Line 993...
905
 
993
 
906
		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
994
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
Line 907... Line 995...
907
		intel_dp->DP |= DP_ENHANCED_FRAMING;
995
		intel_dp->DP |= DP_ENHANCED_FRAMING;
908
 
996
 
909
		if (crtc->pipe == 1)
997
		if (crtc->pipe == 1)
Line 931... Line 1019...
931
{
1019
{
932
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
1020
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
933
	struct drm_i915_private *dev_priv = dev->dev_private;
1021
	struct drm_i915_private *dev_priv = dev->dev_private;
934
	u32 pp_stat_reg, pp_ctrl_reg;
1022
	u32 pp_stat_reg, pp_ctrl_reg;
Line 935... Line 1023...
935
 
1023
 
936
	pp_stat_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_STATUS : PCH_PP_STATUS;
1024
	pp_stat_reg = _pp_stat_reg(intel_dp);
Line 937... Line 1025...
937
	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
1025
	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
938
 
1026
 
939
	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
1027
	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
940
		      mask, value,
1028
		      mask, value,
Line 944... Line 1032...
944
	if (_wait_for((I915_READ(pp_stat_reg) & mask) == value, 5000, 10)) {
1032
	if (_wait_for((I915_READ(pp_stat_reg) & mask) == value, 5000, 10)) {
945
		DRM_ERROR("Panel status timeout: status %08x control %08x\n",
1033
		DRM_ERROR("Panel status timeout: status %08x control %08x\n",
946
				I915_READ(pp_stat_reg),
1034
				I915_READ(pp_stat_reg),
947
				I915_READ(pp_ctrl_reg));
1035
				I915_READ(pp_ctrl_reg));
948
	}
1036
	}
-
 
1037
 
-
 
1038
	DRM_DEBUG_KMS("Wait complete\n");
949
}
1039
}
Line 950... Line 1040...
950
 
1040
 
951
static void ironlake_wait_panel_on(struct intel_dp *intel_dp)
1041
static void ironlake_wait_panel_on(struct intel_dp *intel_dp)
952
{
1042
{
Line 974... Line 1064...
974
static  u32 ironlake_get_pp_control(struct intel_dp *intel_dp)
1064
static  u32 ironlake_get_pp_control(struct intel_dp *intel_dp)
975
{
1065
{
976
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
1066
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
977
	struct drm_i915_private *dev_priv = dev->dev_private;
1067
	struct drm_i915_private *dev_priv = dev->dev_private;
978
	u32 control;
1068
	u32 control;
979
	u32 pp_ctrl_reg;
-
 
980
 
-
 
981
	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
-
 
982
	control = I915_READ(pp_ctrl_reg);
-
 
Line -... Line 1069...
-
 
1069
 
983
 
1070
	control = I915_READ(_pp_ctrl_reg(intel_dp));
984
	control &= ~PANEL_UNLOCK_MASK;
1071
	control &= ~PANEL_UNLOCK_MASK;
985
	control |= PANEL_UNLOCK_REGS;
1072
	control |= PANEL_UNLOCK_REGS;
986
	return control;
1073
	return control;
Line 993... Line 1080...
993
	u32 pp;
1080
	u32 pp;
994
	u32 pp_stat_reg, pp_ctrl_reg;
1081
	u32 pp_stat_reg, pp_ctrl_reg;
Line 995... Line 1082...
995
 
1082
 
996
	if (!is_edp(intel_dp))
1083
	if (!is_edp(intel_dp))
997
		return;
-
 
Line 998... Line 1084...
998
	DRM_DEBUG_KMS("Turn eDP VDD on\n");
1084
		return;
999
 
1085
 
Line 1000... Line 1086...
1000
	WARN(intel_dp->want_panel_vdd,
1086
	WARN(intel_dp->want_panel_vdd,
Line 1001... Line 1087...
1001
	     "eDP VDD already requested on\n");
1087
	     "eDP VDD already requested on\n");
1002
 
-
 
1003
	intel_dp->want_panel_vdd = true;
1088
 
-
 
1089
	intel_dp->want_panel_vdd = true;
-
 
1090
 
1004
 
1091
	if (ironlake_edp_have_panel_vdd(intel_dp))
-
 
1092
		return;
Line 1005... Line 1093...
1005
	if (ironlake_edp_have_panel_vdd(intel_dp)) {
1093
 
1006
		DRM_DEBUG_KMS("eDP VDD already on\n");
1094
	intel_runtime_pm_get(dev_priv);
Line 1007... Line 1095...
1007
		return;
1095
 
1008
	}
1096
	DRM_DEBUG_KMS("Turning eDP VDD on\n");
Line 1009... Line 1097...
1009
 
1097
 
1010
	if (!ironlake_edp_have_panel_power(intel_dp))
1098
	if (!ironlake_edp_have_panel_power(intel_dp))
Line 1011... Line 1099...
1011
		ironlake_wait_panel_power_cycle(intel_dp);
1099
		ironlake_wait_panel_power_cycle(intel_dp);
1012
 
1100
 
1013
	pp = ironlake_get_pp_control(intel_dp);
1101
	pp = ironlake_get_pp_control(intel_dp);
1014
	pp |= EDP_FORCE_VDD;
1102
	pp |= EDP_FORCE_VDD;
Line 1037... Line 1125...
1037
	u32 pp_stat_reg, pp_ctrl_reg;
1125
	u32 pp_stat_reg, pp_ctrl_reg;
Line 1038... Line 1126...
1038
 
1126
 
Line 1039... Line 1127...
1039
	WARN_ON(!mutex_is_locked(&dev->mode_config.mutex));
1127
	WARN_ON(!mutex_is_locked(&dev->mode_config.mutex));
-
 
1128
 
-
 
1129
	if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
1040
 
1130
		DRM_DEBUG_KMS("Turning eDP VDD off\n");
1041
	if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
1131
 
Line 1042... Line 1132...
1042
		pp = ironlake_get_pp_control(intel_dp);
1132
		pp = ironlake_get_pp_control(intel_dp);
1043
	pp &= ~EDP_FORCE_VDD;
1133
	pp &= ~EDP_FORCE_VDD;
Line 1044... Line 1134...
1044
 
1134
 
1045
		pp_stat_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_STATUS : PCH_PP_STATUS;
1135
		pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
Line 1046... Line 1136...
1046
		pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
1136
		pp_stat_reg = _pp_stat_reg(intel_dp);
1047
 
1137
 
1048
		I915_WRITE(pp_ctrl_reg, pp);
1138
		I915_WRITE(pp_ctrl_reg, pp);
-
 
1139
		POSTING_READ(pp_ctrl_reg);
-
 
1140
 
1049
		POSTING_READ(pp_ctrl_reg);
1141
	/* Make sure sequencer is idle before allowing subsequent activity */
-
 
1142
		DRM_DEBUG_KMS("PP_STATUS: 0x%08x PP_CONTROL: 0x%08x\n",
-
 
1143
		I915_READ(pp_stat_reg), I915_READ(pp_ctrl_reg));
1050
 
1144
 
1051
	/* Make sure sequencer is idle before allowing subsequent activity */
1145
		if ((pp & POWER_TARGET_ON) == 0)
Line 1052... Line 1146...
1052
		DRM_DEBUG_KMS("PP_STATUS: 0x%08x PP_CONTROL: 0x%08x\n",
1146
			msleep(intel_dp->panel_power_cycle_delay);
1053
		I915_READ(pp_stat_reg), I915_READ(pp_ctrl_reg));
1147
 
Line 1069... Line 1163...
1069
void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
1163
void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
1070
{
1164
{
1071
	if (!is_edp(intel_dp))
1165
	if (!is_edp(intel_dp))
1072
		return;
1166
		return;
Line 1073... Line -...
1073
 
-
 
1074
	DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd);
1167
 
Line 1075... Line 1168...
1075
	WARN(!intel_dp->want_panel_vdd, "eDP VDD not forced on");
1168
	WARN(!intel_dp->want_panel_vdd, "eDP VDD not forced on");
Line 1076... Line 1169...
1076
 
1169
 
Line 1106... Line 1199...
1106
		return;
1199
		return;
1107
	}
1200
	}
Line 1108... Line 1201...
1108
 
1201
 
Line -... Line 1202...
-
 
1202
	ironlake_wait_panel_power_cycle(intel_dp);
1109
	ironlake_wait_panel_power_cycle(intel_dp);
1203
 
1110
 
1204
	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
1111
	pp = ironlake_get_pp_control(intel_dp);
1205
	pp = ironlake_get_pp_control(intel_dp);
1112
	if (IS_GEN5(dev)) {
1206
	if (IS_GEN5(dev)) {
1113
	/* ILK workaround: disable reset around power sequence */
1207
	/* ILK workaround: disable reset around power sequence */
1114
	pp &= ~PANEL_POWER_RESET;
1208
	pp &= ~PANEL_POWER_RESET;
1115
	I915_WRITE(PCH_PP_CONTROL, pp);
1209
		I915_WRITE(pp_ctrl_reg, pp);
Line 1116... Line 1210...
1116
	POSTING_READ(PCH_PP_CONTROL);
1210
		POSTING_READ(pp_ctrl_reg);
1117
	}
1211
	}
1118
 
1212
 
Line 1119... Line -...
1119
	pp |= POWER_TARGET_ON;
-
 
1120
	if (!IS_GEN5(dev))
-
 
1121
		pp |= PANEL_POWER_RESET;
1213
	pp |= POWER_TARGET_ON;
1122
 
1214
	if (!IS_GEN5(dev))
Line 1123... Line 1215...
1123
	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
1215
		pp |= PANEL_POWER_RESET;
Line 1124... Line 1216...
1124
 
1216
 
1125
	I915_WRITE(pp_ctrl_reg, pp);
1217
	I915_WRITE(pp_ctrl_reg, pp);
1126
	POSTING_READ(pp_ctrl_reg);
1218
	POSTING_READ(pp_ctrl_reg);
1127
 
1219
 
1128
	ironlake_wait_panel_on(intel_dp);
1220
	ironlake_wait_panel_on(intel_dp);
1129
 
1221
 
Line 1130... Line 1222...
1130
	if (IS_GEN5(dev)) {
1222
	if (IS_GEN5(dev)) {
1131
	pp |= PANEL_POWER_RESET; /* restore panel reset bit */
1223
	pp |= PANEL_POWER_RESET; /* restore panel reset bit */
Line 1144... Line 1236...
1144
	if (!is_edp(intel_dp))
1236
	if (!is_edp(intel_dp))
1145
		return;
1237
		return;
Line 1146... Line 1238...
1146
 
1238
 
Line 1147... Line -...
1147
	DRM_DEBUG_KMS("Turn eDP power off\n");
-
 
1148
 
-
 
1149
	WARN(!intel_dp->want_panel_vdd, "Need VDD to turn off panel\n");
1239
	DRM_DEBUG_KMS("Turn eDP power off\n");
1150
 
1240
 
1151
	pp = ironlake_get_pp_control(intel_dp);
1241
	pp = ironlake_get_pp_control(intel_dp);
1152
	/* We need to switch off panel power _and_ force vdd, for otherwise some
1242
	/* We need to switch off panel power _and_ force vdd, for otherwise some
Line 1153... Line 1243...
1153
	 * panels get very unhappy and cease to work. */
1243
	 * panels get very unhappy and cease to work. */
Line 1154... Line 1244...
1154
	pp &= ~(POWER_TARGET_ON | EDP_FORCE_VDD | PANEL_POWER_RESET | EDP_BLC_ENABLE);
1244
	pp &= ~(POWER_TARGET_ON | PANEL_POWER_RESET | EDP_BLC_ENABLE);
1155
 
1245
 
Line 1156... Line -...
1156
	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
-
 
1157
 
-
 
1158
	I915_WRITE(pp_ctrl_reg, pp);
1246
	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
1159
	POSTING_READ(pp_ctrl_reg);
1247
 
Line 1160... Line 1248...
1160
 
1248
	I915_WRITE(pp_ctrl_reg, pp);
1161
	intel_dp->want_panel_vdd = false;
1249
	POSTING_READ(pp_ctrl_reg);
1162
 
1250
 
1163
	ironlake_wait_panel_off(intel_dp);
1251
	ironlake_wait_panel_off(intel_dp);
1164
}
1252
}
1165
 
-
 
1166
void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
1253
 
1167
{
1254
void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
Line 1168... Line 1255...
1168
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
1255
{
1169
	struct drm_device *dev = intel_dig_port->base.base.dev;
1256
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
Line 1184... Line 1271...
1184
	 */
1271
	 */
1185
	msleep(intel_dp->backlight_on_delay);
1272
	msleep(intel_dp->backlight_on_delay);
1186
	pp = ironlake_get_pp_control(intel_dp);
1273
	pp = ironlake_get_pp_control(intel_dp);
1187
	pp |= EDP_BLC_ENABLE;
1274
	pp |= EDP_BLC_ENABLE;
Line 1188... Line 1275...
1188
 
1275
 
Line 1189... Line 1276...
1189
	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
1276
	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
1190
 
1277
 
Line 1191... Line 1278...
1191
	I915_WRITE(pp_ctrl_reg, pp);
1278
	I915_WRITE(pp_ctrl_reg, pp);
1192
	POSTING_READ(pp_ctrl_reg);
1279
	POSTING_READ(pp_ctrl_reg);
Line 1193... Line 1280...
1193
 
1280
 
1194
	intel_panel_enable_backlight(dev, pipe);
1281
	intel_panel_enable_backlight(intel_dp->attached_connector);
1195
}
1282
}
Line 1202... Line 1289...
1202
	u32 pp_ctrl_reg;
1289
	u32 pp_ctrl_reg;
Line 1203... Line 1290...
1203
 
1290
 
1204
	if (!is_edp(intel_dp))
1291
	if (!is_edp(intel_dp))
Line 1205... Line 1292...
1205
		return;
1292
		return;
Line 1206... Line 1293...
1206
 
1293
 
1207
	intel_panel_disable_backlight(dev);
1294
	intel_panel_disable_backlight(intel_dp->attached_connector);
1208
 
1295
 
Line 1209... Line 1296...
1209
	DRM_DEBUG_KMS("\n");
1296
	DRM_DEBUG_KMS("\n");
Line 1210... Line 1297...
1210
	pp = ironlake_get_pp_control(intel_dp);
1297
	pp = ironlake_get_pp_control(intel_dp);
1211
	pp &= ~EDP_BLC_ENABLE;
1298
	pp &= ~EDP_BLC_ENABLE;
1212
 
1299
 
1213
	pp_ctrl_reg = IS_VALLEYVIEW(dev) ? PIPEA_PP_CONTROL : PCH_PP_CONTROL;
1300
	pp_ctrl_reg = _pp_ctrl_reg(intel_dp);
Line 1355... Line 1442...
1355
	u32 tmp, flags = 0;
1442
	u32 tmp, flags = 0;
1356
	struct drm_device *dev = encoder->base.dev;
1443
	struct drm_device *dev = encoder->base.dev;
1357
	struct drm_i915_private *dev_priv = dev->dev_private;
1444
	struct drm_i915_private *dev_priv = dev->dev_private;
1358
	enum port port = dp_to_dig_port(intel_dp)->port;
1445
	enum port port = dp_to_dig_port(intel_dp)->port;
1359
	struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
1446
	struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
-
 
1447
	int dotclock;
Line 1360... Line 1448...
1360
 
1448
 
1361
	if ((port == PORT_A) || !HAS_PCH_CPT(dev)) {
1449
	if ((port == PORT_A) || !HAS_PCH_CPT(dev)) {
1362
		tmp = I915_READ(intel_dp->output_reg);
1450
		tmp = I915_READ(intel_dp->output_reg);
1363
		if (tmp & DP_SYNC_HS_HIGH)
1451
		if (tmp & DP_SYNC_HS_HIGH)
Line 1382... Line 1470...
1382
			flags |= DRM_MODE_FLAG_NVSYNC;
1470
			flags |= DRM_MODE_FLAG_NVSYNC;
1383
	}
1471
	}
Line 1384... Line 1472...
1384
 
1472
 
Line -... Line 1473...
-
 
1473
	pipe_config->adjusted_mode.flags |= flags;
-
 
1474
 
-
 
1475
	pipe_config->has_dp_encoder = true;
-
 
1476
 
1385
	pipe_config->adjusted_mode.flags |= flags;
1477
	intel_dp_get_m_n(crtc, pipe_config);
1386
 
1478
 
1387
	if (dp_to_dig_port(intel_dp)->port == PORT_A) {
1479
	if (port == PORT_A) {
1388
		if ((I915_READ(DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_160MHZ)
1480
		if ((I915_READ(DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_160MHZ)
1389
			pipe_config->port_clock = 162000;
1481
			pipe_config->port_clock = 162000;
1390
		else
1482
		else
Line -... Line 1483...
-
 
1483
			pipe_config->port_clock = 270000;
-
 
1484
	}
-
 
1485
 
-
 
1486
	dotclock = intel_dotclock_calculate(pipe_config->port_clock,
-
 
1487
					    &pipe_config->dp_m_n);
-
 
1488
 
-
 
1489
	if (HAS_PCH_SPLIT(dev_priv->dev) && port != PORT_A)
-
 
1490
		ironlake_check_encoder_dotclock(pipe_config, dotclock);
1391
			pipe_config->port_clock = 270000;
1491
 
1392
	}
1492
	pipe_config->adjusted_mode.crtc_clock = dotclock;
1393
 
1493
 
1394
	if (is_edp(intel_dp) && dev_priv->vbt.edp_bpp &&
1494
	if (is_edp(intel_dp) && dev_priv->vbt.edp_bpp &&
1395
	    pipe_config->pipe_bpp > dev_priv->vbt.edp_bpp) {
1495
	    pipe_config->pipe_bpp > dev_priv->vbt.edp_bpp) {
Line 1410... Line 1510...
1410
			      pipe_config->pipe_bpp, dev_priv->vbt.edp_bpp);
1510
			      pipe_config->pipe_bpp, dev_priv->vbt.edp_bpp);
1411
		dev_priv->vbt.edp_bpp = pipe_config->pipe_bpp;
1511
		dev_priv->vbt.edp_bpp = pipe_config->pipe_bpp;
1412
	}
1512
	}
1413
}
1513
}
Line 1414... Line 1514...
1414
 
1514
 
1415
static bool is_edp_psr(struct intel_dp *intel_dp)
1515
static bool is_edp_psr(struct drm_device *dev)
1416
{
1516
{
-
 
1517
	struct drm_i915_private *dev_priv = dev->dev_private;
1417
	return is_edp(intel_dp) &&
1518
 
1418
		intel_dp->psr_dpcd[0] & DP_PSR_IS_SUPPORTED;
1519
	return dev_priv->psr.sink_support;
Line 1419... Line 1520...
1419
}
1520
}
1420
 
1521
 
1421
static bool intel_edp_is_psr_enabled(struct drm_device *dev)
1522
static bool intel_edp_is_psr_enabled(struct drm_device *dev)
Line 1422... Line 1523...
1422
{
1523
{
1423
	struct drm_i915_private *dev_priv = dev->dev_private;
1524
	struct drm_i915_private *dev_priv = dev->dev_private;
Line 1424... Line 1525...
1424
 
1525
 
1425
	if (!IS_HASWELL(dev))
1526
	if (!HAS_PSR(dev))
Line 1426... Line 1527...
1426
		return false;
1527
		return false;
1427
 
1528
 
1428
	return I915_READ(EDP_PSR_CTL) & EDP_PSR_ENABLE;
1529
	return I915_READ(EDP_PSR_CTL(dev)) & EDP_PSR_ENABLE;
Line 1473... Line 1574...
1473
	psr_vsc.sdp_header.HB2 = 0x2;
1574
	psr_vsc.sdp_header.HB2 = 0x2;
1474
	psr_vsc.sdp_header.HB3 = 0x8;
1575
	psr_vsc.sdp_header.HB3 = 0x8;
1475
	intel_edp_psr_write_vsc(intel_dp, &psr_vsc);
1576
	intel_edp_psr_write_vsc(intel_dp, &psr_vsc);
Line 1476... Line 1577...
1476
 
1577
 
1477
	/* Avoid continuous PSR exit by masking memup and hpd */
1578
	/* Avoid continuous PSR exit by masking memup and hpd */
1478
	I915_WRITE(EDP_PSR_DEBUG_CTL, EDP_PSR_DEBUG_MASK_MEMUP |
1579
	I915_WRITE(EDP_PSR_DEBUG_CTL(dev), EDP_PSR_DEBUG_MASK_MEMUP |
Line 1479... Line 1580...
1479
		   EDP_PSR_DEBUG_MASK_HPD | EDP_PSR_DEBUG_MASK_LPSP);
1580
		   EDP_PSR_DEBUG_MASK_HPD | EDP_PSR_DEBUG_MASK_LPSP);
1480
 
1581
 
Line 1498... Line 1599...
1498
		intel_dp_aux_native_write_1(intel_dp, DP_PSR_EN_CFG,
1599
		intel_dp_aux_native_write_1(intel_dp, DP_PSR_EN_CFG,
1499
					    DP_PSR_ENABLE |
1600
					    DP_PSR_ENABLE |
1500
					    DP_PSR_MAIN_LINK_ACTIVE);
1601
					    DP_PSR_MAIN_LINK_ACTIVE);
Line 1501... Line 1602...
1501
 
1602
 
1502
	/* Setup AUX registers */
1603
	/* Setup AUX registers */
1503
	I915_WRITE(EDP_PSR_AUX_DATA1, EDP_PSR_DPCD_COMMAND);
1604
	I915_WRITE(EDP_PSR_AUX_DATA1(dev), EDP_PSR_DPCD_COMMAND);
1504
	I915_WRITE(EDP_PSR_AUX_DATA2, EDP_PSR_DPCD_NORMAL_OPERATION);
1605
	I915_WRITE(EDP_PSR_AUX_DATA2(dev), EDP_PSR_DPCD_NORMAL_OPERATION);
1505
	I915_WRITE(EDP_PSR_AUX_CTL,
1606
	I915_WRITE(EDP_PSR_AUX_CTL(dev),
1506
		   DP_AUX_CH_CTL_TIME_OUT_400us |
1607
		   DP_AUX_CH_CTL_TIME_OUT_400us |
1507
		   (msg_size << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
1608
		   (msg_size << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
1508
		   (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
1609
		   (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
1509
		   (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT));
1610
		   (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT));
Line 1514... Line 1615...
1514
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
1615
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
1515
	struct drm_i915_private *dev_priv = dev->dev_private;
1616
	struct drm_i915_private *dev_priv = dev->dev_private;
1516
	uint32_t max_sleep_time = 0x1f;
1617
	uint32_t max_sleep_time = 0x1f;
1517
	uint32_t idle_frames = 1;
1618
	uint32_t idle_frames = 1;
1518
	uint32_t val = 0x0;
1619
	uint32_t val = 0x0;
-
 
1620
	const uint32_t link_entry_time = EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;
Line 1519... Line 1621...
1519
 
1621
 
1520
	if (intel_dp->psr_dpcd[1] & DP_PSR_NO_TRAIN_ON_EXIT) {
1622
	if (intel_dp->psr_dpcd[1] & DP_PSR_NO_TRAIN_ON_EXIT) {
1521
		val |= EDP_PSR_LINK_STANDBY;
1623
		val |= EDP_PSR_LINK_STANDBY;
1522
		val |= EDP_PSR_TP2_TP3_TIME_0us;
1624
		val |= EDP_PSR_TP2_TP3_TIME_0us;
1523
		val |= EDP_PSR_TP1_TIME_0us;
1625
		val |= EDP_PSR_TP1_TIME_0us;
1524
		val |= EDP_PSR_SKIP_AUX_EXIT;
1626
		val |= EDP_PSR_SKIP_AUX_EXIT;
1525
	} else
1627
	} else
Line 1526... Line 1628...
1526
		val |= EDP_PSR_LINK_DISABLE;
1628
		val |= EDP_PSR_LINK_DISABLE;
1527
 
1629
 
1528
	I915_WRITE(EDP_PSR_CTL, val |
1630
	I915_WRITE(EDP_PSR_CTL(dev), val |
1529
		   EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES |
1631
		   IS_BROADWELL(dev) ? 0 : link_entry_time |
1530
		   max_sleep_time << EDP_PSR_MAX_SLEEP_TIME_SHIFT |
1632
		   max_sleep_time << EDP_PSR_MAX_SLEEP_TIME_SHIFT |
1531
		   idle_frames << EDP_PSR_IDLE_FRAME_SHIFT |
1633
		   idle_frames << EDP_PSR_IDLE_FRAME_SHIFT |
Line 1540... Line 1642...
1540
	struct drm_crtc *crtc = dig_port->base.base.crtc;
1642
	struct drm_crtc *crtc = dig_port->base.base.crtc;
1541
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
1643
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
1542
	struct drm_i915_gem_object *obj = to_intel_framebuffer(crtc->fb)->obj;
1644
	struct drm_i915_gem_object *obj = to_intel_framebuffer(crtc->fb)->obj;
1543
	struct intel_encoder *intel_encoder = &dp_to_dig_port(intel_dp)->base;
1645
	struct intel_encoder *intel_encoder = &dp_to_dig_port(intel_dp)->base;
Line -... Line 1646...
-
 
1646
 
-
 
1647
	dev_priv->psr.source_ok = false;
1544
 
1648
 
1545
	if (!IS_HASWELL(dev)) {
1649
	if (!HAS_PSR(dev)) {
1546
		DRM_DEBUG_KMS("PSR not supported on this platform\n");
-
 
1547
		dev_priv->no_psr_reason = PSR_NO_SOURCE;
1650
		DRM_DEBUG_KMS("PSR not supported on this platform\n");
1548
		return false;
1651
		return false;
Line 1549... Line 1652...
1549
	}
1652
	}
1550
 
1653
 
1551
	if ((intel_encoder->type != INTEL_OUTPUT_EDP) ||
1654
	if ((intel_encoder->type != INTEL_OUTPUT_EDP) ||
1552
	    (dig_port->port != PORT_A)) {
-
 
1553
		DRM_DEBUG_KMS("HSW ties PSR to DDI A (eDP)\n");
-
 
1554
		dev_priv->no_psr_reason = PSR_HSW_NOT_DDIA;
-
 
1555
		return false;
-
 
1556
	}
-
 
1557
 
-
 
1558
	if (!is_edp_psr(intel_dp)) {
-
 
1559
		DRM_DEBUG_KMS("PSR not supported by this panel\n");
1655
	    (dig_port->port != PORT_A)) {
1560
		dev_priv->no_psr_reason = PSR_NO_SINK;
1656
		DRM_DEBUG_KMS("HSW ties PSR to DDI A (eDP)\n");
Line 1561... Line 1657...
1561
		return false;
1657
		return false;
1562
	}
1658
	}
1563
 
-
 
1564
	if (!i915_enable_psr) {
1659
 
1565
		DRM_DEBUG_KMS("PSR disable by flag\n");
1660
	if (!i915_enable_psr) {
Line 1566... Line 1661...
1566
		dev_priv->no_psr_reason = PSR_MODULE_PARAM;
1661
		DRM_DEBUG_KMS("PSR disable by flag\n");
1567
		return false;
1662
		return false;
1568
	}
1663
	}
1569
 
-
 
1570
	crtc = dig_port->base.base.crtc;
1664
 
1571
	if (crtc == NULL) {
1665
	crtc = dig_port->base.base.crtc;
Line 1572... Line 1666...
1572
		DRM_DEBUG_KMS("crtc not active for PSR\n");
1666
	if (crtc == NULL) {
1573
		dev_priv->no_psr_reason = PSR_CRTC_NOT_ACTIVE;
1667
		DRM_DEBUG_KMS("crtc not active for PSR\n");
1574
		return false;
1668
		return false;
1575
	}
-
 
1576
 
1669
	}
1577
	intel_crtc = to_intel_crtc(crtc);
1670
 
Line 1578... Line 1671...
1578
	if (!intel_crtc->active || !crtc->fb || !crtc->mode.clock) {
1671
	intel_crtc = to_intel_crtc(crtc);
1579
		DRM_DEBUG_KMS("crtc not active for PSR\n");
1672
	if (!intel_crtc_active(crtc)) {
1580
		dev_priv->no_psr_reason = PSR_CRTC_NOT_ACTIVE;
1673
		DRM_DEBUG_KMS("crtc not active for PSR\n");
1581
		return false;
1674
		return false;
1582
	}
-
 
1583
 
1675
	}
1584
	obj = to_intel_framebuffer(crtc->fb)->obj;
1676
 
Line 1585... Line 1677...
1585
	if (obj->tiling_mode != I915_TILING_X ||
1677
	obj = to_intel_framebuffer(crtc->fb)->obj;
1586
	    obj->fence_reg == I915_FENCE_REG_NONE) {
1678
	if (obj->tiling_mode != I915_TILING_X ||
1587
		DRM_DEBUG_KMS("PSR condition failed: fb not tiled or fenced\n");
-
 
1588
		dev_priv->no_psr_reason = PSR_NOT_TILED;
1679
	    obj->fence_reg == I915_FENCE_REG_NONE) {
1589
		return false;
1680
		DRM_DEBUG_KMS("PSR condition failed: fb not tiled or fenced\n");
Line 1590... Line 1681...
1590
	}
1681
		return false;
1591
 
1682
	}
1592
	if (I915_READ(SPRCTL(intel_crtc->pipe)) & SPRITE_ENABLE) {
1683
 
1593
		DRM_DEBUG_KMS("PSR condition failed: Sprite is Enabled\n");
-
 
1594
		dev_priv->no_psr_reason = PSR_SPRITE_ENABLED;
1684
	if (I915_READ(SPRCTL(intel_crtc->pipe)) & SPRITE_ENABLE) {
1595
		return false;
1685
		DRM_DEBUG_KMS("PSR condition failed: Sprite is Enabled\n");
Line 1596... Line 1686...
1596
	}
1686
		return false;
1597
 
1687
	}
1598
	if (I915_READ(HSW_STEREO_3D_CTL(intel_crtc->config.cpu_transcoder)) &
-
 
1599
	    S3D_ENABLE) {
1688
 
1600
		DRM_DEBUG_KMS("PSR condition failed: Stereo 3D is Enabled\n");
1689
	if (I915_READ(HSW_STEREO_3D_CTL(intel_crtc->config.cpu_transcoder)) &
Line -... Line 1690...
-
 
1690
	    S3D_ENABLE) {
1601
		dev_priv->no_psr_reason = PSR_S3D_ENABLED;
1691
		DRM_DEBUG_KMS("PSR condition failed: Stereo 3D is Enabled\n");
1602
		return false;
1692
		return false;
Line 1603... Line 1693...
1603
	}
1693
	}
1604
 
1694
 
Line 1644... Line 1734...
1644
	struct drm_i915_private *dev_priv = dev->dev_private;
1734
	struct drm_i915_private *dev_priv = dev->dev_private;
Line 1645... Line 1735...
1645
 
1735
 
1646
	if (!intel_edp_is_psr_enabled(dev))
1736
	if (!intel_edp_is_psr_enabled(dev))
Line -... Line 1737...
-
 
1737
		return;
1647
		return;
1738
 
Line 1648... Line 1739...
1648
 
1739
	I915_WRITE(EDP_PSR_CTL(dev),
1649
	I915_WRITE(EDP_PSR_CTL, I915_READ(EDP_PSR_CTL) & ~EDP_PSR_ENABLE);
1740
		   I915_READ(EDP_PSR_CTL(dev)) & ~EDP_PSR_ENABLE);
1650
 
1741
 
1651
	/* Wait till PSR is idle */
1742
	/* Wait till PSR is idle */
1652
	if (_wait_for((I915_READ(EDP_PSR_STATUS_CTL) &
1743
	if (_wait_for((I915_READ(EDP_PSR_STATUS_CTL(dev)) &
Line 1653... Line 1744...
1653
		       EDP_PSR_STATUS_STATE_MASK) == 0, 2000, 10))
1744
		       EDP_PSR_STATUS_STATE_MASK) == 0, 2000, 10))
Line 1661... Line 1752...
1661
 
1752
 
1662
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, base.head)
1753
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, base.head)
1663
		if (encoder->type == INTEL_OUTPUT_EDP) {
1754
		if (encoder->type == INTEL_OUTPUT_EDP) {
Line 1664... Line 1755...
1664
			intel_dp = enc_to_intel_dp(&encoder->base);
1755
			intel_dp = enc_to_intel_dp(&encoder->base);
1665
 
1756
 
Line 1666... Line 1757...
1666
			if (!is_edp_psr(intel_dp))
1757
			if (!is_edp_psr(dev))
1667
				return;
1758
				return;
1668
 
1759
 
Line 1680... Line 1771...
1680
	enum port port = dp_to_dig_port(intel_dp)->port;
1771
	enum port port = dp_to_dig_port(intel_dp)->port;
1681
	struct drm_device *dev = encoder->base.dev;
1772
	struct drm_device *dev = encoder->base.dev;
Line 1682... Line 1773...
1682
 
1773
 
1683
	/* Make sure the panel is off before trying to change the mode. But also
1774
	/* Make sure the panel is off before trying to change the mode. But also
1684
	 * ensure that we have vdd while we switch off the panel. */
-
 
1685
		ironlake_edp_panel_vdd_on(intel_dp);
1775
	 * ensure that we have vdd while we switch off the panel. */
1686
	ironlake_edp_backlight_off(intel_dp);
1776
	ironlake_edp_backlight_off(intel_dp);
1687
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
1777
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_OFF);
Line 1688... Line 1778...
1688
	ironlake_edp_panel_off(intel_dp);
1778
	ironlake_edp_panel_off(intel_dp);
1689
 
1779
 
1690
	/* cpu edp my only be disable _after_ the cpu pipe/plane is disabled. */
1780
	/* cpu edp my only be disable _after_ the cpu pipe/plane is disabled. */
Line 1720... Line 1810...
1720
			intel_dp_start_link_train(intel_dp);
1810
			intel_dp_start_link_train(intel_dp);
1721
				ironlake_edp_panel_on(intel_dp);
1811
				ironlake_edp_panel_on(intel_dp);
1722
			ironlake_edp_panel_vdd_off(intel_dp, true);
1812
			ironlake_edp_panel_vdd_off(intel_dp, true);
1723
			intel_dp_complete_link_train(intel_dp);
1813
			intel_dp_complete_link_train(intel_dp);
1724
	intel_dp_stop_link_train(intel_dp);
1814
	intel_dp_stop_link_train(intel_dp);
-
 
1815
}
-
 
1816
 
-
 
1817
static void g4x_enable_dp(struct intel_encoder *encoder)
-
 
1818
{
-
 
1819
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
-
 
1820
 
-
 
1821
	intel_enable_dp(encoder);
1725
		ironlake_edp_backlight_on(intel_dp);
1822
		ironlake_edp_backlight_on(intel_dp);
1726
}
1823
}
Line 1727... Line 1824...
1727
 
1824
 
1728
static void vlv_enable_dp(struct intel_encoder *encoder)
1825
static void vlv_enable_dp(struct intel_encoder *encoder)
-
 
1826
{
-
 
1827
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
-
 
1828
 
1729
{
1829
	ironlake_edp_backlight_on(intel_dp);
Line 1730... Line 1830...
1730
}
1830
}
1731
 
1831
 
1732
static void intel_pre_enable_dp(struct intel_encoder *encoder)
1832
static void g4x_pre_enable_dp(struct intel_encoder *encoder)
1733
{
1833
{
Line 1734... Line 1834...
1734
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
1834
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
Line 1743... Line 1843...
1743
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
1843
	struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
1744
	struct intel_digital_port *dport = dp_to_dig_port(intel_dp);
1844
	struct intel_digital_port *dport = dp_to_dig_port(intel_dp);
1745
	struct drm_device *dev = encoder->base.dev;
1845
	struct drm_device *dev = encoder->base.dev;
1746
	struct drm_i915_private *dev_priv = dev->dev_private;
1846
	struct drm_i915_private *dev_priv = dev->dev_private;
1747
	struct intel_crtc *intel_crtc = to_intel_crtc(encoder->base.crtc);
1847
	struct intel_crtc *intel_crtc = to_intel_crtc(encoder->base.crtc);
1748
		int port = vlv_dport_to_channel(dport);
1848
	enum dpio_channel port = vlv_dport_to_channel(dport);
1749
		int pipe = intel_crtc->pipe;
1849
		int pipe = intel_crtc->pipe;
-
 
1850
	struct edp_power_seq power_seq;
1750
		u32 val;
1851
		u32 val;
Line 1751... Line 1852...
1751
 
1852
 
Line 1752... Line 1853...
1752
	mutex_lock(&dev_priv->dpio_lock);
1853
	mutex_lock(&dev_priv->dpio_lock);
1753
 
1854
 
1754
		val = vlv_dpio_read(dev_priv, DPIO_DATA_LANE_A(port));
1855
	val = vlv_dpio_read(dev_priv, pipe, VLV_PCS01_DW8(port));
1755
		val = 0;
1856
		val = 0;
1756
		if (pipe)
1857
		if (pipe)
1757
			val |= (1<<21);
1858
			val |= (1<<21);
1758
		else
1859
		else
1759
			val &= ~(1<<21);
1860
			val &= ~(1<<21);
1760
		val |= 0x001000c4;
1861
		val |= 0x001000c4;
1761
		vlv_dpio_write(dev_priv, DPIO_DATA_CHANNEL(port), val);
1862
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW8(port), val);
Line 1762... Line 1863...
1762
	vlv_dpio_write(dev_priv, DPIO_PCS_CLOCKBUF0(port), 0x00760018);
1863
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW14(port), 0x00760018);
Line -... Line 1864...
-
 
1864
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW23(port), 0x00400888);
-
 
1865
 
-
 
1866
	mutex_unlock(&dev_priv->dpio_lock);
-
 
1867
 
-
 
1868
	/* init power sequencer on this pipe and port */
1763
	vlv_dpio_write(dev_priv, DPIO_PCS_CLOCKBUF8(port), 0x00400888);
1869
	intel_dp_init_panel_power_sequencer(dev, intel_dp, &power_seq);
Line 1764... Line 1870...
1764
 
1870
	intel_dp_init_panel_power_sequencer_registers(dev, intel_dp,
1765
	mutex_unlock(&dev_priv->dpio_lock);
1871
						      &power_seq);
Line 1766... Line 1872...
1766
 
1872
 
1767
	intel_enable_dp(encoder);
1873
	intel_enable_dp(encoder);
1768
 
1874
 
1769
	vlv_wait_port_ready(dev_priv, port);
1875
	vlv_wait_port_ready(dev_priv, dport);
1770
}
1876
}
-
 
1877
 
-
 
1878
static void vlv_dp_pre_pll_enable(struct intel_encoder *encoder)
1771
 
1879
{
1772
static void intel_dp_pre_pll_enable(struct intel_encoder *encoder)
-
 
1773
{
1880
	struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
1774
	struct intel_digital_port *dport = enc_to_dig_port(&encoder->base);
-
 
Line 1775... Line 1881...
1775
	struct drm_device *dev = encoder->base.dev;
1881
	struct drm_device *dev = encoder->base.dev;
1776
	struct drm_i915_private *dev_priv = dev->dev_private;
1882
	struct drm_i915_private *dev_priv = dev->dev_private;
1777
	int port = vlv_dport_to_channel(dport);
1883
	struct intel_crtc *intel_crtc =
1778
 
1884
		to_intel_crtc(encoder->base.crtc);
1779
	if (!IS_VALLEYVIEW(dev))
1885
	enum dpio_channel port = vlv_dport_to_channel(dport);
1780
		return;
1886
	int pipe = intel_crtc->pipe;
1781
 
1887
 
1782
	/* Program Tx lane resets to default */
1888
	/* Program Tx lane resets to default */
1783
	mutex_lock(&dev_priv->dpio_lock);
1889
	mutex_lock(&dev_priv->dpio_lock);
1784
	vlv_dpio_write(dev_priv, DPIO_PCS_TX(port),
1890
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW0(port),
Line 1785... Line 1891...
1785
			 DPIO_PCS_TX_LANE2_RESET |
1891
			 DPIO_PCS_TX_LANE2_RESET |
1786
			 DPIO_PCS_TX_LANE1_RESET);
1892
			 DPIO_PCS_TX_LANE1_RESET);
1787
	vlv_dpio_write(dev_priv, DPIO_PCS_CLK(port),
1893
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW1(port),
1788
			 DPIO_PCS_CLK_CRI_RXEB_EIOS_EN |
1894
			 DPIO_PCS_CLK_CRI_RXEB_EIOS_EN |
1789
			 DPIO_PCS_CLK_CRI_RXDIGFILTSG_EN |
1895
			 DPIO_PCS_CLK_CRI_RXDIGFILTSG_EN |
1790
			 (1<<DPIO_PCS_CLK_DATALANE_SHIFT) |
1896
			 (1<<DPIO_PCS_CLK_DATALANE_SHIFT) |
Line 1791... Line 1897...
1791
				 DPIO_PCS_CLK_SOFT_RESET);
1897
				 DPIO_PCS_CLK_SOFT_RESET);
1792
 
1898
 
Line 1833... Line 1939...
1833
					      DP_LANE0_1_STATUS,
1939
					      DP_LANE0_1_STATUS,
1834
					      link_status,
1940
					      link_status,
1835
					      DP_LINK_STATUS_SIZE);
1941
					      DP_LINK_STATUS_SIZE);
1836
}
1942
}
Line 1837... Line -...
1837
 
-
 
1838
#if 0
-
 
1839
static char	*voltage_names[] = {
-
 
1840
	"0.4V", "0.6V", "0.8V", "1.2V"
-
 
1841
};
-
 
1842
static char	*pre_emph_names[] = {
-
 
1843
	"0dB", "3.5dB", "6dB", "9.5dB"
-
 
1844
};
-
 
1845
static char	*link_train_names[] = {
-
 
1846
	"pattern 1", "pattern 2", "idle", "off"
-
 
1847
};
-
 
1848
#endif
-
 
1849
 
1943
 
1850
/*
1944
/*
1851
 * These are source-specific values; current Intel hardware supports
1945
 * These are source-specific values; current Intel hardware supports
1852
 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
1946
 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
Line 1856... Line 1950...
1856
intel_dp_voltage_max(struct intel_dp *intel_dp)
1950
intel_dp_voltage_max(struct intel_dp *intel_dp)
1857
{
1951
{
1858
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
1952
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
1859
	enum port port = dp_to_dig_port(intel_dp)->port;
1953
	enum port port = dp_to_dig_port(intel_dp)->port;
Line 1860... Line 1954...
1860
 
1954
 
1861
	if (IS_VALLEYVIEW(dev))
1955
	if (IS_VALLEYVIEW(dev) || IS_BROADWELL(dev))
1862
		return DP_TRAIN_VOLTAGE_SWING_1200;
1956
		return DP_TRAIN_VOLTAGE_SWING_1200;
1863
	else if (IS_GEN7(dev) && port == PORT_A)
1957
	else if (IS_GEN7(dev) && port == PORT_A)
1864
		return DP_TRAIN_VOLTAGE_SWING_800;
1958
		return DP_TRAIN_VOLTAGE_SWING_800;
1865
	else if (HAS_PCH_CPT(dev) && port != PORT_A)
1959
	else if (HAS_PCH_CPT(dev) && port != PORT_A)
Line 1872... Line 1966...
1872
intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
1966
intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
1873
{
1967
{
1874
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
1968
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
1875
	enum port port = dp_to_dig_port(intel_dp)->port;
1969
	enum port port = dp_to_dig_port(intel_dp)->port;
Line 1876... Line 1970...
1876
 
1970
 
-
 
1971
	if (IS_BROADWELL(dev)) {
-
 
1972
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
-
 
1973
		case DP_TRAIN_VOLTAGE_SWING_400:
-
 
1974
		case DP_TRAIN_VOLTAGE_SWING_600:
-
 
1975
			return DP_TRAIN_PRE_EMPHASIS_6;
-
 
1976
		case DP_TRAIN_VOLTAGE_SWING_800:
-
 
1977
			return DP_TRAIN_PRE_EMPHASIS_3_5;
-
 
1978
		case DP_TRAIN_VOLTAGE_SWING_1200:
-
 
1979
		default:
-
 
1980
			return DP_TRAIN_PRE_EMPHASIS_0;
-
 
1981
		}
1877
	if (HAS_DDI(dev)) {
1982
	} else if (IS_HASWELL(dev)) {
1878
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
1983
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
1879
		case DP_TRAIN_VOLTAGE_SWING_400:
1984
		case DP_TRAIN_VOLTAGE_SWING_400:
1880
			return DP_TRAIN_PRE_EMPHASIS_9_5;
1985
			return DP_TRAIN_PRE_EMPHASIS_9_5;
1881
		case DP_TRAIN_VOLTAGE_SWING_600:
1986
		case DP_TRAIN_VOLTAGE_SWING_600:
Line 1926... Line 2031...
1926
static uint32_t intel_vlv_signal_levels(struct intel_dp *intel_dp)
2031
static uint32_t intel_vlv_signal_levels(struct intel_dp *intel_dp)
1927
{
2032
{
1928
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
2033
	struct drm_device *dev = intel_dp_to_dev(intel_dp);
1929
	struct drm_i915_private *dev_priv = dev->dev_private;
2034
	struct drm_i915_private *dev_priv = dev->dev_private;
1930
	struct intel_digital_port *dport = dp_to_dig_port(intel_dp);
2035
	struct intel_digital_port *dport = dp_to_dig_port(intel_dp);
-
 
2036
	struct intel_crtc *intel_crtc =
-
 
2037
		to_intel_crtc(dport->base.base.crtc);
1931
	unsigned long demph_reg_value, preemph_reg_value,
2038
	unsigned long demph_reg_value, preemph_reg_value,
1932
		uniqtranscale_reg_value;
2039
		uniqtranscale_reg_value;
1933
	uint8_t train_set = intel_dp->train_set[0];
2040
	uint8_t train_set = intel_dp->train_set[0];
1934
	int port = vlv_dport_to_channel(dport);
2041
	enum dpio_channel port = vlv_dport_to_channel(dport);
-
 
2042
	int pipe = intel_crtc->pipe;
Line 1935... Line 2043...
1935
 
2043
 
1936
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
2044
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
1937
	case DP_TRAIN_PRE_EMPHASIS_0:
2045
	case DP_TRAIN_PRE_EMPHASIS_0:
1938
		preemph_reg_value = 0x0004000;
2046
		preemph_reg_value = 0x0004000;
Line 2005... Line 2113...
2005
	default:
2113
	default:
2006
		return 0;
2114
		return 0;
2007
	}
2115
	}
Line 2008... Line 2116...
2008
 
2116
 
2009
	mutex_lock(&dev_priv->dpio_lock);
2117
	mutex_lock(&dev_priv->dpio_lock);
2010
	vlv_dpio_write(dev_priv, DPIO_TX_OCALINIT(port), 0x00000000);
2118
	vlv_dpio_write(dev_priv, pipe, VLV_TX_DW5(port), 0x00000000);
2011
	vlv_dpio_write(dev_priv, DPIO_TX_SWING_CTL4(port), demph_reg_value);
2119
	vlv_dpio_write(dev_priv, pipe, VLV_TX_DW4(port), demph_reg_value);
2012
	vlv_dpio_write(dev_priv, DPIO_TX_SWING_CTL2(port),
2120
	vlv_dpio_write(dev_priv, pipe, VLV_TX_DW2(port),
2013
			 uniqtranscale_reg_value);
2121
			 uniqtranscale_reg_value);
2014
	vlv_dpio_write(dev_priv, DPIO_TX_SWING_CTL3(port), 0x0C782040);
2122
	vlv_dpio_write(dev_priv, pipe, VLV_TX_DW3(port), 0x0C782040);
2015
	vlv_dpio_write(dev_priv, DPIO_PCS_STAGGER0(port), 0x00030000);
2123
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW11(port), 0x00030000);
2016
	vlv_dpio_write(dev_priv, DPIO_PCS_CTL_OVER1(port), preemph_reg_value);
2124
	vlv_dpio_write(dev_priv, pipe, VLV_PCS_DW9(port), preemph_reg_value);
2017
	vlv_dpio_write(dev_priv, DPIO_TX_OCALINIT(port), 0x80000000);
2125
	vlv_dpio_write(dev_priv, pipe, VLV_TX_DW5(port), 0x80000000);
Line 2018... Line 2126...
2018
	mutex_unlock(&dev_priv->dpio_lock);
2126
	mutex_unlock(&dev_priv->dpio_lock);
2019
 
2127
 
Line 2020... Line 2128...
2020
	return 0;
2128
	return 0;
2021
}
2129
}
-
 
2130
 
2022
 
2131
static void
2023
static void
2132
intel_get_adjust_train(struct intel_dp *intel_dp,
2024
intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
2133
		       const uint8_t link_status[DP_LINK_STATUS_SIZE])
2025
{
2134
{
2026
	uint8_t v = 0;
2135
	uint8_t v = 0;
Line 2180... Line 2289...
2180
			      "0x%x\n", signal_levels);
2289
			      "0x%x\n", signal_levels);
2181
		return DDI_BUF_EMP_400MV_0DB_HSW;
2290
		return DDI_BUF_EMP_400MV_0DB_HSW;
2182
	}
2291
	}
2183
}
2292
}
Line -... Line 2293...
-
 
2293
 
-
 
2294
static uint32_t
-
 
2295
intel_bdw_signal_levels(uint8_t train_set)
-
 
2296
{
-
 
2297
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
-
 
2298
					 DP_TRAIN_PRE_EMPHASIS_MASK);
-
 
2299
	switch (signal_levels) {
-
 
2300
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
-
 
2301
		return DDI_BUF_EMP_400MV_0DB_BDW;	/* Sel0 */
-
 
2302
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
-
 
2303
		return DDI_BUF_EMP_400MV_3_5DB_BDW;	/* Sel1 */
-
 
2304
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
-
 
2305
		return DDI_BUF_EMP_400MV_6DB_BDW;	/* Sel2 */
-
 
2306
 
-
 
2307
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
-
 
2308
		return DDI_BUF_EMP_600MV_0DB_BDW;	/* Sel3 */
-
 
2309
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
-
 
2310
		return DDI_BUF_EMP_600MV_3_5DB_BDW;	/* Sel4 */
-
 
2311
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6:
-
 
2312
		return DDI_BUF_EMP_600MV_6DB_BDW;	/* Sel5 */
-
 
2313
 
-
 
2314
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
-
 
2315
		return DDI_BUF_EMP_800MV_0DB_BDW;	/* Sel6 */
-
 
2316
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
-
 
2317
		return DDI_BUF_EMP_800MV_3_5DB_BDW;	/* Sel7 */
-
 
2318
 
-
 
2319
	case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0:
-
 
2320
		return DDI_BUF_EMP_1200MV_0DB_BDW;	/* Sel8 */
-
 
2321
 
-
 
2322
	default:
-
 
2323
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
-
 
2324
			      "0x%x\n", signal_levels);
-
 
2325
		return DDI_BUF_EMP_400MV_0DB_BDW;	/* Sel0 */
-
 
2326
	}
-
 
2327
}
2184
 
2328
 
2185
/* Properly updates "DP" with the correct signal levels. */
2329
/* Properly updates "DP" with the correct signal levels. */
2186
static void
2330
static void
2187
intel_dp_set_signal_levels(struct intel_dp *intel_dp, uint32_t *DP)
2331
intel_dp_set_signal_levels(struct intel_dp *intel_dp, uint32_t *DP)
2188
{
2332
{
2189
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
2333
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
2190
	enum port port = intel_dig_port->port;
2334
	enum port port = intel_dig_port->port;
2191
	struct drm_device *dev = intel_dig_port->base.base.dev;
2335
	struct drm_device *dev = intel_dig_port->base.base.dev;
2192
	uint32_t signal_levels, mask;
2336
	uint32_t signal_levels, mask;
Line 2193... Line 2337...
2193
	uint8_t train_set = intel_dp->train_set[0];
2337
	uint8_t train_set = intel_dp->train_set[0];
-
 
2338
 
-
 
2339
	if (IS_BROADWELL(dev)) {
-
 
2340
		signal_levels = intel_bdw_signal_levels(train_set);
2194
 
2341
		mask = DDI_BUF_EMP_MASK;
2195
	if (HAS_DDI(dev)) {
2342
	} else if (IS_HASWELL(dev)) {
2196
		signal_levels = intel_hsw_signal_levels(train_set);
2343
		signal_levels = intel_hsw_signal_levels(train_set);
2197
		mask = DDI_BUF_EMP_MASK;
2344
		mask = DDI_BUF_EMP_MASK;
2198
	} else if (IS_VALLEYVIEW(dev)) {
2345
	} else if (IS_VALLEYVIEW(dev)) {
Line 2214... Line 2361...
2214
	*DP = (*DP & ~mask) | signal_levels;
2361
	*DP = (*DP & ~mask) | signal_levels;
2215
}
2362
}
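Both columns of intel_dp_set_signal_levels() end the same way: the platform branch picks a signal_levels value plus the mask that bounds it, and only that field of the saved DP register image is replaced. A minimal standalone sketch of that clear-then-or idiom follows; the mask and level constants are invented for illustration and are not the driver's DDI_BUF_EMP definitions.

#include <stdint.h>
#include <stdio.h>

/* Illustrative values only -- not i915 register definitions. */
#define EXAMPLE_LEVEL_MASK  0x0000003fu  /* bits that hold the drive-level select */
#define EXAMPLE_LEVEL_SEL   0x00000024u  /* new select picked from a lookup table */

int main(void)
{
	uint32_t dp = 0x80001583u;       /* stand-in for the cached port register */

	/* Same idiom as "*DP = (*DP & ~mask) | signal_levels;" above:
	 * clear the old field, then or in the new selection. */
	dp = (dp & ~EXAMPLE_LEVEL_MASK) | EXAMPLE_LEVEL_SEL;

	printf("0x%08x\n", dp);
	return 0;
}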
Line 2216... Line 2363...
2216
 
2363
 
2217
static bool
2364
static bool
2218
intel_dp_set_link_train(struct intel_dp *intel_dp,
2365
intel_dp_set_link_train(struct intel_dp *intel_dp,
2219
			uint32_t dp_reg_value,
2366
			uint32_t *DP,
2220
			uint8_t dp_train_pat)
2367
			uint8_t dp_train_pat)
2221
{
2368
{
2222
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
2369
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
2223
	struct drm_device *dev = intel_dig_port->base.base.dev;
2370
	struct drm_device *dev = intel_dig_port->base.base.dev;
2224
	struct drm_i915_private *dev_priv = dev->dev_private;
2371
	struct drm_i915_private *dev_priv = dev->dev_private;
-
 
2372
	enum port port = intel_dig_port->port;
2225
	enum port port = intel_dig_port->port;
2373
	uint8_t buf[sizeof(intel_dp->train_set) + 1];
Line 2226... Line 2374...
2226
	int ret;
2374
	int ret, len;
2227
 
2375
 
Line 2228... Line 2376...
2228
	if (HAS_DDI(dev)) {
2376
	if (HAS_DDI(dev)) {
Line 2250... Line 2398...
2250
			break;
2398
			break;
2251
		}
2399
		}
2252
		I915_WRITE(DP_TP_CTL(port), temp);
2400
		I915_WRITE(DP_TP_CTL(port), temp);
Line 2253... Line 2401...
2253
 
2401
 
2254
	} else if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || port != PORT_A)) {
2402
	} else if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || port != PORT_A)) {
Line 2255... Line 2403...
2255
		dp_reg_value &= ~DP_LINK_TRAIN_MASK_CPT;
2403
		*DP &= ~DP_LINK_TRAIN_MASK_CPT;
2256
 
2404
 
2257
		switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
2405
		switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
2258
		case DP_TRAINING_PATTERN_DISABLE:
2406
		case DP_TRAINING_PATTERN_DISABLE:
2259
			dp_reg_value |= DP_LINK_TRAIN_OFF_CPT;
2407
			*DP |= DP_LINK_TRAIN_OFF_CPT;
2260
			break;
2408
			break;
2261
		case DP_TRAINING_PATTERN_1:
2409
		case DP_TRAINING_PATTERN_1:
2262
			dp_reg_value |= DP_LINK_TRAIN_PAT_1_CPT;
2410
			*DP |= DP_LINK_TRAIN_PAT_1_CPT;
2263
			break;
2411
			break;
2264
		case DP_TRAINING_PATTERN_2:
2412
		case DP_TRAINING_PATTERN_2:
2265
			dp_reg_value |= DP_LINK_TRAIN_PAT_2_CPT;
2413
			*DP |= DP_LINK_TRAIN_PAT_2_CPT;
2266
			break;
2414
			break;
2267
		case DP_TRAINING_PATTERN_3:
2415
		case DP_TRAINING_PATTERN_3:
2268
			DRM_ERROR("DP training pattern 3 not supported\n");
2416
			DRM_ERROR("DP training pattern 3 not supported\n");
2269
			dp_reg_value |= DP_LINK_TRAIN_PAT_2_CPT;
2417
			*DP |= DP_LINK_TRAIN_PAT_2_CPT;
Line 2270... Line 2418...
2270
			break;
2418
			break;
2271
		}
2419
		}
Line 2272... Line 2420...
2272
 
2420
 
2273
	} else {
2421
	} else {
2274
		dp_reg_value &= ~DP_LINK_TRAIN_MASK;
2422
		*DP &= ~DP_LINK_TRAIN_MASK;
2275
 
2423
 
2276
		switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
2424
		switch (dp_train_pat & DP_TRAINING_PATTERN_MASK) {
2277
		case DP_TRAINING_PATTERN_DISABLE:
2425
		case DP_TRAINING_PATTERN_DISABLE:
2278
			dp_reg_value |= DP_LINK_TRAIN_OFF;
2426
			*DP |= DP_LINK_TRAIN_OFF;
2279
			break;
2427
			break;
2280
		case DP_TRAINING_PATTERN_1:
2428
		case DP_TRAINING_PATTERN_1:
2281
			dp_reg_value |= DP_LINK_TRAIN_PAT_1;
2429
			*DP |= DP_LINK_TRAIN_PAT_1;
2282
			break;
2430
			break;
2283
		case DP_TRAINING_PATTERN_2:
2431
		case DP_TRAINING_PATTERN_2:
2284
			dp_reg_value |= DP_LINK_TRAIN_PAT_2;
2432
			*DP |= DP_LINK_TRAIN_PAT_2;
2285
			break;
2433
			break;
2286
		case DP_TRAINING_PATTERN_3:
2434
		case DP_TRAINING_PATTERN_3:
2287
			DRM_ERROR("DP training pattern 3 not supported\n");
2435
			DRM_ERROR("DP training pattern 3 not supported\n");
Line 2288... Line 2436...
2288
			dp_reg_value |= DP_LINK_TRAIN_PAT_2;
2436
			*DP |= DP_LINK_TRAIN_PAT_2;
2289
			break;
2437
			break;
Line 2290... Line -...
2290
		}
-
 
2291
	}
-
 
2292
 
2438
		}
2293
	I915_WRITE(intel_dp->output_reg, dp_reg_value);
-
 
2294
	POSTING_READ(intel_dp->output_reg);
2439
	}
2295
 
2440
 
-
 
2441
	I915_WRITE(intel_dp->output_reg, *DP);
-
 
2442
	POSTING_READ(intel_dp->output_reg);
-
 
2443
 
-
 
2444
	buf[0] = dp_train_pat;
-
 
2445
	if ((dp_train_pat & DP_TRAINING_PATTERN_MASK) ==
-
 
2446
	    DP_TRAINING_PATTERN_DISABLE) {
-
 
2447
		/* don't write DP_TRAINING_LANEx_SET on disable */
-
 
2448
		len = 1;
2296
	intel_dp_aux_native_write_1(intel_dp,
2449
	} else {
-
 
2450
		/* DP_TRAINING_LANEx_SET follow DP_TRAINING_PATTERN_SET */
-
 
2451
		memcpy(buf + 1, intel_dp->train_set, intel_dp->lane_count);
-
 
2452
		len = intel_dp->lane_count + 1;
-
 
2453
	}
-
 
2454
 
-
 
2455
	ret = intel_dp_aux_native_write(intel_dp, DP_TRAINING_PATTERN_SET,
-
 
2456
					buf, len);
2297
				    DP_TRAINING_PATTERN_SET,
2457
 
-
 
2458
	return ret == len;
-
 
2459
}
-
 
2460
 
-
 
2461
static bool
-
 
2462
intel_dp_reset_link_train(struct intel_dp *intel_dp, uint32_t *DP,
-
 
2463
			uint8_t dp_train_pat)
-
 
2464
{
-
 
2465
	memset(intel_dp->train_set, 0, sizeof(intel_dp->train_set));
-
 
2466
	intel_dp_set_signal_levels(intel_dp, DP);
-
 
2467
	return intel_dp_set_link_train(intel_dp, DP, dp_train_pat);
-
 
2468
}
-
 
2469
 
-
 
2470
static bool
-
 
2471
intel_dp_update_link_train(struct intel_dp *intel_dp, uint32_t *DP,
-
 
2472
			   const uint8_t link_status[DP_LINK_STATUS_SIZE])
-
 
2473
{
-
 
2474
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
-
 
2475
	struct drm_device *dev = intel_dig_port->base.base.dev;
-
 
2476
	struct drm_i915_private *dev_priv = dev->dev_private;
-
 
2477
	int ret;
-
 
2478
 
-
 
2479
	intel_get_adjust_train(intel_dp, link_status);
2298
				    dp_train_pat);
2480
	intel_dp_set_signal_levels(intel_dp, DP);
2299
 
2481
 
2300
	if ((dp_train_pat & DP_TRAINING_PATTERN_MASK) !=
-
 
2301
	    DP_TRAINING_PATTERN_DISABLE) {
-
 
2302
	ret = intel_dp_aux_native_write(intel_dp,
-
 
Line 2303... Line 2482...
2303
					DP_TRAINING_LANE0_SET,
2482
	I915_WRITE(intel_dp->output_reg, *DP);
2304
					intel_dp->train_set,
2483
	POSTING_READ(intel_dp->output_reg);
Line 2305... Line 2484...
2305
					intel_dp->lane_count);
2484
 
2306
	if (ret != intel_dp->lane_count)
2485
	ret = intel_dp_aux_native_write(intel_dp, DP_TRAINING_LANE0_SET,
2307
		return false;
2486
					intel_dp->train_set,
Line 2349... Line 2528...
2349
	struct drm_device *dev = encoder->dev;
2528
	struct drm_device *dev = encoder->dev;
2350
	int i;
2529
	int i;
2351
	uint8_t voltage;
2530
	uint8_t voltage;
2352
	int voltage_tries, loop_tries;
2531
	int voltage_tries, loop_tries;
2353
	uint32_t DP = intel_dp->DP;
2532
	uint32_t DP = intel_dp->DP;
-
 
2533
	uint8_t link_config[2];
Line 2354... Line 2534...
2354
 
2534
 
2355
	if (HAS_DDI(dev))
2535
	if (HAS_DDI(dev))
Line 2356... Line 2536...
2356
		intel_ddi_prepare_link_retrain(encoder);
2536
		intel_ddi_prepare_link_retrain(encoder);
-
 
2537
 
-
 
2538
	/* Write the link configuration data */
-
 
2539
	link_config[0] = intel_dp->link_bw;
-
 
2540
	link_config[1] = intel_dp->lane_count;
2357
 
2541
	if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
-
 
2542
		link_config[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
2358
	/* Write the link configuration data */
2543
	intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET, link_config, 2);
2359
	intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
2544
 
-
 
2545
	link_config[0] = 0;
Line 2360... Line 2546...
2360
				  intel_dp->link_configuration,
2546
	link_config[1] = DP_SET_ANSI_8B10B;
Line -... Line 2547...
-
 
2547
	intel_dp_aux_native_write(intel_dp, DP_DOWNSPREAD_CTRL, link_config, 2);
2361
				  DP_LINK_CONFIGURATION_SIZE);
2548
 
-
 
2549
	DP |= DP_PORT_EN;
-
 
2550
 
-
 
2551
	/* clock recovery */
-
 
2552
	if (!intel_dp_reset_link_train(intel_dp, &DP,
-
 
2553
				       DP_TRAINING_PATTERN_1 |
-
 
2554
				       DP_LINK_SCRAMBLING_DISABLE)) {
2362
 
2555
		DRM_ERROR("failed to enable link training\n");
2363
	DP |= DP_PORT_EN;
2556
		return;
2364
 
2557
	}
2365
	memset(intel_dp->train_set, 0, 4);
2558
 
2366
	voltage = 0xff;
-
 
2367
	voltage_tries = 0;
2559
	voltage = 0xff;
Line 2368... Line -...
2368
	loop_tries = 0;
-
 
2369
	for (;;) {
-
 
2370
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
-
 
2371
		uint8_t	    link_status[DP_LINK_STATUS_SIZE];
-
 
2372
 
-
 
2373
		intel_dp_set_signal_levels(intel_dp, &DP);
-
 
2374
 
-
 
2375
		/* Set training pattern 1 */
-
 
2376
		if (!intel_dp_set_link_train(intel_dp, DP,
2560
	voltage_tries = 0;
2377
					     DP_TRAINING_PATTERN_1 |
2561
	loop_tries = 0;
2378
					     DP_LINK_SCRAMBLING_DISABLE))
2562
	for (;;) {
2379
			break;
2563
		uint8_t	    link_status[DP_LINK_STATUS_SIZE];
2380
 
2564
 
Line 2394... Line 2578...
2394
			if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
2578
			if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
2395
				break;
2579
				break;
2396
		if (i == intel_dp->lane_count) {
2580
		if (i == intel_dp->lane_count) {
2397
			++loop_tries;
2581
			++loop_tries;
2398
			if (loop_tries == 5) {
2582
			if (loop_tries == 5) {
2399
				DRM_DEBUG_KMS("too many full retries, give up\n");
2583
				DRM_ERROR("too many full retries, give up\n");
2400
			break;
2584
			break;
2401
			}
2585
			}
2402
			memset(intel_dp->train_set, 0, 4);
2586
			intel_dp_reset_link_train(intel_dp, &DP,
-
 
2587
						  DP_TRAINING_PATTERN_1 |
-
 
2588
						  DP_LINK_SCRAMBLING_DISABLE);
2403
			voltage_tries = 0;
2589
			voltage_tries = 0;
2404
			continue;
2590
			continue;
2405
		}
2591
		}
Line 2406... Line 2592...
2406
 
2592
 
2407
		/* Check to see if we've tried the same voltage 5 times */
2593
		/* Check to see if we've tried the same voltage 5 times */
2408
		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
2594
		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
2409
			++voltage_tries;
2595
			++voltage_tries;
2410
			if (voltage_tries == 5) {
2596
			if (voltage_tries == 5) {
2411
				DRM_DEBUG_KMS("too many voltage retries, give up\n");
2597
				DRM_ERROR("too many voltage retries, give up\n");
2412
				break;
2598
				break;
2413
			}
2599
			}
2414
		} else
2600
		} else
2415
			voltage_tries = 0;
2601
			voltage_tries = 0;
Line 2416... Line 2602...
2416
		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
2602
		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
2417
 
2603
 
-
 
2604
		/* Update training set as requested by target */
-
 
2605
		if (!intel_dp_update_link_train(intel_dp, &DP, link_status)) {
-
 
2606
			DRM_ERROR("failed to update link training\n");
2418
		/* Compute new intel_dp->train_set as requested by target */
2607
			break;
Line 2419... Line 2608...
2419
		intel_get_adjust_train(intel_dp, link_status);
2608
		}
2420
	}
2609
	}
Line 2428... Line 2617...
2428
	bool channel_eq = false;
2617
	bool channel_eq = false;
2429
	int tries, cr_tries;
2618
	int tries, cr_tries;
2430
	uint32_t DP = intel_dp->DP;
2619
	uint32_t DP = intel_dp->DP;
Line 2431... Line 2620...
2431
 
2620
 
-
 
2621
	/* channel equalization */
-
 
2622
	if (!intel_dp_set_link_train(intel_dp, &DP,
-
 
2623
				     DP_TRAINING_PATTERN_2 |
-
 
2624
				     DP_LINK_SCRAMBLING_DISABLE)) {
-
 
2625
		DRM_ERROR("failed to start channel equalization\n");
-
 
2626
		return;
-
 
2627
	}
2432
	/* channel equalization */
2628
 
2433
	tries = 0;
2629
	tries = 0;
2434
	cr_tries = 0;
2630
	cr_tries = 0;
2435
	channel_eq = false;
2631
	channel_eq = false;
2436
	for (;;) {
2632
	for (;;) {
Line 2437... Line 2633...
2437
		uint8_t	    link_status[DP_LINK_STATUS_SIZE];
2633
		uint8_t	    link_status[DP_LINK_STATUS_SIZE];
2438
 
2634
 
2439
		if (cr_tries > 5) {
-
 
2440
			DRM_ERROR("failed to train DP, aborting\n");
2635
		if (cr_tries > 5) {
2441
			intel_dp_link_down(intel_dp);
2636
			DRM_ERROR("failed to train DP, aborting\n");
Line 2442... Line -...
2442
			break;
-
 
2443
		}
-
 
2444
 
-
 
2445
		intel_dp_set_signal_levels(intel_dp, &DP);
-
 
2446
 
-
 
2447
		/* channel eq pattern */
-
 
2448
		if (!intel_dp_set_link_train(intel_dp, DP,
-
 
2449
					     DP_TRAINING_PATTERN_2 |
-
 
2450
					     DP_LINK_SCRAMBLING_DISABLE))
2637
			break;
2451
			break;
2638
		}
-
 
2639
 
2452
 
2640
		drm_dp_link_train_channel_eq_delay(intel_dp->dpcd);
-
 
2641
		if (!intel_dp_get_link_status(intel_dp, link_status)) {
Line 2453... Line 2642...
2453
		drm_dp_link_train_channel_eq_delay(intel_dp->dpcd);
2642
			DRM_ERROR("failed to get link status\n");
2454
		if (!intel_dp_get_link_status(intel_dp, link_status))
2643
			break;
2455
			break;
2644
		}
-
 
2645
 
-
 
2646
		/* Make sure clock is still ok */
-
 
2647
		if (!drm_dp_clock_recovery_ok(link_status, intel_dp->lane_count)) {
2456
 
2648
			intel_dp_start_link_train(intel_dp);
2457
		/* Make sure clock is still ok */
2649
			intel_dp_set_link_train(intel_dp, &DP,
2458
		if (!drm_dp_clock_recovery_ok(link_status, intel_dp->lane_count)) {
2650
						DP_TRAINING_PATTERN_2 |
Line 2459... Line 2651...
2459
			intel_dp_start_link_train(intel_dp);
2651
						DP_LINK_SCRAMBLING_DISABLE);
Line 2468... Line 2660...
2468
 
2660
 
2469
		/* Try 5 times, then try clock recovery if that fails */
2661
		/* Try 5 times, then try clock recovery if that fails */
2470
		if (tries > 5) {
2662
		if (tries > 5) {
2471
			intel_dp_link_down(intel_dp);
2663
			intel_dp_link_down(intel_dp);
-
 
2664
			intel_dp_start_link_train(intel_dp);
-
 
2665
			intel_dp_set_link_train(intel_dp, &DP,
-
 
2666
						DP_TRAINING_PATTERN_2 |
2472
			intel_dp_start_link_train(intel_dp);
2667
						DP_LINK_SCRAMBLING_DISABLE);
2473
			tries = 0;
2668
			tries = 0;
2474
			cr_tries++;
2669
			cr_tries++;
2475
			continue;
2670
			continue;
Line 2476... Line 2671...
2476
		}
2671
		}
2477
 
2672
 
-
 
2673
		/* Update training set as requested by target */
-
 
2674
		if (!intel_dp_update_link_train(intel_dp, &DP, link_status)) {
-
 
2675
			DRM_ERROR("failed to update link training\n");
2478
		/* Compute new intel_dp->train_set as requested by target */
2676
			break;
2479
		intel_get_adjust_train(intel_dp, link_status);
2677
		}
Line 2480... Line 2678...
2480
		++tries;
2678
		++tries;
Line 2489... Line 2687...
2489
 
2687
 
Line 2490... Line 2688...
2490
}
2688
}
2491
 
2689
 
2492
void intel_dp_stop_link_train(struct intel_dp *intel_dp)
2690
void intel_dp_stop_link_train(struct intel_dp *intel_dp)
2493
{
2691
{
2494
	intel_dp_set_link_train(intel_dp, intel_dp->DP,
2692
	intel_dp_set_link_train(intel_dp, &intel_dp->DP,
Line 2495... Line 2693...
2495
				DP_TRAINING_PATTERN_DISABLE);
2693
				DP_TRAINING_PATTERN_DISABLE);
2496
}
2694
}
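Channel equalization (the loop before intel_dp_stop_link_train() above) layers a second escalation on top of clock recovery: up to five attempts with training pattern 2, re-running intel_dp_start_link_train() whenever clock recovery is lost or those attempts are exhausted, and aborting after five such restarts (cr_tries). The standalone sketch below models that escalation with stubbed phases; none of it is driver code, and the stubs only stand in for the TPS1/TPS2 work the real functions do.

#include <stdbool.h>
#include <stdio.h>

/* Stubs for the real phases: the TPS1 clock-recovery loop, the per-pass
 * clock check, and one TPS2 channel-eq status poll. */
static int eq_polls;

static void run_clock_recovery(void) { /* voltage/pre-emphasis walk (TPS1) */ }
static bool clock_still_ok(void)     { return true; }
static bool channel_eq_ok(void)      { return ++eq_polls >= 3; }

/* Model of the escalation: five eq polls per clock-recovery pass, and at
 * most five clock-recovery restarts (cr_tries) before giving up. */
static bool train_link(void)
{
	int tries = 0, cr_tries = 0;

	run_clock_recovery();                   /* initial TPS1 phase */

	for (;;) {
		if (cr_tries > 5)
			return false;           /* failed to train, abort */

		if (!clock_still_ok()) {        /* lost clock recovery */
			run_clock_recovery();   /* back to TPS1, then retry TPS2 */
			cr_tries++;
			continue;
		}

		if (channel_eq_ok())
			return true;            /* both phases done */

		if (tries > 5) {                /* five TPS2 tries: redo TPS1 */
			run_clock_recovery();
			tries = 0;
			cr_tries++;
			continue;
		}

		++tries;                        /* adjust drive levels, poll again */
	}
}

int main(void)
{
	printf("%s\n", train_link() ? "trained" : "failed");
	return 0;
}

After a successful pass the driver clears the pattern with intel_dp_stop_link_train(), i.e. DP_TRAINING_PATTERN_DISABLE, as shown above.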
Line 2576... Line 2774...
2576
}
2774
}
Line 2577... Line 2775...
2577
 
2775
 
2578
static bool
2776
static bool
2579
intel_dp_get_dpcd(struct intel_dp *intel_dp)
2777
intel_dp_get_dpcd(struct intel_dp *intel_dp)
-
 
2778
{
-
 
2779
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
-
 
2780
	struct drm_device *dev = dig_port->base.base.dev;
-
 
2781
	struct drm_i915_private *dev_priv = dev->dev_private;
2580
{
2782
 
Line 2581... Line 2783...
2581
	char dpcd_hex_dump[sizeof(intel_dp->dpcd) * 3];
2783
	char dpcd_hex_dump[sizeof(intel_dp->dpcd) * 3];
2582
 
2784
 
2583
	if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd,
2785
	if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd,
Line 2591... Line 2793...
2591
	if (intel_dp->dpcd[DP_DPCD_REV] == 0)
2793
	if (intel_dp->dpcd[DP_DPCD_REV] == 0)
2592
		return false; /* DPCD not present */
2794
		return false; /* DPCD not present */
Line 2593... Line 2795...
2593
 
2795
 
2594
	/* Check if the panel supports PSR */
2796
	/* Check if the panel supports PSR */
-
 
2797
	memset(intel_dp->psr_dpcd, 0, sizeof(intel_dp->psr_dpcd));
2595
	memset(intel_dp->psr_dpcd, 0, sizeof(intel_dp->psr_dpcd));
2798
	if (is_edp(intel_dp)) {
2596
	intel_dp_aux_native_read_retry(intel_dp, DP_PSR_SUPPORT,
2799
	intel_dp_aux_native_read_retry(intel_dp, DP_PSR_SUPPORT,
2597
				       intel_dp->psr_dpcd,
2800
				       intel_dp->psr_dpcd,
-
 
2801
				       sizeof(intel_dp->psr_dpcd));
2598
				       sizeof(intel_dp->psr_dpcd));
2802
		if (intel_dp->psr_dpcd[0] & DP_PSR_IS_SUPPORTED) {
2599
	if (is_edp_psr(intel_dp))
2803
			dev_priv->psr.sink_support = true;
-
 
2804
		DRM_DEBUG_KMS("Detected EDP PSR Panel.\n");
-
 
2805
		}
-
 
2806
	}
2600
		DRM_DEBUG_KMS("Detected EDP PSR Panel.\n");
2807
 
2601
	if (!(intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
2808
	if (!(intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
2602
	      DP_DWN_STRM_PORT_PRESENT))
2809
	      DP_DWN_STRM_PORT_PRESENT))
Line 2603... Line 2810...
2603
		return true; /* native DP sink */
2810
		return true; /* native DP sink */
Line 2677... Line 2884...
2677
	if (WARN_ON(!intel_encoder->base.crtc))
2884
	if (WARN_ON(!intel_encoder->base.crtc))
2678
		return;
2885
		return;
Line 2679... Line 2886...
2679
 
2886
 
2680
	/* Try to read receiver status if the link appears to be up */
2887
	/* Try to read receiver status if the link appears to be up */
2681
	if (!intel_dp_get_link_status(intel_dp, link_status)) {
-
 
2682
		intel_dp_link_down(intel_dp);
2888
	if (!intel_dp_get_link_status(intel_dp, link_status)) {
2683
		return;
2889
		return;
Line 2684... Line 2890...
2684
	}
2890
	}
2685
 
2891
 
2686
	/* Now read the DPCD to see if it's actually running */
-
 
2687
	if (!intel_dp_get_dpcd(intel_dp)) {
2892
	/* Now read the DPCD to see if it's actually running */
2688
		intel_dp_link_down(intel_dp);
2893
	if (!intel_dp_get_dpcd(intel_dp)) {
Line 2689... Line 2894...
2689
		return;
2894
		return;
2690
	}
2895
	}
Line 2715... Line 2920...
2715
/* XXX this is probably wrong for multiple downstream ports */
2920
/* XXX this is probably wrong for multiple downstream ports */
2716
static enum drm_connector_status
2921
static enum drm_connector_status
2717
intel_dp_detect_dpcd(struct intel_dp *intel_dp)
2922
intel_dp_detect_dpcd(struct intel_dp *intel_dp)
2718
{
2923
{
2719
	uint8_t *dpcd = intel_dp->dpcd;
2924
	uint8_t *dpcd = intel_dp->dpcd;
2720
	bool hpd;
-
 
2721
	uint8_t type;
2925
	uint8_t type;
Line 2722... Line 2926...
2722
 
2926
 
2723
	if (!intel_dp_get_dpcd(intel_dp))
2927
	if (!intel_dp_get_dpcd(intel_dp))
Line 2724... Line 2928...
2724
		return connector_status_disconnected;
2928
		return connector_status_disconnected;
2725
 
2929
 
2726
	/* if there's no downstream port, we're done */
2930
	/* if there's no downstream port, we're done */
Line 2727... Line 2931...
2727
	if (!(dpcd[DP_DOWNSTREAMPORT_PRESENT] & DP_DWN_STRM_PORT_PRESENT))
2931
	if (!(dpcd[DP_DOWNSTREAMPORT_PRESENT] & DP_DWN_STRM_PORT_PRESENT))
-
 
2932
		return connector_status_connected;
2728
		return connector_status_connected;
2933
 
2729
 
-
 
2730
	/* If we're HPD-aware, SINK_COUNT changes dynamically */
2934
	/* If we're HPD-aware, SINK_COUNT changes dynamically */
2731
	hpd = !!(intel_dp->downstream_ports[0] & DP_DS_PORT_HPD);
2935
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
2732
	if (hpd) {
2936
	    intel_dp->downstream_ports[0] & DP_DS_PORT_HPD) {
2733
		uint8_t reg;
2937
		uint8_t reg;
2734
		if (!intel_dp_aux_native_read_retry(intel_dp, DP_SINK_COUNT,
2938
		if (!intel_dp_aux_native_read_retry(intel_dp, DP_SINK_COUNT,
Line 2741... Line 2945...
2741
	/* If no HPD, poke DDC gently */
2945
	/* If no HPD, poke DDC gently */
2742
	if (drm_probe_ddc(&intel_dp->adapter))
2946
	if (drm_probe_ddc(&intel_dp->adapter))
2743
		return connector_status_connected;
2947
		return connector_status_connected;
Line 2744... Line 2948...
2744
 
2948
 
-
 
2949
	/* Well we tried, say unknown for unreliable port types */
2745
	/* Well we tried, say unknown for unreliable port types */
2950
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
-
 
2951
	type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK;
2746
	type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK;
2952
		if (type == DP_DS_PORT_TYPE_VGA ||
2747
	if (type == DP_DS_PORT_TYPE_VGA || type == DP_DS_PORT_TYPE_NON_EDID)
2953
		    type == DP_DS_PORT_TYPE_NON_EDID)
-
 
2954
		return connector_status_unknown;
-
 
2955
	} else {
-
 
2956
		type = intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
-
 
2957
			DP_DWN_STRM_PORT_TYPE_MASK;
-
 
2958
		if (type == DP_DWN_STRM_PORT_TYPE_ANALOG ||
-
 
2959
		    type == DP_DWN_STRM_PORT_TYPE_OTHER)
-
 
2960
		return connector_status_unknown;
Line 2748... Line 2961...
2748
		return connector_status_unknown;
2961
	}
2749
 
2962
 
2750
	/* Anything else is out of spec, warn and ignore */
2963
	/* Anything else is out of spec, warn and ignore */
2751
	DRM_DEBUG_KMS("Broken DP branch device, ignoring\n");
2964
	DRM_DEBUG_KMS("Broken DP branch device, ignoring\n");
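The function above turns the DPCD branch-device description into a connector status: no downstream port means the attached sink itself answered; with DPCD 1.1+ and an HPD-capable downstream port, DP_SINK_COUNT is authoritative; otherwise a DDC probe is attempted, and downstream types that cannot be probed reliably (VGA, no-EDID) come back as unknown. A standalone restatement of that decision tree over plain booleans is sketched below; the struct and its fields are hypothetical stand-ins, not DPCD definitions, and corner cases such as a failed SINK_COUNT read are omitted.

#include <stdbool.h>
#include <stdio.h>

enum status { DISCONNECTED, CONNECTED, UNKNOWN };

/* Hypothetical snapshot of what the driver learned from DPCD/DDC. */
struct branch_info {
	bool has_downstream_port;   /* a branch device sits between us and the sink */
	bool dpcd_11_plus;          /* DPCD revision is 1.1 or newer */
	bool downstream_hpd;        /* downstream port reports HPD capability */
	int  sink_count;            /* sinks reported behind the branch device */
	bool ddc_probe_ok;          /* an EDID DDC transaction succeeded */
	bool unreliable_type;       /* VGA / no-EDID style downstream port */
};

static enum status detect_dpcd(const struct branch_info *b)
{
	if (!b->has_downstream_port)
		return CONNECTED;               /* the sink itself replied */

	if (b->dpcd_11_plus && b->downstream_hpd)
		return b->sink_count ? CONNECTED : DISCONNECTED;

	if (b->ddc_probe_ok)
		return CONNECTED;               /* no HPD, but DDC answered */

	if (b->unreliable_type)
		return UNKNOWN;                 /* cannot tell without EDID */

	return DISCONNECTED;                    /* out-of-spec branch device */
}

int main(void)
{
	struct branch_info b = { true, true, true, 1, false, false };
	printf("%d\n", detect_dpcd(&b));
	return 0;
}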
Line 2790... Line 3003...
2790
		if (status == connector_status_unknown)
3003
		if (status == connector_status_unknown)
2791
			status = connector_status_connected;
3004
			status = connector_status_connected;
2792
		return status;
3005
		return status;
2793
	}
3006
	}
Line -... Line 3007...
-
 
3007
 
2794
 
3008
	if (IS_VALLEYVIEW(dev)) {
2795
	switch (intel_dig_port->port) {
3009
		switch (intel_dig_port->port) {
2796
	case PORT_B:
3010
		case PORT_B:
2797
		bit = PORTB_HOTPLUG_LIVE_STATUS;
3011
			bit = PORTB_HOTPLUG_LIVE_STATUS_VLV;
2798
		break;
3012
			break;
2799
	case PORT_C:
3013
		case PORT_C:
2800
		bit = PORTC_HOTPLUG_LIVE_STATUS;
3014
			bit = PORTC_HOTPLUG_LIVE_STATUS_VLV;
2801
		break;
3015
			break;
2802
	case PORT_D:
3016
		case PORT_D:
2803
		bit = PORTD_HOTPLUG_LIVE_STATUS;
3017
			bit = PORTD_HOTPLUG_LIVE_STATUS_VLV;
2804
		break;
3018
			break;
2805
	default:
3019
		default:
2806
		return connector_status_unknown;
3020
			return connector_status_unknown;
-
 
3021
		}
-
 
3022
	} else {
-
 
3023
	switch (intel_dig_port->port) {
-
 
3024
	case PORT_B:
-
 
3025
			bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
-
 
3026
		break;
-
 
3027
	case PORT_C:
-
 
3028
			bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
-
 
3029
		break;
-
 
3030
	case PORT_D:
-
 
3031
			bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
-
 
3032
		break;
-
 
3033
	default:
-
 
3034
		return connector_status_unknown;
-
 
3035
	}
Line 2807... Line 3036...
2807
	}
3036
	}
2808
 
3037
 
Line 2809... Line 3038...
2809
	if ((I915_READ(PORT_HOTPLUG_STAT) & bit) == 0)
3038
	if ((I915_READ(PORT_HOTPLUG_STAT) & bit) == 0)
Line 2817... Line 3046...
2817
{
3046
{
2818
	struct intel_connector *intel_connector = to_intel_connector(connector);
3047
	struct intel_connector *intel_connector = to_intel_connector(connector);
Line 2819... Line 3048...
2819
 
3048
 
2820
	/* use cached edid if we have one */
3049
	/* use cached edid if we have one */
2821
	if (intel_connector->edid) {
-
 
2822
	struct edid	*edid;
-
 
2823
	int size;
-
 
2824
 
3050
	if (intel_connector->edid) {
2825
		/* invalid edid */
3051
		/* invalid edid */
2826
		if (IS_ERR(intel_connector->edid))
3052
		if (IS_ERR(intel_connector->edid))
Line 2827... Line -...
2827
			return NULL;
-
 
2828
 
3053
			return NULL;
2829
		size = (intel_connector->edid->extensions + 1) * EDID_LENGTH;
-
 
2830
		edid = kmemdup(intel_connector->edid, size, GFP_KERNEL);
-
 
2831
		if (!edid)
-
 
2832
			return NULL;
-
 
2833
 
3054
 
Line 2834... Line 3055...
2834
		return edid;
3055
		return drm_edid_duplicate(intel_connector->edid);
2835
	}
3056
	}
Line 2860... Line 3081...
2860
{
3081
{
2861
	struct intel_dp *intel_dp = intel_attached_dp(connector);
3082
	struct intel_dp *intel_dp = intel_attached_dp(connector);
2862
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
3083
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
2863
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
3084
	struct intel_encoder *intel_encoder = &intel_dig_port->base;
2864
	struct drm_device *dev = connector->dev;
3085
	struct drm_device *dev = connector->dev;
-
 
3086
	struct drm_i915_private *dev_priv = dev->dev_private;
2865
	enum drm_connector_status status;
3087
	enum drm_connector_status status;
2866
	struct edid *edid = NULL;
3088
	struct edid *edid = NULL;
Line -... Line 3089...
-
 
3089
 
-
 
3090
	intel_runtime_pm_get(dev_priv);
2867
 
3091
 
2868
	DRM_DEBUG_KMS("[CONNECTOR:%d:%s]\n",
3092
	DRM_DEBUG_KMS("[CONNECTOR:%d:%s]\n",
Line 2869... Line 3093...
2869
		      connector->base.id, drm_get_connector_name(connector));
3093
		      connector->base.id, drm_get_connector_name(connector));
Line 2874... Line 3098...
2874
		status = ironlake_dp_detect(intel_dp);
3098
		status = ironlake_dp_detect(intel_dp);
2875
	else
3099
	else
2876
		status = g4x_dp_detect(intel_dp);
3100
		status = g4x_dp_detect(intel_dp);
Line 2877... Line 3101...
2877
 
3101
 
2878
	if (status != connector_status_connected)
3102
	if (status != connector_status_connected)
Line 2879... Line 3103...
2879
		return status;
3103
		goto out;
Line 2880... Line 3104...
2880
 
3104
 
2881
	intel_dp_probe_oui(intel_dp);
3105
	intel_dp_probe_oui(intel_dp);
Line 2890... Line 3114...
2890
		}
3114
		}
2891
	}
3115
	}
Line 2892... Line 3116...
2892
 
3116
 
2893
	if (intel_encoder->type != INTEL_OUTPUT_EDP)
3117
	if (intel_encoder->type != INTEL_OUTPUT_EDP)
2894
		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
3118
		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
-
 
3119
	status = connector_status_connected;
-
 
3120
 
-
 
3121
out:
-
 
3122
	intel_runtime_pm_put(dev_priv);
2895
	return connector_status_connected;
3123
	return status;
Line 2896... Line 3124...
2896
}
3124
}
2897
 
3125
 
2898
static int intel_dp_get_modes(struct drm_connector *connector)
3126
static int intel_dp_get_modes(struct drm_connector *connector)
Line 3037... Line 3265...
3037
	/* Can't call is_edp() since the encoder may have been destroyed
3265
	/* Can't call is_edp() since the encoder may have been destroyed
3038
	 * already. */
3266
	 * already. */
3039
	if (connector->connector_type == DRM_MODE_CONNECTOR_eDP)
3267
	if (connector->connector_type == DRM_MODE_CONNECTOR_eDP)
3040
		intel_panel_fini(&intel_connector->panel);
3268
		intel_panel_fini(&intel_connector->panel);
Line 3041... Line -...
3041
 
-
 
3042
	drm_sysfs_connector_remove(connector);
3269
 
3043
	drm_connector_cleanup(connector);
3270
	drm_connector_cleanup(connector);
3044
	kfree(connector);
3271
	kfree(connector);
Line 3045... Line 3272...
3045
}
3272
}
Line 3105... Line 3332...
3105
 
3332
 
3106
	return -1;
3333
	return -1;
Line 3107... Line 3334...
3107
}
3334
}
3108
 
3335
 
3109
/* check the VBT to see whether the eDP is on DP-D port */
3336
/* check the VBT to see whether the eDP is on DP-D port */
3110
bool intel_dpd_is_edp(struct drm_device *dev)
3337
bool intel_dp_is_edp(struct drm_device *dev, enum port port)
3111
{
3338
{
3112
	struct drm_i915_private *dev_priv = dev->dev_private;
3339
	struct drm_i915_private *dev_priv = dev->dev_private;
-
 
3340
	union child_device_config *p_child;
-
 
3341
	int i;
-
 
3342
	static const short port_mapping[] = {
-
 
3343
		[PORT_B] = PORT_IDPB,
-
 
3344
		[PORT_C] = PORT_IDPC,
-
 
3345
		[PORT_D] = PORT_IDPD,
-
 
3346
	};
-
 
3347
 
Line 3113... Line 3348...
3113
	struct child_device_config *p_child;
3348
	if (port == PORT_A)
3114
	int i;
3349
		return true;
Line 3115... Line 3350...
3115
 
3350
 
3116
	if (!dev_priv->vbt.child_dev_num)
3351
	if (!dev_priv->vbt.child_dev_num)
Line 3117... Line 3352...
3117
		return false;
3352
		return false;
3118
 
3353
 
-
 
3354
	for (i = 0; i < dev_priv->vbt.child_dev_num; i++) {
3119
	for (i = 0; i < dev_priv->vbt.child_dev_num; i++) {
3355
		p_child = dev_priv->vbt.child_dev + i;
3120
		p_child = dev_priv->vbt.child_dev + i;
3356
 
3121
 
3357
		if (p_child->common.dvo_port == port_mapping[port] &&
3122
		if (p_child->dvo_port == PORT_IDPD &&
3358
		    (p_child->common.device_type & DEVICE_TYPE_eDP_BITS) ==
Line 3151... Line 3387...
3151
				    struct edp_power_seq *out)
3387
				    struct edp_power_seq *out)
3152
{
3388
{
3153
	struct drm_i915_private *dev_priv = dev->dev_private;
3389
	struct drm_i915_private *dev_priv = dev->dev_private;
3154
	struct edp_power_seq cur, vbt, spec, final;
3390
	struct edp_power_seq cur, vbt, spec, final;
3155
	u32 pp_on, pp_off, pp_div, pp;
3391
	u32 pp_on, pp_off, pp_div, pp;
3156
	int pp_control_reg, pp_on_reg, pp_off_reg, pp_div_reg;
3392
	int pp_ctrl_reg, pp_on_reg, pp_off_reg, pp_div_reg;
Line 3157... Line 3393...
3157
 
3393
 
3158
	if (HAS_PCH_SPLIT(dev)) {
3394
	if (HAS_PCH_SPLIT(dev)) {
3159
		pp_control_reg = PCH_PP_CONTROL;
3395
		pp_ctrl_reg = PCH_PP_CONTROL;
3160
		pp_on_reg = PCH_PP_ON_DELAYS;
3396
		pp_on_reg = PCH_PP_ON_DELAYS;
3161
		pp_off_reg = PCH_PP_OFF_DELAYS;
3397
		pp_off_reg = PCH_PP_OFF_DELAYS;
3162
		pp_div_reg = PCH_PP_DIVISOR;
3398
		pp_div_reg = PCH_PP_DIVISOR;
-
 
3399
	} else {
-
 
3400
		enum pipe pipe = vlv_power_sequencer_pipe(intel_dp);
3163
	} else {
3401
 
3164
		pp_control_reg = PIPEA_PP_CONTROL;
3402
		pp_ctrl_reg = VLV_PIPE_PP_CONTROL(pipe);
3165
		pp_on_reg = PIPEA_PP_ON_DELAYS;
3403
		pp_on_reg = VLV_PIPE_PP_ON_DELAYS(pipe);
3166
		pp_off_reg = PIPEA_PP_OFF_DELAYS;
3404
		pp_off_reg = VLV_PIPE_PP_OFF_DELAYS(pipe);
3167
		pp_div_reg = PIPEA_PP_DIVISOR;
3405
		pp_div_reg = VLV_PIPE_PP_DIVISOR(pipe);
Line 3168... Line 3406...
3168
	}
3406
	}
3169
 
3407
 
3170
	/* Workaround: Need to write PP_CONTROL with the unlock key as
3408
	/* Workaround: Need to write PP_CONTROL with the unlock key as
3171
	 * the very first thing. */
3409
	 * the very first thing. */
Line 3172... Line 3410...
3172
	pp = ironlake_get_pp_control(intel_dp);
3410
	pp = ironlake_get_pp_control(intel_dp);
3173
	I915_WRITE(pp_control_reg, pp);
3411
	I915_WRITE(pp_ctrl_reg, pp);
3174
 
3412
 
Line 3256... Line 3494...
3256
	if (HAS_PCH_SPLIT(dev)) {
3494
	if (HAS_PCH_SPLIT(dev)) {
3257
		pp_on_reg = PCH_PP_ON_DELAYS;
3495
		pp_on_reg = PCH_PP_ON_DELAYS;
3258
		pp_off_reg = PCH_PP_OFF_DELAYS;
3496
		pp_off_reg = PCH_PP_OFF_DELAYS;
3259
		pp_div_reg = PCH_PP_DIVISOR;
3497
		pp_div_reg = PCH_PP_DIVISOR;
3260
	} else {
3498
	} else {
-
 
3499
		enum pipe pipe = vlv_power_sequencer_pipe(intel_dp);
-
 
3500
 
3261
		pp_on_reg = PIPEA_PP_ON_DELAYS;
3501
		pp_on_reg = VLV_PIPE_PP_ON_DELAYS(pipe);
3262
		pp_off_reg = PIPEA_PP_OFF_DELAYS;
3502
		pp_off_reg = VLV_PIPE_PP_OFF_DELAYS(pipe);
3263
		pp_div_reg = PIPEA_PP_DIVISOR;
3503
		pp_div_reg = VLV_PIPE_PP_DIVISOR(pipe);
3264
	}
3504
	}
Line 3265... Line 3505...
3265
 
3505
 
3266
	/* And finally store the new values in the power sequencer. */
3506
	/* And finally store the new values in the power sequencer. */
3267
	pp_on = (seq->t1_t3 << PANEL_POWER_UP_DELAY_SHIFT) |
3507
	pp_on = (seq->t1_t3 << PANEL_POWER_UP_DELAY_SHIFT) |
Line 3275... Line 3515...
3275
			<< PANEL_POWER_CYCLE_DELAY_SHIFT);
3515
			<< PANEL_POWER_CYCLE_DELAY_SHIFT);
Line 3276... Line 3516...
3276
 
3516
 
3277
	/* Haswell doesn't have any port selection bits for the panel
3517
	/* Haswell doesn't have any port selection bits for the panel
3278
	 * power sequencer any more. */
3518
	 * power sequencer any more. */
-
 
3519
	if (IS_VALLEYVIEW(dev)) {
-
 
3520
		if (dp_to_dig_port(intel_dp)->port == PORT_B)
-
 
3521
			port_sel = PANEL_PORT_SELECT_DPB_VLV;
3279
	if (IS_VALLEYVIEW(dev)) {
3522
		else
3280
		port_sel = I915_READ(pp_on_reg) & 0xc0000000;
3523
			port_sel = PANEL_PORT_SELECT_DPC_VLV;
3281
	} else if (HAS_PCH_IBX(dev) || HAS_PCH_CPT(dev)) {
3524
	} else if (HAS_PCH_IBX(dev) || HAS_PCH_CPT(dev)) {
3282
		if (dp_to_dig_port(intel_dp)->port == PORT_A)
3525
		if (dp_to_dig_port(intel_dp)->port == PORT_A)
3283
			port_sel = PANEL_POWER_PORT_DP_A;
3526
			port_sel = PANEL_PORT_SELECT_DPA;
3284
		else
3527
		else
3285
			port_sel = PANEL_POWER_PORT_DP_D;
3528
			port_sel = PANEL_PORT_SELECT_DPD;
Line 3286... Line 3529...
3286
	}
3529
	}
Line 3287... Line 3530...
3287
 
3530
 
Line 3333... Line 3576...
3333
 
3576
 
3334
	/* We now know it's not a ghost, init power sequence regs. */
3577
	/* We now know it's not a ghost, init power sequence regs. */
3335
	intel_dp_init_panel_power_sequencer_registers(dev, intel_dp,
3578
	intel_dp_init_panel_power_sequencer_registers(dev, intel_dp,
Line 3336... Line -...
3336
						      &power_seq);
-
 
3337
 
3579
						      &power_seq);
3338
	ironlake_edp_panel_vdd_on(intel_dp);
3580
 
3339
	edid = drm_get_edid(connector, &intel_dp->adapter);
3581
	edid = drm_get_edid(connector, &intel_dp->adapter);
3340
	if (edid) {
3582
	if (edid) {
3341
		if (drm_add_edid_modes(connector, edid)) {
3583
		if (drm_add_edid_modes(connector, edid)) {
Line 3365... Line 3607...
3365
					dev_priv->vbt.lfp_lvds_vbt_mode);
3607
					dev_priv->vbt.lfp_lvds_vbt_mode);
3366
		if (fixed_mode)
3608
		if (fixed_mode)
3367
			fixed_mode->type |= DRM_MODE_TYPE_PREFERRED;
3609
			fixed_mode->type |= DRM_MODE_TYPE_PREFERRED;
3368
	}
3610
	}
Line 3369... Line -...
3369
 
-
 
3370
	ironlake_edp_panel_vdd_off(intel_dp, false);
-
 
3371
 
3611
 
3372
	intel_panel_init(&intel_connector->panel, fixed_mode);
3612
	intel_panel_init(&intel_connector->panel, fixed_mode);
Line 3373... Line 3613...
3373
	intel_panel_setup_backlight(connector);
3613
	intel_panel_setup_backlight(connector);
3374
 
3614
 
Line 3390... Line 3630...
3390
 
3630
 
3391
	/* Preserve the current hw state. */
3631
	/* Preserve the current hw state. */
3392
	intel_dp->DP = I915_READ(intel_dp->output_reg);
3632
	intel_dp->DP = I915_READ(intel_dp->output_reg);
Line 3393... Line 3633...
3393
	intel_dp->attached_connector = intel_connector;
3633
	intel_dp->attached_connector = intel_connector;
3394
 
-
 
3395
	type = DRM_MODE_CONNECTOR_DisplayPort;
-
 
3396
	/*
-
 
3397
	 * FIXME : We need to initialize built-in panels before external panels.
-
 
3398
	 * For X0, DP_C is fixed as eDP. Revisit this as part of VLV eDP cleanup
-
 
3399
	 */
-
 
3400
	switch (port) {
3634
 
3401
	case PORT_A:
3635
	if (intel_dp_is_edp(dev, port))
3402
		type = DRM_MODE_CONNECTOR_eDP;
-
 
3403
		break;
-
 
3404
	case PORT_C:
3636
		type = DRM_MODE_CONNECTOR_eDP;
3405
		if (IS_VALLEYVIEW(dev))
-
 
3406
		type = DRM_MODE_CONNECTOR_eDP;
-
 
3407
		break;
-
 
3408
	case PORT_D:
-
 
3409
		if (HAS_PCH_SPLIT(dev) && intel_dpd_is_edp(dev))
-
 
3410
			type = DRM_MODE_CONNECTOR_eDP;
-
 
3411
		break;
-
 
3412
	default:	/* silence GCC warning */
-
 
Line 3413... Line 3637...
3413
		break;
3637
	else
3414
	}
3638
	type = DRM_MODE_CONNECTOR_DisplayPort;
3415
 
3639
 
3416
	/*
3640
	/*
Line 3523... Line 3747...
3523
	struct intel_digital_port *intel_dig_port;
3747
	struct intel_digital_port *intel_dig_port;
3524
	struct intel_encoder *intel_encoder;
3748
	struct intel_encoder *intel_encoder;
3525
	struct drm_encoder *encoder;
3749
	struct drm_encoder *encoder;
3526
	struct intel_connector *intel_connector;
3750
	struct intel_connector *intel_connector;
Line 3527... Line 3751...
3527
 
3751
 
3528
	intel_dig_port = kzalloc(sizeof(struct intel_digital_port), GFP_KERNEL);
3752
	intel_dig_port = kzalloc(sizeof(*intel_dig_port), GFP_KERNEL);
3529
	if (!intel_dig_port)
3753
	if (!intel_dig_port)
Line 3530... Line 3754...
3530
		return;
3754
		return;
3531
 
3755
 
3532
	intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
3756
	intel_connector = kzalloc(sizeof(*intel_connector), GFP_KERNEL);
3533
	if (!intel_connector) {
3757
	if (!intel_connector) {
3534
		kfree(intel_dig_port);
3758
		kfree(intel_dig_port);
Line 3546... Line 3770...
3546
	intel_encoder->disable = intel_disable_dp;
3770
	intel_encoder->disable = intel_disable_dp;
3547
	intel_encoder->post_disable = intel_post_disable_dp;
3771
	intel_encoder->post_disable = intel_post_disable_dp;
3548
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
3772
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
3549
	intel_encoder->get_config = intel_dp_get_config;
3773
	intel_encoder->get_config = intel_dp_get_config;
3550
	if (IS_VALLEYVIEW(dev)) {
3774
	if (IS_VALLEYVIEW(dev)) {
3551
		intel_encoder->pre_pll_enable = intel_dp_pre_pll_enable;
3775
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
3552
		intel_encoder->pre_enable = vlv_pre_enable_dp;
3776
		intel_encoder->pre_enable = vlv_pre_enable_dp;
3553
		intel_encoder->enable = vlv_enable_dp;
3777
		intel_encoder->enable = vlv_enable_dp;
3554
	} else {
3778
	} else {
3555
		intel_encoder->pre_enable = intel_pre_enable_dp;
3779
		intel_encoder->pre_enable = g4x_pre_enable_dp;
3556
		intel_encoder->enable = intel_enable_dp;
3780
		intel_encoder->enable = g4x_enable_dp;
3557
	}
3781
	}
Line 3558... Line 3782...
3558
 
3782
 
3559
	intel_dig_port->port = port;
3783
	intel_dig_port->port = port;