Subversion Repositories Kolibri OS


Rev Author Line No. Line
1123 serge 1
/*
2
 * Copyright (c) 2006 Luc Verhaegen (quirks list)
3
 * Copyright (c) 2007-2008 Intel Corporation
4
 *   Jesse Barnes 
1963 serge 5
 * Copyright 2010 Red Hat, Inc.
1123 serge 6
 *
7
 * DDC probing routines (drm_ddc_read & drm_do_probe_ddc_edid) originally from
8
 * FB layer.
9
 *   Copyright (C) 2006 Dennis Munsie 
10
 *
11
 * Permission is hereby granted, free of charge, to any person obtaining a
12
 * copy of this software and associated documentation files (the "Software"),
13
 * to deal in the Software without restriction, including without limitation
14
 * the rights to use, copy, modify, merge, publish, distribute, sub license,
15
 * and/or sell copies of the Software, and to permit persons to whom the
16
 * Software is furnished to do so, subject to the following conditions:
17
 *
18
 * The above copyright notice and this permission notice (including the
19
 * next paragraph) shall be included in all copies or substantial portions
20
 * of the Software.
21
 *
22
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
23
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
24
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
25
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
26
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
27
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
28
 * DEALINGS IN THE SOFTWARE.
29
 */
1221 serge 30
#include 
1963 serge 31
#include 
1125 serge 32
#include 
1123 serge 33
#include "drmP.h"
34
#include "drm_edid.h"
1963 serge 35
#include "drm_edid_modes.h"
1123 serge 36
 
1963 serge 37
#define version_greater(edid, maj, min) \
38
	(((edid)->version > (maj)) || \
39
	 ((edid)->version == (maj) && (edid)->revision > (min)))
1123 serge 40
 
1963 serge 41
#define EDID_EST_TIMINGS 16
42
#define EDID_STD_TIMINGS 8
43
#define EDID_DETAILED_TIMINGS 4
44
 
1123 serge 45
/*
46
 * EDID blocks out in the wild have a variety of bugs, try to collect
47
 * them here (note that userspace may work around broken monitors first,
48
 * but fixes should make their way here so that the kernel "just works"
49
 * on as many displays as possible).
50
 */
51
 
52
/* First detailed mode wrong, use largest 60Hz mode */
53
#define EDID_QUIRK_PREFER_LARGE_60		(1 << 0)
54
/* Reported 135MHz pixel clock is too high, needs adjustment */
55
#define EDID_QUIRK_135_CLOCK_TOO_HIGH		(1 << 1)
56
/* Prefer the largest mode at 75 Hz */
57
#define EDID_QUIRK_PREFER_LARGE_75		(1 << 2)
58
/* Detail timing is in cm not mm */
59
#define EDID_QUIRK_DETAILED_IN_CM		(1 << 3)
60
/* Detailed timing descriptors have bogus size values, so just take the
61
 * maximum size and use that.
62
 */
63
#define EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE	(1 << 4)
64
/* Monitor forgot to set the first detailed is preferred bit. */
65
#define EDID_QUIRK_FIRST_DETAILED_PREFERRED	(1 << 5)
66
/* use +hsync +vsync for detailed mode */
67
#define EDID_QUIRK_DETAILED_SYNC_PP		(1 << 6)
68
 
1963 serge 69
struct detailed_mode_closure {
70
	struct drm_connector *connector;
71
	struct edid *edid;
72
	bool preferred;
73
	u32 quirks;
74
	int modes;
75
};
1430 serge 76
 
1179 serge 77
#define LEVEL_DMT	0
78
#define LEVEL_GTF	1
1963 serge 79
#define LEVEL_GTF2	2
80
#define LEVEL_CVT	3
1179 serge 81
 
1123 serge 82
static struct edid_quirk {
83
	char *vendor;
84
	int product_id;
85
	u32 quirks;
86
} edid_quirk_list[] = {
87
	/* Acer AL1706 */
88
	{ "ACR", 44358, EDID_QUIRK_PREFER_LARGE_60 },
89
	/* Acer F51 */
90
	{ "API", 0x7602, EDID_QUIRK_PREFER_LARGE_60 },
91
	/* Unknown Acer */
92
	{ "ACR", 2423, EDID_QUIRK_FIRST_DETAILED_PREFERRED },
93
 
94
	/* Belinea 10 15 55 */
95
	{ "MAX", 1516, EDID_QUIRK_PREFER_LARGE_60 },
96
	{ "MAX", 0x77e, EDID_QUIRK_PREFER_LARGE_60 },
97
 
98
	/* Envision Peripherals, Inc. EN-7100e */
99
	{ "EPI", 59264, EDID_QUIRK_135_CLOCK_TOO_HIGH },
1963 serge 100
	/* Envision EN2028 */
101
	{ "EPI", 8232, EDID_QUIRK_PREFER_LARGE_60 },
1123 serge 102
 
103
	/* Funai Electronics PM36B */
104
	{ "FCM", 13600, EDID_QUIRK_PREFER_LARGE_75 |
105
	  EDID_QUIRK_DETAILED_IN_CM },
106
 
107
	/* LG Philips LCD LP154W01-A5 */
108
	{ "LPL", 0, EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE },
109
	{ "LPL", 0x2a00, EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE },
110
 
111
	/* Philips 107p5 CRT */
112
	{ "PHL", 57364, EDID_QUIRK_FIRST_DETAILED_PREFERRED },
113
 
114
	/* Proview AY765C */
115
	{ "PTS", 765, EDID_QUIRK_FIRST_DETAILED_PREFERRED },
116
 
117
	/* Samsung SyncMaster 205BW.  Note: irony */
118
	{ "SAM", 541, EDID_QUIRK_DETAILED_SYNC_PP },
119
	/* Samsung SyncMaster 22[5-6]BW */
120
	{ "SAM", 596, EDID_QUIRK_PREFER_LARGE_60 },
121
	{ "SAM", 638, EDID_QUIRK_PREFER_LARGE_60 },
122
};
123
 
1963 serge 124
/*** DDC fetch and block validation ***/
1123 serge 125
 
1221 serge 126
static const u8 edid_header[] = {
127
	0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00
128
};
1123 serge 129
 
1963 serge 130
/*
131
 * Sanity check the EDID block (base or extension).  Return 0 if the block
132
 * doesn't check out, or 1 if it's valid.
1123 serge 133
 */
1963 serge 134
static bool
135
drm_edid_block_valid(u8 *raw_edid)
1123 serge 136
{
1963 serge 137
	int i;
1123 serge 138
	u8 csum = 0;
1963 serge 139
	struct edid *edid = (struct edid *)raw_edid;
1123 serge 140
 
1963 serge 141
	if (raw_edid[0] == 0x00) {
142
		int score = 0;
143
 
1321 serge 144
	for (i = 0; i < sizeof(edid_header); i++)
145
		if (raw_edid[i] == edid_header[i])
146
			score++;
147
 
148
	if (score == 8) ;
149
	else if (score >= 6) {
150
		DRM_DEBUG("Fixing EDID header, your hardware may be failing\n");
151
		memcpy(raw_edid, edid_header, sizeof(edid_header));
1963 serge 152
		} else {
1123 serge 153
		goto bad;
1963 serge 154
		}
155
	}
1123 serge 156
 
157
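	/* Every 128-byte EDID block must sum to zero modulo 256. */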
	for (i = 0; i < EDID_LENGTH; i++)
158
		csum += raw_edid[i];
159
	if (csum) {
160
		DRM_ERROR("EDID checksum is invalid, remainder is %d\n", csum);
1963 serge 161
 
162
		/* allow CEA to slide through, switches mangle this */
163
		if (raw_edid[0] != 0x02)
1123 serge 164
		goto bad;
165
	}
166
 
1963 serge 167
	/* per-block-type checks */
168
	switch (raw_edid[0]) {
169
	case 0: /* base */
1321 serge 170
	if (edid->version != 1) {
171
		DRM_ERROR("EDID has major version %d, instead of 1\n", edid->version);
172
		goto bad;
173
	}
174
 
175
	if (edid->revision > 4)
176
		DRM_DEBUG("EDID minor > 4, assuming backward compatibility\n");
1963 serge 177
		break;
1321 serge 178
 
1963 serge 179
	default:
180
		break;
181
	}
182
 
1123 serge 183
	return 1;
184
 
185
bad:
186
	if (raw_edid) {
187
		DRM_ERROR("Raw EDID:\n");
188
//       print_hex_dump_bytes(KERN_ERR, DUMP_PREFIX_NONE, raw_edid, EDID_LENGTH);
189
//       printk("\n");
190
	}
191
	return 0;
192
}
1963 serge 193
 
194
/**
195
 * drm_edid_is_valid - sanity check EDID data
196
 * @edid: EDID data
197
 *
198
 * Sanity-check an entire EDID record (including extensions)
199
 */
200
bool drm_edid_is_valid(struct edid *edid)
201
{
202
	int i;
203
	u8 *raw = (u8 *)edid;
204
 
205
	if (!edid)
206
		return false;
207
 
208
	for (i = 0; i <= edid->extensions; i++)
209
		if (!drm_edid_block_valid(raw + i * EDID_LENGTH))
210
			return false;
211
 
212
	return true;
213
}
1430 serge 214
EXPORT_SYMBOL(drm_edid_is_valid);
1123 serge 215
 
1963 serge 216
#define DDC_ADDR 0x50
217
#define DDC_SEGMENT_ADDR 0x30
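/* 0x50 is the fixed I2C slave address for EDID reads over DDC; 0x30 is the
 * E-DDC segment pointer used to reach blocks beyond the first two. */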
1123 serge 218
/**
1963 serge 219
 * Get EDID information via I2C.
220
 *
221
 * \param adapter : i2c device adaptor
222
 * \param buf     : EDID data buffer to be filled
223
 * \param len     : EDID data buffer length
224
 * \return 0 on success or -1 on failure.
225
 *
226
 * Try to fetch EDID information by calling i2c driver function.
227
 */
228
static int
229
drm_do_probe_ddc_edid(struct i2c_adapter *adapter, unsigned char *buf,
230
		      int block, int len)
231
{
232
	unsigned char start = block * EDID_LENGTH;
233
	int ret, retries = 5;
234
 
235
	/* The core i2c driver will automatically retry the transfer if the
236
	 * adapter reports EAGAIN. However, we find that bit-banging transfers
237
	 * are susceptible to errors under a heavily loaded machine and
238
	 * generate spurious NAKs and timeouts. Retrying the transfer
239
	 * of the individual block a few times seems to overcome this.
240
	 */
241
	do {
242
	struct i2c_msg msgs[] = {
243
		{
244
			.addr	= DDC_ADDR,
245
			.flags	= 0,
246
			.len	= 1,
247
			.buf	= &start,
248
		}, {
249
			.addr	= DDC_ADDR,
250
			.flags	= I2C_M_RD,
251
			.len	= len,
252
			.buf	= buf,
253
		}
254
	};
255
		ret = i2c_transfer(adapter, msgs, 2);
256
	} while (ret != 2 && --retries);
257
 
258
	return ret == 2 ? 0 : -1;
259
}
260
 
261
static u8 *
262
drm_do_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter)
263
{
264
	int i, j = 0, valid_extensions = 0;
265
	u8 *block, *new;
266
    size_t alloc_size;
267
 
268
	if ((block = kmalloc(EDID_LENGTH, GFP_KERNEL)) == NULL)
269
		return NULL;
270
 
271
	/* base block fetch */
272
	for (i = 0; i < 4; i++) {
273
		if (drm_do_probe_ddc_edid(adapter, block, 0, EDID_LENGTH))
274
			goto out;
275
		if (drm_edid_block_valid(block))
276
			break;
277
	}
278
	if (i == 4)
279
		goto carp;
280
 
281
	/* if there's no extensions, we're done */
282
	if (block[0x7e] == 0)
283
		return block;
284
 
285
    alloc_size = (block[0x7e] + 1) * EDID_LENGTH ;
286
 
287
    new = kmalloc(alloc_size, GFP_KERNEL);
288
 
289
	if (!new)
290
		goto out;
291
 
292
    memcpy(new, block, EDID_LENGTH);
293
    kfree(block);
294
 
295
	block = new;
296
 
297
	for (j = 1; j <= block[0x7e]; j++) {
298
		for (i = 0; i < 4; i++) {
299
			if (drm_do_probe_ddc_edid(adapter,
300
				  block + (valid_extensions + 1) * EDID_LENGTH,
301
				  j, EDID_LENGTH))
302
				goto out;
303
			if (drm_edid_block_valid(block + (valid_extensions + 1) * EDID_LENGTH)) {
304
				valid_extensions++;
305
				break;
306
		}
307
		}
308
		if (i == 4)
309
			dev_warn(connector->dev->dev,
310
			 "%s: Ignoring invalid EDID block %d.\n",
311
			 drm_get_connector_name(connector), j);
312
	}
313
 
314
	if (valid_extensions != block[0x7e]) {
315
		block[EDID_LENGTH-1] += block[0x7e] - valid_extensions;
316
		block[0x7e] = valid_extensions;
317
        new = kmalloc((valid_extensions + 1) * EDID_LENGTH, GFP_KERNEL);
318
        if (!new)
319
			goto out;
320
        memcpy(new, block, alloc_size);
321
        kfree(block);
322
		block = new;
323
	}
324
 
325
	return block;
326
 
327
carp:
328
	dev_warn(connector->dev->dev, "%s: EDID block %d invalid.\n",
329
		 drm_get_connector_name(connector), j);
330
 
331
out:
332
	kfree(block);
333
	return NULL;
334
}
335
 
336
/**
337
 * Probe DDC presence.
338
 *
339
 * \param adapter : i2c device adaptor
340
 * \return 1 on success
341
 */
342
static bool
343
drm_probe_ddc(struct i2c_adapter *adapter)
344
{
345
	unsigned char out;
346
 
347
	return (drm_do_probe_ddc_edid(adapter, &out, 0, 1) == 0);
348
}
349
 
350
/**
351
 * drm_get_edid - get EDID data, if available
352
 * @connector: connector we're probing
353
 * @adapter: i2c adapter to use for DDC
354
 *
355
 * Poke the given i2c channel to grab EDID data if possible.  If found,
356
 * attach it to the connector.
357
 *
358
 * Return edid data or NULL if we couldn't find any.
359
 */
360
struct edid *drm_get_edid(struct drm_connector *connector,
361
			  struct i2c_adapter *adapter)
362
{
363
	struct edid *edid = NULL;
364
 
365
	if (drm_probe_ddc(adapter))
366
		edid = (struct edid *)drm_do_get_edid(connector, adapter);
367
 
368
	connector->display_info.raw_edid = (char *)edid;
369
 
370
	return edid;
371
 
372
}
373
EXPORT_SYMBOL(drm_get_edid);
374
 
375
/*** EDID parsing ***/
376
 
377
/**
1123 serge 378
 * edid_vendor - match a string against EDID's obfuscated vendor field
379
 * @edid: EDID to match
380
 * @vendor: vendor string
381
 *
382
 * Returns true if @vendor is in @edid, false otherwise
383
 */
384
static bool edid_vendor(struct edid *edid, char *vendor)
385
{
386
	char edid_vendor[3];
387
 
388
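	/* The manufacturer ID packs three letters as 5-bit values (1 = 'A')
	 * across two bytes; adding '@' (0x40) converts each back to ASCII. */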
	edid_vendor[0] = ((edid->mfg_id[0] & 0x7c) >> 2) + '@';
389
	edid_vendor[1] = (((edid->mfg_id[0] & 0x3) << 3) |
390
			  ((edid->mfg_id[1] & 0xe0) >> 5)) + '@';
391
	edid_vendor[2] = (edid->mfg_id[1] & 0x1f) + '@';
392
 
393
	return !strncmp(edid_vendor, vendor, 3);
394
}
395
 
396
/**
397
 * edid_get_quirks - return quirk flags for a given EDID
398
 * @edid: EDID to process
399
 *
400
 * This tells subsequent routines what fixes they need to apply.
401
 */
402
static u32 edid_get_quirks(struct edid *edid)
403
{
404
	struct edid_quirk *quirk;
405
	int i;
406
 
407
	for (i = 0; i < ARRAY_SIZE(edid_quirk_list); i++) {
408
		quirk = &edid_quirk_list[i];
409
 
410
		if (edid_vendor(edid, quirk->vendor) &&
411
		    (EDID_PRODUCT_ID(edid) == quirk->product_id))
412
			return quirk->quirks;
413
	}
414
 
415
	return 0;
416
}
417
 
418
#define MODE_SIZE(m) ((m)->hdisplay * (m)->vdisplay)
419
#define MODE_REFRESH_DIFF(m,r) (abs((m)->vrefresh - (r)))
420
 
421
/**
422
 * edid_fixup_preferred - set preferred modes based on quirk list
423
 * @connector: has mode list to fix up
424
 * @quirks: quirks list
425
 *
426
 * Walk the mode list for @connector, clearing the preferred status
427
 * on existing modes and setting it anew for the right mode ala @quirks.
428
 */
429
static void edid_fixup_preferred(struct drm_connector *connector,
430
				 u32 quirks)
431
{
432
	struct drm_display_mode *t, *cur_mode, *preferred_mode;
433
	int target_refresh = 0;
434
 
435
	if (list_empty(&connector->probed_modes))
436
		return;
437
 
438
	if (quirks & EDID_QUIRK_PREFER_LARGE_60)
439
		target_refresh = 60;
440
	if (quirks & EDID_QUIRK_PREFER_LARGE_75)
441
		target_refresh = 75;
442
 
443
	preferred_mode = list_first_entry(&connector->probed_modes,
444
					  struct drm_display_mode, head);
445
 
446
	list_for_each_entry_safe(cur_mode, t, &connector->probed_modes, head) {
447
		cur_mode->type &= ~DRM_MODE_TYPE_PREFERRED;
448
 
449
		if (cur_mode == preferred_mode)
450
			continue;
451
 
452
		/* Largest mode is preferred */
453
		if (MODE_SIZE(cur_mode) > MODE_SIZE(preferred_mode))
454
			preferred_mode = cur_mode;
455
 
456
		/* At a given size, try to get closest to target refresh */
457
		if ((MODE_SIZE(cur_mode) == MODE_SIZE(preferred_mode)) &&
458
		    MODE_REFRESH_DIFF(cur_mode, target_refresh) <
459
		    MODE_REFRESH_DIFF(preferred_mode, target_refresh)) {
460
			preferred_mode = cur_mode;
461
		}
462
	}
463
 
464
	preferred_mode->type |= DRM_MODE_TYPE_PREFERRED;
465
}
466
 
1963 serge 467
struct drm_display_mode *drm_mode_find_dmt(struct drm_device *dev,
1179 serge 468
			int hsize, int vsize, int fresh)
469
{
1963 serge 470
	struct drm_display_mode *mode = NULL;
1321 serge 471
	int i;
1179 serge 472
 
1321 serge 473
	for (i = 0; i < drm_num_dmt_modes; i++) {
1963 serge 474
		const struct drm_display_mode *ptr = &drm_dmt_modes[i];
1179 serge 475
		if (hsize == ptr->hdisplay &&
476
			vsize == ptr->vdisplay &&
477
			fresh == drm_mode_vrefresh(ptr)) {
478
			/* get the expected default mode */
479
			mode = drm_mode_duplicate(dev, ptr);
480
			break;
481
		}
482
	}
483
	return mode;
484
}
1963 serge 485
EXPORT_SYMBOL(drm_mode_find_dmt);
1221 serge 486
 
1963 serge 487
typedef void detailed_cb(struct detailed_timing *timing, void *closure);
488
 
489
static void
490
cea_for_each_detailed_block(u8 *ext, detailed_cb *cb, void *closure)
491
{
492
	int i, n = 0;
493
	u8 rev = ext[0x01], d = ext[0x02];
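	/* Byte 2 of a CEA-861 extension block is the offset to the first
	 * 18-byte detailed timing descriptor. */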
494
	u8 *det_base = ext + d;
495
 
496
	switch (rev) {
497
	case 0:
498
		/* can't happen */
499
		return;
500
	case 1:
501
		/* have to infer how many blocks we have, check pixel clock */
502
		for (i = 0; i < 6; i++)
503
			if (det_base[18*i] || det_base[18*i+1])
504
				n++;
505
		break;
506
	default:
507
		/* explicit count */
508
		n = min(ext[0x03] & 0x0f, 6);
509
		break;
510
	}
511
 
512
	for (i = 0; i < n; i++)
513
		cb((struct detailed_timing *)(det_base + 18 * i), closure);
514
}
515
 
516
static void
517
vtb_for_each_detailed_block(u8 *ext, detailed_cb *cb, void *closure)
518
{
519
	unsigned int i, n = min((int)ext[0x02], 6);
520
	u8 *det_base = ext + 5;
521
 
522
	if (ext[0x01] != 1)
523
		return; /* unknown version */
524
 
525
	for (i = 0; i < n; i++)
526
		cb((struct detailed_timing *)(det_base + 18 * i), closure);
527
}
528
 
529
static void
530
drm_for_each_detailed_block(u8 *raw_edid, detailed_cb *cb, void *closure)
531
{
532
	int i;
533
	struct edid *edid = (struct edid *)raw_edid;
534
 
535
	if (edid == NULL)
536
		return;
537
 
538
	for (i = 0; i < EDID_DETAILED_TIMINGS; i++)
539
		cb(&(edid->detailed_timings[i]), closure);
540
 
541
	for (i = 1; i <= raw_edid[0x7e]; i++) {
542
		u8 *ext = raw_edid + (i * EDID_LENGTH);
543
		switch (*ext) {
544
		case CEA_EXT:
545
			cea_for_each_detailed_block(ext, cb, closure);
546
			break;
547
		case VTB_EXT:
548
			vtb_for_each_detailed_block(ext, cb, closure);
549
			break;
550
		default:
551
			break;
552
		}
553
	}
554
}
555
 
556
static void
557
is_rb(struct detailed_timing *t, void *data)
558
{
559
	u8 *r = (u8 *)t;
560
	if (r[3] == EDID_DETAIL_MONITOR_RANGE)
561
		if (r[15] & 0x10)
562
			*(bool *)data = true;
563
}
564
 
565
/* EDID 1.4 defines this explicitly.  For EDID 1.3, we guess, badly. */
566
static bool
567
drm_monitor_supports_rb(struct edid *edid)
568
{
569
	if (edid->revision >= 4) {
570
		bool ret = false;
571
		drm_for_each_detailed_block((u8 *)edid, is_rb, &ret);
572
		return ret;
573
	}
574
 
575
	return ((edid->input & DRM_EDID_INPUT_DIGITAL) != 0);
576
}
577
 
578
static void
579
find_gtf2(struct detailed_timing *t, void *data)
580
{
581
	u8 *r = (u8 *)t;
582
	if (r[3] == EDID_DETAIL_MONITOR_RANGE && r[10] == 0x02)
583
		*(u8 **)data = r;
584
}
585
 
586
/* Secondary GTF curve kicks in above some break frequency */
587
static int
588
drm_gtf2_hbreak(struct edid *edid)
589
{
590
	u8 *r = NULL;
591
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
592
	return r ? (r[12] * 2) : 0;
593
}
594
 
595
static int
596
drm_gtf2_2c(struct edid *edid)
597
{
598
	u8 *r = NULL;
599
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
600
	return r ? r[13] : 0;
601
}
602
 
603
static int
604
drm_gtf2_m(struct edid *edid)
605
{
606
	u8 *r = NULL;
607
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
608
	return r ? (r[15] << 8) + r[14] : 0;
609
}
610
 
611
static int
612
drm_gtf2_k(struct edid *edid)
613
{
614
	u8 *r = NULL;
615
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
616
	return r ? r[16] : 0;
617
}
618
 
619
static int
620
drm_gtf2_2j(struct edid *edid)
621
{
622
	u8 *r = NULL;
623
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
624
	return r ? r[17] : 0;
625
}
626
 
627
/**
628
 * standard_timing_level - get std. timing level(CVT/GTF/DMT)
629
 * @edid: EDID block to scan
630
 */
631
static int standard_timing_level(struct edid *edid)
632
{
633
	if (edid->revision >= 2) {
634
		if (edid->revision >= 4 && (edid->features & DRM_EDID_FEATURE_DEFAULT_GTF))
635
			return LEVEL_CVT;
636
		if (drm_gtf2_hbreak(edid))
637
			return LEVEL_GTF2;
638
		return LEVEL_GTF;
639
	}
640
	return LEVEL_DMT;
641
}
642
 
1221 serge 643
/*
644
 * 0 is reserved.  The spec says 0x01 fill for unused timings.  Some old
645
 * monitors fill with ascii space (0x20) instead.
646
 */
647
static int
648
bad_std_timing(u8 a, u8 b)
649
{
650
	return (a == 0x00 && b == 0x00) ||
651
	       (a == 0x01 && b == 0x01) ||
652
	       (a == 0x20 && b == 0x20);
653
}
654
 
1123 serge 655
/**
656
 * drm_mode_std - convert standard mode info (width, height, refresh) into mode
657
 * @t: standard timing params
1221 serge 658
 * @revision: EDID revision number
1123 serge 659
 *
660
 * Take the standard timing params (in this case width, aspect, and refresh)
1221 serge 661
 * and convert them into a real mode using CVT/GTF/DMT.
1123 serge 662
 */
1963 serge 663
static struct drm_display_mode *
664
drm_mode_std(struct drm_connector *connector, struct edid *edid,
665
	     struct std_timing *t, int revision)
1123 serge 666
{
1963 serge 667
	struct drm_device *dev = connector->dev;
668
	struct drm_display_mode *m, *mode = NULL;
1179 serge 669
	int hsize, vsize;
670
	int vrefresh_rate;
1123 serge 671
	unsigned aspect_ratio = (t->vfreq_aspect & EDID_TIMING_ASPECT_MASK)
672
		>> EDID_TIMING_ASPECT_SHIFT;
1179 serge 673
	unsigned vfreq = (t->vfreq_aspect & EDID_TIMING_VFREQ_MASK)
674
		>> EDID_TIMING_VFREQ_SHIFT;
1963 serge 675
	int timing_level = standard_timing_level(edid);
1123 serge 676
 
1221 serge 677
	if (bad_std_timing(t->hsize, t->vfreq_aspect))
678
		return NULL;
679
 
1179 serge 680
	/* According to the EDID spec, the hdisplay = hsize * 8 + 248 */
681
	hsize = t->hsize * 8 + 248;
682
	/* vrefresh_rate = vfreq + 60 */
683
	vrefresh_rate = vfreq + 60;
684
	/* the vdisplay is calculated based on the aspect ratio */
1221 serge 685
	if (aspect_ratio == 0) {
686
		if (revision < 3)
687
			vsize = hsize;
688
		else
1123 serge 689
		vsize = (hsize * 10) / 16;
1221 serge 690
	} else if (aspect_ratio == 1)
1123 serge 691
		vsize = (hsize * 3) / 4;
692
	else if (aspect_ratio == 2)
693
		vsize = (hsize * 4) / 5;
694
	else
695
		vsize = (hsize * 9) / 16;
1963 serge 696
 
697
	/* HDTV hack, part 1 */
698
	if (vrefresh_rate == 60 &&
699
	    ((hsize == 1360 && vsize == 765) ||
700
	     (hsize == 1368 && vsize == 769))) {
701
		hsize = 1366;
702
		vsize = 768;
703
	}
704
 
705
	/*
706
	 * If this connector already has a mode for this size and refresh
707
	 * rate (because it came from detailed or CVT info), use that
708
	 * instead.  This way we don't have to guess at interlace or
709
	 * reduced blanking.
710
	 */
711
	list_for_each_entry(m, &connector->probed_modes, head)
712
		if (m->hdisplay == hsize && m->vdisplay == vsize &&
713
		    drm_mode_vrefresh(m) == vrefresh_rate)
714
			return NULL;
715
 
716
	/* HDTV hack, part 2 */
717
	if (hsize == 1366 && vsize == 768 && vrefresh_rate == 60) {
718
		mode = drm_cvt_mode(dev, 1366, 768, vrefresh_rate, 0, 0,
1221 serge 719
				    false);
1179 serge 720
		mode->hdisplay = 1366;
1963 serge 721
		mode->hsync_start = mode->hsync_start - 1;
722
		mode->hsync_end = mode->hsync_end - 1;
1179 serge 723
		return mode;
724
	}
1963 serge 725
 
1179 serge 726
	/* check whether it can be found in default mode table */
1963 serge 727
	mode = drm_mode_find_dmt(dev, hsize, vsize, vrefresh_rate);
1179 serge 728
	if (mode)
729
		return mode;
1123 serge 730
 
1179 serge 731
	switch (timing_level) {
732
	case LEVEL_DMT:
733
		break;
734
	case LEVEL_GTF:
735
		mode = drm_gtf_mode(dev, hsize, vsize, vrefresh_rate, 0, 0);
736
		break;
1963 serge 737
	case LEVEL_GTF2:
738
		/*
739
		 * This is potentially wrong if there's ever a monitor with
740
		 * more than one ranges section, each claiming a different
741
		 * secondary GTF curve.  Please don't do that.
742
		 */
743
		mode = drm_gtf_mode(dev, hsize, vsize, vrefresh_rate, 0, 0);
744
		if (drm_mode_hsync(mode) > drm_gtf2_hbreak(edid)) {
745
			kfree(mode);
746
			mode = drm_gtf_mode_complex(dev, hsize, vsize,
747
						    vrefresh_rate, 0, 0,
748
						    drm_gtf2_m(edid),
749
						    drm_gtf2_2c(edid),
750
						    drm_gtf2_k(edid),
751
						    drm_gtf2_2j(edid));
752
		}
753
		break;
1179 serge 754
	case LEVEL_CVT:
1221 serge 755
		mode = drm_cvt_mode(dev, hsize, vsize, vrefresh_rate, 0, 0,
756
				    false);
1179 serge 757
		break;
758
	}
1123 serge 759
	return mode;
760
}
761
 
1428 serge 762
/*
763
 * EDID is delightfully ambiguous about how interlaced modes are to be
764
 * encoded.  Our internal representation is of frame height, but some
765
 * HDTV detailed timings are encoded as field height.
766
 *
767
 * The format list here is from CEA, in frame size.  Technically we
768
 * should be checking refresh rate too.  Whatever.
769
 */
770
static void
771
drm_mode_do_interlace_quirk(struct drm_display_mode *mode,
772
			    struct detailed_pixel_timing *pt)
773
{
774
	int i;
775
	static const struct {
776
		int w, h;
777
	} cea_interlaced[] = {
778
		{ 1920, 1080 },
779
		{  720,  480 },
780
		{ 1440,  480 },
781
		{ 2880,  480 },
782
		{  720,  576 },
783
		{ 1440,  576 },
784
		{ 2880,  576 },
785
	};
786
 
787
	if (!(pt->misc & DRM_EDID_PT_INTERLACED))
788
		return;
789
 
1963 serge 790
	for (i = 0; i < ARRAY_SIZE(cea_interlaced); i++) {
1428 serge 791
		if ((mode->hdisplay == cea_interlaced[i].w) &&
792
		    (mode->vdisplay == cea_interlaced[i].h / 2)) {
793
			mode->vdisplay *= 2;
794
			mode->vsync_start *= 2;
795
			mode->vsync_end *= 2;
796
			mode->vtotal *= 2;
797
			mode->vtotal |= 1;
798
		}
799
	}
800
 
801
	mode->flags |= DRM_MODE_FLAG_INTERLACE;
802
}
803
 
1123 serge 804
/**
805
 * drm_mode_detailed - create a new mode from an EDID detailed timing section
806
 * @dev: DRM device (needed to create new mode)
807
 * @edid: EDID block
808
 * @timing: EDID detailed timing info
809
 * @quirks: quirks to apply
810
 *
811
 * An EDID detailed timing block contains enough info for us to create and
812
 * return a new struct drm_display_mode.
813
 */
814
static struct drm_display_mode *drm_mode_detailed(struct drm_device *dev,
815
						  struct edid *edid,
816
						  struct detailed_timing *timing,
817
						  u32 quirks)
818
{
819
	struct drm_display_mode *mode;
820
	struct detailed_pixel_timing *pt = &timing->data.pixel_data;
821
	unsigned hactive = (pt->hactive_hblank_hi & 0xf0) << 4 | pt->hactive_lo;
822
	unsigned vactive = (pt->vactive_vblank_hi & 0xf0) << 4 | pt->vactive_lo;
823
	unsigned hblank = (pt->hactive_hblank_hi & 0xf) << 8 | pt->hblank_lo;
824
	unsigned vblank = (pt->vactive_vblank_hi & 0xf) << 8 | pt->vblank_lo;
825
	unsigned hsync_offset = (pt->hsync_vsync_offset_pulse_width_hi & 0xc0) << 2 | pt->hsync_offset_lo;
826
	unsigned hsync_pulse_width = (pt->hsync_vsync_offset_pulse_width_hi & 0x30) << 4 | pt->hsync_pulse_width_lo;
827
	unsigned vsync_offset = (pt->hsync_vsync_offset_pulse_width_hi & 0xc) << 2 | pt->vsync_offset_pulse_width_lo >> 4;
828
	unsigned vsync_pulse_width = (pt->hsync_vsync_offset_pulse_width_hi & 0x3) << 4 | (pt->vsync_offset_pulse_width_lo & 0xf);
829
 
830
	/* ignore tiny modes */
831
	if (hactive < 64 || vactive < 64)
832
		return NULL;
833
 
834
	if (pt->misc & DRM_EDID_PT_STEREO) {
835
		printk(KERN_WARNING "stereo mode not supported\n");
836
		return NULL;
837
	}
838
	if (!(pt->misc & DRM_EDID_PT_SEPARATE_SYNC)) {
1404 serge 839
		printk(KERN_WARNING "composite sync not supported\n");
1123 serge 840
	}
841
 
1246 serge 842
	/* it is incorrect if hsync/vsync width is zero */
843
	if (!hsync_pulse_width || !vsync_pulse_width) {
844
		DRM_DEBUG_KMS("Incorrect Detailed timing. "
845
				"Wrong Hsync/Vsync pulse width\n");
846
		return NULL;
847
	}
1123 serge 848
	mode = drm_mode_create(dev);
849
	if (!mode)
850
		return NULL;
851
 
852
	mode->type = DRM_MODE_TYPE_DRIVER;
853
 
854
	if (quirks & EDID_QUIRK_135_CLOCK_TOO_HIGH)
855
		timing->pixel_clock = cpu_to_le16(1088);
856
 
857
	mode->clock = le16_to_cpu(timing->pixel_clock) * 10;
858
 
859
	mode->hdisplay = hactive;
860
	mode->hsync_start = mode->hdisplay + hsync_offset;
861
	mode->hsync_end = mode->hsync_start + hsync_pulse_width;
862
	mode->htotal = mode->hdisplay + hblank;
863
 
864
	mode->vdisplay = vactive;
865
	mode->vsync_start = mode->vdisplay + vsync_offset;
866
	mode->vsync_end = mode->vsync_start + vsync_pulse_width;
867
	mode->vtotal = mode->vdisplay + vblank;
868
 
1313 serge 869
	/* Some EDIDs have bogus h/vtotal values */
870
	if (mode->hsync_end > mode->htotal)
871
		mode->htotal = mode->hsync_end + 1;
872
	if (mode->vsync_end > mode->vtotal)
873
		mode->vtotal = mode->vsync_end + 1;
874
 
1963 serge 875
	drm_mode_do_interlace_quirk(mode, pt);
876
 
1123 serge 877
	drm_mode_set_name(mode);
878
 
879
	if (quirks & EDID_QUIRK_DETAILED_SYNC_PP) {
880
		pt->misc |= DRM_EDID_PT_HSYNC_POSITIVE | DRM_EDID_PT_VSYNC_POSITIVE;
881
	}
882
 
883
	mode->flags |= (pt->misc & DRM_EDID_PT_HSYNC_POSITIVE) ?
884
		DRM_MODE_FLAG_PHSYNC : DRM_MODE_FLAG_NHSYNC;
885
	mode->flags |= (pt->misc & DRM_EDID_PT_VSYNC_POSITIVE) ?
886
		DRM_MODE_FLAG_PVSYNC : DRM_MODE_FLAG_NVSYNC;
887
 
888
	mode->width_mm = pt->width_mm_lo | (pt->width_height_mm_hi & 0xf0) << 4;
889
	mode->height_mm = pt->height_mm_lo | (pt->width_height_mm_hi & 0xf) << 8;
890
 
891
	if (quirks & EDID_QUIRK_DETAILED_IN_CM) {
892
		mode->width_mm *= 10;
893
		mode->height_mm *= 10;
894
	}
895
 
896
	if (quirks & EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE) {
897
		mode->width_mm = edid->width_cm * 10;
898
		mode->height_mm = edid->height_cm * 10;
899
	}
900
 
901
	return mode;
902
}
903
 
1963 serge 904
static bool
905
mode_is_rb(const struct drm_display_mode *mode)
906
{
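	/* CVT 1.1 reduced blanking uses a fixed 160-pixel horizontal blank,
	 * 48-pixel front porch, 32-pixel hsync and a 3-line vertical front
	 * porch, which is the signature tested here. */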
907
	return (mode->htotal - mode->hdisplay == 160) &&
908
	       (mode->hsync_end - mode->hdisplay == 80) &&
909
	       (mode->hsync_end - mode->hsync_start == 32) &&
910
	       (mode->vsync_start - mode->vdisplay == 3);
911
}
912
 
913
static bool
914
mode_in_hsync_range(const struct drm_display_mode *mode,
915
		    struct edid *edid, u8 *t)
916
{
917
	int hsync, hmin, hmax;
918
 
919
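	/* Bytes 7-8 of a range descriptor hold min/max horizontal rate in kHz;
	 * EDID 1.4 adds +255 offset flags in byte 4. */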
	hmin = t[7];
920
	if (edid->revision >= 4)
921
	    hmin += ((t[4] & 0x04) ? 255 : 0);
922
	hmax = t[8];
923
	if (edid->revision >= 4)
924
	    hmax += ((t[4] & 0x08) ? 255 : 0);
925
	hsync = drm_mode_hsync(mode);
926
 
927
	return (hsync <= hmax && hsync >= hmin);
928
}
929
 
930
static bool
931
mode_in_vsync_range(const struct drm_display_mode *mode,
932
		    struct edid *edid, u8 *t)
933
{
934
	int vsync, vmin, vmax;
935
 
936
	vmin = t[5];
937
	if (edid->revision >= 4)
938
	    vmin += ((t[4] & 0x01) ? 255 : 0);
939
	vmax = t[6];
940
	if (edid->revision >= 4)
941
	    vmax += ((t[4] & 0x02) ? 255 : 0);
942
	vsync = drm_mode_vrefresh(mode);
943
 
944
	return (vsync <= vmax && vsync >= vmin);
945
}
946
 
947
static u32
948
range_pixel_clock(struct edid *edid, u8 *t)
949
{
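	/* Byte 9 of a range descriptor is the max pixel clock in 10 MHz units. */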
950
	/* unspecified */
951
	if (t[9] == 0 || t[9] == 255)
952
		return 0;
953
 
954
	/* 1.4 with CVT support gives us real precision, yay */
955
	if (edid->revision >= 4 && t[10] == 0x04)
956
		return (t[9] * 10000) - ((t[12] >> 2) * 250);
957
 
958
	/* 1.3 is pathetic, so fuzz up a bit */
959
	return t[9] * 10000 + 5001;
960
}
961
 
962
static bool
963
mode_in_range(const struct drm_display_mode *mode, struct edid *edid,
964
	      struct detailed_timing *timing)
965
{
966
	u32 max_clock;
967
	u8 *t = (u8 *)timing;
968
 
969
	if (!mode_in_hsync_range(mode, edid, t))
970
		return false;
971
 
972
	if (!mode_in_vsync_range(mode, edid, t))
973
		return false;
974
 
975
	if ((max_clock = range_pixel_clock(edid, t)))
976
		if (mode->clock > max_clock)
977
			return false;
978
 
979
	/* 1.4 max horizontal check */
980
	if (edid->revision >= 4 && t[10] == 0x04)
981
		if (t[13] && mode->hdisplay > 8 * (t[13] + (256 * (t[12]&0x3))))
982
			return false;
983
 
984
	if (mode_is_rb(mode) && !drm_monitor_supports_rb(edid))
985
		return false;
986
 
987
	return true;
988
}
989
 
1123 serge 990
/*
1963 serge 991
 * XXX If drm_dmt_modes ever regrows the CVT-R modes (and it will) this will
992
 * need to account for them.
1123 serge 993
 */
1963 serge 994
static int
995
drm_gtf_modes_for_range(struct drm_connector *connector, struct edid *edid,
996
				   struct detailed_timing *timing)
997
{
998
	int i, modes = 0;
999
	struct drm_display_mode *newmode;
1000
	struct drm_device *dev = connector->dev;
1123 serge 1001
 
1963 serge 1002
	for (i = 0; i < drm_num_dmt_modes; i++) {
1003
		if (mode_in_range(drm_dmt_modes + i, edid, timing)) {
1004
			newmode = drm_mode_duplicate(dev, &drm_dmt_modes[i]);
1005
			if (newmode) {
1006
				drm_mode_probed_add(connector, newmode);
1007
				modes++;
1008
			}
1009
		}
1010
	}
1123 serge 1011
 
1963 serge 1012
	return modes;
1013
}
1014
 
1015
static void
1016
do_inferred_modes(struct detailed_timing *timing, void *c)
1017
{
1018
	struct detailed_mode_closure *closure = c;
1019
	struct detailed_non_pixel *data = &timing->data.other_data;
1020
	int gtf = (closure->edid->features & DRM_EDID_FEATURE_DEFAULT_GTF);
1021
 
1022
	if (gtf && data->type == EDID_DETAIL_MONITOR_RANGE)
1023
		closure->modes += drm_gtf_modes_for_range(closure->connector,
1024
							  closure->edid,
1025
							  timing);
1026
}
1027
 
1028
static int
1029
add_inferred_modes(struct drm_connector *connector, struct edid *edid)
1030
{
1031
	struct detailed_mode_closure closure = {
1032
		connector, edid, 0, 0, 0
1033
	};
1034
 
1035
	if (version_greater(edid, 1, 0))
1036
		drm_for_each_detailed_block((u8 *)edid, do_inferred_modes,
1037
					    &closure);
1038
 
1039
	return closure.modes;
1040
}
1041
 
1042
static int
1043
drm_est3_modes(struct drm_connector *connector, struct detailed_timing *timing)
1044
{
1045
	int i, j, m, modes = 0;
1046
	struct drm_display_mode *mode;
1047
	u8 *est = ((u8 *)timing) + 5;
1048
 
1049
	for (i = 0; i < 6; i++) {
1050
		for (j = 7; j > 0; j--) {
1051
			m = (i * 8) + (7 - j);
1052
			if (m >= ARRAY_SIZE(est3_modes))
1053
				break;
1054
			if (est[i] & (1 << j)) {
1055
				mode = drm_mode_find_dmt(connector->dev,
1056
							 est3_modes[m].w,
1057
							 est3_modes[m].h,
1058
							 est3_modes[m].r
1059
							 /*, est3_modes[m].rb */);
1060
				if (mode) {
1061
					drm_mode_probed_add(connector, mode);
1062
					modes++;
1063
				}
1064
			}
1065
		}
1066
	}
1067
 
1068
	return modes;
1069
}
1070
 
1071
static void
1072
do_established_modes(struct detailed_timing *timing, void *c)
1073
{
1074
	struct detailed_mode_closure *closure = c;
1075
		struct detailed_non_pixel *data = &timing->data.other_data;
1076
 
1077
	if (data->type == EDID_DETAIL_EST_TIMINGS)
1078
		closure->modes += drm_est3_modes(closure->connector, timing);
1079
}
1080
 
1123 serge 1081
/**
1082
 * add_established_modes - get est. modes from EDID and add them
1083
 * @edid: EDID block to scan
1084
 *
1085
 * Each EDID block contains a bitmap of the supported "established modes" list
1086
 * (defined above).  Tease them out and add them to the global modes list.
1087
 */
1963 serge 1088
static int
1089
add_established_modes(struct drm_connector *connector, struct edid *edid)
1123 serge 1090
{
1091
	struct drm_device *dev = connector->dev;
1092
	unsigned long est_bits = edid->established_timings.t1 |
1093
		(edid->established_timings.t2 << 8) |
1094
		((edid->established_timings.mfg_rsvd & 0x80) << 9);
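	/* Established timings live in EDID bytes 35-37; only bit 7 of the
	 * manufacturer-reserved byte is mapped to a mode here. */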
1095
	int i, modes = 0;
1963 serge 1096
	struct detailed_mode_closure closure = {
1097
		connector, edid, 0, 0, 0
1098
	};
1123 serge 1099
 
1963 serge 1100
	for (i = 0; i <= EDID_EST_TIMINGS; i++) {
1123 serge 1101
		if (est_bits & (1<<i)) {
1102
			struct drm_display_mode *newmode;
1103
			newmode = drm_mode_duplicate(dev, &edid_est_modes[i]);
1104
			if (newmode) {
1963 serge 1105
		drm_mode_probed_add(connector, newmode);
1123 serge 1106
				modes++;
1107
			}
1108
		}
1963 serge 1109
	}
1123 serge 1110
 
1963 serge 1111
	if (version_greater(edid, 1, 0))
1112
		    drm_for_each_detailed_block((u8 *)edid,
1113
						do_established_modes, &closure);
1114
 
1115
	return modes + closure.modes;
1123 serge 1116
}
1963 serge 1117
 
1118
static void
1119
do_standard_modes(struct detailed_timing *timing, void *c)
1179 serge 1120
{
1963 serge 1121
	struct detailed_mode_closure *closure = c;
1122
	struct detailed_non_pixel *data = &timing->data.other_data;
1123
	struct drm_connector *connector = closure->connector;
1124
	struct edid *edid = closure->edid;
1125
 
1126
	if (data->type == EDID_DETAIL_STD_MODES) {
1127
		int i;
1128
		for (i = 0; i < 6; i++) {
1129
				struct std_timing *std;
1130
				struct drm_display_mode *newmode;
1131
 
1132
			std = &data->data.timings[i];
1133
			newmode = drm_mode_std(connector, edid, std,
1134
					       edid->revision);
1135
				if (newmode) {
1136
					drm_mode_probed_add(connector, newmode);
1137
				closure->modes++;
1138
				}
1139
			}
1140
		}
1179 serge 1141
}
1123 serge 1142
 
1143
/**
1144
 * add_standard_modes - get std. modes from EDID and add them
1145
 * @edid: EDID block to scan
1146
 *
1963 serge 1147
 * Standard modes can be calculated using the appropriate standard (DMT,
1148
 * GTF or CVT). Grab them from @edid and add them to the list.
1123 serge 1149
 */
1963 serge 1150
static int
1151
add_standard_modes(struct drm_connector *connector, struct edid *edid)
1123 serge 1152
{
1153
	int i, modes = 0;
1963 serge 1154
	struct detailed_mode_closure closure = {
1155
		connector, edid, 0, 0, 0
1156
	};
1123 serge 1157
 
1158
	for (i = 0; i < EDID_STD_TIMINGS; i++) {
1159
		struct drm_display_mode *newmode;
1160
 
1963 serge 1161
		newmode = drm_mode_std(connector, edid,
1162
				       &edid->standard_timings[i],
1163
				       edid->revision);
1123 serge 1164
		if (newmode) {
1165
			drm_mode_probed_add(connector, newmode);
1166
			modes++;
1167
		}
1168
	}
1169
 
1963 serge 1170
	if (version_greater(edid, 1, 0))
1171
		drm_for_each_detailed_block((u8 *)edid, do_standard_modes,
1172
					    &closure);
1123 serge 1173
 
1963 serge 1174
	/* XXX should also look for standard codes in VTB blocks */
1321 serge 1175
 
1963 serge 1176
	return modes + closure.modes;
1321 serge 1177
}
1178
 
1179
static int drm_cvt_modes(struct drm_connector *connector,
1180
			 struct detailed_timing *timing)
1181
{
1123 serge 1182
	int i, j, modes = 0;
1321 serge 1183
	struct drm_display_mode *newmode;
1184
	struct drm_device *dev = connector->dev;
1185
	struct cvt_timing *cvt;
1186
	const int rates[] = { 60, 85, 75, 60, 50 };
1404 serge 1187
	const u8 empty[3] = { 0, 0, 0 };
1123 serge 1188
 
1321 serge 1189
	for (i = 0; i < 4; i++) {
1404 serge 1190
		int uninitialized_var(width), height;
1321 serge 1191
		cvt = &(timing->data.other_data.data.cvt[i]);
1179 serge 1192
 
1404 serge 1193
		if (!memcmp(cvt->code, empty, 3))
1963 serge 1194
				continue;
1404 serge 1195
 
1196
		height = (cvt->code[0] + ((cvt->code[1] & 0xf0) << 4) + 1) * 2;
1197
		switch (cvt->code[1] & 0x0c) {
1321 serge 1198
		case 0x00:
1199
			width = height * 4 / 3;
1200
			break;
1404 serge 1201
		case 0x04:
1321 serge 1202
			width = height * 16 / 9;
1203
			break;
1404 serge 1204
		case 0x08:
1321 serge 1205
			width = height * 16 / 10;
1206
			break;
1404 serge 1207
		case 0x0c:
1321 serge 1208
			width = height * 15 / 9;
1209
			break;
1210
		}
1211
 
1212
		for (j = 1; j < 5; j++) {
1213
			if (cvt->code[2] & (1 << j)) {
1214
				newmode = drm_cvt_mode(dev, width, height,
1215
						       rates[j], j == 0,
1216
						       false, false);
1217
				if (newmode) {
1218
					drm_mode_probed_add(connector, newmode);
1219
					modes++;
1220
				}
1221
			}
1222
		}
1963 serge 1223
		}
1321 serge 1224
 
1225
	return modes;
1226
}
1227
 
1963 serge 1228
static void
1229
do_cvt_mode(struct detailed_timing *timing, void *c)
1321 serge 1230
{
1963 serge 1231
	struct detailed_mode_closure *closure = c;
1232
	struct detailed_non_pixel *data = &timing->data.other_data;
1123 serge 1233
 
1963 serge 1234
	if (data->type == EDID_DETAIL_CVT_3BYTE)
1235
		closure->modes += drm_cvt_modes(closure->connector, timing);
1236
}
1321 serge 1237
 
1963 serge 1238
static int
1239
add_cvt_modes(struct drm_connector *connector, struct edid *edid)
1240
{
1241
	struct detailed_mode_closure closure = {
1242
		connector, edid, 0, 0, 0
1243
	};
1321 serge 1244
 
1963 serge 1245
	if (version_greater(edid, 1, 2))
1246
		drm_for_each_detailed_block((u8 *)edid, do_cvt_mode, &closure);
1321 serge 1247
 
1963 serge 1248
	/* XXX should also look for CVT codes in VTB blocks */
1123 serge 1249
 
1963 serge 1250
	return closure.modes;
1321 serge 1251
}
1252
 
1963 serge 1253
static void
1254
do_detailed_mode(struct detailed_timing *timing, void *c)
1321 serge 1255
{
1963 serge 1256
	struct detailed_mode_closure *closure = c;
1257
	struct drm_display_mode *newmode;
1321 serge 1258
 
1963 serge 1259
	if (timing->pixel_clock) {
1260
		newmode = drm_mode_detailed(closure->connector->dev,
1261
					    closure->edid, timing,
1262
					    closure->quirks);
1263
		if (!newmode)
1264
			return;
1321 serge 1265
 
1963 serge 1266
		if (closure->preferred)
1267
			newmode->type |= DRM_MODE_TYPE_PREFERRED;
1123 serge 1268
 
1963 serge 1269
		drm_mode_probed_add(closure->connector, newmode);
1270
		closure->modes++;
1271
		closure->preferred = 0;
1272
	}
1179 serge 1273
}
1321 serge 1274
 
1963 serge 1275
/*
1276
 * add_detailed_modes - Add modes from detailed timings
1179 serge 1277
 * @connector: attached connector
1963 serge 1278
 * @edid: EDID block to scan
1179 serge 1279
 * @quirks: quirks to apply
1280
 */
1963 serge 1281
static int
1282
add_detailed_modes(struct drm_connector *connector, struct edid *edid,
1283
		   u32 quirks)
1179 serge 1284
{
1963 serge 1285
	struct detailed_mode_closure closure = {
1286
		connector,
1287
		edid,
1288
		1,
1289
		quirks,
1290
 
1291
	};
1179 serge 1292
 
1963 serge 1293
	if (closure.preferred && !version_greater(edid, 1, 3))
1294
		closure.preferred =
1295
		    (edid->features & DRM_EDID_FEATURE_PREFERRED_TIMING);
1179 serge 1296
 
1963 serge 1297
	drm_for_each_detailed_block((u8 *)edid, do_detailed_mode, &closure);
1179 serge 1298
 
1963 serge 1299
	return closure.modes;
1300
}
1179 serge 1301
 
1963 serge 1302
#define HDMI_IDENTIFIER 0x000C03
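/* IEEE OUI registered to HDMI Licensing; found in a CEA Vendor Specific
 * Data Block on HDMI sinks. */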
1303
#define AUDIO_BLOCK	0x01
1304
#define VENDOR_BLOCK    0x03
1305
#define EDID_BASIC_AUDIO	(1 << 6)
1179 serge 1306
 
1123 serge 1307
/**
1963 serge 1308
 * Search EDID for CEA extension block.
1123 serge 1309
 */
1963 serge 1310
u8 *drm_find_cea_extension(struct edid *edid)
1123 serge 1311
{
1963 serge 1312
	u8 *edid_ext = NULL;
1321 serge 1313
	int i;
1123 serge 1314
 
1963 serge 1315
	/* No EDID or EDID extensions */
1316
	if (edid == NULL || edid->extensions == 0)
1317
		return NULL;
1321 serge 1318
 
1963 serge 1319
	/* Find CEA extension */
1320
	for (i = 0; i < edid->extensions; i++) {
1321
		edid_ext = (u8 *)edid + EDID_LENGTH * (i + 1);
1322
		if (edid_ext[0] == CEA_EXT)
1323
			break;
1123 serge 1324
	}
1325
 
1963 serge 1326
	if (i == edid->extensions)
1327
		return NULL;
1123 serge 1328
 
1963 serge 1329
	return edid_ext;
1123 serge 1330
}
1963 serge 1331
EXPORT_SYMBOL(drm_find_cea_extension);
1123 serge 1332
 
1333
/**
1334
 * drm_detect_hdmi_monitor - detect whether monitor is hdmi.
1335
 * @edid: monitor EDID information
1336
 *
1337
 * Parse the CEA extension according to CEA-861-B.
1338
 * Return true if HDMI, false if not or unknown.
1339
 */
1340
bool drm_detect_hdmi_monitor(struct edid *edid)
1341
{
1963 serge 1342
	u8 *edid_ext;
1343
	int i, hdmi_id;
1123 serge 1344
	int start_offset, end_offset;
1345
	bool is_hdmi = false;
1346
 
1963 serge 1347
	edid_ext = drm_find_cea_extension(edid);
1348
	if (!edid_ext)
1123 serge 1349
		goto end;
1350
 
1351
	/* Data block offset in CEA extension block */
1352
	start_offset = 4;
1353
	end_offset = edid_ext[2];
1354
 
1355
	/*
1356
	 * Because HDMI identifier is in Vendor Specific Block,
1357
	 * search it from all data blocks of CEA extension.
1358
	 */
1359
	for (i = start_offset; i < end_offset;
1360
		/* Increased by data block len */
1361
		i += ((edid_ext[i] & 0x1f) + 1)) {
1362
		/* Find vendor specific block */
1363
		if ((edid_ext[i] >> 5) == VENDOR_BLOCK) {
1364
			hdmi_id = edid_ext[i + 1] | (edid_ext[i + 2] << 8) |
1365
				  edid_ext[i + 3] << 16;
1366
			/* Find HDMI identifier */
1367
			if (hdmi_id == HDMI_IDENTIFIER)
1368
				is_hdmi = true;
1369
			break;
1370
		}
1371
	}
1372
 
1373
end:
1374
	return is_hdmi;
1375
}
1376
EXPORT_SYMBOL(drm_detect_hdmi_monitor);
1377
 
1378
/**
1963 serge 1379
 * drm_detect_monitor_audio - check monitor audio capability
1380
 *
1381
 * Monitor should have CEA extension block.
1382
 * If monitor has 'basic audio', but no CEA audio blocks, it's 'basic
1383
 * audio' only. If there is any audio extension block and supported
1384
 * audio format, assume at least 'basic audio' support, even if 'basic
1385
 * audio' is not defined in EDID.
1386
 *
1387
 */
1388
bool drm_detect_monitor_audio(struct edid *edid)
1389
{
1390
	u8 *edid_ext;
1391
	int i, j;
1392
	bool has_audio = false;
1393
	int start_offset, end_offset;
1394
 
1395
	edid_ext = drm_find_cea_extension(edid);
1396
	if (!edid_ext)
1397
		goto end;
1398
 
1399
	has_audio = ((edid_ext[3] & EDID_BASIC_AUDIO) != 0);
1400
 
1401
	if (has_audio) {
1402
		DRM_DEBUG_KMS("Monitor has basic audio support\n");
1403
		goto end;
1404
	}
1405
 
1406
	/* Data block offset in CEA extension block */
1407
	start_offset = 4;
1408
	end_offset = edid_ext[2];
1409
 
1410
	for (i = start_offset; i < end_offset;
1411
			i += ((edid_ext[i] & 0x1f) + 1)) {
1412
		if ((edid_ext[i] >> 5) == AUDIO_BLOCK) {
1413
			has_audio = true;
1414
			for (j = 1; j < (edid_ext[i] & 0x1f); j += 3)
1415
				DRM_DEBUG_KMS("CEA audio format %d\n",
1416
					      (edid_ext[i + j] >> 3) & 0xf);
1417
			goto end;
1418
		}
1419
	}
1420
end:
1421
	return has_audio;
1422
}
1423
EXPORT_SYMBOL(drm_detect_monitor_audio);
1424
 
1425
/**
1426
 * drm_add_display_info - pull display info out if present
1427
 * @edid: EDID data
1428
 * @info: display info (attached to connector)
1429
 *
1430
 * Grab any available display info and stuff it into the drm_display_info
1431
 * structure that's part of the connector.  Useful for tracking bpp and
1432
 * color spaces.
1433
 */
1434
static void drm_add_display_info(struct edid *edid,
1435
				 struct drm_display_info *info)
1436
{
1437
	info->width_mm = edid->width_cm * 10;
1438
	info->height_mm = edid->height_cm * 10;
1439
 
1440
	/* driver figures it out in this case */
1441
	info->bpc = 0;
1442
	info->color_formats = 0;
1443
 
1444
	/* Only defined for 1.4 with digital displays */
1445
	if (edid->revision < 4)
1446
		return;
1447
 
1448
	if (!(edid->input & DRM_EDID_INPUT_DIGITAL))
1449
		return;
1450
 
1451
	switch (edid->input & DRM_EDID_DIGITAL_DEPTH_MASK) {
1452
	case DRM_EDID_DIGITAL_DEPTH_6:
1453
		info->bpc = 6;
1454
		break;
1455
	case DRM_EDID_DIGITAL_DEPTH_8:
1456
		info->bpc = 8;
1457
		break;
1458
	case DRM_EDID_DIGITAL_DEPTH_10:
1459
		info->bpc = 10;
1460
		break;
1461
	case DRM_EDID_DIGITAL_DEPTH_12:
1462
		info->bpc = 12;
1463
		break;
1464
	case DRM_EDID_DIGITAL_DEPTH_14:
1465
		info->bpc = 14;
1466
		break;
1467
	case DRM_EDID_DIGITAL_DEPTH_16:
1468
		info->bpc = 16;
1469
		break;
1470
	case DRM_EDID_DIGITAL_DEPTH_UNDEF:
1471
	default:
1472
		info->bpc = 0;
1473
		break;
1474
	}
1475
 
1476
	info->color_formats = DRM_COLOR_FORMAT_RGB444;
1477
	if (edid->features & DRM_EDID_FEATURE_RGB_YCRCB444)
1478
		info->color_formats |= DRM_COLOR_FORMAT_YCRCB444;
1479
	if (edid->features & DRM_EDID_FEATURE_RGB_YCRCB422)
1480
		info->color_formats |= DRM_COLOR_FORMAT_YCRCB422;
1481
}
1482
 
1483
/**
1123 serge 1484
 * drm_add_edid_modes - add modes from EDID data, if available
1485
 * @connector: connector we're probing
1486
 * @edid: edid data
1487
 *
1488
 * Add the specified modes to the connector's mode list.
1489
 *
1490
 * Return number of modes added or 0 if we couldn't find any.
1491
 */
1492
int drm_add_edid_modes(struct drm_connector *connector, struct edid *edid)
1493
{
1494
	int num_modes = 0;
1495
	u32 quirks;
1496
 
1497
	if (edid == NULL) {
1498
		return 0;
1499
	}
1430 serge 1500
	if (!drm_edid_is_valid(edid)) {
1963 serge 1501
		dev_warn(connector->dev->dev, "%s: EDID invalid.\n",
1246 serge 1502
			 drm_get_connector_name(connector));
1123 serge 1503
		return 0;
1504
	}
1505
 
1506
	quirks = edid_get_quirks(edid);
1507
 
1963 serge 1508
	/*
1509
	 * EDID spec says modes should be preferred in this order:
1510
	 * - preferred detailed mode
1511
	 * - other detailed modes from base block
1512
	 * - detailed modes from extension blocks
1513
	 * - CVT 3-byte code modes
1514
	 * - standard timing codes
1515
	 * - established timing codes
1516
	 * - modes inferred from GTF or CVT range information
1517
	 *
1518
	 * We get this pretty much right.
1519
	 *
1520
	 * XXX order for additional mode types in extension blocks?
1521
	 */
1522
	num_modes += add_detailed_modes(connector, edid, quirks);
1523
	num_modes += add_cvt_modes(connector, edid);
1524
	num_modes += add_standard_modes(connector, edid);
1123 serge 1525
	num_modes += add_established_modes(connector, edid);
1963 serge 1526
	num_modes += add_inferred_modes(connector, edid);
1123 serge 1527
 
1528
	if (quirks & (EDID_QUIRK_PREFER_LARGE_60 | EDID_QUIRK_PREFER_LARGE_75))
1529
		edid_fixup_preferred(connector, quirks);
1530
 
1963 serge 1531
	drm_add_display_info(edid, &connector->display_info);
1123 serge 1532
 
1533
	return num_modes;
1534
}
1535
EXPORT_SYMBOL(drm_add_edid_modes);
1179 serge 1536
 
1537
/**
1538
 * drm_add_modes_noedid - add modes for the connectors without EDID
1539
 * @connector: connector we're probing
1540
 * @hdisplay: the horizontal display limit
1541
 * @vdisplay: the vertical display limit
1542
 *
1543
 * Add the specified modes to the connector's mode list. Only when the
1544
 * hdisplay/vdisplay does not exceed the given limits will a mode be added.
1545
 *
1546
 * Return number of modes added or 0 if we couldn't find any.
1547
 */
1548
int drm_add_modes_noedid(struct drm_connector *connector,
1549
			int hdisplay, int vdisplay)
1550
{
1551
	int i, count, num_modes = 0;
1963 serge 1552
	struct drm_display_mode *mode;
1179 serge 1553
	struct drm_device *dev = connector->dev;
1554
 
1555
	count = sizeof(drm_dmt_modes) / sizeof(struct drm_display_mode);
1556
	if (hdisplay < 0)
1557
		hdisplay = 0;
1558
	if (vdisplay < 0)
1559
		vdisplay = 0;
1560
 
1561
	for (i = 0; i < count; i++) {
1963 serge 1562
		const struct drm_display_mode *ptr = &drm_dmt_modes[i];
1179 serge 1563
		if (hdisplay && vdisplay) {
1564
			/*
1565
			 * Only when two are valid, they will be used to check
1566
			 * whether the mode should be added to the mode list of
1567
			 * the connector.
1568
			 */
1569
			if (ptr->hdisplay > hdisplay ||
1570
					ptr->vdisplay > vdisplay)
1571
				continue;
1572
		}
1321 serge 1573
		if (drm_mode_vrefresh(ptr) > 61)
1574
			continue;
1179 serge 1575
		mode = drm_mode_duplicate(dev, ptr);
1576
		if (mode) {
1577
			drm_mode_probed_add(connector, mode);
1578
			num_modes++;
1579
		}
1580
	}
1581
	return num_modes;
1582
}
1583
EXPORT_SYMBOL(drm_add_modes_noedid);