Subversion Repositories: Kolibri OS (Rev 1963)
/*
 * Copyright (c) 2006 Luc Verhaegen (quirks list)
 * Copyright (c) 2007-2008 Intel Corporation
 *   Jesse Barnes
 * Copyright 2010 Red Hat, Inc.
 *
 * DDC probing routines (drm_ddc_read & drm_do_probe_ddc_edid) originally from
 * FB layer.
 *   Copyright (C) 2006 Dennis Munsie
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sub license,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/i2c.h>
#include "drmP.h"
#include "drm_edid.h"
#include "drm_edid_modes.h"

#define version_greater(edid, maj, min) \
	(((edid)->version > (maj)) || \
	 ((edid)->version == (maj) && (edid)->revision > (min)))

#define EDID_EST_TIMINGS 16
#define EDID_STD_TIMINGS 8
#define EDID_DETAILED_TIMINGS 4

/*
 * EDID blocks out in the wild have a variety of bugs, try to collect
 * them here (note that userspace may work around broken monitors first,
 * but fixes should make their way here so that the kernel "just works"
 * on as many displays as possible).
 */

/* First detailed mode wrong, use largest 60Hz mode */
#define EDID_QUIRK_PREFER_LARGE_60		(1 << 0)
/* Reported 135MHz pixel clock is too high, needs adjustment */
#define EDID_QUIRK_135_CLOCK_TOO_HIGH		(1 << 1)
/* Prefer the largest mode at 75 Hz */
#define EDID_QUIRK_PREFER_LARGE_75		(1 << 2)
/* Detail timing is in cm not mm */
#define EDID_QUIRK_DETAILED_IN_CM		(1 << 3)
/* Detailed timing descriptors have bogus size values, so just take the
 * maximum size and use that.
 */
#define EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE	(1 << 4)
/* Monitor forgot to set the first detailed is preferred bit. */
#define EDID_QUIRK_FIRST_DETAILED_PREFERRED	(1 << 5)
/* use +hsync +vsync for detailed mode */
#define EDID_QUIRK_DETAILED_SYNC_PP		(1 << 6)

struct detailed_mode_closure {
	struct drm_connector *connector;
	struct edid *edid;
	bool preferred;
	u32 quirks;
	int modes;
};

#define LEVEL_DMT	0
#define LEVEL_GTF	1
#define LEVEL_GTF2	2
#define LEVEL_CVT	3

static struct edid_quirk {
	char *vendor;
	int product_id;
	u32 quirks;
} edid_quirk_list[] = {
	/* Acer AL1706 */
	{ "ACR", 44358, EDID_QUIRK_PREFER_LARGE_60 },
	/* Acer F51 */
	{ "API", 0x7602, EDID_QUIRK_PREFER_LARGE_60 },
	/* Unknown Acer */
	{ "ACR", 2423, EDID_QUIRK_FIRST_DETAILED_PREFERRED },

	/* Belinea 10 15 55 */
	{ "MAX", 1516, EDID_QUIRK_PREFER_LARGE_60 },
	{ "MAX", 0x77e, EDID_QUIRK_PREFER_LARGE_60 },

	/* Envision Peripherals, Inc. EN-7100e */
	{ "EPI", 59264, EDID_QUIRK_135_CLOCK_TOO_HIGH },
	/* Envision EN2028 */
	{ "EPI", 8232, EDID_QUIRK_PREFER_LARGE_60 },

	/* Funai Electronics PM36B */
	{ "FCM", 13600, EDID_QUIRK_PREFER_LARGE_75 |
	  EDID_QUIRK_DETAILED_IN_CM },

	/* LG Philips LCD LP154W01-A5 */
	{ "LPL", 0, EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE },
	{ "LPL", 0x2a00, EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE },

	/* Philips 107p5 CRT */
	{ "PHL", 57364, EDID_QUIRK_FIRST_DETAILED_PREFERRED },

	/* Proview AY765C */
	{ "PTS", 765, EDID_QUIRK_FIRST_DETAILED_PREFERRED },

	/* Samsung SyncMaster 205BW.  Note: irony */
	{ "SAM", 541, EDID_QUIRK_DETAILED_SYNC_PP },
	/* Samsung SyncMaster 22[5-6]BW */
	{ "SAM", 596, EDID_QUIRK_PREFER_LARGE_60 },
	{ "SAM", 638, EDID_QUIRK_PREFER_LARGE_60 },
};

/*** DDC fetch and block validation ***/

static const u8 edid_header[] = {
	0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00
};
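
/*
 * A well-formed base block starts with the 8-byte pattern above, and its
 * final byte (the checksum) is chosen so that all 128 bytes sum to 0 mod
 * 256; drm_edid_block_valid() below checks both properties.
 */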

/*
 * Sanity check the EDID block (base or extension).  Return 0 if the block
 * doesn't check out, or 1 if it's valid.
 */
static bool
drm_edid_block_valid(u8 *raw_edid)
{
	int i;
	u8 csum = 0;
	struct edid *edid = (struct edid *)raw_edid;

	if (raw_edid[0] == 0x00) {
		int score = 0;

		for (i = 0; i < sizeof(edid_header); i++)
			if (raw_edid[i] == edid_header[i])
				score++;

		if (score == 8) ;
		else if (score >= 6) {
			DRM_DEBUG("Fixing EDID header, your hardware may be failing\n");
			memcpy(raw_edid, edid_header, sizeof(edid_header));
		} else {
			goto bad;
		}
	}

	for (i = 0; i < EDID_LENGTH; i++)
		csum += raw_edid[i];
	if (csum) {
		DRM_ERROR("EDID checksum is invalid, remainder is %d\n", csum);

		/* allow CEA to slide through, switches mangle this */
		if (raw_edid[0] != 0x02)
			goto bad;
	}

	/* per-block-type checks */
	switch (raw_edid[0]) {
	case 0: /* base */
		if (edid->version != 1) {
			DRM_ERROR("EDID has major version %d, instead of 1\n", edid->version);
			goto bad;
		}

		if (edid->revision > 4)
			DRM_DEBUG("EDID minor > 4, assuming backward compatibility\n");
		break;

	default:
		break;
	}

	return 1;

bad:
	if (raw_edid) {
		printk(KERN_ERR "Raw EDID:\n");
//		print_hex_dump_bytes(KERN_ERR, DUMP_PREFIX_NONE, raw_edid, EDID_LENGTH);
		printk(KERN_ERR "\n");
	}
	return 0;
}

/**
 * drm_edid_is_valid - sanity check EDID data
 * @edid: EDID data
 *
 * Sanity-check an entire EDID record (including extensions)
 */
bool drm_edid_is_valid(struct edid *edid)
{
	int i;
	u8 *raw = (u8 *)edid;

	if (!edid)
		return false;

	for (i = 0; i <= edid->extensions; i++)
		if (!drm_edid_block_valid(raw + i * EDID_LENGTH))
			return false;

	return true;
}
EXPORT_SYMBOL(drm_edid_is_valid);
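
/*
 * Rough usage sketch: a driver that already holds a raw EDID buffer of
 * EDID_LENGTH * (1 + extensions) bytes can validate it before parsing:
 *
 *	struct edid *edid = (struct edid *)raw_buf;
 *	if (drm_edid_is_valid(edid))
 *		num_modes = drm_add_edid_modes(connector, edid);
 *
 * raw_buf/num_modes are illustrative names, not part of this file.
 */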

#define DDC_ADDR 0x50
#define DDC_SEGMENT_ADDR 0x30
/**
 * Get EDID information via I2C.
 *
 * \param adapter : i2c device adaptor
 * \param buf     : EDID data buffer to be filled
 * \param len     : EDID data buffer length
 * \return 0 on success or -1 on failure.
 *
 * Try to fetch EDID information by calling i2c driver function.
 */
static int
drm_do_probe_ddc_edid(struct i2c_adapter *adapter, unsigned char *buf,
		      int block, int len)
{
	unsigned char start = block * EDID_LENGTH;
	int ret, retries = 5;

	/* The core i2c driver will automatically retry the transfer if the
	 * adapter reports EAGAIN. However, we find that bit-banging transfers
	 * are susceptible to errors under a heavily loaded machine and
	 * generate spurious NAKs and timeouts. Retrying the transfer
	 * of the individual block a few times seems to overcome this.
	 */
	do {
		struct i2c_msg msgs[] = {
			{
				.addr	= DDC_ADDR,
				.flags	= 0,
				.len	= 1,
				.buf	= &start,
			}, {
				.addr	= DDC_ADDR,
				.flags	= I2C_M_RD,
				.len	= len,
				.buf	= buf,
			}
		};
		ret = i2c_transfer(adapter, msgs, 2);
	} while (ret != 2 && --retries);

	return ret == 2 ? 0 : -1;
}
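
/*
 * The transfer above is the plain DDC sequence: write one offset byte to
 * address 0x50, then read len bytes back from the same address.  Fetching
 * extension block 1, for example, amounts to:
 *
 *	start   = 1 * EDID_LENGTH;              offset 128
 *	msgs[0] : addr 0x50, write, 1 byte   -> the offset
 *	msgs[1] : addr 0x50, read, 128 bytes -> the block
 *
 * Blocks past the first two (offsets above 255) would also need the E-DDC
 * segment pointer at DDC_SEGMENT_ADDR (0x30), which this helper does not
 * program.
 */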

static bool drm_edid_is_zero(u8 *in_edid, int length)
{
	int i;
	u32 *raw_edid = (u32 *)in_edid;

	for (i = 0; i < length / 4; i++)
		if (*(raw_edid + i) != 0)
			return false;
	return true;
}

static u8 *
drm_do_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter)
{
	int i, j = 0, valid_extensions = 0;
	u8 *block, *new;
	size_t alloc_size;

	if ((block = kmalloc(EDID_LENGTH, GFP_KERNEL)) == NULL)
		return NULL;

	/* base block fetch */
	for (i = 0; i < 4; i++) {
		if (drm_do_probe_ddc_edid(adapter, block, 0, EDID_LENGTH))
			goto out;
		if (drm_edid_block_valid(block))
			break;
		if (i == 0 && drm_edid_is_zero(block, EDID_LENGTH)) {
			connector->null_edid_counter++;
			goto carp;
		}
	}
	if (i == 4)
		goto carp;

	/* if there's no extensions, we're done */
	if (block[0x7e] == 0)
		return block;

	alloc_size = (block[0x7e] + 1) * EDID_LENGTH;

	new = kmalloc(alloc_size, GFP_KERNEL);

	if (!new)
		goto out;

	memcpy(new, block, EDID_LENGTH);
	kfree(block);

	block = new;

	for (j = 1; j <= block[0x7e]; j++) {
		for (i = 0; i < 4; i++) {
			if (drm_do_probe_ddc_edid(adapter,
				  block + (valid_extensions + 1) * EDID_LENGTH,
				  j, EDID_LENGTH))
				goto out;
			if (drm_edid_block_valid(block + (valid_extensions + 1) * EDID_LENGTH)) {
				valid_extensions++;
				break;
			}
		}
		if (i == 4)
			dev_warn(connector->dev->dev,
				 "%s: Ignoring invalid EDID block %d.\n",
				 drm_get_connector_name(connector), j);
	}

	if (valid_extensions != block[0x7e]) {
		block[EDID_LENGTH-1] += block[0x7e] - valid_extensions;
		block[0x7e] = valid_extensions;
		new = kmalloc((valid_extensions + 1) * EDID_LENGTH, GFP_KERNEL);
		if (!new)
			goto out;
		memcpy(new, block, (valid_extensions + 1) * EDID_LENGTH);
		kfree(block);
		block = new;
	}

	return block;

carp:
	dev_warn(connector->dev->dev, "%s: EDID block %d invalid.\n",
		 drm_get_connector_name(connector), j);

out:
	kfree(block);
	return NULL;
}

/**
 * Probe DDC presence.
 *
 * \param adapter : i2c device adaptor
 * \return 1 on success
 */
static bool
drm_probe_ddc(struct i2c_adapter *adapter)
{
	unsigned char out;

	return (drm_do_probe_ddc_edid(adapter, &out, 0, 1) == 0);
}

/**
 * drm_get_edid - get EDID data, if available
 * @connector: connector we're probing
 * @adapter: i2c adapter to use for DDC
 *
 * Poke the given i2c channel to grab EDID data if possible.  If found,
 * attach it to the connector.
 *
 * Return edid data or NULL if we couldn't find any.
 */
struct edid *drm_get_edid(struct drm_connector *connector,
			  struct i2c_adapter *adapter)
{
	struct edid *edid = NULL;

	if (drm_probe_ddc(adapter))
		edid = (struct edid *)drm_do_get_edid(connector, adapter);

	connector->display_info.raw_edid = (char *)edid;

	return edid;
}
EXPORT_SYMBOL(drm_get_edid);
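
/*
 * Note on ownership: the buffer returned here is kmalloc'd in
 * drm_do_get_edid(), so callers are generally expected to kfree() it once
 * they have finished parsing it (see the ->get_modes() sketch near the end
 * of this file).
 */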

/*** EDID parsing ***/

/**
 * edid_vendor - match a string against EDID's obfuscated vendor field
 * @edid: EDID to match
 * @vendor: vendor string
 *
 * Returns true if @vendor is in @edid, false otherwise
 */
static bool edid_vendor(struct edid *edid, char *vendor)
{
	char edid_vendor[3];

	edid_vendor[0] = ((edid->mfg_id[0] & 0x7c) >> 2) + '@';
	edid_vendor[1] = (((edid->mfg_id[0] & 0x3) << 3) |
			  ((edid->mfg_id[1] & 0xe0) >> 5)) + '@';
	edid_vendor[2] = (edid->mfg_id[1] & 0x1f) + '@';

	return !strncmp(edid_vendor, vendor, 3);
}
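
/*
 * Worked example of the decode above: the PNP vendor ID "SAM" is stored as
 * three 5-bit letters ('A' = 1) packed big-endian into mfg_id[]:
 *
 *	'S' = 19 = 0b10011, 'A' = 1 = 0b00001, 'M' = 13 = 0b01101
 *	mfg_id[0] = 0b0_10011_00 = 0x4c
 *	mfg_id[1] = 0b001_01101  = 0x2d
 *
 * so an EDID with mfg_id = { 0x4c, 0x2d } matches the "SAM" quirk entries.
 */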

/**
 * edid_get_quirks - return quirk flags for a given EDID
 * @edid: EDID to process
 *
 * This tells subsequent routines what fixes they need to apply.
 */
static u32 edid_get_quirks(struct edid *edid)
{
	struct edid_quirk *quirk;
	int i;

	for (i = 0; i < ARRAY_SIZE(edid_quirk_list); i++) {
		quirk = &edid_quirk_list[i];

		if (edid_vendor(edid, quirk->vendor) &&
		    (EDID_PRODUCT_ID(edid) == quirk->product_id))
			return quirk->quirks;
	}

	return 0;
}

#define MODE_SIZE(m) ((m)->hdisplay * (m)->vdisplay)
#define MODE_REFRESH_DIFF(m,r) (abs((m)->vrefresh - target_refresh))

/**
 * edid_fixup_preferred - set preferred modes based on quirk list
 * @connector: has mode list to fix up
 * @quirks: quirks list
 *
 * Walk the mode list for @connector, clearing the preferred status
 * on existing modes and setting it anew for the right mode ala @quirks.
 */
static void edid_fixup_preferred(struct drm_connector *connector,
				 u32 quirks)
{
	struct drm_display_mode *t, *cur_mode, *preferred_mode;
	int target_refresh = 0;

	if (list_empty(&connector->probed_modes))
		return;

	if (quirks & EDID_QUIRK_PREFER_LARGE_60)
		target_refresh = 60;
	if (quirks & EDID_QUIRK_PREFER_LARGE_75)
		target_refresh = 75;

	preferred_mode = list_first_entry(&connector->probed_modes,
					  struct drm_display_mode, head);

	list_for_each_entry_safe(cur_mode, t, &connector->probed_modes, head) {
		cur_mode->type &= ~DRM_MODE_TYPE_PREFERRED;

		if (cur_mode == preferred_mode)
			continue;

		/* Largest mode is preferred */
		if (MODE_SIZE(cur_mode) > MODE_SIZE(preferred_mode))
			preferred_mode = cur_mode;

		/* At a given size, try to get closest to target refresh */
		if ((MODE_SIZE(cur_mode) == MODE_SIZE(preferred_mode)) &&
		    MODE_REFRESH_DIFF(cur_mode, target_refresh) <
		    MODE_REFRESH_DIFF(preferred_mode, target_refresh)) {
			preferred_mode = cur_mode;
		}
	}

	preferred_mode->type |= DRM_MODE_TYPE_PREFERRED;
}

struct drm_display_mode *drm_mode_find_dmt(struct drm_device *dev,
			int hsize, int vsize, int fresh)
{
	struct drm_display_mode *mode = NULL;
	int i;

	for (i = 0; i < drm_num_dmt_modes; i++) {
		const struct drm_display_mode *ptr = &drm_dmt_modes[i];
		if (hsize == ptr->hdisplay &&
			vsize == ptr->vdisplay &&
			fresh == drm_mode_vrefresh(ptr)) {
			/* get the expected default mode */
			mode = drm_mode_duplicate(dev, ptr);
			break;
		}
	}
	return mode;
}
EXPORT_SYMBOL(drm_mode_find_dmt);

typedef void detailed_cb(struct detailed_timing *timing, void *closure);

static void
cea_for_each_detailed_block(u8 *ext, detailed_cb *cb, void *closure)
{
	int i, n = 0;
	u8 rev = ext[0x01], d = ext[0x02];
	u8 *det_base = ext + d;

	switch (rev) {
	case 0:
		/* can't happen */
		return;
	case 1:
		/* have to infer how many blocks we have, check pixel clock */
		for (i = 0; i < 6; i++)
			if (det_base[18*i] || det_base[18*i+1])
				n++;
		break;
	default:
		/* explicit count */
		n = min(ext[0x03] & 0x0f, 6);
		break;
	}

	for (i = 0; i < n; i++)
		cb((struct detailed_timing *)(det_base + 18 * i), closure);
}

static void
vtb_for_each_detailed_block(u8 *ext, detailed_cb *cb, void *closure)
{
	unsigned int i, n = min((int)ext[0x02], 6);
	u8 *det_base = ext + 5;

	if (ext[0x01] != 1)
		return; /* unknown version */

	for (i = 0; i < n; i++)
		cb((struct detailed_timing *)(det_base + 18 * i), closure);
}

static void
drm_for_each_detailed_block(u8 *raw_edid, detailed_cb *cb, void *closure)
{
	int i;
	struct edid *edid = (struct edid *)raw_edid;

	if (edid == NULL)
		return;

	for (i = 0; i < EDID_DETAILED_TIMINGS; i++)
		cb(&(edid->detailed_timings[i]), closure);

	for (i = 1; i <= raw_edid[0x7e]; i++) {
		u8 *ext = raw_edid + (i * EDID_LENGTH);
		switch (*ext) {
		case CEA_EXT:
			cea_for_each_detailed_block(ext, cb, closure);
			break;
		case VTB_EXT:
			vtb_for_each_detailed_block(ext, cb, closure);
			break;
		default:
			break;
		}
	}
}

static void
is_rb(struct detailed_timing *t, void *data)
{
	u8 *r = (u8 *)t;
	if (r[3] == EDID_DETAIL_MONITOR_RANGE)
		if (r[15] & 0x10)
			*(bool *)data = true;
}

/* EDID 1.4 defines this explicitly.  For EDID 1.3, we guess, badly. */
static bool
drm_monitor_supports_rb(struct edid *edid)
{
	if (edid->revision >= 4) {
		bool ret = false;
		drm_for_each_detailed_block((u8 *)edid, is_rb, &ret);
		return ret;
	}

	return ((edid->input & DRM_EDID_INPUT_DIGITAL) != 0);
}

static void
find_gtf2(struct detailed_timing *t, void *data)
{
	u8 *r = (u8 *)t;
	if (r[3] == EDID_DETAIL_MONITOR_RANGE && r[10] == 0x02)
		*(u8 **)data = r;
}

/* Secondary GTF curve kicks in above some break frequency */
static int
drm_gtf2_hbreak(struct edid *edid)
{
	u8 *r = NULL;
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
	return r ? (r[12] * 2) : 0;
}

static int
drm_gtf2_2c(struct edid *edid)
{
	u8 *r = NULL;
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
	return r ? r[13] : 0;
}

static int
drm_gtf2_m(struct edid *edid)
{
	u8 *r = NULL;
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
	return r ? (r[15] << 8) + r[14] : 0;
}

static int
drm_gtf2_k(struct edid *edid)
{
	u8 *r = NULL;
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
	return r ? r[16] : 0;
}

static int
drm_gtf2_2j(struct edid *edid)
{
	u8 *r = NULL;
	drm_for_each_detailed_block((u8 *)edid, find_gtf2, &r);
	return r ? r[17] : 0;
}

/**
 * standard_timing_level - get std. timing level(CVT/GTF/DMT)
 * @edid: EDID block to scan
 */
static int standard_timing_level(struct edid *edid)
{
	if (edid->revision >= 2) {
		if (edid->revision >= 4 && (edid->features & DRM_EDID_FEATURE_DEFAULT_GTF))
			return LEVEL_CVT;
		if (drm_gtf2_hbreak(edid))
			return LEVEL_GTF2;
		return LEVEL_GTF;
	}
	return LEVEL_DMT;
}

/*
 * 0 is reserved.  The spec says 0x01 fill for unused timings.  Some old
 * monitors fill with ascii space (0x20) instead.
 */
static int
bad_std_timing(u8 a, u8 b)
{
	return (a == 0x00 && b == 0x00) ||
	       (a == 0x01 && b == 0x01) ||
	       (a == 0x20 && b == 0x20);
}

/**
 * drm_mode_std - convert standard mode info (width, height, refresh) into mode
 * @t: standard timing params
 * @timing_level: standard timing level
 *
 * Take the standard timing params (in this case width, aspect, and refresh)
 * and convert them into a real mode using CVT/GTF/DMT.
 */
static struct drm_display_mode *
drm_mode_std(struct drm_connector *connector, struct edid *edid,
	     struct std_timing *t, int revision)
{
	struct drm_device *dev = connector->dev;
	struct drm_display_mode *m, *mode = NULL;
	int hsize, vsize;
	int vrefresh_rate;
	unsigned aspect_ratio = (t->vfreq_aspect & EDID_TIMING_ASPECT_MASK)
		>> EDID_TIMING_ASPECT_SHIFT;
	unsigned vfreq = (t->vfreq_aspect & EDID_TIMING_VFREQ_MASK)
		>> EDID_TIMING_VFREQ_SHIFT;
	int timing_level = standard_timing_level(edid);

	if (bad_std_timing(t->hsize, t->vfreq_aspect))
		return NULL;

	/* According to the EDID spec, the hdisplay = hsize * 8 + 248 */
	hsize = t->hsize * 8 + 248;
	/* vrefresh_rate = vfreq + 60 */
	vrefresh_rate = vfreq + 60;
	/* the vdisplay is calculated based on the aspect ratio */
	if (aspect_ratio == 0) {
		if (revision < 3)
			vsize = hsize;
		else
			vsize = (hsize * 10) / 16;
	} else if (aspect_ratio == 1)
		vsize = (hsize * 3) / 4;
	else if (aspect_ratio == 2)
		vsize = (hsize * 4) / 5;
	else
		vsize = (hsize * 9) / 16;

	/* HDTV hack, part 1 */
	if (vrefresh_rate == 60 &&
	    ((hsize == 1360 && vsize == 765) ||
	     (hsize == 1368 && vsize == 769))) {
		hsize = 1366;
		vsize = 768;
	}

	/*
	 * If this connector already has a mode for this size and refresh
	 * rate (because it came from detailed or CVT info), use that
	 * instead.  This way we don't have to guess at interlace or
	 * reduced blanking.
	 */
	list_for_each_entry(m, &connector->probed_modes, head)
		if (m->hdisplay == hsize && m->vdisplay == vsize &&
		    drm_mode_vrefresh(m) == vrefresh_rate)
			return NULL;

	/* HDTV hack, part 2 */
	if (hsize == 1366 && vsize == 768 && vrefresh_rate == 60) {
		mode = drm_cvt_mode(dev, 1366, 768, vrefresh_rate, 0, 0,
				    false);
		mode->hdisplay = 1366;
		mode->hsync_start = mode->hsync_start - 1;
		mode->hsync_end = mode->hsync_end - 1;
		return mode;
	}

	/* check whether it can be found in default mode table */
	mode = drm_mode_find_dmt(dev, hsize, vsize, vrefresh_rate);
	if (mode)
		return mode;

	switch (timing_level) {
	case LEVEL_DMT:
		break;
	case LEVEL_GTF:
		mode = drm_gtf_mode(dev, hsize, vsize, vrefresh_rate, 0, 0);
		break;
	case LEVEL_GTF2:
		/*
		 * This is potentially wrong if there's ever a monitor with
		 * more than one ranges section, each claiming a different
		 * secondary GTF curve.  Please don't do that.
		 */
		mode = drm_gtf_mode(dev, hsize, vsize, vrefresh_rate, 0, 0);
		if (drm_mode_hsync(mode) > drm_gtf2_hbreak(edid)) {
			kfree(mode);
			mode = drm_gtf_mode_complex(dev, hsize, vsize,
						    vrefresh_rate, 0, 0,
						    drm_gtf2_m(edid),
						    drm_gtf2_2c(edid),
						    drm_gtf2_k(edid),
						    drm_gtf2_2j(edid));
		}
		break;
	case LEVEL_CVT:
		mode = drm_cvt_mode(dev, hsize, vsize, vrefresh_rate, 0, 0,
				    false);
		break;
	}
	return mode;
}
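
/*
 * Worked example for the decode above: a standard timing of 0xa9 0x40
 * gives hsize = 0xa9 * 8 + 248 = 1600, aspect code 1 (4:3) so
 * vsize = 1600 * 3 / 4 = 1200, and vfreq bits of 0 so 60 Hz, i.e.
 * 1600x1200@60, which is then either found in the DMT table or
 * synthesized via GTF/GTF2/CVT depending on timing_level.
 */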

/*
 * EDID is delightfully ambiguous about how interlaced modes are to be
 * encoded.  Our internal representation is of frame height, but some
 * HDTV detailed timings are encoded as field height.
 *
 * The format list here is from CEA, in frame size.  Technically we
 * should be checking refresh rate too.  Whatever.
 */
static void
drm_mode_do_interlace_quirk(struct drm_display_mode *mode,
			    struct detailed_pixel_timing *pt)
{
	int i;
	static const struct {
		int w, h;
	} cea_interlaced[] = {
		{ 1920, 1080 },
		{  720,  480 },
		{ 1440,  480 },
		{ 2880,  480 },
		{  720,  576 },
		{ 1440,  576 },
		{ 2880,  576 },
	};

	if (!(pt->misc & DRM_EDID_PT_INTERLACED))
		return;

	for (i = 0; i < ARRAY_SIZE(cea_interlaced); i++) {
		if ((mode->hdisplay == cea_interlaced[i].w) &&
		    (mode->vdisplay == cea_interlaced[i].h / 2)) {
			mode->vdisplay *= 2;
			mode->vsync_start *= 2;
			mode->vsync_end *= 2;
			mode->vtotal *= 2;
			mode->vtotal |= 1;
		}
	}

	mode->flags |= DRM_MODE_FLAG_INTERLACE;
}

/**
 * drm_mode_detailed - create a new mode from an EDID detailed timing section
 * @dev: DRM device (needed to create new mode)
 * @edid: EDID block
 * @timing: EDID detailed timing info
 * @quirks: quirks to apply
 *
 * An EDID detailed timing block contains enough info for us to create and
 * return a new struct drm_display_mode.
 */
static struct drm_display_mode *drm_mode_detailed(struct drm_device *dev,
						  struct edid *edid,
						  struct detailed_timing *timing,
						  u32 quirks)
{
	struct drm_display_mode *mode;
	struct detailed_pixel_timing *pt = &timing->data.pixel_data;
	unsigned hactive = (pt->hactive_hblank_hi & 0xf0) << 4 | pt->hactive_lo;
	unsigned vactive = (pt->vactive_vblank_hi & 0xf0) << 4 | pt->vactive_lo;
	unsigned hblank = (pt->hactive_hblank_hi & 0xf) << 8 | pt->hblank_lo;
	unsigned vblank = (pt->vactive_vblank_hi & 0xf) << 8 | pt->vblank_lo;
	unsigned hsync_offset = (pt->hsync_vsync_offset_pulse_width_hi & 0xc0) << 2 | pt->hsync_offset_lo;
	unsigned hsync_pulse_width = (pt->hsync_vsync_offset_pulse_width_hi & 0x30) << 4 | pt->hsync_pulse_width_lo;
	unsigned vsync_offset = (pt->hsync_vsync_offset_pulse_width_hi & 0xc) >> 2 | pt->vsync_offset_pulse_width_lo >> 4;
	unsigned vsync_pulse_width = (pt->hsync_vsync_offset_pulse_width_hi & 0x3) << 4 | (pt->vsync_offset_pulse_width_lo & 0xf);

	/* ignore tiny modes */
	if (hactive < 64 || vactive < 64)
		return NULL;

	if (pt->misc & DRM_EDID_PT_STEREO) {
		printk(KERN_WARNING "stereo mode not supported\n");
		return NULL;
	}
	if (!(pt->misc & DRM_EDID_PT_SEPARATE_SYNC)) {
		printk(KERN_WARNING "composite sync not supported\n");
	}

	/* it is incorrect if hsync/vsync width is zero */
	if (!hsync_pulse_width || !vsync_pulse_width) {
		DRM_DEBUG_KMS("Incorrect Detailed timing. "
				"Wrong Hsync/Vsync pulse width\n");
		return NULL;
	}
	mode = drm_mode_create(dev);
	if (!mode)
		return NULL;

	mode->type = DRM_MODE_TYPE_DRIVER;

	if (quirks & EDID_QUIRK_135_CLOCK_TOO_HIGH)
		timing->pixel_clock = cpu_to_le16(1088);

	mode->clock = le16_to_cpu(timing->pixel_clock) * 10;

	mode->hdisplay = hactive;
	mode->hsync_start = mode->hdisplay + hsync_offset;
	mode->hsync_end = mode->hsync_start + hsync_pulse_width;
	mode->htotal = mode->hdisplay + hblank;

	mode->vdisplay = vactive;
	mode->vsync_start = mode->vdisplay + vsync_offset;
	mode->vsync_end = mode->vsync_start + vsync_pulse_width;
	mode->vtotal = mode->vdisplay + vblank;

	/* Some EDIDs have bogus h/vtotal values */
	if (mode->hsync_end > mode->htotal)
		mode->htotal = mode->hsync_end + 1;
	if (mode->vsync_end > mode->vtotal)
		mode->vtotal = mode->vsync_end + 1;

	drm_mode_do_interlace_quirk(mode, pt);

	drm_mode_set_name(mode);

	if (quirks & EDID_QUIRK_DETAILED_SYNC_PP) {
		pt->misc |= DRM_EDID_PT_HSYNC_POSITIVE | DRM_EDID_PT_VSYNC_POSITIVE;
	}

	mode->flags |= (pt->misc & DRM_EDID_PT_HSYNC_POSITIVE) ?
		DRM_MODE_FLAG_PHSYNC : DRM_MODE_FLAG_NHSYNC;
	mode->flags |= (pt->misc & DRM_EDID_PT_VSYNC_POSITIVE) ?
		DRM_MODE_FLAG_PVSYNC : DRM_MODE_FLAG_NVSYNC;

	mode->width_mm = pt->width_mm_lo | (pt->width_height_mm_hi & 0xf0) << 4;
	mode->height_mm = pt->height_mm_lo | (pt->width_height_mm_hi & 0xf) << 8;

	if (quirks & EDID_QUIRK_DETAILED_IN_CM) {
		mode->width_mm *= 10;
		mode->height_mm *= 10;
	}

	if (quirks & EDID_QUIRK_DETAILED_USE_MAXIMUM_SIZE) {
		mode->width_mm = edid->width_cm * 10;
		mode->height_mm = edid->height_cm * 10;
	}

	return mode;
}

static bool
mode_is_rb(const struct drm_display_mode *mode)
{
	return (mode->htotal - mode->hdisplay == 160) &&
	       (mode->hsync_end - mode->hdisplay == 80) &&
	       (mode->hsync_end - mode->hsync_start == 32) &&
	       (mode->vsync_start - mode->vdisplay == 3);
}

static bool
mode_in_hsync_range(const struct drm_display_mode *mode,
		    struct edid *edid, u8 *t)
{
	int hsync, hmin, hmax;

	hmin = t[7];
	if (edid->revision >= 4)
	    hmin += ((t[4] & 0x04) ? 255 : 0);
	hmax = t[8];
	if (edid->revision >= 4)
	    hmax += ((t[4] & 0x08) ? 255 : 0);
	hsync = drm_mode_hsync(mode);

	return (hsync <= hmax && hsync >= hmin);
}

static bool
mode_in_vsync_range(const struct drm_display_mode *mode,
		    struct edid *edid, u8 *t)
{
	int vsync, vmin, vmax;

	vmin = t[5];
	if (edid->revision >= 4)
	    vmin += ((t[4] & 0x01) ? 255 : 0);
	vmax = t[6];
	if (edid->revision >= 4)
	    vmax += ((t[4] & 0x02) ? 255 : 0);
	vsync = drm_mode_vrefresh(mode);

	return (vsync <= vmax && vsync >= vmin);
}

static u32
range_pixel_clock(struct edid *edid, u8 *t)
{
	/* unspecified */
	if (t[9] == 0 || t[9] == 255)
		return 0;

	/* 1.4 with CVT support gives us real precision, yay */
	if (edid->revision >= 4 && t[10] == 0x04)
		return (t[9] * 10000) - ((t[12] >> 2) * 250);

	/* 1.3 is pathetic, so fuzz up a bit */
	return t[9] * 10000 + 5001;
}
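
/*
 * Example of the above: a range descriptor with t[9] = 0x11 encodes a
 * 17 x 10 MHz = 170 MHz limit.  On EDID 1.3 this returns 175001 kHz (the
 * nominal value plus ~5 MHz of fuzz); an EDID 1.4 CVT descriptor instead
 * refines the limit downward in 250 kHz steps taken from t[12].
 */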

static bool
mode_in_range(const struct drm_display_mode *mode, struct edid *edid,
	      struct detailed_timing *timing)
{
	u32 max_clock;
	u8 *t = (u8 *)timing;

	if (!mode_in_hsync_range(mode, edid, t))
		return false;

	if (!mode_in_vsync_range(mode, edid, t))
		return false;

	if ((max_clock = range_pixel_clock(edid, t)))
		if (mode->clock > max_clock)
			return false;

	/* 1.4 max horizontal check */
	if (edid->revision >= 4 && t[10] == 0x04)
		if (t[13] && mode->hdisplay > 8 * (t[13] + (256 * (t[12]&0x3))))
			return false;

	if (mode_is_rb(mode) && !drm_monitor_supports_rb(edid))
		return false;

	return true;
}

/*
 * XXX If drm_dmt_modes ever regrows the CVT-R modes (and it will) this will
 * need to account for them.
 */
static int
drm_gtf_modes_for_range(struct drm_connector *connector, struct edid *edid,
				   struct detailed_timing *timing)
{
	int i, modes = 0;
	struct drm_display_mode *newmode;
	struct drm_device *dev = connector->dev;

	for (i = 0; i < drm_num_dmt_modes; i++) {
		if (mode_in_range(drm_dmt_modes + i, edid, timing)) {
			newmode = drm_mode_duplicate(dev, &drm_dmt_modes[i]);
			if (newmode) {
				drm_mode_probed_add(connector, newmode);
				modes++;
			}
		}
	}

	return modes;
}

static void
do_inferred_modes(struct detailed_timing *timing, void *c)
{
	struct detailed_mode_closure *closure = c;
	struct detailed_non_pixel *data = &timing->data.other_data;
	int gtf = (closure->edid->features & DRM_EDID_FEATURE_DEFAULT_GTF);

	if (gtf && data->type == EDID_DETAIL_MONITOR_RANGE)
		closure->modes += drm_gtf_modes_for_range(closure->connector,
							  closure->edid,
							  timing);
}

static int
add_inferred_modes(struct drm_connector *connector, struct edid *edid)
{
	struct detailed_mode_closure closure = {
		connector, edid, 0, 0, 0
	};

	if (version_greater(edid, 1, 0))
		drm_for_each_detailed_block((u8 *)edid, do_inferred_modes,
					    &closure);

	return closure.modes;
}

static int
drm_est3_modes(struct drm_connector *connector, struct detailed_timing *timing)
{
	int i, j, m, modes = 0;
	struct drm_display_mode *mode;
	u8 *est = ((u8 *)timing) + 5;

	for (i = 0; i < 6; i++) {
		for (j = 7; j > 0; j--) {
			m = (i * 8) + (7 - j);
			if (m >= ARRAY_SIZE(est3_modes))
				break;
			if (est[i] & (1 << j)) {
				mode = drm_mode_find_dmt(connector->dev,
							 est3_modes[m].w,
							 est3_modes[m].h,
							 est3_modes[m].r
							 /*, est3_modes[m].rb */);
				if (mode) {
					drm_mode_probed_add(connector, mode);
					modes++;
				}
			}
		}
	}

	return modes;
}

static void
do_established_modes(struct detailed_timing *timing, void *c)
{
	struct detailed_mode_closure *closure = c;
	struct detailed_non_pixel *data = &timing->data.other_data;

	if (data->type == EDID_DETAIL_EST_TIMINGS)
		closure->modes += drm_est3_modes(closure->connector, timing);
}

/**
 * add_established_modes - get est. modes from EDID and add them
 * @edid: EDID block to scan
 *
 * Each EDID block contains a bitmap of the supported "established modes" list
 * (defined above).  Tease them out and add them to the global modes list.
 */
static int
add_established_modes(struct drm_connector *connector, struct edid *edid)
{
	struct drm_device *dev = connector->dev;
	unsigned long est_bits = edid->established_timings.t1 |
		(edid->established_timings.t2 << 8) |
		((edid->established_timings.mfg_rsvd & 0x80) << 9);
	int i, modes = 0;
	struct detailed_mode_closure closure = {
		connector, edid, 0, 0, 0
	};

	for (i = 0; i <= EDID_EST_TIMINGS; i++) {
		if (est_bits & (1<<i)) {
			struct drm_display_mode *newmode;
			newmode = drm_mode_duplicate(dev, &edid_est_modes[i]);
			if (newmode) {
				drm_mode_probed_add(connector, newmode);
				modes++;
			}
		}
	}

	if (version_greater(edid, 1, 0))
		drm_for_each_detailed_block((u8 *)edid,
					    do_established_modes, &closure);

	return modes + closure.modes;
}

static void
do_standard_modes(struct detailed_timing *timing, void *c)
{
	struct detailed_mode_closure *closure = c;
	struct detailed_non_pixel *data = &timing->data.other_data;
	struct drm_connector *connector = closure->connector;
	struct edid *edid = closure->edid;

	if (data->type == EDID_DETAIL_STD_MODES) {
		int i;
		for (i = 0; i < 6; i++) {
			struct std_timing *std;
			struct drm_display_mode *newmode;

			std = &data->data.timings[i];
			newmode = drm_mode_std(connector, edid, std,
					       edid->revision);
			if (newmode) {
				drm_mode_probed_add(connector, newmode);
				closure->modes++;
			}
		}
	}
}

/**
 * add_standard_modes - get std. modes from EDID and add them
 * @edid: EDID block to scan
 *
 * Standard modes can be calculated using the appropriate standard (DMT,
 * GTF or CVT). Grab them from @edid and add them to the list.
 */
static int
add_standard_modes(struct drm_connector *connector, struct edid *edid)
{
	int i, modes = 0;
	struct detailed_mode_closure closure = {
		connector, edid, 0, 0, 0
	};

	for (i = 0; i < EDID_STD_TIMINGS; i++) {
		struct drm_display_mode *newmode;

		newmode = drm_mode_std(connector, edid,
				       &edid->standard_timings[i],
				       edid->revision);
		if (newmode) {
			drm_mode_probed_add(connector, newmode);
			modes++;
		}
	}

	if (version_greater(edid, 1, 0))
		drm_for_each_detailed_block((u8 *)edid, do_standard_modes,
					    &closure);

	/* XXX should also look for standard codes in VTB blocks */

	return modes + closure.modes;
}

static int drm_cvt_modes(struct drm_connector *connector,
			 struct detailed_timing *timing)
{
	int i, j, modes = 0;
	struct drm_display_mode *newmode;
	struct drm_device *dev = connector->dev;
	struct cvt_timing *cvt;
	const int rates[] = { 60, 85, 75, 60, 50 };
	const u8 empty[3] = { 0, 0, 0 };

	for (i = 0; i < 4; i++) {
		int uninitialized_var(width), height;
		cvt = &(timing->data.other_data.data.cvt[i]);

		if (!memcmp(cvt->code, empty, 3))
			continue;

		height = (cvt->code[0] + ((cvt->code[1] & 0xf0) << 4) + 1) * 2;
		switch (cvt->code[1] & 0x0c) {
		case 0x00:
			width = height * 4 / 3;
			break;
		case 0x04:
			width = height * 16 / 9;
			break;
		case 0x08:
			width = height * 16 / 10;
			break;
		case 0x0c:
			width = height * 15 / 9;
			break;
		}

		for (j = 1; j < 5; j++) {
			if (cvt->code[2] & (1 << j)) {
				newmode = drm_cvt_mode(dev, width, height,
						       rates[j], j == 0,
						       false, false);
				if (newmode) {
					drm_mode_probed_add(connector, newmode);
					modes++;
				}
			}
		}
	}

	return modes;
}

static void
do_cvt_mode(struct detailed_timing *timing, void *c)
{
	struct detailed_mode_closure *closure = c;
	struct detailed_non_pixel *data = &timing->data.other_data;

	if (data->type == EDID_DETAIL_CVT_3BYTE)
		closure->modes += drm_cvt_modes(closure->connector, timing);
}

static int
add_cvt_modes(struct drm_connector *connector, struct edid *edid)
{
	struct detailed_mode_closure closure = {
		connector, edid, 0, 0, 0
	};

	if (version_greater(edid, 1, 2))
		drm_for_each_detailed_block((u8 *)edid, do_cvt_mode, &closure);

	/* XXX should also look for CVT codes in VTB blocks */

	return closure.modes;
}

static void
do_detailed_mode(struct detailed_timing *timing, void *c)
{
	struct detailed_mode_closure *closure = c;
	struct drm_display_mode *newmode;

	if (timing->pixel_clock) {
		newmode = drm_mode_detailed(closure->connector->dev,
					    closure->edid, timing,
					    closure->quirks);
		if (!newmode)
			return;

		if (closure->preferred)
			newmode->type |= DRM_MODE_TYPE_PREFERRED;

		drm_mode_probed_add(closure->connector, newmode);
		closure->modes++;
		closure->preferred = 0;
	}
}

/*
 * add_detailed_modes - Add modes from detailed timings
 * @connector: attached connector
 * @edid: EDID block to scan
 * @quirks: quirks to apply
 */
static int
add_detailed_modes(struct drm_connector *connector, struct edid *edid,
		   u32 quirks)
{
	struct detailed_mode_closure closure = {
		connector,
		edid,
		1,
		quirks,
	};

	if (closure.preferred && !version_greater(edid, 1, 3))
		closure.preferred =
		    (edid->features & DRM_EDID_FEATURE_PREFERRED_TIMING);

	drm_for_each_detailed_block((u8 *)edid, do_detailed_mode, &closure);

	return closure.modes;
}

#define HDMI_IDENTIFIER 0x000C03
#define AUDIO_BLOCK	0x01
#define VENDOR_BLOCK    0x03
#define EDID_BASIC_AUDIO	(1 << 6)

/**
 * Search EDID for CEA extension block.
 */
u8 *drm_find_cea_extension(struct edid *edid)
{
	u8 *edid_ext = NULL;
	int i;

	/* No EDID or EDID extensions */
	if (edid == NULL || edid->extensions == 0)
		return NULL;

	/* Find CEA extension */
	for (i = 0; i < edid->extensions; i++) {
		edid_ext = (u8 *)edid + EDID_LENGTH * (i + 1);
		if (edid_ext[0] == CEA_EXT)
			break;
	}

	if (i == edid->extensions)
		return NULL;

	return edid_ext;
}
EXPORT_SYMBOL(drm_find_cea_extension);

/**
 * drm_detect_hdmi_monitor - detect whether monitor is hdmi.
 * @edid: monitor EDID information
 *
 * Parse the CEA extension according to CEA-861-B.
 * Return true if HDMI, false if not or unknown.
 */
bool drm_detect_hdmi_monitor(struct edid *edid)
{
	u8 *edid_ext;
	int i, hdmi_id;
	int start_offset, end_offset;
	bool is_hdmi = false;

	edid_ext = drm_find_cea_extension(edid);
	if (!edid_ext)
		goto end;

	/* Data block offset in CEA extension block */
	start_offset = 4;
	end_offset = edid_ext[2];

	/*
	 * Because HDMI identifier is in Vendor Specific Block,
	 * search it from all data blocks of CEA extension.
	 */
	for (i = start_offset; i < end_offset;
		/* Increased by data block len */
		i += ((edid_ext[i] & 0x1f) + 1)) {
		/* Find vendor specific block */
		if ((edid_ext[i] >> 5) == VENDOR_BLOCK) {
			hdmi_id = edid_ext[i + 1] | (edid_ext[i + 2] << 8) |
				  edid_ext[i + 3] << 16;
			/* Find HDMI identifier */
			if (hdmi_id == HDMI_IDENTIFIER)
				is_hdmi = true;
			break;
		}
	}

end:
	return is_hdmi;
}
EXPORT_SYMBOL(drm_detect_hdmi_monitor);
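
/*
 * The vendor-specific data block matched above stores the HDMI IEEE OUI
 * 0x000c03 least-significant byte first, so the raw bytes right after the
 * (tag | length) header are 0x03 0x0c 0x00, which the hdmi_id
 * reconstruction above reads back as 0x000c03.
 */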

/**
 * drm_detect_monitor_audio - check monitor audio capability
 *
 * Monitor should have CEA extension block.
 * If monitor has 'basic audio', but no CEA audio blocks, it's 'basic
 * audio' only. If there is any audio extension block and supported
 * audio format, assume at least 'basic audio' support, even if 'basic
 * audio' is not defined in EDID.
 *
 */
bool drm_detect_monitor_audio(struct edid *edid)
{
	u8 *edid_ext;
	int i, j;
	bool has_audio = false;
	int start_offset, end_offset;

	edid_ext = drm_find_cea_extension(edid);
	if (!edid_ext)
		goto end;

	has_audio = ((edid_ext[3] & EDID_BASIC_AUDIO) != 0);

	if (has_audio) {
		DRM_DEBUG_KMS("Monitor has basic audio support\n");
		goto end;
	}

	/* Data block offset in CEA extension block */
	start_offset = 4;
	end_offset = edid_ext[2];

	for (i = start_offset; i < end_offset;
			i += ((edid_ext[i] & 0x1f) + 1)) {
		if ((edid_ext[i] >> 5) == AUDIO_BLOCK) {
			has_audio = true;
			for (j = 1; j < (edid_ext[i] & 0x1f); j += 3)
				DRM_DEBUG_KMS("CEA audio format %d\n",
					      (edid_ext[i + j] >> 3) & 0xf);
			goto end;
		}
	}
end:
	return has_audio;
}
EXPORT_SYMBOL(drm_detect_monitor_audio);

/**
 * drm_add_display_info - pull display info out if present
 * @edid: EDID data
 * @info: display info (attached to connector)
 *
 * Grab any available display info and stuff it into the drm_display_info
 * structure that's part of the connector.  Useful for tracking bpp and
 * color spaces.
 */
static void drm_add_display_info(struct edid *edid,
				 struct drm_display_info *info)
{
	info->width_mm = edid->width_cm * 10;
	info->height_mm = edid->height_cm * 10;

	/* driver figures it out in this case */
	info->bpc = 0;
	info->color_formats = 0;

	/* Only defined for 1.4 with digital displays */
	if (edid->revision < 4)
		return;

	if (!(edid->input & DRM_EDID_INPUT_DIGITAL))
		return;

	switch (edid->input & DRM_EDID_DIGITAL_DEPTH_MASK) {
	case DRM_EDID_DIGITAL_DEPTH_6:
		info->bpc = 6;
		break;
	case DRM_EDID_DIGITAL_DEPTH_8:
		info->bpc = 8;
		break;
	case DRM_EDID_DIGITAL_DEPTH_10:
		info->bpc = 10;
		break;
	case DRM_EDID_DIGITAL_DEPTH_12:
		info->bpc = 12;
		break;
	case DRM_EDID_DIGITAL_DEPTH_14:
		info->bpc = 14;
		break;
	case DRM_EDID_DIGITAL_DEPTH_16:
		info->bpc = 16;
		break;
	case DRM_EDID_DIGITAL_DEPTH_UNDEF:
	default:
		info->bpc = 0;
		break;
	}

	info->color_formats = DRM_COLOR_FORMAT_RGB444;
	if (edid->features & DRM_EDID_FEATURE_RGB_YCRCB444)
		info->color_formats |= DRM_COLOR_FORMAT_YCRCB444;
	if (edid->features & DRM_EDID_FEATURE_RGB_YCRCB422)
		info->color_formats |= DRM_COLOR_FORMAT_YCRCB422;
}

/**
 * drm_add_edid_modes - add modes from EDID data, if available
 * @connector: connector we're probing
 * @edid: edid data
 *
 * Add the specified modes to the connector's mode list.
 *
 * Return number of modes added or 0 if we couldn't find any.
 */
int drm_add_edid_modes(struct drm_connector *connector, struct edid *edid)
{
	int num_modes = 0;
	u32 quirks;

	if (edid == NULL) {
		return 0;
	}
	if (!drm_edid_is_valid(edid)) {
		dev_warn(connector->dev->dev, "%s: EDID invalid.\n",
			 drm_get_connector_name(connector));
		return 0;
	}

	quirks = edid_get_quirks(edid);

	/*
	 * EDID spec says modes should be preferred in this order:
	 * - preferred detailed mode
	 * - other detailed modes from base block
	 * - detailed modes from extension blocks
	 * - CVT 3-byte code modes
	 * - standard timing codes
	 * - established timing codes
	 * - modes inferred from GTF or CVT range information
	 *
	 * We get this pretty much right.
	 *
	 * XXX order for additional mode types in extension blocks?
	 */
	num_modes += add_detailed_modes(connector, edid, quirks);
	num_modes += add_cvt_modes(connector, edid);
	num_modes += add_standard_modes(connector, edid);
	num_modes += add_established_modes(connector, edid);
	num_modes += add_inferred_modes(connector, edid);

	if (quirks & (EDID_QUIRK_PREFER_LARGE_60 | EDID_QUIRK_PREFER_LARGE_75))
		edid_fixup_preferred(connector, quirks);

	drm_add_display_info(edid, &connector->display_info);

	return num_modes;
}
EXPORT_SYMBOL(drm_add_edid_modes);
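
/*
 * Putting the pieces together, a connector's ->get_modes() hook typically
 * looks roughly like the sketch below (names other than the drm_* calls
 * are illustrative, not part of this file):
 *
 *	static int foo_get_modes(struct drm_connector *connector)
 *	{
 *		struct edid *edid;
 *		int count;
 *
 *		edid = drm_get_edid(connector, foo_ddc_adapter(connector));
 *		if (!edid)
 *			return drm_add_modes_noedid(connector, 1024, 768);
 *
 *		count = drm_add_edid_modes(connector, edid);
 *		kfree(edid);
 *		return count;
 *	}
 */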

/**
 * drm_add_modes_noedid - add modes for the connectors without EDID
 * @connector: connector we're probing
 * @hdisplay: the horizontal display limit
 * @vdisplay: the vertical display limit
 *
 * Add the specified modes to the connector's mode list. A mode is only
 * added when its hdisplay/vdisplay do not exceed the given limits.
 *
 * Return number of modes added or 0 if we couldn't find any.
 */
int drm_add_modes_noedid(struct drm_connector *connector,
			int hdisplay, int vdisplay)
{
	int i, count, num_modes = 0;
	struct drm_display_mode *mode;
	struct drm_device *dev = connector->dev;

	count = sizeof(drm_dmt_modes) / sizeof(struct drm_display_mode);
	if (hdisplay < 0)
		hdisplay = 0;
	if (vdisplay < 0)
		vdisplay = 0;

	for (i = 0; i < count; i++) {
		const struct drm_display_mode *ptr = &drm_dmt_modes[i];
		if (hdisplay && vdisplay) {
			/*
			 * Only when both limits are set are they used to
			 * decide whether the mode should be added to the
			 * connector's mode list.
			 */
			if (ptr->hdisplay > hdisplay ||
					ptr->vdisplay > vdisplay)
				continue;
		}
		if (drm_mode_vrefresh(ptr) > 61)
			continue;
		mode = drm_mode_duplicate(dev, ptr);
		if (mode) {
			drm_mode_probed_add(connector, mode);
			num_modes++;
		}
	}
	return num_modes;
}
EXPORT_SYMBOL(drm_add_modes_noedid);