/**************************************************************************
 *
 * Copyright 2006 Tungsten Graphics, Inc., Bismarck, ND., USA.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 *
 **************************************************************************/

/*
 * Generic simple memory manager implementation. Intended to be used as a base
 * class implementation for more advanced memory managers.
 *
 * Note that the algorithm used is quite simple and there might be substantial
 * performance gains if a smarter free list is implemented. Currently it is
 * just an unordered stack of free regions. This could easily be improved if
 * an RB-tree is used instead, at least if we expect heavy fragmentation.
 *
 * Aligned allocations can also see improvement.
 *
 * Authors:
 * Thomas Hellström
 */
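
/*
 * Illustrative usage sketch (hedged; the driver-side names vram_mm, my_bo,
 * vram_pages and npages below are hypothetical, not part of this file): a
 * driver embeds a struct drm_mm_node in each of its buffer objects, sets up
 * one drm_mm per address space, and inserts/removes nodes as objects come
 * and go:
 *
 *	struct drm_mm vram_mm;
 *	struct my_bo { struct drm_mm_node node; };
 *
 *	drm_mm_init(&vram_mm, 0, vram_pages);
 *	...
 *	ret = drm_mm_insert_node_generic(&vram_mm, &bo->node, npages,
 *					 0, 0, DRM_MM_SEARCH_DEFAULT);
 *	...
 *	drm_mm_remove_node(&bo->node);
 *	drm_mm_takedown(&vram_mm);
 */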

#include <drm/drmP.h>
#include <drm/drm_mm.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/export.h>

#define MM_UNUSED_TARGET 4

static struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
						unsigned long size,
						unsigned alignment,
						unsigned long color,
						enum drm_mm_search_flags flags);
static struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
						unsigned long size,
						unsigned alignment,
						unsigned long color,
						unsigned long start,
						unsigned long end,
						enum drm_mm_search_flags flags);

static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
				 struct drm_mm_node *node,
				 unsigned long size, unsigned alignment,
				 unsigned long color)
{
	struct drm_mm *mm = hole_node->mm;
	unsigned long hole_start = drm_mm_hole_node_start(hole_node);
	unsigned long hole_end = drm_mm_hole_node_end(hole_node);
	unsigned long adj_start = hole_start;
	unsigned long adj_end = hole_end;

	BUG_ON(node->allocated);

	if (mm->color_adjust)
		mm->color_adjust(hole_node, color, &adj_start, &adj_end);

	if (alignment) {
		unsigned tmp = adj_start % alignment;
		if (tmp)
			adj_start += alignment - tmp;
	}

	if (adj_start == hole_start) {
		hole_node->hole_follows = 0;
		list_del(&hole_node->hole_stack);
	}

	node->start = adj_start;
	node->size = size;
	node->mm = mm;
	node->color = color;
	node->allocated = 1;

	INIT_LIST_HEAD(&node->hole_stack);
	list_add(&node->node_list, &hole_node->node_list);

	BUG_ON(node->start + node->size > adj_end);

	node->hole_follows = 0;
	if (__drm_mm_hole_node_start(node) < hole_end) {
		list_add(&node->hole_stack, &mm->hole_stack);
		node->hole_follows = 1;
	}
}

int drm_mm_reserve_node(struct drm_mm *mm, struct drm_mm_node *node)
{
	struct drm_mm_node *hole;
	unsigned long end = node->start + node->size;
	unsigned long hole_start;
	unsigned long hole_end;

	BUG_ON(node == NULL);

	/* Find the relevant hole to add our node to */
	drm_mm_for_each_hole(hole, mm, hole_start, hole_end) {
		if (hole_start > node->start || hole_end < end)
			continue;

		node->mm = mm;
		node->allocated = 1;

		INIT_LIST_HEAD(&node->hole_stack);
		list_add(&node->node_list, &hole->node_list);

		if (node->start == hole_start) {
			hole->hole_follows = 0;
			list_del_init(&hole->hole_stack);
		}

		node->hole_follows = 0;
		if (end != hole_end) {
			list_add(&node->hole_stack, &mm->hole_stack);
			node->hole_follows = 1;
		}

		return 0;
	}

	WARN(1, "no hole found for node 0x%lx + 0x%lx\n",
	     node->start, node->size);
	return -ENOSPC;
}
EXPORT_SYMBOL(drm_mm_reserve_node);
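
/*
 * A minimal sketch of using drm_mm_reserve_node() (the stolen_pages range is
 * hypothetical): pre-insert a node describing a region that is already in
 * use, e.g. memory claimed by firmware, so later searches never hand it out:
 *
 *	struct drm_mm_node stolen = { .start = 0, .size = stolen_pages };
 *
 *	ret = drm_mm_reserve_node(&vram_mm, &stolen);
 *	if (ret == -ENOSPC)
 *		... the range is not fully covered by one free hole ...
 */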

/**
 * Search for free space and insert a preallocated memory node. Returns
 * -ENOSPC if no suitable free area is available. The preallocated memory node
 * must be cleared.
 */
int drm_mm_insert_node_generic(struct drm_mm *mm, struct drm_mm_node *node,
			       unsigned long size, unsigned alignment,
			       unsigned long color,
			       enum drm_mm_search_flags flags)
{
	struct drm_mm_node *hole_node;

	hole_node = drm_mm_search_free_generic(mm, size, alignment,
					       color, flags);
	if (!hole_node)
		return -ENOSPC;

	drm_mm_insert_helper(hole_node, node, size, alignment, color);
	return 0;
}
EXPORT_SYMBOL(drm_mm_insert_node_generic);
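
/*
 * Example call (an illustrative sketch, not taken from a real driver): ask
 * for a 16 unit block aligned to 4 units with no caching "color", preferring
 * the smallest hole that still fits:
 *
 *	ret = drm_mm_insert_node_generic(&vram_mm, &bo->node,
 *					 16, 4, 0, DRM_MM_SEARCH_BEST);
 *	if (ret)
 *		return ret;	... -ENOSPC: no hole was large enough ...
 */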

static void drm_mm_insert_helper_range(struct drm_mm_node *hole_node,
				       struct drm_mm_node *node,
				       unsigned long size, unsigned alignment,
				       unsigned long color,
				       unsigned long start, unsigned long end)
{
	struct drm_mm *mm = hole_node->mm;
	unsigned long hole_start = drm_mm_hole_node_start(hole_node);
	unsigned long hole_end = drm_mm_hole_node_end(hole_node);
	unsigned long adj_start = hole_start;
	unsigned long adj_end = hole_end;

	BUG_ON(!hole_node->hole_follows || node->allocated);

	if (adj_start < start)
		adj_start = start;
	if (adj_end > end)
		adj_end = end;

	if (mm->color_adjust)
		mm->color_adjust(hole_node, color, &adj_start, &adj_end);

	if (alignment) {
		unsigned tmp = adj_start % alignment;
		if (tmp)
			adj_start += alignment - tmp;
	}

	if (adj_start == hole_start) {
		hole_node->hole_follows = 0;
		list_del(&hole_node->hole_stack);
	}

	node->start = adj_start;
	node->size = size;
	node->mm = mm;
	node->color = color;
	node->allocated = 1;

	INIT_LIST_HEAD(&node->hole_stack);
	list_add(&node->node_list, &hole_node->node_list);

	BUG_ON(node->start + node->size > adj_end);
	BUG_ON(node->start + node->size > end);

	node->hole_follows = 0;
	if (__drm_mm_hole_node_start(node) < hole_end) {
		list_add(&node->hole_stack, &mm->hole_stack);
		node->hole_follows = 1;
	}
}

/**
 * Search for free space and insert a preallocated memory node. Returns
 * -ENOSPC if no suitable free area is available. This is for range
 * restricted allocations. The preallocated memory node must be cleared.
 */
int drm_mm_insert_node_in_range_generic(struct drm_mm *mm, struct drm_mm_node *node,
					unsigned long size, unsigned alignment, unsigned long color,
					unsigned long start, unsigned long end,
					enum drm_mm_search_flags flags)
{
	struct drm_mm_node *hole_node;

	hole_node = drm_mm_search_free_in_range_generic(mm,
							size, alignment, color,
							start, end, flags);
	if (!hole_node)
		return -ENOSPC;

	drm_mm_insert_helper_range(hole_node, node,
				   size, alignment, color,
				   start, end);
	return 0;
}
EXPORT_SYMBOL(drm_mm_insert_node_in_range_generic);
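
/*
 * Range-restricted sketch (same hypothetical names as above): the identical
 * request, but constrained to the first 256 units of the address space, for
 * instance to satisfy a hardware addressing limit:
 *
 *	ret = drm_mm_insert_node_in_range_generic(&vram_mm, &bo->node,
 *						  16, 4, 0, 0, 256,
 *						  DRM_MM_SEARCH_DEFAULT);
 */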

/**
 * Remove a memory node from the allocator.
 */
void drm_mm_remove_node(struct drm_mm_node *node)
{
	struct drm_mm *mm = node->mm;
	struct drm_mm_node *prev_node;

	if (WARN_ON(!node->allocated))
		return;

	BUG_ON(node->scanned_block || node->scanned_prev_free
				   || node->scanned_next_free);

	prev_node =
	    list_entry(node->node_list.prev, struct drm_mm_node, node_list);

	if (node->hole_follows) {
		BUG_ON(__drm_mm_hole_node_start(node) ==
		       __drm_mm_hole_node_end(node));
		list_del(&node->hole_stack);
	} else
		BUG_ON(__drm_mm_hole_node_start(node) !=
		       __drm_mm_hole_node_end(node));

	if (!prev_node->hole_follows) {
		prev_node->hole_follows = 1;
		list_add(&prev_node->hole_stack, &mm->hole_stack);
	} else
		list_move(&prev_node->hole_stack, &mm->hole_stack);

	list_del(&node->node_list);
	node->allocated = 0;
}
EXPORT_SYMBOL(drm_mm_remove_node);

static int check_free_hole(unsigned long start, unsigned long end,
			   unsigned long size, unsigned alignment)
{
	if (end - start < size)
		return 0;

	if (alignment) {
		unsigned tmp = start % alignment;
		if (tmp)
			start += alignment - tmp;
	}

	return end >= start + size;
}
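
/*
 * Worked example of the alignment fixup above: with start = 0x1003,
 * end = 0x2000, size = 0x800 and alignment = 0x100, tmp = 0x1003 % 0x100 = 3,
 * so start is rounded up to 0x1100. Since 0x2000 >= 0x1100 + 0x800 = 0x1900,
 * the hole is still big enough and the function returns 1.
 */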

static struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
						unsigned long size,
						unsigned alignment,
						unsigned long color,
						enum drm_mm_search_flags flags)
{
	struct drm_mm_node *entry;
	struct drm_mm_node *best;
	unsigned long adj_start;
	unsigned long adj_end;
	unsigned long best_size;

	BUG_ON(mm->scanned_blocks);

	best = NULL;
	best_size = ~0UL;

	drm_mm_for_each_hole(entry, mm, adj_start, adj_end) {
		if (mm->color_adjust) {
			mm->color_adjust(entry, color, &adj_start, &adj_end);
			if (adj_end <= adj_start)
				continue;
		}

		if (!check_free_hole(adj_start, adj_end, size, alignment))
			continue;

		if (!(flags & DRM_MM_SEARCH_BEST))
			return entry;

		if (entry->size < best_size) {
			best = entry;
			best_size = entry->size;
		}
	}

	return best;
}

static struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
						unsigned long size,
						unsigned alignment,
						unsigned long color,
						unsigned long start,
						unsigned long end,
						enum drm_mm_search_flags flags)
{
	struct drm_mm_node *entry;
	struct drm_mm_node *best;
	unsigned long adj_start;
	unsigned long adj_end;
	unsigned long best_size;

	BUG_ON(mm->scanned_blocks);

	best = NULL;
	best_size = ~0UL;

	drm_mm_for_each_hole(entry, mm, adj_start, adj_end) {
		if (adj_start < start)
			adj_start = start;
		if (adj_end > end)
			adj_end = end;

		if (mm->color_adjust) {
			mm->color_adjust(entry, color, &adj_start, &adj_end);
			if (adj_end <= adj_start)
				continue;
		}

		if (!check_free_hole(adj_start, adj_end, size, alignment))
			continue;

		if (!(flags & DRM_MM_SEARCH_BEST))
			return entry;

		if (entry->size < best_size) {
			best = entry;
			best_size = entry->size;
		}
	}

	return best;
}

/**
 * Moves an allocation. To be used with embedded struct drm_mm_node.
 */
void drm_mm_replace_node(struct drm_mm_node *old, struct drm_mm_node *new)
{
	list_replace(&old->node_list, &new->node_list);
	list_replace(&old->hole_stack, &new->hole_stack);
	new->hole_follows = old->hole_follows;
	new->mm = old->mm;
	new->start = old->start;
	new->size = old->size;
	new->color = old->color;

	old->allocated = 0;
	new->allocated = 1;
}
EXPORT_SYMBOL(drm_mm_replace_node);

/**
 * Initialize LRU scanning.
 *
 * This simply sets up the scanning routines with the parameters for the desired
 * hole.
 *
 * Warning: As long as the scan list is non-empty, no other operations than
 * adding/removing nodes to/from the scan list are allowed.
 */
void drm_mm_init_scan(struct drm_mm *mm,
		      unsigned long size,
		      unsigned alignment,
		      unsigned long color)
{
	mm->scan_color = color;
	mm->scan_alignment = alignment;
	mm->scan_size = size;
	mm->scanned_blocks = 0;
	mm->scan_hit_start = 0;
	mm->scan_hit_end = 0;
	mm->scan_check_range = 0;
	mm->prev_scanned_node = NULL;
}
EXPORT_SYMBOL(drm_mm_init_scan);

/**
 * Initialize LRU scanning.
 *
 * This simply sets up the scanning routines with the parameters for the desired
 * hole. This version is for range-restricted scans.
 *
 * Warning: As long as the scan list is non-empty, no other operations than
 * adding/removing nodes to/from the scan list are allowed.
 */
void drm_mm_init_scan_with_range(struct drm_mm *mm,
				 unsigned long size,
				 unsigned alignment,
				 unsigned long color,
				 unsigned long start,
				 unsigned long end)
{
	mm->scan_color = color;
	mm->scan_alignment = alignment;
	mm->scan_size = size;
	mm->scanned_blocks = 0;
	mm->scan_hit_start = 0;
	mm->scan_hit_end = 0;
	mm->scan_start = start;
	mm->scan_end = end;
	mm->scan_check_range = 1;
	mm->prev_scanned_node = NULL;
}
EXPORT_SYMBOL(drm_mm_init_scan_with_range);

/**
 * Add a node to the scan list that might be freed to make space for the desired
 * hole.
 *
 * Returns non-zero if a hole has been found, zero otherwise.
 */
int drm_mm_scan_add_block(struct drm_mm_node *node)
{
	struct drm_mm *mm = node->mm;
	struct drm_mm_node *prev_node;
	unsigned long hole_start, hole_end;
	unsigned long adj_start, adj_end;

	mm->scanned_blocks++;

	BUG_ON(node->scanned_block);
	node->scanned_block = 1;

	prev_node = list_entry(node->node_list.prev, struct drm_mm_node,
			       node_list);

	node->scanned_preceeds_hole = prev_node->hole_follows;
	prev_node->hole_follows = 1;
	list_del(&node->node_list);
	node->node_list.prev = &prev_node->node_list;
	node->node_list.next = &mm->prev_scanned_node->node_list;
	mm->prev_scanned_node = node;

	adj_start = hole_start = drm_mm_hole_node_start(prev_node);
	adj_end = hole_end = drm_mm_hole_node_end(prev_node);

	if (mm->scan_check_range) {
		if (adj_start < mm->scan_start)
			adj_start = mm->scan_start;
		if (adj_end > mm->scan_end)
			adj_end = mm->scan_end;
	}

	if (mm->color_adjust)
		mm->color_adjust(prev_node, mm->scan_color,
				 &adj_start, &adj_end);

	if (check_free_hole(adj_start, adj_end,
			    mm->scan_size, mm->scan_alignment)) {
		mm->scan_hit_start = hole_start;
		mm->scan_hit_end = hole_end;
		return 1;
	}

	return 0;
}
EXPORT_SYMBOL(drm_mm_scan_add_block);

/**
 * Remove a node from the scan list.
 *
 * Nodes _must_ be removed from the scan list in the exact same order in which
 * they have been added, otherwise the internal state of the memory manager
 * will be corrupted.
 *
 * When the scan list is empty, the selected memory nodes can be freed. An
 * immediately following drm_mm_search_free with !DRM_MM_SEARCH_BEST will then
 * return the just freed block (because it's at the top of the free_stack list).
 *
 * Returns one if this block should be evicted, zero otherwise. Will always
 * return zero when no hole has been found.
 */
int drm_mm_scan_remove_block(struct drm_mm_node *node)
{
	struct drm_mm *mm = node->mm;
	struct drm_mm_node *prev_node;

	mm->scanned_blocks--;

	BUG_ON(!node->scanned_block);
	node->scanned_block = 0;

	prev_node = list_entry(node->node_list.prev, struct drm_mm_node,
			       node_list);

	prev_node->hole_follows = node->scanned_preceeds_hole;
	list_add(&node->node_list, &prev_node->node_list);

	return (drm_mm_hole_node_end(node) > mm->scan_hit_start &&
		node->start < mm->scan_hit_end);
}
EXPORT_SYMBOL(drm_mm_scan_remove_block);
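
/*
 * The three scan functions above are meant to be driven by an eviction loop
 * roughly like the sketch below (bo, lru, scan_list and evict_list are
 * hypothetical driver-side objects and lists; locking and error handling
 * are omitted):
 *
 *	drm_mm_init_scan(&vram_mm, size, alignment, 0);
 *
 *	list_for_each_entry(bo, &lru, lru_link) {
 *		list_add(&bo->scan_link, &scan_list);
 *		if (drm_mm_scan_add_block(&bo->node))
 *			break;		... a big enough hole was found ...
 *	}
 *
 *	Blocks were prepended to scan_list, so walking it front to back
 *	visits them in reverse order of addition, as required. Every block
 *	for which drm_mm_scan_remove_block() returns 1 overlaps the found
 *	hole and has to be evicted; the rest stay where they are:
 *
 *	list_for_each_entry_safe(bo, tmp, &scan_list, scan_link) {
 *		list_del(&bo->scan_link);
 *		if (drm_mm_scan_remove_block(&bo->node))
 *			list_add(&bo->evict_link, &evict_list);
 *	}
 */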

int drm_mm_clean(struct drm_mm *mm)
{
	struct list_head *head = &mm->head_node.node_list;

	return (head->next->next == head);
}
EXPORT_SYMBOL(drm_mm_clean);

void drm_mm_init(struct drm_mm *mm, unsigned long start, unsigned long size)
{
	INIT_LIST_HEAD(&mm->hole_stack);
	mm->scanned_blocks = 0;

	/* Clever trick to avoid a special case in the free hole tracking. */
	INIT_LIST_HEAD(&mm->head_node.node_list);
	INIT_LIST_HEAD(&mm->head_node.hole_stack);
	mm->head_node.hole_follows = 1;
	mm->head_node.scanned_block = 0;
	mm->head_node.scanned_prev_free = 0;
	mm->head_node.scanned_next_free = 0;
	mm->head_node.mm = mm;
	mm->head_node.start = start + size;
	mm->head_node.size = start - mm->head_node.start;
	list_add_tail(&mm->head_node.hole_stack, &mm->hole_stack);

	mm->color_adjust = NULL;
}
EXPORT_SYMBOL(drm_mm_init);
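
/*
 * Note on the sentinel arithmetic above: head_node.start is placed at
 * start + size, and head_node.size is the (unsigned, wrapping) negation of
 * size, so head_node.start + head_node.size wraps around to 'start'. The
 * single initial hole [start, start + size) therefore shows up as the hole
 * that follows the head node, and the hole-tracking code needs no special
 * case for an empty manager.
 */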

void drm_mm_takedown(struct drm_mm *mm)
{
	WARN(!list_empty(&mm->head_node.node_list),
	     "Memory manager not clean during takedown.\n");
}
EXPORT_SYMBOL(drm_mm_takedown);

static unsigned long drm_mm_debug_hole(struct drm_mm_node *entry,
				       const char *prefix)
{
	unsigned long hole_start, hole_end, hole_size;

	if (entry->hole_follows) {
		hole_start = drm_mm_hole_node_start(entry);
		hole_end = drm_mm_hole_node_end(entry);
		hole_size = hole_end - hole_start;
		printk(KERN_DEBUG "%s 0x%08lx-0x%08lx: %8lu: free\n",
			prefix, hole_start, hole_end,
			hole_size);
		return hole_size;
	}

	return 0;
}

void drm_mm_debug_table(struct drm_mm *mm, const char *prefix)
{
	struct drm_mm_node *entry;
	unsigned long total_used = 0, total_free = 0, total = 0;

	total_free += drm_mm_debug_hole(&mm->head_node, prefix);

	drm_mm_for_each_node(entry, mm) {
		printk(KERN_DEBUG "%s 0x%08lx-0x%08lx: %8lu: used\n",
			prefix, entry->start, entry->start + entry->size,
			entry->size);
		total_used += entry->size;
		total_free += drm_mm_debug_hole(entry, prefix);
	}
	total = total_free + total_used;

	printk(KERN_DEBUG "%s total: %lu, used %lu free %lu\n", prefix, total,
		total_used, total_free);
}
EXPORT_SYMBOL(drm_mm_debug_table);

#if defined(CONFIG_DEBUG_FS)
static unsigned long drm_mm_dump_hole(struct seq_file *m, struct drm_mm_node *entry)
{
	unsigned long hole_start, hole_end, hole_size;

	if (entry->hole_follows) {
		hole_start = drm_mm_hole_node_start(entry);
		hole_end = drm_mm_hole_node_end(entry);
		hole_size = hole_end - hole_start;
		seq_printf(m, "0x%08lx-0x%08lx: 0x%08lx: free\n",
				hole_start, hole_end, hole_size);
		return hole_size;
	}

	return 0;
}

int drm_mm_dump_table(struct seq_file *m, struct drm_mm *mm)
{
	struct drm_mm_node *entry;
	unsigned long total_used = 0, total_free = 0, total = 0;

	total_free += drm_mm_dump_hole(m, &mm->head_node);

	drm_mm_for_each_node(entry, mm) {
		seq_printf(m, "0x%08lx-0x%08lx: 0x%08lx: used\n",
				entry->start, entry->start + entry->size,
				entry->size);
		total_used += entry->size;
		total_free += drm_mm_dump_hole(m, entry);
	}
	total = total_free + total_used;

	seq_printf(m, "total: %lu, used %lu free %lu\n", total, total_used, total_free);
	return 0;
}
EXPORT_SYMBOL(drm_mm_dump_table);
#endif