/**************************************************************************
 *
 * Copyright 2006-2008 Tungsten Graphics, Inc., Cedar Park, TX. USA.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 *
 **************************************************************************/
/*
 * Authors:
 * Thomas Hellstrom
 */
32 | 32 | ||
33 | #ifndef _DRM_MM_H_ |
33 | #ifndef _DRM_MM_H_ |
34 | #define _DRM_MM_H_ |
34 | #define _DRM_MM_H_ |
35 | 35 | ||
36 | /* |
36 | /* |
37 | * Generic range manager structs |
37 | * Generic range manager structs |
38 | */ |
38 | */ |
39 | #include |
39 | #include |
40 | #include |
40 | #include |
41 | #include |
41 | #include |
42 | #include |
42 | #include |
43 | #ifdef CONFIG_DEBUG_FS |
43 | #ifdef CONFIG_DEBUG_FS |
44 | #include |
44 | #include |
45 | #endif |
45 | #endif |
46 | 46 | ||
/* Strategy hints for the free-hole search in the insert helpers. */
enum drm_mm_search_flags {
	DRM_MM_SEARCH_DEFAULT =	0,	/* first hole that fits */
	DRM_MM_SEARCH_BEST =	1 << 0,	/* smallest hole that fits */
	DRM_MM_SEARCH_BELOW =	1 << 1,	/* walk holes from the top down */
};

/* Placement hints for filling the hole picked by the search pass. */
enum drm_mm_allocator_flags {
	DRM_MM_CREATE_DEFAULT = 0,	/* allocate at the bottom of the hole */
	DRM_MM_CREATE_TOP = 1 << 0,	/* allocate at the top of the hole */
};

/* Convenience pairs expanding to "sflags, aflags" arguments for the
 * *_generic() insert functions. */
#define DRM_MM_BOTTOMUP DRM_MM_SEARCH_DEFAULT, DRM_MM_CREATE_DEFAULT
#define DRM_MM_TOPDOWN DRM_MM_SEARCH_BELOW, DRM_MM_CREATE_TOP

61 | struct drm_mm_node { |
61 | struct drm_mm_node { |
62 | struct list_head node_list; |
62 | struct list_head node_list; |
63 | struct list_head hole_stack; |
63 | struct list_head hole_stack; |
64 | unsigned hole_follows : 1; |
64 | unsigned hole_follows : 1; |
65 | unsigned scanned_block : 1; |
65 | unsigned scanned_block : 1; |
66 | unsigned scanned_prev_free : 1; |
66 | unsigned scanned_prev_free : 1; |
67 | unsigned scanned_next_free : 1; |
67 | unsigned scanned_next_free : 1; |
68 | unsigned scanned_preceeds_hole : 1; |
68 | unsigned scanned_preceeds_hole : 1; |
69 | unsigned allocated : 1; |
69 | unsigned allocated : 1; |
70 | unsigned long color; |
70 | unsigned long color; |
71 | unsigned long start; |
71 | u64 start; |
72 | unsigned long size; |
72 | u64 size; |
73 | struct drm_mm *mm; |
73 | struct drm_mm *mm; |
74 | }; |
74 | }; |
75 | 75 | ||
76 | struct drm_mm { |
76 | struct drm_mm { |
77 | /* List of all memory nodes that immediately precede a free hole. */ |
77 | /* List of all memory nodes that immediately precede a free hole. */ |
78 | struct list_head hole_stack; |
78 | struct list_head hole_stack; |
79 | /* head_node.node_list is the list of all memory nodes, ordered |
79 | /* head_node.node_list is the list of all memory nodes, ordered |
80 | * according to the (increasing) start address of the memory node. */ |
80 | * according to the (increasing) start address of the memory node. */ |
81 | struct drm_mm_node head_node; |
81 | struct drm_mm_node head_node; |
82 | unsigned int scan_check_range : 1; |
82 | unsigned int scan_check_range : 1; |
83 | unsigned scan_alignment; |
83 | unsigned scan_alignment; |
84 | unsigned long scan_color; |
84 | unsigned long scan_color; |
85 | unsigned long scan_size; |
85 | u64 scan_size; |
86 | unsigned long scan_hit_start; |
86 | u64 scan_hit_start; |
87 | unsigned long scan_hit_end; |
87 | u64 scan_hit_end; |
88 | unsigned scanned_blocks; |
88 | unsigned scanned_blocks; |
89 | unsigned long scan_start; |
89 | u64 scan_start; |
90 | unsigned long scan_end; |
90 | u64 scan_end; |
91 | struct drm_mm_node *prev_scanned_node; |
91 | struct drm_mm_node *prev_scanned_node; |
92 | 92 | ||
93 | void (*color_adjust)(struct drm_mm_node *node, unsigned long color, |
93 | void (*color_adjust)(struct drm_mm_node *node, unsigned long color, |
94 | unsigned long *start, unsigned long *end); |
94 | u64 *start, u64 *end); |
95 | }; |
95 | }; |
96 | 96 | ||
97 | /** |
97 | /** |
98 | * drm_mm_node_allocated - checks whether a node is allocated |
98 | * drm_mm_node_allocated - checks whether a node is allocated |
99 | * @node: drm_mm_node to check |
99 | * @node: drm_mm_node to check |
100 | * |
100 | * |
101 | * Drivers should use this helpers for proper encapusulation of drm_mm |
101 | * Drivers should use this helpers for proper encapusulation of drm_mm |
102 | * internals. |
102 | * internals. |
103 | * |
103 | * |
104 | * Returns: |
104 | * Returns: |
105 | * True if the @node is allocated. |
105 | * True if the @node is allocated. |
106 | */ |
106 | */ |
107 | static inline bool drm_mm_node_allocated(struct drm_mm_node *node) |
107 | static inline bool drm_mm_node_allocated(struct drm_mm_node *node) |
108 | { |
108 | { |
109 | return node->allocated; |
109 | return node->allocated; |
110 | } |
110 | } |
111 | 111 | ||
112 | /** |
112 | /** |
113 | * drm_mm_initialized - checks whether an allocator is initialized |
113 | * drm_mm_initialized - checks whether an allocator is initialized |
114 | * @mm: drm_mm to check |
114 | * @mm: drm_mm to check |
115 | * |
115 | * |
116 | * Drivers should use this helpers for proper encapusulation of drm_mm |
116 | * Drivers should use this helpers for proper encapusulation of drm_mm |
117 | * internals. |
117 | * internals. |
118 | * |
118 | * |
119 | * Returns: |
119 | * Returns: |
120 | * True if the @mm is initialized. |
120 | * True if the @mm is initialized. |
121 | */ |
121 | */ |
122 | static inline bool drm_mm_initialized(struct drm_mm *mm) |
122 | static inline bool drm_mm_initialized(struct drm_mm *mm) |
123 | { |
123 | { |
124 | return mm->hole_stack.next; |
124 | return mm->hole_stack.next; |
125 | } |
125 | } |
126 | 126 | ||
127 | static inline unsigned long __drm_mm_hole_node_start(struct drm_mm_node *hole_node) |
127 | static inline u64 __drm_mm_hole_node_start(struct drm_mm_node *hole_node) |
128 | { |
128 | { |
129 | return hole_node->start + hole_node->size; |
129 | return hole_node->start + hole_node->size; |
130 | } |
130 | } |
131 | 131 | ||
132 | /** |
132 | /** |
133 | * drm_mm_hole_node_start - computes the start of the hole following @node |
133 | * drm_mm_hole_node_start - computes the start of the hole following @node |
134 | * @hole_node: drm_mm_node which implicitly tracks the following hole |
134 | * @hole_node: drm_mm_node which implicitly tracks the following hole |
135 | * |
135 | * |
136 | * This is useful for driver-sepific debug dumpers. Otherwise drivers should not |
136 | * This is useful for driver-sepific debug dumpers. Otherwise drivers should not |
137 | * inspect holes themselves. Drivers must check first whether a hole indeed |
137 | * inspect holes themselves. Drivers must check first whether a hole indeed |
138 | * follows by looking at node->hole_follows. |
138 | * follows by looking at node->hole_follows. |
139 | * |
139 | * |
140 | * Returns: |
140 | * Returns: |
141 | * Start of the subsequent hole. |
141 | * Start of the subsequent hole. |
142 | */ |
142 | */ |
143 | static inline unsigned long drm_mm_hole_node_start(struct drm_mm_node *hole_node) |
143 | static inline u64 drm_mm_hole_node_start(struct drm_mm_node *hole_node) |
144 | { |
144 | { |
145 | BUG_ON(!hole_node->hole_follows); |
145 | BUG_ON(!hole_node->hole_follows); |
146 | return __drm_mm_hole_node_start(hole_node); |
146 | return __drm_mm_hole_node_start(hole_node); |
147 | } |
147 | } |
148 | 148 | ||
149 | static inline unsigned long __drm_mm_hole_node_end(struct drm_mm_node *hole_node) |
149 | static inline u64 __drm_mm_hole_node_end(struct drm_mm_node *hole_node) |
150 | { |
150 | { |
151 | return list_entry(hole_node->node_list.next, |
151 | return list_entry(hole_node->node_list.next, |
152 | struct drm_mm_node, node_list)->start; |
152 | struct drm_mm_node, node_list)->start; |
153 | } |
153 | } |
154 | 154 | ||
155 | /** |
155 | /** |
156 | * drm_mm_hole_node_end - computes the end of the hole following @node |
156 | * drm_mm_hole_node_end - computes the end of the hole following @node |
157 | * @hole_node: drm_mm_node which implicitly tracks the following hole |
157 | * @hole_node: drm_mm_node which implicitly tracks the following hole |
158 | * |
158 | * |
159 | * This is useful for driver-sepific debug dumpers. Otherwise drivers should not |
159 | * This is useful for driver-sepific debug dumpers. Otherwise drivers should not |
160 | * inspect holes themselves. Drivers must check first whether a hole indeed |
160 | * inspect holes themselves. Drivers must check first whether a hole indeed |
161 | * follows by looking at node->hole_follows. |
161 | * follows by looking at node->hole_follows. |
162 | * |
162 | * |
163 | * Returns: |
163 | * Returns: |
164 | * End of the subsequent hole. |
164 | * End of the subsequent hole. |
165 | */ |
165 | */ |
166 | static inline unsigned long drm_mm_hole_node_end(struct drm_mm_node *hole_node) |
166 | static inline u64 drm_mm_hole_node_end(struct drm_mm_node *hole_node) |
167 | { |
167 | { |
168 | return __drm_mm_hole_node_end(hole_node); |
168 | return __drm_mm_hole_node_end(hole_node); |
169 | } |
169 | } |
170 | 170 | ||
/**
 * drm_mm_for_each_node - iterator to walk over all allocated nodes
 * @entry: drm_mm_node structure to assign to in each iteration step
 * @mm: drm_mm allocator to walk
 *
 * This iterator walks over all nodes in the range allocator. It is implemented
 * with list_for_each, so it is not safe against removal of elements.
 */
#define drm_mm_for_each_node(entry, mm) list_for_each_entry(entry, \
						&(mm)->head_node.node_list, \
						node_list)

/**
 * drm_mm_for_each_hole - iterator to walk over all holes
 * @entry: drm_mm_node used internally to track progress
 * @mm: drm_mm allocator to walk
 * @hole_start: u64 variable to assign the hole start to on each iteration
 * @hole_end: u64 variable to assign the hole end to on each iteration
 *
 * This iterator walks over all holes in the range allocator. It is implemented
 * with list_for_each, so it is not safe against removal of elements. @entry is
 * used internally and will not reflect a real drm_mm_node for the very first
 * hole. Hence users of this iterator may not access it.
 *
 * Implementation Note:
 * We need to inline list_for_each_entry in order to be able to set hole_start
 * and hole_end on each iteration while keeping the macro sane.
 *
 * The __drm_mm_for_each_hole version is similar, but with added support for
 * going backwards.
 */
#define drm_mm_for_each_hole(entry, mm, hole_start, hole_end) \
	for (entry = list_entry((mm)->hole_stack.next, struct drm_mm_node, hole_stack); \
	     &entry->hole_stack != &(mm)->hole_stack ? \
	     hole_start = drm_mm_hole_node_start(entry), \
	     hole_end = drm_mm_hole_node_end(entry), \
	     1 : 0; \
	     entry = list_entry(entry->hole_stack.next, struct drm_mm_node, hole_stack))

#define __drm_mm_for_each_hole(entry, mm, hole_start, hole_end, backwards) \
	for (entry = list_entry((backwards) ? (mm)->hole_stack.prev : (mm)->hole_stack.next, struct drm_mm_node, hole_stack); \
	     &entry->hole_stack != &(mm)->hole_stack ? \
	     hole_start = drm_mm_hole_node_start(entry), \
	     hole_end = drm_mm_hole_node_end(entry), \
	     1 : 0; \
	     entry = list_entry((backwards) ? entry->hole_stack.prev : entry->hole_stack.next, struct drm_mm_node, hole_stack))

218 | /* |
218 | /* |
219 | * Basic range manager support (drm_mm.c) |
219 | * Basic range manager support (drm_mm.c) |
220 | */ |
220 | */ |
221 | int drm_mm_reserve_node(struct drm_mm *mm, struct drm_mm_node *node); |
221 | int drm_mm_reserve_node(struct drm_mm *mm, struct drm_mm_node *node); |
222 | 222 | ||
223 | int drm_mm_insert_node_generic(struct drm_mm *mm, |
223 | int drm_mm_insert_node_generic(struct drm_mm *mm, |
224 | struct drm_mm_node *node, |
224 | struct drm_mm_node *node, |
225 | unsigned long size, |
225 | u64 size, |
226 | unsigned alignment, |
226 | unsigned alignment, |
227 | unsigned long color, |
227 | unsigned long color, |
228 | enum drm_mm_search_flags sflags, |
228 | enum drm_mm_search_flags sflags, |
229 | enum drm_mm_allocator_flags aflags); |
229 | enum drm_mm_allocator_flags aflags); |
230 | /** |
230 | /** |
231 | * drm_mm_insert_node - search for space and insert @node |
231 | * drm_mm_insert_node - search for space and insert @node |
232 | * @mm: drm_mm to allocate from |
232 | * @mm: drm_mm to allocate from |
233 | * @node: preallocate node to insert |
233 | * @node: preallocate node to insert |
234 | * @size: size of the allocation |
234 | * @size: size of the allocation |
235 | * @alignment: alignment of the allocation |
235 | * @alignment: alignment of the allocation |
236 | * @flags: flags to fine-tune the allocation |
236 | * @flags: flags to fine-tune the allocation |
237 | * |
237 | * |
238 | * This is a simplified version of drm_mm_insert_node_generic() with @color set |
238 | * This is a simplified version of drm_mm_insert_node_generic() with @color set |
239 | * to 0. |
239 | * to 0. |
240 | * |
240 | * |
241 | * The preallocated node must be cleared to 0. |
241 | * The preallocated node must be cleared to 0. |
242 | * |
242 | * |
243 | * Returns: |
243 | * Returns: |
244 | * 0 on success, -ENOSPC if there's no suitable hole. |
244 | * 0 on success, -ENOSPC if there's no suitable hole. |
245 | */ |
245 | */ |
246 | static inline int drm_mm_insert_node(struct drm_mm *mm, |
246 | static inline int drm_mm_insert_node(struct drm_mm *mm, |
247 | struct drm_mm_node *node, |
247 | struct drm_mm_node *node, |
248 | unsigned long size, |
248 | u64 size, |
249 | unsigned alignment, |
249 | unsigned alignment, |
250 | enum drm_mm_search_flags flags) |
250 | enum drm_mm_search_flags flags) |
251 | { |
251 | { |
252 | return drm_mm_insert_node_generic(mm, node, size, alignment, 0, flags, |
252 | return drm_mm_insert_node_generic(mm, node, size, alignment, 0, flags, |
253 | DRM_MM_CREATE_DEFAULT); |
253 | DRM_MM_CREATE_DEFAULT); |
254 | } |
254 | } |
255 | 255 | ||
256 | int drm_mm_insert_node_in_range_generic(struct drm_mm *mm, |
256 | int drm_mm_insert_node_in_range_generic(struct drm_mm *mm, |
257 | struct drm_mm_node *node, |
257 | struct drm_mm_node *node, |
258 | unsigned long size, |
258 | u64 size, |
259 | unsigned alignment, |
259 | unsigned alignment, |
260 | unsigned long color, |
260 | unsigned long color, |
261 | unsigned long start, |
261 | u64 start, |
262 | unsigned long end, |
262 | u64 end, |
263 | enum drm_mm_search_flags sflags, |
263 | enum drm_mm_search_flags sflags, |
264 | enum drm_mm_allocator_flags aflags); |
264 | enum drm_mm_allocator_flags aflags); |
265 | /** |
265 | /** |
266 | * drm_mm_insert_node_in_range - ranged search for space and insert @node |
266 | * drm_mm_insert_node_in_range - ranged search for space and insert @node |
267 | * @mm: drm_mm to allocate from |
267 | * @mm: drm_mm to allocate from |
268 | * @node: preallocate node to insert |
268 | * @node: preallocate node to insert |
269 | * @size: size of the allocation |
269 | * @size: size of the allocation |
270 | * @alignment: alignment of the allocation |
270 | * @alignment: alignment of the allocation |
271 | * @start: start of the allowed range for this node |
271 | * @start: start of the allowed range for this node |
272 | * @end: end of the allowed range for this node |
272 | * @end: end of the allowed range for this node |
273 | * @flags: flags to fine-tune the allocation |
273 | * @flags: flags to fine-tune the allocation |
274 | * |
274 | * |
275 | * This is a simplified version of drm_mm_insert_node_in_range_generic() with |
275 | * This is a simplified version of drm_mm_insert_node_in_range_generic() with |
276 | * @color set to 0. |
276 | * @color set to 0. |
277 | * |
277 | * |
278 | * The preallocated node must be cleared to 0. |
278 | * The preallocated node must be cleared to 0. |
279 | * |
279 | * |
280 | * Returns: |
280 | * Returns: |
281 | * 0 on success, -ENOSPC if there's no suitable hole. |
281 | * 0 on success, -ENOSPC if there's no suitable hole. |
282 | */ |
282 | */ |
283 | static inline int drm_mm_insert_node_in_range(struct drm_mm *mm, |
283 | static inline int drm_mm_insert_node_in_range(struct drm_mm *mm, |
284 | struct drm_mm_node *node, |
284 | struct drm_mm_node *node, |
285 | unsigned long size, |
285 | u64 size, |
286 | unsigned alignment, |
286 | unsigned alignment, |
287 | unsigned long start, |
287 | u64 start, |
288 | unsigned long end, |
288 | u64 end, |
289 | enum drm_mm_search_flags flags) |
289 | enum drm_mm_search_flags flags) |
290 | { |
290 | { |
291 | return drm_mm_insert_node_in_range_generic(mm, node, size, alignment, |
291 | return drm_mm_insert_node_in_range_generic(mm, node, size, alignment, |
292 | 0, start, end, flags, |
292 | 0, start, end, flags, |
293 | DRM_MM_CREATE_DEFAULT); |
293 | DRM_MM_CREATE_DEFAULT); |
294 | } |
294 | } |
295 | 295 | ||
296 | void drm_mm_remove_node(struct drm_mm_node *node); |
296 | void drm_mm_remove_node(struct drm_mm_node *node); |
297 | void drm_mm_replace_node(struct drm_mm_node *old, struct drm_mm_node *new); |
297 | void drm_mm_replace_node(struct drm_mm_node *old, struct drm_mm_node *new); |
298 | void drm_mm_init(struct drm_mm *mm, |
298 | void drm_mm_init(struct drm_mm *mm, |
299 | unsigned long start, |
299 | u64 start, |
300 | unsigned long size); |
300 | u64 size); |
301 | void drm_mm_takedown(struct drm_mm *mm); |
301 | void drm_mm_takedown(struct drm_mm *mm); |
302 | bool drm_mm_clean(struct drm_mm *mm); |
302 | bool drm_mm_clean(struct drm_mm *mm); |
303 | 303 | ||
304 | void drm_mm_init_scan(struct drm_mm *mm, |
304 | void drm_mm_init_scan(struct drm_mm *mm, |
305 | unsigned long size, |
305 | u64 size, |
306 | unsigned alignment, |
306 | unsigned alignment, |
307 | unsigned long color); |
307 | unsigned long color); |
308 | void drm_mm_init_scan_with_range(struct drm_mm *mm, |
308 | void drm_mm_init_scan_with_range(struct drm_mm *mm, |
309 | unsigned long size, |
309 | u64 size, |
310 | unsigned alignment, |
310 | unsigned alignment, |
311 | unsigned long color, |
311 | unsigned long color, |
312 | unsigned long start, |
312 | u64 start, |
313 | unsigned long end); |
313 | u64 end); |
314 | bool drm_mm_scan_add_block(struct drm_mm_node *node); |
314 | bool drm_mm_scan_add_block(struct drm_mm_node *node); |
315 | bool drm_mm_scan_remove_block(struct drm_mm_node *node); |
315 | bool drm_mm_scan_remove_block(struct drm_mm_node *node); |
316 | 316 | ||
317 | void drm_mm_debug_table(struct drm_mm *mm, const char *prefix); |
317 | void drm_mm_debug_table(struct drm_mm *mm, const char *prefix); |
318 | #ifdef CONFIG_DEBUG_FS |
318 | #ifdef CONFIG_DEBUG_FS |
319 | int drm_mm_dump_table(struct seq_file *m, struct drm_mm *mm); |
319 | int drm_mm_dump_table(struct seq_file *m, struct drm_mm *mm); |
320 | #endif |
320 | #endif |
321 | 321 | ||
322 | #endif><>><>><> |
322 | #endif><>><>><> |