/**************************************************************************
2 1.1 riastrad *
3 1.1 riastrad * Copyright 2006 Tungsten Graphics, Inc., Bismarck, ND., USA.
4 1.1 riastrad * All Rights Reserved.
5 1.1 riastrad *
6 1.1 riastrad * Permission is hereby granted, free of charge, to any person obtaining a
7 1.1 riastrad * copy of this software and associated documentation files (the
8 1.1 riastrad * "Software"), to deal in the Software without restriction, including
9 1.1 riastrad * without limitation the rights to use, copy, modify, merge, publish,
10 1.1 riastrad * distribute, sub license, and/or sell copies of the Software, and to
11 1.1 riastrad * permit persons to whom the Software is furnished to do so, subject to
12 1.1 riastrad * the following conditions:
13 1.1 riastrad *
14 1.1 riastrad * The above copyright notice and this permission notice (including the
15 1.1 riastrad * next paragraph) shall be included in all copies or substantial portions
16 1.1 riastrad * of the Software.
17 1.1 riastrad *
18 1.1 riastrad * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 1.1 riastrad * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 1.1 riastrad * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
21 1.1 riastrad * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
22 1.1 riastrad * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
23 1.1 riastrad * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
24 1.1 riastrad * USE OR OTHER DEALINGS IN THE SOFTWARE.
25 1.1 riastrad *
26 1.1 riastrad *
27 1.1 riastrad **************************************************************************/
28 1.1 riastrad
29 1.1 riastrad /*
30 1.1 riastrad * Generic simple memory manager implementation. Intended to be used as a base
31 1.1 riastrad * class implementation for more advanced memory managers.
32 1.1 riastrad *
33 1.1 riastrad * Note that the algorithm used is quite simple and there might be substantial
34 1.1 riastrad * performance gains if a smarter free list is implemented. Currently it is just an
35 1.1 riastrad * unordered stack of free regions. This could easily be improved if an RB-tree
36 1.1 riastrad * is used instead. At least if we expect heavy fragmentation.
37 1.1 riastrad *
38 1.1 riastrad * Aligned allocations can also see improvement.
39 1.1 riastrad *
40 1.1 riastrad * Authors:
 * Thomas Hellström <thomas-at-tungstengraphics-dot-com>
42 1.1 riastrad */
43 1.1 riastrad
44 1.1 riastrad #include <drm/drmP.h>
45 1.1 riastrad #include <drm/drm_mm.h>
46 1.1 riastrad #include <linux/slab.h>
47 1.1 riastrad #include <linux/seq_file.h>
48 1.1 riastrad #include <linux/export.h>
49 1.2 riastrad #include <linux/printk.h>
50 1.2 riastrad #include <asm/bug.h>
51 1.1 riastrad
52 1.1 riastrad #define MM_UNUSED_TARGET 4
53 1.1 riastrad
54 1.1 riastrad static struct drm_mm_node *drm_mm_kmalloc(struct drm_mm *mm, int atomic)
55 1.1 riastrad {
56 1.1 riastrad struct drm_mm_node *child;
57 1.1 riastrad
58 1.1 riastrad if (atomic)
59 1.1 riastrad child = kzalloc(sizeof(*child), GFP_ATOMIC);
60 1.1 riastrad else
61 1.1 riastrad child = kzalloc(sizeof(*child), GFP_KERNEL);
62 1.1 riastrad
63 1.1 riastrad if (unlikely(child == NULL)) {
64 1.1 riastrad spin_lock(&mm->unused_lock);
65 1.1 riastrad if (list_empty(&mm->unused_nodes))
66 1.1 riastrad child = NULL;
67 1.1 riastrad else {
68 1.1 riastrad child =
69 1.1 riastrad list_entry(mm->unused_nodes.next,
70 1.1 riastrad struct drm_mm_node, node_list);
71 1.1 riastrad list_del(&child->node_list);
72 1.1 riastrad --mm->num_unused;
73 1.1 riastrad }
74 1.1 riastrad spin_unlock(&mm->unused_lock);
75 1.1 riastrad }
76 1.1 riastrad return child;
77 1.1 riastrad }
78 1.1 riastrad
/* drm_mm_pre_get() - pre-allocate drm_mm_node structures
 * @mm: memory manager struct we are pre-allocating for
 *
 * Tops up the manager's cache of unused nodes to MM_UNUSED_TARGET so that
 * later atomic allocations (drm_mm_kmalloc() with atomic != 0) have a
 * fallback when GFP_ATOMIC allocation fails.
 *
 * Returns 0 on success or -ENOMEM if allocation fails.
 */
int drm_mm_pre_get(struct drm_mm *mm)
{
	struct drm_mm_node *node;

	spin_lock(&mm->unused_lock);
	while (mm->num_unused < MM_UNUSED_TARGET) {
		/* Drop the lock across the possibly-sleeping allocation. */
		spin_unlock(&mm->unused_lock);
		node = kzalloc(sizeof(*node), GFP_KERNEL);
		spin_lock(&mm->unused_lock);

		if (unlikely(node == NULL)) {
			/* A couple of cached nodes is still acceptable. */
			int ret = (mm->num_unused < 2) ? -ENOMEM : 0;
			spin_unlock(&mm->unused_lock);
			return ret;
		}
		++mm->num_unused;
		list_add_tail(&node->node_list, &mm->unused_nodes);
	}
	spin_unlock(&mm->unused_lock);
	return 0;
}
EXPORT_SYMBOL(drm_mm_pre_get);
106 1.1 riastrad
/* Start of the hole that follows @hole_node: just past the node itself. */
static inline unsigned long drm_mm_hole_node_start(struct drm_mm_node *hole_node)
{
	return hole_node->start + hole_node->size;
}
111 1.1 riastrad
/*
 * End of the hole that follows @hole_node: the start of the next node in
 * the address-ordered node_list.
 */
static inline unsigned long drm_mm_hole_node_end(struct drm_mm_node *hole_node)
{
	struct drm_mm_node *next_node =
	    list_entry(hole_node->node_list.next, struct drm_mm_node,
		node_list);

	return next_node->start;
}
120 1.1 riastrad
/*
 * Carve @node (of @size, with @alignment and @color) out of the hole that
 * follows @hole_node, updating the hole bookkeeping.  @hole_node must have
 * a hole following it and @node must not already be allocated.
 */
static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
				 struct drm_mm_node *node,
				 unsigned long size, unsigned alignment,
				 unsigned long color)
{
	struct drm_mm *mm = hole_node->mm;
	unsigned long hole_start = drm_mm_hole_node_start(hole_node);
	unsigned long hole_end = drm_mm_hole_node_end(hole_node);
	unsigned long adj_start = hole_start;
	unsigned long adj_end = hole_end;

	BUG_ON(!hole_node->hole_follows || node->allocated);

	/* Let the driver shrink the usable range for this color, if any. */
	if (mm->color_adjust)
		mm->color_adjust(hole_node, color, &adj_start, &adj_end);

	/* Round the start up to the requested alignment. */
	if (alignment) {
		unsigned tmp = adj_start % alignment;
		if (tmp)
			adj_start += alignment - tmp;
	}

	/* The allocation begins exactly at the hole: the hole is consumed. */
	if (adj_start == hole_start) {
		hole_node->hole_follows = 0;
		list_del(&hole_node->hole_stack);
	}

	node->start = adj_start;
	node->size = size;
	node->mm = mm;
	node->color = color;
	node->allocated = 1;

	/* Link the node into the address-ordered list after @hole_node. */
	INIT_LIST_HEAD(&node->hole_stack);
	list_add(&node->node_list, &hole_node->node_list);

	BUG_ON(node->start + node->size > adj_end);

	/* Any leftover space after the node becomes a new hole. */
	node->hole_follows = 0;
	if (node->start + node->size < hole_end) {
		list_add(&node->hole_stack, &mm->hole_stack);
		node->hole_follows = 1;
	}
}
165 1.1 riastrad
166 1.1 riastrad struct drm_mm_node *drm_mm_get_block_generic(struct drm_mm_node *hole_node,
167 1.1 riastrad unsigned long size,
168 1.1 riastrad unsigned alignment,
169 1.1 riastrad unsigned long color,
170 1.1 riastrad int atomic)
171 1.1 riastrad {
172 1.1 riastrad struct drm_mm_node *node;
173 1.1 riastrad
174 1.1 riastrad node = drm_mm_kmalloc(hole_node->mm, atomic);
175 1.1 riastrad if (unlikely(node == NULL))
176 1.1 riastrad return NULL;
177 1.1 riastrad
178 1.1 riastrad drm_mm_insert_helper(hole_node, node, size, alignment, color);
179 1.1 riastrad
180 1.1 riastrad return node;
181 1.1 riastrad }
182 1.1 riastrad EXPORT_SYMBOL(drm_mm_get_block_generic);
183 1.1 riastrad
184 1.1 riastrad /**
185 1.1 riastrad * Search for free space and insert a preallocated memory node. Returns
186 1.1 riastrad * -ENOSPC if no suitable free area is available. The preallocated memory node
187 1.1 riastrad * must be cleared.
188 1.1 riastrad */
189 1.1 riastrad int drm_mm_insert_node_generic(struct drm_mm *mm, struct drm_mm_node *node,
190 1.1 riastrad unsigned long size, unsigned alignment,
191 1.1 riastrad unsigned long color)
192 1.1 riastrad {
193 1.1 riastrad struct drm_mm_node *hole_node;
194 1.1 riastrad
195 1.1 riastrad hole_node = drm_mm_search_free_generic(mm, size, alignment,
196 1.1 riastrad color, 0);
197 1.1 riastrad if (!hole_node)
198 1.1 riastrad return -ENOSPC;
199 1.1 riastrad
200 1.1 riastrad drm_mm_insert_helper(hole_node, node, size, alignment, color);
201 1.1 riastrad return 0;
202 1.1 riastrad }
203 1.1 riastrad EXPORT_SYMBOL(drm_mm_insert_node_generic);
204 1.1 riastrad
/* Convenience wrapper around drm_mm_insert_node_generic() with color 0. */
int drm_mm_insert_node(struct drm_mm *mm, struct drm_mm_node *node,
		       unsigned long size, unsigned alignment)
{
	return drm_mm_insert_node_generic(mm, node, size, alignment, 0);
}
EXPORT_SYMBOL(drm_mm_insert_node);
211 1.1 riastrad
/*
 * Range-restricted variant of drm_mm_insert_helper(): carve @node out of
 * the hole following @hole_node, but keep the allocation inside
 * [@start, @end).
 */
static void drm_mm_insert_helper_range(struct drm_mm_node *hole_node,
				       struct drm_mm_node *node,
				       unsigned long size, unsigned alignment,
				       unsigned long color,
				       unsigned long start, unsigned long end)
{
	struct drm_mm *mm = hole_node->mm;
	unsigned long hole_start = drm_mm_hole_node_start(hole_node);
	unsigned long hole_end = drm_mm_hole_node_end(hole_node);
	unsigned long adj_start = hole_start;
	unsigned long adj_end = hole_end;

	BUG_ON(!hole_node->hole_follows || node->allocated);

	/* Clamp the hole to the requested range. */
	if (adj_start < start)
		adj_start = start;
	if (adj_end > end)
		adj_end = end;

	/* Let the driver shrink the usable range for this color, if any. */
	if (mm->color_adjust)
		mm->color_adjust(hole_node, color, &adj_start, &adj_end);

	/* Round the start up to the requested alignment. */
	if (alignment) {
		unsigned tmp = adj_start % alignment;
		if (tmp)
			adj_start += alignment - tmp;
	}

	/* The allocation begins exactly at the hole: the hole is consumed. */
	if (adj_start == hole_start) {
		hole_node->hole_follows = 0;
		list_del(&hole_node->hole_stack);
	}

	node->start = adj_start;
	node->size = size;
	node->mm = mm;
	node->color = color;
	node->allocated = 1;

	/* Link the node into the address-ordered list after @hole_node. */
	INIT_LIST_HEAD(&node->hole_stack);
	list_add(&node->node_list, &hole_node->node_list);

	BUG_ON(node->start + node->size > adj_end);
	BUG_ON(node->start + node->size > end);

	/* Any leftover space after the node becomes a new hole. */
	node->hole_follows = 0;
	if (node->start + node->size < hole_end) {
		list_add(&node->hole_stack, &mm->hole_stack);
		node->hole_follows = 1;
	}
}
263 1.1 riastrad
264 1.1 riastrad struct drm_mm_node *drm_mm_get_block_range_generic(struct drm_mm_node *hole_node,
265 1.1 riastrad unsigned long size,
266 1.1 riastrad unsigned alignment,
267 1.1 riastrad unsigned long color,
268 1.1 riastrad unsigned long start,
269 1.1 riastrad unsigned long end,
270 1.1 riastrad int atomic)
271 1.1 riastrad {
272 1.1 riastrad struct drm_mm_node *node;
273 1.1 riastrad
274 1.1 riastrad node = drm_mm_kmalloc(hole_node->mm, atomic);
275 1.1 riastrad if (unlikely(node == NULL))
276 1.1 riastrad return NULL;
277 1.1 riastrad
278 1.1 riastrad drm_mm_insert_helper_range(hole_node, node, size, alignment, color,
279 1.1 riastrad start, end);
280 1.1 riastrad
281 1.1 riastrad return node;
282 1.1 riastrad }
283 1.1 riastrad EXPORT_SYMBOL(drm_mm_get_block_range_generic);
284 1.1 riastrad
285 1.1 riastrad /**
286 1.1 riastrad * Search for free space and insert a preallocated memory node. Returns
287 1.1 riastrad * -ENOSPC if no suitable free area is available. This is for range
288 1.1 riastrad * restricted allocations. The preallocated memory node must be cleared.
289 1.1 riastrad */
290 1.1 riastrad int drm_mm_insert_node_in_range_generic(struct drm_mm *mm, struct drm_mm_node *node,
291 1.1 riastrad unsigned long size, unsigned alignment, unsigned long color,
292 1.1 riastrad unsigned long start, unsigned long end)
293 1.1 riastrad {
294 1.1 riastrad struct drm_mm_node *hole_node;
295 1.1 riastrad
296 1.1 riastrad hole_node = drm_mm_search_free_in_range_generic(mm,
297 1.1 riastrad size, alignment, color,
298 1.1 riastrad start, end, 0);
299 1.1 riastrad if (!hole_node)
300 1.1 riastrad return -ENOSPC;
301 1.1 riastrad
302 1.1 riastrad drm_mm_insert_helper_range(hole_node, node,
303 1.1 riastrad size, alignment, color,
304 1.1 riastrad start, end);
305 1.1 riastrad return 0;
306 1.1 riastrad }
307 1.1 riastrad EXPORT_SYMBOL(drm_mm_insert_node_in_range_generic);
308 1.1 riastrad
/* Convenience wrapper around the range-restricted insert with color 0. */
int drm_mm_insert_node_in_range(struct drm_mm *mm, struct drm_mm_node *node,
				unsigned long size, unsigned alignment,
				unsigned long start, unsigned long end)
{
	return drm_mm_insert_node_in_range_generic(mm, node, size, alignment, 0, start, end);
}
EXPORT_SYMBOL(drm_mm_insert_node_in_range);
316 1.1 riastrad
/**
 * Remove a memory node from the allocator.
 *
 * The freed space is merged into the hole following the previous node;
 * the node must not currently be part of an eviction scan.
 */
void drm_mm_remove_node(struct drm_mm_node *node)
{
	struct drm_mm *mm = node->mm;
	struct drm_mm_node *prev_node;

	BUG_ON(node->scanned_block || node->scanned_prev_free
	    || node->scanned_next_free);

	prev_node =
	    list_entry(node->node_list.prev, struct drm_mm_node, node_list);

	/* Sanity: hole_follows must agree with the actual hole extent. */
	if (node->hole_follows) {
		BUG_ON(drm_mm_hole_node_start(node)
		    == drm_mm_hole_node_end(node));
		list_del(&node->hole_stack);
	} else
		BUG_ON(drm_mm_hole_node_start(node)
		    != drm_mm_hole_node_end(node));

	/*
	 * The previous node now has a (possibly enlarged) hole after it;
	 * put it at the top of the hole stack either way.
	 */
	if (!prev_node->hole_follows) {
		prev_node->hole_follows = 1;
		list_add(&prev_node->hole_stack, &mm->hole_stack);
	} else
		list_move(&prev_node->hole_stack, &mm->hole_stack);

	list_del(&node->node_list);
	node->allocated = 0;
}
EXPORT_SYMBOL(drm_mm_remove_node);
349 1.1 riastrad
350 1.1 riastrad /*
351 1.1 riastrad * Remove a memory node from the allocator and free the allocated struct
352 1.1 riastrad * drm_mm_node. Only to be used on a struct drm_mm_node obtained by one of the
353 1.1 riastrad * drm_mm_get_block functions.
354 1.1 riastrad */
355 1.1 riastrad void drm_mm_put_block(struct drm_mm_node *node)
356 1.1 riastrad {
357 1.1 riastrad
358 1.1 riastrad struct drm_mm *mm = node->mm;
359 1.1 riastrad
360 1.1 riastrad drm_mm_remove_node(node);
361 1.1 riastrad
362 1.1 riastrad spin_lock(&mm->unused_lock);
363 1.1 riastrad if (mm->num_unused < MM_UNUSED_TARGET) {
364 1.1 riastrad list_add(&node->node_list, &mm->unused_nodes);
365 1.1 riastrad ++mm->num_unused;
366 1.1 riastrad } else
367 1.1 riastrad kfree(node);
368 1.1 riastrad spin_unlock(&mm->unused_lock);
369 1.1 riastrad }
370 1.1 riastrad EXPORT_SYMBOL(drm_mm_put_block);
371 1.1 riastrad
/*
 * Return nonzero iff a hole covering [start, end) can hold an allocation
 * of @size bytes once @start is rounded up to @alignment (0 = no
 * alignment constraint).
 */
static int check_free_hole(unsigned long start, unsigned long end,
			   unsigned long size, unsigned alignment)
{
	unsigned long aligned = start;

	if (end - start < size)
		return 0;

	if (alignment) {
		unsigned rem = aligned % alignment;
		if (rem)
			aligned += alignment - rem;
	}

	return end >= aligned + size;
}
386 1.1 riastrad
387 1.1 riastrad struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
388 1.1 riastrad unsigned long size,
389 1.1 riastrad unsigned alignment,
390 1.1 riastrad unsigned long color,
391 1.1 riastrad bool best_match)
392 1.1 riastrad {
393 1.1 riastrad struct drm_mm_node *entry;
394 1.1 riastrad struct drm_mm_node *best;
395 1.1 riastrad unsigned long best_size;
396 1.1 riastrad
397 1.1 riastrad BUG_ON(mm->scanned_blocks);
398 1.1 riastrad
399 1.1 riastrad best = NULL;
400 1.1 riastrad best_size = ~0UL;
401 1.1 riastrad
402 1.1 riastrad list_for_each_entry(entry, &mm->hole_stack, hole_stack) {
403 1.1 riastrad unsigned long adj_start = drm_mm_hole_node_start(entry);
404 1.1 riastrad unsigned long adj_end = drm_mm_hole_node_end(entry);
405 1.1 riastrad
406 1.1 riastrad if (mm->color_adjust) {
407 1.1 riastrad mm->color_adjust(entry, color, &adj_start, &adj_end);
408 1.1 riastrad if (adj_end <= adj_start)
409 1.1 riastrad continue;
410 1.1 riastrad }
411 1.1 riastrad
412 1.1 riastrad BUG_ON(!entry->hole_follows);
413 1.1 riastrad if (!check_free_hole(adj_start, adj_end, size, alignment))
414 1.1 riastrad continue;
415 1.1 riastrad
416 1.1 riastrad if (!best_match)
417 1.1 riastrad return entry;
418 1.1 riastrad
419 1.1 riastrad if (entry->size < best_size) {
420 1.1 riastrad best = entry;
421 1.1 riastrad best_size = entry->size;
422 1.1 riastrad }
423 1.1 riastrad }
424 1.1 riastrad
425 1.1 riastrad return best;
426 1.1 riastrad }
427 1.1 riastrad EXPORT_SYMBOL(drm_mm_search_free_generic);
428 1.1 riastrad
429 1.1 riastrad struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
430 1.1 riastrad unsigned long size,
431 1.1 riastrad unsigned alignment,
432 1.1 riastrad unsigned long color,
433 1.1 riastrad unsigned long start,
434 1.1 riastrad unsigned long end,
435 1.1 riastrad bool best_match)
436 1.1 riastrad {
437 1.1 riastrad struct drm_mm_node *entry;
438 1.1 riastrad struct drm_mm_node *best;
439 1.1 riastrad unsigned long best_size;
440 1.1 riastrad
441 1.1 riastrad BUG_ON(mm->scanned_blocks);
442 1.1 riastrad
443 1.1 riastrad best = NULL;
444 1.1 riastrad best_size = ~0UL;
445 1.1 riastrad
446 1.1 riastrad list_for_each_entry(entry, &mm->hole_stack, hole_stack) {
447 1.1 riastrad unsigned long adj_start = drm_mm_hole_node_start(entry) < start ?
448 1.1 riastrad start : drm_mm_hole_node_start(entry);
449 1.1 riastrad unsigned long adj_end = drm_mm_hole_node_end(entry) > end ?
450 1.1 riastrad end : drm_mm_hole_node_end(entry);
451 1.1 riastrad
452 1.1 riastrad BUG_ON(!entry->hole_follows);
453 1.1 riastrad
454 1.1 riastrad if (mm->color_adjust) {
455 1.1 riastrad mm->color_adjust(entry, color, &adj_start, &adj_end);
456 1.1 riastrad if (adj_end <= adj_start)
457 1.1 riastrad continue;
458 1.1 riastrad }
459 1.1 riastrad
460 1.1 riastrad if (!check_free_hole(adj_start, adj_end, size, alignment))
461 1.1 riastrad continue;
462 1.1 riastrad
463 1.1 riastrad if (!best_match)
464 1.1 riastrad return entry;
465 1.1 riastrad
466 1.1 riastrad if (entry->size < best_size) {
467 1.1 riastrad best = entry;
468 1.1 riastrad best_size = entry->size;
469 1.1 riastrad }
470 1.1 riastrad }
471 1.1 riastrad
472 1.1 riastrad return best;
473 1.1 riastrad }
474 1.1 riastrad EXPORT_SYMBOL(drm_mm_search_free_in_range_generic);
475 1.1 riastrad
/**
 * Moves an allocation. To be used with embedded struct drm_mm_node.
 *
 * @new takes over @old's position in both the address-ordered list and
 * the hole stack, plus its placement fields; @old is marked unallocated.
 */
void drm_mm_replace_node(struct drm_mm_node *old, struct drm_mm_node *new)
{
	list_replace(&old->node_list, &new->node_list);
	list_replace(&old->hole_stack, &new->hole_stack);
	new->hole_follows = old->hole_follows;
	new->mm = old->mm;
	new->start = old->start;
	new->size = old->size;
	new->color = old->color;

	old->allocated = 0;
	new->allocated = 1;
}
EXPORT_SYMBOL(drm_mm_replace_node);
493 1.1 riastrad
/**
 * Initialize LRU scanning.
 *
 * This simply sets up the scanning routines with the parameters for the desired
 * hole.
 *
 * Warning: As long as the scan list is non-empty, no other operations than
 * adding/removing nodes to/from the scan list are allowed.
 */
void drm_mm_init_scan(struct drm_mm *mm,
		      unsigned long size,
		      unsigned alignment,
		      unsigned long color)
{
	mm->scan_color = color;
	mm->scan_alignment = alignment;
	mm->scan_size = size;
	mm->scanned_blocks = 0;
	mm->scan_hit_start = 0;
	mm->scan_hit_end = 0;
	/* Unrestricted scan: no [scan_start, scan_end] clamping. */
	mm->scan_check_range = 0;
	mm->prev_scanned_node = NULL;
}
EXPORT_SYMBOL(drm_mm_init_scan);
518 1.1 riastrad
/**
 * Initialize LRU scanning.
 *
 * This simply sets up the scanning routines with the parameters for the desired
 * hole. This version is for range-restricted scans.
 *
 * Warning: As long as the scan list is non-empty, no other operations than
 * adding/removing nodes to/from the scan list are allowed.
 */
void drm_mm_init_scan_with_range(struct drm_mm *mm,
				 unsigned long size,
				 unsigned alignment,
				 unsigned long color,
				 unsigned long start,
				 unsigned long end)
{
	mm->scan_color = color;
	mm->scan_alignment = alignment;
	mm->scan_size = size;
	mm->scanned_blocks = 0;
	mm->scan_hit_start = 0;
	mm->scan_hit_end = 0;
	/* Only holes overlapping [start, end] will satisfy the scan. */
	mm->scan_start = start;
	mm->scan_end = end;
	mm->scan_check_range = 1;
	mm->prev_scanned_node = NULL;
}
EXPORT_SYMBOL(drm_mm_init_scan_with_range);
547 1.1 riastrad
/**
 * Add a node to the scan list that might be freed to make space for the desired
 * hole.
 *
 * The node is temporarily unlinked from the address-ordered list so the
 * hole after its predecessor appears to cover the node's space as well;
 * drm_mm_scan_remove_block() undoes this.
 *
 * Returns non-zero, if a hole has been found, zero otherwise.
 */
int drm_mm_scan_add_block(struct drm_mm_node *node)
{
	struct drm_mm *mm = node->mm;
	struct drm_mm_node *prev_node;
	unsigned long hole_start, hole_end;
	unsigned long adj_start, adj_end;

	mm->scanned_blocks++;

	BUG_ON(node->scanned_block);
	node->scanned_block = 1;

	prev_node = list_entry(node->node_list.prev, struct drm_mm_node,
			       node_list);

	/* Remember whether a hole really preceded the node so the state
	 * can be restored on removal, then pretend the node is free. */
	node->scanned_preceeds_hole = prev_node->hole_follows;
	prev_node->hole_follows = 1;
	list_del(&node->node_list);
	/* Keep a back-link for restoration, and chain the scanned nodes
	 * together through their (now dangling) next pointers. */
	node->node_list.prev = &prev_node->node_list;
	node->node_list.next = &mm->prev_scanned_node->node_list;
	mm->prev_scanned_node = node;

	adj_start = hole_start = drm_mm_hole_node_start(prev_node);
	adj_end = hole_end = drm_mm_hole_node_end(prev_node);

	/* Range-restricted scan: clamp to [scan_start, scan_end]. */
	if (mm->scan_check_range) {
		if (adj_start < mm->scan_start)
			adj_start = mm->scan_start;
		if (adj_end > mm->scan_end)
			adj_end = mm->scan_end;
	}

	if (mm->color_adjust)
		mm->color_adjust(prev_node, mm->scan_color,
				 &adj_start, &adj_end);

	if (check_free_hole(adj_start, adj_end,
			    mm->scan_size, mm->scan_alignment)) {
		/* Record the (unclamped) hole that satisfied the scan. */
		mm->scan_hit_start = hole_start;
		mm->scan_hit_end = hole_end;
		return 1;
	}

	return 0;
}
EXPORT_SYMBOL(drm_mm_scan_add_block);
600 1.1 riastrad
/**
 * Remove a node from the scan list.
 *
 * Nodes _must_ be removed in the exact same order from the scan list as they
 * have been added, otherwise the internal state of the memory manager will be
 * corrupted.
 *
 * When the scan list is empty, the selected memory nodes can be freed. An
 * immediately following drm_mm_search_free with best_match = 0 will then return
 * the just freed block (because its at the top of the free_stack list).
 *
 * Returns one if this block should be evicted, zero otherwise. Will always
 * return zero when no hole has been found.
 */
int drm_mm_scan_remove_block(struct drm_mm_node *node)
{
	struct drm_mm *mm = node->mm;
	struct drm_mm_node *prev_node;

	mm->scanned_blocks--;

	BUG_ON(!node->scanned_block);
	node->scanned_block = 0;

	/* The back-link stored by drm_mm_scan_add_block() still points at
	 * the predecessor, so the node can be re-linked in place. */
	prev_node = list_entry(node->node_list.prev, struct drm_mm_node,
			       node_list);

	prev_node->hole_follows = node->scanned_preceeds_hole;
	list_add(&node->node_list, &prev_node->node_list);

	/* Evict iff the node overlaps the hole found by the scan. */
	return (drm_mm_hole_node_end(node) > mm->scan_hit_start &&
	    node->start < mm->scan_hit_end);
}
EXPORT_SYMBOL(drm_mm_scan_remove_block);
635 1.1 riastrad
/*
 * Return nonzero iff the allocator holds no allocated nodes, i.e. only
 * the sentinel head node remains in the address-ordered list.
 */
int drm_mm_clean(struct drm_mm * mm)
{
	struct list_head *head = &mm->head_node.node_list;

	return (head->next->next == head);
}
EXPORT_SYMBOL(drm_mm_clean);
643 1.1 riastrad
/*
 * Initialize a memory manager covering [@start, @start + @size).
 * Always returns 0.
 */
int drm_mm_init(struct drm_mm * mm, unsigned long start, unsigned long size)
{
	INIT_LIST_HEAD(&mm->hole_stack);
	INIT_LIST_HEAD(&mm->unused_nodes);
	mm->num_unused = 0;
	mm->scanned_blocks = 0;
	spin_lock_init(&mm->unused_lock);

	/* Clever trick to avoid a special case in the free hole tracking:
	 * the head node is placed at start + size with a (wrapping) size of
	 * -size, so the "hole" following it is exactly the managed range
	 * [start, start + size). */
	INIT_LIST_HEAD(&mm->head_node.node_list);
	INIT_LIST_HEAD(&mm->head_node.hole_stack);
	mm->head_node.hole_follows = 1;
	mm->head_node.scanned_block = 0;
	mm->head_node.scanned_prev_free = 0;
	mm->head_node.scanned_next_free = 0;
	mm->head_node.mm = mm;
	mm->head_node.start = start + size;
	mm->head_node.size = start - mm->head_node.start;
	list_add_tail(&mm->head_node.hole_stack, &mm->hole_stack);

	mm->color_adjust = NULL;

	return 0;
}
EXPORT_SYMBOL(drm_mm_init);
669 1.1 riastrad
/*
 * Tear down a memory manager.  All nodes must already have been removed
 * (only the sentinel head node may remain); the cached unused nodes are
 * freed here.
 */
void drm_mm_takedown(struct drm_mm * mm)
{
	struct drm_mm_node *entry, *next;

	BUG_ON(!list_empty(&mm->head_node.node_list));

	/* Drain the cache of pre-allocated nodes. */
	spin_lock(&mm->unused_lock);
	list_for_each_entry_safe(entry, next, &mm->unused_nodes, node_list) {
		list_del(&entry->node_list);
		kfree(entry);
		--mm->num_unused;
	}
	spin_unlock(&mm->unused_lock);

	/*
	 * XXX The locking above can't be right -- either it is
	 * unnecessary or it is insufficient.
	 */
	spin_lock_destroy(&mm->unused_lock);

	BUG_ON(mm->num_unused != 0);
}
EXPORT_SYMBOL(drm_mm_takedown);
693 1.1 riastrad
/*
 * Dump every node and hole to the kernel log, one line per region, then
 * print usage totals.  @prefix is prepended to each line.
 */
void drm_mm_debug_table(struct drm_mm *mm, const char *prefix)
{
	struct drm_mm_node *entry;
	unsigned long total_used = 0, total_free = 0, total = 0;
	unsigned long hole_start, hole_end, hole_size;

	/* The hole after the head node is the space before the first node. */
	hole_start = drm_mm_hole_node_start(&mm->head_node);
	hole_end = drm_mm_hole_node_end(&mm->head_node);
	hole_size = hole_end - hole_start;
	if (hole_size)
		printk(KERN_DEBUG "%s 0x%08lx-0x%08lx: %8lu: free\n",
			prefix, hole_start, hole_end,
			hole_size);
	total_free += hole_size;

	drm_mm_for_each_node(entry, mm) {
		printk(KERN_DEBUG "%s 0x%08lx-0x%08lx: %8lu: used\n",
			prefix, entry->start, entry->start + entry->size,
			entry->size);
		total_used += entry->size;

		if (entry->hole_follows) {
			hole_start = drm_mm_hole_node_start(entry);
			hole_end = drm_mm_hole_node_end(entry);
			hole_size = hole_end - hole_start;
			printk(KERN_DEBUG "%s 0x%08lx-0x%08lx: %8lu: free\n",
				prefix, hole_start, hole_end,
				hole_size);
			total_free += hole_size;
		}
	}
	total = total_free + total_used;

	printk(KERN_DEBUG "%s total: %lu, used %lu free %lu\n", prefix, total,
		total_used, total_free);
}
EXPORT_SYMBOL(drm_mm_debug_table);
731 1.1 riastrad
732 1.1 riastrad #if defined(CONFIG_DEBUG_FS)
/*
 * debugfs variant of drm_mm_debug_table(): dump every node and hole plus
 * usage totals into @m.  Always returns 0.
 */
int drm_mm_dump_table(struct seq_file *m, struct drm_mm *mm)
{
	struct drm_mm_node *entry;
	unsigned long total_used = 0, total_free = 0, total = 0;
	unsigned long hole_start, hole_end, hole_size;

	/* The hole after the head node is the space before the first node. */
	hole_start = drm_mm_hole_node_start(&mm->head_node);
	hole_end = drm_mm_hole_node_end(&mm->head_node);
	hole_size = hole_end - hole_start;
	if (hole_size)
		seq_printf(m, "0x%08lx-0x%08lx: 0x%08lx: free\n",
				hole_start, hole_end, hole_size);
	total_free += hole_size;

	drm_mm_for_each_node(entry, mm) {
		seq_printf(m, "0x%08lx-0x%08lx: 0x%08lx: used\n",
				entry->start, entry->start + entry->size,
				entry->size);
		total_used += entry->size;
		if (entry->hole_follows) {
			hole_start = drm_mm_hole_node_start(entry);
			hole_end = drm_mm_hole_node_end(entry);
			hole_size = hole_end - hole_start;
			seq_printf(m, "0x%08lx-0x%08lx: 0x%08lx: free\n",
					hole_start, hole_end, hole_size);
			total_free += hole_size;
		}
	}
	total = total_free + total_used;

	seq_printf(m, "total: %lu, used %lu free %lu\n", total, total_used, total_free);
	return 0;
}
EXPORT_SYMBOL(drm_mm_dump_table);
767 1.1 riastrad #endif
768