/**************************************************************************
 *
 * Copyright 2006 Tungsten Graphics, Inc., Bismarck, ND., USA.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 *
 **************************************************************************/

/*
 * Generic simple memory manager implementation. Intended to be used as a base
 * class implementation for more advanced memory managers.
 *
 * Note that the algorithm used is quite simple: the free list is currently
 * just an unordered stack of free regions, so there may be substantial
 * performance gains from a smarter data structure such as an RB-tree, at
 * least under heavy fragmentation.
 *
 * Aligned allocations could also be improved.
 *
 * Authors:
 * Thomas Hellström <thomas-at-tungstengraphics-dot-com>
 */

#include <drm/drmP.h>
#include <drm/drm_mm.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/export.h>

#define MM_UNUSED_TARGET 4

static struct drm_mm_node *drm_mm_kmalloc(struct drm_mm *mm, int atomic)
{
	struct drm_mm_node *child;

	if (atomic)
		child = kzalloc(sizeof(*child), GFP_ATOMIC);
	else
		child = kzalloc(sizeof(*child), GFP_KERNEL);

	if (unlikely(child == NULL)) {
		/* Fall back to a node cached earlier by drm_mm_pre_get(). */
		spin_lock(&mm->unused_lock);
		if (list_empty(&mm->unused_nodes))
			child = NULL;
		else {
			child =
			    list_entry(mm->unused_nodes.next,
				       struct drm_mm_node, node_list);
			list_del(&child->node_list);
			--mm->num_unused;
		}
		spin_unlock(&mm->unused_lock);
	}
	return child;
}

/**
 * drm_mm_pre_get() - pre-allocate drm_mm_node structures
 * @mm: memory manager struct we are pre-allocating for
 *
 * Returns 0 on success or -ENOMEM if allocation fails.
 */
int drm_mm_pre_get(struct drm_mm *mm)
{
	struct drm_mm_node *node;

	spin_lock(&mm->unused_lock);
	while (mm->num_unused < MM_UNUSED_TARGET) {
		spin_unlock(&mm->unused_lock);
		node = kzalloc(sizeof(*node), GFP_KERNEL);
		spin_lock(&mm->unused_lock);

		if (unlikely(node == NULL)) {
			int ret = (mm->num_unused < 2) ? -ENOMEM : 0;
			spin_unlock(&mm->unused_lock);
			return ret;
		}
		++mm->num_unused;
		list_add_tail(&node->node_list, &mm->unused_nodes);
	}
	spin_unlock(&mm->unused_lock);
	return 0;
}
EXPORT_SYMBOL(drm_mm_pre_get);
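
/*
 * Usage sketch (hypothetical driver code, not part of this file): reserve
 * nodes up front so that the atomic allocation path cannot fail inside a
 * spinlocked section.  The dev_priv fields are illustrative only.
 *
 *	if (drm_mm_pre_get(&dev_priv->mm) == 0) {
 *		spin_lock(&dev_priv->mm_lock);
 *		hole = drm_mm_search_free_generic(&dev_priv->mm, size,
 *						  align, 0, false);
 *		if (hole != NULL)
 *			node = drm_mm_get_block_generic(hole, size, align,
 *							0, 1);
 *		spin_unlock(&dev_priv->mm_lock);
 *	}
 */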

/*
 * Holes are tracked implicitly: a node with hole_follows set is followed by
 * free space that extends up to the start of the next node in address order.
 */
static inline unsigned long drm_mm_hole_node_start(struct drm_mm_node *hole_node)
{
	return hole_node->start + hole_node->size;
}

static inline unsigned long drm_mm_hole_node_end(struct drm_mm_node *hole_node)
{
	struct drm_mm_node *next_node =
		list_entry(hole_node->node_list.next, struct drm_mm_node,
			   node_list);

	return next_node->start;
}

static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
				 struct drm_mm_node *node,
				 unsigned long size, unsigned alignment,
				 unsigned long color)
{
	struct drm_mm *mm = hole_node->mm;
	unsigned long hole_start = drm_mm_hole_node_start(hole_node);
	unsigned long hole_end = drm_mm_hole_node_end(hole_node);
	unsigned long adj_start = hole_start;
	unsigned long adj_end = hole_end;

	BUG_ON(!hole_node->hole_follows || node->allocated);

	if (mm->color_adjust)
		mm->color_adjust(hole_node, color, &adj_start, &adj_end);

	if (alignment) {
		unsigned tmp = adj_start % alignment;
		if (tmp)
			adj_start += alignment - tmp;
	}

	if (adj_start == hole_start) {
		/* The new node consumes the hole from its start. */
		hole_node->hole_follows = 0;
		list_del(&hole_node->hole_stack);
	}

	node->start = adj_start;
	node->size = size;
	node->mm = mm;
	node->color = color;
	node->allocated = 1;

	INIT_LIST_HEAD(&node->hole_stack);
	list_add(&node->node_list, &hole_node->node_list);

	BUG_ON(node->start + node->size > adj_end);

	node->hole_follows = 0;
	if (node->start + node->size < hole_end) {
		/* Track the remainder of the hole after the new node. */
		list_add(&node->hole_stack, &mm->hole_stack);
		node->hole_follows = 1;
	}
}

struct drm_mm_node *drm_mm_get_block_generic(struct drm_mm_node *hole_node,
					     unsigned long size,
					     unsigned alignment,
					     unsigned long color,
					     int atomic)
{
	struct drm_mm_node *node;

	node = drm_mm_kmalloc(hole_node->mm, atomic);
	if (unlikely(node == NULL))
		return NULL;

	drm_mm_insert_helper(hole_node, node, size, alignment, color);

	return node;
}
EXPORT_SYMBOL(drm_mm_get_block_generic);

/**
 * Search for free space and insert a preallocated memory node. Returns
 * -ENOSPC if no suitable free area is available. The preallocated memory node
 * must be cleared.
 */
int drm_mm_insert_node_generic(struct drm_mm *mm, struct drm_mm_node *node,
			       unsigned long size, unsigned alignment,
			       unsigned long color)
{
	struct drm_mm_node *hole_node;

	hole_node = drm_mm_search_free_generic(mm, size, alignment,
					       color, 0);
	if (!hole_node)
		return -ENOSPC;

	drm_mm_insert_helper(hole_node, node, size, alignment, color);
	return 0;
}
EXPORT_SYMBOL(drm_mm_insert_node_generic);

int drm_mm_insert_node(struct drm_mm *mm, struct drm_mm_node *node,
		       unsigned long size, unsigned alignment)
{
	return drm_mm_insert_node_generic(mm, node, size, alignment, 0);
}
EXPORT_SYMBOL(drm_mm_insert_node);
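
/*
 * Usage sketch (hypothetical, assuming a caller-owned manager and an
 * embedded node): the embedded-node API avoids the internal kmalloc of the
 * drm_mm_get_block family entirely.  Note that the node must be zeroed
 * before insertion.
 *
 *	struct drm_mm mm;
 *	struct drm_mm_node node;
 *	int ret;
 *
 *	drm_mm_init(&mm, 0, 1024 * 1024);
 *	memset(&node, 0, sizeof(node));
 *	ret = drm_mm_insert_node(&mm, &node, 4096, 0);
 *	if (ret == 0) {
 *		... node.start now holds the allocated offset ...
 *		drm_mm_remove_node(&node);
 *	}
 *	drm_mm_takedown(&mm);
 */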

static void drm_mm_insert_helper_range(struct drm_mm_node *hole_node,
				       struct drm_mm_node *node,
				       unsigned long size, unsigned alignment,
				       unsigned long color,
				       unsigned long start, unsigned long end)
{
	struct drm_mm *mm = hole_node->mm;
	unsigned long hole_start = drm_mm_hole_node_start(hole_node);
	unsigned long hole_end = drm_mm_hole_node_end(hole_node);
	unsigned long adj_start = hole_start;
	unsigned long adj_end = hole_end;

	BUG_ON(!hole_node->hole_follows || node->allocated);

	if (adj_start < start)
		adj_start = start;
	if (adj_end > end)
		adj_end = end;

	if (mm->color_adjust)
		mm->color_adjust(hole_node, color, &adj_start, &adj_end);

	if (alignment) {
		unsigned tmp = adj_start % alignment;
		if (tmp)
			adj_start += alignment - tmp;
	}

	if (adj_start == hole_start) {
		hole_node->hole_follows = 0;
		list_del(&hole_node->hole_stack);
	}

	node->start = adj_start;
	node->size = size;
	node->mm = mm;
	node->color = color;
	node->allocated = 1;

	INIT_LIST_HEAD(&node->hole_stack);
	list_add(&node->node_list, &hole_node->node_list);

	BUG_ON(node->start + node->size > adj_end);
	BUG_ON(node->start + node->size > end);

	node->hole_follows = 0;
	if (node->start + node->size < hole_end) {
		list_add(&node->hole_stack, &mm->hole_stack);
		node->hole_follows = 1;
	}
}

struct drm_mm_node *drm_mm_get_block_range_generic(struct drm_mm_node *hole_node,
						   unsigned long size,
						   unsigned alignment,
						   unsigned long color,
						   unsigned long start,
						   unsigned long end,
						   int atomic)
{
	struct drm_mm_node *node;

	node = drm_mm_kmalloc(hole_node->mm, atomic);
	if (unlikely(node == NULL))
		return NULL;

	drm_mm_insert_helper_range(hole_node, node, size, alignment, color,
				   start, end);

	return node;
}
EXPORT_SYMBOL(drm_mm_get_block_range_generic);

/**
 * Search for free space and insert a preallocated memory node. Returns
 * -ENOSPC if no suitable free area is available. This is for range-restricted
 * allocations. The preallocated memory node must be cleared.
 */
int drm_mm_insert_node_in_range_generic(struct drm_mm *mm, struct drm_mm_node *node,
					unsigned long size, unsigned alignment, unsigned long color,
					unsigned long start, unsigned long end)
{
	struct drm_mm_node *hole_node;

	hole_node = drm_mm_search_free_in_range_generic(mm,
							size, alignment, color,
							start, end, 0);
	if (!hole_node)
		return -ENOSPC;

	drm_mm_insert_helper_range(hole_node, node,
				   size, alignment, color,
				   start, end);
	return 0;
}
EXPORT_SYMBOL(drm_mm_insert_node_in_range_generic);

int drm_mm_insert_node_in_range(struct drm_mm *mm, struct drm_mm_node *node,
				unsigned long size, unsigned alignment,
				unsigned long start, unsigned long end)
{
	return drm_mm_insert_node_in_range_generic(mm, node, size, alignment, 0, start, end);
}
EXPORT_SYMBOL(drm_mm_insert_node_in_range);
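
/*
 * Usage sketch (hypothetical, names as in the sketch above): constrain an
 * allocation to a sub-range of the managed space, e.g. a buffer that must
 * stay inside a 16 MB DMA window at the bottom of the aperture.  The sizes
 * are illustrative.
 *
 *	ret = drm_mm_insert_node_in_range(&mm, &node, 4096, 0,
 *					  0, 16 * 1024 * 1024);
 */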

/**
 * Remove a memory node from the allocator.
 */
void drm_mm_remove_node(struct drm_mm_node *node)
{
	struct drm_mm *mm = node->mm;
	struct drm_mm_node *prev_node;

	BUG_ON(node->scanned_block || node->scanned_prev_free
	       || node->scanned_next_free);

	prev_node =
	    list_entry(node->node_list.prev, struct drm_mm_node, node_list);

	if (node->hole_follows) {
		BUG_ON(drm_mm_hole_node_start(node)
		       == drm_mm_hole_node_end(node));
		list_del(&node->hole_stack);
	} else
		BUG_ON(drm_mm_hole_node_start(node)
		       != drm_mm_hole_node_end(node));

	/* The freed space (plus any hole that followed the node) now forms
	 * a single hole behind prev_node. */
	if (!prev_node->hole_follows) {
		prev_node->hole_follows = 1;
		list_add(&prev_node->hole_stack, &mm->hole_stack);
	} else
		list_move(&prev_node->hole_stack, &mm->hole_stack);

	list_del(&node->node_list);
	node->allocated = 0;
}
EXPORT_SYMBOL(drm_mm_remove_node);

/*
 * Remove a memory node from the allocator and free the allocated struct
 * drm_mm_node. Only to be used on a struct drm_mm_node obtained by one of the
 * drm_mm_get_block functions.
 */
void drm_mm_put_block(struct drm_mm_node *node)
{
	struct drm_mm *mm = node->mm;

	drm_mm_remove_node(node);

	spin_lock(&mm->unused_lock);
	if (mm->num_unused < MM_UNUSED_TARGET) {
		list_add(&node->node_list, &mm->unused_nodes);
		++mm->num_unused;
	} else
		kfree(node);
	spin_unlock(&mm->unused_lock);
}
EXPORT_SYMBOL(drm_mm_put_block);

static int check_free_hole(unsigned long start, unsigned long end,
			   unsigned long size, unsigned alignment)
{
	if (end - start < size)
		return 0;

	if (alignment) {
		unsigned tmp = start % alignment;
		if (tmp)
			start += alignment - tmp;
	}

	return end >= start + size;
}
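
/*
 * Worked example of the round-up in check_free_hole(): with start == 0x1003
 * and alignment == 0x1000, tmp == 3, so start advances by 0x1000 - 3 to
 * 0x2000; the hole is usable only if end >= 0x2000 + size.
 */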

struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
					       unsigned long size,
					       unsigned alignment,
					       unsigned long color,
					       bool best_match)
{
	struct drm_mm_node *entry;
	struct drm_mm_node *best;
	unsigned long best_size;

	BUG_ON(mm->scanned_blocks);

	best = NULL;
	best_size = ~0UL;

	list_for_each_entry(entry, &mm->hole_stack, hole_stack) {
		unsigned long adj_start = drm_mm_hole_node_start(entry);
		unsigned long adj_end = drm_mm_hole_node_end(entry);

		if (mm->color_adjust) {
			mm->color_adjust(entry, color, &adj_start, &adj_end);
			if (adj_end <= adj_start)
				continue;
		}

		BUG_ON(!entry->hole_follows);
		if (!check_free_hole(adj_start, adj_end, size, alignment))
			continue;

		if (!best_match)
			return entry;

		/* Best fit is judged by the usable size of the hole, not by
		 * the size of the node in front of it. */
		if (adj_end - adj_start < best_size) {
			best = entry;
			best_size = adj_end - adj_start;
		}
	}

	return best;
}
EXPORT_SYMBOL(drm_mm_search_free_generic);

struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
							unsigned long size,
							unsigned alignment,
							unsigned long color,
							unsigned long start,
							unsigned long end,
							bool best_match)
{
	struct drm_mm_node *entry;
	struct drm_mm_node *best;
	unsigned long best_size;

	BUG_ON(mm->scanned_blocks);

	best = NULL;
	best_size = ~0UL;

	list_for_each_entry(entry, &mm->hole_stack, hole_stack) {
		unsigned long adj_start = drm_mm_hole_node_start(entry) < start ?
			start : drm_mm_hole_node_start(entry);
		unsigned long adj_end = drm_mm_hole_node_end(entry) > end ?
			end : drm_mm_hole_node_end(entry);

		BUG_ON(!entry->hole_follows);

		if (mm->color_adjust) {
			mm->color_adjust(entry, color, &adj_start, &adj_end);
			if (adj_end <= adj_start)
				continue;
		}

		if (!check_free_hole(adj_start, adj_end, size, alignment))
			continue;

		if (!best_match)
			return entry;

		/* As above: judge best fit by the usable hole size. */
		if (adj_end - adj_start < best_size) {
			best = entry;
			best_size = adj_end - adj_start;
		}
	}

	return best;
}
EXPORT_SYMBOL(drm_mm_search_free_in_range_generic);

/**
 * Moves an allocation. To be used with embedded struct drm_mm_node.
 */
void drm_mm_replace_node(struct drm_mm_node *old, struct drm_mm_node *new)
{
	list_replace(&old->node_list, &new->node_list);
	list_replace(&old->hole_stack, &new->hole_stack);
	new->hole_follows = old->hole_follows;
	new->mm = old->mm;
	new->start = old->start;
	new->size = old->size;
	new->color = old->color;

	old->allocated = 0;
	new->allocated = 1;
}
EXPORT_SYMBOL(drm_mm_replace_node);

/**
 * Initialize LRU scanning.
 *
 * This simply sets up the scanning routines with the parameters for the
 * desired hole.
 *
 * Warning: As long as the scan list is non-empty, no operations other than
 * adding/removing nodes to/from the scan list are allowed.
 */
void drm_mm_init_scan(struct drm_mm *mm,
		      unsigned long size,
		      unsigned alignment,
		      unsigned long color)
{
	mm->scan_color = color;
	mm->scan_alignment = alignment;
	mm->scan_size = size;
	mm->scanned_blocks = 0;
	mm->scan_hit_start = 0;
	mm->scan_hit_end = 0;
	mm->scan_check_range = 0;
	mm->prev_scanned_node = NULL;
}
EXPORT_SYMBOL(drm_mm_init_scan);

/**
 * Initialize LRU scanning.
 *
 * This simply sets up the scanning routines with the parameters for the
 * desired hole. This version is for range-restricted scans.
 *
 * Warning: As long as the scan list is non-empty, no operations other than
 * adding/removing nodes to/from the scan list are allowed.
 */
void drm_mm_init_scan_with_range(struct drm_mm *mm,
				 unsigned long size,
				 unsigned alignment,
				 unsigned long color,
				 unsigned long start,
				 unsigned long end)
{
	mm->scan_color = color;
	mm->scan_alignment = alignment;
	mm->scan_size = size;
	mm->scanned_blocks = 0;
	mm->scan_hit_start = 0;
	mm->scan_hit_end = 0;
	mm->scan_start = start;
	mm->scan_end = end;
	mm->scan_check_range = 1;
	mm->prev_scanned_node = NULL;
}
EXPORT_SYMBOL(drm_mm_init_scan_with_range);

/**
 * Add a node to the scan list that might be freed to make space for the
 * desired hole.
 *
 * Returns non-zero if a hole has been found, zero otherwise.
 */
int drm_mm_scan_add_block(struct drm_mm_node *node)
{
	struct drm_mm *mm = node->mm;
	struct drm_mm_node *prev_node;
	unsigned long hole_start, hole_end;
	unsigned long adj_start, adj_end;

	mm->scanned_blocks++;

	BUG_ON(node->scanned_block);
	node->scanned_block = 1;

	prev_node = list_entry(node->node_list.prev, struct drm_mm_node,
			       node_list);

	node->scanned_preceeds_hole = prev_node->hole_follows;
	prev_node->hole_follows = 1;
	/* Take the node out of the address-ordered list, but keep enough
	 * state in its (now dangling) list pointers for
	 * drm_mm_scan_remove_block() to undo this. */
	list_del(&node->node_list);
	node->node_list.prev = &prev_node->node_list;
	node->node_list.next = &mm->prev_scanned_node->node_list;
	mm->prev_scanned_node = node;

	adj_start = hole_start = drm_mm_hole_node_start(prev_node);
	adj_end = hole_end = drm_mm_hole_node_end(prev_node);

	if (mm->scan_check_range) {
		if (adj_start < mm->scan_start)
			adj_start = mm->scan_start;
		if (adj_end > mm->scan_end)
			adj_end = mm->scan_end;
	}

	if (mm->color_adjust)
		mm->color_adjust(prev_node, mm->scan_color,
				 &adj_start, &adj_end);

	if (check_free_hole(adj_start, adj_end,
			    mm->scan_size, mm->scan_alignment)) {
		mm->scan_hit_start = hole_start;
		mm->scan_hit_end = hole_end;
		return 1;
	}

	return 0;
}
EXPORT_SYMBOL(drm_mm_scan_add_block);

/**
 * Remove a node from the scan list.
 *
 * Nodes _must_ be removed in the reverse of the order in which they were
 * added to the scan list, otherwise the internal state of the memory manager
 * will be corrupted.
 *
 * When the scan list is empty, the selected memory nodes can be freed. An
 * immediately following drm_mm_search_free with best_match = 0 will then
 * return the just freed block (because it's at the top of the hole_stack
 * list).
 *
 * Returns one if this block should be evicted, zero otherwise. Will always
 * return zero when no hole has been found.
 */
int drm_mm_scan_remove_block(struct drm_mm_node *node)
{
	struct drm_mm *mm = node->mm;
	struct drm_mm_node *prev_node;

	mm->scanned_blocks--;

	BUG_ON(!node->scanned_block);
	node->scanned_block = 0;

	prev_node = list_entry(node->node_list.prev, struct drm_mm_node,
			       node_list);

	prev_node->hole_follows = node->scanned_preceeds_hole;
	list_add(&node->node_list, &prev_node->node_list);

	return (drm_mm_hole_node_end(node) > mm->scan_hit_start &&
		node->start < mm->scan_hit_end);
}
EXPORT_SYMBOL(drm_mm_scan_remove_block);
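
/*
 * Usage sketch (hypothetical eviction loop; obj, lru_list, scan_list,
 * evict_list and evict() are illustrative names, not part of this API):
 * scan objects in LRU order until a hole is found, then unwind and evict
 * the nodes that drm_mm_scan_remove_block() flags.  Building scan_list
 * with list_add() (prepend) means a linear walk restores nodes in reverse
 * order of addition, as required.
 *
 *	drm_mm_init_scan(&mm, size, alignment, 0);
 *
 *	list_for_each_entry(obj, &lru_list, lru_link) {
 *		list_add(&obj->scan_link, &scan_list);
 *		if (drm_mm_scan_add_block(&obj->node))
 *			goto found;
 *	}
 *	... no hole found: still call drm_mm_scan_remove_block() on every
 *	scanned node by walking scan_list, then give up ...
 *
 * found:
 *	list_for_each_entry_safe(obj, next, &scan_list, scan_link) {
 *		if (drm_mm_scan_remove_block(&obj->node))
 *			list_move(&obj->scan_link, &evict_list);
 *		else
 *			list_del(&obj->scan_link);
 *	}
 *	... evict everything on evict_list, then retry the allocation ...
 */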

/*
 * Returns non-zero if only the head node remains, i.e. nothing is allocated.
 */
int drm_mm_clean(struct drm_mm *mm)
{
	struct list_head *head = &mm->head_node.node_list;

	return (head->next->next == head);
}
EXPORT_SYMBOL(drm_mm_clean);

int drm_mm_init(struct drm_mm *mm, unsigned long start, unsigned long size)
{
	INIT_LIST_HEAD(&mm->hole_stack);
	INIT_LIST_HEAD(&mm->unused_nodes);
	mm->num_unused = 0;
	mm->scanned_blocks = 0;
	spin_lock_init(&mm->unused_lock);

	/* Clever trick to avoid a special case in the free hole tracking:
	 * the head node is a dummy placed and sized so that the hole
	 * following it spans the entire managed range [start, start + size). */
	INIT_LIST_HEAD(&mm->head_node.node_list);
	INIT_LIST_HEAD(&mm->head_node.hole_stack);
	mm->head_node.hole_follows = 1;
	mm->head_node.scanned_block = 0;
	mm->head_node.scanned_prev_free = 0;
	mm->head_node.scanned_next_free = 0;
	mm->head_node.mm = mm;
	mm->head_node.start = start + size;
	mm->head_node.size = start - mm->head_node.start;
	list_add_tail(&mm->head_node.hole_stack, &mm->hole_stack);

	mm->color_adjust = NULL;

	return 0;
}
EXPORT_SYMBOL(drm_mm_init);

void drm_mm_takedown(struct drm_mm *mm)
{
	struct drm_mm_node *entry, *next;

	if (!list_empty(&mm->head_node.node_list)) {
		DRM_ERROR("Memory manager not clean. Delaying takedown\n");
		return;
	}

	spin_lock(&mm->unused_lock);
	list_for_each_entry_safe(entry, next, &mm->unused_nodes, node_list) {
		list_del(&entry->node_list);
		kfree(entry);
		--mm->num_unused;
	}
	spin_unlock(&mm->unused_lock);

	BUG_ON(mm->num_unused != 0);
}
EXPORT_SYMBOL(drm_mm_takedown);

void drm_mm_debug_table(struct drm_mm *mm, const char *prefix)
{
	struct drm_mm_node *entry;
	unsigned long total_used = 0, total_free = 0, total = 0;
	unsigned long hole_start, hole_end, hole_size;

	hole_start = drm_mm_hole_node_start(&mm->head_node);
	hole_end = drm_mm_hole_node_end(&mm->head_node);
	hole_size = hole_end - hole_start;
	if (hole_size)
		printk(KERN_DEBUG "%s 0x%08lx-0x%08lx: %8lu: free\n",
		       prefix, hole_start, hole_end,
		       hole_size);
	total_free += hole_size;

	drm_mm_for_each_node(entry, mm) {
		printk(KERN_DEBUG "%s 0x%08lx-0x%08lx: %8lu: used\n",
		       prefix, entry->start, entry->start + entry->size,
		       entry->size);
		total_used += entry->size;

		if (entry->hole_follows) {
			hole_start = drm_mm_hole_node_start(entry);
			hole_end = drm_mm_hole_node_end(entry);
			hole_size = hole_end - hole_start;
			printk(KERN_DEBUG "%s 0x%08lx-0x%08lx: %8lu: free\n",
			       prefix, hole_start, hole_end,
			       hole_size);
			total_free += hole_size;
		}
	}
	total = total_free + total_used;

	printk(KERN_DEBUG "%s total: %lu, used %lu free %lu\n", prefix, total,
	       total_used, total_free);
}
EXPORT_SYMBOL(drm_mm_debug_table);

#if defined(CONFIG_DEBUG_FS)
int drm_mm_dump_table(struct seq_file *m, struct drm_mm *mm)
{
	struct drm_mm_node *entry;
	unsigned long total_used = 0, total_free = 0, total = 0;
	unsigned long hole_start, hole_end, hole_size;

	hole_start = drm_mm_hole_node_start(&mm->head_node);
	hole_end = drm_mm_hole_node_end(&mm->head_node);
	hole_size = hole_end - hole_start;
	if (hole_size)
		seq_printf(m, "0x%08lx-0x%08lx: 0x%08lx: free\n",
			   hole_start, hole_end, hole_size);
	total_free += hole_size;

	drm_mm_for_each_node(entry, mm) {
		seq_printf(m, "0x%08lx-0x%08lx: 0x%08lx: used\n",
			   entry->start, entry->start + entry->size,
			   entry->size);
		total_used += entry->size;
		if (entry->hole_follows) {
			hole_start = drm_mm_hole_node_start(entry);
			hole_end = drm_mm_hole_node_end(entry);
			hole_size = hole_end - hole_start;
			seq_printf(m, "0x%08lx-0x%08lx: 0x%08lx: free\n",
				   hole_start, hole_end, hole_size);
			total_free += hole_size;
		}
	}
	total = total_free + total_used;

	seq_printf(m, "total: %lu, used %lu free %lu\n", total, total_used, total_free);
	return 0;
}
EXPORT_SYMBOL(drm_mm_dump_table);
#endif