Commit adb040b8 authored by Chris Wilson's avatar Chris Wilson Committed by Daniel Vetter

drm: Use drm_mm_insert_node_in_range_generic() for everyone

Remove a superfluous helper as drm_mm_insert_node is equivalent to
insert_node_in_range with a range of [0, U64_MAX].
Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Reviewed-by: Joonas Lahtinen <joonas.lahtinen@linux.intel.com>
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
Link: http://patchwork.freedesktop.org/patch/msgid/20161222083641.2691-37-chris@chris-wilson.co.uk
parent 3db93756
...@@ -92,11 +92,6 @@ ...@@ -92,11 +92,6 @@
* some basic allocator dumpers for debugging. * some basic allocator dumpers for debugging.
*/ */
static struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
u64 size,
u64 alignment,
unsigned long color,
enum drm_mm_search_flags flags);
static struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm, static struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
u64 size, u64 size,
u64 alignment, u64 alignment,
...@@ -230,6 +225,7 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node, ...@@ -230,6 +225,7 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
struct drm_mm_node *node, struct drm_mm_node *node,
u64 size, u64 alignment, u64 size, u64 alignment,
unsigned long color, unsigned long color,
u64 range_start, u64 range_end,
enum drm_mm_allocator_flags flags) enum drm_mm_allocator_flags flags)
{ {
struct drm_mm *mm = hole_node->mm; struct drm_mm *mm = hole_node->mm;
...@@ -238,11 +234,14 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node, ...@@ -238,11 +234,14 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
u64 adj_start = hole_start; u64 adj_start = hole_start;
u64 adj_end = hole_end; u64 adj_end = hole_end;
DRM_MM_BUG_ON(node->allocated); DRM_MM_BUG_ON(!drm_mm_hole_follows(hole_node) || node->allocated);
if (mm->color_adjust) if (mm->color_adjust)
mm->color_adjust(hole_node, color, &adj_start, &adj_end); mm->color_adjust(hole_node, color, &adj_start, &adj_end);
adj_start = max(adj_start, range_start);
adj_end = min(adj_end, range_end);
if (flags & DRM_MM_CREATE_TOP) if (flags & DRM_MM_CREATE_TOP)
adj_start = adj_end - size; adj_start = adj_end - size;
...@@ -258,9 +257,6 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node, ...@@ -258,9 +257,6 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
} }
} }
DRM_MM_BUG_ON(adj_start < hole_start);
DRM_MM_BUG_ON(adj_end > hole_end);
if (adj_start == hole_start) { if (adj_start == hole_start) {
hole_node->hole_follows = 0; hole_node->hole_follows = 0;
list_del(&hole_node->hole_stack); list_del(&hole_node->hole_stack);
...@@ -276,7 +272,10 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node, ...@@ -276,7 +272,10 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
drm_mm_interval_tree_add_node(hole_node, node); drm_mm_interval_tree_add_node(hole_node, node);
DRM_MM_BUG_ON(node->start < range_start);
DRM_MM_BUG_ON(node->start < adj_start);
DRM_MM_BUG_ON(node->start + node->size > adj_end); DRM_MM_BUG_ON(node->start + node->size > adj_end);
DRM_MM_BUG_ON(node->start + node->size > range_end);
node->hole_follows = 0; node->hole_follows = 0;
if (__drm_mm_hole_node_start(node) < hole_end) { if (__drm_mm_hole_node_start(node) < hole_end) {
...@@ -359,107 +358,6 @@ int drm_mm_reserve_node(struct drm_mm *mm, struct drm_mm_node *node) ...@@ -359,107 +358,6 @@ int drm_mm_reserve_node(struct drm_mm *mm, struct drm_mm_node *node)
} }
EXPORT_SYMBOL(drm_mm_reserve_node); EXPORT_SYMBOL(drm_mm_reserve_node);
/**
 * drm_mm_insert_node_generic - search for space and insert @node
 * @mm: drm_mm to allocate from
 * @node: preallocate node to insert
 * @size: size of the allocation
 * @alignment: alignment of the allocation
 * @color: opaque tag value to use for this node
 * @sflags: flags to fine-tune the allocation search
 * @aflags: flags to fine-tune the allocation behavior
 *
 * The preallocated node must be cleared to 0.
 *
 * Returns:
 * 0 on success, -ENOSPC if there's no suitable hole.
 */
int drm_mm_insert_node_generic(struct drm_mm *mm, struct drm_mm_node *node,
			       u64 size, u64 alignment,
			       unsigned long color,
			       enum drm_mm_search_flags sflags,
			       enum drm_mm_allocator_flags aflags)
{
	struct drm_mm_node *hole;

	/* A zero-sized allocation is a caller bug, not an ENOSPC condition. */
	if (WARN_ON(size == 0))
		return -EINVAL;

	/* Find a hole big enough for @size at @alignment/@color ... */
	hole = drm_mm_search_free_generic(mm, size, alignment, color, sflags);
	if (!hole)
		return -ENOSPC;

	/* ... and carve @node out of it. */
	drm_mm_insert_helper(hole, node, size, alignment, color, aflags);
	return 0;
}
EXPORT_SYMBOL(drm_mm_insert_node_generic);
/*
 * Carve @node (of @size bytes at @alignment/@color) out of the hole that
 * follows @hole_node, restricted to the [@start, @end] interval, and update
 * the allocator's hole bookkeeping (hole_stack list, hole_follows flags,
 * interval tree).  The caller must have verified the hole actually fits the
 * request; the DRM_MM_BUG_ONs below only sanity-check that contract.
 */
static void drm_mm_insert_helper_range(struct drm_mm_node *hole_node,
				       struct drm_mm_node *node,
				       u64 size, u64 alignment,
				       unsigned long color,
				       u64 start, u64 end,
				       enum drm_mm_allocator_flags flags)
{
	struct drm_mm *mm = hole_node->mm;
	u64 hole_start = drm_mm_hole_node_start(hole_node);
	u64 hole_end = drm_mm_hole_node_end(hole_node);
	u64 adj_start = hole_start;
	u64 adj_end = hole_end;
	/* @hole_node must actually be followed by a hole, and @node unused. */
	DRM_MM_BUG_ON(!drm_mm_hole_follows(hole_node) || node->allocated);
	/* Let the driver shrink the usable window for this color first ... */
	if (mm->color_adjust)
		mm->color_adjust(hole_node, color, &adj_start, &adj_end);
	/* ... then clamp it to the caller-supplied range. */
	adj_start = max(adj_start, start);
	adj_end = min(adj_end, end);
	/* TOP allocations are placed at the high end of the hole. */
	if (flags & DRM_MM_CREATE_TOP)
		adj_start = adj_end - size;
	if (alignment) {
		u64 rem;
		div64_u64_rem(adj_start, alignment, &rem);
		if (rem) {
			/*
			 * Round towards the interior of the hole: down for
			 * top-anchored allocations, up for bottom-anchored.
			 */
			if (flags & DRM_MM_CREATE_TOP)
				adj_start -= rem;
			else
				adj_start += alignment - rem;
		}
	}
	/* Allocation starts flush with the hole: the hole is consumed (any
	 * remainder now trails @node instead, handled below). */
	if (adj_start == hole_start) {
		hole_node->hole_follows = 0;
		list_del(&hole_node->hole_stack);
	}
	node->start = adj_start;
	node->size = size;
	node->mm = mm;
	node->color = color;
	node->allocated = 1;
	/* Link @node after @hole_node in address order and index it. */
	list_add(&node->node_list, &hole_node->node_list);
	drm_mm_interval_tree_add_node(hole_node, node);
	/* The placement must lie inside both the range and the (adjusted) hole. */
	DRM_MM_BUG_ON(node->start < start);
	DRM_MM_BUG_ON(node->start < adj_start);
	DRM_MM_BUG_ON(node->start + node->size > adj_end);
	DRM_MM_BUG_ON(node->start + node->size > end);
	/* If space remains between @node and the old hole end, @node now owns
	 * a trailing hole of its own. */
	node->hole_follows = 0;
	if (__drm_mm_hole_node_start(node) < hole_end) {
		list_add(&node->hole_stack, &mm->hole_stack);
		node->hole_follows = 1;
	}
	/* Record the allocation site for leak debugging (CONFIG_DRM_DEBUG_MM). */
	save_stack(node);
}
/** /**
* drm_mm_insert_node_in_range_generic - ranged search for space and insert @node * drm_mm_insert_node_in_range_generic - ranged search for space and insert @node
* @mm: drm_mm to allocate from * @mm: drm_mm to allocate from
...@@ -495,7 +393,7 @@ int drm_mm_insert_node_in_range_generic(struct drm_mm *mm, struct drm_mm_node *n ...@@ -495,7 +393,7 @@ int drm_mm_insert_node_in_range_generic(struct drm_mm *mm, struct drm_mm_node *n
if (!hole_node) if (!hole_node)
return -ENOSPC; return -ENOSPC;
drm_mm_insert_helper_range(hole_node, node, drm_mm_insert_helper(hole_node, node,
size, alignment, color, size, alignment, color,
start, end, aflags); start, end, aflags);
return 0; return 0;
...@@ -558,48 +456,6 @@ static int check_free_hole(u64 start, u64 end, u64 size, u64 alignment) ...@@ -558,48 +456,6 @@ static int check_free_hole(u64 start, u64 end, u64 size, u64 alignment)
return end >= start + size; return end >= start + size;
} }
/*
 * Scan the hole list for a hole able to hold a @size/@alignment/@color
 * allocation.  Returns the node preceding the first suitable hole, or the
 * one preceding the smallest suitable hole if DRM_MM_SEARCH_BEST is set;
 * NULL if nothing fits.  Must not race with an active scan.
 */
static struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
						      u64 size,
						      u64 alignment,
						      unsigned long color,
						      enum drm_mm_search_flags flags)
{
	struct drm_mm_node *hole;
	struct drm_mm_node *best_match = NULL;
	u64 best_size = ~0UL;
	u64 hole_begin;
	u64 hole_close;

	DRM_MM_BUG_ON(mm->scan_active);

	__drm_mm_for_each_hole(hole, mm, hole_begin, hole_close,
			       flags & DRM_MM_SEARCH_BELOW) {
		/* Rank by the raw hole size, before any color shrinking. */
		u64 hole_size = hole_close - hole_begin;

		if (mm->color_adjust) {
			mm->color_adjust(hole, color, &hole_begin, &hole_close);
			if (hole_close <= hole_begin)
				continue;
		}

		if (!check_free_hole(hole_begin, hole_close, size, alignment))
			continue;

		/* First fit unless the caller asked for the tightest hole. */
		if (!(flags & DRM_MM_SEARCH_BEST))
			return hole;

		if (hole_size < best_size) {
			best_match = hole;
			best_size = hole_size;
		}
	}

	return best_match;
}
static struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm, static struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
u64 size, u64 size,
u64 alignment, u64 alignment,
......
...@@ -285,78 +285,100 @@ static inline u64 drm_mm_hole_node_end(const struct drm_mm_node *hole_node) ...@@ -285,78 +285,100 @@ static inline u64 drm_mm_hole_node_end(const struct drm_mm_node *hole_node)
* Basic range manager support (drm_mm.c) * Basic range manager support (drm_mm.c)
*/ */
int drm_mm_reserve_node(struct drm_mm *mm, struct drm_mm_node *node); int drm_mm_reserve_node(struct drm_mm *mm, struct drm_mm_node *node);
int drm_mm_insert_node_in_range_generic(struct drm_mm *mm,
int drm_mm_insert_node_generic(struct drm_mm *mm,
struct drm_mm_node *node, struct drm_mm_node *node,
u64 size, u64 size,
u64 alignment, u64 alignment,
unsigned long color, unsigned long color,
u64 start,
u64 end,
enum drm_mm_search_flags sflags, enum drm_mm_search_flags sflags,
enum drm_mm_allocator_flags aflags); enum drm_mm_allocator_flags aflags);
/** /**
* drm_mm_insert_node - search for space and insert @node * drm_mm_insert_node_in_range - ranged search for space and insert @node
* @mm: drm_mm to allocate from * @mm: drm_mm to allocate from
* @node: preallocate node to insert * @node: preallocate node to insert
* @size: size of the allocation * @size: size of the allocation
* @alignment: alignment of the allocation * @alignment: alignment of the allocation
* @start: start of the allowed range for this node
* @end: end of the allowed range for this node
* @flags: flags to fine-tune the allocation * @flags: flags to fine-tune the allocation
* *
* This is a simplified version of drm_mm_insert_node_generic() with @color set * This is a simplified version of drm_mm_insert_node_in_range_generic() with
* to 0. * @color set to 0.
* *
* The preallocated node must be cleared to 0. * The preallocated node must be cleared to 0.
* *
* Returns: * Returns:
* 0 on success, -ENOSPC if there's no suitable hole. * 0 on success, -ENOSPC if there's no suitable hole.
*/ */
static inline int drm_mm_insert_node(struct drm_mm *mm, static inline int drm_mm_insert_node_in_range(struct drm_mm *mm,
struct drm_mm_node *node, struct drm_mm_node *node,
u64 size, u64 size,
u64 alignment, u64 alignment,
u64 start,
u64 end,
enum drm_mm_search_flags flags) enum drm_mm_search_flags flags)
{ {
return drm_mm_insert_node_generic(mm, node, size, alignment, 0, flags, return drm_mm_insert_node_in_range_generic(mm, node, size, alignment,
0, start, end, flags,
DRM_MM_CREATE_DEFAULT); DRM_MM_CREATE_DEFAULT);
} }
int drm_mm_insert_node_in_range_generic(struct drm_mm *mm, /**
struct drm_mm_node *node, * drm_mm_insert_node_generic - search for space and insert @node
u64 size, * @mm: drm_mm to allocate from
u64 alignment, * @node: preallocate node to insert
* @size: size of the allocation
* @alignment: alignment of the allocation
* @color: opaque tag value to use for this node
* @sflags: flags to fine-tune the allocation search
* @aflags: flags to fine-tune the allocation behavior
*
* The preallocated node must be cleared to 0.
*
* Returns:
* 0 on success, -ENOSPC if there's no suitable hole.
*/
static inline int
drm_mm_insert_node_generic(struct drm_mm *mm, struct drm_mm_node *node,
u64 size, u64 alignment,
unsigned long color, unsigned long color,
u64 start,
u64 end,
enum drm_mm_search_flags sflags, enum drm_mm_search_flags sflags,
enum drm_mm_allocator_flags aflags); enum drm_mm_allocator_flags aflags)
{
return drm_mm_insert_node_in_range_generic(mm, node,
size, alignment, 0,
0, U64_MAX,
sflags, aflags);
}
/** /**
* drm_mm_insert_node_in_range - ranged search for space and insert @node * drm_mm_insert_node - search for space and insert @node
* @mm: drm_mm to allocate from * @mm: drm_mm to allocate from
* @node: preallocate node to insert * @node: preallocate node to insert
* @size: size of the allocation * @size: size of the allocation
* @alignment: alignment of the allocation * @alignment: alignment of the allocation
* @start: start of the allowed range for this node
* @end: end of the allowed range for this node
* @flags: flags to fine-tune the allocation * @flags: flags to fine-tune the allocation
* *
* This is a simplified version of drm_mm_insert_node_in_range_generic() with * This is a simplified version of drm_mm_insert_node_generic() with @color set
* @color set to 0. * to 0.
* *
* The preallocated node must be cleared to 0. * The preallocated node must be cleared to 0.
* *
* Returns: * Returns:
* 0 on success, -ENOSPC if there's no suitable hole. * 0 on success, -ENOSPC if there's no suitable hole.
*/ */
static inline int drm_mm_insert_node_in_range(struct drm_mm *mm, static inline int drm_mm_insert_node(struct drm_mm *mm,
struct drm_mm_node *node, struct drm_mm_node *node,
u64 size, u64 size,
u64 alignment, u64 alignment,
u64 start,
u64 end,
enum drm_mm_search_flags flags) enum drm_mm_search_flags flags)
{ {
return drm_mm_insert_node_in_range_generic(mm, node, size, alignment, return drm_mm_insert_node_generic(mm, node,
0, start, end, flags, size, alignment, 0,
DRM_MM_CREATE_DEFAULT); flags, DRM_MM_CREATE_DEFAULT);
} }
void drm_mm_remove_node(struct drm_mm_node *node); void drm_mm_remove_node(struct drm_mm_node *node);
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment