Commit bbba9693 authored by Chris Wilson, committed by Daniel Vetter

drm: Micro-optimise drm_mm_for_each_node_in_range()

As we require valid start/end parameters, we can replace the initial
potential NULL with a pointer to the drm_mm.head_node and so reduce the
test on every iteration from a NULL + address comparison to just an
address comparison.

add/remove: 0/0 grow/shrink: 0/1 up/down: 0/-26 (-26)
function                                     old     new   delta
i915_gem_evict_for_node                      719     693     -26

(No other users outside of the test harness.)
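For illustration, a minimal standalone sketch of the sentinel idea follows; the types and the linear-search helper are hypothetical stand-ins, not the drm_mm structures. Because a failed lookup returns the list head, whose ->start is the upper bound of the managed range, the loop ends on the address comparison alone and the per-iteration NULL test disappears.

/*
 * Sketch only: hypothetical types, not the drm_mm implementation.
 * The circular list's head doubles as an end-of-range sentinel.
 */
#include <stdio.h>

struct node {
	struct node *next;
	unsigned long long start, size;
};

struct range_mgr {
	struct node head;	/* sentinel: head.start == upper bound of range */
};

/* Stand-in for __drm_mm_interval_first(): a plain linear search here. */
static struct node *first_in_range(struct range_mgr *mgr, unsigned long long start)
{
	struct node *n;

	for (n = mgr->head.next; n != &mgr->head; n = n->next)
		if (n->start + n->size > start)
			return n;

	return &mgr->head;	/* miss: return the sentinel, never NULL */
}

#define for_each_node_in_range(n, mgr, start, end)			\
	for ((n) = first_in_range((mgr), (start));			\
	     (n)->start < (end);	/* no NULL check needed */	\
	     (n) = (n)->next)

int main(void)
{
	struct range_mgr mgr = { .head = { .start = 100 } };
	struct node a = { .start = 10, .size = 20 };
	struct node *n;

	mgr.head.next = &a;	/* circular list: head -> a -> head */
	a.next = &mgr.head;

	for_each_node_in_range(n, &mgr, 0, 100)
		printf("node at %llu+%llu\n", n->start, n->size);

	return 0;
}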
Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Cc: Joonas Lahtinen <joonas.lahtinen@linux.intel.com>
Reviewed-by: Joonas Lahtinen <joonas.lahtinen@linux.intel.com>
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
Link: http://patchwork.freedesktop.org/patch/msgid/20170204111913.12416-1-chris@chris-wilson.co.uk
parent c5a82814
@@ -170,7 +170,7 @@ struct drm_mm_node *
 __drm_mm_interval_first(const struct drm_mm *mm, u64 start, u64 last)
 {
 	return drm_mm_interval_tree_iter_first((struct rb_root *)&mm->interval_tree,
-					       start, last);
+					       start, last) ?: (struct drm_mm_node *)&mm->head_node;
 }
 EXPORT_SYMBOL(__drm_mm_interval_first);
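The `?:` form above is the GNU binary conditional used throughout the kernel: if the interval-tree search yields a node it is returned, otherwise the (cast-away-const) address of mm->head_node is substituted. An expanded equivalent, given only as a sketch of what the committed one-liner does:

/* Sketch: expanded equivalent of the `?:` expression above. */
struct drm_mm_node *
__drm_mm_interval_first(const struct drm_mm *mm, u64 start, u64 last)
{
	struct drm_mm_node *node;

	/* Search the interval tree for the first node overlapping [start, last]. */
	node = drm_mm_interval_tree_iter_first((struct rb_root *)&mm->interval_tree,
					       start, last);
	if (!node)
		/* Nothing found: hand back the unallocated head_node sentinel. */
		node = (struct drm_mm_node *)&mm->head_node;

	return node;
}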
@@ -839,16 +839,18 @@ static bool assert_contiguous_in_range(struct drm_mm *mm,
 		n++;
 	}
 
-	drm_mm_for_each_node_in_range(node, mm, 0, start) {
-		if (node) {
+	if (start > 0) {
+		node = __drm_mm_interval_first(mm, 0, start - 1);
+		if (node->allocated) {
 			pr_err("node before start: node=%llx+%llu, start=%llx\n",
 			       node->start, node->size, start);
 			return false;
 		}
 	}
 
-	drm_mm_for_each_node_in_range(node, mm, end, U64_MAX) {
-		if (node) {
+	if (end < U64_MAX) {
+		node = __drm_mm_interval_first(mm, end, U64_MAX);
+		if (node->allocated) {
 			pr_err("node after end: node=%llx+%llu, end=%llx\n",
 			       node->start, node->size, end);
 			return false;
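With the NULL return gone, a caller that only wants to know whether anything sits in a range now checks drm_mm_node.allocated, as the selftest does above; the head_node sentinel is never an allocated node. A hedged sketch of that caller-side pattern (the helper name is hypothetical):

/* Hypothetical helper, mirroring the selftest's use of the sentinel. */
static bool range_is_empty(const struct drm_mm *mm, u64 start, u64 end)
{
	struct drm_mm_node *node;

	if (start >= end)
		return true;

	/* Returns either a real node or the never-allocated head_node. */
	node = __drm_mm_interval_first(mm, start, end - 1);
	return !node->allocated;
}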
@@ -459,10 +459,13 @@ __drm_mm_interval_first(const struct drm_mm *mm, u64 start, u64 last);
  * but using the internal interval tree to accelerate the search for the
  * starting node, and so not safe against removal of elements. It assumes
  * that @end is within (or is the upper limit of) the drm_mm allocator.
+ * If [@start, @end] are beyond the range of the drm_mm, the iterator may walk
+ * over the special _unallocated_ &drm_mm.head_node, and may even continue
+ * indefinitely.
  */
 #define drm_mm_for_each_node_in_range(node__, mm__, start__, end__)	\
 	for (node__ = __drm_mm_interval_first((mm__), (start__), (end__)-1); \
-	     node__ && node__->start < (end__);				\
+	     node__->start < (end__);					\
 	     node__ = list_next_entry(node__, node_list))
 
 void drm_mm_scan_init_with_range(struct drm_mm_scan *scan,
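For completeness, a usage sketch of the iterator after this change, assuming the drm_mm was initialised to cover at least [0, 4096); staying within the initialised span is what keeps the iteration off the unallocated head_node, per the new comment above. The function and printout are illustrative only:

/* Illustrative only: dump the allocated nodes overlapping [0, 4096). */
static void dump_low_range(struct drm_mm *mm)
{
	struct drm_mm_node *node;

	drm_mm_for_each_node_in_range(node, mm, 0, 4096)
		pr_info("node %llx+%llu\n", node->start, node->size);
}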