Commit 8742267a authored by Chris Wilson, committed by Daniel Vetter

drm/i915: Defer assignment of obj->gtt_space until after all possible mallocs

As we may invoke the shrinker whilst trying to allocate memory to hold
the gtt_space for this object, we need to be careful not to mark the
drm_mm_node as activated (by assigning it to this object) before we
have finished our sequence of allocations.

Note: We also need to move the binding of the object into the actual
pagetables down a bit. The best way seems to be to move it out into
the callsites.
Reported-by: Imre Deak <imre.deak@gmail.com>
Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
[danvet: Added small note to commit message to summarize review
discussion.]
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
parent 776ca7cf
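The pattern the commit message describes is easier to see in isolation. Below is a minimal, hedged sketch in plain C: the freshly allocated node is kept in a local variable and only assigned to the object once every step that might allocate (and therefore might recurse into the shrinker) has succeeded. All names here (struct object, alloc_node, prepare_mapping, free_node) are hypothetical stand-ins that mirror the shape of the i915 change, not its real API.

    /*
     * Minimal sketch of "defer the assignment until after all possible
     * allocations".  Hypothetical types and helpers only.
     */
    #include <errno.h>
    #include <stddef.h>

    struct node;

    struct object {
            struct node *space;     /* non-NULL is taken to mean "fully bound" */
    };

    struct node *alloc_node(void);                  /* may allocate -> may run the shrinker */
    int prepare_mapping(struct object *obj);        /* may allocate -> may run the shrinker */
    void free_node(struct node *node);

    static int bind_object(struct object *obj)
    {
            struct node *node;
            int ret;

            node = alloc_node();
            if (node == NULL)
                    return -ENOMEM;

            /*
             * prepare_mapping() can allocate and therefore re-enter the
             * shrinker.  Because obj->space is still NULL at this point,
             * a half-constructed object is never mistaken for a bound one.
             */
            ret = prepare_mapping(obj);
            if (ret) {
                    free_node(node);        /* no obj->space = NULL dance needed */
                    return ret;
            }

            obj->space = node;              /* publish only once nothing can fail */
            return 0;
    }

A side effect of publishing last is that the error paths get simpler: nothing has been attached to the object yet, so there is nothing on the object to unwind, which is exactly the simplification visible in the diff below.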
@@ -2918,11 +2918,10 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
 
  search_free:
 	if (map_and_fenceable)
-		free_space =
-			drm_mm_search_free_in_range_color(&dev_priv->mm.gtt_space,
-							  size, alignment, obj->cache_level,
-							  0, dev_priv->mm.gtt_mappable_end,
-							  false);
+		free_space = drm_mm_search_free_in_range_color(&dev_priv->mm.gtt_space,
+							       size, alignment, obj->cache_level,
+							       0, dev_priv->mm.gtt_mappable_end,
+							       false);
 	else
 		free_space = drm_mm_search_free_color(&dev_priv->mm.gtt_space,
 						      size, alignment, obj->cache_level,
@@ -2930,18 +2929,18 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
 
 	if (free_space != NULL) {
 		if (map_and_fenceable)
-			obj->gtt_space =
+			free_space =
 				drm_mm_get_block_range_generic(free_space,
 							       size, alignment, obj->cache_level,
 							       0, dev_priv->mm.gtt_mappable_end,
 							       false);
 		else
-			obj->gtt_space =
+			free_space =
 				drm_mm_get_block_generic(free_space,
 							 size, alignment, obj->cache_level,
 							 false);
 	}
-	if (obj->gtt_space == NULL) {
+	if (free_space == NULL) {
 		ret = i915_gem_evict_something(dev, size, alignment,
 					       obj->cache_level,
 					       map_and_fenceable,
@@ -2954,34 +2953,29 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
 		goto search_free;
 	}
 	if (WARN_ON(!i915_gem_valid_gtt_space(dev,
-					      obj->gtt_space,
+					      free_space,
 					      obj->cache_level))) {
 		i915_gem_object_unpin_pages(obj);
-		drm_mm_put_block(obj->gtt_space);
-		obj->gtt_space = NULL;
+		drm_mm_put_block(free_space);
 		return -EINVAL;
 	}
 
 	ret = i915_gem_gtt_prepare_object(obj);
 	if (ret) {
 		i915_gem_object_unpin_pages(obj);
-		drm_mm_put_block(obj->gtt_space);
-		obj->gtt_space = NULL;
+		drm_mm_put_block(free_space);
 		return ret;
 	}
 
-	if (!dev_priv->mm.aliasing_ppgtt)
-		i915_gem_gtt_bind_object(obj, obj->cache_level);
-
 	list_move_tail(&obj->gtt_list, &dev_priv->mm.bound_list);
 	list_add_tail(&obj->mm_list, &dev_priv->mm.inactive_list);
 
-	obj->gtt_offset = obj->gtt_space->start;
+	obj->gtt_space = free_space;
+	obj->gtt_offset = free_space->start;
 
 	fenceable =
-		obj->gtt_space->size == fence_size &&
-		(obj->gtt_space->start & (fence_alignment - 1)) == 0;
+		free_space->size == fence_size &&
+		(free_space->start & (fence_alignment - 1)) == 0;
 
 	mappable =
 		obj->gtt_offset + obj->base.size <= dev_priv->mm.gtt_mappable_end;
@@ -3452,11 +3446,16 @@ i915_gem_object_pin(struct drm_i915_gem_object *obj,
 	}
 
 	if (obj->gtt_space == NULL) {
+		struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
+
 		ret = i915_gem_object_bind_to_gtt(obj, alignment,
 						  map_and_fenceable,
 						  nonblocking);
 		if (ret)
 			return ret;
+
+		if (!dev_priv->mm.aliasing_ppgtt)
+			i915_gem_gtt_bind_object(obj, obj->cache_level);
 	}
 
 	if (!obj->has_global_gtt_mapping && map_and_fenceable)
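The second point in the commit message, moving the page-table binding out into the callsites, is what the last hunk shows: i915_gem_object_pin() now performs the global GTT binding itself, after i915_gem_object_bind_to_gtt() has returned successfully. Continuing the hypothetical sketch above the diff, the caller side would look roughly like this; map_into_page_tables() is again an invented stand-in, and bind_object()/struct object are the sketch's helpers, not the real i915 functions.

    /*
     * Caller-side counterpart to the earlier sketch: bind first, and only
     * map a fully published object into the page tables.
     */
    void map_into_page_tables(struct object *obj);

    static int pin_object(struct object *obj)
    {
            int ret;

            if (obj->space == NULL) {
                    ret = bind_object(obj);         /* may allocate, may run the shrinker */
                    if (ret)
                            return ret;

                    /* Only a fully published object is mapped into the page tables. */
                    map_into_page_tables(obj);
            }

            return 0;
    }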