Commit 8a338f4b authored by Chris Wilson

drm/i915/gem: Try allocating va from free space

If the current node/entry location is occupied, and the object is not
pinned, try assigning it some free space. We cannot wait here, so if in
doubt, we unreserve and try to grab all at once.

v2: Use the final pin_flags so that we won't have to move the object if
we find the wrong free space.
Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Reviewed-by: Matthew Auld <matthew.auld@intel.com>
Link: https://patchwork.freedesktop.org/patch/msgid/20200401194135.5442-1-chris@chris-wilson.co.uk
parent 0d961c46
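
In outline (a condensed restatement of the eb_pin_vma() hunk in the diff below, with explanatory comments added; this sketch is not part of the committed code): the fast path first tries to re-pin the vma at its current location and, if that fails for an object that is not softpinned, immediately retries against free space only. PIN_NOEVICT keeps that retry non-blocking, so on failure the object simply falls through to the reservation slow path which, as the commit message notes, unreserves and tries to grab everything at once.

	/* Attempt to reuse the current location, if the vma still has one. */
	if (unlikely(i915_vma_pin(vma, 0, 0, pin_flags))) {
		/* Softpinned objects must sit at their exact offset. */
		if (entry->flags & EXEC_OBJECT_PINNED)
			return false;

		/*
		 * Otherwise grab any suitable _free_ space right away.
		 * PIN_NOEVICT keeps this non-blocking: we may not wait or
		 * evict here, so on failure the object is left for the
		 * reservation slow path to bind in one pass.
		 */
		if (unlikely(i915_vma_pin(vma,
					  entry->pad_to_size,
					  entry->alignment,
					  eb_pin_flags(entry, ev->flags) |
					  PIN_USER | PIN_NOEVICT)))
			return false;
	}
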
@@ -429,6 +429,32 @@ eb_vma_misplaced(const struct drm_i915_gem_exec_object2 *entry,
 	return false;
 }
 
+static u64 eb_pin_flags(const struct drm_i915_gem_exec_object2 *entry,
+			unsigned int exec_flags)
+{
+	u64 pin_flags = 0;
+
+	if (exec_flags & EXEC_OBJECT_NEEDS_GTT)
+		pin_flags |= PIN_GLOBAL;
+
+	/*
+	 * Wa32bitGeneralStateOffset & Wa32bitInstructionBaseOffset,
+	 * limit address to the first 4GBs for unflagged objects.
+	 */
+	if (!(exec_flags & EXEC_OBJECT_SUPPORTS_48B_ADDRESS))
+		pin_flags |= PIN_ZONE_4G;
+
+	if (exec_flags & __EXEC_OBJECT_NEEDS_MAP)
+		pin_flags |= PIN_MAPPABLE;
+
+	if (exec_flags & EXEC_OBJECT_PINNED)
+		pin_flags |= entry->offset | PIN_OFFSET_FIXED;
+	else if (exec_flags & __EXEC_OBJECT_NEEDS_BIAS)
+		pin_flags |= BATCH_OFFSET_BIAS | PIN_OFFSET_BIAS;
+
+	return pin_flags;
+}
+
 static inline bool
 eb_pin_vma(struct i915_execbuffer *eb,
 	   const struct drm_i915_gem_exec_object2 *entry,
@@ -446,8 +472,19 @@ eb_pin_vma(struct i915_execbuffer *eb,
 	if (unlikely(ev->flags & EXEC_OBJECT_NEEDS_GTT))
 		pin_flags |= PIN_GLOBAL;
 
-	if (unlikely(i915_vma_pin(vma, 0, 0, pin_flags)))
-		return false;
+	/* Attempt to reuse the current location if available */
+	if (unlikely(i915_vma_pin(vma, 0, 0, pin_flags))) {
+		if (entry->flags & EXEC_OBJECT_PINNED)
+			return false;
+
+		/* Failing that pick any _free_ space if suitable */
+		if (unlikely(i915_vma_pin(vma,
+					  entry->pad_to_size,
+					  entry->alignment,
+					  eb_pin_flags(entry, ev->flags) |
+					  PIN_USER | PIN_NOEVICT)))
+			return false;
+	}
 
 	if (unlikely(ev->flags & EXEC_OBJECT_NEEDS_FENCE)) {
 		if (unlikely(i915_vma_pin_fence(vma))) {
@@ -588,28 +625,9 @@ static int eb_reserve_vma(const struct i915_execbuffer *eb,
 	   u64 pin_flags)
 {
 	struct drm_i915_gem_exec_object2 *entry = ev->exec;
-	unsigned int exec_flags = ev->flags;
 	struct i915_vma *vma = ev->vma;
 	int err;
 
-	if (exec_flags & EXEC_OBJECT_NEEDS_GTT)
-		pin_flags |= PIN_GLOBAL;
-
-	/*
-	 * Wa32bitGeneralStateOffset & Wa32bitInstructionBaseOffset,
-	 * limit address to the first 4GBs for unflagged objects.
-	 */
-	if (!(exec_flags & EXEC_OBJECT_SUPPORTS_48B_ADDRESS))
-		pin_flags |= PIN_ZONE_4G;
-
-	if (exec_flags & __EXEC_OBJECT_NEEDS_MAP)
-		pin_flags |= PIN_MAPPABLE;
-
-	if (exec_flags & EXEC_OBJECT_PINNED)
-		pin_flags |= entry->offset | PIN_OFFSET_FIXED;
-	else if (exec_flags & __EXEC_OBJECT_NEEDS_BIAS)
-		pin_flags |= BATCH_OFFSET_BIAS | PIN_OFFSET_BIAS;
-
 	if (drm_mm_node_allocated(&vma->node) &&
 	    eb_vma_misplaced(entry, vma, ev->flags)) {
 		err = i915_vma_unbind(vma);
@@ -619,7 +637,7 @@ static int eb_reserve_vma(const struct i915_execbuffer *eb,
 
 	err = i915_vma_pin(vma,
 			   entry->pad_to_size, entry->alignment,
-			   pin_flags);
+			   eb_pin_flags(entry, ev->flags) | pin_flags);
 	if (err)
 		return err;
@@ -628,7 +646,7 @@ static int eb_reserve_vma(const struct i915_execbuffer *eb,
 		eb->args->flags |= __EXEC_HAS_RELOC;
 	}
 
-	if (unlikely(exec_flags & EXEC_OBJECT_NEEDS_FENCE)) {
+	if (unlikely(ev->flags & EXEC_OBJECT_NEEDS_FENCE)) {
 		err = i915_vma_pin_fence(vma);
 		if (unlikely(err)) {
 			i915_vma_unpin(vma);
@@ -636,10 +654,10 @@ static int eb_reserve_vma(const struct i915_execbuffer *eb,
 		}
 
 		if (vma->fence)
-			exec_flags |= __EXEC_OBJECT_HAS_FENCE;
+			ev->flags |= __EXEC_OBJECT_HAS_FENCE;
 	}
 
-	ev->flags = exec_flags | __EXEC_OBJECT_HAS_PIN;
+	ev->flags |= __EXEC_OBJECT_HAS_PIN;
 	GEM_BUG_ON(eb_vma_misplaced(entry, vma, ev->flags));
 
 	return 0;