Commit 247177dd authored by Chris Wilson

drm/i915: Always set the vma->pages

Previously, we would only set the vma->pages pointer for GGTT entries.
However, if we always set it, we can use it to prettify code that wants
to access the backing store assigned to the VMA.
Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Reviewed-by: Joonas Lahtinen <joonas.lahtinen@linux.intel.com>
Link: http://patchwork.freedesktop.org/patch/msgid/1471254551-25805-8-git-send-email-chris@chris-wilson.co.uk
parent 95b2ab56
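
To see what the commit message is getting at before reading the diff, here is a minimal, compilable sketch of the pattern being adopted. The structs and functions below (ppgtt_bind(), ggtt_bind(), insert_entries()) are simplified stand-ins, not the real i915 definitions; the point is only that every bind path records its backing store in vma->pages, so code that walks the pages reads one field and never branches on whether the binding is GGTT or ppGTT.

/* Illustrative only -- simplified stand-ins, not the kernel's types. */
#include <stdio.h>
#include <stddef.h>

struct sg_table { size_t nents; };
struct drm_i915_gem_object { struct sg_table *pages; };

struct i915_vma {
	struct drm_i915_gem_object *obj;
	struct sg_table *pages;		/* now set for every VMA, not just GGTT */
};

/* Each bind path fills in vma->pages before inserting PTEs. */
static void ppgtt_bind(struct i915_vma *vma)
{
	vma->pages = vma->obj->pages;	/* plain object backing store */
}

static void ggtt_bind(struct i915_vma *vma, struct sg_table *view_pages)
{
	vma->pages = view_pages;	/* possibly a remapped view of the object */
}

/* Consumers read one field instead of branching on the VMA type. */
static void insert_entries(const struct i915_vma *vma)
{
	printf("inserting %zu entries\n", vma->pages->nents);
}

int main(void)
{
	struct sg_table obj_pages = { .nents = 4 };
	struct sg_table rotated_view = { .nents = 2 };
	struct drm_i915_gem_object obj = { .pages = &obj_pages };
	struct i915_vma ppgtt_vma = { .obj = &obj };
	struct i915_vma ggtt_vma = { .obj = &obj };

	ppgtt_bind(&ppgtt_vma);
	ggtt_bind(&ggtt_vma, &rotated_view);
	insert_entries(&ppgtt_vma);
	insert_entries(&ggtt_vma);
	return 0;
}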

@@ -2868,12 +2868,12 @@ int i915_vma_unbind(struct i915_vma *vma)
 	if (i915_vma_is_ggtt(vma)) {
 		if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) {
 			obj->map_and_fenceable = false;
-		} else if (vma->ggtt_view.pages) {
-			sg_free_table(vma->ggtt_view.pages);
-			kfree(vma->ggtt_view.pages);
+		} else if (vma->pages) {
+			sg_free_table(vma->pages);
+			kfree(vma->pages);
 		}
-		vma->ggtt_view.pages = NULL;
 	}
+	vma->pages = NULL;
 
 	/* Since the unbound list is global, only move to that list if
 	 * no more VMAs exist. */

@@ -170,11 +170,13 @@ static int ppgtt_bind_vma(struct i915_vma *vma,
 {
 	u32 pte_flags = 0;
 
+	vma->pages = vma->obj->pages;
+
 	/* Currently applicable only to VLV */
 	if (vma->obj->gt_ro)
 		pte_flags |= PTE_READ_ONLY;
 
-	vma->vm->insert_entries(vma->vm, vma->obj->pages, vma->node.start,
+	vma->vm->insert_entries(vma->vm, vma->pages, vma->node.start,
 				cache_level, pte_flags);
 
 	return 0;

@@ -2618,8 +2620,7 @@ static int ggtt_bind_vma(struct i915_vma *vma,
 	if (obj->gt_ro)
 		pte_flags |= PTE_READ_ONLY;
 
-	vma->vm->insert_entries(vma->vm, vma->ggtt_view.pages,
-				vma->node.start,
+	vma->vm->insert_entries(vma->vm, vma->pages, vma->node.start,
 				cache_level, pte_flags);
 
 	/*

@@ -2651,8 +2652,7 @@ static int aliasing_gtt_bind_vma(struct i915_vma *vma,
 	if (flags & I915_VMA_GLOBAL_BIND) {
 		vma->vm->insert_entries(vma->vm,
-					vma->ggtt_view.pages,
-					vma->node.start,
+					vma->pages, vma->node.start,
 					cache_level, pte_flags);
 	}

@@ -2660,8 +2660,7 @@ static int aliasing_gtt_bind_vma(struct i915_vma *vma,
 		struct i915_hw_ppgtt *appgtt =
 			to_i915(vma->vm->dev)->mm.aliasing_ppgtt;
 		appgtt->base.insert_entries(&appgtt->base,
-					    vma->ggtt_view.pages,
-					    vma->node.start,
+					    vma->pages, vma->node.start,
 					    cache_level, pte_flags);
 	}

@@ -3557,28 +3556,27 @@ i915_get_ggtt_vma_pages(struct i915_vma *vma)
 {
 	int ret = 0;
 
-	if (vma->ggtt_view.pages)
+	if (vma->pages)
 		return 0;
 
 	if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL)
-		vma->ggtt_view.pages = vma->obj->pages;
+		vma->pages = vma->obj->pages;
 	else if (vma->ggtt_view.type == I915_GGTT_VIEW_ROTATED)
-		vma->ggtt_view.pages =
+		vma->pages =
 			intel_rotate_fb_obj_pages(&vma->ggtt_view.params.rotated, vma->obj);
 	else if (vma->ggtt_view.type == I915_GGTT_VIEW_PARTIAL)
-		vma->ggtt_view.pages =
-			intel_partial_pages(&vma->ggtt_view, vma->obj);
+		vma->pages = intel_partial_pages(&vma->ggtt_view, vma->obj);
 	else
 		WARN_ONCE(1, "GGTT view %u not implemented!\n",
 			  vma->ggtt_view.type);
 
-	if (!vma->ggtt_view.pages) {
+	if (!vma->pages) {
 		DRM_ERROR("Failed to get pages for GGTT view type %u!\n",
 			  vma->ggtt_view.type);
 		ret = -EINVAL;
-	} else if (IS_ERR(vma->ggtt_view.pages)) {
-		ret = PTR_ERR(vma->ggtt_view.pages);
-		vma->ggtt_view.pages = NULL;
+	} else if (IS_ERR(vma->pages)) {
+		ret = PTR_ERR(vma->pages);
+		vma->pages = NULL;
 		DRM_ERROR("Failed to get pages for VMA view type %u (%d)!\n",
 			  vma->ggtt_view.type, ret);
 	}

@@ -155,8 +155,6 @@ struct i915_ggtt_view {
 		} partial;
 		struct intel_rotation_info rotated;
 	} params;
-
-	struct sg_table *pages;
 };
 
 extern const struct i915_ggtt_view i915_ggtt_view_normal;

@@ -176,6 +174,7 @@ struct i915_vma {
 	struct drm_mm_node node;
 	struct drm_i915_gem_object *obj;
 	struct i915_address_space *vm;
+	struct sg_table *pages;
 	void __iomem *iomap;
 	u64 size;
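
One last sketch, again with mock types rather than the kernel's, of the ownership rule the unbind hunk at the top of the diff follows: a GGTT VMA with a non-normal view owns the sg_table that was built for that view and must free it, whereas a normal GGTT VMA or a ppGTT VMA merely borrows the object's own pages; in every case the pointer is cleared once the VMA is unbound.

/* Illustrative only -- simplified stand-ins, not the kernel's types. */
#include <stdlib.h>

struct sg_table { int nents; };

enum view_type { VIEW_NORMAL, VIEW_ROTATED, VIEW_PARTIAL };

struct i915_vma {
	struct sg_table *pages;		/* backing store, valid while bound */
	enum view_type view;
	int is_ggtt;
};

/* Mirrors the unbind hunk: only view-specific tables belong to the VMA. */
static void vma_unbind_pages(struct i915_vma *vma)
{
	if (vma->is_ggtt && vma->view != VIEW_NORMAL && vma->pages)
		free(vma->pages);	/* sg_free_table() + kfree() in the kernel */
	vma->pages = NULL;		/* cleared for every VMA, GGTT or ppGTT */
}

int main(void)
{
	struct i915_vma vma = {
		.pages = malloc(sizeof(struct sg_table)),
		.view = VIEW_ROTATED,
		.is_ggtt = 1,
	};

	vma_unbind_pages(&vma);		/* frees the rotated view's table */
	return 0;
}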