Commit fbcc3104 authored by Liam R. Howlett, committed by Andrew Morton

mmap: convert __vma_adjust() to use vma iterator

Use the vma iterator internally for __vma_adjust().  Avoid using the maple
tree interface directly for type safety.

Link: https://lkml.kernel.org/r/20230120162650.984577-32-Liam.Howlett@oracle.com
Signed-off-by: Liam R. Howlett <Liam.Howlett@oracle.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
parent 34403fa5
...@@ -2856,9 +2856,6 @@ extern struct vm_area_struct *copy_vma(struct vm_area_struct **, ...@@ -2856,9 +2856,6 @@ extern struct vm_area_struct *copy_vma(struct vm_area_struct **,
bool *need_rmap_locks); bool *need_rmap_locks);
extern void exit_mmap(struct mm_struct *); extern void exit_mmap(struct mm_struct *);
void vma_mas_store(struct vm_area_struct *vma, struct ma_state *mas);
void vma_mas_remove(struct vm_area_struct *vma, struct ma_state *mas);
static inline int check_data_rlimit(unsigned long rlim, static inline int check_data_rlimit(unsigned long rlim,
unsigned long new, unsigned long new,
unsigned long start, unsigned long start,
......
...@@ -432,56 +432,6 @@ static void __vma_link_file(struct vm_area_struct *vma, ...@@ -432,56 +432,6 @@ static void __vma_link_file(struct vm_area_struct *vma,
flush_dcache_mmap_unlock(mapping); flush_dcache_mmap_unlock(mapping);
} }
/*
* vma_mas_store() - Store a VMA in the maple tree.
* @vma: The vm_area_struct
* @mas: The maple state
*
* Efficient way to store a VMA in the maple tree when the @mas has already
* walked to the correct location.
*
* Note: the end address is inclusive in the maple tree.
*/
void vma_mas_store(struct vm_area_struct *vma, struct ma_state *mas)
{
trace_vma_store(mas->tree, vma);
mas_set_range(mas, vma->vm_start, vma->vm_end - 1);
mas_store_prealloc(mas, vma);
}
/*
* vma_mas_remove() - Remove a VMA from the maple tree.
* @vma: The vm_area_struct
* @mas: The maple state
*
* Efficient way to remove a VMA from the maple tree when the @mas has already
* been established and points to the correct location.
* Note: the end address is inclusive in the maple tree.
*/
void vma_mas_remove(struct vm_area_struct *vma, struct ma_state *mas)
{
trace_vma_mas_szero(mas->tree, vma->vm_start, vma->vm_end - 1);
mas->index = vma->vm_start;
mas->last = vma->vm_end - 1;
mas_store_prealloc(mas, NULL);
}
/*
* vma_mas_szero() - Set a given range to zero. Used when modifying a
* vm_area_struct start or end.
*
* @mas: The maple tree ma_state
* @start: The start address to zero
* @end: The end address to zero.
*/
static inline void vma_mas_szero(struct ma_state *mas, unsigned long start,
unsigned long end)
{
trace_vma_mas_szero(mas->tree, start, end - 1);
mas_set_range(mas, start, end - 1);
mas_store_prealloc(mas, NULL);
}
static int vma_link(struct mm_struct *mm, struct vm_area_struct *vma) static int vma_link(struct mm_struct *mm, struct vm_area_struct *vma)
{ {
VMA_ITERATOR(vmi, mm, 0); VMA_ITERATOR(vmi, mm, 0);
...@@ -641,7 +591,7 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start, ...@@ -641,7 +591,7 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start,
bool vma_changed = false; bool vma_changed = false;
long adjust_next = 0; long adjust_next = 0;
int remove_next = 0; int remove_next = 0;
MA_STATE(mas, &mm->mm_mt, 0, 0); VMA_ITERATOR(vmi, mm, 0);
struct vm_area_struct *exporter = NULL, *importer = NULL; struct vm_area_struct *exporter = NULL, *importer = NULL;
if (next && !insert) { if (next && !insert) {
...@@ -726,7 +676,7 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start, ...@@ -726,7 +676,7 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start,
} }
} }
if (mas_preallocate(&mas, GFP_KERNEL)) if (vma_iter_prealloc(&vmi))
return -ENOMEM; return -ENOMEM;
vma_adjust_trans_huge(orig_vma, start, end, adjust_next); vma_adjust_trans_huge(orig_vma, start, end, adjust_next);
...@@ -772,7 +722,7 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start, ...@@ -772,7 +722,7 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start,
if (start != vma->vm_start) { if (start != vma->vm_start) {
if ((vma->vm_start < start) && if ((vma->vm_start < start) &&
(!insert || (insert->vm_end != start))) { (!insert || (insert->vm_end != start))) {
vma_mas_szero(&mas, vma->vm_start, start); vma_iter_clear(&vmi, vma->vm_start, start);
VM_WARN_ON(insert && insert->vm_start > vma->vm_start); VM_WARN_ON(insert && insert->vm_start > vma->vm_start);
} else { } else {
vma_changed = true; vma_changed = true;
...@@ -782,8 +732,8 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start, ...@@ -782,8 +732,8 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start,
if (end != vma->vm_end) { if (end != vma->vm_end) {
if (vma->vm_end > end) { if (vma->vm_end > end) {
if (!insert || (insert->vm_start != end)) { if (!insert || (insert->vm_start != end)) {
vma_mas_szero(&mas, end, vma->vm_end); vma_iter_clear(&vmi, end, vma->vm_end);
mas_reset(&mas); vma_iter_set(&vmi, vma->vm_end);
VM_WARN_ON(insert && VM_WARN_ON(insert &&
insert->vm_end < vma->vm_end); insert->vm_end < vma->vm_end);
} }
...@@ -794,13 +744,13 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start, ...@@ -794,13 +744,13 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start,
} }
if (vma_changed) if (vma_changed)
vma_mas_store(vma, &mas); vma_iter_store(&vmi, vma);
vma->vm_pgoff = pgoff; vma->vm_pgoff = pgoff;
if (adjust_next) { if (adjust_next) {
next->vm_start += adjust_next; next->vm_start += adjust_next;
next->vm_pgoff += adjust_next >> PAGE_SHIFT; next->vm_pgoff += adjust_next >> PAGE_SHIFT;
vma_mas_store(next, &mas); vma_iter_store(&vmi, next);
} }
if (file) { if (file) {
...@@ -820,8 +770,7 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start, ...@@ -820,8 +770,7 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start,
* us to insert it before dropping the locks * us to insert it before dropping the locks
* (it may either follow vma or precede it). * (it may either follow vma or precede it).
*/ */
mas_reset(&mas); vma_iter_store(&vmi, insert);
vma_mas_store(insert, &mas);
mm->map_count++; mm->map_count++;
} }
...@@ -867,7 +816,7 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start, ...@@ -867,7 +816,7 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start,
if (insert && file) if (insert && file)
uprobe_mmap(insert); uprobe_mmap(insert);
mas_destroy(&mas); vma_iter_free(&vmi);
validate_mm(mm); validate_mm(mm);
return 0; return 0;
...@@ -1999,7 +1948,8 @@ int expand_upwards(struct vm_area_struct *vma, unsigned long address) ...@@ -1999,7 +1948,8 @@ int expand_upwards(struct vm_area_struct *vma, unsigned long address)
anon_vma_interval_tree_pre_update_vma(vma); anon_vma_interval_tree_pre_update_vma(vma);
vma->vm_end = address; vma->vm_end = address;
/* Overwrite old entry in mtree. */ /* Overwrite old entry in mtree. */
vma_mas_store(vma, &mas); mas_set_range(&mas, vma->vm_start, address - 1);
mas_store_prealloc(&mas, vma);
anon_vma_interval_tree_post_update_vma(vma); anon_vma_interval_tree_post_update_vma(vma);
spin_unlock(&mm->page_table_lock); spin_unlock(&mm->page_table_lock);
...@@ -2081,7 +2031,8 @@ int expand_downwards(struct vm_area_struct *vma, unsigned long address) ...@@ -2081,7 +2031,8 @@ int expand_downwards(struct vm_area_struct *vma, unsigned long address)
vma->vm_start = address; vma->vm_start = address;
vma->vm_pgoff -= grow; vma->vm_pgoff -= grow;
/* Overwrite old entry in mtree. */ /* Overwrite old entry in mtree. */
vma_mas_store(vma, &mas); mas_set_range(&mas, address, vma->vm_end - 1);
mas_store_prealloc(&mas, vma);
anon_vma_interval_tree_post_update_vma(vma); anon_vma_interval_tree_post_update_vma(vma);
spin_unlock(&mm->page_table_lock); spin_unlock(&mm->page_table_lock);
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment