Commit 54d516b1 authored by John Hubbard, committed by Linus Torvalds

mm/gup: small refactoring: simplify try_grab_page()

try_grab_page() does the same thing as try_grab_compound_head(..., refs=1,
...), just with a different API.  So there is a lot of code duplication
there.
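For illustration, the equivalence the patch exploits (a single reference or pin, i.e. refs fixed at 1):

	try_grab_page(page, flags);                /* single-page API */
	try_grab_compound_head(page, 1, flags);    /* general API, refs == 1 */

Both take one reference (FOLL_GET) or one pin (FOLL_PIN) on the page, so the single-page variant can simply delegate.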

Change try_grab_page() to call try_grab_compound_head(), while keeping the
API contract identical for callers.
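A minimal sketch of the caller-side contract, which stays the same (the function name here is hypothetical; the real consumers are the follow_page()-style paths in mm/gup.c):

	/* Hypothetical caller, illustrating the unchanged contract. */
	static int example_grab(struct page *page, unsigned int flags)
	{
		if (!try_grab_page(page, flags))
			return -ENOMEM;	/* could not grab: treat as failure */

		/* ... use the page; later, put_page() or unpin_user_page() ... */
		return 0;
	}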

Also, now that try_grab_compound_head() always has a caller, remove the
__maybe_unused annotation.

Link: https://lkml.kernel.org/r/20210813044133.1536842-3-jhubbard@nvidia.com
Signed-off-by: John Hubbard <jhubbard@nvidia.com>
Reviewed-by: Christoph Hellwig <hch@lst.de>
Cc: Matthew Wilcox <willy@infradead.org>
Cc: Christian Borntraeger <borntraeger@de.ibm.com>
Cc: Heiko Carstens <hca@linux.ibm.com>
Cc: Vasily Gorbik <gor@linux.ibm.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
parent 3967db22
include/linux/mm.h
@@ -1214,8 +1214,8 @@ static inline void get_page(struct page *page)
 }
 
 bool __must_check try_grab_page(struct page *page, unsigned int flags);
-__maybe_unused struct page *try_grab_compound_head(struct page *page, int refs,
-						   unsigned int flags);
+struct page *try_grab_compound_head(struct page *page, int refs,
+				    unsigned int flags);
 
 static inline __must_check bool try_get_page(struct page *page)
mm/gup.c
@@ -124,8 +124,8 @@ static inline struct page *try_get_compound_head(struct page *page, int refs)
  * considered failure, and furthermore, a likely bug in the caller, so a warning
  * is also emitted.
  */
-__maybe_unused struct page *try_grab_compound_head(struct page *page,
-						   int refs, unsigned int flags)
+struct page *try_grab_compound_head(struct page *page,
+				    int refs, unsigned int flags)
 {
 	if (flags & FOLL_GET)
 		return try_get_compound_head(page, refs);
@@ -208,35 +208,10 @@ static void put_compound_head(struct page *page, int refs, unsigned int flags)
  */
 bool __must_check try_grab_page(struct page *page, unsigned int flags)
 {
-	WARN_ON_ONCE((flags & (FOLL_GET | FOLL_PIN)) == (FOLL_GET | FOLL_PIN));
-
-	if (flags & FOLL_GET)
-		return try_get_page(page);
-	else if (flags & FOLL_PIN) {
-		int refs = 1;
-
-		page = compound_head(page);
-
-		if (WARN_ON_ONCE(page_ref_count(page) <= 0))
-			return false;
-
-		if (hpage_pincount_available(page))
-			hpage_pincount_add(page, 1);
-		else
-			refs = GUP_PIN_COUNTING_BIAS;
-
-		/*
-		 * Similar to try_grab_compound_head(): even if using the
-		 * hpage_pincount_add/_sub() routines, be sure to
-		 * *also* increment the normal page refcount field at least
-		 * once, so that the page really is pinned.
-		 */
-		page_ref_add(page, refs);
-
-		mod_node_page_state(page_pgdat(page), NR_FOLL_PIN_ACQUIRED, 1);
-	}
-
-	return true;
+	if (!(flags & (FOLL_GET | FOLL_PIN)))
+		return true;
+
+	return try_grab_compound_head(page, 1, flags);
 }
 
 /**
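For context on what the delegated helper does for FOLL_PIN, visible in the block removed above: pages with a usable huge-page pincount get hpage_pincount_add(), while ordinary pages have their refcount biased by GUP_PIN_COUNTING_BIAS (1U << 10 at this point in the tree), so that unpinning can subtract the same amount later. A simplified sketch of the refs == 1 case, not the exact kernel code:

	/*
	 * Simplified sketch mirroring the removed block; the real logic
	 * lives in try_grab_compound_head() in mm/gup.c.
	 */
	page = compound_head(page);

	if (hpage_pincount_available(page)) {
		hpage_pincount_add(page, 1);	/* dedicated pincount field */
		page_ref_add(page, 1);		/* plus a real reference */
	} else {
		page_ref_add(page, GUP_PIN_COUNTING_BIAS);	/* biased refcount */
	}
	mod_node_page_state(page_pgdat(page), NR_FOLL_PIN_ACQUIRED, 1);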