Commit ce37eec0 authored by Andrey Konovalov, committed by Andrew Morton

kasan: clean up and rename ____kasan_kmalloc

Introduce a new poison_kmalloc_redzone helper function that poisons the
redzone for a kmalloc object.
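
As a sketch of what the helper ends up doing (not the patch text: the
redzone bounds computation sits in context that the diff below elides,
and round_up(), KASAN_GRANULE_SIZE, kasan_poison(), KASAN_SLAB_REDZONE,
and kasan_save_alloc_info() are defined elsewhere in mm/kasan):

  /* Illustrative sketch only; see the diff below for the real patch. */
  static inline void poison_kmalloc_redzone(struct kmem_cache *cache,
  				const void *object, size_t size, gfp_t flags)
  {
  	unsigned long redzone_start;
  	unsigned long redzone_end;

  	/*
  	 * Poison the granule-aligned part of the redzone past 'size'.
  	 * (Generic mode additionally partially poisons the last object
  	 * granule to cover the unaligned tail, per the diff below.)
  	 */
  	redzone_start = round_up((unsigned long)(object + size),
  				KASAN_GRANULE_SIZE);
  	redzone_end = round_up((unsigned long)(object + cache->object_size),
  				KASAN_GRANULE_SIZE);
  	kasan_poison((void *)redzone_start, redzone_end - redzone_start,
  			KASAN_SLAB_REDZONE, false);

  	/* Record alloc stack traces for kmalloc caches, when enabled. */
  	if (kasan_stack_collection_enabled() && is_kmalloc_cache(cache))
  		kasan_save_alloc_info(cache, (void *)object, flags);
  }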

Drop the confusingly named ____kasan_kmalloc function and instead use
poison_kmalloc_redzone along with the other required parts of
____kasan_kmalloc in the callers' code.

This is a preparatory change for the following patches in this series.
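
Concretely, each caller now open-codes the entry checks that
____kasan_kmalloc used to perform and then calls the helper; this is
the resulting shape of __kasan_kmalloc, mirroring the diff below:

  void * __must_check __kasan_kmalloc(struct kmem_cache *cache,
  				const void *object, size_t size, gfp_t flags)
  {
  	if (gfpflags_allow_blocking(flags))
  		kasan_quarantine_reduce();

  	if (unlikely(object == NULL))
  		return NULL;

  	if (is_kfence_address(kasan_reset_tag(object)))
  		return (void *)object;

  	/* The object has already been unpoisoned by kasan_slab_alloc(). */
  	poison_kmalloc_redzone(cache, object, size, flags);

  	/* Keep the tag that was set by kasan_slab_alloc(). */
  	return (void *)object;
  }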

Link: https://lkml.kernel.org/r/5881232ad357ec0d59a5b1aefd9e0673a386399a.1703024586.git.andreyknvl@google.com
Signed-off-by: Andrey Konovalov <andreyknvl@google.com>
Cc: Alexander Lobakin <alobakin@pm.me>
Cc: Alexander Potapenko <glider@google.com>
Cc: Andrey Ryabinin <ryabinin.a.a@gmail.com>
Cc: Breno Leitao <leitao@debian.org>
Cc: Dmitry Vyukov <dvyukov@google.com>
Cc: Evgenii Stepanov <eugenis@google.com>
Cc: Marco Elver <elver@google.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
parent b556a462
@@ -312,26 +312,12 @@ void * __must_check __kasan_slab_alloc(struct kmem_cache *cache,
 	return tagged_object;
 }
 
-static inline void *____kasan_kmalloc(struct kmem_cache *cache,
-				const void *object, size_t size, gfp_t flags)
+static inline void poison_kmalloc_redzone(struct kmem_cache *cache,
+				const void *object, size_t size, gfp_t flags)
 {
 	unsigned long redzone_start;
 	unsigned long redzone_end;
 
-	if (gfpflags_allow_blocking(flags))
-		kasan_quarantine_reduce();
-
-	if (unlikely(object == NULL))
-		return NULL;
-
-	if (is_kfence_address(kasan_reset_tag(object)))
-		return (void *)object;
-
-	/*
-	 * The object has already been unpoisoned by kasan_slab_alloc() for
-	 * kmalloc() or by kasan_krealloc() for krealloc().
-	 */
-
 	/*
 	 * The redzone has byte-level precision for the generic mode.
 	 * Partially poison the last object granule to cover the unaligned
@@ -355,14 +341,25 @@ static inline void *____kasan_kmalloc(struct kmem_cache *cache,
 	if (kasan_stack_collection_enabled() && is_kmalloc_cache(cache))
 		kasan_save_alloc_info(cache, (void *)object, flags);
 
-	/* Keep the tag that was set by kasan_slab_alloc(). */
-	return (void *)object;
 }
 
 void * __must_check __kasan_kmalloc(struct kmem_cache *cache, const void *object,
 					size_t size, gfp_t flags)
 {
-	return ____kasan_kmalloc(cache, object, size, flags);
+	if (gfpflags_allow_blocking(flags))
+		kasan_quarantine_reduce();
+
+	if (unlikely(object == NULL))
+		return NULL;
+
+	if (is_kfence_address(kasan_reset_tag(object)))
+		return (void *)object;
+
+	/* The object has already been unpoisoned by kasan_slab_alloc(). */
+	poison_kmalloc_redzone(cache, object, size, flags);
+
+	/* Keep the tag that was set by kasan_slab_alloc(). */
+	return (void *)object;
 }
 EXPORT_SYMBOL(__kasan_kmalloc);
 
@@ -408,6 +405,9 @@ void * __must_check __kasan_krealloc(const void *object, size_t size, gfp_t flag
 	if (unlikely(object == ZERO_SIZE_PTR))
 		return (void *)object;
 
+	if (is_kfence_address(kasan_reset_tag(object)))
+		return (void *)object;
+
 	/*
 	 * Unpoison the object's data.
 	 * Part of it might already have been unpoisoned, but it's unknown
@@ -420,8 +420,10 @@ void * __must_check __kasan_krealloc(const void *object, size_t size, gfp_t flag
 	/* Piggy-back on kmalloc() instrumentation to poison the redzone. */
 	if (unlikely(!slab))
 		return __kasan_kmalloc_large(object, size, flags);
-	else
-		return ____kasan_kmalloc(slab->slab_cache, object, size, flags);
+	else {
+		poison_kmalloc_redzone(slab->slab_cache, object, size, flags);
+		return (void *)object;
+	}
 }
 
 bool __kasan_mempool_poison_pages(struct page *page, unsigned int order,
...