kasan: fix krealloc handling for tag-based mode
Right now tag-based KASAN can retag the memory that is reallocated via krealloc and return a differently tagged pointer even if the same slab object gets used and no reallocation technically happens. There are a few issues with this approach. One is that krealloc callers can't rely on comparing the return value with the passed argument to check whether reallocation happened. Another is that if a caller knows that no reallocation happened, it can access object memory through the old pointer, which leads to false positives. Look at nf_ct_ext_add() to see an example.

Fix this by keeping the same tag if the memory doesn't actually get reallocated during krealloc.

Link: http://lkml.kernel.org/r/bb2a71d17ed072bcc528cbee46fcbd71a6da3be4.1546540962.git.andreyknvl@google.com
Signed-off-by: Andrey Konovalov <andreyknvl@google.com>
Cc: Andrey Ryabinin <aryabinin@virtuozzo.com>
Cc: Christoph Lameter <cl@linux.com>
Cc: Dmitry Vyukov <dvyukov@google.com>
Cc: Mark Rutland <mark.rutland@arm.com>
Cc: Vincenzo Frascino <vincenzo.frascino@arm.com>
Cc: Will Deacon <will.deacon@arm.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
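A minimal sketch of the caller pattern the message describes (illustrative names only, not code taken from nf_ct_ext_add()): the caller compares krealloc()'s return value with the old pointer and keeps using the old pointer when they match, which only works correctly if an object that is not actually reallocated keeps its tag.

#include <linux/slab.h>
#include <linux/string.h>

/*
 * Illustrative sketch only. With this fix, when krealloc() reuses the same
 * slab object it keeps the old tag, so 'new == old' holds and accesses
 * through 'old' no longer produce tag-mismatch false positives.
 */
static char *grow_buffer(char *old, size_t old_len, size_t new_len)
{
	char *new = krealloc(old, new_len, GFP_KERNEL);

	if (!new)
		return NULL;

	if (new == old) {
		/* Same object, same tag: writing through 'old' stays valid. */
		memset(old + old_len, 0, new_len - old_len);
	} else {
		/* Actually reallocated: only 'new' may be used from here on. */
		memset(new + old_len, 0, new_len - old_len);
	}
	return new;
}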
commit a3fe7cdf02
parent 96fedce27e
@@ -347,28 +347,43 @@ void kasan_poison_object_data(struct kmem_cache *cache, void *object)
 }
 
 /*
- * Since it's desirable to only call object contructors once during slab
- * allocation, we preassign tags to all such objects. Also preassign tags for
- * SLAB_TYPESAFE_BY_RCU slabs to avoid use-after-free reports.
- * For SLAB allocator we can't preassign tags randomly since the freelist is
- * stored as an array of indexes instead of a linked list. Assign tags based
- * on objects indexes, so that objects that are next to each other get
- * different tags.
- * After a tag is assigned, the object always gets allocated with the same tag.
- * The reason is that we can't change tags for objects with constructors on
- * reallocation (even for non-SLAB_TYPESAFE_BY_RCU), because the constructor
- * code can save the pointer to the object somewhere (e.g. in the object
- * itself). Then if we retag it, the old saved pointer will become invalid.
+ * This function assigns a tag to an object considering the following:
+ * 1. A cache might have a constructor, which might save a pointer to a slab
+ *    object somewhere (e.g. in the object itself). We preassign a tag for
+ *    each object in caches with constructors during slab creation and reuse
+ *    the same tag each time a particular object is allocated.
+ * 2. A cache might be SLAB_TYPESAFE_BY_RCU, which means objects can be
+ *    accessed after being freed. We preassign tags for objects in these
+ *    caches as well.
+ * 3. For SLAB allocator we can't preassign tags randomly since the freelist
+ *    is stored as an array of indexes instead of a linked list. Assign tags
+ *    based on objects indexes, so that objects that are next to each other
+ *    get different tags.
  */
-static u8 assign_tag(struct kmem_cache *cache, const void *object, bool new)
+static u8 assign_tag(struct kmem_cache *cache, const void *object,
+			bool init, bool krealloc)
 {
-	if (!cache->ctor && !(cache->flags & SLAB_TYPESAFE_BY_RCU))
-		return new ? KASAN_TAG_KERNEL : random_tag();
+	/* Reuse the same tag for krealloc'ed objects. */
+	if (krealloc)
+		return get_tag(object);
 
+	/*
+	 * If the cache neither has a constructor nor has SLAB_TYPESAFE_BY_RCU
+	 * set, assign a tag when the object is being allocated (init == false).
+	 */
+	if (!cache->ctor && !(cache->flags & SLAB_TYPESAFE_BY_RCU))
+		return init ? KASAN_TAG_KERNEL : random_tag();
+
+	/* For caches that either have a constructor or SLAB_TYPESAFE_BY_RCU: */
 #ifdef CONFIG_SLAB
+	/* For SLAB assign tags based on the object index in the freelist. */
 	return (u8)obj_to_index(cache, virt_to_page(object), (void *)object);
 #else
-	return new ? random_tag() : get_tag(object);
+	/*
+	 * For SLUB assign a random tag during slab creation, otherwise reuse
+	 * the already assigned tag.
+	 */
+	return init ? random_tag() : get_tag(object);
 #endif
 }
 
@@ -384,7 +399,8 @@ void * __must_check kasan_init_slab_obj(struct kmem_cache *cache,
 	__memset(alloc_info, 0, sizeof(*alloc_info));
 
 	if (IS_ENABLED(CONFIG_KASAN_SW_TAGS))
-		object = set_tag(object, assign_tag(cache, object, true));
+		object = set_tag(object,
+				assign_tag(cache, object, true, false));
 
 	return (void *)object;
 }
@@ -450,8 +466,8 @@ bool kasan_slab_free(struct kmem_cache *cache, void *object, unsigned long ip)
 	return __kasan_slab_free(cache, object, ip, true);
 }
 
-void * __must_check kasan_kmalloc(struct kmem_cache *cache, const void *object,
-				size_t size, gfp_t flags)
+static void *__kasan_kmalloc(struct kmem_cache *cache, const void *object,
+				size_t size, gfp_t flags, bool krealloc)
 {
 	unsigned long redzone_start;
 	unsigned long redzone_end;
@@ -469,7 +485,7 @@ void * __must_check kasan_kmalloc(struct kmem_cache *cache, const void *object,
 			KASAN_SHADOW_SCALE_SIZE);
 
 	if (IS_ENABLED(CONFIG_KASAN_SW_TAGS))
-		tag = assign_tag(cache, object, false);
+		tag = assign_tag(cache, object, false, krealloc);
 
 	/* Tag is ignored in set_tag without CONFIG_KASAN_SW_TAGS */
 	kasan_unpoison_shadow(set_tag(object, tag), size);
@@ -481,6 +497,12 @@ void * __must_check kasan_kmalloc(struct kmem_cache *cache, const void *object,
 
 	return set_tag(object, tag);
 }
+
+void * __must_check kasan_kmalloc(struct kmem_cache *cache, const void *object,
+					size_t size, gfp_t flags)
+{
+	return __kasan_kmalloc(cache, object, size, flags, false);
+}
 EXPORT_SYMBOL(kasan_kmalloc);
 
 void * __must_check kasan_kmalloc_large(const void *ptr, size_t size,
@@ -520,7 +542,8 @@ void * __must_check kasan_krealloc(const void *object, size_t size, gfp_t flags)
 	if (unlikely(!PageSlab(page)))
 		return kasan_kmalloc_large(object, size, flags);
 	else
-		return kasan_kmalloc(page->slab_cache, object, size, flags);
+		return __kasan_kmalloc(page->slab_cache, object, size,
+					flags, true);
 }
 
 void kasan_poison_kfree(void *ptr, unsigned long ip)
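As a follow-up illustration of the resulting behavior, here is a hedged sketch of a sanity check (not an actual kernel selftest), assuming a CONFIG_KASAN_SW_TAGS build: an in-place krealloc() within the same kmalloc size class should now return a pointer that compares equal to the original, tag included.

#include <linux/kernel.h>
#include <linux/slab.h>

/*
 * Sketch only: 100 and 120 bytes both fall into the 128-byte kmalloc cache,
 * so krealloc() reuses the same object. With this fix the returned pointer
 * keeps the old tag, so the pointers compare equal.
 */
static void krealloc_same_object_check(void)
{
	char *p = kmalloc(100, GFP_KERNEL);
	char *q;

	if (!p)
		return;

	q = krealloc(p, 120, GFP_KERNEL);
	WARN_ON(q != p);	/* same object, same tag expected */

	kfree(q);
}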