radix-tree: use local locks
The preload functionality uses per-CPU variables and disables preemption to ensure that it does not switch CPUs during its use. This patch uses local_lock() instead of preempt_disable() for the same purpose, and to remain preemptible on -RT.

Cc: stable-rt@vger.kernel.org
Reported-and-debugged-by: Mike Galbraith <efault@gmx.de>
Signed-off-by: Sebastian Andrzej Siewior <bigeasy@linutronix.de>
parent f7d6ec8d74
commit 8d2a55bcd6
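In outline, the conversion looks like this: everywhere the code previously pinned itself to a CPU with preempt_disable(), it now takes a named per-CPU local lock. On a non-RT kernel, local_lock() falls back to preempt_disable(), so behaviour there is unchanged; on PREEMPT_RT it takes a per-CPU spinlock, which keeps the section preemptible while still serializing access to this CPU's preload cache. Schematically (a sketch of the conversion pattern, not the patch itself):

/* Before: access to the per-CPU preload cache is guarded by
 * disabling preemption, which is not acceptable on -RT. */
preempt_disable();
rtp = this_cpu_ptr(&radix_tree_preloads);
/* ... use rtp ... */
preempt_enable();

/* After: a named local lock from the -RT tree's <linux/locallock.h>.
 * Non-RT kernels map local_lock()/local_unlock() back to
 * preempt_disable()/preempt_enable(). */
static DEFINE_LOCAL_IRQ_LOCK(radix_tree_preloads_lock);

local_lock(radix_tree_preloads_lock);
rtp = this_cpu_ptr(&radix_tree_preloads);
/* ... use rtp ... */
local_unlock(radix_tree_preloads_lock);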
--- a/include/linux/idr.h
+++ b/include/linux/idr.h
@@ -169,10 +169,7 @@ static inline bool idr_is_empty(const struct idr *idr)
  * Each idr_preload() should be matched with an invocation of this
  * function. See idr_preload() for details.
  */
-static inline void idr_preload_end(void)
-{
-	preempt_enable();
-}
+void idr_preload_end(void);
 
 /**
  * idr_for_each_entry() - Iterate over an IDR's elements of a given type.
--- a/include/linux/radix-tree.h
+++ b/include/linux/radix-tree.h
@@ -226,6 +226,7 @@ unsigned int radix_tree_gang_lookup(const struct radix_tree_root *,
 			unsigned int max_items);
 int radix_tree_preload(gfp_t gfp_mask);
 int radix_tree_maybe_preload(gfp_t gfp_mask);
+void radix_tree_preload_end(void);
 void radix_tree_init(void);
 void *radix_tree_tag_set(struct radix_tree_root *,
 			unsigned long index, unsigned int tag);
@@ -243,11 +244,6 @@ unsigned int radix_tree_gang_lookup_tag_slot(const struct radix_tree_root *,
 			unsigned int max_items, unsigned int tag);
 int radix_tree_tagged(const struct radix_tree_root *, unsigned int tag);
 
-static inline void radix_tree_preload_end(void)
-{
-	preempt_enable();
-}
-
 void __rcu **idr_get_free(struct radix_tree_root *root,
 			struct radix_tree_iter *iter, gfp_t gfp,
 			unsigned long max);
--- a/lib/radix-tree.c
+++ b/lib/radix-tree.c
@@ -26,7 +26,7 @@
 #include <linux/slab.h>
 #include <linux/string.h>
 #include <linux/xarray.h>
-
+#include <linux/locallock.h>
 
 /*
  * Radix tree node cache.
@@ -72,6 +72,7 @@ struct radix_tree_preload {
 	struct radix_tree_node *nodes;
 };
 static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
+static DEFINE_LOCAL_IRQ_LOCK(radix_tree_preloads_lock);
 
 static inline struct radix_tree_node *entry_to_node(void *ptr)
 {
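DEFINE_LOCAL_IRQ_LOCK() comes from the -RT tree's <linux/locallock.h>; it is not in mainline. Roughly (quoted from memory, so treat the details as an approximation): on PREEMPT_RT the lock is real per-CPU storage around a spinlock (itself rtmutex-backed there), while on non-RT kernels the storage compiles away and the operations collapse to the old preempt primitives:

#ifdef CONFIG_PREEMPT_RT_BASE
struct local_irq_lock {
	spinlock_t		lock;		/* rtmutex-backed on -RT */
	struct task_struct	*owner;
	int			nestcnt;
	unsigned long		flags;
};

#define DEFINE_LOCAL_IRQ_LOCK(lvar)				\
	DEFINE_PER_CPU(struct local_irq_lock, lvar) = {		\
		.lock = __SPIN_LOCK_UNLOCKED((lvar).lock) }
#else
/* non-RT: no real storage, exactly the old semantics */
#define DEFINE_LOCAL_IRQ_LOCK(lvar)	__LOCAL_IRQ_LOCK_UNUSED(lvar)
#define local_lock(lvar)		preempt_disable()
#define local_unlock(lvar)		preempt_enable()
#endif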
@@ -269,12 +270,13 @@ radix_tree_node_alloc(gfp_t gfp_mask, struct radix_tree_node *parent,
 		 * succeed in getting a node here (and never reach
 		 * kmem_cache_alloc)
 		 */
-		rtp = this_cpu_ptr(&radix_tree_preloads);
+		rtp = &get_locked_var(radix_tree_preloads_lock, radix_tree_preloads);
 		if (rtp->nr) {
 			ret = rtp->nodes;
 			rtp->nodes = ret->parent;
 			rtp->nr--;
 		}
+		put_locked_var(radix_tree_preloads_lock, radix_tree_preloads);
 		/*
 		 * Update the allocation stack trace as this is more useful
 		 * for debugging.
 		 */
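get_locked_var()/put_locked_var() bundle taking the local lock with resolving the per-CPU variable, so the pointer can only be obtained with the lock held. In the -RT locallock.h the pair is, roughly (again a from-memory sketch):

#define get_locked_var(lvar, var)		\
	(*({					\
		local_lock(lvar);		\
		this_cpu_ptr(&var);		\
	}))

#define put_locked_var(lvar, var)	local_unlock(lvar)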
@@ -340,14 +342,14 @@ static __must_check int __radix_tree_preload(gfp_t gfp_mask, unsigned nr)
 	 */
 	gfp_mask &= ~__GFP_ACCOUNT;
 
-	preempt_disable();
+	local_lock(radix_tree_preloads_lock);
 	rtp = this_cpu_ptr(&radix_tree_preloads);
 	while (rtp->nr < nr) {
-		preempt_enable();
+		local_unlock(radix_tree_preloads_lock);
 		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
 		if (node == NULL)
 			goto out;
-		preempt_disable();
+		local_lock(radix_tree_preloads_lock);
 		rtp = this_cpu_ptr(&radix_tree_preloads);
 		if (rtp->nr < nr) {
 			node->parent = rtp->nodes;
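Note the shape of the loop above: the local lock is dropped around kmem_cache_alloc() because the allocation may sleep, and after reacquiring the lock rtp is deliberately re-read with this_cpu_ptr(), since the task may have migrated to a different CPU while unlocked. Reduced to its essentials (a generic sketch, with alloc_obj()/push() as hypothetical stand-ins):

local_lock(lock);				/* pin this CPU's cache */
cache = this_cpu_ptr(&pcpu_cache);
while (cache->nr < wanted) {
	local_unlock(lock);			/* allocation may sleep */
	obj = alloc_obj();			/* we may migrate CPUs here */
	if (!obj)
		break;
	local_lock(lock);
	cache = this_cpu_ptr(&pcpu_cache);	/* re-read: maybe a new CPU */
	push(cache, obj);
}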
@@ -389,11 +391,17 @@ int radix_tree_maybe_preload(gfp_t gfp_mask)
 	if (gfpflags_allow_blocking(gfp_mask))
 		return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
 	/* Preloading doesn't help anything with this gfp mask, skip it */
-	preempt_disable();
+	local_lock(radix_tree_preloads_lock);
 	return 0;
 }
 EXPORT_SYMBOL(radix_tree_maybe_preload);
 
+void radix_tree_preload_end(void)
+{
+	local_unlock(radix_tree_preloads_lock);
+}
+EXPORT_SYMBOL(radix_tree_preload_end);
+
 static unsigned radix_tree_load_root(const struct radix_tree_root *root,
 		struct radix_tree_node **nodep, unsigned long *maxindex)
 {
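For callers nothing changes: radix_tree_preload() still returns with the preload section entered (now the local lock instead of disabled preemption), and radix_tree_preload_end() leaves it. A typical insertion site looks like this (my_tree, my_lock, index and item are hypothetical caller-side names):

int err = radix_tree_preload(GFP_KERNEL);	/* returns with preload section entered */
if (err)
	return err;

spin_lock(&my_lock);				/* caller's own tree lock */
err = radix_tree_insert(&my_tree, index, item);
spin_unlock(&my_lock);

radix_tree_preload_end();			/* was preempt_enable(), now local_unlock() */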
@@ -1478,10 +1486,16 @@ EXPORT_SYMBOL(radix_tree_tagged);
 void idr_preload(gfp_t gfp_mask)
 {
 	if (__radix_tree_preload(gfp_mask, IDR_PRELOAD_SIZE))
-		preempt_disable();
+		local_lock(radix_tree_preloads_lock);
 }
 EXPORT_SYMBOL(idr_preload);
 
+void idr_preload_end(void)
+{
+	local_unlock(radix_tree_preloads_lock);
+}
+EXPORT_SYMBOL(idr_preload_end);
+
 void __rcu **idr_get_free(struct radix_tree_root *root,
 			struct radix_tree_iter *iter, gfp_t gfp,
 			unsigned long max)
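The idr side follows the same pattern; per the idr_preload() kerneldoc, allocations between idr_preload() and idr_preload_end() use GFP_NOWAIT and fall back to the preloaded nodes (my_idr, my_lock and ptr are hypothetical caller-side names):

int id;

idr_preload(GFP_KERNEL);
spin_lock(&my_lock);
id = idr_alloc(&my_idr, ptr, 0, 0, GFP_NOWAIT);
spin_unlock(&my_lock);
idr_preload_end();
if (id < 0)
	return id;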