Message-Id: <20240710102959.291a71349ec71ef48919a9fe@kernel.org>
Date: Wed, 10 Jul 2024 10:29:59 +0900
From: Masami Hiramatsu (Google) <mhiramat@...nel.org>
To: Peter Zijlstra <peterz@...radead.org>
Cc: mingo@...nel.org, andrii@...nel.org, linux-kernel@...r.kernel.org,
rostedt@...dmis.org, mhiramat@...nel.org, oleg@...hat.com,
jolsa@...nel.org, clm@...a.com, paulmck@...nel.org
Subject: Re: [PATCH 03/10] rbtree: Provide rb_find_rcu() / rb_find_add_rcu()
On Mon, 08 Jul 2024 11:12:44 +0200
Peter Zijlstra <peterz@...radead.org> wrote:
> Much like latch_tree, add two RCU methods for the regular RB-tree,
> which can be used in conjunction with a seqcount to provide lockless
> lookups.
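
If I understand correctly, the intended usage with a seqcount would be
something like the following (untested sketch; the names my_tree, my_seq,
my_cmp and my_lookup are made up, and the caller is assumed to already be
in an RCU read-side critical section so nodes cannot be freed under it):

	static struct rb_root my_tree;
	static seqcount_spinlock_t my_seq;	/* written under the tree's lock */
	static int my_cmp(const void *key, const struct rb_node *node);

	static struct rb_node *my_lookup(const void *key)
	{
		struct rb_node *node;
		unsigned int seq;

		do {
			seq = read_seqcount_begin(&my_seq);
			node = rb_find_rcu(key, &my_tree, my_cmp);
			if (node)
				break;
			/*
			 * A concurrent rotation can make the descent miss an
			 * existing node (false negative), so only a failed
			 * lookup is retried.
			 */
		} while (read_seqcount_retry(&my_seq, seq));

		return node;
	}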
>
> Signed-off-by: Peter Zijlstra (Intel) <peterz@...radead.org>
> ---
> include/linux/rbtree.h | 67 +++++++++++++++++++++++++++++++++++++++++++++++++
> 1 file changed, 67 insertions(+)
>
> --- a/include/linux/rbtree.h
> +++ b/include/linux/rbtree.h
> @@ -245,6 +245,42 @@ rb_find_add(struct rb_node *node, struct
> }
>
> /**
> + * rb_find_add_rcu() - find equivalent @node in @tree, or add @node
> + * @node: node to look-for / insert
> + * @tree: tree to search / modify
> + * @cmp: operator defining the node order
> + *
> + * Adds a Store-Release for link_node.
> + *
> + * Returns the rb_node matching @node, or NULL when no match is found and @node
> + * is inserted.
> + */
> +static __always_inline struct rb_node *
> +rb_find_add_rcu(struct rb_node *node, struct rb_root *tree,
> +		int (*cmp)(struct rb_node *, const struct rb_node *))
> +{
> +	struct rb_node **link = &tree->rb_node;
> +	struct rb_node *parent = NULL;
> +	int c;
> +
> +	while (*link) {
Don't we need to use rcu_dereference_raw(*link) here?
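I.e., something along these lines (just an untested sketch of what I mean):

	while ((parent = rcu_dereference_raw(*link))) {
		c = cmp(node, parent);

		if (c < 0)
			link = &parent->rb_left;
		else if (c > 0)
			link = &parent->rb_right;
		else
			return parent;
	}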
> +		parent = *link;
> +		c = cmp(node, parent);
> +
> +		if (c < 0)
> +			link = &parent->rb_left;
> +		else if (c > 0)
> +			link = &parent->rb_right;
> +		else
> +			return parent;
> +	}
> +
> +	rb_link_node_rcu(node, parent, link);
> +	rb_insert_color(node, tree);
> +	return NULL;
> +}
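
(For reference, the Store-Release mentioned in the comment comes from the
existing rb_link_node_rcu() helper in rbtree.h, which as far as I recall
publishes the node roughly like this:

	static inline void rb_link_node_rcu(struct rb_node *node,
					    struct rb_node *parent,
					    struct rb_node **rb_link)
	{
		node->__rb_parent_color = (unsigned long)parent;
		node->rb_left = node->rb_right = NULL;

		/* Store-release so readers see a fully initialised node. */
		rcu_assign_pointer(*rb_link, node);
	}

while the descent above reads the child pointers with plain loads.)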
> +
> +/**
> * rb_find() - find @key in tree @tree
> * @key: key to match
> * @tree: tree to search
> @@ -268,6 +304,37 @@ rb_find(const void *key, const struct rb
>  		else
>  			return node;
>  	}
> +
> +	return NULL;
> +}
> +
> +/**
> + * rb_find_rcu() - find @key in tree @tree
> + * @key: key to match
> + * @tree: tree to search
> + * @cmp: operator defining the node order
> + *
> + * Notably, tree descent vs concurrent tree rotations is unsound and can result
> + * in false-negatives.
> + *
> + * Returns the rb_node matching @key or NULL.
> + */
> +static __always_inline struct rb_node *
> +rb_find_rcu(const void *key, const struct rb_root *tree,
> +	    int (*cmp)(const void *key, const struct rb_node *))
> +{
> +	struct rb_node *node = tree->rb_node;
> +
> +	while (node) {
> +		int c = cmp(key, node);
> +
> +		if (c < 0)
> +			node = rcu_dereference_raw(node->rb_left);
> +		else if (c > 0)
> +			node = rcu_dereference_raw(node->rb_right);
> +		else
> +			return node;
> +	}
> +
>  	return NULL;
>  }
>
>
--
Masami Hiramatsu (Google) <mhiramat@...nel.org>