[<prev] [next>] [<thread-prev] [thread-next>] [day] [month] [year] [list]
Message-ID: <20240203030058.60750-11-kuniyu@amazon.com>
Date: Fri, 2 Feb 2024 19:00:52 -0800
From: Kuniyuki Iwashima <kuniyu@...zon.com>
To: "David S. Miller" <davem@...emloft.net>, Eric Dumazet
<edumazet@...gle.com>, Jakub Kicinski <kuba@...nel.org>, Paolo Abeni
<pabeni@...hat.com>
CC: Kuniyuki Iwashima <kuniyu@...zon.com>, Kuniyuki Iwashima
<kuni1840@...il.com>, <netdev@...r.kernel.org>
Subject: [PATCH v1 net-next 10/16] af_unix: Skip GC if no cycle exists.
Once we run Tarjan's algorithm, we know whether cyclic references
exist. If there is no cycle, we set unix_graph_maybe_cyclic to
false and can skip the entire garbage collection next time.
When finalising each SCC, we set unix_graph_maybe_cyclic to true if
the SCC consists of multiple vertices.
Even if an SCC is a single vertex, a cycle might still exist due to
self-fd passing. To detect this corner case, we could check all edges
of the vertex, but instead we add a new field that counts the number
of self-references, so the decision finishes in O(1).
With this change, __unix_gc() is just a spin_lock dance in normal
usage.
Signed-off-by: Kuniyuki Iwashima <kuniyu@...zon.com>
---
include/net/af_unix.h | 1 +
net/unix/garbage.c | 27 ++++++++++++++++++++++++++-
2 files changed, 27 insertions(+), 1 deletion(-)
diff --git a/include/net/af_unix.h b/include/net/af_unix.h
index b3ba5e949d62..59ec8d7880ce 100644
--- a/include/net/af_unix.h
+++ b/include/net/af_unix.h
@@ -36,6 +36,7 @@ struct unix_vertex {
struct list_head entry;
struct list_head scc_entry;
unsigned long out_degree;
+ unsigned long self_degree;
unsigned long index;
unsigned long lowlink;
};
diff --git a/net/unix/garbage.c b/net/unix/garbage.c
index 0f46df05a019..cbddca5d8dd1 100644
--- a/net/unix/garbage.c
+++ b/net/unix/garbage.c
@@ -106,12 +106,14 @@ void unix_init_vertex(struct unix_sock *u)
struct unix_vertex *vertex = &u->vertex;
vertex->out_degree = 0;
+ vertex->self_degree = 0;
INIT_LIST_HEAD(&vertex->edges);
INIT_LIST_HEAD(&vertex->entry);
INIT_LIST_HEAD(&vertex->scc_entry);
}
static bool unix_graph_updated;
+static bool unix_graph_maybe_cyclic;
static void unix_graph_update(struct unix_edge *edge)
{
@@ -122,6 +124,7 @@ static void unix_graph_update(struct unix_edge *edge)
return;
unix_graph_updated = true;
+ unix_graph_maybe_cyclic = true;
}
DEFINE_SPINLOCK(unix_gc_lock);
@@ -159,6 +162,9 @@ void unix_add_edges(struct scm_fp_list *fpl, struct unix_sock *receiver)
edge->predecessor = &inflight->vertex;
edge->successor = successor;
+ if (edge->predecessor == edge->successor)
+ edge->predecessor->self_degree++;
+
if (!edge->predecessor->out_degree++) {
edge->predecessor->index = unix_vertex_unvisited_index;
@@ -199,6 +205,9 @@ void unix_del_edges(struct scm_fp_list *fpl)
if (!--edge->predecessor->out_degree)
list_del_init(&edge->predecessor->entry);
+
+ if (edge->predecessor == edge->successor)
+ edge->predecessor->self_degree--;
}
WRITE_ONCE(unix_tot_inflight, unix_tot_inflight - fpl->count_unix);
@@ -218,6 +227,9 @@ void unix_update_edges(struct unix_sock *receiver)
list_for_each_entry(edge, &receiver->vertex.edges, embryo_entry) {
unix_graph_update(edge);
+ if (edge->predecessor == edge->successor)
+ edge->predecessor->self_degree--;
+
edge->successor = &receiver->vertex;
}
@@ -293,6 +305,14 @@ static void __unix_walk_scc(struct unix_vertex *vertex)
vertex->index = unix_vertex_grouped_index;
}
+ if (!list_is_singular(&scc)) {
+ unix_graph_maybe_cyclic = true;
+ } else {
+ vertex = list_first_entry(&scc, typeof(*vertex), scc_entry);
+ if (vertex->self_degree)
+ unix_graph_maybe_cyclic = true;
+ }
+
list_del(&scc);
}
@@ -308,6 +328,8 @@ static void __unix_walk_scc(struct unix_vertex *vertex)
static void unix_walk_scc(void)
{
+ unix_graph_maybe_cyclic = false;
+
while (!list_empty(&unix_unvisited_vertices)) {
struct unix_vertex *vertex;
@@ -485,6 +507,9 @@ static void __unix_gc(struct work_struct *work)
spin_lock(&unix_gc_lock);
+ if (!unix_graph_maybe_cyclic)
+ goto skip_gc;
+
if (unix_graph_updated)
unix_walk_scc();
else
@@ -573,7 +598,7 @@ static void __unix_gc(struct work_struct *work)
/* All candidates should have been detached by now. */
WARN_ON_ONCE(!list_empty(&gc_candidates));
-
+skip_gc:
/* Paired with READ_ONCE() in wait_for_unix_gc(). */
WRITE_ONCE(gc_in_progress, false);
--
2.30.2
Powered by blists - more mailing lists