Message-Id: <20190829083132.22394-13-duyuyang@gmail.com>
Date: Thu, 29 Aug 2019 16:31:14 +0800
From: Yuyang Du <duyuyang@...il.com>
To: peterz@...radead.org, will.deacon@....com, mingo@...nel.org
Cc: bvanassche@....org, ming.lei@...hat.com, frederic@...nel.org,
tglx@...utronix.de, linux-kernel@...r.kernel.org,
longman@...hat.com, paulmck@...ux.vnet.ibm.com,
boqun.feng@...il.com, Yuyang Du <duyuyang@...il.com>
Subject: [PATCH v4 12/30] locking/lockdep: Specify the depth of current lock stack in lookup_chain_cache_add()
When looking up and adding a chain (i.e., in lookup_chain_cache_add() and
only in it), explicitly pass the depth of the held lock stack from which
the chain is formed. For now, the depth passed always equals
curr->lockdep_depth.
No functional change.
Signed-off-by: Yuyang Du <duyuyang@...il.com>
---
kernel/locking/lockdep.c | 48 ++++++++++++++++++++++++++----------------------
1 file changed, 26 insertions(+), 22 deletions(-)
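
[Note, not part of the commit message: a small worked example of the new
depth handling, using illustrative values only; all function and field
names are taken from this patch. Suppose depth == 4 (i.e. the caller
passes curr->lockdep_depth == 4) and only the top two held locks share
@hlock's irq_context. get_first_held_lock() then breaks out of its loop
at i == 1 and, as in the existing code, returns i + 1 == 2, so
add_chain_cache() computes:

	chain->depth = depth + 1 - i;	/* 4 + 1 - 2 == 3 */

i.e. the two same-context held locks plus @hlock itself. Passing the
depth explicitly keeps this arithmetic identical while no longer reading
curr->lockdep_depth inside these helpers.]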
diff --git a/kernel/locking/lockdep.c b/kernel/locking/lockdep.c
index 1dda9de..569d3c1 100644
--- a/kernel/locking/lockdep.c
+++ b/kernel/locking/lockdep.c
@@ -2600,12 +2600,12 @@ struct lock_class *lock_chain_get_class(struct lock_chain *chain, int i)
* Returns the index of the first held_lock of the current chain
*/
static inline int get_first_held_lock(struct task_struct *curr,
- struct held_lock *hlock)
+ struct held_lock *hlock, int depth)
{
int i;
struct held_lock *hlock_curr;
- for (i = curr->lockdep_depth - 1; i >= 0; i--) {
+ for (i = depth - 1; i >= 0; i--) {
hlock_curr = curr->held_locks + i;
if (hlock_curr->irq_context != hlock->irq_context)
break;
@@ -2630,12 +2630,12 @@ static u64 print_chain_key_iteration(int class_idx, u64 chain_key)
}
static void
-print_chain_keys_held_locks(struct task_struct *curr, struct held_lock *hlock_next)
+print_chain_keys_held_locks(struct task_struct *curr,
+ struct held_lock *hlock_next, int depth)
{
struct held_lock *hlock;
u64 chain_key = INITIAL_CHAIN_KEY;
- int depth = curr->lockdep_depth;
- int i = get_first_held_lock(curr, hlock_next);
+ int i = get_first_held_lock(curr, hlock_next, depth);
printk("depth: %u (irq_context %u)\n", depth - i + 1,
hlock_next->irq_context);
@@ -2667,8 +2667,8 @@ static void print_chain_keys_chain(struct lock_chain *chain)
}
static void print_collision(struct task_struct *curr,
- struct held_lock *hlock_next,
- struct lock_chain *chain)
+ struct held_lock *hlock_next,
+ struct lock_chain *chain, int depth)
{
pr_warn("\n");
pr_warn("============================\n");
@@ -2679,7 +2679,7 @@ static void print_collision(struct task_struct *curr,
pr_warn("Hash chain already cached but the contents don't match!\n");
pr_warn("Held locks:");
- print_chain_keys_held_locks(curr, hlock_next);
+ print_chain_keys_held_locks(curr, hlock_next, depth);
pr_warn("Locks in cached chain:");
print_chain_keys_chain(chain);
@@ -2695,17 +2695,16 @@ static void print_collision(struct task_struct *curr,
* that there was a collision during the calculation of the chain_key.
* Returns: 0 not passed, 1 passed
*/
-static int check_no_collision(struct task_struct *curr,
- struct held_lock *hlock,
- struct lock_chain *chain)
+static int check_no_collision(struct task_struct *curr, struct held_lock *hlock,
+ struct lock_chain *chain, int depth)
{
#ifdef CONFIG_DEBUG_LOCKDEP
int i, j, id;
- i = get_first_held_lock(curr, hlock);
+ i = get_first_held_lock(curr, hlock, depth);
- if (DEBUG_LOCKS_WARN_ON(chain->depth != curr->lockdep_depth - (i - 1))) {
- print_collision(curr, hlock, chain);
+ if (DEBUG_LOCKS_WARN_ON(chain->depth != depth - (i - 1))) {
+ print_collision(curr, hlock, chain, depth);
return 0;
}
@@ -2713,7 +2712,7 @@ static int check_no_collision(struct task_struct *curr,
id = curr->held_locks[i].class_idx;
if (DEBUG_LOCKS_WARN_ON(chain_hlocks[chain->base + j] != id)) {
- print_collision(curr, hlock, chain);
+ print_collision(curr, hlock, chain, depth);
return 0;
}
}
@@ -2757,7 +2756,7 @@ static struct lock_chain *alloc_lock_chain(void)
*/
static inline struct lock_chain *add_chain_cache(struct task_struct *curr,
struct held_lock *hlock,
- u64 chain_key)
+ u64 chain_key, int depth)
{
struct lock_class *class = hlock_class(hlock);
struct hlist_head *hash_head = chainhashentry(chain_key);
@@ -2783,8 +2782,8 @@ static inline struct lock_chain *add_chain_cache(struct task_struct *curr,
}
chain->chain_key = chain_key;
chain->irq_context = hlock->irq_context;
- i = get_first_held_lock(curr, hlock);
- chain->depth = curr->lockdep_depth + 1 - i;
+ i = get_first_held_lock(curr, hlock, depth);
+ chain->depth = depth + 1 - i;
BUILD_BUG_ON((1UL << 24) <= ARRAY_SIZE(chain_hlocks));
BUILD_BUG_ON((1UL << 6) <= ARRAY_SIZE(curr->held_locks));
@@ -2837,17 +2836,21 @@ static inline struct lock_chain *lookup_chain_cache(u64 chain_key)
* add it and return the chain - in this case the new dependency
* chain will be validated. If the key is already hashed, return
* NULL. (On return with the new chain graph_lock is held.)
+ *
+ * If the key is not hashed, the new chain is composed of @hlock
+ * and @depth worth of the current held lock stack, of which the
+ * held locks are in the same context as @hlock.
*/
static inline struct lock_chain *
lookup_chain_cache_add(struct task_struct *curr, struct held_lock *hlock,
- u64 chain_key)
+ u64 chain_key, int depth)
{
struct lock_class *class = hlock_class(hlock);
struct lock_chain *chain = lookup_chain_cache(chain_key);
if (chain) {
cache_hit:
- if (!check_no_collision(curr, hlock, chain))
+ if (!check_no_collision(curr, hlock, chain, depth))
return NULL;
if (very_verbose(class)) {
@@ -2877,7 +2880,7 @@ static inline struct lock_chain *lookup_chain_cache(u64 chain_key)
goto cache_hit;
}
- return add_chain_cache(curr, hlock, chain_key);
+ return add_chain_cache(curr, hlock, chain_key, depth);
}
static int validate_chain(struct task_struct *curr, struct held_lock *hlock,
@@ -2895,7 +2898,8 @@ static int validate_chain(struct task_struct *curr, struct held_lock *hlock,
* graph_lock for us)
*/
if (!hlock->trylock && hlock->check &&
- (chain = lookup_chain_cache_add(curr, hlock, chain_key))) {
+ (chain = lookup_chain_cache_add(curr, hlock, chain_key,
+ curr->lockdep_depth))) {
/*
* Check whether last held lock:
*
--
1.8.3.1