Merge branch 'locking-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull locking updates from Ingo Molnar:
 "The main changes in this cycle are initial preparatory bits of dynamic
  lockdep keys support from Bart Van Assche.

  There are also misc changes, a comment cleanup and a data structure
  cleanup"

* 'locking-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  sched/fair: Clean up comment in nohz_idle_balance()
  locking/lockdep: Stop using RCU primitives to access 'all_lock_classes'
  locking/lockdep: Make concurrent lockdep_reset_lock() calls safe
  locking/lockdep: Remove a superfluous INIT_LIST_HEAD() statement
  locking/lockdep: Introduce lock_class_cache_is_registered()
  locking/lockdep: Inline __lockdep_init_map()
  locking/lockdep: Declare local symbols static
  tools/lib/lockdep/tests: Test the lockdep_reset_lock() implementation
  tools/lib/lockdep: Add dummy print_irqtrace_events() implementation
  tools/lib/lockdep: Rename "trywlock" into "trywrlock"
  tools/lib/lockdep/tests: Run lockdep tests a second time under Valgrind
  tools/lib/lockdep/tests: Improve testing accuracy
  tools/lib/lockdep/tests: Fix shellcheck warnings
  tools/lib/lockdep/tests: Display compiler warning and error messages
  locking/lockdep: Remove ::version from lock_class structure
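The "locking/lockdep: Declare local symbols static" change relies on a small preprocessor idiom: a symbol is given internal (static) linkage unless a debug configuration needs to reference it from another translation unit. A minimal standalone sketch of that idiom follows; it is not kernel code, and CONFIG_DEBUG_EXAMPLE and nr_items are made-up names standing in for CONFIG_DEBUG_LOCKDEP and the lock_classes[] declaration in the first hunk below.

#include <stdio.h>

/*
 * Internal linkage unless the debug option is defined, mirroring the
 * "#ifndef CONFIG_DEBUG_LOCKDEP / static / #endif" lines added before
 * lock_classes[] in the diff below.
 */
#ifndef CONFIG_DEBUG_EXAMPLE
static
#endif
unsigned long nr_items;

int main(void)
{
	nr_items = 3;
	printf("nr_items = %lu\n", nr_items);
	return 0;
}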
--- a/kernel/locking/lockdep.c
+++ b/kernel/locking/lockdep.c
@@ -138,6 +138,9 @@ static struct lock_list list_entries[MAX_LOCKDEP_ENTRIES];
  * get freed - this significantly simplifies the debugging code.
  */
 unsigned long nr_lock_classes;
+#ifndef CONFIG_DEBUG_LOCKDEP
+static
+#endif
 struct lock_class lock_classes[MAX_LOCKDEP_KEYS];
 
 static inline struct lock_class *hlock_class(struct held_lock *hlock)
@@ -626,7 +629,8 @@ static int static_obj(void *obj)
 
 /*
  * To make lock name printouts unique, we calculate a unique
- * class->name_version generation counter:
+ * class->name_version generation counter. The caller must hold the graph
+ * lock.
  */
 static int count_matching_names(struct lock_class *new_class)
 {
@@ -636,7 +640,7 @@ static int count_matching_names(struct lock_class *new_class)
 	if (!new_class->name)
 		return 0;
 
-	list_for_each_entry_rcu(class, &all_lock_classes, lock_entry) {
+	list_for_each_entry(class, &all_lock_classes, lock_entry) {
 		if (new_class->key - new_class->subclass == class->key)
 			return class->name_version;
 		if (class->name && !strcmp(class->name, new_class->name))
@@ -789,7 +793,6 @@ register_lock_class(struct lockdep_map *lock, unsigned int subclass, int force)
 	class->key = key;
 	class->name = lock->name;
 	class->subclass = subclass;
-	INIT_LIST_HEAD(&class->lock_entry);
 	INIT_LIST_HEAD(&class->locks_before);
 	INIT_LIST_HEAD(&class->locks_after);
 	class->name_version = count_matching_names(class);
@@ -801,7 +804,7 @@ register_lock_class(struct lockdep_map *lock, unsigned int subclass, int force)
 	/*
 	 * Add it to the global list of classes:
 	 */
-	list_add_tail_rcu(&class->lock_entry, &all_lock_classes);
+	list_add_tail(&class->lock_entry, &all_lock_classes);
 
 	if (verbose(class)) {
 		graph_unlock();
@@ -3088,7 +3091,7 @@ static int mark_lock(struct task_struct *curr, struct held_lock *this,
 /*
  * Initialize a lock instance's lock-class mapping info:
  */
-static void __lockdep_init_map(struct lockdep_map *lock, const char *name,
+void lockdep_init_map(struct lockdep_map *lock, const char *name,
 		      struct lock_class_key *key, int subclass)
 {
 	int i;
@@ -3144,12 +3147,6 @@ static void __lockdep_init_map(struct lockdep_map *lock, const char *name,
 		raw_local_irq_restore(flags);
 	}
 }
-
-void lockdep_init_map(struct lockdep_map *lock, const char *name,
-		      struct lock_class_key *key, int subclass)
-{
-	__lockdep_init_map(lock, name, key, subclass);
-}
 EXPORT_SYMBOL_GPL(lockdep_init_map);
 
 struct lock_class_key __lockdep_no_validate__;
@@ -4126,6 +4123,9 @@ void lockdep_reset(void)
 	raw_local_irq_restore(flags);
 }
 
+/*
+ * Remove all references to a lock class. The caller must hold the graph lock.
+ */
 static void zap_class(struct lock_class *class)
 {
 	int i;
@@ -4142,7 +4142,7 @@ static void zap_class(struct lock_class *class)
 	 * Unhash the class and remove it from the all_lock_classes list:
 	 */
 	hlist_del_rcu(&class->hash_entry);
-	list_del_rcu(&class->lock_entry);
+	list_del(&class->lock_entry);
 
 	RCU_INIT_POINTER(class->key, NULL);
 	RCU_INIT_POINTER(class->name, NULL);
@@ -4204,15 +4204,36 @@ void lockdep_free_key_range(void *start, unsigned long size)
 	 */
 }
 
-void lockdep_reset_lock(struct lockdep_map *lock)
+/*
+ * Check whether any element of the @lock->class_cache[] array refers to a
+ * registered lock class. The caller must hold either the graph lock or the
+ * RCU read lock.
+ */
+static bool lock_class_cache_is_registered(struct lockdep_map *lock)
 {
 	struct lock_class *class;
 	struct hlist_head *head;
-	unsigned long flags;
 	int i, j;
-	int locked;
+
+	for (i = 0; i < CLASSHASH_SIZE; i++) {
+		head = classhash_table + i;
+		hlist_for_each_entry_rcu(class, head, hash_entry) {
+			for (j = 0; j < NR_LOCKDEP_CACHING_CLASSES; j++)
+				if (lock->class_cache[j] == class)
+					return true;
+		}
+	}
+	return false;
+}
+
+void lockdep_reset_lock(struct lockdep_map *lock)
+{
+	struct lock_class *class;
+	unsigned long flags;
+	int j, locked;
 
 	raw_local_irq_save(flags);
+	locked = graph_lock();
 
 	/*
 	 * Remove all classes this lock might have:
@@ -4229,25 +4250,14 @@ void lockdep_reset_lock(struct lockdep_map *lock)
 	 * Debug check: in the end all mapped classes should
 	 * be gone.
 	 */
-	locked = graph_lock();
-	for (i = 0; i < CLASSHASH_SIZE; i++) {
-		head = classhash_table + i;
-		hlist_for_each_entry_rcu(class, head, hash_entry) {
-			int match = 0;
-
-			for (j = 0; j < NR_LOCKDEP_CACHING_CLASSES; j++)
-				match |= class == lock->class_cache[j];
-
-			if (unlikely(match)) {
-				if (debug_locks_off_graph_unlock()) {
-					/*
-					 * We all just reset everything, how did it match?
-					 */
-					WARN_ON(1);
-				}
-				goto out_restore;
-			}
-		}
-	}
+	if (unlikely(lock_class_cache_is_registered(lock))) {
+		if (debug_locks_off_graph_unlock()) {
+			/*
+			 * We all just reset everything, how did it match?
+			 */
+			WARN_ON(1);
+		}
+		goto out_restore;
+	}
 	if (locked)
 		graph_unlock();
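A note on the list-handling changes above ("Stop using RCU primitives to access 'all_lock_classes'" and the list_add_tail()/list_del()/list_for_each_entry() conversions): the accesses touched here already run with the graph lock held, so the RCU list variants add nothing. A rough userspace analogy of that reasoning is sketched below; it is illustrative only, not the kernel API, and the registry list, the registry_lock mutex (standing in for the graph lock) and the entry type are made up. Build with -pthread.

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Made-up registry entry, loosely analogous to struct lock_class. */
struct entry {
	struct entry *next;
	char name[32];
};

static struct entry *registry;			/* analogous to all_lock_classes */
static pthread_mutex_t registry_lock = PTHREAD_MUTEX_INITIALIZER;

/*
 * A plain traversal is enough because the caller must hold registry_lock,
 * just as count_matching_names() now documents that its caller must hold
 * the graph lock.
 */
static int count_matching_names(const char *name)
{
	struct entry *e;
	int n = 0;

	for (e = registry; e; e = e->next)
		if (strcmp(e->name, name) == 0)
			n++;
	return n;
}

static void register_entry(const char *name)
{
	struct entry *e = calloc(1, sizeof(*e));

	if (!e)
		return;
	snprintf(e->name, sizeof(e->name), "%s", name);

	pthread_mutex_lock(&registry_lock);
	e->next = registry;		/* plain insertion under the lock */
	registry = e;
	pthread_mutex_unlock(&registry_lock);
}

int main(void)
{
	register_entry("sk_lock");
	register_entry("sk_lock");

	pthread_mutex_lock(&registry_lock);
	printf("matches: %d\n", count_matching_names("sk_lock"));
	pthread_mutex_unlock(&registry_lock);
	return 0;
}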