Diffstat (limited to 'kernel')

 kernel/lockdep.c | 87
 1 file changed, 54 insertions(+), 33 deletions(-)
diff --git a/kernel/lockdep.c b/kernel/lockdep.c
index c1f34addd003..9bad17884513 100644
--- a/kernel/lockdep.c
+++ b/kernel/lockdep.c
@@ -1104,7 +1104,7 @@ extern void __error_too_big_MAX_LOCKDEP_SUBCLASSES(void);
  * itself, so actual lookup of the hash should be once per lock object.
  */
 static inline struct lock_class *
-register_lock_class(struct lockdep_map *lock, unsigned int subclass)
+look_up_lock_class(struct lockdep_map *lock, unsigned int subclass)
 {
 	struct lockdep_subclass_key *key;
 	struct list_head *hash_head;
@@ -1148,7 +1148,26 @@ register_lock_class(struct lockdep_map *lock, unsigned int subclass)
 	 */
 	list_for_each_entry(class, hash_head, hash_entry)
 		if (class->key == key)
-			goto out_set;
+			return class;
+
+	return NULL;
+}
+
+/*
+ * Register a lock's class in the hash-table, if the class is not present
+ * yet. Otherwise we look it up. We cache the result in the lock object
+ * itself, so actual lookup of the hash should be once per lock object.
+ */
+static inline struct lock_class *
+register_lock_class(struct lockdep_map *lock, unsigned int subclass)
+{
+	struct lockdep_subclass_key *key;
+	struct list_head *hash_head;
+	struct lock_class *class;
+
+	class = look_up_lock_class(lock, subclass);
+	if (likely(class))
+		return class;
 
 	/*
 	 * Debug-check: all keys must be persistent!
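This hunk splits the old register_lock_class() into a lock-free lookup half and a registration half; the lookup half is reused further down by lockdep_reset_lock(). The point of the exercise is the companion change to struct lockdep_map in include/linux/lockdep.h, which is outside this diff: the per-subclass cache array shrinks to a single pointer. A rough, compilable model of the footprint change, assuming MAX_LOCKDEP_SUBCLASSES is 8 and using hypothetical stand-in types:

#include <stdio.h>

/* Stand-in types, for illustration only; not the kernel's definitions. */
struct lock_class_key { char dummy; };
struct lock_class { char dummy; };

#define MAX_LOCKDEP_SUBCLASSES 8	/* assumed value for this era */

/* Before the patch: one cached class pointer per possible subclass. */
struct lockdep_map_old {
	struct lock_class_key *key;
	struct lock_class *class[MAX_LOCKDEP_SUBCLASSES];
	const char *name;
};

/* After the patch: a single cached pointer, used for subclass 0 only. */
struct lockdep_map_new {
	struct lock_class_key *key;
	struct lock_class *class_cache;
	const char *name;
};

int main(void)
{
	printf("per-lock map: %zu -> %zu bytes\n",
	       sizeof(struct lockdep_map_old),
	       sizeof(struct lockdep_map_new));
	return 0;
}

On a 64-bit build that is seven pointers saved in every lock instance that embeds a lockdep_map.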
@@ -1163,6 +1182,9 @@ register_lock_class(struct lockdep_map *lock, unsigned int subclass)
 		return NULL;
 	}
 
+	key = lock->key->subkeys + subclass;
+	hash_head = classhashentry(key);
+
 	__raw_spin_lock(&hash_lock);
 	/*
 	 * We have to do the hash-walk again, to avoid races
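Since the first hash walk now happens inside look_up_lock_class(), register_lock_class() must recompute key and hash_head for itself before retaking hash_lock and re-walking the chain race-free. The sub-key arithmetic works because of the key layout, roughly as below (taken from the lockdep header of this era; shown here as an assumption, since the header is not part of this diff):

/*
 * Each lock_class_key embeds one one-byte sub-key per subclass, so
 * "lock->key->subkeys + subclass" yields a distinct address per
 * (key, subclass) pair, from which classhashentry() derives the bucket.
 */
struct lockdep_subclass_key {
	char __one_byte;
} __attribute__ ((__packed__));

struct lock_class_key {
	struct lockdep_subclass_key subkeys[MAX_LOCKDEP_SUBCLASSES];
};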
@@ -1209,8 +1231,8 @@ register_lock_class(struct lockdep_map *lock, unsigned int subclass)
 out_unlock_set:
 	__raw_spin_unlock(&hash_lock);
 
-out_set:
-	lock->class[subclass] = class;
+	if (!subclass)
+		lock->class_cache = class;
 
 	DEBUG_LOCKS_WARN_ON(class->subclass != subclass);
 
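Note that only subclass 0 is written back to the cache: with a single slot, caching a nonzero subclass would evict the overwhelmingly common case. A minimal userspace model of the resulting fast/slow path (illustration only, names hypothetical):

/*
 * One-entry class cache model: only subclass 0 is ever cached in the
 * map, so nonzero subclasses always pay for the lookup.
 */
struct lock_class { unsigned int subclass; };
struct map { struct lock_class *class_cache; };

static struct lock_class classes[8];		/* stand-in for the class hash */

static struct lock_class *register_class(struct map *m, unsigned int subclass)
{
	struct lock_class *class;

	classes[subclass].subclass = subclass;	/* "register" on demand */
	class = &classes[subclass];
	if (!subclass)
		m->class_cache = class;		/* cache the common case only */
	return class;
}

static struct lock_class *acquire_class(struct map *m, unsigned int subclass)
{
	struct lock_class *class = NULL;

	if (!subclass)
		class = m->class_cache;		/* fast path: cache hit */
	if (!class)
		class = register_class(m, subclass);	/* slow path */
	return class;
}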
@@ -1914,7 +1936,7 @@ void lockdep_init_map(struct lockdep_map *lock, const char *name,
 	}
 	lock->name = name;
 	lock->key = key;
-	memset(lock->class, 0, sizeof(lock->class[0])*MAX_LOCKDEP_SUBCLASSES);
+	lock->class_cache = NULL;
 }
 
 EXPORT_SYMBOL_GPL(lockdep_init_map);
@@ -1928,8 +1950,8 @@ static int __lock_acquire(struct lockdep_map *lock, unsigned int subclass,
 			  unsigned long ip)
 {
 	struct task_struct *curr = current;
+	struct lock_class *class = NULL;
 	struct held_lock *hlock;
-	struct lock_class *class;
 	unsigned int depth, id;
 	int chain_head = 0;
 	u64 chain_key;
@@ -1947,8 +1969,11 @@ static int __lock_acquire(struct lockdep_map *lock, unsigned int subclass,
 		return 0;
 	}
 
-	class = lock->class[subclass];
-	/* not cached yet? */
+	if (!subclass)
+		class = lock->class_cache;
+	/*
+	 * Not cached yet or subclass?
+	 */
 	if (unlikely(!class)) {
 		class = register_lock_class(lock, subclass);
 		if (!class)
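For callers, the visible consequence is that nested acquisitions, which pass a nonzero subclass, now always resolve through register_lock_class(); only plain acquisitions can short-circuit via class_cache. A hypothetical usage sketch:

/*
 * Hypothetical caller, for illustration only: the *_nested APIs pass
 * a nonzero subclass down to __lock_acquire().
 */
spin_lock(&parent->lock);				/* subclass 0: may hit class_cache */
spin_lock_nested(&child->lock, SINGLE_DEPTH_NESTING);	/* subclass 1: hash lookup */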
@@ -2449,48 +2474,44 @@ void lockdep_free_key_range(void *start, unsigned long size)
 
 void lockdep_reset_lock(struct lockdep_map *lock)
 {
-	struct lock_class *class, *next, *entry;
+	struct lock_class *class, *next;
 	struct list_head *head;
 	unsigned long flags;
 	int i, j;
 
 	raw_local_irq_save(flags);
-	__raw_spin_lock(&hash_lock);
 
 	/*
-	 * Remove all classes this lock has:
+	 * Remove all classes this lock might have:
+	 */
+	for (j = 0; j < MAX_LOCKDEP_SUBCLASSES; j++) {
+		/*
+		 * If the class exists we look it up and zap it:
+		 */
+		class = look_up_lock_class(lock, j);
+		if (class)
+			zap_class(class);
+	}
+	/*
+	 * Debug check: in the end all mapped classes should
+	 * be gone.
 	 */
+	__raw_spin_lock(&hash_lock);
 	for (i = 0; i < CLASSHASH_SIZE; i++) {
 		head = classhash_table + i;
 		if (list_empty(head))
 			continue;
 		list_for_each_entry_safe(class, next, head, hash_entry) {
-			for (j = 0; j < MAX_LOCKDEP_SUBCLASSES; j++) {
-				entry = lock->class[j];
-				if (class == entry) {
-					zap_class(class);
-					lock->class[j] = NULL;
-					break;
-				}
+			if (unlikely(class == lock->class_cache)) {
+				__raw_spin_unlock(&hash_lock);
+				DEBUG_LOCKS_WARN_ON(1);
+				goto out_restore;
 			}
 		}
 	}
-
-	/*
-	 * Debug check: in the end all mapped classes should
-	 * be gone.
-	 */
-	for (j = 0; j < MAX_LOCKDEP_SUBCLASSES; j++) {
-		entry = lock->class[j];
-		if (!entry)
-			continue;
-		__raw_spin_unlock(&hash_lock);
-		DEBUG_LOCKS_WARN_ON(1);
-		raw_local_irq_restore(flags);
-		return;
-	}
-
 	__raw_spin_unlock(&hash_lock);
+
+out_restore:
 	raw_local_irq_restore(flags);
 }
 
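With the single-slot cache, lockdep_reset_lock() no longer needs to scan a per-map array under hash_lock: it zaps whatever look_up_lock_class() finds for each possible subclass, then takes hash_lock only for the debug pass over the hash table. Finding lock->class_cache still hashed after the zap loop would mean the class this lock cached survived removal, hence the DEBUG_LOCKS_WARN_ON(1) and the early exit through the new out_restore label, which keeps the irq state balanced on both paths.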