debug_locks        64 drivers/gpu/drm/i915/gem/i915_gem_object.h 	WARN_ON(debug_locks && !lock_is_held(&rcu_lock_map));
debug_locks       361 fs/xfs/xfs_inode.c 			return !debug_locks ||
debug_locks       346 include/linux/backing-dev.h 	WARN_ON_ONCE(debug_locks &&
debug_locks        11 include/linux/debug_locks.h extern int debug_locks __read_mostly;
debug_locks        17 include/linux/debug_locks.h 	return xchg(&debug_locks, 0);
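The two hits above are the core of the mechanism: a single global int debug_locks, plus __debug_locks_off(), which clears it with an atomic exchange so that exactly one caller ever observes the 1 -> 0 transition (lib/debug_locks.c's debug_locks_off(), listed further down, wraps this primitive and adds the debug_locks_silent check). A minimal userspace sketch of that report-once pattern follows; the names mirror the kernel's, but this is an illustration, not the kernel code itself.

/*
 * Userspace sketch of the pattern behind include/linux/debug_locks.h
 * and lib/debug_locks.c. Illustrative only.
 */
#include <stdatomic.h>
#include <stdio.h>

static atomic_int debug_locks = 1;        /* lock debugging enabled */
static atomic_int debug_locks_silent = 0; /* suppress the report    */

/* Atomic exchange: only the first caller to disable sees the old 1. */
static int __debug_locks_off(void)
{
	return atomic_exchange(&debug_locks, 0);
}

/* Report-once helper: returns 1 only for the caller that should print. */
static int debug_locks_off(void)
{
	if (atomic_load(&debug_locks) && __debug_locks_off()) {
		if (!atomic_load(&debug_locks_silent))
			return 1;
	}
	return 0;
}

int main(void)
{
	printf("%d\n", debug_locks_off()); /* 1: first error gets reported */
	printf("%d\n", debug_locks_off()); /* 0: later errors stay quiet   */
	return 0;
}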
debug_locks       388 include/linux/lockdep.h #define lockdep_depth(tsk)	(debug_locks ? (tsk)->lockdep_depth : 0)
debug_locks       391 include/linux/lockdep.h 		WARN_ON(debug_locks && !lockdep_is_held(l));	\
debug_locks       395 include/linux/lockdep.h 		WARN_ON(debug_locks && !lockdep_is_held_type(l, 0));	\
debug_locks       399 include/linux/lockdep.h 		WARN_ON(debug_locks && !lockdep_is_held_type(l, 1));	\
debug_locks       403 include/linux/lockdep.h 		WARN_ON_ONCE(debug_locks && !lockdep_is_held(l));	\
debug_locks       453 include/linux/lockdep.h # define lockdep_reset()		do { debug_locks = 1; } while (0)
debug_locks       633 include/linux/lockdep.h 		WARN_ONCE(debug_locks && !current->lockdep_recursion &&	\
debug_locks       639 include/linux/lockdep.h 		WARN_ONCE(debug_locks && !current->lockdep_recursion &&	\
debug_locks       645 include/linux/lockdep.h 		WARN_ONCE(debug_locks && !current->lockdep_recursion &&	\
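The include/linux/lockdep.h hits, like the WARN_ON() users in i915, backing-dev.h, net/sock.h and kernel/sched/core.c elsewhere in this list, all share one shape: an assertion that a lock is held fires only while debug_locks is still set, so once lockdep has shut itself down after the first report the checks go quiet instead of flooding the log. A rough, self-contained illustration follows; struct fake_lock and the one-field lockdep_is_held() are stand-ins invented for the example (the real macro consults lockdep's per-task held-lock table).

/*
 * Illustration of the debug_locks-gated assertion pattern from
 * include/linux/lockdep.h. fake_lock and this trivial
 * lockdep_is_held() are stand-ins for the example only.
 */
#include <stdio.h>

static int debug_locks = 1;

struct fake_lock { int held; };

static int lockdep_is_held(struct fake_lock *l)
{
	return l->held;
}

/* Analogue of lockdep_assert_held(): warn only while debug_locks is set. */
#define lockdep_assert_held(l)						\
	do {								\
		if (debug_locks && !lockdep_is_held(l))			\
			fprintf(stderr, "WARN: %s not held (%s:%d)\n",	\
				#l, __FILE__, __LINE__);		\
	} while (0)

int main(void)
{
	struct fake_lock l = { 0 };

	lockdep_assert_held(&l);  /* warns: the lock is not held        */
	debug_locks = 0;          /* pretend lockdep already bailed out */
	lockdep_assert_held(&l);  /* same violation, but now silent     */
	return 0;
}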
debug_locks      1579 include/net/sock.h 	WARN_ON_ONCE(!lockdep_sock_is_held(sk) && debug_locks);
debug_locks        99 kernel/locking/lockdep.c 	if (!debug_locks) {
debug_locks       110 kernel/locking/lockdep.c 	if (debug_locks && !arch_spin_is_locked(&lockdep_lock)) {
debug_locks      1153 kernel/locking/lockdep.c 	if (!debug_locks)
debug_locks      2981 kernel/locking/lockdep.c 		if (unlikely(!debug_locks))
debug_locks      3402 kernel/locking/lockdep.c 	if (unlikely(!debug_locks || current->lockdep_recursion))
debug_locks      3449 kernel/locking/lockdep.c 	if (unlikely(!debug_locks || current->lockdep_recursion))
debug_locks      3479 kernel/locking/lockdep.c 	if (unlikely(!debug_locks || current->lockdep_recursion))
debug_locks      3519 kernel/locking/lockdep.c 	if (unlikely(!debug_locks || current->lockdep_recursion))
debug_locks      3745 kernel/locking/lockdep.c 		if (debug_locks)
debug_locks      3752 kernel/locking/lockdep.c 	if (unlikely(!debug_locks))
debug_locks      3828 kernel/locking/lockdep.c 	if (unlikely(!debug_locks))
debug_locks      3965 kernel/locking/lockdep.c 	if (unlikely(!debug_locks))
debug_locks      4129 kernel/locking/lockdep.c 	if (unlikely(!debug_locks))
debug_locks      4172 kernel/locking/lockdep.c 	if (unlikely(!debug_locks))
debug_locks      4228 kernel/locking/lockdep.c 	if (unlikely(!debug_locks))
debug_locks      4328 kernel/locking/lockdep.c 	if (unlikely(!debug_locks))
debug_locks      4355 kernel/locking/lockdep.c 	if (unlikely(!debug_locks))
debug_locks      4375 kernel/locking/lockdep.c 	if (unlikely(!debug_locks))
debug_locks      4403 kernel/locking/lockdep.c 	if (!debug_locks)
debug_locks      4431 kernel/locking/lockdep.c 	if (!debug_locks)
debug_locks      4714 kernel/locking/lockdep.c 	if (unlikely(!lock_stat || !debug_locks))
debug_locks      4734 kernel/locking/lockdep.c 	if (unlikely(!lock_stat || !debug_locks))
debug_locks      4766 kernel/locking/lockdep.c 	debug_locks = 1;
debug_locks      5290 kernel/locking/lockdep.c 	if (unlikely(!debug_locks))
debug_locks      5338 kernel/locking/lockdep.c 	if (unlikely(!debug_locks)) {
debug_locks      5366 kernel/locking/lockdep.c 	if (unlikely(!debug_locks)) {
debug_locks      5416 kernel/locking/lockdep.c 	       rcu_scheduler_active, debug_locks);
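Most of the kernel/locking/lockdep.c hits are the same early-exit guard: a lockdep hook starts with if (unlikely(!debug_locks)) return;, so after the first reported problem the whole machinery degrades to cheap no-ops rather than producing follow-on reports from already-inconsistent state. A sketch of that shape is below; lock_acquire_hook(), track_dependency() and found_problem() are hypothetical stand-ins, not kernel functions.

/*
 * Sketch of the guard pattern at the top of the lockdep hooks in
 * kernel/locking/lockdep.c. All function names here are invented
 * for the example.
 */
#include <stdio.h>

#define unlikely(x) __builtin_expect(!!(x), 0)

static int debug_locks = 1;

static int found_problem(int lock_id)
{
	return lock_id == 42;       /* pretend lock 42 is the bad one */
}

static void track_dependency(int lock_id)
{
	printf("tracking lock %d\n", lock_id);
	if (found_problem(lock_id)) {
		printf("BUG: bad locking on %d, disabling lockdep\n", lock_id);
		debug_locks = 0;    /* the kernel uses debug_locks_off() */
	}
}

static void lock_acquire_hook(int lock_id)
{
	if (unlikely(!debug_locks))
		return;             /* lockdep already gave up: no-op */
	track_dependency(lock_id);
}

int main(void)
{
	lock_acquire_hook(1);       /* tracked                        */
	lock_acquire_hook(42);      /* tracked, reports, disables     */
	lock_acquire_hook(2);       /* silently skipped from here on  */
	return 0;
}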
debug_locks       337 kernel/locking/lockdep_proc.c 			debug_locks);
debug_locks       522 kernel/locking/lockdep_proc.c 	if (unlikely(!debug_locks))
debug_locks        74 kernel/locking/mutex-debug.c 	if (likely(debug_locks)) {
debug_locks        77 kernel/locking/rtmutex-debug.c 	if (!debug_locks || chwalk == RT_MUTEX_FULL_CHAINWALK || !act_waiter)
debug_locks        91 kernel/locking/rtmutex-debug.c 	if (!waiter->deadlock_lock || !debug_locks)
debug_locks       262 kernel/module.c 	if (unlikely(!debug_locks))
debug_locks       251 kernel/rcu/update.c 	return rcu_scheduler_active != RCU_SCHEDULER_INACTIVE && debug_locks &&
debug_locks      1732 kernel/sched/core.c 	WARN_ON_ONCE(debug_locks && !(lockdep_is_held(&p->pi_lock) ||
debug_locks        25 lib/debug_locks.c int debug_locks __read_mostly = 1;
debug_locks        26 lib/debug_locks.c EXPORT_SYMBOL_GPL(debug_locks);
debug_locks        41 lib/debug_locks.c 	if (debug_locks && __debug_locks_off()) {
debug_locks      1148 lib/locking-selftest.c 	if (expected == FAILURE && debug_locks) {
debug_locks      1154 lib/locking-selftest.c 	if (debug_locks != expected) {
debug_locks      1165 lib/locking-selftest.c 			lockclass_mask, debug_locks, expected);
debug_locks      1974 lib/locking-selftest.c 	if (!debug_locks) {
debug_locks      2078 lib/locking-selftest.c 		debug_locks = 0;
debug_locks      2087 lib/locking-selftest.c 		debug_locks = 1;
debug_locks      2093 lib/locking-selftest.c 		debug_locks = 1;
debug_locks      2099 lib/locking-selftest.c 		debug_locks = 1;
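The lib/locking-selftest.c hits use the flag in the opposite direction: each testcase deliberately provokes a locking error, the harness then checks whether lockdep cleared debug_locks as expected (FAILURE cases should clear it, SUCCESS cases should leave it at 1), and re-arms it with debug_locks = 1 before the next test. A toy harness in the same spirit follows; dotest(), the two testcase functions and the SUCCESS/FAILURE values are modelled on the selftest, not copied from it.

/*
 * Toy version of the lib/locking-selftest.c harness logic around
 * debug_locks. Simplified model, not the kernel's code.
 */
#include <stdio.h>

enum { FAILURE = 0, SUCCESS = 1 };

static int debug_locks = 1;

/* A "good" testcase: valid locking, lockdep stays enabled. */
static void ok_testcase(void)  { /* no violation */ }

/* A "bad" testcase: models a violation that makes lockdep turn itself off. */
static void bad_testcase(void) { debug_locks = 0; }

static void dotest(void (*testcase)(void), int expected, const char *name)
{
	debug_locks = 1;                 /* re-arm before each test */
	testcase();

	/*
	 * expected == FAILURE means the test should have tripped lockdep,
	 * i.e. debug_locks should now be 0; SUCCESS means it should be 1.
	 */
	if (debug_locks != expected)
		printf("%-12s FAILED (debug_locks=%d, expected %d)\n",
		       name, debug_locks, expected);
	else
		printf("%-12s ok\n", name);
}

int main(void)
{
	dotest(ok_testcase,  SUCCESS, "ok_testcase");
	dotest(bad_testcase, FAILURE, "bad_testcase");
	debug_locks = 1;                 /* leave debugging enabled */
	return 0;
}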
debug_locks        49 lib/rhashtable.c 	return (debug_locks) ? lockdep_is_held(&ht->mutex) : 1;
debug_locks        55 lib/rhashtable.c 	if (!debug_locks)
debug_locks       116 net/openvswitch/datapath.c 	if (debug_locks)
debug_locks        11 tools/include/linux/debug_locks.h extern bool debug_locks;
debug_locks        12 tools/lib/lockdep/common.c bool debug_locks = true;