Kernel: Utilize AK::SourceLocation for LOCK_DEBUG instrumentation.

The previous `LOCKER(..)` instrumentation only covered some of the
cases where a lock is actually acquired. By utilizing the new
`AK::SourceLocation` functionality, we can now reliably instrument
all calls to `lock()` automatically.
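(For context: the trick is that `SourceLocation::current()` is used as a default argument, and the compiler builtins behind it are evaluated at the call site, so the callee receives the caller's file, line, and function with no macro plumbing. Below is a minimal stand-alone sketch of that pattern; the `SourceLocation` struct here is a simplified stand-in for illustration, not the real `AK::SourceLocation` implementation.)

```cpp
#include <cstdio>

// Simplified stand-in for AK::SourceLocation. The compiler builtins used as
// default arguments are evaluated at the *call site*, which is what makes a
// defaulted "SourceLocation::current()" parameter capture the caller.
struct SourceLocation {
    static constexpr SourceLocation current(
        char const* file = __builtin_FILE(),
        int line = __builtin_LINE(),
        char const* function = __builtin_FUNCTION())
    {
        return SourceLocation { file, line, function };
    }

    char const* file_name;
    int line_number;
    char const* function_name;
};

// Every caller of lock() is instrumented automatically via the default argument.
void lock(SourceLocation const& location = SourceLocation::current())
{
    std::printf("lock() acquired in %s (%s:%d)\n",
        location.function_name, location.file_name, location.line_number);
}

int main()
{
    lock(); // Reports main() and this line, not lock()'s own location.
    return 0;
}
```

Unlike the `__FILE__`/`__LINE__` macros, this also records the calling function's name and covers direct `lock()` calls that never went through `LOCKER(..)`.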

Other changes:
- Tweak the message in `Thread::finalize()` that dumps leaked locks
  so it's more readable and includes the function information that is
  now available.

- Make the `LOCKER(..)` define a plain pass-through; it will be cleaned
  up in a follow-up change.
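(As a usage illustration of why the `LOCKER(..)` define can become a plain pass-through: the location capture now happens entirely through the defaulted parameters, so the RAII wrapper simply forwards its caller's location. The sketch below uses hypothetical minimal `Lock`/`Locker` types with C++20 `std::source_location` standing in for `AK::SourceLocation`; it is not the kernel's actual implementation.)

```cpp
#include <cstdio>
#include <source_location>

// Hypothetical minimal lock type, only to show the instrumentation flow.
struct Lock {
    void lock(std::source_location location = std::source_location::current())
    {
        std::printf("locked in %s (%s:%u)\n",
            location.function_name(), location.file_name(),
            static_cast<unsigned>(location.line()));
    }
    void unlock() { std::puts("unlocked"); }
};

class Locker {
public:
    // Forward the caller's location so the recorded site is where the Locker
    // was constructed, not the Locker constructor itself.
    explicit Locker(Lock& lock,
        std::source_location location = std::source_location::current())
        : m_lock(lock)
    {
        m_lock.lock(location);
    }
    ~Locker() { m_lock.unlock(); }

private:
    Lock& m_lock;
};

int main()
{
    Lock lock;
    Locker locker(lock); // Recorded as main() at this file:line.
    return 0;
}
```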
Author: Brian Gianforcaro, 2021-04-24 15:17:02 -07:00 (committed by Andreas Kling)
Parent: 87724b3d09
Commit: 04156d53ca
4 changed files with 50 additions and 44 deletions

Kernel/Lock.cpp

@@ -4,6 +4,7 @@
* SPDX-License-Identifier: BSD-2-Clause
*/
+#include <AK/SourceLocation.h>
#include <AK/TemporaryChange.h>
#include <Kernel/Debug.h>
#include <Kernel/KSyms.h>
@@ -13,12 +14,7 @@
namespace Kernel {
#if LOCK_DEBUG
-void Lock::lock(Mode mode)
-{
-lock("unknown", 0, mode);
-}
-void Lock::lock(const char* file, int line, Mode mode)
+void Lock::lock(Mode mode, const SourceLocation& location)
#else
void Lock::lock(Mode mode)
#endif
@@ -52,9 +48,10 @@ void Lock::lock(Mode mode)
}
VERIFY(m_times_locked == 0);
m_times_locked++;
#if LOCK_DEBUG
if (current_thread) {
-current_thread->holding_lock(*this, 1, file, line);
+current_thread->holding_lock(*this, 1, location);
}
#endif
m_queue.should_block(true);
@@ -77,8 +74,9 @@ void Lock::lock(Mode mode)
VERIFY(mode == Mode::Exclusive || mode == Mode::Shared);
VERIFY(m_times_locked > 0);
m_times_locked++;
#if LOCK_DEBUG
-current_thread->holding_lock(*this, 1, file, line);
+current_thread->holding_lock(*this, 1, location);
#endif
m_lock.store(false, AK::memory_order_release);
return;
@@ -98,8 +96,9 @@ void Lock::lock(Mode mode)
it->value++;
else
m_shared_holders.set(current_thread, 1);
#if LOCK_DEBUG
-current_thread->holding_lock(*this, 1, file, line);
+current_thread->holding_lock(*this, 1, location);
#endif
m_lock.store(false, AK::memory_order_release);
return;
@@ -168,10 +167,9 @@ void Lock::unlock()
#if LOCK_DEBUG
if (current_thread) {
-current_thread->holding_lock(*this, -1);
+current_thread->holding_lock(*this, -1, {});
}
#endif
m_lock.store(false, AK::memory_order_release);
if (unlocked_last) {
u32 did_wake = m_queue.wake_one();
@@ -205,7 +203,7 @@ auto Lock::force_unlock_if_locked(u32& lock_count_to_restore) -> Mode
dbgln_if(LOCK_RESTORE_DEBUG, "Lock::force_unlock_if_locked @ {}: unlocking exclusive with lock count: {}", this, m_times_locked);
#if LOCK_DEBUG
-m_holder->holding_lock(*this, -(int)m_times_locked);
+m_holder->holding_lock(*this, -(int)m_times_locked, {});
#endif
m_holder = nullptr;
VERIFY(m_times_locked > 0);
@@ -233,7 +231,7 @@ auto Lock::force_unlock_if_locked(u32& lock_count_to_restore) -> Mode
lock_count_to_restore = it->value;
VERIFY(lock_count_to_restore > 0);
#if LOCK_DEBUG
-m_holder->holding_lock(*this, -(int)lock_count_to_restore);
+m_holder->holding_lock(*this, -(int)lock_count_to_restore, {});
#endif
m_shared_holders.remove(it);
VERIFY(m_times_locked >= lock_count_to_restore);
@@ -264,12 +262,7 @@ auto Lock::force_unlock_if_locked(u32& lock_count_to_restore) -> Mode
}
#if LOCK_DEBUG
-void Lock::restore_lock(Mode mode, u32 lock_count)
-{
-return restore_lock("unknown", 0, mode, lock_count);
-}
-void Lock::restore_lock(const char* file, int line, Mode mode, u32 lock_count)
+void Lock::restore_lock(Mode mode, u32 lock_count, const SourceLocation& location)
#else
void Lock::restore_lock(Mode mode, u32 lock_count)
#endif
@@ -296,8 +289,9 @@ void Lock::restore_lock(Mode mode, u32 lock_count)
m_holder = current_thread;
m_queue.should_block(true);
m_lock.store(false, AK::memory_order_release);
#if LOCK_DEBUG
-m_holder->holding_lock(*this, (int)lock_count, file, line);
+m_holder->holding_lock(*this, (int)lock_count, location);
#endif
return;
}
@@ -318,8 +312,9 @@ void Lock::restore_lock(Mode mode, u32 lock_count)
VERIFY(set_result == AK::HashSetResult::InsertedNewEntry);
m_queue.should_block(true);
m_lock.store(false, AK::memory_order_release);
#if LOCK_DEBUG
-m_holder->holding_lock(*this, (int)lock_count, file, line);
+m_holder->holding_lock(*this, (int)lock_count, location);
#endif
return;
}

Kernel/Lock.h

@@ -30,14 +30,16 @@ public:
}
~Lock() = default;
-void lock(Mode = Mode::Exclusive);
#if LOCK_DEBUG
-void lock(const char* file, int line, Mode mode = Mode::Exclusive);
-void restore_lock(const char* file, int line, Mode, u32);
+void lock(Mode mode = Mode::Exclusive, const SourceLocation& location = SourceLocation::current());
+void restore_lock(Mode, u32, const SourceLocation& location = SourceLocation::current());
+#else
+void lock(Mode = Mode::Exclusive);
+void restore_lock(Mode, u32);
#endif
void unlock();
[[nodiscard]] Mode force_unlock_if_locked(u32&);
-void restore_lock(Mode, u32);
[[nodiscard]] bool is_locked() const { return m_mode != Mode::Unlocked; }
void clear_waiters();
@@ -79,17 +81,19 @@ private:
class Locker {
public:
#if LOCK_DEBUG
-ALWAYS_INLINE explicit Locker(const char* file, int line, Lock& l, Lock::Mode mode = Lock::Mode::Exclusive)
-: m_lock(l)
-{
-m_lock.lock(file, line, mode);
-}
-#endif
+ALWAYS_INLINE explicit Locker(Lock& l, Lock::Mode mode = Lock::Mode::Exclusive, const SourceLocation& location = SourceLocation::current())
+#else
ALWAYS_INLINE explicit Locker(Lock& l, Lock::Mode mode = Lock::Mode::Exclusive)
+#endif
: m_lock(l)
{
+#if LOCK_DEBUG
+m_lock.lock(mode, location);
+#else
m_lock.lock(mode);
+#endif
}
ALWAYS_INLINE ~Locker()
{
if (m_locked)
@@ -101,11 +105,21 @@ public:
m_locked = false;
m_lock.unlock();
}
+#if LOCK_DEBUG
+ALWAYS_INLINE void lock(Lock::Mode mode = Lock::Mode::Exclusive, const SourceLocation& location = SourceLocation::current())
+#else
ALWAYS_INLINE void lock(Lock::Mode mode = Lock::Mode::Exclusive)
+#endif
{
VERIFY(!m_locked);
m_locked = true;
+#if LOCK_DEBUG
+m_lock.lock(mode, location);
+#else
m_lock.lock(mode);
+#endif
}
private:
@@ -113,13 +127,8 @@ private:
bool m_locked { true };
};
-#if LOCK_DEBUG
-# define LOCKER(...) Locker locker(__FILE__, __LINE__, __VA_ARGS__)
-# define RESTORE_LOCK(lock, ...) (lock).restore_lock(__FILE__, __LINE__, __VA_ARGS__)
-#else
-# define LOCKER(...) Locker locker(__VA_ARGS__)
-# define RESTORE_LOCK(lock, ...) (lock).restore_lock(__VA_ARGS__)
-#endif
+#define LOCKER(...) Locker locker(__VA_ARGS__)
+#define RESTORE_LOCK(lock, ...) (lock).restore_lock(__VA_ARGS__)
template<typename T>
class Lockable {

Kernel/Thread.cpp

@@ -360,8 +360,10 @@ void Thread::finalize()
if (lock_count() > 0) {
dbgln("Thread {} leaking {} Locks!", *this, lock_count());
ScopedSpinLock list_lock(m_holding_locks_lock);
-for (auto& info : m_holding_locks_list)
-dbgln(" - {} @ {} locked at {}:{} count: {}", info.lock->name(), info.lock, info.file, info.line, info.count);
+for (auto& info : m_holding_locks_list) {
+const auto& location = info.source_location;
+dbgln(" - Lock: \"{}\" @ {} locked in function \"{}\" at \"{}:{}\" with a count of: {}", info.lock->name(), info.lock, location.function_name(), location.file_name(), location.line_number(), info.count);
+}
VERIFY_NOT_REACHED();
}
#endif

Kernel/Thread.h

@@ -12,6 +12,7 @@
#include <AK/IntrusiveList.h>
#include <AK/Optional.h>
#include <AK/OwnPtr.h>
+#include <AK/SourceLocation.h>
#include <AK/String.h>
#include <AK/Time.h>
#include <AK/Vector.h>
@@ -1066,7 +1067,7 @@ public:
RecursiveSpinLock& get_lock() const { return m_lock; }
#if LOCK_DEBUG
-void holding_lock(Lock& lock, int refs_delta, const char* file = nullptr, int line = 0)
+void holding_lock(Lock& lock, int refs_delta, const SourceLocation& location)
{
VERIFY(refs_delta != 0);
m_holding_locks.fetch_add(refs_delta, AK::MemoryOrder::memory_order_relaxed);
@@ -1082,7 +1083,7 @@ public:
}
}
if (!have_existing)
-m_holding_locks_list.append({ &lock, file ? file : "unknown", line, 1 });
+m_holding_locks_list.append({ &lock, location, 1 });
} else {
VERIFY(refs_delta < 0);
bool found = false;
@@ -1208,8 +1209,7 @@ private:
#if LOCK_DEBUG
struct HoldingLockInfo {
Lock* lock;
-const char* file;
-int line;
+SourceLocation source_location;
unsigned count;
};
Atomic<u32> m_holding_locks { 0 };