summaryrefslogtreecommitdiff
path: root/Kernel/Arch/Spinlock.h
diff options
context:
space:
mode:
authorTimon Kruiper <timonkruiper@gmail.com>2022-05-02 23:56:30 +0200
committerAndreas Kling <kling@serenityos.org>2022-05-03 21:53:36 +0200
commit9abcb6700c3b3d0a465a1991aa06e2524dc68c07 (patch)
tree5940ec943f99c1a6ed0b654383fbbc9d098052db /Kernel/Arch/Spinlock.h
parent0d6d4508dffa763edf2a8bef4195c003b28308e4 (diff)
downloadserenity-9abcb6700c3b3d0a465a1991aa06e2524dc68c07.zip
Kernel: Move Arch/x86/Spinlock.h and add stubs for aarch64
The code in Spinlock.h has no architectural specific logic, thus can be moved to the Arch directory. This contains no functional change. Also add the Spinlock.cpp file for aarch64 which contains stubs for the lock and unlock functions.
Diffstat (limited to 'Kernel/Arch/Spinlock.h')
-rw-r--r--Kernel/Arch/Spinlock.h77
1 file changed, 68 insertions(+), 9 deletions(-)
diff --git a/Kernel/Arch/Spinlock.h b/Kernel/Arch/Spinlock.h
index ac084e5e68..995dc36c37 100644
--- a/Kernel/Arch/Spinlock.h
+++ b/Kernel/Arch/Spinlock.h
@@ -1,17 +1,76 @@
/*
- * Copyright (c) 2020, Andreas Kling <kling@serenityos.org>
+ * Copyright (c) 2020-2022, Andreas Kling <kling@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
-#include <AK/Platform.h>
+#include <Kernel/Arch/Processor.h>
+#include <Kernel/Locking/LockRank.h>
-#if ARCH(X86_64) || ARCH(I386)
-# include <Kernel/Arch/x86/Spinlock.h>
-#elif ARCH(AARCH64)
-# include <Kernel/Arch/aarch64/Spinlock.h>
-#else
-# error "Unknown architecture"
-#endif
+namespace Kernel {
+
+// Basic non-recursive spinlock, tagged with a LockRank so the kernel's
+// lock-rank checker can detect out-of-order acquisition.
+// lock()/unlock() are defined per-architecture (the commit adds aarch64
+// stubs alongside the existing x86 implementation).
+class Spinlock {
+ AK_MAKE_NONCOPYABLE(Spinlock);
+ AK_MAKE_NONMOVABLE(Spinlock);
+
+public:
+ Spinlock(LockRank rank = LockRank::None)
+ : m_rank(rank)
+ {
+ }
+
+ // Returns the previous flags to be restored by unlock() — presumably the
+ // caller's interrupt/processor flags; confirm in the arch Spinlock.cpp.
+ u32 lock();
+ void unlock(u32 prev_flags);
+
+ // Relaxed load is sufficient: this is an advisory snapshot, not a
+ // synchronization point.
+ [[nodiscard]] ALWAYS_INLINE bool is_locked() const
+ {
+ return m_lock.load(AK::memory_order_relaxed) != 0;
+ }
+
+ // Forcibly resets the lock to the unlocked state.
+ ALWAYS_INLINE void initialize()
+ {
+ m_lock.store(0, AK::memory_order_relaxed);
+ }
+
+private:
+ Atomic<u8> m_lock { 0 }; // 0 = unlocked, non-zero = held
+ const LockRank m_rank;
+};
+
+// Spinlock that may be re-acquired by the processor already holding it.
+// The lock word stores the owning Processor's address (see
+// is_locked_by_current_processor()), so ownership is per-processor.
+class RecursiveSpinlock {
+ AK_MAKE_NONCOPYABLE(RecursiveSpinlock);
+ AK_MAKE_NONMOVABLE(RecursiveSpinlock);
+
+public:
+ RecursiveSpinlock(LockRank rank = LockRank::None)
+ : m_rank(rank)
+ {
+ }
+
+ // Returns the previous flags to be restored by unlock() — presumably the
+ // caller's interrupt/processor flags; confirm in the arch Spinlock.cpp.
+ u32 lock();
+ void unlock(u32 prev_flags);
+
+ // Advisory snapshot; relaxed load, not a synchronization point.
+ [[nodiscard]] ALWAYS_INLINE bool is_locked() const
+ {
+ return m_lock.load(AK::memory_order_relaxed) != 0;
+ }
+
+ // True when the lock word holds the address of the current Processor,
+ // i.e. this CPU is the owner and may recursively re-lock.
+ [[nodiscard]] ALWAYS_INLINE bool is_locked_by_current_processor() const
+ {
+ return m_lock.load(AK::memory_order_relaxed) == FlatPtr(&Processor::current());
+ }
+
+ // Forcibly resets the lock to the unlocked state.
+ ALWAYS_INLINE void initialize()
+ {
+ m_lock.store(0, AK::memory_order_relaxed);
+ }
+
+private:
+ Atomic<FlatPtr> m_lock { 0 }; // 0 = unlocked, else address of owning Processor
+ u32 m_recursions { 0 }; // nesting depth — presumably maintained by the arch lock()/unlock(); verify there
+ const LockRank m_rank;
+};
+
+}