-rw-r--r--   AK/IntrusiveList.h   | 258
-rw-r--r--   Kernel/Scheduler.cpp |   9
-rw-r--r--   Kernel/Thread.cpp    |  39
-rw-r--r--   Kernel/Thread.h      |  39
4 files changed, 299 insertions(+), 46 deletions(-)
diff --git a/AK/IntrusiveList.h b/AK/IntrusiveList.h
new file mode 100644
index 0000000000..59edf40c9d
--- /dev/null
+++ b/AK/IntrusiveList.h
@@ -0,0 +1,258 @@
+#pragma once
+
+namespace AK {
+
+class IntrusiveListNode;
+class IntrusiveListStorage {
+private:
+    friend class IntrusiveListNode;
+    template<class T, IntrusiveListNode T::*member>
+    friend class IntrusiveList;
+    IntrusiveListNode* m_first { nullptr };
+    IntrusiveListNode* m_last { nullptr };
+};
+
+template<class T, IntrusiveListNode T::*member>
+class IntrusiveList {
+public:
+    IntrusiveList();
+    ~IntrusiveList();
+
+    void clear();
+    bool is_empty() const;
+    void append(T& n);
+    void prepend(T& n);
+    void remove(T& n);
+    bool contains(const T&) const;
+    T* first() const;
+    T* last() const;
+
+    class Iterator {
+    public:
+        Iterator();
+        Iterator(T* value);
+
+        T* operator*() const;
+        T* operator->() const;
+        bool operator==(const Iterator& other) const;
+        bool operator!=(const Iterator& other) const { return !(*this == other); }
+        Iterator& operator++();
+        Iterator& erase();
+
+    private:
+        T* m_value { nullptr };
+    };
+
+    Iterator begin();
+    Iterator end();
+
+private:
+    static T* next(T* current);
+    static T* node_to_value(IntrusiveListNode& node);
+    IntrusiveListStorage m_storage;
+};
+
+class IntrusiveListNode {
+public:
+    ~IntrusiveListNode();
+    void remove();
+    bool is_in_list() const;
+
+private:
+    template<class T, IntrusiveListNode T::*member>
+    friend class IntrusiveList;
+    IntrusiveListStorage* m_storage = nullptr;
+    IntrusiveListNode* m_next = nullptr;
+    IntrusiveListNode* m_prev = nullptr;
+};
+
+template<class T, IntrusiveListNode T::*member>
+inline IntrusiveList<T, member>::Iterator::Iterator()
+{
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline IntrusiveList<T, member>::Iterator::Iterator(T* value)
+    : m_value(value)
+{
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline T* IntrusiveList<T, member>::Iterator::operator*() const
+{
+    return m_value;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline T* IntrusiveList<T, member>::Iterator::operator->() const
+{
+    return m_value;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline bool IntrusiveList<T, member>::Iterator::operator==(const Iterator& other) const
+{
+    return other.m_value == m_value;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline typename IntrusiveList<T, member>::Iterator& IntrusiveList<T, member>::Iterator::operator++()
+{
+    m_value = IntrusiveList<T, member>::next(m_value);
+    return *this;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline typename IntrusiveList<T, member>::Iterator& IntrusiveList<T, member>::Iterator::erase()
+{
+    T* old = m_value;
+    m_value = IntrusiveList<T, member>::next(m_value);
+    (old->*member).remove();
+    return *this;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline IntrusiveList<T, member>::IntrusiveList()
+
+{
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline IntrusiveList<T, member>::~IntrusiveList()
+{
+    clear();
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline void IntrusiveList<T, member>::clear()
+{
+    while (m_storage.m_first)
+        m_storage.m_first->remove();
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline bool IntrusiveList<T, member>::is_empty() const
+{
+    return m_storage.m_first == nullptr;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline void IntrusiveList<T, member>::append(T& n)
+{
+    auto& nnode = n.*member;
+    if (nnode.m_storage)
+        nnode.remove();
+
+    nnode.m_storage = &m_storage;
+    nnode.m_prev = m_storage.m_last;
+    nnode.m_next = nullptr;
+
+    if (m_storage.m_last)
+        m_storage.m_last->m_next = &nnode;
+    m_storage.m_last = &nnode;
+    if (!m_storage.m_first)
+        m_storage.m_first = &nnode;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline void IntrusiveList<T, member>::prepend(T& n)
+{
+    auto& nnode = n.*member;
+    if (nnode.m_storage)
+        nnode.remove();
+
+    nnode.m_storage = &m_storage;
+    nnode.m_prev = nullptr;
+    nnode.m_next = m_storage.m_first;
+
+    if (m_storage.m_first)
+        m_storage.m_first->m_prev = &nnode;
+    m_storage.m_first = &nnode;
+    if (!m_storage.m_last)
+        m_storage.m_last = &nnode;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline void IntrusiveList<T, member>::remove(T& n)
+{
+    auto& nnode = n.*member;
+    if (nnode.m_storage)
+        nnode.remove();
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline bool IntrusiveList<T, member>::contains(const T& n) const
+{
+    auto& nnode = n.*member;
+    return nnode.m_storage == &m_storage;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline T* IntrusiveList<T, member>::first() const
+{
+    return m_storage.m_first ? node_to_value(*m_storage.m_first) : nullptr;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline T* IntrusiveList<T, member>::last() const
+{
+    return m_storage.m_last ? node_to_value(*m_storage.m_last) : nullptr;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline T* IntrusiveList<T, member>::next(T* current)
+{
+    auto& nextnode = (current->*member).m_next;
+    T* nextstruct = nextnode ? node_to_value(*nextnode) : nullptr;
+    return nextstruct;
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline typename IntrusiveList<T, member>::Iterator IntrusiveList<T, member>::begin()
+{
+    return m_storage.m_first ? Iterator(node_to_value(*m_storage.m_first)) : Iterator();
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline typename IntrusiveList<T, member>::Iterator IntrusiveList<T, member>::end()
+{
+    return Iterator();
+}
+
+template<class T, IntrusiveListNode T::*member>
+inline T* IntrusiveList<T, member>::node_to_value(IntrusiveListNode& node)
+{
+    return (T*)((char*)&node - ((char*)&(((T*)nullptr)->*member) - (char*)nullptr));
+}
+
+inline IntrusiveListNode::~IntrusiveListNode()
+{
+    if (m_storage)
+        remove();
+}
+
+inline void IntrusiveListNode::remove()
+{
+    ASSERT(m_storage);
+    if (m_storage->m_first == this)
+        m_storage->m_first = m_next;
+    if (m_storage->m_last == this)
+        m_storage->m_last = m_prev;
+    if (m_prev)
+        m_prev->m_next = m_next;
+    if (m_next)
+        m_next->m_prev = m_prev;
+    m_prev = nullptr;
+    m_next = nullptr;
+    m_storage = nullptr;
+}
+
+inline bool IntrusiveListNode::is_in_list() const
+{
+    return m_storage != nullptr;
+}
+
+}
+
+using AK::IntrusiveList;
+using AK::IntrusiveListNode;
diff --git a/Kernel/Scheduler.cpp b/Kernel/Scheduler.cpp
index 7bef7efc55..fbae58cd0b 100644
--- a/Kernel/Scheduler.cpp
+++ b/Kernel/Scheduler.cpp
@@ -331,14 +331,15 @@ bool Scheduler::pick_next()
     });
 #endif

-    if (g_runnable_threads->is_empty())
+    auto& runnable_list = *Thread::g_runnable_threads;
+    if (runnable_list.is_empty())
         return context_switch(s_colonel_process->main_thread());

-    auto* previous_head = g_runnable_threads->head();
+    auto* previous_head = runnable_list.first();
     for (;;) {
         // Move head to tail.
-        g_runnable_threads->append(g_runnable_threads->remove_head());
-        auto* thread = g_runnable_threads->head();
+        runnable_list.append(*previous_head);
+        auto* thread = runnable_list.first();

         if (!thread->process().is_being_inspected() && (thread->state() == Thread::Runnable || thread->state() == Thread::Running)) {
 #ifdef SCHEDULER_DEBUG
diff --git a/Kernel/Thread.cpp b/Kernel/Thread.cpp
index fafdac3536..c82e54a042 100644
--- a/Kernel/Thread.cpp
+++ b/Kernel/Thread.cpp
@@ -16,8 +16,8 @@ HashTable<Thread*>& thread_table()
     return *table;
 }

-InlineLinkedList<Thread>* g_runnable_threads;
-InlineLinkedList<Thread>* g_nonrunnable_threads;
+Thread::SchedulerThreadList* Thread::g_runnable_threads;
+Thread::SchedulerThreadList* Thread::g_nonrunnable_threads;

 static const u32 default_kernel_stack_size = 65536;
 static const u32 default_userspace_stack_size = 65536;
@@ -75,7 +75,7 @@ Thread::Thread(Process& process)
     if (m_process.pid() != 0) {
         InterruptDisabler disabler;
         thread_table().set(this);
-        set_thread_list(g_nonrunnable_threads);
+        g_nonrunnable_threads->append(*this);
     }
 }

@@ -85,8 +85,6 @@ Thread::~Thread()
     kfree_aligned(m_fpu_state);
     {
         InterruptDisabler disabler;
-        if (m_thread_list)
-            m_thread_list->remove(this);
         thread_table().remove(this);
     }

@@ -534,8 +532,8 @@ KResult Thread::wait_for_connect(FileDescription& description)

 void Thread::initialize()
 {
-    g_runnable_threads = new InlineLinkedList<Thread>;
-    g_nonrunnable_threads = new InlineLinkedList<Thread>;
+    g_runnable_threads = new SchedulerThreadList;
+    g_nonrunnable_threads = new SchedulerThreadList;
     Scheduler::initialize();
 }

@@ -555,23 +553,20 @@ bool Thread::is_thread(void* ptr)
     return thread_table().contains((Thread*)ptr);
 }

-void Thread::set_thread_list(InlineLinkedList<Thread>* thread_list)
-{
-    ASSERT_INTERRUPTS_DISABLED();
-    ASSERT(pid() != 0);
-    if (m_thread_list == thread_list)
-        return;
-    if (m_thread_list)
-        m_thread_list->remove(this);
-    if (thread_list)
-        thread_list->append(this);
-    m_thread_list = thread_list;
-}
-
 void Thread::set_state(State new_state)
 {
     InterruptDisabler disabler;
     m_state = new_state;
-    if (m_process.pid() != 0)
-        set_thread_list(thread_list_for_state(new_state));
+    if (m_process.pid() != 0) {
+        SchedulerThreadList* list = nullptr;
+        if (is_runnable_state(new_state))
+            list = g_runnable_threads;
+        else
+            list = g_nonrunnable_threads;
+
+        if (list->contains(*this))
+            return;
+
+        list->append(*this);
+    }
 }
diff --git a/Kernel/Thread.h b/Kernel/Thread.h
index 45a170e05d..e31e39f3a9 100644
--- a/Kernel/Thread.h
+++ b/Kernel/Thread.h
@@ -2,7 +2,7 @@

 #include <AK/AKString.h>
 #include <AK/Function.h>
-#include <AK/InlineLinkedList.h>
+#include <AK/IntrusiveList.h>
 #include <AK/OwnPtr.h>
 #include <AK/RefPtr.h>
 #include <AK/Vector.h>
@@ -29,10 +29,7 @@ struct SignalActionData {
     int flags { 0 };
 };

-extern InlineLinkedList<Thread>* g_runnable_threads;
-extern InlineLinkedList<Thread>* g_nonrunnable_threads;
-
-class Thread : public InlineLinkedListNode<Thread> {
+class Thread {
     friend class Process;
     friend class Scheduler;

@@ -253,13 +250,6 @@ public:

     Thread* clone(Process&);

-    // For InlineLinkedList
-    Thread* m_prev { nullptr };
-    Thread* m_next { nullptr };
-
-    InlineLinkedList<Thread>* thread_list() { return m_thread_list; }
-    void set_thread_list(InlineLinkedList<Thread>*);
-
     template<typename Callback>
     static IterationDecision for_each_in_state(State, Callback);
     template<typename Callback>
@@ -276,7 +266,15 @@ public:
         return state == Thread::State::Running || state == Thread::State::Runnable;
     }

-    static InlineLinkedList<Thread>* thread_list_for_state(Thread::State state)
+private:
+    IntrusiveListNode m_runnable_list_node;
+
+    typedef IntrusiveList<Thread, &Thread::m_runnable_list_node> SchedulerThreadList;
+
+public:
+    static SchedulerThreadList* g_runnable_threads;
+    static SchedulerThreadList* g_nonrunnable_threads;
+    static SchedulerThreadList* thread_list_for_state(Thread::State state)
     {
         if (is_runnable_state(state))
             return g_runnable_threads;
@@ -301,7 +299,6 @@ private:
     Region* m_signal_stack_user_region { nullptr };
     Blocker* m_blocker { nullptr };
     FPUState* m_fpu_state { nullptr };
-    InlineLinkedList<Thread>* m_thread_list { nullptr };
     State m_state { Invalid };
     bool m_has_used_fpu { false };
     bool m_was_interrupted_while_blocked { false };
@@ -345,11 +342,12 @@ template<typename Callback>
 inline IterationDecision Thread::for_each_runnable(Callback callback)
 {
     ASSERT_INTERRUPTS_DISABLED();
-    for (auto* thread = g_runnable_threads->head(); thread;) {
-        auto* next_thread = thread->next();
+    auto& tl = *g_runnable_threads;
+    for (auto it = tl.begin(); it != tl.end();) {
+        auto thread = *it;
+        it = ++it;
         if (callback(*thread) == IterationDecision::Break)
             return IterationDecision::Break;
-        thread = next_thread;
     }

     return IterationDecision::Continue;
@@ -359,11 +357,12 @@ template<typename Callback>
 inline IterationDecision Thread::for_each_nonrunnable(Callback callback)
 {
     ASSERT_INTERRUPTS_DISABLED();
-    for (auto* thread = g_nonrunnable_threads->head(); thread;) {
-        auto* next_thread = thread->next();
+    auto& tl = *g_nonrunnable_threads;
+    for (auto it = tl.begin(); it != tl.end();) {
+        auto thread = *it;
+        it = ++it;
         if (callback(*thread) == IterationDecision::Break)
             return IterationDecision::Break;
-        thread = next_thread;
     }

     return IterationDecision::Continue;
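
Editorial note (not part of the commit): the sketch below shows how the IntrusiveList API introduced in AK/IntrusiveList.h above is meant to be used by a client type, mirroring Thread::m_runnable_list_node and Thread::SchedulerThreadList. The Job type, JobList alias, s_jobs variable, and intrusive_list_example function are hypothetical names invented for illustration.

#include <AK/IntrusiveList.h>

// Hypothetical element type. The list node is embedded in the element
// itself, so linking and unlinking never allocate.
struct Job {
    int id { 0 };
    IntrusiveListNode list_node;
};

// Parameterized on the element type and the member pointer of the
// embedded node, just like Thread::SchedulerThreadList above.
typedef IntrusiveList<Job, &Job::list_node> JobList;

static JobList s_jobs;

void intrusive_list_example()
{
    static Job a, b;
    a.id = 1;
    b.id = 2;

    s_jobs.append(a);  // list: a
    s_jobs.prepend(b); // list: b, a

    // Iteration mirrors Thread::for_each_runnable(): Iterator::operator*
    // yields a T* (here, Job*).
    for (auto it = s_jobs.begin(); it != s_jobs.end(); ++it) {
        Job* job = *it;
        (void)job->id;
    }

    // remove() unlinks in O(1); the node's destructor would also unlink
    // the element automatically if it were destroyed while still listed.
    s_jobs.remove(a);
}

Because append() first unlinks a node that is already on another list, moving an element between lists is a single call; that is what lets the new Thread::set_state() shift a thread between g_runnable_threads and g_nonrunnable_threads without an explicit remove.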