blob: 4ffb0c15c8faaa47ee9af134fdda7c31f67d06d6 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
|
/*
* Copyright (c) 2020, the SerenityOS developers.
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/Assertions.h>
#include <AK/Atomic.h>
#include <AK/Noncopyable.h>
#ifdef KERNEL
# include <Kernel/Arch/Processor.h>
# include <Kernel/Arch/ScopedCritical.h>
#endif
#ifndef __serenity__
# include <new>
#endif
namespace AK {
// Default factory used by Singleton<T>: heap-allocates a value-initialized T.
// Supply a custom InitFunction to Singleton when T needs constructor
// arguments or special placement.
template<typename T>
struct SingletonInstanceCreator {
    static T* create()
    {
        // Note: `new T()` (with parentheses) value-initializes, which matters
        // for types with trivial default constructors.
        auto* instance = new T();
        return instance;
    }
};
// Thread-safe, lazily-initialized singleton holder.
//
// The instance pointer is kept in an Atomic<T*> that moves through three
// states:
//   nullptr       - instance not yet created
//   (T*)0x1       - creation currently in progress on another thread/CPU
//   anything else - pointer to the live instance
//
// InitFunction defaults to SingletonInstanceCreator<T>::create, i.e. plain
// `new T()`; pass a custom function for types needing constructor arguments.
template<typename T, T* (*InitFunction)() = SingletonInstanceCreator<T>::create>
class Singleton {
    AK_MAKE_NONCOPYABLE(Singleton);
    AK_MAKE_NONMOVABLE(Singleton);

public:
    Singleton() = default;

    // Returns the instance stored in obj_var, creating it on first use when
    // allow_create is true. With allow_create == false this never constructs
    // an instance and may return nullptr, but it still waits out a creation
    // that another thread has already started (the 0x1 sentinel).
    template<bool allow_create = true>
    static T* get(Atomic<T*>& obj_var)
    {
        T* obj = obj_var.load(AK::memory_order_acquire);
        if (FlatPtr(obj) <= 0x1) {
            // If this is the first time, see if we get to initialize it
#ifdef KERNEL
            // Stay uninterrupted while holding the 0x1 sentinel; otherwise
            // another thread scheduled on this processor could spin on the
            // sentinel below while we never get to finish construction.
            Kernel::ScopedCritical critical;
#endif
            if constexpr (allow_create) {
                // Race to swap nullptr -> 0x1; the winner constructs the
                // instance and publishes it with a release store.
                if (obj == nullptr && obj_var.compare_exchange_strong(obj, (T*)0x1, AK::memory_order_acq_rel)) {
                    // We're the first one
                    obj = InitFunction();
                    obj_var.store(obj, AK::memory_order_release);
                    return obj;
                }
            }
            // Someone else was faster, wait until they're done
            while (obj == (T*)0x1) {
#ifdef KERNEL
                Kernel::Processor::wait_check();
#else
                // TODO: yield
#endif
                obj = obj_var.load(AK::memory_order_acquire);
            }
            if constexpr (allow_create) {
                // We should always return an instance if we allow creating one
                VERIFY(obj != nullptr);
            }
            VERIFY(obj != (T*)0x1);
        }
        return obj;
    }

    // Returns the instance, creating it on first access.
    T* ptr() const
    {
        return get(m_obj);
    }

    T* operator->() const
    {
        return ptr();
    }

    T& operator*() const
    {
        return *ptr();
    }

    operator T*() const
    {
        return ptr();
    }

    operator T&() const
    {
        return *ptr();
    }

    // Returns true once the instance has been fully constructed and
    // published (i.e. the stored pointer is past the 0x1 sentinel).
    // Uses an acquire load: memory_order_consume is deprecated in C++17
    // and is promoted to acquire by compilers anyway.
    bool is_initialized() const
    {
        T* obj = m_obj.load(AK::MemoryOrder::memory_order_acquire);
        return FlatPtr(obj) > 0x1;
    }

    // Eagerly constructs the instance (e.g. during early boot) so later
    // accesses never hit the creation path.
    void ensure_instance()
    {
        ptr();
    }

private:
    // Mutable so the const accessors above can lazily create the instance.
    mutable Atomic<T*> m_obj { nullptr };
};
}
using AK::Singleton;
|