/*
* Copyright (c) 2021, Jan de Visser <jan@de-visser.net>
*
* SPDX-License-Identifier: BSD-2-Clause
*/

#pragma once

#include <AK/WeakPtr.h>
#include <LibCore/Object.h>
#include <LibSQL/Forward.h>
#include <LibSQL/Heap.h>
#include <LibSQL/Index.h>
#include <LibSQL/Key.h>

namespace SQL {

/**
* The HashIndex class is a straightforward implementation of a persisted
* extendible hash table (see
* https://en.wikipedia.org/wiki/Extendible_hashing).
*/
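
/**
 * A HashBucket holds the keys that currently hash to one directory slot. A
 * bucket is inflated (read back from the heap) lazily, on first access to its
 * entries, and records its own local depth; per the extendible hashing scheme
 * referenced above, the local depth is compared against the index's global
 * depth when an insert overflows the bucket.
 */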
class HashBucket : public IndexNode
    , public Weakable<HashBucket> {
public:
    HashBucket(HashIndex&, u32 index, u32 local_depth, u32 pointer);
    ~HashBucket() override = default;
    Optional<u32> get(Key&);
    bool insert(Key const&);
    Vector<Key> const& entries()
    {
        inflate();
        return m_entries;
    }
    Key const& operator[](size_t);
    Key const& operator[](size_t ix) const
    {
        VERIFY(ix < m_entries.size());
        return m_entries[ix];
    }
    [[nodiscard]] u32 local_depth() const { return m_local_depth; }
    [[nodiscard]] u32 size() { return entries().size(); }
    [[nodiscard]] u32 size() const { return m_entries.size(); }
    [[nodiscard]] u32 index() const { return m_index; }
    void serialize(ByteBuffer&) const override;
    IndexNode* as_index_node() override { return dynamic_cast<IndexNode*>(this); }
    [[nodiscard]] HashIndex const& hash_index() const { return m_hash_index; }
    [[nodiscard]] HashBucket const* next_bucket();
    [[nodiscard]] HashBucket const* previous_bucket();
    void list_bucket();

private:
    Optional<size_t> find_key_in_bucket(Key const&);
    void set_index(u32 index) { m_index = index; }
    void set_local_depth(u32 depth) { m_local_depth = depth; }
    [[nodiscard]] size_t max_entries_in_bucket() const;
    void inflate();

    HashIndex& m_hash_index;
    u32 m_local_depth { 1 };
    u32 m_index { 0 };
    Vector<Key> m_entries;
    bool m_inflated { false };

    friend HashIndex;
};
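
/**
 * The index proper: a directory of 2^global_depth bucket pointers plus the
 * buckets they refer to, persisted to the heap. Following the extendible
 * hashing scheme referenced above, expand() doubles the directory when a full
 * bucket's local depth has already reached the global depth.
 *
 * A rough usage sketch (hypothetical: assumes `hash_index` is a
 * RefPtr<HashIndex> obtained elsewhere, `key` is a Key built against the
 * index's TupleDescriptor, and `do_something_with` stands in for caller code):
 *
 *     if (!hash_index->insert(key))
 *         warnln("insert failed");
 *     auto pointer = hash_index->get(key); // Optional<u32> heap pointer, if the key is present
 *     for (auto iter = hash_index->begin(); !iter.is_end(); ++iter)
 *         do_something_with(*iter);        // *iter is a Key; order follows the directory, not the keys
 */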
class HashIndex : public Index {
    C_OBJECT(HashIndex);

public:
    ~HashIndex() override = default;

    Optional<u32> get(Key&);
    bool insert(Key const&);
    bool insert(Key const&& entry) { return insert(entry); }
    HashIndexIterator find(Key const&);
    HashIndexIterator begin();
    static HashIndexIterator end();

    [[nodiscard]] u32 global_depth() const { return m_global_depth; }
    [[nodiscard]] u32 size() const { return 1 << m_global_depth; }
    [[nodiscard]] HashBucket* get_bucket(u32);
    [[nodiscard]] u32 node_pointer(u32 node_number) const { return m_nodes[node_number]; }
    [[nodiscard]] u32 first_node_pointer() const { return m_nodes[0]; }
    [[nodiscard]] size_t nodes() const { return m_nodes.size(); }
    void list_hash();

private:
    HashIndex(Heap&, TupleDescriptor const&, u32);

    void expand();
    void write_directory_to_write_ahead_log();
    HashBucket* append_bucket(u32 index, u32 local_depth, u32 pointer);
    HashBucket* get_bucket_for_insert(Key const&);
    [[nodiscard]] HashBucket* get_bucket_by_index(u32 index);

    u32 m_global_depth { 1 };
    Vector<u32> m_nodes;
    Vector<OwnPtr<HashBucket>> m_buckets;

    friend HashBucket;
    friend HashDirectoryNode;
};
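
/**
 * A HashDirectoryNode serializes one heap block's worth of the directory.
 * Judging by max_pointers_in_node(), each block carries a three-u32 header
 * followed by two u32s per directory entry (presumably a bucket's block
 * pointer and its local depth).
 */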
class HashDirectoryNode : public IndexNode {
public:
    HashDirectoryNode(HashIndex&, u32, size_t);
    HashDirectoryNode(HashIndex&, u32, ByteBuffer&);
    HashDirectoryNode(HashDirectoryNode const& other) = default;
    void serialize(ByteBuffer&) const override;
    IndexNode* as_index_node() override { return dynamic_cast<IndexNode*>(this); }
    [[nodiscard]] u32 number_of_pointers() const { return min(max_pointers_in_node(), m_hash_index.size() - m_offset); }
    [[nodiscard]] bool is_last() const { return m_is_last; }
    static constexpr size_t max_pointers_in_node() { return (BLOCKSIZE - 3 * sizeof(u32)) / (2 * sizeof(u32)); }

private:
    HashIndex& m_hash_index;
    size_t m_node_number { 0 };
    size_t m_offset { 0 };
    bool m_is_last { false };
};
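
/**
 * Iterator over all keys in the index, moving from bucket to bucket via
 * HashBucket::next_bucket() and previous_bucket(). It keeps only a WeakPtr to
 * the current bucket, so is_end() becomes true if that bucket goes away.
 */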
class HashIndexIterator {
public:
    [[nodiscard]] bool is_end() const { return !m_current; }

    bool operator==(HashIndexIterator const& other) const;
    bool operator!=(HashIndexIterator const& other) const { return !(*this == other); }
    bool operator==(Key const& other) const;
    bool operator!=(Key const& other) const { return !(*this == other); }

    HashIndexIterator operator++()
    {
        *this = next();
        return *this;
    }

    HashIndexIterator operator++(int)
    {
        *this = next();
        return *this;
    }

    HashIndexIterator operator--()
    {
        *this = previous();
        return *this;
    }

    HashIndexIterator const operator--(int)
    {
        *this = previous();
        return *this;
    }

    Key const& operator*() const
    {
        VERIFY(!is_end());
        return (*m_current)[m_index];
    }

    Key const& operator->() const
    {
        VERIFY(!is_end());
        return (*m_current)[m_index];
    }

    HashIndexIterator& operator=(HashIndexIterator const&) = default;
    HashIndexIterator(HashIndexIterator const&) = default;

private:
    HashIndexIterator() = default;
    explicit HashIndexIterator(HashBucket const*, size_t key_index = 0);

    static HashIndexIterator end() { return HashIndexIterator(); }

    [[nodiscard]] HashIndexIterator next();
    [[nodiscard]] HashIndexIterator previous();
    [[nodiscard]] Key key() const { return **this; }

    WeakPtr<HashBucket> m_current;
    size_t m_index { 0 };

    friend HashIndex;
};
}