path: root/Userland
author      Lenny Maiorani <lenny@colorado.edu>        2021-05-17 15:17:06 -0600
committer   Andreas Kling <kling@serenityos.org>       2021-05-18 08:06:01 +0200
commit      f91bcb8895cd6b76b2977ad0632fef521ba2f1d1 (patch)
tree        44b2d7630caa1dbba075098f011a417f9afb28e5 /Userland
parent      c2ae6c189e203a699ca495e1c0d62fb00b2fec5a (diff)
download    serenity-f91bcb8895cd6b76b2977ad0632fef521ba2f1d1.zip
LibC: Simplify malloc size classes
Problem:
- `size_classes` is a C-style array, which makes it awkward to use with generic algorithms.
- The `all_of` algorithm is re-implemented by hand for this one check instead of reusing the generic version.

Solution:
- Change `size_classes` to an `Array`.
- Use the generic `all_of` algorithm directly instead of reimplementing it.
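As an illustration of the pattern this commit adopts, here is a minimal standalone sketch using std::array and std::all_of (constexpr since C++20) as stand-ins for AK::Array and AK::all_of; the element values below are shortened for the example and are not taken verbatim from the tree:

// Sketch only: std::array/std::all_of standing in for AK::Array/AK::all_of.
#include <algorithm>
#include <array>

static constexpr std::array<unsigned short, 4> size_classes { 8, 16, 32, 64 };
static constexpr auto malloc_alignment = 8;

// The container knows its own length, so no separate num_size_classes
// constant is needed, and the generic all_of replaces a hand-written
// consteval loop for the alignment invariant.
static_assert(std::all_of(size_classes.begin(), size_classes.end(),
    [](auto val) { return val % malloc_alignment == 0; }));
static_assert(size_classes.size() == 4);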
Diffstat (limited to 'Userland')
-rw-r--r--Userland/DevTools/UserspaceEmulator/MallocTracer.cpp4
-rw-r--r--Userland/Libraries/LibC/malloc.cpp8
-rw-r--r--Userland/Libraries/LibC/mallocdefs.h18
3 files changed, 12 insertions, 18 deletions
diff --git a/Userland/DevTools/UserspaceEmulator/MallocTracer.cpp b/Userland/DevTools/UserspaceEmulator/MallocTracer.cpp
index 4f00c21c52..501d17c9af 100644
--- a/Userland/DevTools/UserspaceEmulator/MallocTracer.cpp
+++ b/Userland/DevTools/UserspaceEmulator/MallocTracer.cpp
@@ -68,7 +68,7 @@ void MallocTracer::target_did_malloc(Badge<Emulator>, FlatPtr address, size_t si
}));
auto& malloc_data = *mmap_region.malloc_metadata();
- bool is_chunked_block = malloc_data.chunk_size <= size_classes[num_size_classes - 1];
+ bool is_chunked_block = malloc_data.chunk_size <= size_classes[size_classes.size() - 1];
if (is_chunked_block)
malloc_data.mallocations.resize((ChunkedBlock::block_size - sizeof(ChunkedBlock)) / malloc_data.chunk_size);
else
@@ -92,7 +92,7 @@ ALWAYS_INLINE Mallocation* MallocRegionMetadata::mallocation_for_address(FlatPtr
ALWAYS_INLINE Optional<size_t> MallocRegionMetadata::chunk_index_for_address(FlatPtr address) const
{
- bool is_chunked_block = chunk_size <= size_classes[num_size_classes - 1];
+ bool is_chunked_block = chunk_size <= size_classes[size_classes.size() - 1];
if (!is_chunked_block) {
// This is a BigAllocationBlock
return 0;
diff --git a/Userland/Libraries/LibC/malloc.cpp b/Userland/Libraries/LibC/malloc.cpp
index 43519cf296..1da5b43ad9 100644
--- a/Userland/Libraries/LibC/malloc.cpp
+++ b/Userland/Libraries/LibC/malloc.cpp
@@ -102,12 +102,12 @@ struct BigAllocator {
// are run. Similarly, we can not allow global destructors to destruct
// them. We could have used AK::NeverDestoyed to prevent the latter,
// but it would have not helped with the former.
-static u8 g_allocators_storage[sizeof(Allocator) * num_size_classes];
+static u8 g_allocators_storage[sizeof(Allocator) * size_classes.size()];
static u8 g_big_allocators_storage[sizeof(BigAllocator)];
-static inline Allocator (&allocators())[num_size_classes]
+static inline Allocator (&allocators())[size_classes.size()]
{
- return reinterpret_cast<Allocator(&)[num_size_classes]>(g_allocators_storage);
+ return reinterpret_cast<Allocator(&)[size_classes.size()]>(g_allocators_storage);
}
static inline BigAllocator (&big_allocators())[1]
@@ -442,7 +442,7 @@ void __malloc_init()
if (secure_getenv("LIBC_PROFILE_MALLOC"))
s_profiling = true;
- for (size_t i = 0; i < num_size_classes; ++i) {
+ for (size_t i = 0; i < size_classes.size(); ++i) {
new (&allocators()[i]) Allocator();
allocators()[i].size = size_classes[i];
}
diff --git a/Userland/Libraries/LibC/mallocdefs.h b/Userland/Libraries/LibC/mallocdefs.h
index ebf90eeaef..4237219260 100644
--- a/Userland/Libraries/LibC/mallocdefs.h
+++ b/Userland/Libraries/LibC/mallocdefs.h
@@ -6,6 +6,8 @@
#pragma once
+#include <AK/AllOf.h>
+#include <AK/Array.h>
#include <AK/InlineLinkedList.h>
#include <AK/Types.h>
@@ -14,18 +16,10 @@
#define MALLOC_SCRUB_BYTE 0xdc
#define FREE_SCRUB_BYTE 0xed
-static constexpr unsigned short size_classes[] = { 8, 16, 32, 64, 128, 256, 504, 1016, 2032, 4088, 8184, 16376, 32752, 0 };
-static constexpr size_t num_size_classes = (sizeof(size_classes) / sizeof(unsigned short)) - 1;
-
-consteval bool check_size_classes_alignment()
-{
- for (size_t i = 0; i < num_size_classes; i++) {
- if ((size_classes[i] % 8) != 0)
- return false;
- }
- return true;
-}
-static_assert(check_size_classes_alignment());
+static constexpr Array<unsigned short, 13> size_classes { 8, 16, 32, 64, 128, 256, 504, 1016, 2032, 4088, 8184, 16376, 32752 };
+static constexpr auto malloc_alignment = 8;
+static_assert(all_of(size_classes.begin(), size_classes.end(),
+ [](const auto val) { return val % malloc_alignment == 0; }));
struct CommonHeader {
size_t m_magic;
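A hedged aside on the MallocTracer hunks above: because the new Array drops the trailing 0 sentinel, the last element is the largest real size class, so indexing with size_classes.size() - 1 selects the same value (32752) that num_size_classes - 1 did before. A small self-contained check, again with std::array standing in for AK::Array:

// Sketch only: the "largest size class" index is unchanged by the commit.
#include <array>
#include <cstddef>

static constexpr std::array<unsigned short, 13> size_classes {
    8, 16, 32, 64, 128, 256, 504, 1016, 2032, 4088, 8184, 16376, 32752
};

// Allocations at or below the largest size class come from a ChunkedBlock;
// anything larger is treated as a BigAllocationBlock.
constexpr bool is_chunked_block(std::size_t chunk_size)
{
    return chunk_size <= size_classes[size_classes.size() - 1];
}

static_assert(size_classes[size_classes.size() - 1] == 32752);
static_assert(is_chunked_block(4088) && !is_chunked_block(65536));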