summaryrefslogtreecommitdiff
path: root/Kernel/VM
diff options
context:
space:
mode:
author: Andreas Kling <awesomekling@gmail.com> 2019-09-30 17:21:45 +0200
committer: Andreas Kling <awesomekling@gmail.com> 2019-09-30 17:22:16 +0200
commit: ac20919b13f82ea9f8d59fe1f34054b7060a96df (patch)
tree: adf1ed506874a76048b38b7f41e73514640e1ab3 /Kernel/VM
parent: 1a279c5b2aba17b0a22cfff0624124ebc3dbebc7 (diff)
download: serenity-ac20919b13f82ea9f8d59fe1f34054b7060a96df.zip
Kernel: Make it possible to turn off VM guard pages at compile time
This might be useful for debugging since guard pages introduce a fair amount of noise in the virtual address space.
Diffstat (limited to 'Kernel/VM')
-rw-r--r--  Kernel/VM/RangeAllocator.cpp  15
1 file changed, 11 insertions(+), 4 deletions(-)
diff --git a/Kernel/VM/RangeAllocator.cpp b/Kernel/VM/RangeAllocator.cpp
index 8330a798be..2273befaaf 100644
--- a/Kernel/VM/RangeAllocator.cpp
+++ b/Kernel/VM/RangeAllocator.cpp
@@ -3,6 +3,7 @@
#include <Kernel/kstdio.h>
//#define VRA_DEBUG
+#define VM_GUARD_PAGES
RangeAllocator::RangeAllocator(VirtualAddress base, size_t size)
{
@@ -59,14 +60,20 @@ void RangeAllocator::carve_at_index(int index, const Range& range)
Range RangeAllocator::allocate_anywhere(size_t size)
{
+#ifdef VM_GUARD_PAGES
// NOTE: We pad VM allocations with a guard page on each side.
- size_t padded_size = size + PAGE_SIZE * 2;
+ size_t effective_size = size + PAGE_SIZE * 2;
+ size_t offset_from_effective_base = PAGE_SIZE;
+#else
+ size_t effective_size = size;
+ size_t offset_from_effective_base = 0;
+#endif
for (int i = 0; i < m_available_ranges.size(); ++i) {
auto& available_range = m_available_ranges[i];
- if (available_range.size() < padded_size)
+ if (available_range.size() < effective_size)
continue;
- Range allocated_range(available_range.base().offset(PAGE_SIZE), size);
- if (available_range.size() == padded_size) {
+ Range allocated_range(available_range.base().offset(offset_from_effective_base), size);
+ if (available_range.size() == effective_size) {
#ifdef VRA_DEBUG
dbgprintf("VRA: Allocated perfect-fit anywhere(%u): %x\n", size, allocated_range.base().get());
#endif