Optimize ArenaAllocator to store page metadata inside the allocated page itself, saving one heap allocation per page

This commit is contained in:
Wojtek Figat
2025-07-09 00:22:35 +02:00
parent 3abbf08f1f
commit bdaf31b54f
2 changed files with 9 additions and 18 deletions

View File

@@ -13,7 +13,6 @@ void ArenaAllocator::Free()
#if COMPILE_WITH_PROFILER
ProfilerMemory::OnGroupUpdate(ProfilerMemory::Groups::MallocArena, -(int64)page->Size, -1);
#endif
Allocator::Free(page->Memory);
Page* next = page->Next;
Allocator::Free(page);
page = next;
@@ -33,21 +32,20 @@ void* ArenaAllocator::Allocate(uint64 size, uint64 alignment)
// Create a new page if need to
if (!page)
{
uint64 pageSize = Math::Max<uint64>(_pageSize, size);
uint64 pageSize = Math::Max<uint64>(_pageSize, size + alignment + sizeof(Page));
#if COMPILE_WITH_PROFILER
ProfilerMemory::OnGroupUpdate(ProfilerMemory::Groups::MallocArena, (int64)pageSize, 1);
#endif
page = (Page*)Allocator::Allocate(sizeof(Page));
page->Memory = Allocator::Allocate(pageSize);
page = (Page*)Allocator::Allocate(pageSize);
page->Next = _first;
page->Offset = 0;
page->Offset = sizeof(Page);
page->Size = (uint32)pageSize;
_first = page;
}
// Allocate within a page
page->Offset = Math::AlignUp(page->Offset, (uint32)alignment);
void* mem = (byte*)page->Memory + page->Offset;
void* mem = (byte*)page + page->Offset;
page->Offset += (uint32)size;
return mem;
@@ -64,15 +62,11 @@ void ConcurrentArenaAllocator::Free()
#if COMPILE_WITH_PROFILER
ProfilerMemory::OnGroupUpdate(ProfilerMemory::Groups::MallocArena, -(int64)page->Size, -1);
#endif
if (_free1)
_free1(page->Memory);
else
_free2(page->Memory, page->Size);
Page* next = page->Next;
if (_free1)
_free1(page);
else
_free2(page, sizeof(Page));
_free2(page, page->Size);
page = next;
}
@@ -103,7 +97,7 @@ RETRY:
goto RETRY;
}
Platform::InterlockedAdd(&_totalBytes, (int64)size);
return (byte*)page->Memory + offsetAligned;
return (byte*)page + offsetAligned;
}
}
@@ -113,14 +107,13 @@ RETRY:
// Check if page was unchanged by any other thread
if ((Page*)Platform::AtomicRead(&_first) == page)
{
uint64 pageSize = Math::Max<uint64>(_pageSize, size);
uint64 pageSize = Math::Max<uint64>(_pageSize, size + alignment + sizeof(Page));
#if COMPILE_WITH_PROFILER
ProfilerMemory::OnGroupUpdate(ProfilerMemory::Groups::MallocArena, (int64)pageSize, 1);
#endif
page = (Page*)(_allocate1 ? _allocate1(sizeof(Page), alignof(Page)) : _allocate2(sizeof(Page)));
page->Memory = _allocate1 ? _allocate1(pageSize, 16) : _allocate2(pageSize);
page = (Page*)(_allocate1 ? _allocate1(pageSize, 16) : _allocate2(pageSize));
page->Next = (Page*)_first;
page->Offset = 0;
page->Offset = sizeof(Page);
page->Size = (int64)pageSize;
Platform::AtomicStore(&_first, (intptr)page);
}

View File

@@ -13,7 +13,6 @@ class ArenaAllocator
private:
struct Page
{
void* Memory;
Page* Next;
uint32 Offset, Size;
};
@@ -75,7 +74,6 @@ class ConcurrentArenaAllocator
private:
struct Page
{
void* Memory;
Page* Next;
volatile int64 Offset;
int64 Size;