Fix render memory alignment issues to prevent crashes on Android

This commit is contained in:
Wojtek Figat
2025-09-06 23:36:54 +02:00
parent 857b0c5ac3
commit 44e70692a2
6 changed files with 32 additions and 21 deletions

View File

@@ -24,6 +24,11 @@ void ArenaAllocator::Free()
void* ArenaAllocator::Allocate(uint64 size, uint64 alignment)
{
if (size == 0)
return nullptr;
if (alignment < PLATFORM_MEMORY_ALIGNMENT)
alignment = PLATFORM_MEMORY_ALIGNMENT;
// Find the first page that has some space left
Page* page = _first;
while (page && page->Offset + size + alignment > page->Size)
@@ -79,6 +84,10 @@ void ConcurrentArenaAllocator::Free()
void* ConcurrentArenaAllocator::Allocate(uint64 size, uint64 alignment)
{
if (size == 0)
return nullptr;
if (alignment < PLATFORM_MEMORY_ALIGNMENT)
alignment = PLATFORM_MEMORY_ALIGNMENT;
RETRY:
// Check if the current page has some space left
@@ -120,6 +129,6 @@ RETRY:
_locker.Unlock();
// Use a single cde for allocation
// Use a single code for allocation
goto RETRY;
}

View File

@@ -79,7 +79,6 @@ private:
int64 Size;
};
int32 _pageSize;
volatile int64 _first = 0;
volatile int64 _totalBytes = 0;
void*(*_allocate1)(uint64 size, uint64 alignment) = nullptr;
@@ -87,19 +86,20 @@ private:
void*(*_allocate2)(uint64 size) = nullptr;
void(*_free2)(void* ptr, uint64 size) = nullptr;
CriticalSection _locker;
int32 _pageSize;
public:
ConcurrentArenaAllocator(int32 pageSizeBytes, void* (*customAllocate)(uint64 size, uint64 alignment), void(*customFree)(void* ptr))
: _pageSize(pageSizeBytes)
, _allocate1(customAllocate)
: _allocate1(customAllocate)
, _free1(customFree)
, _pageSize(pageSizeBytes)
{
}
ConcurrentArenaAllocator(int32 pageSizeBytes, void* (*customAllocate)(uint64 size), void(*customFree)(void* ptr, uint64 size))
: _pageSize(pageSizeBytes)
, _allocate2(customAllocate)
: _allocate2(customAllocate)
, _free2(customFree)
, _pageSize(pageSizeBytes)
{
}
@@ -120,7 +120,7 @@ public:
}
// Allocates a chunk of uninitialized memory.
void* Allocate(uint64 size, uint64 alignment = 1);
void* Allocate(uint64 size, uint64 alignment = PLATFORM_MEMORY_ALIGNMENT);
// Frees all memory allocations within allocator.
void Free();

View File

@@ -11,14 +11,13 @@
class CrtAllocator
{
public:
/// <summary>
/// Allocates memory on a specified alignment boundary.
/// </summary>
/// <param name="size">The size of the allocation (in bytes).</param>
/// <param name="alignment">The memory alignment (in bytes). Must be an integer power of 2.</param>
/// <returns>The pointer to the allocated chunk of the memory. The pointer is a multiple of alignment.</returns>
FORCE_INLINE static void* Allocate(uint64 size, uint64 alignment = 16)
FORCE_INLINE static void* Allocate(uint64 size, uint64 alignment = PLATFORM_MEMORY_ALIGNMENT)
{
return Platform::Allocate(size, alignment);
}

View File

@@ -201,6 +201,9 @@ API_ENUM() enum class ArchitectureType
#ifndef PLATFORM_THREADS_LIMIT
#define PLATFORM_THREADS_LIMIT 64
#endif
#ifndef PLATFORM_MEMORY_ALIGNMENT
#define PLATFORM_MEMORY_ALIGNMENT 16
#endif
#define PLATFORM_32BITS (!PLATFORM_64BITS)
// Platform family defines

View File

@@ -463,20 +463,20 @@ bool DrawCallsList::IsEmpty() const
RenderListAlloc::~RenderListAlloc()
{
if (!List && Data) // Render List memory doesn't need free (arena allocator)
if (NeedFree && Data) // Render List memory doesn't need free (arena allocator)
RendererAllocation::Free(Data, Size);
}
void* RenderListAlloc::Init(RenderList* list, uintptr size, uintptr alignment)
void* RenderListAlloc::Init(RenderList* list, uint32 size, uint32 alignment)
{
ASSERT_LOW_LAYER(!Data);
Size = size;
bool useList = alignment <= 16 && size < 1024;
List = useList ? list : nullptr;
if (useList)
Data = list->Memory.Allocate(size, alignment);
else
Data = RendererAllocation::Allocate(size);
if (size == 0)
return nullptr;
if (size < 1024 || (alignment != 16 && alignment != 8 && alignment != 4 && alignment != 1))
return (Data = list->Memory.Allocate(size, alignment));
NeedFree = true;
Data = RendererAllocation::Allocate(size);
return Data;
}

View File

@@ -281,16 +281,16 @@ struct DrawCallsList
// Small utility for allocating memory from RenderList arena pool with automatic fallback to shared RendererAllocation for larger memory blocks.
struct RenderListAlloc
{
RenderList* List;
void* Data = nullptr;
uintptr Size;
uint32 Size = 0;
bool NeedFree = false;
~RenderListAlloc();
void* Init(RenderList* list, uintptr size, uintptr alignment = 1);
void* Init(RenderList* list, uint32 size, uint32 alignment = 1);
template<typename T>
FORCE_INLINE T* Init(RenderList* list, int32 count, uintptr alignment = 1)
FORCE_INLINE T* Init(RenderList* list, int32 count, uint32 alignment = 1)
{
return (T*)Init(list, count * sizeof(T), alignment);
}