diff --git a/Source/Engine/GraphicsDevice/Vulkan/CmdBufferVulkan.cpp b/Source/Engine/GraphicsDevice/Vulkan/CmdBufferVulkan.cpp
index ecdc1f258..a2dbb7733 100644
--- a/Source/Engine/GraphicsDevice/Vulkan/CmdBufferVulkan.cpp
+++ b/Source/Engine/GraphicsDevice/Vulkan/CmdBufferVulkan.cpp
@@ -7,15 +7,13 @@
#include "QueueVulkan.h"
#include "GPUContextVulkan.h"
#include "GPUTimerQueryVulkan.h"
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
#include "DescriptorSetVulkan.h"
-#endif
void CmdBufferVulkan::AddWaitSemaphore(VkPipelineStageFlags waitFlags, SemaphoreVulkan* waitSemaphore)
{
- WaitFlags.Add(waitFlags);
- ASSERT(!WaitSemaphores.Contains(waitSemaphore));
- WaitSemaphores.Add(waitSemaphore);
+ _waitFlags.Add(waitFlags);
+ ASSERT(!_waitSemaphores.Contains(waitSemaphore));
+ _waitSemaphores.Add(waitSemaphore);
}
void CmdBufferVulkan::Begin()
@@ -25,11 +23,11 @@ void CmdBufferVulkan::Begin()
VkCommandBufferBeginInfo beginInfo;
RenderToolsVulkan::ZeroStruct(beginInfo, VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO);
beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
- VALIDATE_VULKAN_RESULT(vkBeginCommandBuffer(CommandBufferHandle, &beginInfo));
+ VALIDATE_VULKAN_RESULT(vkBeginCommandBuffer(_commandBufferHandle, &beginInfo));
#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
// Acquire a descriptor pool set if not already acquired
- if (CurrentDescriptorPoolSetContainer == nullptr)
+ if (_descriptorPoolSetContainer == nullptr)
{
AcquirePoolSet();
}
@@ -72,27 +70,23 @@ void CmdBufferVulkan::BeginRenderPass(RenderPassVulkan* renderPass, FramebufferV
info.clearValueCount = clearValueCount;
info.pClearValues = clearValues;
- vkCmdBeginRenderPass(CommandBufferHandle, &info, VK_SUBPASS_CONTENTS_INLINE);
+ vkCmdBeginRenderPass(_commandBufferHandle, &info, VK_SUBPASS_CONTENTS_INLINE);
_state = State::IsInsideRenderPass;
}
void CmdBufferVulkan::EndRenderPass()
{
ASSERT(IsInsideRenderPass());
- vkCmdEndRenderPass(CommandBufferHandle);
+ vkCmdEndRenderPass(_commandBufferHandle);
_state = State::IsInsideBegin;
}
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
-
void CmdBufferVulkan::AcquirePoolSet()
{
- ASSERT(!CurrentDescriptorPoolSetContainer);
- CurrentDescriptorPoolSetContainer = &_device->DescriptorPoolsManager->AcquirePoolSetContainer();
+ ASSERT(!_descriptorPoolSetContainer);
+ _descriptorPoolSetContainer = &_device->DescriptorPoolsManager->AcquirePoolSetContainer();
}
-#endif
-
#if GPU_ALLOW_PROFILE_EVENTS
void CmdBufferVulkan::BeginEvent(const Char* name)
@@ -138,19 +132,17 @@ void CmdBufferVulkan::RefreshFenceStatus()
{
_state = State::ReadyForBegin;
- SubmittedWaitSemaphores.Clear();
+ _submittedWaitSemaphores.Clear();
- vkResetCommandBuffer(CommandBufferHandle, VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
+ vkResetCommandBuffer(_commandBufferHandle, VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
_fence->GetOwner()->ResetFence(_fence);
- FenceSignaledCounter++;
+ _fenceSignaledCounter++;
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
- if (CurrentDescriptorPoolSetContainer)
+ if (_descriptorPoolSetContainer)
{
- _device->DescriptorPoolsManager->ReleasePoolSet(*CurrentDescriptorPoolSetContainer);
- CurrentDescriptorPoolSetContainer = nullptr;
+ _device->DescriptorPoolsManager->ReleasePoolSet(*_descriptorPoolSetContainer);
+ _descriptorPoolSetContainer = nullptr;
}
-#endif
}
}
else
@@ -161,20 +153,20 @@ void CmdBufferVulkan::RefreshFenceStatus()
CmdBufferVulkan::CmdBufferVulkan(GPUDeviceVulkan* device, CmdBufferPoolVulkan* pool)
: _device(device)
- , CommandBufferHandle(VK_NULL_HANDLE)
+ , _commandBufferHandle(VK_NULL_HANDLE)
, _state(State::ReadyForBegin)
, _fence(nullptr)
- , FenceSignaledCounter(0)
- , SubmittedFenceCounter(0)
- , CommandBufferPool(pool)
+ , _fenceSignaledCounter(0)
+ , _submittedFenceCounter(0)
+ , _commandBufferPool(pool)
{
VkCommandBufferAllocateInfo createCmdBufInfo;
RenderToolsVulkan::ZeroStruct(createCmdBufInfo, VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO);
createCmdBufInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
createCmdBufInfo.commandBufferCount = 1;
- createCmdBufInfo.commandPool = CommandBufferPool->GetHandle();
+ createCmdBufInfo.commandPool = _commandBufferPool->GetHandle();
- VALIDATE_VULKAN_RESULT(vkAllocateCommandBuffers(_device->Device, &createCmdBufInfo, &CommandBufferHandle));
+ VALIDATE_VULKAN_RESULT(vkAllocateCommandBuffers(_device->Device, &createCmdBufInfo, &_commandBufferHandle));
_fence = _device->FenceManager.AllocateFence();
}
@@ -193,7 +185,7 @@ CmdBufferVulkan::~CmdBufferVulkan()
fenceManager.ReleaseFence(_fence);
}
- vkFreeCommandBuffers(_device->Device, CommandBufferPool->GetHandle(), 1, &CommandBufferHandle);
+ vkFreeCommandBuffers(_device->Device, _commandBufferPool->GetHandle(), 1, &_commandBufferHandle);
}
CmdBufferVulkan* CmdBufferPoolVulkan::Create()
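Reviewer note (illustrative sketch, not part of the patch): the rename does not change the descriptor pool set lifecycle, it only hides the container behind an accessor. A minimal restatement against the new names, assuming already-created engine objects and a hypothetical helper:

// Sketch only - assumes a CmdBufferVulkan* obtained from its pool on an initialized device.
void SketchPoolSetLifecycle(CmdBufferVulkan* cmdBuffer)
{
    // Begin() calls AcquirePoolSet() when _descriptorPoolSetContainer is null, taking a
    // container from _device->DescriptorPoolsManager->AcquirePoolSetContainer().
    cmdBuffer->Begin();

    // Pipeline states now read the container through the accessor instead of the old
    // public CurrentDescriptorPoolSetContainer field.
    DescriptorPoolSetContainerVulkan* poolSet = cmdBuffer->GetDescriptorPoolSet();
    ASSERT(poolSet != nullptr);

    // ... record commands; descriptor sets are allocated out of poolSet ...

    // Once the fence signals, RefreshFenceStatus() resets the buffer and returns the
    // container via DescriptorPoolsManager->ReleasePoolSet(*container).
    cmdBuffer->RefreshFenceStatus();
}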
diff --git a/Source/Engine/GraphicsDevice/Vulkan/CmdBufferVulkan.h b/Source/Engine/GraphicsDevice/Vulkan/CmdBufferVulkan.h
index 85893d89f..553e844e7 100644
--- a/Source/Engine/GraphicsDevice/Vulkan/CmdBufferVulkan.h
+++ b/Source/Engine/GraphicsDevice/Vulkan/CmdBufferVulkan.h
@@ -11,9 +11,7 @@
class GPUDeviceVulkan;
class CmdBufferPoolVulkan;
class QueueVulkan;
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
class DescriptorPoolSetContainerVulkan;
-#endif
///
/// Implementation of the command buffer for the Vulkan backend.
@@ -36,20 +34,20 @@ public:
private:
GPUDeviceVulkan* _device;
- VkCommandBuffer CommandBufferHandle;
+ VkCommandBuffer _commandBufferHandle;
State _state;
- Array<VkPipelineStageFlags> WaitFlags;
- Array<SemaphoreVulkan*> WaitSemaphores;
- Array<SemaphoreVulkan*> SubmittedWaitSemaphores;
+ Array<VkPipelineStageFlags> _waitFlags;
+ Array<SemaphoreVulkan*> _waitSemaphores;
+ Array<SemaphoreVulkan*> _submittedWaitSemaphores;
void MarkSemaphoresAsSubmitted()
{
- WaitFlags.Clear();
+ _waitFlags.Clear();
// Move to pending delete list
- SubmittedWaitSemaphores = WaitSemaphores;
- WaitSemaphores.Clear();
+ _submittedWaitSemaphores = _waitSemaphores;
+ _waitSemaphores.Clear();
}
FenceVulkan* _fence;
@@ -58,12 +56,14 @@ private:
#endif
// Last value passed after the fence got signaled
- volatile uint64 FenceSignaledCounter;
+ volatile uint64 _fenceSignaledCounter;
// Last value when we submitted the cmd buffer; useful to track down if something waiting for the fence has actually been submitted
- volatile uint64 SubmittedFenceCounter;
+ volatile uint64 _submittedFenceCounter;
- CmdBufferPoolVulkan* CommandBufferPool;
+ CmdBufferPoolVulkan* _commandBufferPool;
+
+ DescriptorPoolSetContainerVulkan* _descriptorPoolSetContainer = nullptr;
public:
@@ -75,7 +75,7 @@ public:
CmdBufferPoolVulkan* GetOwner()
{
- return CommandBufferPool;
+ return _commandBufferPool;
}
State GetState()
@@ -115,17 +115,17 @@ public:
inline VkCommandBuffer GetHandle() const
{
- return CommandBufferHandle;
+ return _commandBufferHandle;
}
inline volatile uint64 GetFenceSignaledCounter() const
{
- return FenceSignaledCounter;
+ return _fenceSignaledCounter;
}
inline volatile uint64 GetSubmittedFenceCounter() const
{
- return SubmittedFenceCounter;
+ return _submittedFenceCounter;
}
public:
@@ -138,11 +138,12 @@ public:
void BeginRenderPass(RenderPassVulkan* renderPass, FramebufferVulkan* framebuffer, uint32 clearValueCount, VkClearValue* clearValues);
void EndRenderPass();
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
- DescriptorPoolSetContainerVulkan* CurrentDescriptorPoolSetContainer = nullptr;
+ DescriptorPoolSetContainerVulkan* GetDescriptorPoolSet() const
+ {
+ return _descriptorPoolSetContainer;
+ }
void AcquirePoolSet();
-#endif
#if GPU_ALLOW_PROFILE_EVENTS
void BeginEvent(const Char* name);
diff --git a/Source/Engine/GraphicsDevice/Vulkan/DescriptorSetVulkan.cpp b/Source/Engine/GraphicsDevice/Vulkan/DescriptorSetVulkan.cpp
index ea7298ea9..1a60dbbae 100644
--- a/Source/Engine/GraphicsDevice/Vulkan/DescriptorSetVulkan.cpp
+++ b/Source/Engine/GraphicsDevice/Vulkan/DescriptorSetVulkan.cpp
@@ -11,11 +11,7 @@
#include "GPUAdapterVulkan.h"
#include "CmdBufferVulkan.h"
#include "Engine/Threading/Threading.h"
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
#include "Engine/Engine/Engine.h"
-#endif
-
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
void DescriptorSetLayoutInfoVulkan::CacheTypesUsageID()
{
@@ -23,7 +19,7 @@ void DescriptorSetLayoutInfoVulkan::CacheTypesUsageID()
static uint32 uniqueID = 1;
static Dictionary<uint32, uint32> typesUsageHashMap;
- const uint32 typesUsageHash = Crc::MemCrc32(LayoutTypes, sizeof(LayoutTypes));
+ const uint32 typesUsageHash = Crc::MemCrc32(_layoutTypes, sizeof(_layoutTypes));
ScopeLock lock(locker);
uint32 id;
if (!typesUsageHashMap.TryGet(typesUsageHash, id))
@@ -34,19 +30,17 @@ void DescriptorSetLayoutInfoVulkan::CacheTypesUsageID()
_typesUsageID = id;
}
-#endif
-
void DescriptorSetLayoutInfoVulkan::AddDescriptor(int32 descriptorSetIndex, const VkDescriptorSetLayoutBinding& descriptor)
{
// Increment type usage
- LayoutTypes[descriptor.descriptorType]++;
+ _layoutTypes[descriptor.descriptorType]++;
- if (descriptorSetIndex >= SetLayouts.Count())
+ if (descriptorSetIndex >= _setLayouts.Count())
{
- SetLayouts.Resize(descriptorSetIndex + 1);
+ _setLayouts.Resize(descriptorSetIndex + 1);
}
- SetLayout& descSetLayout = SetLayouts[descriptorSetIndex];
+ SetLayout& descSetLayout = _setLayouts[descriptorSetIndex];
descSetLayout.LayoutBindings.Add(descriptor);
// TODO: manual hash update method?
@@ -92,51 +86,51 @@ void DescriptorSetLayoutVulkan::Compile()
const VkPhysicalDeviceLimits& limits = _device->PhysicalDeviceLimits;
// Check for maxDescriptorSetSamplers
- ASSERT(LayoutTypes[VK_DESCRIPTOR_TYPE_SAMPLER]
- + LayoutTypes[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER]
+ ASSERT(_layoutTypes[VK_DESCRIPTOR_TYPE_SAMPLER]
+ + _layoutTypes[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER]
< limits.maxDescriptorSetSamplers);
// Check for maxDescriptorSetUniformBuffers
- ASSERT(LayoutTypes[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER]
- + LayoutTypes[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC]
+ ASSERT(_layoutTypes[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER]
+ + _layoutTypes[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC]
< limits.maxDescriptorSetUniformBuffers);
// Check for maxDescriptorSetUniformBuffersDynamic
if (!_device->Adapter->IsAMD())
{
- ASSERT(LayoutTypes[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC]
+ ASSERT(_layoutTypes[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC]
< limits.maxDescriptorSetUniformBuffersDynamic);
}
// Check for maxDescriptorSetStorageBuffers
- ASSERT(LayoutTypes[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER]
- + LayoutTypes[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC]
+ ASSERT(_layoutTypes[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER]
+ + _layoutTypes[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC]
< limits.maxDescriptorSetStorageBuffers);
// Check for maxDescriptorSetStorageBuffersDynamic
- if (LayoutTypes[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] > limits.maxDescriptorSetUniformBuffersDynamic)
+ if (_layoutTypes[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] > limits.maxDescriptorSetUniformBuffersDynamic)
{
// TODO: Downgrade to non-dynamic?
}
- ASSERT(LayoutTypes[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC]
+ ASSERT(_layoutTypes[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC]
< limits.maxDescriptorSetStorageBuffersDynamic);
// Check for maxDescriptorSetSampledImages
- ASSERT(LayoutTypes[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER]
- + LayoutTypes[VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE]
- + LayoutTypes[VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER]
+ ASSERT(_layoutTypes[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER]
+ + _layoutTypes[VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE]
+ + _layoutTypes[VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER]
< limits.maxDescriptorSetSampledImages);
// Check for maxDescriptorSetStorageImages
- ASSERT(LayoutTypes[VK_DESCRIPTOR_TYPE_STORAGE_IMAGE]
- + LayoutTypes[VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER]
+ ASSERT(_layoutTypes[VK_DESCRIPTOR_TYPE_STORAGE_IMAGE]
+ + _layoutTypes[VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER]
< limits.maxDescriptorSetStorageImages);
- _handles.Resize(SetLayouts.Count());
+ _handles.Resize(_setLayouts.Count());
- for (int32 i = 0; i < SetLayouts.Count(); i++)
+ for (int32 i = 0; i < _setLayouts.Count(); i++)
{
- auto& layout = SetLayouts[i];
+ auto& layout = _setLayouts[i];
VkDescriptorSetLayoutCreateInfo layoutInfo;
RenderToolsVulkan::ZeroStruct(layoutInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
@@ -146,7 +140,6 @@ void DescriptorSetLayoutVulkan::Compile()
VALIDATE_VULKAN_RESULT(vkCreateDescriptorSetLayout(_device->Device, &layoutInfo, nullptr, &_handles[i]));
}
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
if (_typesUsageID == ~0)
{
CacheTypesUsageID();
@@ -155,34 +148,26 @@ void DescriptorSetLayoutVulkan::Compile()
RenderToolsVulkan::ZeroStruct(_allocateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO);
_allocateInfo.descriptorSetCount = _handles.Count();
_allocateInfo.pSetLayouts = _handles.Get();
-#endif
}
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
DescriptorPoolVulkan::DescriptorPoolVulkan(GPUDeviceVulkan* device, const DescriptorSetLayoutVulkan& layout)
-#else
-DescriptorPoolVulkan::DescriptorPoolVulkan(GPUDeviceVulkan* device)
-#endif
: _device(device)
, _handle(VK_NULL_HANDLE)
- , MaxDescriptorSets(0)
- , NumAllocatedDescriptorSets(0)
- , PeakAllocatedDescriptorSets(0)
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
+ , DescriptorSetsMax(0)
+ , AllocatedDescriptorSetsCount(0)
+ , AllocatedDescriptorSetsCountMax(0)
, Layout(layout)
-#endif
{
Array<VkDescriptorPoolSize> types;
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
- // Max number of descriptor sets layout allocations
+ // The maximum number of descriptor set layout allocations to hold
const uint32 MaxSetsAllocations = 256;
// The number of descriptor sets required to cover the maximum number of descriptor set layout allocations.
// When pools are hashed by types usage ID, a descriptor pool can be reused for different layouts, so the initial layout is only a sizing hint.
// In that case we will probably over-allocate descriptor types, but given the relatively small number of maximum allocations this should not have
// a serious impact.
- MaxDescriptorSets = MaxSetsAllocations * (VULKAN_HASH_POOLS_WITH_TYPES_USAGE_ID ? 1 : Layout.GetLayouts().Count());
+ DescriptorSetsMax = MaxSetsAllocations * (VULKAN_HASH_POOLS_WITH_TYPES_USAGE_ID ? 1 : Layout.GetLayouts().Count());
for (uint32 typeIndex = VULKAN_DESCRIPTOR_TYPE_BEGIN; typeIndex <= VULKAN_DESCRIPTOR_TYPE_END; typeIndex++)
{
const VkDescriptorType descriptorType = (VkDescriptorType)typeIndex;
@@ -195,55 +180,13 @@ DescriptorPoolVulkan::DescriptorPoolVulkan(GPUDeviceVulkan* device)
type.descriptorCount = typesUsed * MaxSetsAllocations;
}
}
-#else
- MaxDescriptorSets = 16384;
-
- Platform::MemoryClear(MaxAllocatedTypes, sizeof(MaxAllocatedTypes));
- Platform::MemoryClear(NumAllocatedTypes, sizeof(NumAllocatedTypes));
- Platform::MemoryClear(PeakAllocatedTypes, sizeof(PeakAllocatedTypes));
-
- VkDescriptorPoolSize type;
-
- type.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
- type.descriptorCount = 2048;
- types.Add(type);
-
- type.type = VK_DESCRIPTOR_TYPE_SAMPLER;
- type.descriptorCount = 1024;
- types.Add(type);
-
- type.type = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
- type.descriptorCount = 512;
- types.Add(type);
-
- type.type = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
- type.descriptorCount = 512;
- types.Add(type);
-
- type.type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
- type.descriptorCount = 512;
- types.Add(type);
-
- type.type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
- type.descriptorCount = 512;
- types.Add(type);
-
- type.type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
- type.descriptorCount = 2048;
- types.Add(type);
-
- for (const VkDescriptorPoolSize& poolSize : types)
- {
- MaxAllocatedTypes[poolSize.type] = poolSize.descriptorCount;
- }
-#endif
VkDescriptorPoolCreateInfo createInfo;
RenderToolsVulkan::ZeroStruct(createInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO);
createInfo.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
createInfo.poolSizeCount = types.Count();
createInfo.pPoolSizes = types.Get();
- createInfo.maxSets = MaxDescriptorSets;
+ createInfo.maxSets = DescriptorSetsMax;
VALIDATE_VULKAN_RESULT(vkCreateDescriptorPool(_device->Device, &createInfo, nullptr, &_handle));
}
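Reviewer note: a worked sizing example for the constructor above (hypothetical numbers, not taken from the patch):

// With MaxSetsAllocations = 256 and VULKAN_HASH_POOLS_WITH_TYPES_USAGE_ID enabled, a layout
// using 4 uniform buffers and 8 sampled images per set yields:
//   DescriptorSetsMax              = 256 * 1 = 256
//   uniform-buffer descriptorCount = 4 * 256 = 1024
//   sampled-image descriptorCount  = 8 * 256 = 2048
// i.e. one DescriptorPoolVulkan serves up to 256 set allocations for any layout sharing the
// same types usage ID before GPUContextVulkan has to create another pool.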
@@ -260,16 +203,11 @@ void DescriptorPoolVulkan::TrackAddUsage(const DescriptorSetLayoutVulkan& layout
// Check and increment our current type usage
for (uint32 typeIndex = VULKAN_DESCRIPTOR_TYPE_BEGIN; typeIndex <= VULKAN_DESCRIPTOR_TYPE_END; typeIndex++)
{
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
ASSERT(Layout.GetTypesUsed((VkDescriptorType)typeIndex) == layout.GetTypesUsed((VkDescriptorType)typeIndex));
-#else
- NumAllocatedTypes[typeIndex] += (int32)layout.GetTypesUsed((VkDescriptorType)typeIndex);
- PeakAllocatedTypes[typeIndex] = Math::Max(PeakAllocatedTypes[typeIndex], NumAllocatedTypes[typeIndex]);
-#endif
}
- NumAllocatedDescriptorSets += layout.GetLayouts().Count();
- PeakAllocatedDescriptorSets = Math::Max(NumAllocatedDescriptorSets, PeakAllocatedDescriptorSets);
+ AllocatedDescriptorSetsCount += layout.GetLayouts().Count();
+ AllocatedDescriptorSetsCountMax = Math::Max(AllocatedDescriptorSetsCount, AllocatedDescriptorSetsCountMax);
}
void DescriptorPoolVulkan::TrackRemoveUsage(const DescriptorSetLayoutVulkan& layout)
@@ -277,26 +215,19 @@ void DescriptorPoolVulkan::TrackRemoveUsage(const DescriptorSetLayoutVulkan& lay
// Check and increment our current type usage
for (uint32 typeIndex = VULKAN_DESCRIPTOR_TYPE_BEGIN; typeIndex <= VULKAN_DESCRIPTOR_TYPE_END; typeIndex++)
{
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
ASSERT(Layout.GetTypesUsed((VkDescriptorType)typeIndex) == layout.GetTypesUsed((VkDescriptorType)typeIndex));
-#else
- NumAllocatedTypes[typeIndex] -= (int32)layout.GetTypesUsed((VkDescriptorType)typeIndex);
- ASSERT(NumAllocatedTypes[typeIndex] >= 0);
-#endif
}
- NumAllocatedDescriptorSets -= layout.GetLayouts().Count();
+ AllocatedDescriptorSetsCount -= layout.GetLayouts().Count();
}
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
-
void DescriptorPoolVulkan::Reset()
{
if (_handle != VK_NULL_HANDLE)
{
VALIDATE_VULKAN_RESULT(vkResetDescriptorPool(_device->Device, _handle, 0));
}
- NumAllocatedDescriptorSets = 0;
+ AllocatedDescriptorSetsCount = 0;
}
bool DescriptorPoolVulkan::AllocateDescriptorSets(const VkDescriptorSetAllocateInfo& descriptorSetAllocateInfo, VkDescriptorSet* result)
@@ -370,10 +301,6 @@ void TypedDescriptorPoolSetVulkan::Reset()
_poolListCurrent = _poolListHead;
}
-#endif
-
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
-
DescriptorPoolSetContainerVulkan::DescriptorPoolSetContainerVulkan(GPUDeviceVulkan* device)
: _device(device)
, _lastFrameUsed(Engine::FrameCount)
@@ -464,8 +391,6 @@ void DescriptorPoolsManagerVulkan::GC()
}
}
-#endif
-
PipelineLayoutVulkan::PipelineLayoutVulkan(GPUDeviceVulkan* device, const DescriptorSetLayoutInfoVulkan& layout)
: _device(device)
, _handle(VK_NULL_HANDLE)
@@ -491,84 +416,10 @@ PipelineLayoutVulkan::~PipelineLayoutVulkan()
}
}
-#if !VULKAN_USE_DESCRIPTOR_POOL_MANAGER
-
-DescriptorSetsVulkan::DescriptorSetsVulkan(GPUDeviceVulkan* device, const DescriptorSetLayoutVulkan& layout, GPUContextVulkan* context)
- : _device(device)
- , _pool(nullptr)
- , _layout(&layout)
-{
- const auto& layoutHandles = layout.GetHandles();
- if (layoutHandles.HasItems())
- {
- VkDescriptorSetAllocateInfo allocateInfo;
- RenderToolsVulkan::ZeroStruct(allocateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO);
- allocateInfo.descriptorSetCount = layoutHandles.Count();
- allocateInfo.pSetLayouts = layoutHandles.Get();
-
- _sets.AddZeroed(layoutHandles.Count());
-
- _pool = context->AllocateDescriptorSets(allocateInfo, layout, _sets.Get());
- _pool->TrackAddUsage(layout);
- }
-}
-
-DescriptorSetsVulkan::~DescriptorSetsVulkan()
-{
-}
-
-DescriptorSetRingBufferVulkan::DescriptorSetRingBufferVulkan(GPUDeviceVulkan* device)
- : _device(device)
- , _currDescriptorSets(nullptr)
-{
-}
-
-DescriptorSetsVulkan* DescriptorSetRingBufferVulkan::RequestDescriptorSets(GPUContextVulkan* context, CmdBufferVulkan* cmdBuffer, const PipelineLayoutVulkan* layout)
-{
- DescriptorSetsEntry* foundEntry = nullptr;
- for (DescriptorSetsEntry* descriptorSetsEntry : DescriptorSetsEntries)
- {
- if (descriptorSetsEntry->CmdBuffer == cmdBuffer)
- {
- foundEntry = descriptorSetsEntry;
- }
- }
-
- if (!foundEntry)
- {
- if (!layout->HasDescriptors())
- {
- return nullptr;
- }
-
- foundEntry = New<DescriptorSetsEntry>(cmdBuffer);
- DescriptorSetsEntries.Add(foundEntry);
- }
-
- const uint64 cmdBufferFenceSignaledCounter = cmdBuffer->GetFenceSignaledCounter();
- for (int32 i = 0; i < foundEntry->Pairs.Count(); i++)
- {
- DescriptorSetsPair& entry = foundEntry->Pairs[i];
- if (entry.FenceCounter < cmdBufferFenceSignaledCounter)
- {
- entry.FenceCounter = cmdBufferFenceSignaledCounter;
- return entry.DescriptorSets;
- }
- }
-
- DescriptorSetsPair newEntry;
- newEntry.DescriptorSets = New<DescriptorSetsVulkan>(_device, layout->GetDescriptorSetLayout(), context);
- newEntry.FenceCounter = cmdBufferFenceSignaledCounter;
- foundEntry->Pairs.Add(newEntry);
- return newEntry.DescriptorSets;
-}
-
-#endif
-
uint32 DescriptorSetWriterVulkan::SetupDescriptorWrites(const SpirvShaderDescriptorInfo& info, VkWriteDescriptorSet* writeDescriptors, VkDescriptorImageInfo* imageInfo, VkDescriptorBufferInfo* bufferInfo, uint8* bindingToDynamicOffsetMap)
{
WriteDescriptors = writeDescriptors;
- NumWrites = info.DescriptorTypesCount;
+ WritesCount = info.DescriptorTypesCount;
ASSERT(info.DescriptorTypesCount <= 64 && TEXT("Out of bits for Dirty Mask! More than 64 resources in one descriptor set!"));
BindingToDynamicOffsetMap = bindingToDynamicOffsetMap;
diff --git a/Source/Engine/GraphicsDevice/Vulkan/DescriptorSetVulkan.h b/Source/Engine/GraphicsDevice/Vulkan/DescriptorSetVulkan.h
index 4f33d88a1..78ea83eb2 100644
--- a/Source/Engine/GraphicsDevice/Vulkan/DescriptorSetVulkan.h
+++ b/Source/Engine/GraphicsDevice/Vulkan/DescriptorSetVulkan.h
@@ -9,9 +9,7 @@
#include "IncludeVulkanHeaders.h"
#include "Types.h"
#include "Config.h"
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
#include "Engine/Platform/CriticalSection.h"
-#endif
#if GRAPHICS_API_VULKAN
@@ -42,7 +40,7 @@ namespace DescriptorSet
Domain = 4,
// Graphics pipeline stages count
- NumGfxStages = 5,
+ GraphicsStagesCount = 5,
// Compute pipeline slot
Compute = 0,
@@ -113,48 +111,42 @@ public:
protected:
- uint32 LayoutTypes[VULKAN_DESCRIPTOR_TYPE_END];
- Array<SetLayout> SetLayouts;
-
+ uint32 _layoutTypes[VULKAN_DESCRIPTOR_TYPE_END];
+ Array<SetLayout> _setLayouts;
uint32 _hash = 0;
-
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
uint32 _typesUsageID = ~0;
void CacheTypesUsageID();
-#endif
void AddDescriptor(int32 descriptorSetIndex, const VkDescriptorSetLayoutBinding& descriptor);
public:
DescriptorSetLayoutInfoVulkan()
{
- Platform::MemoryClear(LayoutTypes, sizeof(LayoutTypes));
+ Platform::MemoryClear(_layoutTypes, sizeof(_layoutTypes));
}
public:
inline uint32 GetTypesUsed(VkDescriptorType type) const
{
- return LayoutTypes[type];
+ return _layoutTypes[type];
}
const Array<SetLayout>& GetLayouts() const
{
- return SetLayouts;
+ return _setLayouts;
}
inline const uint32* GetLayoutTypes() const
{
- return LayoutTypes;
+ return _layoutTypes;
}
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
inline uint32 GetTypesUsageID() const
{
return _typesUsageID;
}
-#endif
public:
@@ -162,40 +154,27 @@ public:
void CopyFrom(const DescriptorSetLayoutInfoVulkan& info)
{
- Platform::MemoryCopy(LayoutTypes, info.LayoutTypes, sizeof(LayoutTypes));
+ Platform::MemoryCopy(_layoutTypes, info._layoutTypes, sizeof(_layoutTypes));
_hash = info._hash;
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
_typesUsageID = info._typesUsageID;
-#endif
- SetLayouts = info.SetLayouts;
+ _setLayouts = info._setLayouts;
}
inline bool operator ==(const DescriptorSetLayoutInfoVulkan& other) const
{
- if (other.SetLayouts.Count() != SetLayouts.Count())
- {
+ if (other._setLayouts.Count() != _setLayouts.Count())
return false;
- }
-
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
if (other._typesUsageID != _typesUsageID)
- {
return false;
- }
-#endif
- for (int32 index = 0; index < other.SetLayouts.Count(); index++)
+ for (int32 index = 0; index < other._setLayouts.Count(); index++)
{
- const int32 numBindings = SetLayouts[index].LayoutBindings.Count();
- if (other.SetLayouts[index].LayoutBindings.Count() != numBindings)
- {
+ const int32 bindingsCount = _setLayouts[index].LayoutBindings.Count();
+ if (other._setLayouts[index].LayoutBindings.Count() != bindingsCount)
return false;
- }
- if (numBindings != 0 && Platform::MemoryCompare(other.SetLayouts[index].LayoutBindings.Get(), SetLayouts[index].LayoutBindings.Get(), numBindings * sizeof(VkDescriptorSetLayoutBinding)))
- {
+ if (bindingsCount != 0 && Platform::MemoryCompare(other._setLayouts[index].LayoutBindings.Get(), _setLayouts[index].LayoutBindings.Get(), bindingsCount * sizeof(VkDescriptorSetLayoutBinding)))
return false;
- }
}
return true;
@@ -217,9 +196,7 @@ private:
GPUDeviceVulkan* _device;
DescriptorSetLayoutHandlesArray _handles;
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
VkDescriptorSetAllocateInfo _allocateInfo;
-#endif
public:
@@ -228,24 +205,22 @@ public:
public:
- void Compile();
-
inline const DescriptorSetLayoutHandlesArray& GetHandles() const
{
return _handles;
}
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
inline const VkDescriptorSetAllocateInfo& GetAllocateInfo() const
{
return _allocateInfo;
}
-#endif
friend inline uint32 GetHash(const DescriptorSetLayoutVulkan& key)
{
return key._hash;
}
+
+ void Compile();
};
class DescriptorPoolVulkan
@@ -255,25 +230,15 @@ private:
GPUDeviceVulkan* _device;
VkDescriptorPool _handle;
- uint32 MaxDescriptorSets;
- uint32 NumAllocatedDescriptorSets;
- uint32 PeakAllocatedDescriptorSets;
+ uint32 DescriptorSetsMax;
+ uint32 AllocatedDescriptorSetsCount;
+ uint32 AllocatedDescriptorSetsCountMax;
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
const DescriptorSetLayoutVulkan& Layout;
-#else
- int32 MaxAllocatedTypes[VULKAN_DESCRIPTOR_TYPE_END];
- int32 NumAllocatedTypes[VULKAN_DESCRIPTOR_TYPE_END];
- int32 PeakAllocatedTypes[VULKAN_DESCRIPTOR_TYPE_END];
-#endif
public:
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
DescriptorPoolVulkan(GPUDeviceVulkan* device, const DescriptorSetLayoutVulkan& layout);
-#else
- DescriptorPoolVulkan(GPUDeviceVulkan* device);
-#endif
~DescriptorPoolVulkan();
@@ -286,43 +251,28 @@ public:
inline bool IsEmpty() const
{
- return NumAllocatedDescriptorSets == 0;
+ return AllocatedDescriptorSetsCount == 0;
}
inline bool CanAllocate(const DescriptorSetLayoutVulkan& layout) const
{
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
- return MaxDescriptorSets > NumAllocatedDescriptorSets + layout.GetLayouts().Count();
-#else
- for (uint32 typeIndex = VULKAN_DESCRIPTOR_TYPE_BEGIN; typeIndex <= VULKAN_DESCRIPTOR_TYPE_END; typeIndex++)
- {
- if (NumAllocatedTypes[typeIndex] + (int32)layout.GetTypesUsed((VkDescriptorType)typeIndex) > MaxAllocatedTypes[typeIndex])
- {
- return false;
- }
- }
- return true;
-#endif
+ return DescriptorSetsMax > AllocatedDescriptorSetsCount + layout.GetLayouts().Count();
+ }
+
+ inline uint32 GetAllocatedDescriptorSetsCount() const
+ {
+ return AllocatedDescriptorSetsCount;
}
void TrackAddUsage(const DescriptorSetLayoutVulkan& layout);
void TrackRemoveUsage(const DescriptorSetLayoutVulkan& layout);
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
void Reset();
bool AllocateDescriptorSets(const VkDescriptorSetAllocateInfo& descriptorSetAllocateInfo, VkDescriptorSet* result);
-
- inline uint32 GetNumAllocatedDescriptorSets() const
- {
- return NumAllocatedDescriptorSets;
- }
-#endif
};
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
-
class DescriptorPoolSetContainerVulkan;
class TypedDescriptorPoolSetVulkan
@@ -428,8 +378,6 @@ public:
void GC();
};
-#endif
-
class PipelineLayoutVulkan
{
private:
@@ -479,115 +427,6 @@ struct DescriptorSetWriteContainerVulkan
}
};
-#if !VULKAN_USE_DESCRIPTOR_POOL_MANAGER
-
-class DescriptorSetsVulkan
-{
-public:
-
- typedef Array<VkDescriptorSet> DescriptorSetArray;
-
-private:
-
- GPUDeviceVulkan* _device;
- DescriptorPoolVulkan* _pool;
- const DescriptorSetLayoutVulkan* _layout;
- DescriptorSetArray _sets;
-
-public:
-
- DescriptorSetsVulkan(GPUDeviceVulkan* device, const DescriptorSetLayoutVulkan& layout, GPUContextVulkan* context);
- ~DescriptorSetsVulkan();
-
-public:
-
- inline const DescriptorSetArray& GetHandles() const
- {
- return _sets;
- }
-
- inline void Bind(VkCommandBuffer cmdBuffer, VkPipelineLayout pipelineLayout, VkPipelineBindPoint bindPoint, const Array<uint32>& dynamicOffsets) const
- {
- vkCmdBindDescriptorSets(cmdBuffer, bindPoint, pipelineLayout, 0, _sets.Count(), _sets.Get(), dynamicOffsets.Count(), dynamicOffsets.Get());
- }
-};
-
-class DescriptorSetRingBufferVulkan
-{
-private:
-
- GPUDeviceVulkan* _device;
- DescriptorSetsVulkan* _currDescriptorSets;
-
- struct DescriptorSetsPair
- {
- uint64 FenceCounter;
- DescriptorSetsVulkan* DescriptorSets;
-
- DescriptorSetsPair()
- : FenceCounter(0)
- , DescriptorSets(nullptr)
- {
- }
- };
-
- struct DescriptorSetsEntry
- {
- CmdBufferVulkan* CmdBuffer;
- Array<DescriptorSetsPair> Pairs;
-
- DescriptorSetsEntry(CmdBufferVulkan* cmdBuffer)
- : CmdBuffer(cmdBuffer)
- {
- }
-
- ~DescriptorSetsEntry()
- {
- for (auto& pair : Pairs)
- {
- Delete(pair.DescriptorSets);
- }
- }
- };
-
- Array<DescriptorSetsEntry*> DescriptorSetsEntries;
-
-public:
-
- DescriptorSetRingBufferVulkan(GPUDeviceVulkan* device);
-
- virtual ~DescriptorSetRingBufferVulkan()
- {
- }
-
-public:
-
- void Reset()
- {
- _currDescriptorSets = nullptr;
- }
-
- void Release()
- {
- DescriptorSetsEntries.ClearDelete();
- }
-
- void Set(DescriptorSetsVulkan* newDescriptorSets)
- {
- _currDescriptorSets = newDescriptorSets;
- }
-
- inline void Bind(VkCommandBuffer cmdBuffer, VkPipelineLayout pipelineLayout, VkPipelineBindPoint bindPoint, const Array<uint32>& dynamicOffsets)
- {
- ASSERT(_currDescriptorSets);
- _currDescriptorSets->Bind(cmdBuffer, pipelineLayout, bindPoint, dynamicOffsets);
- }
-
- DescriptorSetsVulkan* RequestDescriptorSets(GPUContextVulkan* context, CmdBufferVulkan* cmdBuffer, const PipelineLayoutVulkan* layout);
-};
-
-#endif
-
class DescriptorSetWriterVulkan
{
public:
@@ -595,7 +434,7 @@ public:
VkWriteDescriptorSet* WriteDescriptors;
uint8* BindingToDynamicOffsetMap;
uint32* DynamicOffsets;
- uint32 NumWrites;
+ uint32 WritesCount;
public:
@@ -603,7 +442,7 @@ public:
: WriteDescriptors(nullptr)
, BindingToDynamicOffsetMap(nullptr)
, DynamicOffsets(nullptr)
- , NumWrites(0)
+ , WritesCount(0)
{
}
@@ -613,7 +452,7 @@ public:
bool WriteUniformBuffer(uint32 descriptorIndex, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize range) const
{
- ASSERT(descriptorIndex < NumWrites);
+ ASSERT(descriptorIndex < WritesCount);
ASSERT(WriteDescriptors[descriptorIndex].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
VkDescriptorBufferInfo* bufferInfo = const_cast<VkDescriptorBufferInfo*>(WriteDescriptors[descriptorIndex].pBufferInfo);
ASSERT(bufferInfo);
@@ -625,7 +464,7 @@ public:
bool WriteDynamicUniformBuffer(uint32 descriptorIndex, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize range, uint32 dynamicOffset) const
{
- ASSERT(descriptorIndex < NumWrites);
+ ASSERT(descriptorIndex < WritesCount);
ASSERT(WriteDescriptors[descriptorIndex].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
VkDescriptorBufferInfo* bufferInfo = const_cast<VkDescriptorBufferInfo*>(WriteDescriptors[descriptorIndex].pBufferInfo);
ASSERT(bufferInfo);
@@ -639,7 +478,7 @@ public:
bool WriteSampler(uint32 descriptorIndex, VkSampler sampler) const
{
- ASSERT(descriptorIndex < NumWrites);
+ ASSERT(descriptorIndex < WritesCount);
ASSERT(WriteDescriptors[descriptorIndex].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || WriteDescriptors[descriptorIndex].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
VkDescriptorImageInfo* imageInfo = const_cast<VkDescriptorImageInfo*>(WriteDescriptors[descriptorIndex].pImageInfo);
ASSERT(imageInfo);
@@ -649,7 +488,7 @@ public:
bool WriteImage(uint32 descriptorIndex, VkImageView imageView, VkImageLayout layout) const
{
- ASSERT(descriptorIndex < NumWrites);
+ ASSERT(descriptorIndex < WritesCount);
ASSERT(WriteDescriptors[descriptorIndex].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE || WriteDescriptors[descriptorIndex].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
VkDescriptorImageInfo* imageInfo = const_cast<VkDescriptorImageInfo*>(WriteDescriptors[descriptorIndex].pImageInfo);
ASSERT(imageInfo);
@@ -660,7 +499,7 @@ public:
bool WriteStorageImage(uint32 descriptorIndex, VkImageView imageView, VkImageLayout layout) const
{
- ASSERT(descriptorIndex < NumWrites);
+ ASSERT(descriptorIndex < WritesCount);
ASSERT(WriteDescriptors[descriptorIndex].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
VkDescriptorImageInfo* imageInfo = const_cast<VkDescriptorImageInfo*>(WriteDescriptors[descriptorIndex].pImageInfo);
ASSERT(imageInfo);
@@ -671,7 +510,7 @@ public:
bool WriteStorageTexelBuffer(uint32 descriptorIndex, const VkBufferView* bufferView) const
{
- ASSERT(descriptorIndex < NumWrites);
+ ASSERT(descriptorIndex < WritesCount);
ASSERT(WriteDescriptors[descriptorIndex].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
WriteDescriptors[descriptorIndex].pTexelBufferView = bufferView;
return true;
@@ -679,7 +518,7 @@ public:
bool WriteStorageBuffer(uint32 descriptorIndex, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize range) const
{
- ASSERT(descriptorIndex < NumWrites);
+ ASSERT(descriptorIndex < WritesCount);
ASSERT(WriteDescriptors[descriptorIndex].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || WriteDescriptors[descriptorIndex].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
VkDescriptorBufferInfo* bufferInfo = const_cast<VkDescriptorBufferInfo*>(WriteDescriptors[descriptorIndex].pBufferInfo);
ASSERT(bufferInfo);
@@ -691,14 +530,14 @@ public:
bool WriteUniformTexelBuffer(uint32 descriptorIndex, const VkBufferView* view) const
{
- ASSERT(descriptorIndex < NumWrites);
+ ASSERT(descriptorIndex < WritesCount);
ASSERT(WriteDescriptors[descriptorIndex].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
return DescriptorSet::CopyAndReturnNotEqual(WriteDescriptors[descriptorIndex].pTexelBufferView, view);
}
void SetDescriptorSet(VkDescriptorSet descriptorSet) const
{
- for (uint32 i = 0; i < NumWrites; i++)
+ for (uint32 i = 0; i < WritesCount; i++)
{
WriteDescriptors[i].dstSet = descriptorSet;
}
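Reviewer note (illustrative only): usage of the writer with the renamed counter, assuming a writer already prepared by SetupDescriptorWrites() and hypothetical buffer/set handles:

// Sketch only - 'writer' was set up earlier via SetupDescriptorWrites(); values are made up.
void SketchWriterUsage(const DescriptorSetWriterVulkan& writer, VkDescriptorSet set, VkBuffer cb)
{
    // Every Write* helper asserts descriptorIndex < WritesCount (formerly NumWrites),
    // which also caps one descriptor set at 64 resources for the dirty mask.
    writer.WriteUniformBuffer(0, cb, 0, 256);

    // Retargets all WritesCount entries in the VkWriteDescriptorSet table to 'set'.
    writer.SetDescriptorSet(set);
}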
diff --git a/Source/Engine/GraphicsDevice/Vulkan/GPUContextVulkan.cpp b/Source/Engine/GraphicsDevice/Vulkan/GPUContextVulkan.cpp
index de79ebf7d..881e575e7 100644
--- a/Source/Engine/GraphicsDevice/Vulkan/GPUContextVulkan.cpp
+++ b/Source/Engine/GraphicsDevice/Vulkan/GPUContextVulkan.cpp
@@ -102,15 +102,11 @@ GPUContextVulkan::GPUContextVulkan(GPUDeviceVulkan* device, QueueVulkan* queue)
GPUContextVulkan::~GPUContextVulkan()
{
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
for (int32 i = 0; i < _descriptorPools.Count(); i++)
{
_descriptorPools[i].ClearDelete();
}
_descriptorPools.Clear();
-#else
- _descriptorPools.ClearDelete();
-#endif
Delete(_cmdBufferManager);
}
@@ -297,7 +293,6 @@ DescriptorPoolVulkan* GPUContextVulkan::AllocateDescriptorSets(const VkDescripto
{
VkResult result = VK_ERROR_OUT_OF_DEVICE_MEMORY;
VkDescriptorSetAllocateInfo allocateInfo = descriptorSetAllocateInfo;
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
DescriptorPoolVulkan* pool = nullptr;
const uint32 hash = VULKAN_HASH_POOLS_WITH_TYPES_USAGE_ID ? layout.GetTypesUsageID() : GetHash(layout);
@@ -317,15 +312,6 @@ DescriptorPoolVulkan* GPUContextVulkan::AllocateDescriptorSets(const VkDescripto
{
typedDescriptorPools = &_descriptorPools.Add(hash, DescriptorPoolArray())->Value;
}
-#else
- DescriptorPoolVulkan* pool = _descriptorPools.HasItems() ? _descriptorPools.Last() : nullptr;
-
- if (pool && pool->CanAllocate(layout))
- {
- allocateInfo.descriptorPool = pool->GetHandle();
- result = vkAllocateDescriptorSets(_device->Device, &allocateInfo, outSets);
- }
-#endif
if (result < VK_SUCCESS)
{
@@ -336,13 +322,8 @@ DescriptorPoolVulkan* GPUContextVulkan::AllocateDescriptorSets(const VkDescripto
else
{
// Spec says any negative value could be due to fragmentation, so create a new Pool. If it fails here then we really are out of memory!
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
pool = New<DescriptorPoolVulkan>(_device, layout);
typedDescriptorPools->Add(pool);
-#else
- pool = New<DescriptorPoolVulkan>(_device);
- _descriptorPools.Add(pool);
-#endif
allocateInfo.descriptorPool = pool->GetHandle();
VALIDATE_VULKAN_RESULT(vkAllocateDescriptorSets(_device->Device, &allocateInfo, outSets));
}
@@ -540,23 +521,13 @@ void GPUContextVulkan::UpdateDescriptorSets(GPUPipelineStateVulkan* pipelineStat
ASSERT(pipelineLayout);
bool needsWrite = false;
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
// No current descriptor pools set - acquire one and reset
- bool newDescriptorPool = pipelineState->AcquirePoolSet(cmdBuffer);
+ const bool newDescriptorPool = pipelineState->AcquirePoolSet(cmdBuffer);
needsWrite |= newDescriptorPool;
-#else
- const auto newDescriptorSets = pipelineState->DSRingBuffer.RequestDescriptorSets(this, cmdBuffer, pipelineLayout);
- pipelineState->DSRingBuffer.Set(newDescriptorSets);
- if (!newDescriptorSets)
- {
- return;
- }
- const auto& descriptorSetHandles = newDescriptorSets->GetHandles();
-#endif
// Update descriptors for every used shader stage
uint32 remainingHasDescriptorsPerStageMask = pipelineState->HasDescriptorsPerStageMask;
- for (int32 stage = 0; stage < DescriptorSet::NumGfxStages && remainingHasDescriptorsPerStageMask; stage++)
+ for (int32 stage = 0; stage < DescriptorSet::GraphicsStagesCount && remainingHasDescriptorsPerStageMask; stage++)
{
// Only process stages that exist in this pipeline and use descriptors
if (remainingHasDescriptorsPerStageMask & 1)
@@ -568,27 +539,19 @@ void GPUContextVulkan::UpdateDescriptorSets(GPUPipelineStateVulkan* pipelineStat
}
// Allocate sets based on what changed
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
//if (needsWrite) // TODO: write on change only?
-#endif
{
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
if (!pipelineState->AllocateDescriptorSets())
{
return;
}
-#endif
uint32 remainingStagesMask = pipelineState->HasDescriptorsPerStageMask;
uint32 stage = 0;
while (remainingStagesMask)
{
if (remainingStagesMask & 1)
{
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
const VkDescriptorSet descriptorSet = pipelineState->DescriptorSetHandles[stage];
-#else
- const VkDescriptorSet descriptorSet = descriptorSetHandles[stage];
-#endif
pipelineState->DSWriter[stage].SetDescriptorSet(descriptorSet);
}
@@ -608,39 +571,21 @@ void GPUContextVulkan::UpdateDescriptorSets(ComputePipelineStateVulkan* pipeline
bool needsWrite = false;
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
// No current descriptor pools set - acquire one and reset
- bool newDescriptorPool = pipelineState->AcquirePoolSet(cmdBuffer);
+ const bool newDescriptorPool = pipelineState->AcquirePoolSet(cmdBuffer);
needsWrite |= newDescriptorPool;
-#else
- const auto newDescriptorSets = pipelineState->DSRingBuffer.RequestDescriptorSets(this, cmdBuffer, pipelineLayout);
- pipelineState->DSRingBuffer.Set(newDescriptorSets);
- if (!newDescriptorSets)
- {
- return;
- }
- const auto& descriptorSetHandles = newDescriptorSets->GetHandles();
-#endif
// Update descriptors
UpdateDescriptorSets(*pipelineState->DescriptorInfo, pipelineState->DSWriter, needsWrite);
// Allocate sets based on what changed
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
- //if (needsWrite) // TODO: write on change only?
-#endif
+ //if (needsWrite) // TODO: write on change only?
{
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
if (!pipelineState->AllocateDescriptorSets())
{
return;
}
-#endif
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
const VkDescriptorSet descriptorSet = pipelineState->DescriptorSetHandles[DescriptorSet::Compute];
-#else
- const VkDescriptorSet descriptorSet = descriptorSetHandles[DescriptorSet::Compute];
-#endif
pipelineState->DSWriter.SetDescriptorSet(descriptorSet);
vkUpdateDescriptorSets(_device->Device, pipelineState->DSWriteContainer.DescriptorWrites.Count(), pipelineState->DSWriteContainer.DescriptorWrites.Get(), 0, nullptr);
@@ -675,8 +620,6 @@ void GPUContextVulkan::OnDrawCall()
if (_rtDirtyFlag && cmdBuffer->IsInsideRenderPass())
EndRenderPass();
- _currentState->Reset();
-
if (pipelineState->HasDescriptorsPerStageMask)
{
UpdateDescriptorSets(pipelineState);
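Reviewer note: with the non-manager branch gone, AllocateDescriptorSets() has a single code path; an annotated walkthrough (my reading of the hunks above, not new behavior):

// 1. hash = layout.GetTypesUsageID() when VULKAN_HASH_POOLS_WITH_TYPES_USAGE_ID, else GetHash(layout)
// 2. typedDescriptorPools = the _descriptorPools bucket for that hash (created on first use)
// 3. an allocation is attempted from an existing pool in that bucket (portion not shown in this hunk)
// 4. on any failure (treated as fragmentation per the spec note), New<DescriptorPoolVulkan>(_device, layout)
//    is appended to the bucket and the allocation is retried under VALIDATE_VULKAN_RESULT.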
diff --git a/Source/Engine/GraphicsDevice/Vulkan/GPUContextVulkan.h b/Source/Engine/GraphicsDevice/Vulkan/GPUContextVulkan.h
index 16649e91e..8fd884cf2 100644
--- a/Source/Engine/GraphicsDevice/Vulkan/GPUContextVulkan.h
+++ b/Source/Engine/GraphicsDevice/Vulkan/GPUContextVulkan.h
@@ -114,12 +114,8 @@ private:
DescriptorOwnerResourceVulkan* _uaHandles[GPU_MAX_UA_BINDED];
DescriptorOwnerResourceVulkan** _handles[(int32)SpirvShaderResourceBindingType::MAX];
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
typedef Array<DescriptorPoolVulkan*> DescriptorPoolArray;
Dictionary<uint32, DescriptorPoolArray> _descriptorPools;
-#else
- Array<DescriptorPoolVulkan*> _descriptorPools;
-#endif
public:
diff --git a/Source/Engine/GraphicsDevice/Vulkan/GPUDeviceVulkan.cpp b/Source/Engine/GraphicsDevice/Vulkan/GPUDeviceVulkan.cpp
index b489b56ca..2d0aeb512 100644
--- a/Source/Engine/GraphicsDevice/Vulkan/GPUDeviceVulkan.cpp
+++ b/Source/Engine/GraphicsDevice/Vulkan/GPUDeviceVulkan.cpp
@@ -1096,9 +1096,7 @@ GPUDeviceVulkan::GPUDeviceVulkan(ShaderProfile shaderProfile, GPUAdapterVulkan*
, ValidationCache(VK_NULL_HANDLE)
#endif
, UniformBufferUploader(nullptr)
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
, DescriptorPoolsManager(nullptr)
-#endif
{
}
@@ -1871,9 +1869,7 @@ bool GPUDeviceVulkan::Init()
// Prepare stuff
FenceManager.Init(this);
UniformBufferUploader = New<UniformBufferUploaderVulkan>(this);
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
DescriptorPoolsManager = New<DescriptorPoolsManagerVulkan>(this);
-#endif
MainContext = New<GPUContextVulkan>(this, GraphicsQueue);
// TODO: create and load PipelineCache
#if VULKAN_SUPPORTS_VALIDATION_CACHE
@@ -1895,9 +1891,7 @@ void GPUDeviceVulkan::DrawBegin()
// Flush resources
DeferredDeletionQueue.ReleaseResources();
StagingManager.ProcessPendingFree();
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
DescriptorPoolsManager->GC();
-#endif
}
void GPUDeviceVulkan::Dispose()
@@ -1925,9 +1919,7 @@ void GPUDeviceVulkan::Dispose()
StagingManager.Dispose();
TimestampQueryPools.ClearDelete();
SAFE_DELETE_GPU_RESOURCE(UniformBufferUploader);
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
Delete(DescriptorPoolsManager);
-#endif
SAFE_DELETE(MainContext);
SAFE_DELETE(GraphicsQueue);
SAFE_DELETE(ComputeQueue);
diff --git a/Source/Engine/GraphicsDevice/Vulkan/GPUDeviceVulkan.h b/Source/Engine/GraphicsDevice/Vulkan/GPUDeviceVulkan.h
index 905485f7d..8a1334450 100644
--- a/Source/Engine/GraphicsDevice/Vulkan/GPUDeviceVulkan.h
+++ b/Source/Engine/GraphicsDevice/Vulkan/GPUDeviceVulkan.h
@@ -26,9 +26,7 @@ class RenderPassVulkan;
class FenceManagerVulkan;
class GPUDeviceVulkan;
class UniformBufferUploaderVulkan;
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
class DescriptorPoolsManagerVulkan;
-#endif
class SemaphoreVulkan
{
@@ -637,9 +635,10 @@ public:
///
UniformBufferUploaderVulkan* UniformBufferUploader;
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
+ ///
+ /// The descriptor pools manager.
+ ///
DescriptorPoolsManagerVulkan* DescriptorPoolsManager;
-#endif
///
/// The physical device limits.
diff --git a/Source/Engine/GraphicsDevice/Vulkan/GPUPipelineStateVulkan.cpp b/Source/Engine/GraphicsDevice/Vulkan/GPUPipelineStateVulkan.cpp
index 4d527a2bb..ee7305daf 100644
--- a/Source/Engine/GraphicsDevice/Vulkan/GPUPipelineStateVulkan.cpp
+++ b/Source/Engine/GraphicsDevice/Vulkan/GPUPipelineStateVulkan.cpp
@@ -47,11 +47,8 @@ ComputePipelineStateVulkan* GPUShaderProgramCSVulkan::GetOrCreateState()
_pipelineState = New<ComputePipelineStateVulkan>(_device, pipeline, layout);
_pipelineState->DescriptorInfo = &DescriptorInfo;
-
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
_pipelineState->DescriptorSetsLayout = &layout->GetDescriptorSetLayout();
_pipelineState->DescriptorSetHandles.AddZeroed(_pipelineState->DescriptorSetsLayout->GetHandles().Count());
-#endif
uint32 totalNumDynamicOffsets = 0;
@@ -89,13 +86,9 @@ ComputePipelineStateVulkan::ComputePipelineStateVulkan(GPUDeviceVulkan* device,
ComputePipelineStateVulkan::~ComputePipelineStateVulkan()
{
DSWriteContainer.Release();
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
CurrentTypedDescriptorPoolSet = nullptr;
DescriptorSetsLayout = nullptr;
DescriptorSetHandles.Resize(0);
-#else
- DSRingBuffer.Release();
-#endif
DynamicOffsets.Resize(0);
_device->DeferredDeletionQueue.EnqueueResource(DeferredDeletionQueueVulkan::Type::Pipeline, _handle);
_layout = nullptr;
@@ -105,9 +98,6 @@ GPUPipelineStateVulkan::GPUPipelineStateVulkan(GPUDeviceVulkan* device)
: GPUResourceVulkan(device, StringView::Empty)
, _pipelines(16)
, _layout(nullptr)
-#if !VULKAN_USE_DESCRIPTOR_POOL_MANAGER
- , DSRingBuffer(device)
-#endif
{
}
@@ -132,10 +122,8 @@ PipelineLayoutVulkan* GPUPipelineStateVulkan::GetLayout()
_layout = _device->GetOrCreateLayout(descriptorSetLayoutInfo);
ASSERT(_layout);
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
DescriptorSetsLayout = &_layout->GetDescriptorSetLayout();
DescriptorSetHandles.AddZeroed(DescriptorSetsLayout->GetHandles().Count());
-#endif
return _layout;
}
@@ -192,13 +180,9 @@ VkPipeline GPUPipelineStateVulkan::GetState(RenderPassVulkan* renderPass)
void GPUPipelineStateVulkan::OnReleaseGPU()
{
DSWriteContainer.Release();
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
CurrentTypedDescriptorPoolSet = nullptr;
DescriptorSetsLayout = nullptr;
DescriptorSetHandles.Resize(0);
-#else
- DSRingBuffer.Release();
-#endif
DynamicOffsets.Resize(0);
for (auto i = _pipelines.Begin(); i.IsNotEnd(); ++i)
{
@@ -351,7 +335,7 @@ bool GPUPipelineStateVulkan::Init(const Description& desc)
_desc.pColorBlendState = &_descColorBlend;
ASSERT(DSWriteContainer.DescriptorWrites.IsEmpty());
- for (int32 stage = 0; stage < DescriptorSet::NumGfxStages; stage++)
+ for (int32 stage = 0; stage < DescriptorSet::GraphicsStagesCount; stage++)
{
const auto descriptor = DescriptorInfoPerStage[stage];
if (descriptor == nullptr || descriptor->DescriptorTypesCount == 0)
@@ -370,9 +354,9 @@ bool GPUPipelineStateVulkan::Init(const Description& desc)
VkDescriptorImageInfo* currentImageInfo = DSWriteContainer.DescriptorImageInfo.Get();
VkDescriptorBufferInfo* currentBufferInfo = DSWriteContainer.DescriptorBufferInfo.Get();
uint8* currentBindingToDynamicOffsetMap = DSWriteContainer.BindingToDynamicOffsetMap.Get();
- uint32 dynamicOffsetsStart[DescriptorSet::NumGfxStages];
+ uint32 dynamicOffsetsStart[DescriptorSet::GraphicsStagesCount];
uint32 totalNumDynamicOffsets = 0;
- for (int32 stage = 0; stage < DescriptorSet::NumGfxStages; stage++)
+ for (int32 stage = 0; stage < DescriptorSet::GraphicsStagesCount; stage++)
{
dynamicOffsetsStart[stage] = totalNumDynamicOffsets;
@@ -390,7 +374,7 @@ bool GPUPipelineStateVulkan::Init(const Description& desc)
}
DynamicOffsets.AddZeroed(totalNumDynamicOffsets);
- for (int32 stage = 0; stage < DescriptorSet::NumGfxStages; stage++)
+ for (int32 stage = 0; stage < DescriptorSet::GraphicsStagesCount; stage++)
{
DSWriter[stage].DynamicOffsets = dynamicOffsetsStart[stage] + DynamicOffsets.Get();
}
diff --git a/Source/Engine/GraphicsDevice/Vulkan/GPUPipelineStateVulkan.h b/Source/Engine/GraphicsDevice/Vulkan/GPUPipelineStateVulkan.h
index d1ebabea2..a51c105d2 100644
--- a/Source/Engine/GraphicsDevice/Vulkan/GPUPipelineStateVulkan.h
+++ b/Source/Engine/GraphicsDevice/Vulkan/GPUPipelineStateVulkan.h
@@ -44,8 +44,6 @@ public:
DescriptorSetWriteContainerVulkan DSWriteContainer;
DescriptorSetWriterVulkan DSWriter;
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
-
const DescriptorSetLayoutVulkan* DescriptorSetsLayout = nullptr;
TypedDescriptorPoolSetVulkan* CurrentTypedDescriptorPoolSet = nullptr;
Array<VkDescriptorSet> DescriptorSetHandles;
@@ -53,7 +51,7 @@ public:
inline bool AcquirePoolSet(CmdBufferVulkan* cmdBuffer)
{
// Pipeline state has no current descriptor pools set or set owner is not current - acquire a new pool set
- DescriptorPoolSetContainerVulkan* cmdBufferPoolSet = cmdBuffer->CurrentDescriptorPoolSetContainer;
+ DescriptorPoolSetContainerVulkan* cmdBufferPoolSet = cmdBuffer->GetDescriptorPoolSet();
if (CurrentTypedDescriptorPoolSet == nullptr || CurrentTypedDescriptorPoolSet->GetOwner() != cmdBufferPoolSet)
{
ASSERT(cmdBufferPoolSet);
@@ -70,26 +68,12 @@ public:
return CurrentTypedDescriptorPoolSet->AllocateDescriptorSets(*DescriptorSetsLayout, DescriptorSetHandles.Get());
}
-#else
-
- DescriptorSetRingBufferVulkan DSRingBuffer;
-
-#endif
-
Array<uint32> DynamicOffsets;
public:
- void Reset()
- {
-#if !VULKAN_USE_DESCRIPTOR_POOL_MANAGER
- DSRingBuffer.Reset();
-#endif
- }
-
void Bind(CmdBufferVulkan* cmdBuffer)
{
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
vkCmdBindDescriptorSets(
cmdBuffer->GetHandle(),
VK_PIPELINE_BIND_POINT_COMPUTE,
@@ -99,9 +83,6 @@ public:
DescriptorSetHandles.Get(),
DynamicOffsets.Count(),
DynamicOffsets.Get());
-#else
- DSRingBuffer.Bind(cmdBuffer->GetHandle(), GetLayout()->GetHandle(), VK_PIPELINE_BIND_POINT_COMPUTE, DynamicOffsets);
-#endif
}
public:
@@ -162,17 +143,15 @@ public:
///
/// The cached shader bindings per stage.
///
- const ShaderBindings* ShaderBindingsPerStage[DescriptorSet::NumGfxStages];
+ const ShaderBindings* ShaderBindingsPerStage[DescriptorSet::GraphicsStagesCount];
///
/// The cached shader descriptor infos per stage.
///
- const SpirvShaderDescriptorInfo* DescriptorInfoPerStage[DescriptorSet::NumGfxStages];
+ const SpirvShaderDescriptorInfo* DescriptorInfoPerStage[DescriptorSet::GraphicsStagesCount];
DescriptorSetWriteContainerVulkan DSWriteContainer;
- DescriptorSetWriterVulkan DSWriter[DescriptorSet::NumGfxStages];
-
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
+ DescriptorSetWriterVulkan DSWriter[DescriptorSet::GraphicsStagesCount];
const DescriptorSetLayoutVulkan* DescriptorSetsLayout = nullptr;
TypedDescriptorPoolSetVulkan* CurrentTypedDescriptorPoolSet = nullptr;
@@ -181,7 +160,7 @@ public:
inline bool AcquirePoolSet(CmdBufferVulkan* cmdBuffer)
{
// Pipeline state has no current descriptor pools set or set owner is not current - acquire a new pool set
- DescriptorPoolSetContainerVulkan* cmdBufferPoolSet = cmdBuffer->CurrentDescriptorPoolSetContainer;
+ DescriptorPoolSetContainerVulkan* cmdBufferPoolSet = cmdBuffer->GetDescriptorPoolSet();
if (CurrentTypedDescriptorPoolSet == nullptr || CurrentTypedDescriptorPoolSet->GetOwner() != cmdBufferPoolSet)
{
ASSERT(cmdBufferPoolSet);
@@ -198,26 +177,12 @@ public:
return CurrentTypedDescriptorPoolSet->AllocateDescriptorSets(*DescriptorSetsLayout, DescriptorSetHandles.Get());
}
-#else
-
- DescriptorSetRingBufferVulkan DSRingBuffer;
-
-#endif
-
Array<uint32> DynamicOffsets;
public:
- void Reset()
- {
-#if !VULKAN_USE_DESCRIPTOR_POOL_MANAGER
- DSRingBuffer.Reset();
-#endif
- }
-
void Bind(CmdBufferVulkan* cmdBuffer)
{
-#if VULKAN_USE_DESCRIPTOR_POOL_MANAGER
vkCmdBindDescriptorSets(
cmdBuffer->GetHandle(),
VK_PIPELINE_BIND_POINT_GRAPHICS,
@@ -227,9 +192,6 @@ public:
DescriptorSetHandles.Get(),
DynamicOffsets.Count(),
DynamicOffsets.Get());
-#else
- DSRingBuffer.Bind(cmdBuffer->GetHandle(), GetLayout()->GetHandle(), VK_PIPELINE_BIND_POINT_GRAPHICS, DynamicOffsets);
-#endif
}
///
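Reviewer note (illustrative sketch, not part of the patch): the per-draw descriptor path for a graphics pipeline state now always runs the pool-manager flow above; a condensed restatement of the sequence with a hypothetical helper:

// Sketch only - approximates the order of calls driven by GPUContextVulkan after this change.
void SketchGraphicsDescriptorFlow(GPUPipelineStateVulkan* pipelineState, CmdBufferVulkan* cmdBuffer)
{
    // Re-acquire the typed pool set when the command buffer's container changed.
    const bool newPoolSet = pipelineState->AcquirePoolSet(cmdBuffer);
    if (newPoolSet)
    {
        // A fresh pool set means every stage's descriptor table must be rewritten.
    }

    // ... DSWriter[stage] entries are refreshed for each stage in HasDescriptorsPerStageMask ...

    // One VkDescriptorSet per stage layout is allocated into DescriptorSetHandles.
    if (!pipelineState->AllocateDescriptorSets())
        return;

    // The context then issues vkUpdateDescriptorSets() with DSWriteContainer and finally:
    pipelineState->Bind(cmdBuffer);
}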
diff --git a/Source/Engine/GraphicsDevice/Vulkan/QueueVulkan.cpp b/Source/Engine/GraphicsDevice/Vulkan/QueueVulkan.cpp
index 1e021f4de..0fa456ed6 100644
--- a/Source/Engine/GraphicsDevice/Vulkan/QueueVulkan.cpp
+++ b/Source/Engine/GraphicsDevice/Vulkan/QueueVulkan.cpp
@@ -36,23 +36,23 @@ void QueueVulkan::Submit(CmdBufferVulkan* cmdBuffer, uint32 numSignalSemaphores,
submitInfo.pSignalSemaphores = signalSemaphores;
Array<VkSemaphore> waitSemaphores;
- if (cmdBuffer->WaitSemaphores.HasItems())
+ if (cmdBuffer->_waitSemaphores.HasItems())
{
- waitSemaphores.EnsureCapacity((uint32)cmdBuffer->WaitSemaphores.Count());
- for (auto semaphore : cmdBuffer->WaitSemaphores)
+ waitSemaphores.EnsureCapacity((uint32)cmdBuffer->_waitSemaphores.Count());
+ for (auto semaphore : cmdBuffer->_waitSemaphores)
{
waitSemaphores.Add(semaphore->GetHandle());
}
- submitInfo.waitSemaphoreCount = (uint32)cmdBuffer->WaitSemaphores.Count();
+ submitInfo.waitSemaphoreCount = (uint32)cmdBuffer->_waitSemaphores.Count();
submitInfo.pWaitSemaphores = waitSemaphores.Get();
- submitInfo.pWaitDstStageMask = cmdBuffer->WaitFlags.Get();
+ submitInfo.pWaitDstStageMask = cmdBuffer->_waitFlags.Get();
}
VALIDATE_VULKAN_RESULT(vkQueueSubmit(_queue, 1, &submitInfo, fence->GetHandle()));
cmdBuffer->_state = CmdBufferVulkan::State::Submitted;
cmdBuffer->MarkSemaphoresAsSubmitted();
- cmdBuffer->SubmittedFenceCounter = cmdBuffer->FenceSignaledCounter;
+ cmdBuffer->_submittedFenceCounter = cmdBuffer->_fenceSignaledCounter;
#if 0
// Wait for the GPU to be idle on every submit (useful for tracking GPU hangs)