Add support for stencil buffer on Vulkan and D3D11/D3D12
@@ -236,6 +236,7 @@ bool PixelFormatExtensions::HasStencil(const PixelFormat format)
    switch (format)
    {
    case PixelFormat::D24_UNorm_S8_UInt:
    case PixelFormat::D32_Float_S8X24_UInt:
        return true;
    default:
        return false;
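HasStencil now reports true for both packed depth-stencil formats, so callers can add the stencil aspect only when the format actually carries stencil bits. A minimal illustrative helper (the function name is an assumption, not engine code) mirroring how the GPUTextureViewVulkan::Init change further below picks Vulkan aspect flags:

    // Pick the image aspect mask for a depth target; include stencil only when present.
    VkImageAspectFlags GetDepthStencilAspectMask(const PixelFormat format)
    {
        VkImageAspectFlags aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
        if (PixelFormatExtensions::HasStencil(format))
            aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT; // eg. D24_UNorm_S8_UInt, D32_Float_S8X24_UInt
        return aspectMask;
    }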
@@ -848,6 +849,10 @@ PixelFormat PixelFormatExtensions::MakeTypeless(const PixelFormat format)
    case PixelFormat::BC7_UNorm:
    case PixelFormat::BC7_UNorm_sRGB:
        return PixelFormat::BC7_Typeless;
    case PixelFormat::D24_UNorm_S8_UInt:
        return PixelFormat::R24G8_Typeless;
    case PixelFormat::D32_Float_S8X24_UInt:
        return PixelFormat::R32G8X24_Typeless;
    default:
        return format;
    }
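MakeTypeless now maps the two depth-stencil formats to their typeless aliases, which matters mostly for the D3D11/D3D12 side of this change, where one typeless resource is re-typed per view. For reference, the standard DXGI pairing for a 24-bit depth + 8-bit stencil texture looks like this (a sketch of common D3D usage, not code from this commit):

    // The resource is allocated typeless; each view re-types the same memory.
    const DXGI_FORMAT resourceFormat   = DXGI_FORMAT_R24G8_TYPELESS;        // texture allocation
    const DXGI_FORMAT dsvFormat        = DXGI_FORMAT_D24_UNORM_S8_UINT;     // depth-stencil view
    const DXGI_FORMAT depthSrvFormat   = DXGI_FORMAT_R24_UNORM_X8_TYPELESS; // sample the depth channel
    const DXGI_FORMAT stencilSrvFormat = DXGI_FORMAT_X24_TYPELESS_G8_UINT;  // sample the stencil channel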
@@ -32,7 +32,7 @@ void CmdBufferVulkan::Begin()
    // Acquire a descriptor pool set on
    if (_descriptorPoolSetContainer == nullptr)
    {
        AcquirePoolSet();
        _descriptorPoolSetContainer = &_device->DescriptorPoolsManager->AcquirePoolSetContainer();
    }

    _state = State::IsInsideBegin;
@@ -61,18 +61,16 @@ void CmdBufferVulkan::End()
void CmdBufferVulkan::BeginRenderPass(RenderPassVulkan* renderPass, FramebufferVulkan* framebuffer, uint32 clearValueCount, VkClearValue* clearValues)
{
    ASSERT(IsOutsideRenderPass());

    VkRenderPassBeginInfo info;
    RenderToolsVulkan::ZeroStruct(info, VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO);
    info.renderPass = renderPass->GetHandle();
    info.framebuffer = framebuffer->GetHandle();
    info.renderPass = renderPass->Handle;
    info.framebuffer = framebuffer->Handle;
    info.renderArea.offset.x = 0;
    info.renderArea.offset.y = 0;
    info.renderArea.extent.width = framebuffer->Extent.width;
    info.renderArea.extent.height = framebuffer->Extent.height;
    info.clearValueCount = clearValueCount;
    info.pClearValues = clearValues;

    vkCmdBeginRenderPass(_commandBuffer, &info, VK_SUBPASS_CONTENTS_INLINE);
    _state = State::IsInsideRenderPass;
}
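The clear values forwarded here must follow the render pass attachment order, and the depth and stencil clears share a single VkClearValue. A minimal caller-side sketch with hypothetical values (renderPass, framebuffer and cmdBuffer assumed to be already set up):

    VkClearValue clearValues[2];
    clearValues[0].color = { { 0.0f, 0.0f, 0.0f, 1.0f } }; // color attachment 0
    clearValues[1].depthStencil = { 1.0f, 0 };             // depth cleared to 1.0, stencil to 0
    cmdBuffer->BeginRenderPass(renderPass, framebuffer, 2, clearValues);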
@@ -84,12 +82,6 @@ void CmdBufferVulkan::EndRenderPass()
    _state = State::IsInsideBegin;
}

void CmdBufferVulkan::AcquirePoolSet()
{
    ASSERT(!_descriptorPoolSetContainer);
    _descriptorPoolSetContainer = &_device->DescriptorPoolsManager->AcquirePoolSetContainer();
}

#if GPU_ALLOW_PROFILE_EVENTS

void CmdBufferVulkan::BeginEvent(const Char* name)
@@ -134,6 +126,7 @@ void CmdBufferVulkan::RefreshFenceStatus()
{
    if (_state == State::Submitted)
    {
        PROFILE_CPU();
        if (_device->FenceManager.IsFenceSignaled(_fence))
        {
            _state = State::ReadyForBegin;
@@ -258,7 +251,7 @@ void CmdBufferManagerVulkan::SubmitActiveCmdBuffer(SemaphoreVulkan* signalSemaph
    // Pause all active queries
    for (int32 i = 0; i < _queriesInProgress.Count(); i++)
    {
        _queriesInProgress[i]->Interrupt(_activeCmdBuffer);
        _queriesInProgress.Get()[i]->Interrupt(_activeCmdBuffer);
    }
#endif

@@ -290,7 +283,7 @@ void CmdBufferManagerVulkan::PrepareForNewActiveCommandBuffer()
    PROFILE_CPU();
    for (int32 i = 0; i < _pool._cmdBuffers.Count(); i++)
    {
        auto cmdBuffer = _pool._cmdBuffers[i];
        auto cmdBuffer = _pool._cmdBuffers.Get()[i];
        cmdBuffer->RefreshFenceStatus();
        if (cmdBuffer->GetState() == CmdBufferVulkan::State::ReadyForBegin)
        {
@@ -312,7 +305,7 @@ void CmdBufferManagerVulkan::PrepareForNewActiveCommandBuffer()
    // Resume any paused queries with the new command buffer
    for (int32 i = 0; i < _queriesInProgress.Count(); i++)
    {
        _queriesInProgress[i]->Resume(_activeCmdBuffer);
        _queriesInProgress.Get()[i]->Resume(_activeCmdBuffer);
    }
#endif
}

@@ -124,13 +124,11 @@ public:
    void BeginRenderPass(RenderPassVulkan* renderPass, FramebufferVulkan* framebuffer, uint32 clearValueCount, VkClearValue* clearValues);
    void EndRenderPass();

    DescriptorPoolSetContainerVulkan* GetDescriptorPoolSet() const
    FORCE_INLINE DescriptorPoolSetContainerVulkan* GetDescriptorPoolSet() const
    {
        return _descriptorPoolSetContainer;
    }

    void AcquirePoolSet();

#if GPU_ALLOW_PROFILE_EVENTS
    void BeginEvent(const Char* name);
    void EndEvent();

@@ -29,6 +29,7 @@
#define VULKAN_RESET_QUERY_POOLS 0
#define VULKAN_HASH_POOLS_WITH_TYPES_USAGE_ID 1
#define VULKAN_USE_DEBUG_LAYER GPU_ENABLE_DIAGNOSTICS
#define VULKAN_USE_DEBUG_DATA (GPU_ENABLE_DIAGNOSTICS && COMPILE_WITH_DEV_ENV)

#ifndef VULKAN_USE_QUERIES
#define VULKAN_USE_QUERIES 1
@@ -72,55 +72,19 @@ const Char* ToString(VkImageLayout layout)

#endif

void PipelineBarrierVulkan::AddImageBarrier(VkImage image, const VkImageSubresourceRange& range, VkImageLayout srcLayout, VkImageLayout dstLayout, GPUTextureViewVulkan* handle)
{
#if VK_ENABLE_BARRIERS_DEBUG
    ImageBarriersDebug.Add(handle);
#endif
    VkImageMemoryBarrier& imageBarrier = ImageBarriers.AddOne();
    RenderToolsVulkan::ZeroStruct(imageBarrier, VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER);
    imageBarrier.image = image;
    imageBarrier.subresourceRange = range;
    imageBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imageBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imageBarrier.oldLayout = srcLayout;
    imageBarrier.newLayout = dstLayout;
    SourceStage |= RenderToolsVulkan::GetImageBarrierFlags(srcLayout, imageBarrier.srcAccessMask);
    DestStage |= RenderToolsVulkan::GetImageBarrierFlags(dstLayout, imageBarrier.dstAccessMask);
#if VK_ENABLE_BARRIERS_DEBUG
    LOG(Warning, "Image Barrier: 0x{0:x}, {1} -> {2} for baseMipLevel: {3}, baseArrayLayer: {4}, levelCount: {5}, layerCount: {6} ({7})",
        (uintptr)image,
        ToString(srcLayout),
        ToString(dstLayout),
        range.baseMipLevel,
        range.baseArrayLayer,
        range.levelCount,
        range.layerCount,
        handle && handle->Owner->AsGPUResource() ? handle->Owner->AsGPUResource()->ToString() : String::Empty
    );
#endif
}

void PipelineBarrierVulkan::AddBufferBarrier(VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkAccessFlags srcAccess, VkAccessFlags dstAccess)
{
    VkBufferMemoryBarrier& bufferBarrier = BufferBarriers.AddOne();
    RenderToolsVulkan::ZeroStruct(bufferBarrier, VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER);
    bufferBarrier.buffer = buffer;
    bufferBarrier.offset = offset;
    bufferBarrier.size = size;
    bufferBarrier.srcAccessMask = srcAccess;
    bufferBarrier.dstAccessMask = dstAccess;
    bufferBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    bufferBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    SourceStage |= RenderToolsVulkan::GetBufferBarrierFlags(srcAccess);
    DestStage |= RenderToolsVulkan::GetBufferBarrierFlags(dstAccess);
}

void PipelineBarrierVulkan::Execute(CmdBufferVulkan* cmdBuffer)
void PipelineBarrierVulkan::Execute(const CmdBufferVulkan* cmdBuffer)
{
    ASSERT(cmdBuffer->IsOutsideRenderPass());
    vkCmdPipelineBarrier(cmdBuffer->GetHandle(), SourceStage, DestStage, 0, 0, nullptr, BufferBarriers.Count(), BufferBarriers.Get(), ImageBarriers.Count(), ImageBarriers.Get());
    Reset();

    // Reset
    SourceStage = 0;
    DestStage = 0;
    ImageBarriers.Clear();
    BufferBarriers.Clear();
#if VK_ENABLE_BARRIERS_DEBUG
    ImageBarriersDebug.Clear();
#endif
}

GPUContextVulkan::GPUContextVulkan(GPUDeviceVulkan* device, QueueVulkan* queue)
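Execute() flushes the whole batch in a single vkCmdPipelineBarrier call and now clears its own state in place instead of going through a separate Reset() helper. A hedged usage sketch, using the member names declared in this file (cmdBuffer assumed to be the active command buffer, outside a render pass):

    // Flush any pending batched barriers before work that depends on them.
    if (_barriers.HasBarrier())
        _barriers.Execute(cmdBuffer); // records vkCmdPipelineBarrier and resets the batch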
@@ -154,7 +118,7 @@ GPUContextVulkan::~GPUContextVulkan()
    Delete(_cmdBufferManager);
}

void GPUContextVulkan::AddImageBarrier(VkImage image, VkImageLayout srcLayout, VkImageLayout dstLayout, VkImageSubresourceRange& subresourceRange, GPUTextureViewVulkan* handle)
void GPUContextVulkan::AddImageBarrier(VkImage image, VkImageLayout srcLayout, VkImageLayout dstLayout, const VkImageSubresourceRange& subresourceRange, GPUTextureViewVulkan* handle)
{
#if VK_ENABLE_BARRIERS_BATCHING
    // Auto-flush on overflow
@@ -168,7 +132,31 @@ void GPUContextVulkan::AddImageBarrier(VkImage image, VkImageLayout srcLayout, V
#endif

    // Insert barrier
    _barriers.AddImageBarrier(image, subresourceRange, srcLayout, dstLayout, handle);
#if VK_ENABLE_BARRIERS_DEBUG
    _barriers.ImageBarriersDebug.Add(handle);
#endif
    VkImageMemoryBarrier& imageBarrier = _barriers.ImageBarriers.AddOne();
    RenderToolsVulkan::ZeroStruct(imageBarrier, VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER);
    imageBarrier.image = image;
    imageBarrier.subresourceRange = subresourceRange;
    imageBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imageBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imageBarrier.oldLayout = srcLayout;
    imageBarrier.newLayout = dstLayout;
    _barriers.SourceStage |= RenderToolsVulkan::GetImageBarrierFlags(srcLayout, imageBarrier.srcAccessMask);
    _barriers.DestStage |= RenderToolsVulkan::GetImageBarrierFlags(dstLayout, imageBarrier.dstAccessMask);
#if VK_ENABLE_BARRIERS_DEBUG
    LOG(Warning, "Image Barrier: 0x{0:x}, {1} -> {2} for baseMipLevel: {3}, baseArrayLayer: {4}, levelCount: {5}, layerCount: {6} ({7})",
        (uintptr)image,
        ::ToString(srcLayout),
        ::ToString(dstLayout),
        subresourceRange.baseMipLevel,
        subresourceRange.baseArrayLayer,
        subresourceRange.levelCount,
        subresourceRange.layerCount,
        handle && handle->Owner->AsGPUResource() ? handle->Owner->AsGPUResource()->ToString() : String::Empty
    );
#endif

#if !VK_ENABLE_BARRIERS_BATCHING
    // Auto-flush without batching
@@ -306,7 +294,17 @@ void GPUContextVulkan::AddBufferBarrier(GPUBufferVulkan* buffer, VkAccessFlags d
#endif

    // Insert barrier
    _barriers.AddBufferBarrier(buffer->GetHandle(), 0, buffer->GetSize(), buffer->Access, dstAccess);
    VkBufferMemoryBarrier& bufferBarrier = _barriers.BufferBarriers.AddOne();
    RenderToolsVulkan::ZeroStruct(bufferBarrier, VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER);
    bufferBarrier.buffer = buffer->GetHandle();
    bufferBarrier.offset = 0;
    bufferBarrier.size = buffer->GetSize();
    bufferBarrier.srcAccessMask = buffer->Access;
    bufferBarrier.dstAccessMask = dstAccess;
    bufferBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    bufferBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    _barriers.SourceStage |= RenderToolsVulkan::GetBufferBarrierFlags(buffer->Access);
    _barriers.DestStage |= RenderToolsVulkan::GetBufferBarrierFlags(dstAccess);
    buffer->Access = dstAccess;

#if !VK_ENABLE_BARRIERS_BATCHING
@@ -404,8 +402,18 @@ void GPUContextVulkan::BeginRenderPass()
    if (_rtDepth)
    {
        handle = _rtDepth;
        layout.ReadDepth = true; // TODO: use proper depthStencilAccess flags
        layout.WriteDepth = handle->LayoutRTV == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; // TODO: do it in a proper way
        layout.ReadDepth = true;
        layout.ReadStencil = PixelFormatExtensions::HasStencil(handle->GetFormat());
        layout.WriteDepth = handle->LayoutRTV == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL || handle->LayoutRTV == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL || handle->LayoutRTV == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL;
        layout.WriteStencil = handle->LayoutRTV == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL || handle->LayoutRTV == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL || handle->LayoutRTV == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL;
        if (_currentState && 0)
        {
            // TODO: use this but only if state doesn't change during whole render pass (eg. 1st draw call might not draw depth but 2nd might)
            layout.ReadDepth &= _currentState->DepthReadEnable;
            layout.ReadStencil &= _currentState->StencilReadEnable;
            layout.WriteDepth &= _currentState->DepthWriteEnable;
            layout.WriteStencil &= _currentState->StencilWriteEnable;
        }
        framebufferKey.AttachmentCount++;
        framebufferKey.Attachments[_rtCount] = handle->GetFramebufferView();
        AddImageBarrier(handle, handle->LayoutRTV);
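The Read/Write flags filled in here become part of the render pass cache key, so the depth-stencil attachment's load/store ops and layout follow what the bound depth target can actually do. A condensed sketch of the mapping used further down in RenderPassVulkan (the helper name is illustrative; this is the path used while separateDepthStencilLayouts is not enabled):

    // Read-only usage keeps the attachment in a read-only layout so it can also be sampled;
    // any depth or stencil write forces the writable depth-stencil layout.
    VkImageLayout PickDepthStencilLayout(const RenderTargetLayoutVulkan& layout)
    {
        if ((layout.ReadDepth || layout.ReadStencil) && !(layout.WriteDepth || layout.WriteStencil))
            return VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
        return VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    }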
@@ -571,55 +579,6 @@ void GPUContextVulkan::UpdateDescriptorSets(const SpirvShaderDescriptorInfo& des
    }
}

void GPUContextVulkan::UpdateDescriptorSets(GPUPipelineStateVulkan* pipelineState)
{
    const auto cmdBuffer = _cmdBufferManager->GetCmdBuffer();
    const auto pipelineLayout = pipelineState->GetLayout();
    ASSERT(pipelineLayout);
    bool needsWrite = false;

    // No current descriptor pools set - acquire one and reset
    const bool newDescriptorPool = pipelineState->AcquirePoolSet(cmdBuffer);
    needsWrite |= newDescriptorPool;

    // Update descriptors for every used shader stage
    uint32 remainingHasDescriptorsPerStageMask = pipelineState->HasDescriptorsPerStageMask;
    for (int32 stage = 0; stage < DescriptorSet::GraphicsStagesCount && remainingHasDescriptorsPerStageMask; stage++)
    {
        // Only process stages that exist in this pipeline and use descriptors
        if (remainingHasDescriptorsPerStageMask & 1)
        {
            UpdateDescriptorSets(*pipelineState->DescriptorInfoPerStage[stage], pipelineState->DSWriter[stage], needsWrite);
        }

        remainingHasDescriptorsPerStageMask >>= 1;
    }

    // Allocate sets if need to
    //if (needsWrite) // TODO: write on change only?
    {
        if (!pipelineState->AllocateDescriptorSets())
        {
            return;
        }
        uint32 remainingStagesMask = pipelineState->HasDescriptorsPerStageMask;
        uint32 stage = 0;
        while (remainingStagesMask)
        {
            if (remainingStagesMask & 1)
            {
                const VkDescriptorSet descriptorSet = pipelineState->DescriptorSetHandles[stage];
                pipelineState->DSWriter[stage].SetDescriptorSet(descriptorSet);
            }

            stage++;
            remainingStagesMask >>= 1;
        }

        vkUpdateDescriptorSets(_device->Device, pipelineState->DSWriteContainer.DescriptorWrites.Count(), pipelineState->DSWriteContainer.DescriptorWrites.Get(), 0, nullptr);
    }
}

void GPUContextVulkan::UpdateDescriptorSets(ComputePipelineStateVulkan* pipelineState)
{
    const auto cmdBuffer = _cmdBufferManager->GetCmdBuffer();
@@ -654,8 +613,6 @@ void GPUContextVulkan::OnDrawCall()
    GPUPipelineStateVulkan* pipelineState = _currentState;
    ASSERT(pipelineState && pipelineState->IsValid());
    const auto cmdBuffer = _cmdBufferManager->GetCmdBuffer();
    const auto pipelineLayout = pipelineState->GetLayout();
    ASSERT(pipelineLayout);

    // End previous render pass if render targets layout was modified
    if (_rtDirtyFlag && cmdBuffer->IsInsideRenderPass())
@@ -663,7 +620,35 @@ void GPUContextVulkan::OnDrawCall()

    if (pipelineState->HasDescriptorsPerStageMask)
    {
        UpdateDescriptorSets(pipelineState);
        // Get descriptor pools set
        bool needsWrite = pipelineState->AcquirePoolSet(cmdBuffer);

        // Update descriptors for every used shader stage
        uint32 remainingHasDescriptorsPerStageMask = pipelineState->HasDescriptorsPerStageMask;
        for (int32 stage = 0; stage < DescriptorSet::GraphicsStagesCount && remainingHasDescriptorsPerStageMask; stage++)
        {
            if (remainingHasDescriptorsPerStageMask & 1)
                UpdateDescriptorSets(*pipelineState->DescriptorInfoPerStage[stage], pipelineState->DSWriter[stage], needsWrite);
            remainingHasDescriptorsPerStageMask >>= 1;
        }

        // Allocate sets if need to
        //if (needsWrite) // TODO: write on change only?
        {
            if (!pipelineState->CurrentTypedDescriptorPoolSet->AllocateDescriptorSets(*pipelineState->DescriptorSetsLayout, pipelineState->DescriptorSetHandles.Get()))
                return;
            uint32 remainingStagesMask = pipelineState->HasDescriptorsPerStageMask;
            uint32 stage = 0;
            while (remainingStagesMask)
            {
                if (remainingStagesMask & 1)
                    pipelineState->DSWriter[stage].SetDescriptorSet(pipelineState->DescriptorSetHandles[stage]);
                remainingStagesMask >>= 1;
                stage++;
            }

            vkUpdateDescriptorSets(_device->Device, pipelineState->DSWriteContainer.DescriptorWrites.Count(), pipelineState->DSWriteContainer.DescriptorWrites.Get(), 0, nullptr);
        }
    }

    // Bind any missing vertex buffers to null if required by the current state
@@ -689,26 +674,30 @@ void GPUContextVulkan::OnDrawCall()
    }

    // Bind pipeline
    if (_psDirtyFlag && _currentState && (_rtDepth || _rtCount))
    if (_psDirtyFlag && pipelineState && (_rtDepth || _rtCount))
    {
        _psDirtyFlag = false;
        const auto cmdBuffer = _cmdBufferManager->GetCmdBuffer();
        const auto pipeline = _currentState->GetState(_renderPass);
        const auto pipeline = pipelineState->GetState(_renderPass);
        vkCmdBindPipeline(cmdBuffer->GetHandle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
        RENDER_STAT_PS_STATE_CHANGE();
    }

    //UpdateDynamicStates();

    // Bind descriptors sets to the graphics pipeline
    if (pipelineState->HasDescriptorsPerStageMask)
    {
        pipelineState->Bind(cmdBuffer);
        vkCmdBindDescriptorSets(
            cmdBuffer->GetHandle(),
            VK_PIPELINE_BIND_POINT_GRAPHICS,
            pipelineState->GetLayout()->GetHandle(),
            0,
            pipelineState->DescriptorSetHandles.Count(),
            pipelineState->DescriptorSetHandles.Get(),
            pipelineState->DynamicOffsets.Count(),
            pipelineState->DynamicOffsets.Get());
    }

    // Clear flag
    _rtDirtyFlag = false;

#if VK_ENABLE_BARRIERS_DEBUG
    LOG(Warning, "Draw");
#endif
@@ -40,7 +40,6 @@ class DescriptorSetLayoutVulkan;
/// </summary>
struct PipelineBarrierVulkan
{
public:
    VkPipelineStageFlags SourceStage = 0;
    VkPipelineStageFlags DestStage = 0;
    Array<VkImageMemoryBarrier, FixedAllocation<VK_BARRIER_BUFFER_SIZE>> ImageBarriers;
@@ -49,32 +48,17 @@ public:
    Array<GPUTextureViewVulkan*, FixedAllocation<VK_BARRIER_BUFFER_SIZE>> ImageBarriersDebug;
#endif

public:
    inline void Reset()
    {
        SourceStage = 0;
        DestStage = 0;
        ImageBarriers.Clear();
        BufferBarriers.Clear();
#if VK_ENABLE_BARRIERS_DEBUG
        ImageBarriersDebug.Clear();
#endif
    }

    void AddImageBarrier(VkImage image, const VkImageSubresourceRange& range, VkImageLayout srcLayout, VkImageLayout dstLayout, GPUTextureViewVulkan* handle);
    void AddBufferBarrier(VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkAccessFlags srcAccess, VkAccessFlags dstAccess);

    inline bool IsFull() const
    FORCE_INLINE bool IsFull() const
    {
        return ImageBarriers.Count() == VK_BARRIER_BUFFER_SIZE || BufferBarriers.Count() == VK_BARRIER_BUFFER_SIZE;
    }

    inline bool HasBarrier() const
    FORCE_INLINE bool HasBarrier() const
    {
        return ImageBarriers.Count() + BufferBarriers.Count() != 0;
    }

    void Execute(CmdBufferVulkan* cmdBuffer);
    void Execute(const CmdBufferVulkan* cmdBuffer);
};

/// <summary>
@@ -136,7 +120,7 @@ public:
        return _cmdBufferManager;
    }

    void AddImageBarrier(VkImage image, VkImageLayout srcLayout, VkImageLayout dstLayout, VkImageSubresourceRange& subresourceRange, GPUTextureViewVulkan* handle);
    void AddImageBarrier(VkImage image, VkImageLayout srcLayout, VkImageLayout dstLayout, const VkImageSubresourceRange& subresourceRange, GPUTextureViewVulkan* handle);
    void AddImageBarrier(GPUTextureViewVulkan* handle, VkImageLayout dstLayout);
    void AddImageBarrier(GPUTextureVulkan* texture, int32 mipSlice, int32 arraySlice, VkImageLayout dstLayout);
    void AddImageBarrier(GPUTextureVulkan* texture, VkImageLayout dstLayout);
@@ -153,7 +137,6 @@ public:

private:
    void UpdateDescriptorSets(const struct SpirvShaderDescriptorInfo& descriptorInfo, class DescriptorSetWriterVulkan& dsWriter, bool& needsWrite);
    void UpdateDescriptorSets(GPUPipelineStateVulkan* pipelineState);
    void UpdateDescriptorSets(ComputePipelineStateVulkan* pipelineState);
    void OnDrawCall();
@@ -430,14 +430,10 @@ void DeferredDeletionQueueVulkan::EnqueueGenericResource(Type type, uint64 handl
uint32 GetHash(const RenderTargetLayoutVulkan& key)
{
    uint32 hash = (int32)key.MSAA * 11;
    CombineHash(hash, (uint32)key.ReadDepth);
    CombineHash(hash, (uint32)key.WriteDepth);
    CombineHash(hash, (uint32)key.BlendEnable);
    CombineHash(hash, key.Flags);
    CombineHash(hash, (uint32)key.DepthFormat * 93473262);
    CombineHash(hash, key.RTsCount * 136);
    CombineHash(hash, key.Extent.width);
    CombineHash(hash, key.Extent.height);
    CombineHash(hash, key.Layers);
    for (int32 i = 0; i < ARRAY_COUNT(key.RTVsFormats); i++)
        CombineHash(hash, (uint32)key.RTVsFormats[i]);
    return hash;
@@ -452,9 +448,9 @@ uint32 GetHash(const FramebufferVulkan::Key& key)
    return hash;
}

FramebufferVulkan::FramebufferVulkan(GPUDeviceVulkan* device, Key& key, VkExtent2D& extent, uint32 layers)
    : _device(device)
    , _handle(VK_NULL_HANDLE)
FramebufferVulkan::FramebufferVulkan(GPUDeviceVulkan* device, const Key& key, const VkExtent2D& extent, uint32 layers)
    : Device(device)
    , Handle(VK_NULL_HANDLE)
    , Extent(extent)
    , Layers(layers)
{
@@ -462,18 +458,18 @@ FramebufferVulkan::FramebufferVulkan(GPUDeviceVulkan* device, Key& key, VkExtent

    VkFramebufferCreateInfo createInfo;
    RenderToolsVulkan::ZeroStruct(createInfo, VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
    createInfo.renderPass = key.RenderPass->GetHandle();
    createInfo.renderPass = key.RenderPass->Handle;
    createInfo.attachmentCount = key.AttachmentCount;
    createInfo.pAttachments = key.Attachments;
    createInfo.width = extent.width;
    createInfo.height = extent.height;
    createInfo.layers = layers;
    VALIDATE_VULKAN_RESULT(vkCreateFramebuffer(device->Device, &createInfo, nullptr, &_handle));
    VALIDATE_VULKAN_RESULT(vkCreateFramebuffer(device->Device, &createInfo, nullptr, &Handle));
}

FramebufferVulkan::~FramebufferVulkan()
{
    _device->DeferredDeletionQueue.EnqueueResource(DeferredDeletionQueueVulkan::Type::Framebuffer, _handle);
    Device->DeferredDeletionQueue.EnqueueResource(DeferredDeletionQueueVulkan::Type::Framebuffer, Handle);
}

bool FramebufferVulkan::HasReference(VkImageView imageView) const
@@ -487,8 +483,8 @@ bool FramebufferVulkan::HasReference(VkImageView imageView) const
}

RenderPassVulkan::RenderPassVulkan(GPUDeviceVulkan* device, const RenderTargetLayoutVulkan& layout)
    : _device(device)
    , _handle(VK_NULL_HANDLE)
    : Device(device)
    , Handle(VK_NULL_HANDLE)
    , Layout(layout)
{
    const int32 colorAttachmentsCount = layout.RTsCount;
@@ -531,23 +527,48 @@ RenderPassVulkan::RenderPassVulkan(GPUDeviceVulkan* device, const RenderTargetLa
    if (hasDepthStencilAttachment)
    {
        VkImageLayout depthStencilLayout;
#if 0
        // TODO: enable extension and use separateDepthStencilLayouts from Vulkan 1.2
        if (layout.ReadStencil || layout.WriteStencil)
        {
            if (layout.WriteDepth && layout.WriteStencil)
                depthStencilLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
            else if (layout.WriteDepth && !layout.WriteStencil)
                depthStencilLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL;
            else if (layout.WriteStencil && !layout.WriteDepth)
                depthStencilLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL;
            else if (layout.ReadDepth)
                depthStencilLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
            else
                depthStencilLayout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL;
        }
        else
        {
            // Depth-only
            if (layout.ReadDepth && !layout.WriteDepth)
                depthStencilLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL;
            else
                depthStencilLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL;
        }
#else
        if ((layout.ReadDepth || layout.ReadStencil) && !(layout.WriteDepth || layout.WriteStencil))
            depthStencilLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
        else
            depthStencilLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
#endif

        // Use last slot for depth stencil attachment
        VkAttachmentDescription& depthAttachment = attachments[colorAttachmentsCount];
        depthAttachment.flags = 0;
        depthAttachment.format = RenderToolsVulkan::ToVulkanFormat(layout.DepthFormat);
        depthAttachment.samples = (VkSampleCountFlagBits)layout.MSAA;
        // TODO: fix those operations for load and store
        depthAttachment.loadOp = layout.ReadDepth || true ? VK_ATTACHMENT_LOAD_OP_LOAD : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        depthAttachment.storeOp = layout.WriteDepth || true ? VK_ATTACHMENT_STORE_OP_STORE : VK_ATTACHMENT_STORE_OP_DONT_CARE;
        depthAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; // TODO: Handle stencil
        depthAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        depthAttachment.initialLayout = depthStencilLayout;
        depthAttachment.finalLayout = depthStencilLayout;
        VkAttachmentDescription& attachment = attachments[colorAttachmentsCount];
        attachment.flags = 0;
        attachment.format = RenderToolsVulkan::ToVulkanFormat(layout.DepthFormat);
        attachment.samples = (VkSampleCountFlagBits)layout.MSAA;
        attachment.loadOp = layout.ReadDepth || layout.ReadStencil ? VK_ATTACHMENT_LOAD_OP_LOAD : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        //attachment.storeOp = layout.WriteDepth || layout.WriteStencil ? VK_ATTACHMENT_STORE_OP_STORE : VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; // For some reason, read-only depth results in artifacts
        attachment.stencilLoadOp = layout.ReadStencil ? VK_ATTACHMENT_LOAD_OP_LOAD : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachment.stencilStoreOp = layout.WriteStencil ? VK_ATTACHMENT_STORE_OP_STORE : VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachment.initialLayout = depthStencilLayout;
        attachment.finalLayout = depthStencilLayout;
        depthStencilReference.attachment = colorAttachmentsCount;
        depthStencilReference.layout = depthStencilLayout;
        subpassDesc.pDepthStencilAttachment = &depthStencilReference;
@@ -559,12 +580,15 @@ RenderPassVulkan::RenderPassVulkan(GPUDeviceVulkan* device, const RenderTargetLa
    createInfo.pAttachments = attachments;
    createInfo.subpassCount = 1;
    createInfo.pSubpasses = &subpassDesc;
    VALIDATE_VULKAN_RESULT(vkCreateRenderPass(device->Device, &createInfo, nullptr, &_handle));
    VALIDATE_VULKAN_RESULT(vkCreateRenderPass(device->Device, &createInfo, nullptr, &Handle));
#if VULKAN_USE_DEBUG_DATA
    DebugCreateInfo = createInfo;
#endif
}

RenderPassVulkan::~RenderPassVulkan()
{
    _device->DeferredDeletionQueue.EnqueueResource(DeferredDeletionQueueVulkan::Type::RenderPass, _handle);
    Device->DeferredDeletionQueue.EnqueueResource(DeferredDeletionQueueVulkan::Type::RenderPass, Handle);
}

QueryPoolVulkan::QueryPoolVulkan(GPUDeviceVulkan* device, int32 capacity, VkQueryType type)
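The disabled branch above sketches the finer-grained per-aspect layouts this code could pick once the separateDepthStencilLayouts feature (core in Vulkan 1.2, also exposed via VK_KHR_separate_depth_stencil_layouts) is enabled. Requesting it at device creation would look roughly like this (a sketch, not part of this commit):

    // Chain the feature struct into VkDeviceCreateInfo::pNext when creating the logical device.
    VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures separateLayouts = {};
    separateLayouts.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES;
    separateLayouts.separateDepthStencilLayouts = VK_TRUE;

    VkDeviceCreateInfo deviceInfo = {};
    deviceInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    deviceInfo.pNext = &separateLayouts;
    // ... queue create infos, extensions, etc. as usual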
@@ -173,21 +173,28 @@ private:
    void EnqueueGenericResource(Type type, uint64 handle, VmaAllocation allocation);
};

class RenderTargetLayoutVulkan
struct RenderTargetLayoutVulkan
{
public:
    int32 RTsCount;
    union
    {
        struct
        {
            uint32 Layers : 10; // Limited by GPU_MAX_TEXTURE_ARRAY_SIZE
            uint32 RTsCount : 3; // Limited by GPU_MAX_RT_BINDED
            uint32 ReadDepth : 1;
            uint32 WriteDepth : 1;
            uint32 ReadStencil : 1;
            uint32 WriteStencil : 1;
            uint32 BlendEnable : 1;
        };
        uint32 Flags;
    };
    MSAALevel MSAA;
    bool ReadDepth;
    bool WriteDepth;
    bool BlendEnable;
    PixelFormat DepthFormat;
    PixelFormat RTVsFormats[GPU_MAX_RT_BINDED];
    VkExtent2D Extent;
    uint32 Layers;

public:
    bool operator==(const RenderTargetLayoutVulkan& other) const
    FORCE_INLINE bool operator==(const RenderTargetLayoutVulkan& other) const
    {
        return Platform::MemoryCompare(this, &other, sizeof(RenderTargetLayoutVulkan)) == 0;
    }
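Folding the per-attachment booleans into one anonymous bit-field union keeps the layout key compact and lets hashing and comparison treat every flag as a single uint32 through the aliased Flags member (see the CombineHash(hash, key.Flags) call above). Illustrative usage only, assuming the key is zeroed first so padding does not break the raw-memory compare:

    RenderTargetLayoutVulkan key;
    Platform::MemoryClear(&key, sizeof(key)); // equality/hash work on raw memory, so clear everything
    key.RTsCount = 1;
    key.ReadDepth = 1;
    key.WriteStencil = 1;
    const uint32 hash = GetHash(key); // mixes key.Flags (all bit-fields) in one step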
@@ -204,32 +211,21 @@ public:
    int32 AttachmentCount;
    VkImageView Attachments[GPU_MAX_RT_BINDED + 1];

public:
    bool operator==(const Key& other) const
    FORCE_INLINE bool operator==(const Key& other) const
    {
        return Platform::MemoryCompare(this, &other, sizeof(Key)) == 0;
    }
};

private:
    GPUDeviceVulkan* _device;
    VkFramebuffer _handle;

public:
    FramebufferVulkan(GPUDeviceVulkan* device, Key& key, VkExtent2D& extent, uint32 layers);
    FramebufferVulkan(GPUDeviceVulkan* device, const Key& key, const VkExtent2D& extent, uint32 layers);
    ~FramebufferVulkan();

public:
    GPUDeviceVulkan* Device;
    VkFramebuffer Handle;
    VkImageView Attachments[GPU_MAX_RT_BINDED + 1];
    VkExtent2D Extent;
    uint32 Layers;

public:
    inline VkFramebuffer GetHandle()
    {
        return _handle;
    }

    bool HasReference(VkImageView imageView) const;
};
@@ -237,22 +233,16 @@ uint32 GetHash(const FramebufferVulkan::Key& key);

class RenderPassVulkan
{
private:
    GPUDeviceVulkan* _device;
    VkRenderPass _handle;

public:
    GPUDeviceVulkan* Device;
    VkRenderPass Handle;
    RenderTargetLayoutVulkan Layout;
#if VULKAN_USE_DEBUG_DATA
    VkRenderPassCreateInfo DebugCreateInfo;
#endif

public:
    RenderPassVulkan(GPUDeviceVulkan* device, const RenderTargetLayoutVulkan& layout);
    ~RenderPassVulkan();

public:
    inline VkRenderPass GetHandle() const
    {
        return _handle;
    }
};

class QueryPoolVulkan
@@ -174,7 +174,7 @@ VkPipeline GPUPipelineStateVulkan::GetState(RenderPassVulkan* renderPass)
    // Update description to match the pipeline
    _descColorBlend.attachmentCount = renderPass->Layout.RTsCount;
    _descMultisample.rasterizationSamples = (VkSampleCountFlagBits)renderPass->Layout.MSAA;
    _desc.renderPass = renderPass->GetHandle();
    _desc.renderPass = renderPass->Handle;

    // Check if has missing layout
    if (_desc.layout == VK_NULL_HANDLE)
@@ -321,6 +321,10 @@ bool GPUPipelineStateVulkan::Init(const Description& desc)
    _descDepthStencil.front.passOp = ToVulkanStencilOp(desc.StencilPassOp);
    _descDepthStencil.front = _descDepthStencil.back;
    _desc.pDepthStencilState = &_descDepthStencil;
    DepthReadEnable = desc.DepthEnable && desc.DepthFunc != ComparisonFunc::Always;
    DepthWriteEnable = _descDepthStencil.depthWriteEnable;
    StencilReadEnable = desc.StencilEnable && desc.StencilReadMask != 0 && desc.StencilFunc != ComparisonFunc::Always;
    StencilWriteEnable = desc.StencilEnable && desc.StencilWriteMask != 0;

    // Rasterization
    RenderToolsVulkan::ZeroStruct(_descRasterization, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO);
@@ -117,7 +117,11 @@ public:
    /// </summary>
    uint32 UsedStagesMask;

    bool BlendEnable;
    uint32 BlendEnable : 1;
    uint32 DepthReadEnable : 1;
    uint32 DepthWriteEnable : 1;
    uint32 StencilReadEnable : 1;
    uint32 StencilWriteEnable : 1;

    /// <summary>
    /// The bitmask of stages that have descriptors.
@@ -146,42 +150,25 @@ public:
    TypedDescriptorPoolSetVulkan* CurrentTypedDescriptorPoolSet = nullptr;
    Array<VkDescriptorSet> DescriptorSetHandles;

    Array<uint32> DynamicOffsets;

public:
    inline bool AcquirePoolSet(CmdBufferVulkan* cmdBuffer)
    {
        // Lazy init
        if (!DescriptorSetsLayout)
            GetLayout();

        // Pipeline state has no current descriptor pools set or set owner is not current - acquire a new pool set
        DescriptorPoolSetContainerVulkan* cmdBufferPoolSet = cmdBuffer->GetDescriptorPoolSet();
        if (CurrentTypedDescriptorPoolSet == nullptr || CurrentTypedDescriptorPoolSet->GetOwner() != cmdBufferPoolSet)
        {
            ASSERT(cmdBufferPoolSet);
            CurrentTypedDescriptorPoolSet = cmdBufferPoolSet->AcquireTypedPoolSet(*DescriptorSetsLayout);
            return true;
        }

        return false;
    }

    inline bool AllocateDescriptorSets()
    {
        ASSERT(CurrentTypedDescriptorPoolSet);
        return CurrentTypedDescriptorPoolSet->AllocateDescriptorSets(*DescriptorSetsLayout, DescriptorSetHandles.Get());
    }

    Array<uint32> DynamicOffsets;

public:
    void Bind(CmdBufferVulkan* cmdBuffer)
    {
        vkCmdBindDescriptorSets(
            cmdBuffer->GetHandle(),
            VK_PIPELINE_BIND_POINT_GRAPHICS,
            GetLayout()->GetHandle(),
            0,
            DescriptorSetHandles.Count(),
            DescriptorSetHandles.Get(),
            DynamicOffsets.Count(),
            DynamicOffsets.Get());
    }

    /// <summary>
    /// Gets the Vulkan pipeline layout for this pipeline state.
    /// </summary>
@@ -24,6 +24,10 @@ void GPUTextureViewVulkan::Init(GPUDeviceVulkan* device, ResourceOwnerVulkan* ow
    Extent.height = Math::Max<uint32_t>(1, extent.height >> firstMipIndex);
    Extent.depth = Math::Max<uint32_t>(1, extent.depth >> firstMipIndex);
    Layers = arraySize;
#if VULKAN_USE_DEBUG_DATA
    Format = format;
    ReadOnlyDepth = readOnlyDepth;
#endif

    RenderToolsVulkan::ZeroStruct(Info, VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO);
    Info.image = image;
@@ -56,12 +60,26 @@ void GPUTextureViewVulkan::Init(GPUDeviceVulkan* device, ResourceOwnerVulkan* ow
    if (PixelFormatExtensions::IsDepthStencil(format))
    {
        range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
#if 0
        // TODO: enable extension and use separateDepthStencilLayouts from Vulkan 1.2
        if (PixelFormatExtensions::HasStencil(format))
        {
            range.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
        }
        LayoutRTV = readOnlyDepth ? VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
        LayoutSRV = readOnlyDepth ? VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
        LayoutSRV = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
        }
        else
        {
            LayoutRTV = readOnlyDepth ? VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL;
            LayoutSRV = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL;
        }
#else

        if (PixelFormatExtensions::HasStencil(format))
            range.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
        LayoutRTV = readOnlyDepth ? VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
        LayoutSRV = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
#endif
    }
    else
    {
@@ -113,13 +131,18 @@ void GPUTextureViewVulkan::Release()
    {
        Device->OnImageViewDestroy(ViewFramebuffer);
        Device->DeferredDeletionQueue.EnqueueResource(DeferredDeletionQueueVulkan::Type::ImageView, ViewFramebuffer);
        ViewFramebuffer = VK_NULL_HANDLE;
    }
    if (ViewSRV != View && ViewSRV != VK_NULL_HANDLE)
    {
        Device->OnImageViewDestroy(ViewSRV);
        Device->DeferredDeletionQueue.EnqueueResource(DeferredDeletionQueueVulkan::Type::ImageView, ViewSRV);
        ViewSRV = VK_NULL_HANDLE;
    }

    Device->OnImageViewDestroy(View);
    Device->DeferredDeletionQueue.EnqueueResource(DeferredDeletionQueueVulkan::Type::ImageView, View);

    View = VK_NULL_HANDLE;
    ViewFramebuffer = VK_NULL_HANDLE;

#if BUILD_DEBUG
    Device = nullptr;
@@ -133,15 +156,26 @@ void GPUTextureViewVulkan::DescriptorAsImage(GPUContextVulkan* context, VkImageV
{
    imageView = View;
    layout = LayoutSRV;

    const VkImageAspectFlags aspectMask = Info.subresourceRange.aspectMask;
    if (aspectMask == (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))
    {
        // Transition depth-only when binding depth buffer with stencil
        if (ViewSRV == VK_NULL_HANDLE)
        {
            VkImageViewCreateInfo createInfo = Info;
            createInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
            VALIDATE_VULKAN_RESULT(vkCreateImageView(Device->Device, &createInfo, nullptr, &ViewSRV));
        }
        imageView = ViewSRV;
    }
    context->AddImageBarrier(this, LayoutSRV);
    Info.subresourceRange.aspectMask = aspectMask;
}

void GPUTextureViewVulkan::DescriptorAsStorageImage(GPUContextVulkan* context, VkImageView& imageView, VkImageLayout& layout)
{
    imageView = View;
    layout = VK_IMAGE_LAYOUT_GENERAL;

    context->AddImageBarrier(this, VK_IMAGE_LAYOUT_GENERAL);
}
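Sampling a combined depth-stencil image requires the bound descriptor to reference a single aspect, so DescriptorAsImage lazily creates a depth-only ViewSRV the first time such a texture is read in a shader. Callers keep using the same entry point; conceptually (a sketch; depthView and context are placeholders for an existing GPUTextureViewVulkan and GPUContextVulkan):

    // For a DEPTH | STENCIL view this returns the lazily created depth-only ViewSRV
    // and transitions the image to the read-only depth-stencil layout for sampling.
    VkImageView viewToBind;
    VkImageLayout layoutToBind;
    depthView->DescriptorAsImage(context, viewToBind, layoutToBind);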
@@ -152,7 +186,6 @@ bool GPUTextureVulkan::GetData(int32 arrayOrDepthSliceIndex, int32 mipMapIndex,
        LOG(Warning, "Texture::GetData is valid only for staging resources.");
        return true;
    }

    GPUDeviceLock lock(_device);

    // Internally it's a buffer, so adapt resource index and offset
@@ -209,7 +242,6 @@ void GPUTextureVulkan::DescriptorAsStorageImage(GPUContextVulkan* context, VkIma
    ASSERT(_handleUAV.Owner == this);
    imageView = _handleUAV.View;
    layout = VK_IMAGE_LAYOUT_GENERAL;

    context->AddImageBarrier(this, VK_IMAGE_LAYOUT_GENERAL);
}

@@ -48,12 +48,17 @@ public:
    VkImage Image = VK_NULL_HANDLE;
    VkImageView View = VK_NULL_HANDLE;
    VkImageView ViewFramebuffer = VK_NULL_HANDLE;
    VkImageView ViewSRV = VK_NULL_HANDLE;
    VkExtent3D Extent;
    uint32 Layers;
    VkImageViewCreateInfo Info;
    int32 SubresourceIndex;
    VkImageLayout LayoutRTV;
    VkImageLayout LayoutSRV;
#if VULKAN_USE_DEBUG_DATA
    PixelFormat Format;
    bool ReadOnlyDepth;
#endif

public:
    void Init(GPUDeviceVulkan* device, ResourceOwnerVulkan* owner, VkImage image, int32 totalMipLevels, PixelFormat format, MSAALevel msaa, VkExtent3D extent, VkImageViewType viewType, int32 mipLevels = 1, int32 firstMipIndex = 0, int32 arraySize = 1, int32 firstArraySlice = 0, bool readOnlyDepth = false);
@@ -77,12 +77,10 @@ public:
    case VK_ACCESS_SHADER_WRITE_BIT:
        stageFlags = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
        break;
#if VK_KHR_maintenance2
    case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT:
    case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT:
        stageFlags = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        break;
#endif
    default:
        CRASH;
        break;
@@ -108,7 +106,9 @@ public:
        stageFlags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
        break;
    case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
        accessFlags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
    case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL:
    case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL:
        accessFlags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
        stageFlags = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        break;
    case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
@@ -124,15 +124,16 @@ public:
        stageFlags = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
        break;
    case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
        accessFlags = VK_ACCESS_SHADER_READ_BIT;
        stageFlags = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
    case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL:
    case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL:
        accessFlags = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
        stageFlags = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        break;
#if VK_KHR_maintenance2
    case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR:
        accessFlags = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
        stageFlags = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
    case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL:
    case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL:
        accessFlags = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
        stageFlags = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        break;
#endif
    case VK_IMAGE_LAYOUT_GENERAL:
        accessFlags = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
        stageFlags = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
@@ -150,7 +151,7 @@ public:
    static_assert(!TIsPointer<T>::Value, "Don't use a pointer.");
    static_assert(OFFSET_OF(T, sType) == 0, "Assumes type is the first member in the Vulkan type.");
    data.sType = type;
    Platform::MemoryClear(((uint8*)&data) + sizeof(VkStructureType), sizeof(T) - sizeof(VkStructureType));
    Platform::MemoryClear((uint8*)&data + sizeof(VkStructureType), sizeof(T) - sizeof(VkStructureType));
}

/// <summary>
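GetImageBarrierFlags now treats the read-only depth/stencil layouts as usable by both the fixed-function depth/stencil tests and fragment-shader sampling, so a single transition covers passes that test against the depth buffer while also reading it in a shader. The caller-side pattern stays a one-liner (illustrative; depthBufferView stands for any depth-stencil GPUTextureViewVulkan):

    // Make the depth buffer readable by the depth/stencil test and by shaders in the same pass.
    context->AddImageBarrier(depthBufferView, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL);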
@@ -151,7 +151,6 @@ void MotionBlurPass::Dispose()

void MotionBlurPass::RenderMotionVectors(RenderContext& renderContext)
{
    // Prepare
    auto motionVectors = renderContext.Buffers->MotionVectors;
    ASSERT(motionVectors);
    MotionBlurSettings& settings = renderContext.List->Settings.MotionBlur;
@@ -160,8 +159,6 @@ void MotionBlurPass::RenderMotionVectors(RenderContext& renderContext)
    const int32 screenHeight = renderContext.Buffers->GetHeight();
    const int32 motionVectorsWidth = screenWidth / static_cast<int32>(settings.MotionVectorsResolution);
    const int32 motionVectorsHeight = screenHeight / static_cast<int32>(settings.MotionVectorsResolution);

    // Ensure to have valid data
    if (!renderContext.List->Setup.UseMotionVectors || checkIfSkipPass())
    {
        // Skip pass (just clear motion vectors if texture is allocated)

@@ -356,15 +356,11 @@ void ReflectionsPass::Render(RenderContext& renderContext, GPUTextureView* light
{
    auto device = GPUDevice::Instance;
    auto context = device->GetMainContext();

    // Skip pass if resources aren't ready
    if (checkIfSkipPass())
    {
        // Skip pass (just clear buffer when doing debug preview)
        if (renderContext.View.Mode == ViewMode::Reflections)
        {
            context->Clear(lightBuffer, Color::Black);
        }

        return;
    }