Fixes for vertex layout binding in the graphics backend

Wojtek Figat
2025-01-11 22:44:38 +01:00
parent 237ea121be
commit 647d74af0d
7 changed files with 39 additions and 12 deletions

@@ -262,7 +262,9 @@ GPUVertexLayout* GPUVertexLayout::Merge(GPUVertexLayout* base, GPUVertexLayout*
         {
             // Insert any missing elements
             VertexElement ne = { e.Type, missingSlotOverride != -1 ? (byte)missingSlotOverride : e.Slot, 0, e.PerInstance, e.Format };
-            if (e.Type == VertexElement::Types::TexCoord1 || e.Type == VertexElement::Types::TexCoord2 || e.Type == VertexElement::Types::TexCoord3)
+            if (e.Type == VertexElement::Types::TexCoord1 ||
+                e.Type == VertexElement::Types::TexCoord2 ||
+                e.Type == VertexElement::Types::TexCoord3)
             {
                 // Alias missing texcoords with existing texcoords
                 for (const VertexElement& ee : newElements)
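
The guarded branch aliases any missing TexCoord1-3 onto an existing texcoord element instead of pointing it at an absent slot. A minimal self-contained sketch of that rule, with simplified stand-in types (illustration only, not the Flax API):

#include <cstdint>
#include <vector>

// Simplified stand-ins for the engine types.
enum class Semantic : uint8_t { Position, TexCoord0, TexCoord1, TexCoord2, TexCoord3 };
struct Element { Semantic semantic; uint8_t slot; uint8_t offset; };

// When the shader expects TexCoord1..3 but the layout only provides a lower
// texcoord, alias the missing semantic onto the existing element so it reads
// valid (if duplicated) data instead of a missing slot.
bool TryAliasTexCoord(std::vector<Element>& elements, Semantic missing)
{
    if (missing != Semantic::TexCoord1 && missing != Semantic::TexCoord2 && missing != Semantic::TexCoord3)
        return false;
    for (const Element& e : elements)
    {
        if (e.semantic == Semantic::TexCoord0)
        {
            elements.push_back({ missing, e.slot, e.offset }); // same data, new semantic
            return true;
        }
    }
    return false;
}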

@@ -26,7 +26,11 @@ ID3D11InputLayout* GPUShaderProgramVSDX11::GetInputLayout(GPUVertexLayoutDX11* v
 {
     if (vertexLayout && vertexLayout->InputElementsCount)
     {
-        auto mergedVertexLayout = (GPUVertexLayoutDX11*)GPUVertexLayout::Merge(vertexLayout, Layout ? Layout : InputLayout);
+        GPUVertexLayoutDX11* mergedVertexLayout = vertexLayout;
+        if (!mergedVertexLayout)
+            mergedVertexLayout = (GPUVertexLayoutDX11*)Layout; // Fallback to shader-specified layout (if using old APIs)
+        if (InputLayout)
+            mergedVertexLayout = (GPUVertexLayoutDX11*)GPUVertexLayout::Merge(mergedVertexLayout, InputLayout);
         LOG_DIRECTX_RESULT(vertexLayout->GetDevice()->GetDevice()->CreateInputLayout(mergedVertexLayout->InputElements, mergedVertexLayout->InputElementsCount, Bytecode.Get(), Bytecode.Length(), &inputLayout));
     }
     _cache.Add(vertexLayout, inputLayout);
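
GetInputLayout builds one ID3D11InputLayout per vertex layout and stores the result unconditionally, so even a skipped or failed creation is cached and never retried. A generic sketch of that cache pattern with hypothetical names (not the engine code):

#include <unordered_map>

template<typename Key, typename Value>
struct LayoutCache
{
    std::unordered_map<const Key*, Value*> entries;

    // Return the cached value for 'key', building it once on a miss;
    // a null result is cached too, so failed creations are not retried.
    template<typename BuildFn>
    Value* GetOrCreate(const Key* key, BuildFn build)
    {
        auto it = entries.find(key);
        if (it != entries.end())
            return it->second;
        Value* value = build(key);
        entries.emplace(key, value);
        return value;
    }
};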

@@ -563,7 +563,7 @@ void GPUContextDX12::flushPS()
     // Change state
     ASSERT(_currentState->IsValid());
 #if GPU_ENABLE_ASSERTION_LOW_LAYERS
-    if (!_vertexLayout && _vbHandles[0] && !_currentState->VertexLayout)
+    if (!_vertexLayout && _vbHandles[0] && !_currentState->VertexBufferLayout)
     {
         LOG(Error, "Missing Vertex Layout (not assigned to GPUBuffer). Vertex Shader won't read valid data resulting incorrect visuals.");
     }
@@ -957,7 +957,6 @@ void GPUContextDX12::BindUA(int32 slot, GPUResourceView* view)
 void GPUContextDX12::BindVB(const Span<GPUBuffer*>& vertexBuffers, const uint32* vertexBuffersOffsets, GPUVertexLayout* vertexLayout)
 {
     ASSERT(vertexBuffers.Length() >= 0 && vertexBuffers.Length() <= GPU_MAX_VB_BINDED);
     bool vbEdited = _vbCount != vertexBuffers.Length();
     D3D12_VERTEX_BUFFER_VIEW views[GPU_MAX_VB_BINDED];
     for (int32 i = 0; i < vertexBuffers.Length(); i++)
@@ -990,7 +989,13 @@ void GPUContextDX12::BindVB(const Span<GPUBuffer*>& vertexBuffers, const uint32*
 #endif
         _commandList->IASetVertexBuffers(0, vertexBuffers.Length(), views);
     }
-    _vertexLayout = (GPUVertexLayoutDX12*)(vertexLayout ? vertexLayout : GPUVertexLayout::Get(vertexBuffers));
+    if (!vertexLayout)
+        vertexLayout = GPUVertexLayout::Get(vertexBuffers);
+    if (_vertexLayout != vertexLayout)
+    {
+        _vertexLayout = (GPUVertexLayoutDX12*)vertexLayout;
+        _psDirtyFlag = true;
+    }
 }

 void GPUContextDX12::BindIB(GPUBuffer* indexBuffer)
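
The BindVB rewrite is the usual redundant-state-elision pattern: resolve the effective layout first (falling back to the layout carried by the bound buffers), then touch the dirty flag only when the layout actually changes, so flushPS is not forced to re-validate the pipeline state on every bind. A minimal sketch of the pattern with stand-in types (not the engine API):

struct VertexLayout; // opaque stand-in

struct ContextState
{
    const VertexLayout* vertexLayout = nullptr;
    bool psoDirty = false;

    // 'fromBuffers' models GPUVertexLayout::Get(vertexBuffers).
    void BindVertexLayout(const VertexLayout* layout, const VertexLayout* fromBuffers)
    {
        if (!layout)
            layout = fromBuffers; // fall back to the buffers' own layout
        if (vertexLayout != layout)
        {
            vertexLayout = layout;
            psoDirty = true; // pipeline state must be re-validated before the next draw
        }
    }
};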

@@ -83,9 +83,20 @@ ID3D12PipelineState* GPUPipelineStateDX12::GetState(GPUTextureViewDX12* depth, i
     _desc.SampleDesc.Quality = key.MSAA == MSAALevel::None ? 0 : GPUDeviceDX12::GetMaxMSAAQuality((int32)key.MSAA);
     _desc.SampleMask = D3D12_DEFAULT_SAMPLE_MASK;
     _desc.DSVFormat = RenderToolsDX::ToDxgiFormat(PixelFormatExtensions::FindDepthStencilFormat(key.DepthFormat));
-    vertexLayout = (GPUVertexLayoutDX12*)GPUVertexLayout::Merge(vertexLayout, VertexLayout);
-    _desc.InputLayout.pInputElementDescs = vertexLayout ? vertexLayout->InputElements : nullptr;
-    _desc.InputLayout.NumElements = vertexLayout ? vertexLayout->InputElementsCount : 0;
+    if (!vertexLayout)
+        vertexLayout = VertexBufferLayout; // Fallback to shader-specified layout (if using old APIs)
+    if (vertexLayout)
+    {
+        if (VertexInputLayout)
+            vertexLayout = (GPUVertexLayoutDX12*)GPUVertexLayout::Merge(vertexLayout, VertexInputLayout);
+        _desc.InputLayout.pInputElementDescs = vertexLayout->InputElements;
+        _desc.InputLayout.NumElements = vertexLayout->InputElementsCount;
+    }
+    else
+    {
+        _desc.InputLayout.pInputElementDescs = nullptr;
+        _desc.InputLayout.NumElements = 0;
+    }

     // Create object
     const HRESULT result = _device->GetDevice()->CreateGraphicsPipelineState(&_desc, IID_PPV_ARGS(&state));
@@ -180,7 +191,11 @@ bool GPUPipelineStateDX12::Init(const Description& desc)
     INIT_SHADER_STAGE(PS, GPUShaderProgramPSDX12);

     // Input Assembly
-    VertexLayout = desc.VS ? (GPUVertexLayoutDX12*)(desc.VS->Layout ? desc.VS->Layout : desc.VS->InputLayout) : nullptr;
+    if (desc.VS)
+    {
+        VertexBufferLayout = (GPUVertexLayoutDX12*)desc.VS->Layout;
+        VertexInputLayout = (GPUVertexLayoutDX12*)desc.VS->InputLayout;
+    }
     const D3D12_PRIMITIVE_TOPOLOGY_TYPE primTypes1[] =
     {
         D3D12_PRIMITIVE_TOPOLOGY_TYPE_UNDEFINED,
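
Splitting the old single VertexLayout member into VertexBufferLayout (explicitly set on the vertex shader, old API) and VertexInputLayout (reflected from its input signature) lets GetState resolve the layout in a fixed priority order. A sketch of that order with hypothetical names (Merge stands in for GPUVertexLayout::Merge):

struct VertexLayout; // opaque stand-in
const VertexLayout* Merge(const VertexLayout* base, const VertexLayout* reference); // assumed helper

// Priority: layout bound with the draw's vertex buffers, then the
// shader-specified layout, then merge in the reflected inputs so every
// semantic the shader reads is described; null yields an empty input layout.
const VertexLayout* ResolveInputLayout(const VertexLayout* fromBuffers,
                                       const VertexLayout* vertexBufferLayout,
                                       const VertexLayout* vertexInputLayout)
{
    const VertexLayout* layout = fromBuffers ? fromBuffers : vertexBufferLayout;
    if (layout && vertexInputLayout)
        layout = Merge(layout, vertexInputLayout);
    return layout;
}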

@@ -58,7 +58,8 @@ public:
 public:
     D3D_PRIMITIVE_TOPOLOGY PrimitiveTopology = D3D_PRIMITIVE_TOPOLOGY_UNDEFINED;
     DxShaderHeader Header;
-    GPUVertexLayoutDX12* VertexLayout;
+    GPUVertexLayoutDX12* VertexBufferLayout = nullptr;
+    GPUVertexLayoutDX12* VertexInputLayout = nullptr;

     /// <summary>
     /// Gets DirectX 12 graphics pipeline state object for the given rendering state. Uses depth buffer and render targets formats and multi-sample levels to setup a proper PSO. Uses caching.

@@ -277,7 +277,7 @@ bool ShaderCompilerD3D::CompileShader(ShaderFunctionMeta& meta, WritePermutation
         {
             D3D11_SIGNATURE_PARAMETER_DESC inputDesc;
             reflector->GetInputParameterDesc(inputIdx, &inputDesc);
-            if (inputDesc.ReadWriteMask == 0 || inputDesc.SystemValueType != D3D10_NAME_UNDEFINED)
+            if (inputDesc.SystemValueType != D3D10_NAME_UNDEFINED)
                 continue;
             auto format = PixelFormat::Unknown;
             switch (inputDesc.ComponentType)

@@ -327,7 +327,7 @@ bool ShaderCompilerDX::CompileShader(ShaderFunctionMeta& meta, WritePermutationD
         {
             D3D12_SIGNATURE_PARAMETER_DESC inputDesc;
             shaderReflection->GetInputParameterDesc(inputIdx, &inputDesc);
-            if (inputDesc.ReadWriteMask == 0 || inputDesc.SystemValueType != D3D10_NAME_UNDEFINED)
+            if (inputDesc.SystemValueType != D3D10_NAME_UNDEFINED)
                 continue;
             auto format = PixelFormat::Unknown;
             switch (inputDesc.ComponentType)
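
Both compiler changes drop the ReadWriteMask == 0 early-out, so inputs the shader declares but never reads still produce vertex layout elements; only system-value (SV_*) inputs are skipped. For reference, a minimal Windows-only sketch of this kind of reflection loop against the public D3D API (hypothetical ListVertexInputs wrapper, error handling trimmed):

#include <d3dcompiler.h>
#include <d3d11shader.h>
#pragma comment(lib, "d3dcompiler.lib")

// Enumerate a vertex shader's input signature from its compiled bytecode.
HRESULT ListVertexInputs(const void* bytecode, SIZE_T size)
{
    ID3D11ShaderReflection* reflector = nullptr;
    HRESULT hr = D3DReflect(bytecode, size, IID_PPV_ARGS(&reflector));
    if (FAILED(hr))
        return hr;
    D3D11_SHADER_DESC shaderDesc;
    reflector->GetDesc(&shaderDesc);
    for (UINT i = 0; i < shaderDesc.InputParameters; i++)
    {
        D3D11_SIGNATURE_PARAMETER_DESC input;
        reflector->GetInputParameterDesc(i, &input);
        if (input.SystemValueType != D3D_NAME_UNDEFINED)
            continue; // skip SV_* only; keep unread inputs (ReadWriteMask == 0)
        // input.SemanticName/SemanticIndex plus input.ComponentType and
        // input.Mask describe one element the vertex layout must provide.
    }
    reflector->Release();
    return S_OK;
}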