
DX11 Shader Reflection Problem

Started August 24, 2018 05:49 PM
18 comments, last by RubenRS 6 years, 5 months ago

Hi, I'm trying to create my input layout with shader reflection, but something weird is happening. With the traditional hand-written layout, this is my output:

[screenshot: output using the hand-written input layout]

Here's the code:


D3D11_INPUT_ELEMENT_DESC layout[] =
	{
		{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
		{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 }
	};
	UINT numElements = ARRAYSIZE(layout);

	hr = m_ptrd3dDevice->CreateInputLayout(layout, numElements, m_ptrVSBlob->GetBufferPointer(),
		m_ptrVSBlob->GetBufferSize(), &m_ptrInputLayout);
	m_ptrVSBlob->Release();
	if (FAILED(hr))
		return hr;

And when I create the input layout with shader reflection, this is my output:

[screenshot: output using the reflected input layout]

Here's the code:


// Reflect shader info
	ID3D11ShaderReflection* pVertexShaderReflection = nullptr;
	if (FAILED(D3DReflect(m_ptrVSBlob->GetBufferPointer(), 
		m_ptrVSBlob->GetBufferSize(), IID_ID3D11ShaderReflection, 
		(void**)&pVertexShaderReflection)))
	{
		return E_FAIL; // return a real failure code (S_FALSE still counts as success)
	}

	// Get shader info
	D3D11_SHADER_DESC shaderDesc;
	pVertexShaderReflection->GetDesc(&shaderDesc);

	// Read input layout description from shader info
	std::vector<D3D11_INPUT_ELEMENT_DESC> inputLayoutDesc;

	for (UINT i = 0; i < shaderDesc.InputParameters; i++)
	{
		D3D11_SIGNATURE_PARAMETER_DESC paramDesc;
		pVertexShaderReflection->GetInputParameterDesc(i, &paramDesc);

		// Fill out input element desc
		D3D11_INPUT_ELEMENT_DESC elementDesc = {}; // zero-init so Format falls back to DXGI_FORMAT_UNKNOWN if no branch below matches
		elementDesc.SemanticName = paramDesc.SemanticName;
		elementDesc.SemanticIndex = paramDesc.SemanticIndex;
		elementDesc.InputSlot = 0;
		elementDesc.AlignedByteOffset = D3D11_APPEND_ALIGNED_ELEMENT;
		elementDesc.InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
		elementDesc.InstanceDataStepRate = 0;

		// determine DXGI format
		if (paramDesc.Mask == 1)
		{
			if (paramDesc.ComponentType == D3D_REGISTER_COMPONENT_UINT32) 
				elementDesc.Format = DXGI_FORMAT_R32_UINT;
			else if (paramDesc.ComponentType == D3D_REGISTER_COMPONENT_SINT32)
				elementDesc.Format = DXGI_FORMAT_R32_SINT;
			else if (paramDesc.ComponentType == D3D_REGISTER_COMPONENT_FLOAT32) 
				elementDesc.Format = DXGI_FORMAT_R32_FLOAT;
		}
		else if (paramDesc.Mask <= 3)
		{
			if (paramDesc.ComponentType == D3D_REGISTER_COMPONENT_UINT32) 
				elementDesc.Format = DXGI_FORMAT_R32G32_UINT;
			else if (paramDesc.ComponentType == D3D_REGISTER_COMPONENT_SINT32) 
				elementDesc.Format = DXGI_FORMAT_R32G32_SINT;
			else if (paramDesc.ComponentType == D3D_REGISTER_COMPONENT_FLOAT32) 
				elementDesc.Format = DXGI_FORMAT_R32G32_FLOAT;
		}
		else if (paramDesc.Mask <= 7)
		{
			if (paramDesc.ComponentType == D3D_REGISTER_COMPONENT_UINT32) 
				elementDesc.Format = DXGI_FORMAT_R32G32B32_UINT;
			else if (paramDesc.ComponentType == D3D_REGISTER_COMPONENT_SINT32) 
				elementDesc.Format = DXGI_FORMAT_R32G32B32_SINT;
			else if (paramDesc.ComponentType == D3D_REGISTER_COMPONENT_FLOAT32)
				elementDesc.Format = DXGI_FORMAT_R32G32B32_FLOAT;
		}
		else if (paramDesc.Mask <= 15)
		{
			if (paramDesc.ComponentType == D3D_REGISTER_COMPONENT_UINT32) 
				elementDesc.Format = DXGI_FORMAT_R32G32B32A32_UINT;
			else if (paramDesc.ComponentType == D3D_REGISTER_COMPONENT_SINT32) 
				elementDesc.Format = DXGI_FORMAT_R32G32B32A32_SINT;
			else if (paramDesc.ComponentType == D3D_REGISTER_COMPONENT_FLOAT32) 
				elementDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT;
		}

		// Save element desc
		inputLayoutDesc.push_back(elementDesc);
	}

	// Try to create Input Layout
	hr = m_ptrd3dDevice->CreateInputLayout(&inputLayoutDesc[0], 
		inputLayoutDesc.size(), m_ptrVSBlob->GetBufferPointer(), 
		m_ptrVSBlob->GetBufferSize(), &m_ptrInputLayout);
	
	//Free allocation shader reflection memory
	pVertexShaderReflection->Release();
	pVertexShaderReflection = nullptr;
	if (FAILED(hr))
		return hr;

 

That should be fairly easy to debug on your side. Start with a breakpoint on


hr = m_ptrd3dDevice->CreateInputLayout(&inputLayoutDesc[0], 
		inputLayoutDesc.size(), m_ptrVSBlob->GetBufferPointer(), 
		m_ptrVSBlob->GetBufferSize(), &m_ptrInputLayout);

and check how inputLayoutDesc differs from your original:


{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 }

From there you'll have a better idea of what went wrong.
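
If stepping through in the debugger is awkward, another quick way is to dump the generated descriptors right before that call and compare them with the hard-coded array by eye. A minimal sketch, assuming the inputLayoutDesc vector from your code (DumpInputLayout is just a made-up helper name):


#include <windows.h>
#include <d3d11.h>
#include <sstream>
#include <vector>

// Hypothetical helper: print each generated D3D11_INPUT_ELEMENT_DESC to the
// debug output so it can be compared against the hand-written layout.
void DumpInputLayout(const std::vector<D3D11_INPUT_ELEMENT_DESC>& descs)
{
	std::wostringstream out;
	for (const auto& e : descs)
	{
		out << e.SemanticName << L"[" << e.SemanticIndex << L"]"
			<< L" format=" << e.Format
			<< L" offset=" << e.AlignedByteOffset << L"\n";
	}
	OutputDebugStringW(out.str().c_str());
}

Calling DumpInputLayout(inputLayoutDesc); right before CreateInputLayout should make any difference in format or offset obvious.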


@ChuckNovice Only the AlignedByteOffset of the TEXCOORD changes from 12 to 0. I think the error could be in this part of the code:


elementDesc.AlignedByteOffset = D3D11_APPEND_ALIGNED_ELEMENT;

but I don't know how to correct it if that's the wrong part.

Mmmm I may have more time to look at it in detail tonight. Right now what I can tell is that D3D11_APPEND_ALIGNED_ELEMENT should give you 0xFFFFFFFF and not 0.

3 minutes ago, ChuckNovice said:

Mmmm I may have more time to look at it in detail tonight. Right now what I can tell is that D3D11_APPEND_ALIGNED_ELEMENT should give you 0xFFFFFFFF and not 0.

:o right, I was wrong, D3D11_APPEND_ALIGNED_ELEMENT gives me 0xFFFFFFFF (4294967295). And that's the only difference between the two code paths.
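
That matches how the sentinel is defined in d3d11.h, so a purely illustrative compile-time check passes:


#include <d3d11.h>

// D3D11_APPEND_ALIGNED_ELEMENT is the 0xFFFFFFFF sentinel that tells the
// runtime to place an element directly after the previous one in the same slot.
static_assert(D3D11_APPEND_ALIGNED_ELEMENT == 0xFFFFFFFF,
	"append-aligned sentinel is 0xFFFFFFFF");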

Your layout description should be right then.

 

I'd then start looking at your call to CreateInputLayout. I'm not a C++ guy, but is there a reason you pass your layout as "layout" in the first sample and as the address of the first element, "&inputLayoutDesc[0]", in the other?

If not, are we sure you modified nothing other than this part while introducing shader reflection?
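
For reference, in C++ both of those spellings end up passing the same thing, a pointer to the first element, so that difference by itself shouldn't change the result. A minimal comparison, assuming a non-empty vector (a, b and c are just illustrative names):


#include <d3d11.h>
#include <vector>

// All three expressions yield a pointer to the first D3D11_INPUT_ELEMENT_DESC.
D3D11_INPUT_ELEMENT_DESC layout[2] = {};
std::vector<D3D11_INPUT_ELEMENT_DESC> inputLayoutDesc(2);

const D3D11_INPUT_ELEMENT_DESC* a = layout;                  // array decays to a pointer
const D3D11_INPUT_ELEMENT_DESC* b = &inputLayoutDesc[0];     // address of the first element
const D3D11_INPUT_ELEMENT_DESC* c = inputLayoutDesc.data();  // idiomatic since C++11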

Just now, ChuckNovice said:

Your layout description should be right then.

 

I'd then start looking at your call to CreateInputLayout. I'm not a C++ guy, but is there a reason you pass your layout as "layout" in the first sample and as the address of the first element, "&inputLayoutDesc[0]", in the other?

If not, are we sure you modified nothing other than this part while introducing shader reflection?

I found something else: the format of the position changes from DXGI_FORMAT_R32G32B32_FLOAT to DXGI_FORMAT_R32G32B32A32_FLOAT. That's because paramDesc.Mask is 15 when I get my position from the shader.

This is some code I made to create an input layout via reflection; you can use it if it fits your use case.

EDIT: Although, to be honest, it looks as though we're doing pretty much the same thing. This works for me when loading models exported from a specific editor.

 


DXGI_FORMAT ImplShaderReflector::InputLayoutRegisterToDXGIFormat(D3D_REGISTER_COMPONENT_TYPE type, BYTE mask)
{
	if (type != D3D_REGISTER_COMPONENT_FLOAT32) return DXGI_FORMAT_UNKNOWN;

	switch (mask)
	{
	case 0b00000001:
		return DXGI_FORMAT_R32_FLOAT;

	case 0b00000011:
		return DXGI_FORMAT_R32G32_FLOAT;

	case 0b00000111:
		return DXGI_FORMAT_R32G32B32_FLOAT;

	case 0b00001111:
		return DXGI_FORMAT_R32G32B32A32_FLOAT;

	default:
		break;
	}

	return DXGI_FORMAT_UNKNOWN;
}

ID3D11InputLayout* ImplShaderReflector::CreateInputLayout(ID3D11Device* pDevice, ID3DBlob* pByteCode)
{

	ID3D11ShaderReflection* pReflector = nullptr;
	if (FAILED(D3DReflect(pByteCode->GetBufferPointer(), pByteCode->GetBufferSize(), __uuidof(ID3D11ShaderReflection), (void**)&pReflector)))
	{
		OutputDebugString(L"Could not create reflector for vertex shader\n");
		return nullptr;
	}

	D3D11_SHADER_DESC shaderDesc = {};
	pReflector->GetDesc(&shaderDesc);

	std::vector<D3D11_INPUT_ELEMENT_DESC> inputDesc;
	inputDesc.resize(shaderDesc.InputParameters);

	for (UINT i = 0; i < shaderDesc.InputParameters; i++)
	{
		D3D11_SIGNATURE_PARAMETER_DESC sigDesc;
		pReflector->GetInputParameterDesc(i, &sigDesc);

		inputDesc[i].SemanticName = sigDesc.SemanticName;
		inputDesc[i].SemanticIndex = sigDesc.SemanticIndex;
		inputDesc[i].Format = InputLayoutRegisterToDXGIFormat(sigDesc.ComponentType, sigDesc.Mask);
		inputDesc[i].InputSlot = sigDesc.Stream;
		inputDesc[i].AlignedByteOffset = D3D11_APPEND_ALIGNED_ELEMENT;
		inputDesc[i].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
		inputDesc[i].InstanceDataStepRate = 0;
	}

	ID3D11InputLayout* pInputLayout = nullptr;
	if (FAILED(pDevice->CreateInputLayout(inputDesc.data(), static_cast<UINT>(inputDesc.size()), pByteCode->GetBufferPointer(), pByteCode->GetBufferSize(), &pInputLayout)))
	{
		OutputDebugString(L"Could not create vertex shader input layout\n");
		pReflector->Release();
		return nullptr;
	}

	pReflector->Release();

	return pInputLayout;
}
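
Usage looks roughly like this; the shader file name and BuildLayoutFromFile are placeholders, and it assumes you have an ImplShaderReflector instance and a valid device (error handling trimmed):


#include <d3d11.h>
#include <d3dcompiler.h>
#pragma comment(lib, "d3dcompiler.lib")

// Rough usage sketch for the helper above: compile the vertex shader, then
// build the input layout from its bytecode.
ID3D11InputLayout* BuildLayoutFromFile(ID3D11Device* pDevice, ImplShaderReflector& reflector)
{
	ID3DBlob* pVSBlob = nullptr;
	ID3DBlob* pErrors = nullptr;
	HRESULT hr = D3DCompileFromFile(L"shader.hlsl", nullptr, nullptr,
		"vs_main", "vs_5_0", 0, 0, &pVSBlob, &pErrors);
	if (pErrors) pErrors->Release();
	if (FAILED(hr))
		return nullptr;

	ID3D11InputLayout* pLayout = reflector.CreateInputLayout(pDevice, pVSBlob);
	pVSBlob->Release();
	return pLayout;
}

The returned layout is then bound with IASetInputLayout before drawing, same as a hand-written one.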

 

What does your vertex input struct look like in your vertex shader?

6 minutes ago, 0xnullptr said:

What does your vertex input struct look like in your vertex shader?

I think it's because my POSITION in the vertex shader is a float4, while I'm using an XMFLOAT3 when creating my vertex buffer:


cbuffer cbChangesPerFrame : register(b0) { matrix mvp_; };

Texture2D colorMap_ : register(t0); 
SamplerState colorSampler_ : register(s0);

struct VS_Input 
{ 
	float4 pos : POSITION; 
	float2 tex0 : TEXCOORD0; 
};

struct PS_Input 
{ 
	float4 pos : SV_POSITION; 
	float2 tex0 : TEXCOORD0; 
};

PS_Input vs_main(VS_Input vertex) 
{
	PS_Input vsOut = (PS_Input)0; 
	vsOut.pos = mul(vertex.pos, mvp_);
	vsOut.tex0 = vertex.tex0;

	return vsOut;
}

float4 ps_main(PS_Input frag) : SV_TARGET
{ 
	return colorMap_.Sample(colorSampler_, frag.tex0); 
}
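
Either declaring the position as float3 in VS_Input or storing a 4-component position on the CPU side would make the two match; a float3 buffer feeding a float4 input is otherwise fine on its own, because the input assembler pads missing components with 0 and the missing w with 1. Another option is to override the reflected format for semantics whose CPU-side layout is already known. A rough sketch against the reflection loop from the first post (OverrideFormat is a made-up helper, not part of the code above):


#include <cstring>
#include <d3d11.h>

// Hypothetical override: the vertex buffer stores POSITION as an XMFLOAT3
// (12 bytes), so force a 3-component format even though the shader declares
// float4 and reflection therefore reports a 4-component mask.
static DXGI_FORMAT OverrideFormat(LPCSTR semanticName, DXGI_FORMAT reflected)
{
	if (std::strcmp(semanticName, "POSITION") == 0)
		return DXGI_FORMAT_R32G32B32_FLOAT;
	return reflected;
}

// Inside the reflection loop, after the mask-based format selection:
//     elementDesc.Format = OverrideFormat(paramDesc.SemanticName, elementDesc.Format);

With the 12-byte position format, D3D11_APPEND_ALIGNED_ELEMENT puts TEXCOORD back at offset 12 and the reflected layout matches the hand-written one again.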

 

This topic is closed to new replies.
