I'm learning Direct X and trying to draw a basic triangle to the screen. I have a simple vertex shader and pixel shader.
Vertex Shader:
float4 main( float2 pos : POSITION) : SV_Position
{
return float4(pos.x,pos.y,0.0f,1.0f);
}
Pixel Shader:
float4 main() : SV_Target
{
return float4(1.0f,1.0f,1.0f,1.0f);
}
When running the program I get a linkage error between the input assembler and the vertex shader:
Error: ID3D11DeviceContext::Draw: Input Assembler - Vertex Shader linkage error: Signatures between stages are incompatible. The input stage requires Semantic/Index (POSITION,0) as input, but it is not provided by the output stage.
My rendering code:
void renderer_draw_triangle()
{
struct vertex
{
float x;
float y;
};
const struct vertex vertices[] =
{
{0.0f, 0.5f},
{0.5f, -0.5f},
{-0.5f, -0.5f},
};
const UINT stride = sizeof(struct vertex);
const UINT offset = 0u;
ID3D11Buffer *vertex_buffer;
D3D11_BUFFER_DESC bd = {
.BindFlags = D3D11_BIND_VERTEX_BUFFER,
.Usage = D3D11_USAGE_IMMUTABLE,
.CPUAccessFlags = 0u,
.MiscFlags = 0u,
.ByteWidth = sizeof(vertices),
.StructureByteStride = stride};
D3D11_SUBRESOURCE_DATA sd = {.pSysMem = vertices};
CHECK_ERROR(device->lpVtbl->CreateBuffer(device, &bd, &sd, &vertex_buffer));
context->lpVtbl->IASetVertexBuffers(context, 0u, 1u, &vertex_buffer, &stride, &offset);
ID3D11VertexShader *vertexShader;
ID3DBlob *blob;
CHECK_DX_ERROR(D3DReadFileToBlob(L"../Engine/build/shaders/VertexShader.cso", &blob));
CHECK_DX_ERROR(device->lpVtbl->CreateVertexShader(device, blob->lpVtbl->GetBufferPointer(blob), blob->lpVtbl->GetBufferSize(blob), NULL, &vertexShader));
context->lpVtbl->VSSetShader(context, vertexShader, NULL, 0u);
ID3D11PixelShader *pixelShader;
CHECK_DX_ERROR(D3DReadFileToBlob(L"../Engine/build/shaders/PixelShader.cso", &blob));
CHECK_DX_ERROR(device->lpVtbl->CreatePixelShader(device, blob->lpVtbl->GetBufferPointer(blob), blob->lpVtbl->GetBufferSize(blob), NULL, &pixelShader));
context->lpVtbl->PSSetShader(context, pixelShader, NULL, 0u);
ID3D11InputLayout *input_layout;
const D3D11_INPUT_ELEMENT_DESC inputs[] =
{
{"POSITION", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
};
CHECK_DX_ERROR(device->lpVtbl->CreateInputLayout(device, inputs, ASIZE(inputs), blob->lpVtbl->GetBufferPointer(blob), blob->lpVtbl->GetBufferSize(blob), &input_layout));
CHECK_DX_ERROR(context->lpVtbl->IASetInputLayout(context, input_layout));
context->lpVtbl->OMSetRenderTargets(context, 1u, &target, NULL);
context->lpVtbl->IASetPrimitiveTopology(context, D3D_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
D3D11_VIEWPORT vp = {.Width = 800, .Height = 600, .MinDepth = 0, .MaxDepth = 1, .TopLeftX = 0, .TopLeftY = 0};
context->lpVtbl->RSSetViewports(context, 1u, &vp);
CHECK_DX_ERROR(context->lpVtbl->Draw(context, ASIZE(vertices), 0u));
}
It says that it can't find the semantic in the input layer but I've reread the code a million times and can't figure out what is going wrong. The semantic is obviously in the input element description array. I'm pretty sure the input layer is bound correctly. Can anyone give any help?
float4 main(float4 pos) : SV_Target
. Normally you would use a struct to send data from the vertex shader to the pixel shader, defined in a.hlsli
file that is included by both shaders. – trojanfoe