Hi all,
I’m trying to get hardware instancing working (as in drawing the same mesh multiple times with an array of world transformations), and the result comes out completely broken. I’m not sure what I’m doing wrong here…
This is my effect file:
#if OPENGL
#define SV_POSITION POSITION
#define VS_SHADERMODEL vs_3_0
#define PS_SHADERMODEL ps_3_0
#else
#define VS_SHADERMODEL vs_4_0_level_9_1
#define PS_SHADERMODEL ps_4_0_level_9_1
#endif
Texture2D Texture;
sampler2D TextureSampler = sampler_state
{
    Texture = (Texture);
};

matrix ViewProjection;

struct VertexShaderInput
{
    float4 Position : POSITION0;
    float2 Coords : TEXCOORD0;
};

struct VertexShaderOutput
{
    float4 Position : SV_POSITION;
    float2 Coords : TEXCOORD0;
};

// the per-instance world matrix comes in from the second vertex stream via the BLENDWEIGHT0-3 elements
VertexShaderOutput MainVS(in VertexShaderInput input, float4x4 instanceTransform : BLENDWEIGHT)
{
    VertexShaderOutput output = (VertexShaderOutput)0;
    matrix wvp = mul(instanceTransform, ViewProjection);
    output.Position = mul(input.Position, wvp);
    output.Coords = input.Coords;
    return output;
}

float4 MainPS(VertexShaderOutput input) : COLOR
{
    float4 color = tex2D(TextureSampler, input.Coords);
    return color;
}

technique InstanceDrawing
{
    pass P0
    {
        VertexShader = compile VS_SHADERMODEL MainVS();
        PixelShader = compile PS_SHADERMODEL MainPS();
    }
};
And the code that uses it:
// vertex declaration for instance rendering
static VertexDeclaration instanceVertexDeclaration = new VertexDeclaration
(
    new VertexElement(0, VertexElementFormat.Vector4, VertexElementUsage.BlendWeight, 0),
    new VertexElement(sizeof(float) * 4, VertexElementFormat.Vector4, VertexElementUsage.BlendWeight, 1),
    new VertexElement(sizeof(float) * 8, VertexElementFormat.Vector4, VertexElementUsage.BlendWeight, 2),
    new VertexElement(sizeof(float) * 12, VertexElementFormat.Vector4, VertexElementUsage.BlendWeight, 3)
);

void DrawInstanceQueue(InstancingQueue queue, Camera camera)
{
    // update effect
    var effect = queue.Material.Effect;
    effect.Texture = queue.Texture;
    effect.ViewProjection = camera.ViewProjection;

    // update rasterizer state
    GlobalResources.GraphicsDevice.RasterizerState = queue.Material.RasterizerState;

    // update draw calls count
    DrawCalls++;

    // create buffer for instance matrices
    var graphics = GlobalResources.GraphicsDevice;
    var instanceVertexBuffer = new DynamicVertexBuffer(graphics, instanceVertexDeclaration, queue.Count, BufferUsage.WriteOnly);
    instanceVertexBuffer.SetData(queue.Transforms, 0, queue.Count, SetDataOptions.Discard);

    // draw with instancing
    for (int i = 0; i < queue.Mesh.MeshParts.Count; i++)
    {
        var part = queue.Mesh.MeshParts[i];
        if (part.PrimitiveCount > 0)
        {
            // stream 0 = mesh geometry, stream 1 = per-instance matrices (instance frequency 1)
            graphics.SetVertexBuffers(
                new VertexBufferBinding(part.VertexBuffer, part.VertexOffset, 0),
                new VertexBufferBinding(instanceVertexBuffer, 0, 1)
            );
            graphics.Indices = part.IndexBuffer;
            for (int j = 0; j < effect.CurrentTechnique.Passes.Count; j++)
            {
                effect.CurrentTechnique.Passes[j].Apply();
                graphics.DrawInstancedPrimitives(PrimitiveType.TriangleList, part.VertexOffset, part.StartIndex, part.PrimitiveCount, queue.Count);
            }
        }
    }

    // reset queue count
    queue.Count = 0;
}
Now, it’s safe to assume that the camera ViewProjection and queue.Transforms are correct, because if I use them with a non-instancing “regular” effect everything draws nicely. So I guess it’s something in the shader or in the way I pass data to it.
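For reference, the working non-instanced path boils down to something like this (a simplified sketch; regularEffect and its World parameter stand in for my actual material setup):

for (int i = 0; i < queue.Count; i++)
{
    // same per-instance matrix, but passed as an ordinary effect parameter
    regularEffect.World = queue.Transforms[i];
    regularEffect.ViewProjection = camera.ViewProjection;
    foreach (var part in queue.Mesh.MeshParts)
    {
        graphics.SetVertexBuffer(part.VertexBuffer);
        graphics.Indices = part.IndexBuffer;
        foreach (var pass in regularEffect.CurrentTechnique.Passes)
        {
            pass.Apply();
            graphics.DrawIndexedPrimitives(PrimitiveType.TriangleList, part.VertexOffset, part.StartIndex, part.PrimitiveCount);
        }
    }
}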
Any hints on what I’m doing wrong?
Thanks
EDIT: I tried ignoring instanceTransform and multiplying only by ViewProjection, and the models render properly, but obviously they all end up with an identity world matrix (i.e. position 0,0,0, no rotation, scale 1,1,1). This confirms that the problem is the values inside the instanceTransform matrix. However, as mentioned, the same transformations look fine when I use them to draw “normally”. So it seems the issue is in how instanceTransform is passed from C# to the shader?
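Concretely, the test above is just this change inside MainVS (instanceTransform stays in the signature, it is simply not used):

// debug: skip the per-instance matrix and transform by ViewProjection only
output.Position = mul(input.Position, ViewProjection);   // instead of mul(input.Position, wvp)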