I’m trying to build a mesh for the Oculus Rift lens distortion from data spat out by the SDK.
The data includes multiple texcoords (for the red/green/blue channels) and a couple of other values that the vertex shader expects under extra POSITION semantics. After combining them with an IndexedGeometryBuffer I’m getting the error message:
Geometry is missing semantics: POSITION1 : POSITION2 : TEXCOORD1 : TEXCOORD2
Is it not OK to build a mesh with multiple entries of the same semantic?
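For reference, this is the vertex declaration I’d expect the geometry to need in raw D3D11 terms (a minimal sketch of my own; the struct and field names are assumptions, only the semantic names, indices and formats follow the shader’s VS_IN below):

#include <d3d11.h>

// Hypothetical CPU-side vertex mirroring the shader's VS_IN.
// Names are my own; only the semantics and formats matter.
struct DistortionVertex
{
    float Pos[2];          // POSITION0 - screen position in NDC
    float TimewarpLerp;    // POSITION1 - timewarp interpolation factor
    float Vignette;        // POSITION2 - vignette fade factor
    float TexR[2];         // TEXCOORD0 - red-channel lookup
    float TexG[2];         // TEXCOORD1 - green-channel lookup
    float TexB[2];         // TEXCOORD2 - blue-channel lookup
};

// Input layout matching VS_IN. D3D11 allows a semantic name to repeat
// as long as the semantic index differs.
static const D3D11_INPUT_ELEMENT_DESC layout[] =
{
    { "POSITION", 0, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "POSITION", 1, DXGI_FORMAT_R32_FLOAT,    0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "POSITION", 2, DXGI_FORMAT_R32_FLOAT,    0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "TEXCOORD", 1, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "TEXCOORD", 2, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};

So plain D3D11 itself has no problem with multiple POSITIONn entries; my guess is the geometry I’m joining just doesn’t end up declaring all six elements.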
I’ve attached a patch with example data and the shader; the shader code is also below:
//@author: vux
//@help: template for standard shaders
//@tags: template
//@credits:
Texture2D texture2d <string uiname="Texture";>;
SamplerState linearSampler : IMMUTABLE
{
    Filter = MIN_MAG_MIP_LINEAR;
    AddressU = Clamp;
    AddressV = Clamp;
};
cbuffer cbPerDraw : register( b0 )
{
    float4x4 tVP : VIEWPROJECTION;
};
cbuffer cbPerObj : register( b1 )
{
    float4x4 tW : WORLD;
    float4 cAmb <bool color=true;String uiname="Color";> = { 1.0f,1.0f,1.0f,1.0f };
};
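// Distortion parameters supplied per eye by the Oculus SDK
// (UV scale/offset plus predicted start/end timewarp rotations).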
float2 EyeToSourceUVScale, EyeToSourceUVOffset;
float4x4 EyeRotationStart, EyeRotationEnd;
float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat)
{
    // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
    // aberration and distortion). These are now "real world" vectors in direction (x,y,1)
    // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
    float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz );
    // Project them back onto the Z=1 plane of the rendered images.
    float2 flattened = (transformed.xy / transformed.z);
    // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye).
    return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset);
}
struct VS_IN
{
    float2 Position : POSITION;
    float timewarpLerpFactor : POSITION1;
    float Vignette : POSITION2;
    float2 TexCoord0 : TEXCOORD0;
    float2 TexCoord1 : TEXCOORD1;
    float2 TexCoord2 : TEXCOORD2;
};
struct vs2ps
{
    float4 oPosition : SV_Position;
    float2 oTexCoord0 : TEXCOORD0;
    float2 oTexCoord1 : TEXCOORD1;
    float2 oTexCoord2 : TEXCOORD2;
    float oVignette : TEXCOORD3;
};
vs2ps VS(VS_IN input)
{
    vs2ps output;
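    // Blend the two predicted eye rotations; the lerp factor encodes when
    // this vertex is scanned out during the frame.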
    float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, input.timewarpLerpFactor);
    output.oTexCoord0 = TimewarpTexCoord(input.TexCoord0, lerpedEyeRot);
    output.oTexCoord1 = TimewarpTexCoord(input.TexCoord1, lerpedEyeRot);
    output.oTexCoord2 = TimewarpTexCoord(input.TexCoord2, lerpedEyeRot);
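    // Mesh positions already arrive in NDC, so no transform is applied; depth is fixed at 0.5.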
    output.oPosition = float4(input.Position.xy, 0.5, 1.0);
    output.oVignette = input.Vignette; /* For vignette fade */
    return output;
}
float4 PS(vs2ps In) : SV_Target
{
    // 3 samples for fixing chromatic aberrations
    float R = texture2d.Sample(linearSampler, In.oTexCoord0.xy).r;
    float G = texture2d.Sample(linearSampler, In.oTexCoord1.xy).g;
    float B = texture2d.Sample(linearSampler, In.oTexCoord2.xy).b;
    return (In.oVignette * float4(R, G, B, 1));
}
technique10 Constant
{
    pass P0
    {
        SetVertexShader( CompileShader( vs_4_0, VS() ) );
        SetPixelShader( CompileShader( ps_4_0, PS() ) );
    }
}
Haculus Rift dx11.zip (855.8 kB)