'Screen space reflections bug

I try to implement screen space reflections in my graphics engine using ray marching algorithm. I've got weird results, which you can watch in the video:

https://youtu.be/FhLJzSrYJv8

As you can see, when i move the camera view direction (forward vector), reflections move too. I have some suspicions that the problem is related with coordinate system transformations. They are done in world space coordinates and I don't see any mistakes in functions.

Shader code:

vec3 ssToWorldSpace(vec2 uv, float depth)
{
    vec4 position = vec4(1.0f); 
 
    position.x = uv.x * 2.0f - 1.0f; 
    position.y = -(uv.y * 2.0f - 1.0f); 

    position.z = depth; 
 
    position = invProjView * position; 
 
    position /= position.w;

    return position.xyz;
}

vec2 viewSpaceToSs(vec3 position)
{
     vec4 pVP = proj * vec4(position,1.0f);
     pVP.xy /= pVP.w;
     pVP.xy = pVP.xy * 0.5 + 0.5;
     return pVP.xy;
}

float getDepth(vec2 uv)
{
    return texture(depthMap,uv).r;
}

vec4 getColor(vec2 uv)
{
    return texture(colorMap,uv);
}

vec4 getNormal(vec2 uv)
{
    return texture(normalMap,uv);
}

vec3 ssr(vec3 texelPositionWS, vec3 reflectDirWS)
{
    float error = 1.0f;
    float stepSize = 0.01f;
    const float maxSteps = 10;
    const float LDelmiter = 0.01f;
    vec2 projectedCoords;
    vec3 currentRay = texelPositionWS;

    float prevDepth = 0.0f;
    for(int i = 0; i < maxSteps; i++)
    {
        currentRay = currentRay + reflectDirWS * stepSize;

        projectedCoords = worldSpaceToSs(currentRay);
        float depth = getDepth(projectedCoords);
        if (depth < prevDepth)
            break;
        prevDepth = depth;

        vec3 newPositionWS = ssToWorldSpace(projectedCoords.xy, depth);

        stepSize = length(texelPositionWS - newPositionWS);
        stepSize = clamp(stepSize,0.0f,LDelmiter);
    }   

    projectedCoords = worldSpaceToSs(currentRay);

    float fresnel = normalize(2.8 * pow(1+dot(texelPositionWS, vec3(getNormal(TexCoords).xy,getNormal(TexCoords).z)), 2)); 
    return getColor(projectedCoords).xyz * fresnel;
}

void main()
{
    vec4 texelNormalWS = getNormal(TexCoords);
    vec3 texelPositionSS = vec3(TexCoords, getDepth(TexCoords));
    vec3 texelPositionWS = ssToWorldSpace(texelPositionSS.xy,texelPositionSS.z);

    vec3 viewDirWS  =  texelPositionWS - viewPos;

    vec3 reflectDirWS = normalize(reflect(viewDirWS,texelNormalWS.xyz));

    color = vec4(ssr(texelPositionWS, reflectDirWS),1.0f);
    color.xyz = hdr(color.xyz);
}

Any idea how to fix this problem?

UPD: I fixed previous bug with camera rotation and I'm almost sure that my vectors are in the same space (view space). But unfortunately i've got new issue: Reflections are shifted on Y axis and stretched.

You can see this bug by link: https://youtu.be/yzkgpcliBVQ

Updated parts of code:


void main()
{
    vec4 texelNormalWS = getNormal(TexCoords);
    vec4 texelNormalVS = inverse(transpose(view)) * texelNormalWS;

    vec4 texelPositionWS = getPosition(TexCoords);

    vec3 viewDirVS = (view * texelPositionWS).xyz;
    vec3 reflectDirVS = normalize(reflect(normalize(viewDirVS.xyz),normalize(texelNormalVS.xyz)));
   ...
}

vec3 ssrVS(vec3 ray, vec3 dir)
{
    float stepSize = 0.1f;
    const int maxSteps = 30;
    vec2 projectedCoords;

    dir*=stepSize;

    for(int i = 0; i < maxSteps; i++)
    {
        ray+=dir;

        projectedCoords = viewSpaceToSs(ray);
        float depth = getPosition(projectedCoords).z;

        if (depth > 100)
            continue;
        
        float dDepth = ray.z - depth;
        if(dDepth <= 0 && dir.z - dDepth < 1.2)
            projectedCoords = BinarySearchVS(ray,dir);
    }   

    return getColor(projectedCoords).xyz;
}

UPD:

I figured out that

if(dDepth <= 0 && dir.z - dDepth < 1.2)

doesn't hit most of the time.

I added after hit checking branch:

return vec3(1.0f,0.0f,0.0f);

got this:



Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow

Solution Source