1
0
mirror of https://git.dev.opencascade.org/repos/occt.git synced 2025-08-04 13:13:25 +03:00

0027607: Visualization - Implement adaptive screen space sampling in path tracing

This commit provides useful functionality for path tracing rendering core.

1) The Graphic3d_RenderingParams class was extended with an additional AdaptiveScreenSampling option (disabled by default).
   If this option is enabled, path tracing tries to adjust the number of samples for different screen areas.

   In this way, the more complex areas (from the point of view of lighting conditions) are sampled more intensively,
   while the simple areas are sampled very rarely.
   For example, caustics and glossy reflections are typical candidates for more precise sampling.

   In general, this makes it possible to equalize image convergence and to avoid wasting resources on already converged areas.
   It is also possible to visualize sampling densities by enabling the ShowSamplingTiles option
   (activating and deactivating this option does not affect the accumulated image).

2) Mixing OpenGL and ray-tracing output has been changed.
   Now blending is performed using OpenGL functionality, while ray-tracing shaders only output correct Z-value.

Test case bugs vis bug27083 has been updated,
since the alpha value is now correctly set by Ray-Tracing to 1, opaque.
This commit is contained in:
dbp
2016-07-13 12:19:27 +03:00
committed by kgv
parent 6a24c6ded9
commit 3a9b5dc86a
22 changed files with 1303 additions and 378 deletions

View File

@@ -1,29 +1,114 @@
//! Input image.
uniform sampler2D uInputTexture;
#ifdef ADAPTIVE_SAMPLING
//! Ray tracing depth image.
uniform sampler2D uDepthTexture;
#extension GL_ARB_shader_image_load_store : require
//! Gamma correction flag.
uniform int uApplyGamma;
//! OpenGL image used for accumulating rendering result.
volatile restrict layout(size1x32) uniform image2D uRenderImage;
//! OpenGL image storing variance of sampled pixels blocks.
volatile restrict layout(size1x32) uniform iimage2D uVarianceImage;
#else // ADAPTIVE_SAMPLING
//! Input image.
uniform sampler2D uInputTexture;
//! Ray tracing depth image.
uniform sampler2D uDepthTexture;
#endif // ADAPTIVE_SAMPLING
//! Number of accumulated frames.
uniform int uAccumFrames;
//! Is debug mode enabled for importance screen sampling.
uniform int uDebugAdaptive;
//! Output pixel color.
out vec4 OutColor;
//! RGB weight factors to calculate luminance.
#define LUMA vec3 (0.2126f, 0.7152f, 0.0722f)
//! Scale factor used to quantize visual error.
#define SCALE_FACTOR 1.0e6f
// =======================================================================
// function : main
// purpose :
// =======================================================================
void main (void)
{
#ifndef ADAPTIVE_SAMPLING
vec4 aColor = texelFetch (uInputTexture, ivec2 (gl_FragCoord.xy), 0);
#ifdef PATH_TRACING
float aDepth = aColor.w; // path tracing uses averaged depth
#else
float aDepth = texelFetch (uDepthTexture, ivec2 (gl_FragCoord.xy), 0).r;
#endif
gl_FragDepth = aDepth;
if (uApplyGamma == 1)
#else // ADAPTIVE_SAMPLING
ivec2 aPixel = ivec2 (gl_FragCoord.xy);
vec4 aColor = vec4 (0.0);
// fetch accumulated color and total number of samples
aColor.x = imageLoad (uRenderImage, ivec2 (3 * aPixel.x + 0,
2 * aPixel.y + 0)).x;
aColor.y = imageLoad (uRenderImage, ivec2 (3 * aPixel.x + 1,
2 * aPixel.y + 0)).x;
aColor.z = imageLoad (uRenderImage, ivec2 (3 * aPixel.x + 1,
2 * aPixel.y + 1)).x;
aColor.w = imageLoad (uRenderImage, ivec2 (3 * aPixel.x + 0,
2 * aPixel.y + 1)).x;
// calculate normalization factor
float aSampleWeight = 1.f / max (1.0, aColor.w);
// calculate averaged depth value
gl_FragDepth = imageLoad (uRenderImage, ivec2 (3 * aPixel.x + 2,
2 * aPixel.y + 1)).x * aSampleWeight;
// calculate averaged radiance for all samples and even samples only
float aHalfRad = imageLoad (uRenderImage, ivec2 (3 * aPixel.x + 2,
2 * aPixel.y + 0)).x * aSampleWeight * 2.f;
float aAverRad = dot (aColor.rgb, LUMA) * aSampleWeight;
// apply our 'tone mapping' operator (gamma correction and clamping)
aHalfRad = min (1.f, sqrt (aHalfRad));
aAverRad = min (1.f, sqrt (aAverRad));
// calculate visual error
float anError = (aAverRad - aHalfRad) * (aAverRad - aHalfRad);
// accumulate visual error to current block
imageAtomicAdd (uVarianceImage, ivec2 (aPixel / vec2 (BLOCK_SIZE)), int (anError * SCALE_FACTOR));
if (uDebugAdaptive == 0) // normal rendering
{
// apply gamma correction (we use gamma = 2)
OutColor = vec4 (sqrt (aColor.rgb), aColor.a);
aColor = vec4 (aColor.rgb * aSampleWeight, 1.0);
}
else
else // showing number of samples
{
OutColor = aColor;
aColor = vec4 (0.5f * aColor.rgb * aSampleWeight + vec3 (0.f, aColor.w / uAccumFrames * 0.35f, 0.f), 1.0);
}
#endif // ADAPTIVE_SAMPLING
#ifdef PATH_TRACING
// apply gamma correction (we use gamma = 2)
OutColor = vec4 (sqrt (aColor.rgb), 0.f);
#else // not PATH_TRACING
OutColor = aColor;
#endif
}

View File

@@ -512,15 +512,15 @@ float handleDirectLight (in vec3 theInput, in vec3 theToLight, in float theCosMa
//=======================================================================
// function : sampleLight
// purpose : general sampling function for directional and point lights
// purpose : General sampling function for directional and point lights
//=======================================================================
vec3 sampleLight (in vec3 theToLight, in bool isDirectional, in float theSmoothness, inout float thePDF)
vec3 sampleLight (in vec3 theToLight, inout float theDistance, in bool isInfinite, in float theSmoothness, inout float thePDF)
{
SLocalSpace aSpace = LocalSpace (theToLight);
SLocalSpace aSpace = LocalSpace (theToLight * (1.f / theDistance));
// for point lights smoothness defines radius
float aCosMax = isDirectional ? theSmoothness :
inversesqrt (1.f + theSmoothness * theSmoothness / dot (theToLight, theToLight));
float aCosMax = isInfinite ? theSmoothness :
inversesqrt (1.f + theSmoothness * theSmoothness / (theDistance * theDistance));
float aKsi1 = RandFloat();
float aKsi2 = RandFloat();
@@ -613,7 +613,6 @@ vec3 intersectLight (in SRay theRay, in bool isViewRay, in int theBounce, in flo
//=======================================================================
vec4 PathTrace (in SRay theRay, in vec3 theInverse)
{
float anOpenGlDepth = ComputeOpenGlDepth (theRay);
float aRaytraceDepth = MAXFLOAT;
vec3 aRadiance = ZERO;
@@ -648,32 +647,22 @@ vec4 PathTrace (in SRay theRay, in vec3 theInverse)
dot (aInvTransf1, aHit.Normal),
dot (aInvTransf2, aHit.Normal)));
// For polygons that are parallel to the screen plane, the depth slope
// is equal to 1, resulting in small polygon offset. For polygons
// that are at a large angle to the screen, the depth slope tends to 1,
// resulting in a larger polygon offset
float aPolygonOffset = uSceneEpsilon * EPS_SCALE /
max (abs (dot (theRay.Direct, aHit.Normal)), MIN_SLOPE);
if (anOpenGlDepth < aHit.Time + aPolygonOffset)
{
vec4 aSrcColorRGBA = ComputeOpenGlColor();
aRadiance += aThroughput.xyz * aSrcColorRGBA.xyz;
aDepth = INVALID_BOUNCES; // terminate path
}
theRay.Origin += theRay.Direct * aHit.Time; // get new intersection point
// Evaluate depth
// Evaluate depth on first hit
if (aDepth == 0)
{
// For polygons that are parallel to the screen plane, the depth slope
// is equal to 1, resulting in small polygon offset. For polygons
// that are at a large angle to the screen, the depth slope tends to 1,
// resulting in a larger polygon offset
float aPolygonOffset = uSceneEpsilon * EPS_SCALE /
max (abs (dot (theRay.Direct, aHit.Normal)), MIN_SLOPE);
// Hit point in NDC-space [-1,1] (the polygon offset is applied in the world space)
vec4 aNDCPoint = uViewMat * vec4 (theRay.Origin + theRay.Direct * aPolygonOffset, 1.f);
aNDCPoint.xyz *= 1.f / aNDCPoint.w;
aRaytraceDepth = aNDCPoint.z * 0.5f + 0.5f;
aRaytraceDepth = (aNDCPoint.z / aNDCPoint.w) * 0.5f + 0.5f;
}
// fetch material (BSDF)
@@ -701,7 +690,7 @@ vec4 PathTrace (in SRay theRay, in vec3 theInverse)
vec3 aTexColor = textureLod (
sampler2D (uTextureSamplers[int (aMaterial.Kd.w)]), aTexCoord.st, 0.f).rgb;
aMaterial.Kd.rgb *= aTexColor;
aMaterial.Kd.rgb *= aTexColor * aTexColor; // de-gamma correction (for gamma = 2)
}
#endif
@@ -732,8 +721,8 @@ vec4 PathTrace (in SRay theRay, in vec3 theInverse)
float aPDF = 1.f / uLightCount, aDistance = length (aLight.xyz);
aLight.xyz = sampleLight (aLight.xyz * (1.f / aDistance),
aLight.w == 0.f /* is infinite */, aParam.w /* angle cosine */, aPDF);
aLight.xyz = sampleLight (aLight.xyz, aDistance,
aLight.w == 0.f /* is infinite */, aParam.w /* max cos or radius */, aPDF);
vec3 aContrib = (1.f / aPDF) * aParam.rgb /* Le */ * handleMaterial (
aMaterial, toLocalSpace (aLight.xyz, aSpace), toLocalSpace (-theRay.Direct, aSpace));
@@ -787,13 +776,11 @@ vec4 PathTrace (in SRay theRay, in vec3 theInverse)
aHit.Normal * mix (-uSceneEpsilon, uSceneEpsilon, step (0.f, dot (aHit.Normal, anInput))), anInput);
theInverse = InverseDirection (anInput);
anOpenGlDepth = MAXFLOAT; // disable combining image with OpenGL output
}
gl_FragDepth = aRaytraceDepth;
return vec4 (aRadiance, 0.f);
return vec4 (aRadiance, aRaytraceDepth);
}
#endif

View File

@@ -1,3 +1,8 @@
#ifdef ADAPTIVE_SAMPLING
#extension GL_ARB_shader_image_load_store : require
#extension GL_NV_shader_atomic_float : require
#endif
#ifdef USE_TEXTURES
#extension GL_ARB_bindless_texture : require
#endif
@@ -66,11 +71,6 @@ uniform samplerBuffer uRaytraceLightSrcTexture;
//! Environment map texture.
uniform sampler2D uEnvironmentMapTexture;
//! Input pre-raytracing image rendered by OpenGL.
uniform sampler2D uOpenGlColorTexture;
//! Input pre-raytracing depth image rendered by OpenGL.
uniform sampler2D uOpenGlDepthTexture;
//! Total number of light sources.
uniform int uLightCount;
//! Intensity of global ambient light.
@@ -95,6 +95,14 @@ uniform float uSceneEpsilon;
uniform uvec2 uTextureSamplers[MAX_TEX_NUMBER];
#endif
#ifdef ADAPTIVE_SAMPLING
//! OpenGL image used for accumulating rendering result.
volatile restrict layout(size1x32) uniform image2D uRenderImage;
//! OpenGL image storing offsets of sampled pixels blocks.
coherent restrict layout(size2x32) uniform iimage2D uOffsetImage;
#endif
//! Top color of gradient background.
uniform vec4 uBackColorTop = vec4 (0.0);
//! Bottom color of gradient background.
@@ -240,7 +248,22 @@ vec3 InverseDirection (in vec3 theInput)
//=======================================================================
vec4 BackgroundColor()
{
#ifdef ADAPTIVE_SAMPLING
ivec2 aFragCoord = ivec2 (gl_FragCoord.xy);
ivec2 aTileXY = imageLoad (uOffsetImage, ivec2 (aFragCoord.x / BLOCK_SIZE,
aFragCoord.y / BLOCK_SIZE)).xy;
aTileXY.y += aFragCoord.y % min (uWinSizeY - aTileXY.y, BLOCK_SIZE);
return mix (uBackColorBot, uBackColorTop, float (aTileXY.y) / uWinSizeY);
#else
return mix (uBackColorBot, uBackColorTop, vPixel.y);
#endif
}
/////////////////////////////////////////////////////////////////////////////////////////
@@ -263,39 +286,6 @@ SRay GenerateRay (in vec2 thePixel)
return SRay (mix (aP0, aP1, thePixel.y), aDirection);
}
// =======================================================================
// function : ComputeOpenGlDepth
// purpose :
// =======================================================================
float ComputeOpenGlDepth (in SRay theRay)
{
// a depth in range [0,1]
float anOpenGlDepth = texelFetch (uOpenGlDepthTexture, ivec2 (gl_FragCoord.xy), 0).r;
// pixel point in NDC-space [-1,1]
vec4 aPoint = vec4 (2.0f * vPixel.x - 1.0f,
2.0f * vPixel.y - 1.0f,
2.0f * anOpenGlDepth - 1.0f,
1.0f);
vec4 aFinal = uUnviewMat * aPoint;
aFinal.xyz *= 1.f / aFinal.w;
return (anOpenGlDepth < 1.f) ? length (aFinal.xyz - theRay.Origin) : MAXFLOAT;
}
// =======================================================================
// function : ComputeOpenGlColor
// purpose :
// =======================================================================
vec4 ComputeOpenGlColor()
{
vec4 anOpenGlColor = texelFetch (uOpenGlColorTexture, ivec2 (gl_FragCoord.xy), 0);
// During blending with factors GL_SRC_ALPHA and GL_ONE_MINUS_SRC_ALPHA (for text and markers)
// the alpha channel (written in the color buffer) was squared.
anOpenGlColor.a = 1.f - sqrt (anOpenGlColor.a);
return anOpenGlColor;
}
// =======================================================================
// function : IntersectSphere
// purpose : Computes ray-sphere intersection
@@ -725,7 +715,7 @@ float SceneAnyHit (in SRay theRay, in vec3 theInverse, in float theDistance)
#endif
}
toContinue = (aHead >= 0);
toContinue = (aHead >= 0) && (aFactor > 0.1f);
if (aHead == aStop) // go to top-level BVH
{
@@ -876,7 +866,6 @@ vec4 Radiance (in SRay theRay, in vec3 theInverse)
int aTrsfId;
float anOpenGlDepth = ComputeOpenGlDepth (theRay);
float aRaytraceDepth = MAXFLOAT;
for (int aDepth = 0; aDepth < NB_BOUNCES; ++aDepth)
@@ -898,8 +887,7 @@ vec4 Radiance (in SRay theRay, in vec3 theInverse)
}
else
{
vec4 aGlColor = ComputeOpenGlColor();
aColor = vec4 (mix (aGlColor.rgb, BackgroundColor().rgb, aGlColor.w), aGlColor.w);
aColor = BackgroundColor();
}
aResult += aWeight.xyz * aColor.xyz; aWeight.w *= aColor.w;
@@ -915,31 +903,22 @@ vec4 Radiance (in SRay theRay, in vec3 theInverse)
dot (aInvTransf1, aHit.Normal),
dot (aInvTransf2, aHit.Normal)));
// For polygons that are parallel to the screen plane, the depth slope
// is equal to 1, resulting in small polygon offset. For polygons
// that are at a large angle to the screen, the depth slope tends to 1,
// resulting in a larger polygon offset
float aPolygonOffset = uSceneEpsilon * EPS_SCALE /
max (abs (dot (theRay.Direct, aHit.Normal)), MIN_SLOPE);
if (anOpenGlDepth < aHit.Time + aPolygonOffset)
{
vec4 aGlColor = ComputeOpenGlColor();
aResult += aWeight.xyz * aGlColor.xyz;
aWeight *= aGlColor.w;
}
theRay.Origin += theRay.Direct * aHit.Time; // intersection point
// Evaluate depth
// Evaluate depth on first hit
if (aDepth == 0)
{
// For polygons that are parallel to the screen plane, the depth slope
// is equal to 1, resulting in small polygon offset. For polygons
// that are at a large angle to the screen, the depth slope tends to 1,
// resulting in a larger polygon offset
float aPolygonOffset = uSceneEpsilon * EPS_SCALE /
max (abs (dot (theRay.Direct, aHit.Normal)), MIN_SLOPE);
// Hit point in NDC-space [-1,1] (the polygon offset is applied in the world space)
vec4 aNDCPoint = uViewMat * vec4 (theRay.Origin + theRay.Direct * aPolygonOffset, 1.f);
aNDCPoint.xyz *= 1.f / aNDCPoint.w;
aRaytraceDepth = aNDCPoint.z * 0.5f + 0.5f;
aRaytraceDepth = (aNDCPoint.z / aNDCPoint.w) * 0.5f + 0.5f;
}
vec3 aNormal = SmoothNormal (aHit.UV, aTriIndex);
@@ -1042,10 +1021,6 @@ vec4 Radiance (in SRay theRay, in vec3 theInverse)
{
theRay.Direct = Refract (theRay.Direct, aNormal, aOpacity.z, aOpacity.w);
}
else
{
anOpenGlDepth -= aHit.Time + uSceneEpsilon;
}
}
else
{
@@ -1074,8 +1049,6 @@ vec4 Radiance (in SRay theRay, in vec3 theInverse)
theInverse = 1.0f / max (abs (theRay.Direct), SMALL);
theInverse = mix (-theInverse, theInverse, step (ZERO, theRay.Direct));
anOpenGlDepth = MAXFLOAT; // disable combining image with OpenGL output
}
theRay.Origin += theRay.Direct * uSceneEpsilon;

View File

@@ -1,19 +1,24 @@
out vec4 OutColor;
// Seed for random number generator
// Seed for random number generator (generated on CPU).
uniform int uFrameRndSeed;
// Weight of current frame related to accumulated frames.
uniform float uSampleWeight;
//! Input accumulated image.
uniform sampler2D uAccumTexture;
//! Enabled/disbales using of single RNG seed for image 16x16 blocks.
//! Increases performance up to 4 times, but noise becomes structured.
//! Enables/disables using of single RNG seed for 16x16 image
//! blocks. Increases performance up to 4x, but the noise
//! becomes structured. Can be used for final rendering.
uniform int uBlockedRngEnabled;
#define MAX_RADIANCE vec3 (10.f)
#ifndef ADAPTIVE_SAMPLING
//! Weight of current frame related to accumulated samples.
uniform float uSampleWeight;
//! Input accumulated image.
uniform sampler2D uAccumTexture;
#endif
//! Maximum radiance that can be added to the pixel. Decreases noise
//! level, but introduces some bias.
#define MAX_RADIANCE vec3 (25.f)
// =======================================================================
// function : main
@@ -21,35 +26,82 @@ uniform int uBlockedRngEnabled;
// =======================================================================
void main (void)
{
SeedRand (uFrameRndSeed, uWinSizeX, uBlockedRngEnabled == 0 ? 1 : 16);
#ifndef PATH_TRACING
SRay aRay = GenerateRay (vPixel);
#else
ivec2 aWinSize = textureSize (uAccumTexture, 0);
SeedRand (uFrameRndSeed, aWinSize.x, uBlockedRngEnabled == 0 ? 1 : 8);
ivec2 aFragCoord = ivec2 (gl_FragCoord.xy);
SRay aRay = GenerateRay (vPixel +
vec2 (RandFloat() + 1.f, RandFloat() + 1.f) / vec2 (aWinSize));
#endif
#ifdef ADAPTIVE_SAMPLING
vec3 aInvDirect = 1.f / max (abs (aRay.Direct), SMALL);
ivec2 aTileXY = imageLoad (uOffsetImage, ivec2 (aFragCoord.x / BLOCK_SIZE,
aFragCoord.y / BLOCK_SIZE)).xy;
aInvDirect = vec3 (aRay.Direct.x < 0.f ? -aInvDirect.x : aInvDirect.x,
aRay.Direct.y < 0.f ? -aInvDirect.y : aInvDirect.y,
aRay.Direct.z < 0.f ? -aInvDirect.z : aInvDirect.z);
ivec2 aRealBlockSize = ivec2 (min (uWinSizeX - aTileXY.x, BLOCK_SIZE),
min (uWinSizeY - aTileXY.y, BLOCK_SIZE));
aFragCoord.x = aTileXY.x + (aFragCoord.x % aRealBlockSize.x);
aFragCoord.y = aTileXY.y + (aFragCoord.y % aRealBlockSize.y);
#endif // ADAPTIVE_SAMPLING
vec2 aPnt = vec2 (aFragCoord.x + RandFloat(),
aFragCoord.y + RandFloat());
SRay aRay = GenerateRay (aPnt / vec2 (uWinSizeX, uWinSizeY));
#endif // PATH_TRACING
vec3 aInvDirect = InverseDirection (aRay.Direct);
#ifdef PATH_TRACING
vec4 aColor = PathTrace (aRay, aInvDirect);
if (any (isnan (aColor.xyz)))
if (any (isnan (aColor.rgb)))
{
aColor.rgb = ZERO;
}
aColor.rgb = min (aColor.rgb, MAX_RADIANCE);
OutColor = mix (texture2D (uAccumTexture, vPixel), aColor, uSampleWeight);
#ifdef ADAPTIVE_SAMPLING
// accumulate RGB color and depth
imageAtomicAdd (uRenderImage, ivec2 (3 * aFragCoord.x + 0,
2 * aFragCoord.y + 0), aColor.r);
imageAtomicAdd (uRenderImage, ivec2 (3 * aFragCoord.x + 1,
2 * aFragCoord.y + 0), aColor.g);
imageAtomicAdd (uRenderImage, ivec2 (3 * aFragCoord.x + 1,
2 * aFragCoord.y + 1), aColor.b);
imageAtomicAdd (uRenderImage, ivec2 (3 * aFragCoord.x + 2,
2 * aFragCoord.y + 1), aColor.w);
// accumulate number of samples
float aNbSamples = imageAtomicAdd (uRenderImage, ivec2 (3 * aFragCoord.x + 0,
2 * aFragCoord.y + 1), 1.0);
if (int (aNbSamples) % 2 == 0) // accumulate luminance for even samples only
{
imageAtomicAdd (uRenderImage, ivec2 (3 * aFragCoord.x + 2,
2 * aFragCoord.y + 0), dot (LUMA, aColor.rgb));
}
discard; // fragment should not be written to frame buffer
#else
OutColor = mix (texture2D (uAccumTexture, vPixel), aColor, uSampleWeight);
#endif // ADAPTIVE_SAMPLING
#else
OutColor = clamp (Radiance (aRay, aInvDirect), 0.f, 1.f);
#endif
#endif // PATH_TRACING
}