Commit 8cd849e3 authored by Alexander Wiebel's avatar Alexander Wiebel
Browse files

Merge branch 'perspective-raymarching'

parents 65ef1f48 955cc3ab
......@@ -39,9 +39,9 @@ WDataTexture3D::WDataTexture3D( std::shared_ptr< WValueSetBase > valueSet, std::
setTextureSize( grid->getNbCoordsX(), grid->getNbCoordsY(), grid->getNbCoordsZ() );
// data textures do not repeat or something
setWrap( osg::Texture::WRAP_S, osg::Texture::CLAMP_TO_BORDER );
setWrap( osg::Texture::WRAP_T, osg::Texture::CLAMP_TO_BORDER );
setWrap( osg::Texture::WRAP_R, osg::Texture::CLAMP_TO_BORDER );
setWrap( osg::Texture::WRAP_S, osg::Texture::CLAMP_TO_EDGE );
setWrap( osg::Texture::WRAP_T, osg::Texture::CLAMP_TO_EDGE );
setWrap( osg::Texture::WRAP_R, osg::Texture::CLAMP_TO_EDGE );
thresholdLower()->setMin( valueSet->getMinimumValue() );
thresholdLower()->setMax( valueSet->getMaximumValue() );
......
......@@ -51,6 +51,9 @@ in float v_sampleDistance;
// The steps in relation to a default number of steps of 128.
in float v_relativeSampleDistance;
// The position of the camera
flat in vec3 v_eyePos;
/////////////////////////////////////////////////////////////////////////////
// Uniforms
/////////////////////////////////////////////////////////////////////////////
......@@ -84,10 +87,10 @@ uniform int u_samples;
*
* \return the end point
*/
vec3 findRayEnd( in vec3 rayStart, out float d )
vec3 findRayEnd( in vec3 rayStart, in vec3 ray, out float d )
{
// we need to ensure the vector components are not exactly 0.0 since they are used for division
vec3 r = v_ray + vec3( 0.000000001 );
vec3 r = ray + vec3( 0.000000001 );
vec3 p = rayStart;
// v_ray in cube coordinates is used to check against the unit cube borders
......@@ -226,6 +229,12 @@ float fastpow( float a, float b )
*/
void main()
{
vec3 ray = v_ray;
if( osg_ProjectionMatrix[3][3] == 0.0 )
{
ray = v_rayStart - v_eyePos;
}
// First, find the rayEnd point. We need to do it in the fragment shader as the ray end point may be interpolated wrong
// when done for each vertex.
float totalDistance = 0.0; // the maximal distance along the ray until the BBox ends
......@@ -235,12 +244,12 @@ void main()
// stochastic jittering can help to avoid these ugly wood-grain artifacts with larger sampling distances but might
// introduce some noise artifacts.
float jitter = 0.5 - texture( u_jitterSampler, gl_FragCoord.xy / float( u_jitterSizeX ) ).r;
vec3 rayStart = v_rayStart + ( v_ray * v_sampleDistance * jitter );
vec3 rayStart = v_rayStart + ( ray * v_sampleDistance * jitter );
#else
vec3 rayStart = v_rayStart;
#endif
vec3 rayEnd = findRayEnd( rayStart, totalDistance );
vec3 rayEnd = findRayEnd( rayStart, ray, totalDistance );
#ifdef MIP_ENABLED
// There is no nice early ray termination, so this will slow things
......@@ -253,7 +262,7 @@ void main()
while( currentDistance <= ( totalDistance - 0.02 ) )
{
// get current value, classify and illuminate
vec3 rayPoint = rayStart + ( currentDistance * v_ray );
vec3 rayPoint = rayStart + ( currentDistance * ray );
float alpha = transferFunction( texture( u_volumeSampler, rayPoint ).r ).a;
if( alpha > maxalpha )
{
......@@ -307,7 +316,7 @@ void main()
for( int i = 0; i < 10; ++i )
{
// get current value, classify and illuminate
vec3 rayPoint = rayStart + ( currentDistance * v_ray );
vec3 rayPoint = rayStart + ( currentDistance * ray );
vec4 src = localIllumination( rayPoint, transferFunction( texture( u_volumeSampler, rayPoint ).r ) );
// associated colors needed
src.rgb *= src.a;
......@@ -320,7 +329,7 @@ void main()
src.a = 1.0 - fastpow( 1.0 - src.a, v_relativeSampleDistance );
#endif
/* vec3 planeNorm = -v_ray;
/* vec3 planeNorm = -ray;
vec3 pSphere[16] = vec3[](vec3(0.53812504, 0.18565957, -0.43192),vec3(0.13790712, 0.24864247, 0.44301823),vec3(0.33715037, 0.56794053, -0.005789503),vec3(-0.6999805, -0.04511441, -0.0019965635),vec3(0.06896307, -0.15983082, -0.85477847),vec3(0.056099437, 0.006954967, -0.1843352),vec3(-0.014653638, 0.14027752, 0.0762037),vec3(0.010019933, -0.1924225, -0.034443386),vec3(-0.35775623, -0.5301969, -0.43581226),vec3(-0.3169221, 0.106360726, 0.015860917),vec3(0.010350345, -0.58698344, 0.0046293875),vec3(-0.08972908, -0.49408212, 0.3287904),vec3(0.7119986, -0.0154690035, -0.09183723),vec3(-0.053382345, 0.059675813, -0.5411899),vec3(0.035267662, -0.063188605, 0.54602677),vec3(-0.47761092, 0.2847911, -0.0271716));
float aoFactor = 0.0;
......
......@@ -46,51 +46,43 @@ out float v_sampleDistance;
// The steps in relation to a default number of steps of 128.
out float v_relativeSampleDistance;
// The position of the camera
flat out vec3 v_eyePos;
/////////////////////////////////////////////////////////////////////////////
// Uniforms
/////////////////////////////////////////////////////////////////////////////
// texture containing the data
uniform sampler3D tex0;
// The number of samples to use.
uniform int u_samples;
/////////////////////////////////////////////////////////////////////////////
// Attributes
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
// Variables
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
// Functions
/////////////////////////////////////////////////////////////////////////////
/**
* Main entry point of the vertex shader.
*/
/**
 * Main entry point of the vertex shader.
 *
 * Sets up the per-vertex raycasting state: the ray start point in texture
 * space, the sampling distances derived from u_samples, and — depending on
 * the projection type — either the ray direction (orthographic) or the eye
 * position in object space (perspective, used by the fragment shader to
 * derive a per-fragment ray).
 */
void main()
{
    // to have equidistant sampling for each side of the box, use a fixed step size
    v_sampleDistance = 1.0 / float( u_samples );
    v_relativeSampleDistance = 128.0 / float( u_samples );

    // in texture space, the starting point simply is the current surface point in texture space
    v_rayStart = osg_Vertex.xyz;

    // osg_ProjectionMatrix[3][3] is 1.0 for orthographic projections and 0.0 for perspective ones
    if( osg_ProjectionMatrix[3][3] == 1.0 )
    {
        // orthographic:
        // transform the ray direction to texture space, which equals object space
        // Therefore use two points, as we transform a vector
        vec4 camLookAt = vec4( 0.0, 0.0, -1.0, 0.0 );
        vec4 camPos = vec4( 0.0, 0.0, 0.0, 0.0 );
        v_ray = normalize( worldToLocal( camLookAt, camPos ).xyz );
    }
    else
    {
        // perspective:
        // calculate object space coordinate for camera
        // create vector between camera and vertex
        v_eyePos = ( inverse( osg_ModelViewMatrix ) * vec4( 0.0, 0.0, 0.0, 1.0 ) ).xyz;
        // signal the fragment shader to build the ray from v_eyePos instead
        v_ray = vec3( 0.0 );
    }

    // Simply project the vertex
    gl_Position = osg_ModelViewProjectionMatrix * osg_Vertex;
    //gl_FrontColor = osg_Color;
}
......@@ -39,9 +39,15 @@
// The ray's starting point in texture space
in vec3 v_rayStart;
// The ray direction in texture space
// The ray direction in texture space, normalized
in vec3 v_ray;
// The sampling distance
in float v_sampleDistance;
// The steps in relation to a default number of steps of 128.
in float v_relativeSampleDistance;
// the Surface normal at this point
in vec3 v_normal;
......@@ -51,8 +57,10 @@ in float v_isovalue;
// The scaling component of the modelview matrix.
in float v_worldScale;
in vec4 v_color;
// The position of the camera
flat in vec3 v_eyePos;
in vec4 v_color;
/////////////////////////////////////////////////////////////////////////////
// Uniforms
......@@ -93,6 +101,7 @@ uniform float u_alpha;
// the ratio between normal color and the colormapping color.
uniform float u_colormapRatio;
/////////////////////////////////////////////////////////////////////////////
// Attributes
/////////////////////////////////////////////////////////////////////////////
......@@ -105,12 +114,20 @@ uniform float u_colormapRatio;
// Functions
/////////////////////////////////////////////////////////////////////////////
vec3 findRayEnd( out float d )
/**
 * Uses the ray direction and the ray entry point into the cube to calculate the exit point. This is needed later on to
 * get a proper maximum distance along the ray.
 *
 * \param rayStart in - the start point of the ray in the volume
 * \param ray in - the normalized ray direction
 * \param d out - this value will contain the maximum distance along the ray until the end of the cube
 *
 * \return the end point
 */
vec3 findRayEnd( in vec3 rayStart, in vec3 ray, out float d )
{
vec3 r = v_ray + vec3( 0.0000001 );
vec3 p = v_rayStart;
// we need to ensure the vector components are not exactly 0.0
// we need to ensure the vector components are not exactly 0.0 since they are used for division
vec3 r = ray + vec3( 0.000000001 );
vec3 p = rayStart;
// v_ray in cube coordinates is used to check against the unit cube borders
// when will v_ray reach the front face?
......@@ -128,11 +145,6 @@ vec3 findRayEnd( out float d )
return p + ( r * d );
}
/**
 * Computes the Euclidean distance between two points.
 *
 * \param p1 the first point
 * \param p2 the second point
 *
 * \return the distance between p1 and p2
 */
float pointDistance( vec3 p1, vec3 p2 )
{
    // distance( a, b ) is the GLSL built-in equivalent of length( a - b )
    return distance( p1, p2 );
}
/**
* Returns the gradient vector at the given position.
*
......@@ -140,7 +152,7 @@ float pointDistance( vec3 p1, vec3 p2 )
*
* \return the gradient, NOT normalized
*/
vec3 getNormal( in vec3 position )
vec3 getNormal( in vec3 position, in vec3 ray )
{
vec3 grad;
#ifdef GRADIENTTEXTURE_ENABLED
......@@ -148,7 +160,7 @@ vec3 getNormal( in vec3 position )
#else
grad = getGradient( u_texture0Sampler, position );
#endif
return sign( dot( grad, -v_ray ) ) * grad;
return sign( dot( grad, -ray ) ) * grad;
}
/**
......@@ -156,54 +168,50 @@ vec3 getNormal( in vec3 position )
*/
void main()
{
// init outputs
wgeInitGBuffer();
wge_FragColor = vec4( 1.0, 0.0, 0.0, 1.0 );
gl_FragDepth = 1.0;
#define SAMPLES u_steps
vec3 ray = v_ray;
if( osg_ProjectionMatrix[3][3] == 0.0 )
{
ray = v_rayStart - v_eyePos;
}
#ifdef WGE_POSTPROCESSING_ENABLED
wge_FragZoom = v_worldScale;
#endif
// please do not laugh, it is a very very very simple "isosurface" shader
// First, find the rayEnd point. We need to do it in the fragment shader as the ray end point may be interpolated wrong
// when done for each vertex.
float totalDistance = 0.0;
vec3 rayEnd = findRayEnd( totalDistance );
float stepDistance = totalDistance / float( SAMPLES );
// the current value inside the data
float value;
float totalDistance = 0.0; // the maximal distance along the ray until the BBox ends
float currentDistance = 0.02; // accumulated distance along the ray
#ifdef STOCHASTICJITTER_ENABLED
// stochastic jittering can help to avoid these ugly wood-grain artifacts with larger sampling distances but might
// introduce some noise artifacts.
float jitter = 0.5 - texture( u_texture1Sampler, gl_FragCoord.xy / u_texture1SizeX ).r;
// the point along the ray in cube coordinates
vec3 curPoint = v_ray + v_rayStart + ( v_ray * stepDistance * jitter );
float jitter = 0.5 - texture( u_texture1Sampler, gl_FragCoord.xy / float( u_texture1SizeX ) ).r;
vec3 rayStart = v_rayStart + ( ray * v_sampleDistance * jitter );
#else
// the point along the ray in cube coordinates
vec3 curPoint = v_ray + v_rayStart;
vec3 rayStart = v_rayStart;
#endif
vec3 rayStart = curPoint;
// the step counter
int i = 1;
while( i < SAMPLES )
vec3 rayEnd = findRayEnd( rayStart, ray, totalDistance );
// walk along the ray
while( currentDistance <= ( totalDistance - 0.02 ) )
{
// get current value
value = texture( u_texture0Sampler, curPoint ).r;
vec3 rayPoint = rayStart + ( currentDistance * ray );
float value = texture( u_texture0Sampler, rayPoint ).r;
// is it the isovalue?
if( ( abs( value - v_isovalue ) < ISO_EPSILON )
#ifdef BORDERCLIP_ENABLED
&&
!( length( curPoint - rayStart ) < u_borderClipDistance )
!( length( rayPoint - rayStart ) < u_borderClipDistance )
&&
!( length( curPoint - rayEnd ) < u_borderClipDistance )
!( length( rayPoint - rayEnd ) < u_borderClipDistance )
#endif
)
{
......@@ -211,7 +219,7 @@ void main()
// Therefore, the complete standard pipeline is reproduced here:
// 1: transfer to world space and right after it, to eye space
vec4 curPointProjected = osg_ModelViewProjectionMatrix * vec4( curPoint, 1.0 );
vec4 rayPointProjected = osg_ModelViewProjectionMatrix * vec4( rayPoint, 1.0 );
// 2: scale to screen space and [0,1]
// -> x and y is not needed
......@@ -219,16 +227,13 @@ void main()
// curPointProjected.x = curPointProjected.x * 0.5 + 0.5 ;
// curPointProjected.y /= curPointProjected.w;
// curPointProjected.y = curPointProjected.y * 0.5 + 0.5 ;
curPointProjected.z /= curPointProjected.w;
curPointProjected.z = curPointProjected.z * 0.5 + 0.5;
rayPointProjected.z /= rayPointProjected.w;
rayPointProjected.z = rayPointProjected.z * 0.5 + 0.5;
// 3: set depth value
gl_FragDepth = curPointProjected.z;
// 4: Shading
gl_FragDepth = rayPointProjected.z;
// find a proper normal for a headlight in world-space
vec3 normal = ( osg_ModelViewMatrix * vec4( getNormal( curPoint ), 0.0 ) ).xyz;
vec3 normal = ( osg_ModelViewMatrix * vec4( getNormal( rayPoint, ray ), 0.0 ) ).xyz;
#ifdef WGE_POSTPROCESSING_ENABLED
wge_FragNormal = textureNormalize( normal );
#endif
......@@ -245,23 +250,16 @@ void main()
// 4: set color
// mix color with colormap
vec4 color = mix(
colormapping( vec4( curPoint.x * u_texture0SizeX, curPoint.y * u_texture0SizeY, curPoint.z * u_texture0SizeZ, 1.0 ) ),
colormapping( vec4( rayPoint.x * u_texture0SizeX, rayPoint.y * u_texture0SizeY, rayPoint.z * u_texture0SizeZ, 1.0 ) ),
vec4( v_color.rgb, u_alpha ),
1.0 - u_colormapRatio );
// 5: the final color construction
wge_FragColor = vec4( light * color.rgb, color.a );
break;
}
else
{
// no it is not the iso value
// -> continue along the ray
curPoint += stepDistance * v_ray;
}
// do not miss to count the steps already done
i++;
// go to next value
currentDistance += v_sampleDistance;
}
}
......@@ -31,6 +31,7 @@
#include "WGETransformationTools.glsl"
/////////////////////////////////////////////////////////////////////////////
// Varyings
/////////////////////////////////////////////////////////////////////////////
......@@ -38,9 +39,15 @@
// The ray's starting point in texture space
out vec3 v_rayStart;
// The ray direction in texture space
// The ray direction in texture space, normalized
out vec3 v_ray;
// The sampling distance
out float v_sampleDistance;
// The steps in relation to a default number of steps of 128.
out float v_relativeSampleDistance;
// the Surface normal at this point
out vec3 v_normal;
......@@ -50,12 +57,15 @@ out float v_isovalue;
// The scaling component of the modelview matrix.
out float v_worldScale;
out vec4 v_color;
// The position of the camera
flat out vec3 v_eyePos;
out vec4 v_color;
/////////////////////////////////////////////////////////////////////////////
// Uniforms
/////////////////////////////////////////////////////////////////////////////
uniform int u_steps;
// scaling factor of the data in the texture. Needed for descaling.
uniform float u_texture0Scale;
......@@ -66,18 +76,6 @@ uniform float u_texture0Min;
// The isovalue to use.
uniform float u_isovalue;
/////////////////////////////////////////////////////////////////////////////
// Attributes
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
// Variables
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
// Functions
/////////////////////////////////////////////////////////////////////////////
/**
* Main entry point of the vertex shader.
*/
......@@ -85,28 +83,40 @@ void main()
{
colormapping();
// scale isovalue to equal the texture data scaling.
v_isovalue = ( u_isovalue - u_texture0Min ) / u_texture0Scale;
// for easy access to texture coordinates
// gl_TexCoord[0] = osg_MultiTexCoord0;
v_normal = osg_Normal;
// in texture space, the starting point simply is the current surface point in texture space
v_rayStart = osg_MultiTexCoord0.xyz; // this equals osg_Vertex!
// transform the ray direction to texture space, which equals object space
// Therefore use two points, as we transform a vector
vec4 camLookAt = vec4( 0.0, 0.0, -1.0, 0.0 );
v_ray = worldToLocal( camLookAt ).xyz;
// to have equidistant sampling for each side of the box, use a fixed step size
v_sampleDistance = 1.0 / float( u_steps );
v_relativeSampleDistance = 128.0 / float( u_steps );
#ifdef WGE_POSTPROCESSING_ENABLED
// Keep track of scaling information. This is needed by some postprocessors.
v_worldScale = getModelViewScale();
#endif
// in texture space, the starting point simply is the current surface point in texture space
v_rayStart = osg_Vertex.xyz;
if( osg_ProjectionMatrix[3][3] == 1.0 )
{
// orthographic:
// transform the ray direction to texture space, which equals object space
// Therefore use two points, as we transform a vector
vec4 camLookAt = vec4( 0.0, 0.0, -1.0, 0.0 );
vec4 camPos = vec4( 0.0, 0.0, 0.0, 0.0 );
v_ray = normalize( worldToLocal( camLookAt, camPos ).xyz );
}
else
{
// perspective:
// calculate object space coordinate for camera
// create vector between camera and vertex
v_eyePos = ( inverse( osg_ModelViewMatrix ) * vec4( 0.0, 0.0, 0.0, 1.0 ) ).xyz;
v_ray = vec3( 0.0 );
}
#ifdef WGE_POSTPROCESSING_ENABLED
// Keep track of scaling information. This is needed by some postprocessors.
v_worldScale = getModelViewScale();
#endif
// Simply project the vertex
gl_Position = osg_ModelViewProjectionMatrix * osg_Vertex;
v_color = osg_Color;
gl_Position = osg_ModelViewProjectionMatrix * osg_Vertex;
}
......@@ -45,6 +45,8 @@ WRTTCamera::WRTTCamera( uint32_t width,
setupCamera();
setupRTT();
wge::enableTransparency( m_node );
addChild( m_node );
}
......
......@@ -29,6 +29,7 @@
#include "core/graphicsEngine/WGECamera.h"
#include "core/graphicsEngine/WGEGroupNode.h"
#include "core/graphicsEngine/WGEUtils.h"
#include "core/graphicsEngine/offscreen/WGEOffscreenRenderNode.h"
/**
......
......@@ -68,12 +68,12 @@ void main()
vec4 finalColor = getColor( pixelCoord );
float depth = getDepth( pixelCoord );
if( finalColor.a == 0.0 )
{
finalColor = vec4( 1.0, 1.0, 1.0, 1.0 );
}
// if( finalColor.a == 0.0 )
// {
// finalColor = vec4( 1.0, 1.0, 1.0, 1.0 );
// }
gl_FragColor = finalColor;
gl_FragColor = mix( vec4( 1.0 ), finalColor, finalColor.a );
gl_FragDepth = depth;
}
......@@ -118,17 +118,19 @@ void WMWriteCSV::propertyCallback()
std::shared_ptr< WDataSetCSV > csvdataSet = m_CSVInput->getData();
std::shared_ptr< WDataSetFibers > fibersdataSet = m_fibersInput->getData();
if( !csvdataSet )
{
throw WException( "The Data-Modul-CSV-connection is missing." );
}
if( !fibersdataSet )
{
throw WException( "The Point-Connector-connection is missing." );
}
writeToFile();
if( !csvdataSet )
{
writeOnlyFibersToFile();
}
else
{
writeToFile();
}
}
std::list< std::tuple < float, float, float, size_t > > WMWriteCSV::getListOfInternalVertex( WDataSetFibers::SPtr fibers )
......@@ -221,6 +223,29 @@ size_t WMWriteCSV::createStartCounter( std::list< std::tuple < float, float, flo
return eventIDcounter + 1;
}
void WMWriteCSV::writeOnlyFibersToFile()
{
std::list< std::tuple < float, float, float, size_t > > listOfInternalVertex = getListOfInternalVertex( m_fibersInput->getData() );
std::ofstream newCSVFile( getPathToSave() );
if( !newCSVFile.is_open() )
{
throw WException( "Could not create new CSV in the selected source folder" );
}
newCSVFile << "posX,posY,posZ,eventID" << std::endl;
for( auto elm = listOfInternalVertex.begin(); elm != listOfInternalVertex.end(); elm++ )
{
float posX = std::get< 0 >( *elm );
float posY = std::get< 1 >( *elm );
float posZ = std::get< 2 >( *elm );
size_t evnt = std::get< 3 >( *elm );
newCSVFile << posX << "," << posY << "," << posZ << "," << evnt << std::endl;
}