
Started work on the IpsviRaycaster processor implementing Image Plane Sweep Volume Illumination (IPSVI) by Sundén et al.
- Sweep direction setup works for directional lights.
- Computation of the illumination cache plane works (a CPU-side setup sketch follows below).
- Implemented a simplified version of the IPSVI shader; it seems to work to some extent.
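As a rough illustration of what the CPU-side setup could look like, here is a minimal sketch of computing the illumination cache (IC) plane for a directional light; it is not the processor's actual processImpl() code. It targets the _icOrigin, _icNormal, _icRightVector and _icUpVector uniforms of the shader below and assumes the cgt vector math utilities (cgt::vec3, cgt::normalize, cgt::cross); the function name computeIcPlane and the parameters lightDirection, volumeCenter and volumeRadius are made up for this sketch.

#include <cmath>

#include "cgt/vector.h"

/// Parameterization of the illumination cache plane, matching the shader uniforms.
struct IcPlane {
    cgt::vec3 origin;   ///< point on the plane mapping to IC texel (0, 0)  ->  _icOrigin
    cgt::vec3 normal;   ///< plane normal, pointing towards the light       ->  _icNormal
    cgt::vec3 right;    ///< in-plane basis vector for the IC x axis        ->  _icRightVector
    cgt::vec3 up;       ///< in-plane basis vector for the IC y axis        ->  _icUpVector
};

/// Sketch: place the IC plane just outside the volume's bounding sphere, facing the
/// directional light, and build an orthonormal in-plane basis for the cache lookup.
IcPlane computeIcPlane(const cgt::vec3& lightDirection, const cgt::vec3& volumeCenter, float volumeRadius) {
    IcPlane p;
    p.normal = cgt::normalize(-lightDirection);
    p.origin = volumeCenter - (p.normal * volumeRadius);

    // pick a helper axis that is not (nearly) parallel to the plane normal
    const cgt::vec3 helper = (std::abs(p.normal.z) < 0.9f) ? cgt::vec3(0.f, 0.f, 1.f) : cgt::vec3(1.f, 0.f, 0.f);
    p.right = cgt::normalize(cgt::cross(helper, p.normal));
    p.up = cgt::cross(p.normal, p.right);
    return p;
}

Note that calcIcSamplePosition() in the shader uses _icRightVector/_icUpVector directly to compute integer texel indices, so in practice the right/up vectors returned here would additionally have to be scaled from world units to IC texels.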
parent b4b9cfbf
// ================================================================================================
//
// This file is part of the CAMPVis Software Framework.
//
// If not explicitly stated otherwise: Copyright (C) 2012-2015, all rights reserved,
// Christian Schulte zu Berge <christian.szb@in.tum.de>
// Chair for Computer Aided Medical Procedures
// Technische Universitaet Muenchen
// Boltzmannstr. 3, 85748 Garching b. Muenchen, Germany
//
// For a full list of authors and contributors, please refer to the file "AUTHORS.txt".
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
//
// ================================================================================================
layout(location = 0) out vec4 out_Color; ///< outgoing fragment color
layout(location = 1) out vec4 out_FHP; ///< outgoing fragment first hitpoint
layout(location = 2) out vec4 out_FHN; ///< outgoing fragment first hit normal
#include "tools/gradient.frag"
#include "tools/raycasting.frag"
#include "tools/shading.frag"
#include "tools/texture2d.frag"
#include "tools/texture3d.frag"
#include "tools/transferfunction.frag"
uniform vec2 _viewportSizeRCP;
uniform float _jitterStepSizeMultiplier;
// ray entry points
uniform sampler2D _entryPoints;
uniform sampler2D _entryPointsDepth;
uniform TextureParameters2D _entryParams;
// ray exit points
uniform sampler2D _exitPoints;
uniform sampler2D _exitPointsDepth;
uniform TextureParameters2D _exitParams;
// DRR volume
uniform sampler3D _volume;
uniform TextureParameters3D _volumeTextureParams;
// Transfer function
uniform sampler1D _transferFunction;
uniform TFParameters1D _transferFunctionParams;
// Illumination cache
uniform layout(rgba8) image2D _icImageIn;
uniform layout(rgba8) image2D _icImageOut;
uniform vec3 _icOrigin;
uniform vec3 _icNormal;
uniform vec3 _icRightVector;
uniform vec3 _icUpVector;
uniform float _shadowIntensity = 0.5;
uniform LightSource _lightSource;
uniform vec3 _cameraPosition;
uniform float _samplingStepSize;
const float SAMPLING_BASE_INTERVAL_RCP = 200.0;
ivec2 calcIcSamplePosition(vec3 worldPosition) {
    // Project the world position onto the IC plane along its normal.
    // Note: taking abs() of the signed distance assumes the sample lies on the negative side
    // of the plane (i.e. dot(diag, _icNormal) <= 0), which the IC plane setup has to guarantee.
    const vec3 diag = worldPosition - _icOrigin;
    const float dist = abs(dot(diag, _icNormal));
    const vec3 projected = diag - (-dist * _icNormal);

    // Express the projected point in the IC's right/up basis; _icRightVector and _icUpVector
    // are expected to be scaled such that world units map directly to IC texel indices.
    return ivec2(dot(projected, _icRightVector), dot(projected, _icUpVector));
}
/**
 * Performs the raycasting and returns the final fragment color.
 */
vec4 performRaycasting(in vec3 entryPoint, in vec3 exitPoint, in vec2 texCoords) {
    vec4 result = vec4(0.0);
    float firstHitT = -1.0;

    // calculate ray parameters
    vec3 direction = exitPoint.rgb - entryPoint.rgb;
    float t = 0.0;
    float tend = length(direction);
    direction = normalize(direction);
    jitterEntryPoint(entryPoint, direction, _samplingStepSize * _jitterStepSizeMultiplier);

    ivec2 icPositionPrev = calcIcSamplePosition(textureToWorld(_volumeTextureParams, vec4(entryPoint, 1.0)).xyz);
    vec4 icOut = vec4(0.0);

    while (t < tend) {
        // compute sample position
        vec3 samplePosition = entryPoint.rgb + t * direction;
        vec4 worldPos = textureToWorld(_volumeTextureParams, vec4(samplePosition, 1.0));
        ivec2 icPosition = calcIcSamplePosition(worldPos.xyz / worldPos.w);
        vec4 icIn = imageLoad(_icImageIn, icPositionPrev);

        // lookup intensity and TF
        float intensity = texture(_volume, samplePosition).r;
        vec4 color = lookupTF(_transferFunction, _transferFunctionParams, intensity);

        // perform compositing
        if (color.a > 0.0) {
            // compute gradient (needed for shading and normals)
            vec3 gradient = computeGradient(_volume, _volumeTextureParams, samplePosition);
            color.rgb = calculatePhongShading(worldPos.xyz / worldPos.w, _lightSource, _cameraPosition, gradient, color.rgb);

            // account for variable sampling rates
            color.a = 1.0 - pow(1.0 - color.a, _samplingStepSize * SAMPLING_BASE_INTERVAL_RCP);

            // perform global illumination:
            // back-to-front compositing along the light direction
            icOut.rgb = ((1.0 - color.a) * icIn.rgb) + (color.a * color.rgb);
            icOut.a = ((1.0 - color.a) * icIn.a) + color.a;

            // apply shadowing
            color.rgb *= (1.0 - icOut.a * _shadowIntensity);

            // front-to-back compositing along the view direction
            result.rgb = result.rgb + color.rgb * color.a * (1.0 - result.a);
            result.a = result.a + (1.0 - result.a) * color.a;

            // update illumination information
            imageStore(_icImageOut, icPosition, icOut);
            icPositionPrev = icPosition;
        }

        // save first hit ray parameter for depth value calculation
        if (firstHitT < 0.0 && result.a > 0.0) {
            firstHitT = t;
            out_FHP = vec4(samplePosition, 1.0);
            out_FHN = vec4(icPosition, 0.0, 0.0);   // vec4(normalize(computeGradient(_volume, _volumeTextureParams, samplePosition)), 1.0);
        }

        // early ray termination (disabled: later samples still have to update the illumination cache)
        //if (result.a > 0.975) {
        //    result.a = 1.0;
        //    t = tend;
        //}

        // advance to the next evaluation point along the ray
        t += _samplingStepSize;
    }

    // calculate depth value from ray parameter
    gl_FragDepth = 1.0;
    if (firstHitT >= 0.0) {
        float depthEntry = texture(_entryPointsDepth, texCoords).z;
        float depthExit = texture(_exitPointsDepth, texCoords).z;
        gl_FragDepth = calculateDepthValue(firstHitT / tend, depthEntry, depthExit);
    }
    return result;
}
/***
 * The main method.
 ***/
void main() {
    vec2 p = gl_FragCoord.xy * _viewportSizeRCP;
    vec3 frontPos = texture(_entryPoints, p).rgb;
    vec3 backPos = texture(_exitPoints, p).rgb;

    // determine whether the ray has to be cast at all
    if (frontPos == backPos) {
        // background pixels need no raycasting
        discard;
    } else {
        // the fragment lies inside the bounding box
        out_Color = performRaycasting(frontPos, backPos, p);
    }
}
// ================================================================================================
//
// This file is part of the CAMPVis Software Framework.
//
// If not explicitly stated otherwise: Copyright (C) 2012-2015, all rights reserved,
// Christian Schulte zu Berge <christian.szb@in.tum.de>
// Chair for Computer Aided Medical Procedures
// Technische Universitaet Muenchen
// Boltzmannstr. 3, 85748 Garching b. Muenchen, Germany
//
// For a full list of authors and contributors, please refer to the file "AUTHORS.txt".
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
//
// ================================================================================================
#ifndef IPSVIRAYCASTER_H__
#define IPSVIRAYCASTER_H__
#include "core/pipeline/raycastingprocessor.h"
#include "core/properties/floatingpointproperty.h"
#include "core/properties/genericproperty.h"
#include "core/properties/transferfunctionproperty.h"
#include "modules/modulesapi.h"
#include <string>
namespace cgt {
class Shader;
}
namespace campvis {
    /**
     * Performs volume ray casting with Image Plane Sweep Volume Illumination (IPSVI)
     * as proposed by Sundén et al.
     */
    class CAMPVIS_MODULES_API IpsviRaycaster : public RaycastingProcessor {
    public:
        /**
         * Constructs a new IpsviRaycaster Processor
         **/
        explicit IpsviRaycaster(IVec2Property* viewportSizeProp);

        /**
         * Destructor
         **/
        virtual ~IpsviRaycaster();

        /**
         * To be used in ProcessorFactory static methods
         */
        static const std::string getId() { return "IpsviRaycaster"; };
        /// \see AbstractProcessor::getName()
        virtual const std::string getName() const { return getId(); };
        /// \see AbstractProcessor::getDescription()
        virtual const std::string getDescription() const { return "Performs volume ray casting with Image Plane Sweep Volume Illumination (IPSVI)."; };
        /// \see AbstractProcessor::getAuthor()
        virtual const std::string getAuthor() const { return "Christian Schulte zu Berge <christian.szb@in.tum.de>"; };
        /// \see AbstractProcessor::getProcessorState()
        virtual ProcessorState getProcessorState() const { return AbstractProcessor::TESTING; };

        /// \see AbstractProcessor::init
        virtual void init();
        /// \see AbstractProcessor::deinit
        virtual void deinit();

        DataNameProperty p_lightId;         ///< Name/ID for the LightSource to use
        IntProperty p_sweepLineWidth;       ///< Width of the sweep lines on the image plane
        FloatProperty p_shadowIntensity;    ///< Intensity of the shadowing applied from the illumination cache

    protected:
        /// \see RaycastingProcessor::processImpl()
        virtual void processImpl(DataContainer& data, ImageRepresentationGL::ScopedRepresentation& image);

        static const std::string loggerCat_;
    };

}
#endif // IPSVIRAYCASTER_H__
@@ -39,6 +39,7 @@
#include "modules/vis/processors/drrraycaster.h"
#include "modules/vis/processors/eepgenerator.h"
#include "modules/vis/processors/geometryrenderer.h"
#include "modules/vis/processors/ipsviraycaster.h"
#include "modules/vis/processors/mprrenderer.h"
#include "modules/vis/processors/orientationoverlay.h"
#include "modules/vis/processors/proxygeometrygenerator.h"
@@ -71,6 +72,7 @@ namespace campvis {
template class SmartProcessorRegistrar<DRRRaycaster>;
template class SmartProcessorRegistrar<EEPGenerator>;
template class SmartProcessorRegistrar<GeometryRenderer>;
template class SmartProcessorRegistrar<IpsviRaycaster>;
template class SmartProcessorRegistrar<MprRenderer>;
template class SmartProcessorRegistrar<OrientationOverlay>;
template class SmartProcessorRegistrar<ProxyGeometryGenerator>;