
Commit 8024e0ec authored by Christian Schulte zu Berge

Further work on neuro module.

parent 02a36abc
@@ -22,7 +22,9 @@
//
// ================================================================================================
in vec3 ex_TexCoord;
in vec3 geom_TexCoord;
noperspective in vec3 geom_EdgeDistance;
out vec4 out_Color;
#include "tools/texture3d.frag"
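Note: the fragment shader now receives its texture coordinate from the geometry shader stage (geom_TexCoord) together with per-fragment distances to the triangle edges (geom_EdgeDistance). The edge distances are declared noperspective because they are screen-space quantities; the default perspective-correct interpolation would skew them with depth and make the wireframe line width vary across a triangle. A minimal CPU-side sketch of the difference, with made-up attribute values and clip-space w (all names here are illustrative, not part of the module):

```cpp
#include <cstdio>

// Illustrative only: linear (screen-space) vs. perspective-correct interpolation
// of a vertex attribute, with made-up attribute values and clip-space w.
float linearInterp(float a0, float a1, float t) {
    return (1.0f - t) * a0 + t * a1;                  // what 'noperspective' does
}

float perspectiveInterp(float a0, float w0, float a1, float w1, float t) {
    // default perspective-correct interpolation
    float num = (1.0f - t) * a0 / w0 + t * a1 / w1;
    float den = (1.0f - t) / w0 + t / w1;
    return num / den;
}

int main() {
    // attribute 0 at the near vertex (w = 1), 10 at the far vertex (w = 10)
    std::printf("noperspective      : %.2f\n", linearInterp(0.f, 10.f, 0.5f));                  // 5.00
    std::printf("perspective-correct: %.2f\n", perspectiveInterp(0.f, 1.f, 10.f, 10.f, 0.5f));  // ~0.91
    return 0;
}
```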
@@ -44,6 +46,9 @@ uniform TFParameters1D _transferFunctionParams1;
uniform TFParameters1D _transferFunctionParams2;
uniform TFParameters1D _transferFunctionParams3;
uniform vec4 _wireframeColor;
uniform float _lineWidth = 1.0;
uniform float _transparency;
vec4 lookupTexture(vec3 worldPosition, sampler3D volume, TextureParameters3D volumeParams, sampler1D tf, TFParameters1D tfParams) {
vec3 texCoord = worldToTexture(volumeParams, worldPosition).xyz;
@@ -55,13 +60,25 @@ vec4 lookupTexture(vec3 worldPosition, sampler3D volume, TextureParameters3D vol
}
void main() {
vec4 color1 = lookupTexture(ex_TexCoord, _volume1, _volumeParams1, _transferFunction1, _transferFunctionParams1);
vec4 color2 = lookupTexture(ex_TexCoord, _volume2, _volumeParams2, _transferFunction2, _transferFunctionParams2);
vec4 color3 = lookupTexture(ex_TexCoord, _volume3, _volumeParams3, _transferFunction3, _transferFunctionParams3);
vec4 color1 = lookupTexture(geom_TexCoord, _volume1, _volumeParams1, _transferFunction1, _transferFunctionParams1);
vec4 color2 = lookupTexture(geom_TexCoord, _volume2, _volumeParams2, _transferFunction2, _transferFunctionParams2);
vec4 color3 = lookupTexture(geom_TexCoord, _volume3, _volumeParams3, _transferFunction3, _transferFunctionParams3);
out_Color = color1 + color2 + color3;
if (out_Color.w > 1.0)
out_Color /= out_Color.w;
out_Color = vec4(mix(out_Color.rgb, vec3(0.0, 0.0, 0.0), out_Color.a), 1.0);
//out_Color = vec4(mix(out_Color.rgb, vec3(0.0, 0.0, 0.0), out_Color.a), 1.0);
out_Color = vec4(mix(out_Color.rgb, vec3(0.0, 0.0, 0.0), out_Color.a), max(out_Color.a, 1.0 - _transparency));
#ifdef WIREFRAME_RENDERING
// Find the smallest distance to the edges
float d = min(geom_EdgeDistance.x, min(geom_EdgeDistance.y, geom_EdgeDistance.z)) * 2.0;
// perform anti-aliasing
float mixVal = smoothstep(_lineWidth - 1.0, _lineWidth + 1.0, d);
// Mix the surface color with the line color
out_Color = mix(vec4(1.0), out_Color, mixVal);
#endif
}
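Note: main() blends the three transfer-function lookups as before, but the final opacity is now clamped to at least 1.0 - _transparency, and an optional wireframe overlay is compiled in when WIREFRAME_RENDERING is defined: the smallest of the three edge distances is pushed through smoothstep around _lineWidth to obtain an anti-aliased mask that mixes the constant white line color vec4(1.0) over the surface color. A small CPU-side sketch of that blend math, assuming the edge distance is already given in pixels (smoothstepf and the sample values are illustrative):

```cpp
#include <algorithm>
#include <cstdio>

// CPU-side sketch of the wireframe blend above (names and values are illustrative).
static float smoothstepf(float edge0, float edge1, float x) {
    float t = std::min(std::max((x - edge0) / (edge1 - edge0), 0.0f), 1.0f);
    return t * t * (3.0f - 2.0f * t);                 // same polynomial as GLSL smoothstep
}

int main() {
    const float lineWidth = 1.0f;                     // corresponds to _lineWidth
    const float samples[] = { 0.0f, 0.5f, 1.0f, 2.0f, 4.0f };
    for (float edgeDistance : samples) {
        float d = edgeDistance * 2.0f;                // the shader doubles the minimum edge distance
        float mixVal = smoothstepf(lineWidth - 1.0f, lineWidth + 1.0f, d);
        // mixVal == 0 -> pure line color, mixVal == 1 -> pure surface color
        std::printf("distance %.1f px -> surface weight %.3f\n", edgeDistance, mixVal);
    }
    return 0;
}
```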
@@ -114,7 +114,7 @@ vec2 clipVolume(usampler2D vhTexture, int vhMaxMipmapLevel, TextureParameters3D
if (firstHitT < 0.0 && result.a > 0.01) { \
firstHitT = tNear; \
out_FHP = vec4(worldPosition, 1.0); \
out_FHN = vec4(gradient, 1.0); \
out_FHN = vec4(normalize(gradient), 1.0); \
} \
} \
} \
......
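Note: the gradient stored as the first-hit normal (out_FHN) is now normalized, so downstream shading that treats it as a unit normal no longer depends on the local gradient magnitude. A sketch of the same operation on the CPU, using a plain float array (normalizeGradient is an illustrative helper, not part of the module):

```cpp
#include <array>
#include <cmath>

// Sketch of the change: normalize the gradient before storing it as the
// first-hit normal, so later shading can rely on unit length.
std::array<float, 3> normalizeGradient(std::array<float, 3> g) {
    float len = std::sqrt(g[0] * g[0] + g[1] * g[1] + g[2] * g[2]);
    if (len > 0.0f) {                                 // guard against zero-length gradients
        g[0] /= len; g[1] /= len; g[2] /= len;
    }
    return g;
}
```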
@@ -70,19 +70,19 @@ namespace campvis {
_tcp.p_image.setValue("ct.image");
_renderTargetID.setValue("composed");
_t1Reader.p_url.setValue(ShdrMgr.completePath("D:/Medical Data/K_export/K_Data/K_T1_bet04.GB306.am"));
_t1Reader.p_url.setValue(ShdrMgr.completePath("D:/Medical Data/K_Data/K_T1_bet04.GB306.am"));
_t1Reader.p_targetImageID.setValue("t1_tf.image");
_t1Reader.p_targetImageID.addSharedProperty(&_mvmpr.p_sourceImage1);
_t1Reader.p_targetImageID.addSharedProperty(&_mvr.p_sourceImage1);
_t1Reader.s_validated.connect(this, &NeuroDemo::onReaderValidated);
_ctReader.p_url.setValue(ShdrMgr.completePath("D:/Medical Data/K_export/K_Data/K_CT_CoregT1.am"));
_ctReader.p_url.setValue(ShdrMgr.completePath("D:/Medical Data/K_Data/K_CT_CoregT1.am"));
_ctReader.p_targetImageID.setValue("ct.image");
_ctReader.p_targetImageID.addSharedProperty(&_mvmpr.p_sourceImage2);
_ctReader.p_targetImageID.addSharedProperty(&_mvr.p_sourceImage2);
_ctReader.s_validated.connect(this, &NeuroDemo::onReaderValidated);
_petReader.p_url.setValue(ShdrMgr.completePath("D:/Medical Data/K_export/K_Data/K_PET-CoregNMI_fl.am"));
_petReader.p_url.setValue(ShdrMgr.completePath("D:/Medical Data/K_Data/K_PET-CoregNMI_fl.am"));
_petReader.p_targetImageID.setValue("pet.image");
_petReader.p_targetImageID.addSharedProperty(&_mvmpr.p_sourceImage3);
_petReader.p_targetImageID.addSharedProperty(&_mvr.p_sourceImage3);
@@ -107,7 +107,7 @@ namespace campvis {
_mvmpr.p_relativeToImageCenter.setValue(false);
_mvmpr.p_use2DProjection.setValue(false);
_mvmpr.p_planeSize.setValue(150.f);
_mvmpr.p_planeSize.setValue(200.f);
_mvmpr.p_outputImageId.setValue("result.mpr");
_mvmpr.p_outputImageId.addSharedProperty(&_rtc.p_firstImageId);
_mvmpr.p_outputImageId.addSharedProperty(&_mvr.p_geometryImageId);
......
@@ -61,6 +61,9 @@ namespace neuro {
, p_planeSize("PlaneSize", "Clipping Plane Size", 100.f, 0.f, 1000.f, 1.f, 1)
, p_use2DProjection("Use3dRendering", "Use 3D Rendering instead of 2D", true)
, p_relativeToImageCenter("RelativeToImageCenter", "Construct Plane Relative to Image Center", true)
, p_showWireframe("ShowWireframe", "Show Wireframe", true)
, p_lineWidth("LineWidth", "Line Width", 1.f, .1f, 10.f)
, p_transparency("Transparency", "Minimum Transparency", 0.5f, 0.f, 1.f)
, _shader(nullptr)
{
addProperty(p_sourceImage1, INVALID_PROPERTIES | INVALID_RESULT);
@@ -78,6 +81,10 @@ namespace neuro {
addProperty(p_planeSize);
addProperty(p_use2DProjection, INVALID_RESULT | INVALID_PROPERTIES);
addProperty(p_relativeToImageCenter);
addProperty(p_showWireframe, INVALID_RESULT | INVALID_SHADER);
addProperty(p_lineWidth);
addProperty(p_transparency);
}
MultiVolumeMprRenderer::~MultiVolumeMprRenderer() {
@@ -87,7 +94,7 @@ namespace neuro {
void MultiVolumeMprRenderer::init() {
VisualizationProcessor::init();
_shader = ShdrMgr.loadWithCustomGlslVersion("core/glsl/passthrough.vert", "", "modules/neuro/glsl/multivolumemprrenderer.frag", generateHeader(), "400");
_shader = ShdrMgr.load("modules/vis/glsl/geometryrenderer.vert", "modules/vis/glsl/geometryrenderer.geom", "modules/neuro/glsl/multivolumemprrenderer.frag", generateHeader());
if (_shader != nullptr) {
_shader->setAttributeLocation(0, "in_Position");
_shader->setAttributeLocation(1, "in_TexCoord");
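Note: instead of the pass-through vertex shader with a fixed GLSL version, the MPR renderer now reuses the geometry renderer's vertex and geometry shaders. Presumably the geometry shader is the stage that computes, per triangle, each vertex's screen-space distance to the opposite edge and hands it on as geom_EdgeDistance. A standalone sketch of that computation for a single triangle already given in viewport (pixel) coordinates (Vec2 and edgeDistances are illustrative names, not the module's API):

```cpp
#include <array>
#include <cmath>
#include <cstdio>

// Standalone sketch (not the actual geometry shader): per-vertex distances to the
// opposite triangle edge in viewport coordinates, the quantity that ends up in
// geom_EdgeDistance.
struct Vec2 { float x, y; };

std::array<float, 3> edgeDistances(Vec2 a, Vec2 b, Vec2 c) {
    auto length = [](Vec2 p, Vec2 q) { return std::hypot(q.x - p.x, q.y - p.y); };
    // twice the triangle area via the 2D cross product
    float area2 = std::fabs((b.x - a.x) * (c.y - a.y) - (c.x - a.x) * (b.y - a.y));
    // distance of each vertex to the opposite edge = 2 * area / edge length
    return { area2 / length(b, c),                    // distance of a to edge bc
             area2 / length(c, a),                    // distance of b to edge ca
             area2 / length(a, b) };                  // distance of c to edge ab
}

int main() {
    std::array<float, 3> d = edgeDistances({ 0.f, 0.f }, { 100.f, 0.f }, { 0.f, 50.f });
    std::printf("%.1f %.1f %.1f\n", d[0], d[1], d[2]);  // 44.7 100.0 50.0
    return 0;
}
```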
@@ -143,7 +150,13 @@ namespace neuro {
// perform the rendering
glEnable(GL_DEPTH_TEST);
_shader->activate();
cgt::Shader::IgnoreUniformLocationErrorGuard guard(_shader);
_shader->setUniform("_lineWidth", p_lineWidth.getValue());
_shader->setUniform("_transparency", p_transparency.getValue());
// calculate viewport matrix for NDC -> viewport conversion
cgt::vec2 halfViewport = cgt::vec2(getEffectiveViewportSize()) / 2.f;
cgt::mat4 viewportMatrix = cgt::mat4::createTranslation(cgt::vec3(halfViewport, 0.f)) * cgt::mat4::createScale(cgt::vec3(halfViewport, 1.f));
_shader->setUniform("_viewportMatrix", viewportMatrix);
if (p_use2DProjection.getValue()) {
// generate a camera position that simulates 2D rendering
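Note: the IgnoreUniformLocationErrorGuard likely suppresses warnings for uniforms that the compiler optimizes out (e.g. _lineWidth when WIREFRAME_RENDERING is not defined). The viewport matrix built here is translate(halfViewport) * scale(halfViewport), i.e. the usual NDC-to-pixel mapping, so the geometry shader can express edge distances and _lineWidth in pixels. A small sketch of that mapping (Vec2 and ndcToViewport are illustrative, not part of cgt):

```cpp
#include <cstdio>

// Sketch of what the viewport matrix encodes: map NDC coordinates in [-1, 1] to
// pixel coordinates in [0, viewportSize], i.e. translate(halfVp) * scale(halfVp).
struct Vec2 { float x, y; };

Vec2 ndcToViewport(Vec2 ndc, Vec2 viewportSize) {
    Vec2 half = { viewportSize.x * 0.5f, viewportSize.y * 0.5f };
    return { ndc.x * half.x + half.x,                 // scale by halfViewport, then translate
             ndc.y * half.y + half.y };
}

int main() {
    Vec2 p = ndcToViewport({ 0.5f, -1.0f }, { 800.f, 600.f });
    std::printf("(%.1f, %.1f)\n", p.x, p.y);          // (600.0, 0.0)
    return 0;
}
```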
@@ -193,7 +206,12 @@ namespace neuro {
}
std::string MultiVolumeMprRenderer::generateHeader() const {
return "";
std::string toReturn = "#define HAS_GEOMETRY_SHADER\n";
if (p_showWireframe.getValue())
toReturn += "#define WIREFRAME_RENDERING\n";
return toReturn;
}
void MultiVolumeMprRenderer::updateProperties(DataContainer& dataContainer) {
......
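Note: generateHeader() now always defines HAS_GEOMETRY_SHADER and adds WIREFRAME_RENDERING only when p_showWireframe is set, which is why that property was registered above with INVALID_SHADER: toggling it changes the generated header and forces a shader rebuild, whereas _lineWidth and _transparency are plain uniforms updated per frame. A minimal sketch of this define-based configuration pattern (standalone, not the campvis API; buildHeader is an illustrative name):

```cpp
#include <iostream>
#include <string>

// Minimal sketch: options that change the generated shader header force a
// recompile, options that only feed uniforms do not.
std::string buildHeader(bool showWireframe) {
    std::string header = "#define HAS_GEOMETRY_SHADER\n";
    if (showWireframe)
        header += "#define WIREFRAME_RENDERING\n";
    return header;
}

int main() {
    std::string current = buildHeader(true);
    bool showWireframe = false;                       // user toggles the wireframe property
    std::string updated = buildHeader(showWireframe);
    if (updated != current)
        std::cout << "header changed -> recompile shader\n";  // analogous to INVALID_SHADER
    return 0;
}
```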
@@ -96,6 +96,9 @@ namespace neuro {
BoolProperty p_use2DProjection; ///< Use 3D Rendering instead of 2D
BoolProperty p_relativeToImageCenter; ///< Flag whether to construct image plane relative to image center
BoolProperty p_showWireframe; ///< Show wire frame
FloatProperty p_lineWidth; ///< Line Width when rendering lines
FloatProperty p_transparency; ///< Minimum transparency of the rendered image
protected:
/// \see AbstractProcessor::updateResult
......