diff --git a/Sources/Rendering/Core/ImageCPRMapper/Constants.js b/Sources/Rendering/Core/ImageCPRMapper/Constants.js
new file mode 100644
index 00000000000..f5f9bf318cd
--- /dev/null
+++ b/Sources/Rendering/Core/ImageCPRMapper/Constants.js
@@ -0,0 +1,9 @@
+export const MipMode = {
+ MAX: 0,
+ MIN: 1,
+ AVERAGE: 2,
+};
+
+export default {
+ MipMode,
+};
diff --git a/Sources/Rendering/Core/ImageCPRMapper/Constants.ts b/Sources/Rendering/Core/ImageCPRMapper/Constants.ts
new file mode 100644
index 00000000000..96163e9875e
--- /dev/null
+++ b/Sources/Rendering/Core/ImageCPRMapper/Constants.ts
@@ -0,0 +1,10 @@
+export declare enum MipMode {
+ MAX = 0,
+ MIN = 1,
+ AVERAGE = 2,
+}
+
+declare const _default: {
+ MipMode: typeof MipMode;
+};
+export default _default;
diff --git a/Sources/Rendering/Core/ImageCPRMapper/example/controller.html b/Sources/Rendering/Core/ImageCPRMapper/example/controller.html
index df509ac62b1..57b23d8fc52 100644
--- a/Sources/Rendering/Core/ImageCPRMapper/example/controller.html
+++ b/Sources/Rendering/Core/ImageCPRMapper/example/controller.html
@@ -1,16 +1,56 @@
diff --git a/Sources/Rendering/Core/ImageCPRMapper/example/index.js b/Sources/Rendering/Core/ImageCPRMapper/example/index.js
index 6907fd2a6a8..2d7ca5aa682 100644
--- a/Sources/Rendering/Core/ImageCPRMapper/example/index.js
+++ b/Sources/Rendering/Core/ImageCPRMapper/example/index.js
@@ -29,6 +29,7 @@ import widgetBehavior from 'vtk.js/Sources/Widgets/Widgets3D/ResliceCursorWidget
import controlPanel from './controller.html';
import aortaJSON from './aorta_centerline.json';
import spineJSON from './spine_centerline.json';
+import { MipMode } from '../Constants';
const volumePath = `${__BASE_PATH__}/data/volume/LIDC2.vti`;
const centerlineJsons = { Aorta: aortaJSON, Spine: spineJSON };
@@ -44,8 +45,13 @@ const renderWindow = fullScreenRenderer.getRenderWindow();
fullScreenRenderer.addController(controlPanel);
const angleEl = document.getElementById('angle');
+const animateEl = document.getElementById('animate');
const centerlineEl = document.getElementById('centerline');
const modeEl = document.getElementById('mode');
+const mipEnableEl = document.getElementById('mipEnable');
+const mipModeEl = document.getElementById('mipMode');
+const mipThicknessEl = document.getElementById('mipThickness');
+const mipSamplesEl = document.getElementById('mipSamples');
const interactor = renderWindow.getInteractor();
interactor.setInteractorStyle(vtkInteractorStyleImage.newInstance());
@@ -354,6 +360,20 @@ angleEl.addEventListener('input', () =>
setAngleFromSlider(radiansFromDegrees(Number.parseFloat(angleEl.value, 10)))
);
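+// Optional animation: advance the rotation angle by 0.1 rad (~5.7 degrees) every 60 ms while the checkbox is ticked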
+let animationId;
+animateEl.addEventListener('change', () => {
+ if (animateEl.checked) {
+ animationId = setInterval(() => {
+ const currentAngle = radiansFromDegrees(
+ Number.parseFloat(angleEl.value)
+ );
+ setAngleFromSlider(currentAngle + 0.1);
+ }, 60);
+ } else {
+ clearInterval(animationId);
+ }
+});
+
function useStraightenedMode() {
mapper.useStraightenedMode();
updateDistanceAndDirection();
@@ -388,6 +408,38 @@ modeEl.appendChild(straightEl);
modeEl.addEventListener('input', () => setUseStretched(modeEl.value));
modeEl.value = 'straightened';
+mipEnableEl.addEventListener('change', () => {
+ mapper.setUseMip(mipEnableEl.checked);
+ renderWindow.render();
+});
+
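+// Populate the projection mode dropdown from the MipMode constants (Max, Min, Average)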
+Object.keys(MipMode).forEach((mipMode) => {
+ const optionEl = document.createElement('option');
+ optionEl.innerText = mipMode.charAt(0) + mipMode.substring(1).toLowerCase();
+ optionEl.value = mipMode;
+ mipModeEl.appendChild(optionEl);
+});
+
+mipModeEl.addEventListener('input', () => {
+ mapper.setMipMode(MipMode[mipModeEl.value]);
+ renderWindow.render();
+});
+
+mipThicknessEl.addEventListener('input', () => {
+ const thickness = Number.parseFloat(mipThicknessEl.value);
+ mapper.setMipSlabThickness(thickness);
+ renderWindow.render();
+});
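+// Start with a thin slab (in image units, typically millimeters) and reflect the mapper value in the slider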
+mapper.setMipSlabThickness(0.1);
+mipThicknessEl.value = mapper.getMipSlabThickness();
+
+mipSamplesEl.addEventListener('input', () => {
+ const samples = Number.parseInt(mipSamplesEl.value, 10);
+ mapper.setMipNumberOfSamples(samples);
+ renderWindow.render();
+});
+mipSamplesEl.value = mapper.getMipNumberOfSamples();
+
stretchViewWidgetInstance.onInteractionEvent(updateDistanceAndDirection);
crossViewWidgetInstance.onInteractionEvent(updateDistanceAndDirection);
diff --git a/Sources/Rendering/Core/ImageCPRMapper/index.d.ts b/Sources/Rendering/Core/ImageCPRMapper/index.d.ts
index ad37704e98a..7a220c93637 100644
--- a/Sources/Rendering/Core/ImageCPRMapper/index.d.ts
+++ b/Sources/Rendering/Core/ImageCPRMapper/index.d.ts
@@ -6,6 +6,7 @@ import vtkDataArray from "../../../Common/Core/DataArray";
import vtkImageData from "../../../Common/DataModel/ImageData";
import vtkPolyData from "../../../Common/DataModel/PolyData";
import vtkPolyLine from "../../../Common/DataModel/PolyLine";
+import { MipMode } from "./Constants";
interface ICoincidentTopology {
factor: number;
@@ -157,6 +158,59 @@ export interface vtkImageCPRMapper extends vtkAbstractMapper3D {
*/
setDirectionMatrix(mat: mat3): boolean;
+ /**
+ * Enable or disable Maximum Intensity Projection (MIP)
+ * The projection can be configured to use other modes than maximum intensity
+ * @see getMipSlabThickness
+ * @see getMipNumberOfSamples
+ * @see getMipMode
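+ *
+ * A minimal usage sketch, assuming `mapper` is an instance of this mapper:
+ * @example
+ * mapper.setUseMip(true);
+ * mapper.setMipMode(MipMode.AVERAGE);
+ * mapper.setMipSlabThickness(5); // slab thickness in image units (e.g. millimeters)
+ * mapper.setMipNumberOfSamples(11); // odd, so the slab center is sampled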
+ */
+ getUseMip(): boolean;
+
+ /**
+ * @see getUseMip
+ * @param useMip
+ */
+ setUseMip(useMip: boolean): boolean;
+
+ /**
+ * Thickness of the MIP slab in image coordinates (NOT in voxels)
+ * Usually in millimeters when the spacing of the input image comes from DICOM metadata
+ */
+ getMipSlabThickness(): number;
+
+ /**
+ * @see getMipSlabThickness
+ * @param mipSlabThickness
+ */
+ setMipSlabThickness(mipSlabThickness: number): boolean;
+
+ /**
+ * Total number of samples of the volume taken per pixel by the MIP mode
+ * Using an odd number is advised:
+ * if this number is even, the center of the slab will not be sampled
+ */
+ getMipNumberOfSamples(): number;
+
+ /**
+ * @see getMipNumberOfSamples
+ * @param mipNumberOfSamples
+ */
+ setMipNumberOfSamples(mipNumberOfSamples: number): boolean;
+
+ /**
+ * The projection mode
+ * Although the name MIP implies a maximum intensity projection,
+ * other modes are available, such as MinIP (minimum) and average intensity projection
+ */
+ getMipMode(): MipMode;
+
+ /**
+ * @see getMipMode
+ * @param mipMode
+ */
+ setMipMode(mipMode: MipMode): boolean;
+
/**
* Find the data array to use for orientation in the input polydata ( @see getOrientationArrayName )
*/
diff --git a/Sources/Rendering/Core/ImageCPRMapper/index.js b/Sources/Rendering/Core/ImageCPRMapper/index.js
index cb0d8bf2d14..be8b9614cb3 100644
--- a/Sources/Rendering/Core/ImageCPRMapper/index.js
+++ b/Sources/Rendering/Core/ImageCPRMapper/index.js
@@ -4,6 +4,7 @@ import vtkAbstractImageMapper from 'vtk.js/Sources/Rendering/Core/AbstractImageM
import macro from 'vtk.js/Sources/macros';
import vtkPoints from 'vtk.js/Sources/Common/Core/Points';
import vtkPolyLine from 'vtk.js/Sources/Common/DataModel/PolyLine';
+import { MipMode } from './Constants';
const { vtkErrorMacro } = macro;
@@ -103,7 +104,10 @@ function vtkImageCPRMapper(publicAPI, model) {
: orientationDataArray.getNumberOfComponents();
switch (numComps) {
case 16:
- convert = mat4.getRotation;
+ convert = (outQuat, inMat) => {
+ mat4.getRotation(outQuat, inMat);
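+ // getRotation may return a non-unit quaternion when the matrix includes scaling; normalize it so the downstream slerp stays valid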
+ quat.normalize(outQuat, outQuat);
+ };
break;
case 9:
convert = (outQuat, inMat) => {
@@ -335,6 +339,10 @@ const DEFAULT_VALUES = {
tangentDirection: [1, 0, 0],
bitangentDirection: [0, 1, 0],
normalDirection: [0, 0, 1],
+ useMip: false,
+ mipSlabThickness: 1,
+ mipNumberOfSamples: 31,
+ mipMode: MipMode.MAX,
};
// ----------------------------------------------------------------------------
@@ -362,6 +370,10 @@ export function extend(publicAPI, model, initialValues = {}) {
'tangentDirection',
'bitangentDirection',
'normalDirection',
+ 'useMip',
+ 'mipSlabThickness',
+ 'mipNumberOfSamples',
+ 'mipMode',
]);
CoincidentTopologyHelper.implementCoincidentTopologyMethods(publicAPI, model);
diff --git a/Sources/Rendering/OpenGL/ImageCPRMapper/index.js b/Sources/Rendering/OpenGL/ImageCPRMapper/index.js
index a6fa1d10cf1..b3cc13bcb45 100644
--- a/Sources/Rendering/OpenGL/ImageCPRMapper/index.js
+++ b/Sources/Rendering/OpenGL/ImageCPRMapper/index.js
@@ -1,15 +1,16 @@
import macro from 'vtk.js/Sources/macros';
import { mat4, vec3 } from 'gl-matrix';
-import vtkViewNode from 'vtk.js/Sources/Rendering/SceneGraph/ViewNode';
+import { Filter } from 'vtk.js/Sources/Rendering/OpenGL/Texture/Constants';
+import { InterpolationType } from 'vtk.js/Sources/Rendering/Core/ImageProperty/Constants';
+import { MipMode } from 'vtk.js/Sources/Rendering/Core/ImageCPRMapper/Constants';
+import { Representation } from 'vtk.js/Sources/Rendering/Core/Property/Constants';
+import { VtkDataTypes } from 'vtk.js/Sources/Common/Core/DataArray/Constants';
+import vtkDataArray from 'vtk.js/Sources/Common/Core/DataArray';
import vtkHelper from 'vtk.js/Sources/Rendering/OpenGL/Helper';
+import vtkOpenGLTexture from 'vtk.js/Sources/Rendering/OpenGL/Texture';
import vtkReplacementShaderMapper from 'vtk.js/Sources/Rendering/OpenGL/ReplacementShaderMapper';
import vtkShaderProgram from 'vtk.js/Sources/Rendering/OpenGL/ShaderProgram';
-import vtkOpenGLTexture from 'vtk.js/Sources/Rendering/OpenGL/Texture';
-import vtkDataArray from 'vtk.js/Sources/Common/Core/DataArray';
-import { VtkDataTypes } from 'vtk.js/Sources/Common/Core/DataArray/Constants';
-import { Representation } from 'vtk.js/Sources/Rendering/Core/Property/Constants';
-import { Filter } from 'vtk.js/Sources/Rendering/OpenGL/Texture/Constants';
-import { InterpolationType } from 'vtk.js/Sources/Rendering/Core/ImageProperty/Constants';
+import vtkViewNode from 'vtk.js/Sources/Rendering/SceneGraph/ViewNode';
import vtkPolyDataVS from 'vtk.js/Sources/Rendering/OpenGL/glsl/vtkPolyDataVS.glsl';
import vtkPolyDataFS from 'vtk.js/Sources/Rendering/OpenGL/glsl/vtkPolyDataFS.glsl';
@@ -451,46 +452,39 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
const customAttributes = [centerlinePosition, quadIndex];
if (!model.renderable.getUseUniformOrientation()) {
- // For each {quad / centerline segment}, two vectors in directionDataArray give the orientation of the centerline
- // Send these two vectors to each vertex and use flat interpolation to get them as is in the fragment shader
- // The interpolation will occur in the fragment shader (slerp)
- const directions = model.renderable.getCenterlineTangentDirections();
- const centerlineTopDirectionArray = new Float32Array(3 * nPts);
- const centerlineBotDirectionArray = new Float32Array(3 * nPts);
- for (let lineIdx = 0, offset = 0; lineIdx < nLines; ++lineIdx) {
- const baseDirectionIdx = 3 * lineIdx;
-
- // Every vertex of each quad/segment have the same topDir and botDir
- // Top left, Top right, Bottom right, Bottom left
- for (let i = 0; i < 4; ++i) {
- // Top array
- centerlineTopDirectionArray[offset + 0] =
- directions[baseDirectionIdx + 0];
- centerlineTopDirectionArray[offset + 1] =
- directions[baseDirectionIdx + 1];
- centerlineTopDirectionArray[offset + 2] =
- directions[baseDirectionIdx + 2];
- // Bot array
- centerlineBotDirectionArray[offset + 0] =
- directions[baseDirectionIdx + 3];
- centerlineBotDirectionArray[offset + 1] =
- directions[baseDirectionIdx + 4];
- centerlineBotDirectionArray[offset + 2] =
- directions[baseDirectionIdx + 5];
- offset += 3;
+ // For each quad (i.e. centerline segment), a top and bottom quaternion give the orientation
+ // Send both quaternions to each vertex of the quad so they reach the fragment shader unchanged
+ // The interpolation between the two quaternions (slerp) happens in the fragment shader
+ const orientationQuats =
+ model.renderable.getOrientedCenterline().getOrientations() ?? [];
+ const centerlineTopOrientationArray = new Float32Array(4 * nPts);
+ const centerlineBotOrientationArray = new Float32Array(4 * nPts);
+ for (let quadIdx = 0; quadIdx < nLines; ++quadIdx) {
+ // All vertices of a given quad share the same topQuat and botQuat
+ // Polyline goes from top to bottom
+ const topQuat = orientationQuats[quadIdx];
+ const botQuat = orientationQuats[quadIdx + 1];
+ for (let pointInQuadIdx = 0; pointInQuadIdx < 4; ++pointInQuadIdx) {
+ const pointIdx = pointInQuadIdx + 4 * quadIdx;
+ const quaternionArrayOffset = 4 * pointIdx;
+ centerlineTopOrientationArray.set(topQuat, quaternionArrayOffset);
+ centerlineBotOrientationArray.set(botQuat, quaternionArrayOffset);
}
}
- const centerlineTopDirection = vtkDataArray.newInstance({
- numberOfComponents: 3,
- values: centerlineTopDirectionArray,
- name: 'centerlineTopDirection',
+ const centerlineTopOrientation = vtkDataArray.newInstance({
+ numberOfComponents: 4,
+ values: centerlineTopOrientationArray,
+ name: 'centerlineTopOrientation',
});
- const centerlineBotDirection = vtkDataArray.newInstance({
- numberOfComponents: 3,
- values: centerlineBotDirectionArray,
- name: 'centerlineBotDirection',
+ const centerlineBotOrientation = vtkDataArray.newInstance({
+ numberOfComponents: 4,
+ values: centerlineBotOrientationArray,
+ name: 'centerlineBotOrientation',
});
- customAttributes.push(centerlineTopDirection, centerlineBotDirection);
+ customAttributes.push(
+ centerlineTopOrientation,
+ centerlineBotOrientation
+ );
}
model.tris.getCABO().createVBO(cells, 'polys', Representation.SURFACE, {
@@ -515,17 +509,21 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
const iComp = actor.getProperty().getIndependentComponents();
const useCenterPoint = !!model.renderable.getCenterPoint();
const useUniformOrientation = model.renderable.getUseUniformOrientation();
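+ // false when MIP is disabled, otherwise the current MipMode; any change forces the shader rebuild below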
+ const mipMode =
+ model.renderable.getUseMip() && model.renderable.getMipMode();
if (
cellBO.getProgram() === 0 ||
model.lastUseCenterPoint !== useCenterPoint ||
model.lastUseUniformOrientation !== useUniformOrientation ||
+ model.lastMipMode !== mipMode ||
model.lastHaveSeenDepthRequest !== model.haveSeenDepthRequest ||
model.lastTextureComponents !== tNumComp ||
model.lastIndependentComponents !== iComp
) {
model.lastUseCenterPoint = useCenterPoint;
model.lastUseUniformOrientation = useUniformOrientation;
+ model.lastMipMode = mipMode;
model.lastHaveSeenDepthRequest = model.haveSeenDepthRequest;
model.lastTextureComponents = tNumComp;
model.lastIndependentComponents = iComp;
@@ -544,6 +542,26 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
let VSSource = shaders.Vertex;
let FSSource = shaders.Fragment;
+ // https://glmatrix.net/docs/vec3.js.html#line522
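+ // Rotates v by the unit quaternion q: v' = v + 2.0 * q.w * cross(q.xyz, v) + 2.0 * cross(q.xyz, cross(q.xyz, v))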
+ const applyQuaternionToVecShaderFunction = [
+ 'vec3 applyQuaternionToVec(vec4 q, vec3 v) {',
+ ' float uvx = q.y * v.z - q.z * v.y;',
+ ' float uvy = q.z * v.x - q.x * v.z;',
+ ' float uvz = q.x * v.y - q.y * v.x;',
+ ' float uuvx = q.y * uvz - q.z * uvy;',
+ ' float uuvy = q.z * uvx - q.x * uvz;',
+ ' float uuvz = q.x * uvy - q.y * uvx;',
+ ' float w2 = q.w * 2.0;',
+ ' uvx *= w2;',
+ ' uvy *= w2;',
+ ' uvz *= w2;',
+ ' uuvx *= 2.0;',
+ ' uuvy *= 2.0;',
+ ' uuvz *= 2.0;',
+ ' return vec3(v.x + uvx + uuvx, v.y + uvy + uuvy, v.z + uvz + uuvz);',
+ '}',
+ ];
+
// Vertex shader main replacements
VSSource = vtkShaderProgram.substitute(VSSource, '//VTK::Camera::Dec', [
'uniform mat4 MCPCMatrix;',
@@ -561,19 +579,27 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
'out vec2 quadOffsetVSOutput;',
'out vec3 centerlinePosVSOutput;',
];
+ const useMip = model.renderable.getUseMip();
const isDirectionUniform = model.renderable.getUseUniformOrientation();
if (isDirectionUniform) {
vsColorDec.push(
- 'out vec3 centerlineDirVSOutput;',
- 'uniform vec3 centerlineDirection;'
+ 'out vec3 samplingDirVSOutput;',
+ 'uniform vec4 centerlineOrientation;',
+ 'uniform vec3 tangentDirection;',
+ ...applyQuaternionToVecShaderFunction
);
+ if (useMip) {
+ vsColorDec.push(
+ 'out vec3 mipDirVSOutput;',
+ 'uniform vec3 bitangentDirection;'
+ );
+ }
} else {
vsColorDec.push(
- 'out vec3 centerlineTopDirVSOutput;',
- 'out vec3 centerlineBotDirVSOutput;',
- 'out float centerlineAngleVSOutput;',
- 'attribute vec3 centerlineTopDirection;',
- 'attribute vec3 centerlineBotDirection;'
+ 'out vec4 centerlineTopOrientationVSOutput;',
+ 'out vec4 centerlineBotOrientationVSOutput;',
+ 'attribute vec4 centerlineTopOrientation;',
+ 'attribute vec4 centerlineBotOrientation;'
);
}
VSSource = vtkShaderProgram.substitute(
@@ -589,35 +615,18 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
'centerlinePosVSOutput = centerlinePosition;',
];
if (isDirectionUniform) {
- vsColorImpl.push('centerlineDirVSOutput = centerlineDirection;');
+ vsColorImpl.push(
+ 'samplingDirVSOutput = applyQuaternionToVec(centerlineOrientation, tangentDirection);'
+ );
+ if (useMip) {
+ vsColorImpl.push(
+ 'mipDirVSOutput = applyQuaternionToVec(centerlineOrientation, bitangentDirection);'
+ );
+ }
} else {
vsColorImpl.push(
- // When u and v are unit vectors: uvAngle = 2 * atan2(|| u - v ||, || u + v ||)
- // When u != -v: || u + v || > 0
- // When x > 0: atan2(y, x) = atan(y/x)
- // Thus: dirAngle = 2 * atan(|| topDir - botDir || / || topDir + botDir ||)
- // This is more stable and should not be to slow compared to acos(dot(u, v))
- 'vec3 sumVec = centerlineTopDirection + centerlineBotDirection;',
- 'float sumLen2 = dot(sumVec, sumVec);',
- 'float diffLen2 = 4.0 - sumLen2;',
- 'if (diffLen2 < 0.001) {',
- ' // vectors are too close to each other, use lerp',
- ' centerlineAngleVSOutput = -1.0; // use negative angle as a flag for lerp',
- ' centerlineTopDirVSOutput = centerlineTopDirection;',
- ' centerlineBotDirVSOutput = centerlineBotDirection;',
- '} else if (sumLen2 == 0.0) {',
- " // vector are opposite to each other, don't make a choice for the user",
- ' // use slerp without direction, it will display the centerline color on each row of pixel',
- ' centerlineAngleVSOutput = 0.0;',
- ' centerlineTopDirVSOutput = vec3(0.0);',
- ' centerlineBotDirVSOutput = vec3(0.0);',
- '} else {',
- ' // use slerp',
- ' centerlineAngleVSOutput = 2.0 * atan(sqrt(diffLen2/sumLen2));',
- ' float sinAngle = sin(centerlineAngleVSOutput);',
- ' centerlineTopDirVSOutput = centerlineTopDirection / sinAngle;',
- ' centerlineBotDirVSOutput = centerlineBotDirection / sinAngle;',
- '}'
+ 'centerlineTopOrientationVSOutput = centerlineTopOrientation;',
+ 'centerlineBotOrientationVSOutput = centerlineBotOrientation;'
);
}
VSSource = vtkShaderProgram.substitute(
@@ -652,14 +661,29 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
`uniform float pwfshift0;`,
`uniform float pwfscale0;`,
];
+ if (useMip) {
+ tcoordFSDec.push(
+ 'uniform vec3 spacing;',
+ 'uniform int mipNumberOfSamples;',
+ 'uniform float mipConstantOffset;',
+ 'uniform float mipStepLength;'
+ );
+ }
if (isDirectionUniform) {
- tcoordFSDec.push('in vec3 centerlineDirVSOutput;');
+ tcoordFSDec.push('in vec3 samplingDirVSOutput;');
+ if (useMip) {
+ tcoordFSDec.push('in vec3 mipDirVSOutput;');
+ }
} else {
tcoordFSDec.push(
- 'in vec3 centerlineTopDirVSOutput;',
- 'in vec3 centerlineBotDirVSOutput;',
- 'in float centerlineAngleVSOutput;'
+ 'uniform vec3 tangentDirection;',
+ 'in vec4 centerlineTopOrientationVSOutput;',
+ 'in vec4 centerlineBotOrientationVSOutput;',
+ ...applyQuaternionToVecShaderFunction
);
+ if (useMip) {
+ tcoordFSDec.push('uniform vec3 bitangentDirection;');
+ }
}
const centerPoint = model.renderable.getCenterPoint();
if (centerPoint) {
@@ -730,47 +754,97 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
let tcoordFSImpl = [];
if (isDirectionUniform) {
- tcoordFSImpl.push(
- 'vec3 interpolatedCenterlineDir = centerlineDirVSOutput;'
- );
+ tcoordFSImpl.push('vec3 samplingDirection = samplingDirVSOutput;');
+ if (useMip) {
+ tcoordFSImpl.push('vec3 mipDirection = mipDirVSOutput;');
+ }
} else {
// Slerp or lerp between centerlineTopDirVSOutput and centerlineBotDirVSOutput
// We use quadOffsetVSOutput.y: bottom = 0.0; top = 1.0;
tcoordFSImpl.push(
- 'vec3 interpolatedCenterlineDir;',
- 'if (centerlineAngleVSOutput < 0.0) {',
- ' // Lerp',
- ' interpolatedCenterlineDir = quadOffsetVSOutput.y * centerlineTopDirVSOutput + (1.0 - quadOffsetVSOutput.y) * centerlineBotDirVSOutput;',
+ // Slerp / Lerp
+ 'vec4 q0 = centerlineBotOrientationVSOutput;',
+ 'vec4 q1 = centerlineTopOrientationVSOutput;',
+ 'float qCosAngle = dot(q0, q1);',
+ 'vec4 interpolatedOrientation;',
+ 'if (qCosAngle > 0.999 || qCosAngle < -0.999) {',
+ ' // Use LERP instead of SLERP when the two quaternions are close or opposite',
+ ' interpolatedOrientation = normalize(mix(q0, q1, quadOffsetVSOutput.y));',
'} else {',
- ' // Slerp',
- ' float topInterpolationAngle = quadOffsetVSOutput.y * centerlineAngleVSOutput;',
- ' float botInterpolationAngle = centerlineAngleVSOutput - topInterpolationAngle;',
- ' interpolatedCenterlineDir = sin(topInterpolationAngle) * centerlineTopDirVSOutput + sin(botInterpolationAngle) * centerlineBotDirVSOutput;',
+ ' float omega = acos(qCosAngle);',
+ ' interpolatedOrientation = normalize(sin((1.0 - quadOffsetVSOutput.y) * omega) * q0 + sin(quadOffsetVSOutput.y * omega) * q1);',
'}',
- '// Slerp should give a normalized vector but when sin(angle) is small, rounding error occurs',
- '// Normalize for both lerp and slerp',
- 'interpolatedCenterlineDir = normalize(interpolatedCenterlineDir);'
+ 'vec3 samplingDirection = applyQuaternionToVec(interpolatedOrientation, tangentDirection);'
);
+ if (useMip) {
+ tcoordFSImpl.push(
+ 'vec3 mipDirection = applyQuaternionToVec(interpolatedOrientation, bitangentDirection);'
+ );
+ }
}
if (centerPoint) {
tcoordFSImpl.push(
- 'float baseOffset = dot(interpolatedCenterlineDir, globalCenterPoint - centerlinePosVSOutput);',
+ 'float baseOffset = dot(samplingDirection, globalCenterPoint - centerlinePosVSOutput);',
'float horizontalOffset = quadOffsetVSOutput.x + baseOffset;'
);
} else {
tcoordFSImpl.push('float horizontalOffset = quadOffsetVSOutput.x;');
}
tcoordFSImpl.push(
- 'vec3 volumePosMC = centerlinePosVSOutput + horizontalOffset * interpolatedCenterlineDir;',
+ 'vec3 volumePosMC = centerlinePosVSOutput + horizontalOffset * samplingDirection;',
'vec3 volumePosTC = (MCTCMatrix * vec4(volumePosMC, 1.0)).xyz;',
'if (any(lessThan(volumePosTC, vec3(0.0))) || any(greaterThan(volumePosTC, vec3(1.0))))',
'{',
' // set the background color and exit',
' gl_FragData[0] = backgroundColor;',
' return;',
- '}',
- 'vec4 tvalue = texture(volumeTexture, volumePosTC);'
+ '}'
);
+
+ if (useMip) {
+ const mipMode = model.renderable.getMipMode();
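+ // Neutral initial value for the reduction: 1.0 for MIN, 0.0 for MAX and for the AVERAGE sum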
+ switch (mipMode) {
+ case MipMode.MIN:
+ tcoordFSImpl.push('const vec4 initialMipTextureValue = vec4(1.0);');
+ break;
+ case MipMode.MAX:
+ case MipMode.AVERAGE:
+ default:
+ tcoordFSImpl.push('const vec4 initialMipTextureValue = vec4(0.0);');
+ break;
+ }
+
+ // Loop over all the MIP samples
+ tcoordFSImpl.push(
+ 'vec3 mipScaledDirection = mipDirection / spacing;',
+ 'vec3 mipStep = mipStepLength * mipScaledDirection;',
+ 'vec3 mipStartPosition = volumePosTC + mipConstantOffset * mipScaledDirection;',
+ 'vec4 tvalue = initialMipTextureValue;',
+ 'for (int mipSampleIdx = 0; mipSampleIdx < mipNumberOfSamples; ++mipSampleIdx) {',
+ ' vec3 mipSamplePosition = mipStartPosition + float(mipSampleIdx) * mipStep;',
+ ' vec4 sampledTextureValue = texture(volumeTexture, mipSamplePosition);'
+ );
+ switch (mipMode) {
+ case MipMode.MAX:
+ tcoordFSImpl.push(' tvalue = max(tvalue, sampledTextureValue);');
+ break;
+ case MipMode.MIN:
+ tcoordFSImpl.push(' tvalue = min(tvalue, sampledTextureValue);');
+ break;
+ case MipMode.AVERAGE:
+ default:
+ tcoordFSImpl.push(' tvalue = tvalue + sampledTextureValue;');
+ break;
+ }
+ tcoordFSImpl.push('}');
+
+ // For the average mode, divide the accumulated sum by the number of samples
+ if (mipMode === MipMode.AVERAGE) {
+ tcoordFSImpl.push('tvalue = tvalue / float(mipNumberOfSamples);');
+ }
+ } else {
+ tcoordFSImpl.push('vec4 tvalue = texture(volumeTexture, volumePosTC);');
+ }
if (iComps) {
const rgba = ['r', 'g', 'b', 'a'];
for (let comp = 0; comp < tNumComp; ++comp) {
@@ -923,23 +997,25 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
};
publicAPI.setMapperShaderParameters = (cellBO, ren, actor) => {
+ const program = cellBO.getProgram();
+ const cellArrayBufferObject = cellBO.getCABO();
if (
- cellBO.getCABO().getElementCount() &&
+ cellArrayBufferObject.getElementCount() &&
(model.VBOBuildTime.getMTime() >
cellBO.getAttributeUpdateTime().getMTime() ||
cellBO.getShaderSourceTime().getMTime() >
cellBO.getAttributeUpdateTime().getMTime())
) {
- if (cellBO.getProgram().isAttributeUsed('vertexMC')) {
+ if (program.isAttributeUsed('vertexMC')) {
if (
!cellBO
.getVAO()
.addAttributeArray(
- cellBO.getProgram(),
- cellBO.getCABO(),
+ program,
+ cellArrayBufferObject,
'vertexMC',
- cellBO.getCABO().getVertexOffset(),
- cellBO.getCABO().getStride(),
+ cellArrayBufferObject.getVertexOffset(),
+ cellArrayBufferObject.getStride(),
model.context.FLOAT,
3,
model.context.FALSE
@@ -956,15 +1032,15 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
.forEach((data) => {
if (
data &&
- cellBO.getProgram().isAttributeUsed(data.name) &&
+ program.isAttributeUsed(data.name) &&
!cellBO
.getVAO()
.addAttributeArray(
- cellBO.getProgram(),
- cellBO.getCABO(),
+ program,
+ cellArrayBufferObject,
data.name,
data.offset,
- cellBO.getCABO().getStride(),
+ cellArrayBufferObject.getStride(),
model.context.FLOAT,
data.components,
model.context.FALSE
@@ -977,24 +1053,52 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
}
const texUnit = model.volumeTexture.getTextureUnit();
- cellBO.getProgram().setUniformi('volumeTexture', texUnit);
- cellBO.getProgram().setUniformf('width', model.renderable.getWidth());
+ program.setUniformi('volumeTexture', texUnit);
+ program.setUniformf('width', model.renderable.getWidth());
cellBO
.getProgram()
- .setUniform4f(
- 'backgroundColor',
- ...model.renderable.getBackgroundColor()
- );
+ .setUniform4fv('backgroundColor', model.renderable.getBackgroundColor());
- if (cellBO.getProgram().isUniformUsed('centerlineDirection')) {
- const uniformDirection = model.renderable.getUniformDirection();
+ if (program.isUniformUsed('tangentDirection')) {
+ const tangentDirection = model.renderable.getTangentDirection();
+ program.setUniform3fArray('tangentDirection', tangentDirection);
+ }
+ if (program.isUniformUsed('bitangentDirection')) {
+ const bitangentDirection = model.renderable.getBitangentDirection();
+ program.setUniform3fArray('bitangentDirection', bitangentDirection);
+ }
+ if (program.isUniformUsed('centerlineOrientation')) {
+ const uniformOrientation = model.renderable.getUniformOrientation();
cellBO
.getProgram()
- .setUniform3fArray('centerlineDirection', uniformDirection);
+ .setUniform4fv('centerlineOrientation', uniformOrientation);
}
- if (cellBO.getProgram().isUniformUsed('globalCenterPoint')) {
+ if (program.isUniformUsed('globalCenterPoint')) {
const centerPoint = model.renderable.getCenterPoint();
- cellBO.getProgram().setUniform3fArray('globalCenterPoint', centerPoint);
+ program.setUniform3fArray('globalCenterPoint', centerPoint);
+ }
+ // Mip uniforms
+ if (model.renderable.getUseMip()) {
+ const image = model.currentImageDataInput;
+ const spacing = image.getSpacing();
+ const mipSlabThickness = model.renderable.getMipSlabThickness();
+ const mipNumberOfSamples = model.renderable.getMipNumberOfSamples();
+
+ program.setUniform3fArray('spacing', spacing);
+ program.setUniformi('mipNumberOfSamples', mipNumberOfSamples);
+ if (mipNumberOfSamples > 1) {
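+ // Center the slab on the CPR plane: samples span [-thickness / 2, +thickness / 2] in (n - 1) equal steps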
+ const constantOffset = -0.5 * mipSlabThickness;
+ program.setUniformf('mipConstantOffset', constantOffset);
+ const stepLength = mipSlabThickness / (mipNumberOfSamples - 1);
+ program.setUniformf('mipStepLength', stepLength);
+ } else {
+ program.setUniformf('mipConstantOffset', 0);
+ program.setUniformf('mipStepLength', 0);
+ }
}
// Model coordinates to image space
@@ -1008,7 +1112,7 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
vec3.inverse([], image.getDimensions())
);
const MCTCMatrix = mat4.mul(ICTCMatrix, ICTCMatrix, MCICMatrix);
- cellBO.getProgram().setUniformMatrix('MCTCMatrix', MCTCMatrix);
+ program.setUniformMatrix('MCTCMatrix', MCTCMatrix);
if (model.haveSeenDepthRequest) {
cellBO
@@ -1024,9 +1128,10 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
numClipPlanes = 6;
}
- const shiftScaleEnabled = cellBO.getCABO().getCoordShiftAndScaleEnabled();
+ const shiftScaleEnabled =
+ cellArrayBufferObject.getCoordShiftAndScaleEnabled();
const inverseShiftScaleMatrix = shiftScaleEnabled
- ? cellBO.getCABO().getInverseShiftAndScaleMatrix()
+ ? cellArrayBufferObject.getInverseShiftAndScaleMatrix()
: null;
const mat = inverseShiftScaleMatrix
? mat4.copy(model.imagematinv, actor.getMatrix())
@@ -1057,17 +1162,17 @@ function vtkOpenGLImageCPRMapper(publicAPI, model) {
planeEquations.push(planeEquation[j]);
}
}
- cellBO.getProgram().setUniformi('numClipPlanes', numClipPlanes);
- cellBO.getProgram().setUniform4fv('clipPlanes', planeEquations);
+ program.setUniformi('numClipPlanes', numClipPlanes);
+ program.setUniform4fv('clipPlanes', planeEquations);
}
// handle coincident
- if (cellBO.getProgram().isUniformUsed('coffset')) {
+ if (program.isUniformUsed('coffset')) {
const cp = publicAPI.getCoincidentParameters(ren, actor);
- cellBO.getProgram().setUniformf('coffset', cp.offset);
+ program.setUniformf('coffset', cp.offset);
// cfactor isn't always used when coffset is.
- if (cellBO.getProgram().isUniformUsed('cfactor')) {
- cellBO.getProgram().setUniformf('cfactor', cp.factor);
+ if (program.isUniformUsed('cfactor')) {
+ program.setUniformf('cfactor', cp.factor);
}
}
};