Example #1
0
void SliceRenderer::colorMapRendering(
		const soglu::GLViewSetup &aViewSetup,
		const soglu::GLTextureImageTyped<3> &aImage,
		const SliceConfiguration &aSlice,
		const SliceRenderingQuality &aRenderingQuality,
		const ColorMapRenderingOptions &aOptions)
{
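	// forward all arguments to renderSlice()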
	renderSlice(
		aViewSetup,
		aImage,
		aSlice,
		aRenderingQuality,
		aOptions);
}
Example #2
0
void
SliceRenderer::brightnessContrastRendering(
		const soglu::GLViewSetup &aViewSetup,
		const soglu::GLTextureImageTyped<3> &aImage,
		const SliceConfiguration &aSlice,
		const SliceRenderingQuality &aRenderingQuality,
		const BrightnessContrastRenderingOptions &aBCOptions)
{
	renderSlice(
		aViewSetup,
		aImage,
		aSlice,
		aRenderingQuality,
		aBCOptions);
}
Example #3
0
void MultiplanarSliceRenderer::process() {

    tgtAssert(inport_.isReady(), "Inport not ready");
    tgtAssert(inport_.getData()->getRepresentation<VolumeRAM>(), "No volume");

    outport_.activateTarget("OrthogonalSliceRenderer::process()");
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // get GL resources and set up the shader
    TextureUnit transferUnit, texUnit;
    bool setupSuccessful;
    if (texMode_.isSelected("2d-texture")) {      // 2D texture
        setupSuccessful = setupSliceShader(sliceShader_, inport_.getData(), &transferUnit);
    }
    else if (texMode_.isSelected("3d-texture")) { // 3D texture
        // also binds the volume
        setupSuccessful = setupVolumeShader(sliceShader_, inport_.getData(), &texUnit, &transferUnit, 0, lightPosition_.get());
    }
    else {
        LERROR("unknown texture mode: " << texMode_.get());
        setupSuccessful = false;
    }
    if (!setupSuccessful) {
        outport_.deactivateTarget();
        return;
    }

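    // bind the transfer function texture to its texture unit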
    transferUnit.activate();
    transferFunc_.get()->bind();

    sliceShader_->setUniform("textureMatrix_", tgt::mat4::identity);

    // important: save the current matrix state before applying the processor's camera,
    // or successive processors will inherit these settings!
    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();

    camProp_.look(outport_.getSize());

    // transform bounding box by dataset transformation matrix
    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();
    tgt::multMatrix(inport_.getData()->getPhysicalToWorldMatrix());

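    // render each enabled axis-aligned slice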
    if (renderXYSlice_.get()) {
        renderSlice(SLICE_XY, sliceNumberXY_.get(), texUnit);
    }
    if (renderXZSlice_.get()) {
        renderSlice(SLICE_XZ, sliceNumberXZ_.get(), texUnit);
    }
    if (renderYZSlice_.get()) {
        renderSlice(SLICE_YZ, sliceNumberYZ_.get(), texUnit);
    }

    // restore matrix stack
    glPopMatrix();

    deactivateShader();

    glActiveTexture(GL_TEXTURE0);
    outport_.deactivateTarget();

    glMatrixMode(GL_PROJECTION);
    glPopMatrix();
    glMatrixMode(GL_MODELVIEW);
    glPopMatrix();
}
Example #4
0
//understand the edge vertex computation from [email protected]
//ASSUMPTION: eye is along z-axis
//vo: volume vertex coords (model-space coords)
//tx: texture vertex coords (tex-space coords)
//axis: axis to slice along (world-space coords)
void vRenderer::renderTexture3D(float sampleFrequency, GLdouble mv[16], float vo[8][3], float tx[8][3], float axis[3])
{
    float rv[8][3];     //the rotated volume (may include a scale)
    float maxval = -10; //(tmp)
    float minval = 10;
    int minvert = 0, maxvert = 0;
    GLdouble mvinv[16];
    int i, j, k;
    inverseMatrix(mvinv, mv); //invert model view matrix

    for(i=0; i<8; ++i){
	translateV3(rv[i], mv, vo[i]); //get the rotated vol coords
	//now get the max and min z in view space
	if(rv[i][2] > maxval){
	    maxval = rv[i][2];
	    maxvert = i;
	}
	if(rv[i][2] < minval){
	    minval = rv[i][2];
	    minvert = i;  //determine the starting corner for slicing
	}
    }

    //find the slice plane point 'sp' (initial) and the slice plane normal 'sn'
    //sp is the slice starting point, simply the vertex farthest from the eye
    float sp[3] = {vo[minvert][0], vo[minvert][1], vo[minvert][2]};
//    float sp[3] = {vo[maxvert][0], vo[maxvert][1], vo[maxvert][2]};
    float vpn[3];
    vpn[0] = axis[0]; vpn[1] = axis[1]; vpn[2] = axis[2];

    //now calculate sn, which is the normalized vpn in model space
    //ie where the original slices are stored
    float sn[3];
    translateV3(sn, mvinv, vpn); //move vpn to sn (model space);
    //now normalize this
    float normsn = (float)sqrt(sn[0]*sn[0] + sn[1]*sn[1] + sn[2]*sn[2]); //normalize
    sn[0]/=normsn;
    sn[1]/=normsn;
    sn[2]/=normsn;

    //now find the distance we need to slice (|max_vertex - min_vertex|)
    float maxd[3] = {0, 0, maxval}; //(tmp) only use z-coord (view space)
    float mind[3] = {0, 0, minval}; //(tmp) ditto	    (view space)
    float maxv[3], minv[3];	   //(tmp)
    translateV3(maxv, mvinv, maxd); //translate back to model space
    translateV3(minv, mvinv, mind); //ditto
    maxv[0] -= minv[0]; //subtract
    maxv[1] -= minv[1];
    maxv[2] -= minv[2];

    //now take the norm of this vector... we have the distance to be sampled
    //this distance is in the world space
    float dist = (float)sqrt(maxv[0]*maxv[0] + maxv[1]*maxv[1] + maxv[2]*maxv[2]);

#if defined(ADDCGGL) || defined(ADDARBGL)
    glColor4f(1.0f, 1.0f, 1.0f, 0.01f);
#else
    glColor4f(1.0f, 1.0f, 1.0f, 0.1f);
#endif

    GlErr("vRenderer","drawVA");

    //distance between samples
    float sampleSpacing = 1.0f / (myVolume->maxDim * sampleFrequency);
    float del[3] = {sn[0]*sampleSpacing, sn[1]*sampleSpacing, sn[2]*sampleSpacing};

    int samples = (int)(dist / sampleSpacing); //samples over the total distance to be sampled (highly un-optimized!)
    float poly[6][3];   // for edge intersections
    float tcoord[6][3]; // for texture intersections
    float tpoly[6][3];  // for transformed edge intersections
    int edges;	       // total number of edge intersections

    //the dep texture should be scaled
    glBindTexture(GL_TEXTURE_3D, myVolume->texName);
    //sp: slice plane point
    //sn: the slice direction to cut through the volume
    //the above two are in world coordinate space

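    //march the slicing plane through the volume, advancing one sampleSpacing step along sn per slice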
    for(i = 0 ; i < samples; ++i){ //for each slice
	//increment the slice plane point by the slice distance
//	sp[0] -= del[0];
//	sp[1] -= del[1];
//	sp[2] -= del[2];

	sp[0] += del[0];
	sp[1] += del[1];
	sp[2] += del[2];

	edges = 0;
	//now check each edge of the volume for intersection with..
	//the plane defined by sp & sn
	//front bottom edge
	edges += intersect(vo[0], vo[1], tx[0], tx[1], rv[0], rv[1], sp, sn,
			   poly[edges], tcoord[edges], tpoly[edges]);
	//front left edge
	edges += intersect(vo[0], vo[2], tx[0], tx[2], rv[0], rv[2], sp, sn,
			   poly[edges], tcoord[edges], tpoly[edges]);
	//front right edge
	edges += intersect(vo[1], vo[3], tx[1], tx[3], rv[1], rv[3], sp, sn,
			   poly[edges], tcoord[edges], tpoly[edges]);
	//left bottom edge
	edges += intersect(vo[4], vo[0], tx[4], tx[0], rv[4], rv[0], sp, sn,
			   poly[edges], tcoord[edges], tpoly[edges]);
	//right bottom edge
	edges += intersect(vo[1], vo[5], tx[1], tx[5], rv[1], rv[5], sp, sn,
			   poly[edges], tcoord[edges], tpoly[edges]);
	//front top edge
	edges += intersect(vo[2], vo[3], tx[2], tx[3], rv[2], rv[3], sp, sn,
			   poly[edges], tcoord[edges], tpoly[edges]);
	//back bottom edge
	edges += intersect(vo[4], vo[5], tx[4], tx[5], rv[4], rv[5], sp, sn,
			   poly[edges], tcoord[edges], tpoly[edges]);
	//back left edge
	edges += intersect(vo[4], vo[6], tx[4], tx[6], rv[4], rv[6], sp, sn,
			   poly[edges], tcoord[edges], tpoly[edges]);
	//back right edge
	edges += intersect(vo[5], vo[7], tx[5], tx[7], rv[5], rv[7], sp, sn,
			   poly[edges], tcoord[edges], tpoly[edges]);
	//back top edge
	edges += intersect(vo[6], vo[7], tx[6], tx[7], rv[6], rv[7], sp, sn,
			   poly[edges], tcoord[edges], tpoly[edges]);
	//left top edge
	edges += intersect(vo[2], vo[6], tx[2], tx[6], rv[2], rv[6], sp, sn,
			   poly[edges], tcoord[edges], tpoly[edges]);
	//right top edge
	edges += intersect(vo[3], vo[7], tx[3], tx[7], rv[3], rv[7], sp, sn,
			   poly[edges], tcoord[edges], tpoly[edges]);

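	// sort the intersection vertices by angle around their centroid so they form a convex polygon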
	// B.M.E. Moret & H.D. Shapiro "P to NP" pp. 453

	float dx, dy, tt, theta, cen[2];  //tt = temp theta
	cen[0] = cen[1] = 0.0;
	int next;
	//rather than swap 3 arrays, only one?
	int order[6] = {0,1,2,3,4,5};

	// order[6] could be an extremely inefficient way to do this
	for(j=0; j<edges; ++j){ //find the center of the points
	    cen[0] += tpoly[j][0];
	    cen[1] += tpoly[j][1];
	} //by averaging
	cen[0]/= edges;
	cen[1]/= edges;

	for(j=0; j<edges; ++j){ //for each vertex
	    theta = -10;	       //find one with largest angle from center..
	    next = j;
	    for (k= j; k<edges; ++k){
		//... and check angle made between other edges
		dx = tpoly[order[k]][0] - cen[0];
		dy = tpoly[order[k]][1] - cen[1];
		if( (dx == 0) && (dy == 0)){ //same as center?
		    next = k;
		    cout << "what teh " << endl;
		    break; //out of this for-loop
		}
		tt = dy/(ABS(dx) + ABS(dy)); //else compute theta [0-4]
		if( dx < 0.0 ) tt = (float)(2.0 - tt); //check quadrants 2&3
		else if( dy < 0.0 ) tt = (float)(4.0 + tt); //quadrant 4
		if( theta <= tt ){  //grab the max theta
		    next = k;
		    theta = tt;
		}
	    } //end for(k) angle checking
	    // I am using 'tt' as a temp
	    // swap polygon vertex (is this better than another branch?)
	    // I am not sure which is worse: swapping 3 vectors for every edge
	    // or using an array to index into another array??? hmmm....
	    //   should have paid more attention in class
	    int tmp = order[j];
	    order[j] = order[next];
	    order[next] = tmp;

	} //end for(j) edge /angle sort
	renderSlice(edges, tcoord, poly, order);
	//}//end else compute convex hull
    }// end for(i) each slice
    //now draw each slice view

}