Example #1
//--------------------------------------------------------------
void testApp::updateRenderer(ofVideoPlayer& fromPlayer){
	
	if (!temporalAlignmentMode) {
		if(alignmentScrubber.getPairSequence().isSequenceTimebased()){
			// Time-based pairing: convert the normalized playhead position into
			// milliseconds, then select the depth frame by its matching time.
			// getDepthMillisForVideoMillis is the counterpart of the
			// getVideoMillisForDepthMillis call used in getStartAndEndTimes below.
			long movieMillis = fromPlayer.getPosition() * fromPlayer.getDuration() * 1000;
			currentDepthFrame = alignmentScrubber.getPairSequence().getDepthMillisForVideoMillis(movieMillis);
			depthSequence.selectTime(currentDepthFrame);
		}
		else {
			// Frame-based pairing: map the current video frame index to a depth frame index.
			currentDepthFrame = alignmentScrubber.getPairSequence().getDepthFrameForVideoFrame(fromPlayer.getCurrentFrame());
			depthSequence.selectFrame(currentDepthFrame);
		}
		renderer.setDepthImage(depthPixelDecodeBuffer);

	}
	
	
	processDepthFrame();
	
	renderer.update();
	
	processGeometry();
	
	if(!drawPointcloud && !drawWireframe && !drawMesh){
		drawPointcloud = true;
	}	
	
	currentDepthFrame = depthSequence.getSelectedFrame();

}
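
This routine is only half the picture: something has to drive it from the app's update loop. Below is a minimal sketch of a plausible call site, assuming the app owns an ofVideoPlayer member (the name lowResPlayer is illustrative, not taken from the listing). The video is advanced every frame, and the depth frame is re-synced only when a new video frame has actually arrived.

//--------------------------------------------------------------
void testApp::update(){
	// lowResPlayer is an assumed ofVideoPlayer member
	lowResPlayer.update();
	if(lowResPlayer.isFrameNew()){
		// pick the matching depth frame and refresh the renderer
		updateRenderer(lowResPlayer);
	}
}
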
ofRange ofxRGBDVideoDepthSequence::getStartAndEndTimes(ofVideoPlayer& player, ofxDepthImageSequence& sequence){
    if(!ready()){
        ofLogError("ofxRGBDVideoDepthSequence::getStartAndEndTimes -- video sequence not ready");
        return ofRange();
    }
    ofRange output;
    output.min = MAX(0, getVideoMillisForDepthMillis(0))/1000.;
    output.max = MIN(player.getDuration(), getVideoMillisForDepthMillis(sequence.getDurationInMillis()) / 1000. );
    return output;
}
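
getStartAndEndTimes() returns, in seconds, the span of the video for which depth data exists: the lower bound is the video time paired with depth time 0 (clamped to 0), and the upper bound is the video time paired with the last depth millisecond (clamped to the video's duration). Below is a minimal usage sketch, assuming members named videoDepthSequence, player and depthSequence (illustrative names, not from the listing) are already loaded; ofVideoPlayer::setPosition() takes a normalized 0..1 value, hence the division by the duration.

// Clamp the playhead to the portion of the video that has depth data.
ofRange playable = videoDepthSequence.getStartAndEndTimes(player, depthSequence);
float duration = player.getDuration();
if(duration > 0){
    // convert the start time in seconds to a normalized position
    float startPct = ofClamp(playable.min / duration, 0, 1);
    player.setPosition(startPct);
}
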