Example #1
ofMatrix4x4 ofApp::getViewMatrix() {
	ofMatrix4x4 viewMatrix = ofMatrix4x4(
		globals.scale, 0.0, 0.0, 0.0, 
		0.0, -globals.scale, 0.0, 0.0, 
		0.0, 0.0, 1.0, 0.0, 
		0.0, 0.0, 0.0, 1.0);

	if (globals.invertX) viewMatrix(0,0) *= -1;
	if (globals.invertY) viewMatrix(1,1) *= -1;

	if (globals.flipXY) {
		viewMatrix = ofMatrix4x4(
			0.0, 1.0, 0.0, 0.0, 
			1.0, 0.0, 0.0, 0.0, 
			0.0, 0.0, 1.0, 0.0, 
			0.0, 0.0, 0.0, 1.0 ) * viewMatrix;
	}

	ofMatrix4x4 aspectMatrix; // identity matrix
	float aspectRatio = float(fbo.getWidth()) / float(fbo.getHeight());
	if (aspectRatio > 1.0) {
		aspectMatrix(0,0) /= aspectRatio;
	}
	else {
		aspectMatrix(1,1) *= aspectRatio;
	}

	return viewMatrix * aspectMatrix;
}
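Not part of the original example: a minimal usage sketch, assuming the same fbo and globals members as above, showing how a view matrix like this could be applied while drawing into the FBO.

void ofApp::draw() {
	fbo.begin();
	ofClear(0);
	ofPushMatrix();
	ofMultMatrix(getViewMatrix()); // apply scale / flip / aspect correction
	// ... draw scene content here ...
	ofPopMatrix();
	fbo.end();
	fbo.draw(0, 0);
}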
Example #2
ofMatrix4x4 KinectGrabber::getWorldMatrix() {
	auto mat = ofMatrix4x4();
	if (kinectOpened) {
		ofVec3f a = kinect.getWorldCoordinateAt(0, 0, 1);// Trick to access kinect internal parameters without having to modify ofxKinect
		ofVec3f b = kinect.getWorldCoordinateAt(1, 1, 1);
		ofLogVerbose("kinectGrabber") << "getWorldMatrix(): Computing kinect world matrix";
		mat = ofMatrix4x4(b.x - a.x, 0, 0, a.x,
			0, b.y - a.y, 0, a.y,
			0, 0, 0, 1,
			0, 0, 0, 1);
	}
	return mat;
}
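Since the two sampled points sit at depth 1, the matrix above maps depth-image pixel coordinates to metric X/Y at unit depth. The following is only an illustrative sketch (the helper name is made up, not from the original project) of how such a matrix could be applied:

// Illustrative helper: convert a depth pixel (px, py) with measured depth d
// (same units the matrix was built with) into a world-space point.
ofVec3f pixelToWorld(const ofMatrix4x4& worldMatrix, int px, int py, float d) {
	ofVec4f atUnitDepth = worldMatrix * ofVec4f(px, py, 0, 1); // X/Y at depth 1
	return ofVec3f(atUnitDepth.x * d, atUnitDepth.y * d, d);
}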
Example #3
//--------------------------------------------------------------
ofVec4f ofxQuad::fromWarpToScreenCoord(float x, float y, float z)
{
	ofVec4f mousePoint;
	ofVec4f warpedPoint;
	
	// this is the point inside the warped system whose coordinates on the image we want to know ...
	mousePoint.x = x;
	mousePoint.y = y;
	mousePoint.z = 0.0;
	mousePoint.w = 1.0;
	
	// create an ofMatrix4x4 from the ofxGLWarper matrix data, in column order
	ofMatrix4x4 myOFmatrix = ofMatrix4x4(matrix[0], matrix[4],matrix[8],matrix[12],
										 matrix[1], matrix[5],matrix[9], matrix[13],
										 matrix[2], matrix[6],matrix[10],matrix[14],
										 matrix[3],matrix[7],matrix[11],matrix[15]);
	// use the matrix as-is (no inversion needed in this direction)
	//ofMatrix4x4 invertedMyMatrix = myOFmatrix.getInverse();
	ofMatrix4x4 invertedMyMatrix = myOFmatrix;
	
	// multiply both to get the point transformed by the matrix
	warpedPoint = invertedMyMatrix * mousePoint ;
	
	warpedPoint.x = warpedPoint.x / warpedPoint.w;
	warpedPoint.y = warpedPoint.y / warpedPoint.w;
	warpedPoint.z = warpedPoint.z / warpedPoint.w;
	
	return warpedPoint;
}
Example #4
	ofMatrix4x4 estimateAffine3D(vector<ofVec3f>& from, vector<ofVec3f>& to, float accuracy) {
		if(from.size() != to.size() || from.size() == 0 || to.size() == 0) {
			return ofMatrix4x4();
		}
		vector<unsigned char> outliers;
		return estimateAffine3D(from, to, outliers, accuracy);
	}
Example #5
void LeapToCameraCalibrator::resetProjector()
{
	//position = ofVec3f(1.0f,1.0f,1.0f);//cam.getPosition();
	throwRatio = 1.62f;
	lensOffset = ofVec2f(0.0f,0.5f);
    
	ofQuaternion rotation;
	auto rotationQuat = ofQuaternion(rotationX, ofVec3f(1, 0, 0), rotationZ, ofVec3f(0, 0, 1), rotationY, ofVec3f(0, 1, 0));
	ofMatrix4x4 pose = ofMatrix4x4(rotationQuat);
	pose(3,0) = translationX;
	pose(3,1) = translationY;
	pose(3,2) = translationZ;
	projector.setView(pose);
    
	ofMatrix4x4 projection;
	projection(0,0) = throwRatioX;
	projection(1,1) = -throwRatioY;
	projection(2,3) = 1.0f;
	projection(3,3) = 0.0f;
	projection.postMultTranslate(-lensOffsetX, -lensOffsetY, 0.0f);
	projector.setProjection(projection);
    
	projector.setWidth(resolution.x);
	projector.setHeight(resolution.y);
}
Example #6
//--------------------------------------------------------------
ofVec4f ofxQuad::fromScreenToWarpCoord(float x, float y, float z)
{
	ofVec4f mousePoint;
	ofVec4f warpedPoint;
	
	// this is the point on the image whose coordinates inside the warped system we want to know ...
	mousePoint.x = x;
	mousePoint.y = y;
	mousePoint.z = 0.0;
	mousePoint.w = 1.0;
	
	// create an ofMatrix4x4 from the ofxGLWarper matrix data, in column order
	ofMatrix4x4 myOFmatrix = ofMatrix4x4(matrix[0], matrix[4],matrix[8],matrix[12],
										 matrix[1], matrix[5],matrix[9], matrix[13],
										 matrix[2], matrix[6],matrix[10],matrix[14],
										 matrix[3],matrix[7],matrix[11],matrix[15]);
	// invert the matrix (this goes in the opposite direction to fromWarpToScreenCoord)
	ofMatrix4x4 invertedMyMatrix = myOFmatrix.getInverse();
	//ofMatrix4x4 invertedMyMatrix = myOFmatrix;
	
	// multiply both to get the point transformed by the matrix
	warpedPoint = invertedMyMatrix * mousePoint ;
	
	// we need to normalize the value as described here : http://tech.groups.yahoo.com/group/OpenCV/message/80121
	warpedPoint.x = warpedPoint.x / warpedPoint.w;
	warpedPoint.y = warpedPoint.y / warpedPoint.w;
	warpedPoint.z = warpedPoint.z / warpedPoint.w;
	
	return warpedPoint;
}
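Examples #3 and #6 share the same core operation: transform a 2D point by a 4x4 matrix and divide by w. A condensed sketch of that shared step (illustrative only, this helper is not part of ofxGLWarper):

ofVec4f transformPoint(const ofMatrix4x4& m, float x, float y) {
	ofVec4f p = m * ofVec4f(x, y, 0.0f, 1.0f); // apply the (possibly inverted) warp matrix
	if (p.w != 0.0f) {                          // homogeneous divide, as in the examples above
		p.x /= p.w;
		p.y /= p.w;
		p.z /= p.w;
	}
	return p;
}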
Example #7
	//----------
	void Controller::draw(ofEventArgs& args) {
		if (!initialised) {
			return;
		}

		DrawArguments rootDrawArguments;
		rootDrawArguments.chromeEnabled = this->chromeVisible;
		rootDrawArguments.naturalBounds = ofGetCurrentViewport();
		rootDrawArguments.globalTransform = ofMatrix4x4();
		rootDrawArguments.globalScale = 1.0f;
		rootDrawArguments.localBounds = ofRectangle(0, 0, rootDrawArguments.naturalBounds.getWidth(), rootDrawArguments.naturalBounds.getHeight());
		rootDrawArguments.globalBounds = rootDrawArguments.naturalBounds;

		auto currentPanel = this->currentPanel.lock();

		if (this->activeDialog) {
			this->activeDialogBackground.draw(rootDrawArguments.naturalBounds);
			ofPushStyle();
			{
				//draw light box background
				ofEnableAlphaBlending();
				ofSetColor(0, 200);
				ofDrawRectangle(rootDrawArguments.naturalBounds);

				//shadow for dialog
				ofFill();
				ofSetColor(0, 100);
				ofPushMatrix();
				{
					ofTranslate(5, 5);
					ofDrawRectangle(this->activeDialog->getBounds());
				}
				ofPopMatrix();

				//background for dialog
				ofSetColor(80);
				ofDrawRectangle(this->activeDialog->getBounds());
			}
			ofPopStyle();
			this->activeDialog->draw(rootDrawArguments);
		}
		else {
			if (this->maximised) {
				currentPanel->draw(rootDrawArguments);
			}
			else {
				//highlight panel
				if (currentPanel) {
					ofPushStyle();
					ofEnableAlphaBlending();
					ofSetColor(40, 40, 40, 100);
					ofDrawRectangle(this->currentPanelBounds);
					ofPopStyle();
				}

				this->rootGroup->draw(rootDrawArguments);
			}
		}
	}
Example #8
ofMatrix4x4 aiMatrix4x4ToOfMatrix4x4(const aiMatrix4x4& aim){
	return ofMatrix4x4(
		aim.a1,aim.a2,aim.a3,aim.a4,
		aim.b1,aim.b2,aim.b3,aim.b4,
		aim.c1,aim.c2,aim.c3,aim.c4,
		aim.d1,aim.d2,aim.d3,aim.d4
	);
}
Example #9
ofMatrix4x4 ParallelTransportFrames::normalMatrix() const
{
    ofMatrix4x4 normalMatrix = ofMatrix4x4::getTransposedOf(const_cast<ofMatrix4x4&>(frames.back()).getInverse());
    return ofMatrix4x4(normalMatrix(0, 0), normalMatrix(0, 1), normalMatrix(0, 2), 0.f,
                       normalMatrix(1, 0), normalMatrix(1, 1), normalMatrix(1, 2), 0.f,
                       normalMatrix(2, 0), normalMatrix(2, 1), normalMatrix(2, 2), 0.f,
                       0.f,                0.f,                0.f,                1.f);
}
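The transpose of the inverse of the current frame, with the translation terms cleared, is the standard normal matrix for transforming normals. A hedged sketch of how it might be handed to a shader (the uniform name and the ptf instance are assumptions, not from the original code):

// Illustrative only: upload the normal matrix while the shader is bound
// (call between shader.begin() and shader.end()).
void uploadNormalMatrix(ofShader& shader, const ParallelTransportFrames& ptf) {
	shader.setUniformMatrix4f("normalMatrix", ptf.normalMatrix());
}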
Example #10
ofMatrix4x4 ofxToMatrix4x4(string s) {
    vector<string> a = ofSplitString(s, ",");
    float mat[16];
    for (int i=0; i<16; i++) {
        mat[i] = ofToFloat(a[i]);
    }
    return ofMatrix4x4(mat);
}
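A matching serializer is a natural counterpart; this sketch (not from the original source) writes the 16 elements back out in the same comma-separated order that ofxToMatrix4x4() reads them:

string ofxFromMatrix4x4(const ofMatrix4x4& m) {
	const float* p = m.getPtr();
	string s;
	for (int i = 0; i < 16; i++) {
		s += ofToString(p[i]);
		if (i < 15) s += ",";
	}
	return s;
}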
Example #11
//----------------------------------------
void ofNode::setGlobalOrientation(const ofQuaternion& q) {
    if(parent == NULL) {
        setOrientation(q);
    } else {
        ofMatrix4x4 invParent(ofMatrix4x4::getInverseOf(parent->getGlobalTransformMatrix()));
        ofMatrix4x4 m44(ofMatrix4x4(q) * invParent);
        setOrientation(m44.getRotate());
    }
}
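A short usage sketch (the node setup is illustrative): the point of setGlobalOrientation() is that the node ends up with the requested orientation in world space no matter how its parent is rotated.

void exampleGlobalOrientation() {
	ofNode parent, child;
	child.setParent(parent);
	parent.rotate(45, 0, 1, 0);                                     // rotate the parent arbitrarily
	child.setGlobalOrientation(ofQuaternion(90, ofVec3f(0, 0, 1))); // request a world-space orientation
	// child.getGlobalOrientation() is now ~90 degrees about +Z in world space
}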
Example #12
//------------------------------------------- update.
void ofxAssimpModelLoader::update() {
    updateAnimations();
    updateMeshes(scene->mRootNode, ofMatrix4x4());
    if(hasAnimations() == false) {
        return;
    }
    updateBones();
    updateGLResources();
}
Example #13
void ofxMapamok::reset() {
	calibrationReady = false;

	rvec = cv::Mat();
	tvec = cv::Mat();
	modelMatrix = ofMatrix4x4();
	intrinsics = Intrinsics();
	distCoeffs = cv::Mat();

}
Example #14
void Camera::init(int windowWidth, int windowHeight)
{
    matrix = ofMatrix4x4();
    width = windowWidth;
    height = windowHeight;
    aim_dist = 400;
    rotX = 0;
    rotY = 0;
    lastX = width/2;
    lastY = height/2;
    deltaX = 0;
    deltaZ = -aim_dist;
}
Example #15
void Polychron::rotate4DOnlyOnce(float dwx, float dwy, float dwz){
    wx = dwx;
    wy = dwy;
    wz = dwz;
    
    ofMatrix4x4 rotateA1 = ofMatrix4x4(cosf(wx), 0,        0, -sinf(wx),
                                       0,        1,        0,         0,
                                       0,        0,        1,         0,
                                       sinf(wx), 0,        0,   cosf(wx) );
    ofMatrix4x4 rotateA2 = ofMatrix4x4(1,         0,        0,         0,
                                       0,  cosf(wy),        0, -sinf(wy),
                                       0,         0,        1,         0,
                                       0,  sinf(wy),        0,  cosf(wy) );
    ofMatrix4x4 rotateA3 = ofMatrix4x4(1,         0,        0,         0,
                                       0,         1,        0,         0,
                                       0,         0, cosf(wz), -sinf(wz),
                                       0,         0, sinf(wz),  cosf(wz) );
    ofMatrix4x4 rotation = ofMatrix4x4(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1);
    rotation *= rotateA1;
    rotation *= rotateA2;
    rotation *= rotateA3;
    rotate(rotation);
}
Example #16
	ofMatrix4x4 makeMatrix(Mat rotation, Mat translation) {
		Mat rot3x3;
		if(rotation.rows == 3 && rotation.cols == 3) {
			rot3x3 = rotation;
		} else {
			Rodrigues(rotation, rot3x3);
		}
		double* rm = rot3x3.ptr<double>(0);
		double* tm = translation.ptr<double>(0);
		return ofMatrix4x4(rm[0], rm[3], rm[6], 0.0f,
                           rm[1], rm[4], rm[7], 0.0f,
                           rm[2], rm[5], rm[8], 0.0f,
                           tm[0], tm[1], tm[2], 1.0f);
	}
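Note the layout: the OpenCV rotation ends up transposed and the translation sits in the bottom row, which matches openFrameworks' row-vector convention (a point is transformed as point * matrix). A hedged usage sketch:

// Illustrative only: transform an object-space point into camera space with the
// matrix built above; equivalent to R * p + t in OpenCV's column-vector convention.
ofVec3f objectToCamera(const cv::Mat& rvec, const cv::Mat& tvec, const ofVec3f& objectPoint) {
	ofMatrix4x4 m = makeMatrix(rvec, tvec);
	return objectPoint * m;
}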
Example #17
ofMatrix4x4 loadMatrix(string& filename) {
	auto buffer = loadDoubles(filename);
	if (buffer.size() >= 16) {
		ofMatrix4x4 m;
		auto floatBuffer = m.getPtr();
		for(int i=0; i<16; i++) {
			floatBuffer[i] = buffer[i];
		}
		
		ofMatrix4x4 reverseZ;
		reverseZ.scale(1.0f, 1.0f, -1.0f);
		return reverseZ * m * reverseZ;
	} else {
		return ofMatrix4x4();
	}
}
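Conjugating by a Z-flip (reverseZ * m * reverseZ) converts the transform between left- and right-handed coordinate systems. The same step on its own, as a sketch (the helper name is made up):

ofMatrix4x4 flipHandedness(const ofMatrix4x4& m) {
	ofMatrix4x4 reverseZ;
	reverseZ.scale(1.0f, 1.0f, -1.0f); // mirror the Z axis
	return reverseZ * m * reverseZ;    // flip, apply, flip back
}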
Example #18
//--------------------------------------------------------------
void ofApp::keyPressed(int key){
	if (key == 'c') {
		scene.points0.clear();
		scene.points1.clear();
		scene.camera.setView(ofMatrix4x4());
		scene.camera.setProjection(ofMatrix4x4());
		scene.camera.distortion.clear();
		scene.projector0.setView(ofMatrix4x4());
		scene.projector0.setProjection(ofMatrix4x4());
		scene.projector1.setView(ofMatrix4x4());
		scene.projector1.setProjection(ofMatrix4x4());
	}
}
Example #19
/** @brief	Queries the current OpenGL matrix state
 *  @details Returns the specified matrix as held by the renderer's current matrix stack.
 *
 *			You can query one of the following:
 *
 *			[OF_MATRIX_MODELVIEW | OF_MATRIX_PROJECTION | OF_MATRIX_TEXTURE]
 *
 *			Each query will return the state of the matrix
 *			as it was uploaded to the shader currently bound.
 *
 *	@param	matrixMode_  Which matrix mode to query
 */
ofMatrix4x4 ofGLRenderer::getCurrentMatrix(ofMatrixMode matrixMode_) const {
	ofMatrix4x4 mat;
	switch (matrixMode_) {
		case OF_MATRIX_MODELVIEW:
			glGetFloatv(GL_MODELVIEW_MATRIX, mat.getPtr());
			break;
		case OF_MATRIX_PROJECTION:
			glGetFloatv(GL_PROJECTION_MATRIX, mat.getPtr());
			break;
		case OF_MATRIX_TEXTURE:
			glGetFloatv(GL_TEXTURE_MATRIX, mat.getPtr());
			break;
		default:
			ofLogWarning() << "Invalid getCurrentMatrix query";
			mat = ofMatrix4x4();
			break;
	}
	return mat;
}
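In application code this query is normally reached through the global wrapper rather than the renderer itself; a minimal sketch, assuming an openFrameworks version that exposes ofGetCurrentMatrix():

void ofApp::draw() {
	ofPushMatrix();
	ofTranslate(100, 50);
	// ask the active renderer for its current modelview matrix
	ofMatrix4x4 mv = ofGetCurrentMatrix(OF_MATRIX_MODELVIEW);
	ofLogNotice() << "modelview translation: " << mv.getTranslation();
	ofPopMatrix();
}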
Example #20
//--------------------------------------------------------------
void HypercubeScene::setup(){
    
    float MASTER_SCALE = 80;
    
    for(int i = 0; i < NUM_POLY; i++){
        polychron[i].loadVefFile("8cell.ascii.txt");
        highlighted[i].clear();
    }
    hotSpots.push_back( ofPoint(ofGetWidth()*.3, ofGetHeight()*.2) );
    hotSpots.push_back( ofPoint(ofGetWidth()*.7, ofGetHeight()*.2) );
    hotSpots.push_back( ofPoint(ofGetWidth()*.5, ofGetHeight()*.75) );
    hotSpotRadius = 60;

    myShader.load("gradient");
    
    for(int i = 0; i < NUM_POLY; i++){
        rotations[i].x = ofRandom(.01, 0.3);
        rotations[i].y = ofRandom(.01, 0.3);
        rotations[i].z = ofRandom(.01, 0.3);
        
        brightnesses[i] = 0;
    }
    
    for(int i = 0; i < NUM_POLY; i++){
        polyMatrix[i] = ofMatrix4x4(1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1);
        polyMatrix[i].scale(MASTER_SCALE, MASTER_SCALE, MASTER_SCALE);
//        polyMatrix[i].rotate(ofRandom(90), 1,0,0);
//        polyMatrix[i].rotate(ofRandom(90), 0,0,1);
        // add some rotations and scales between polychrons and increasingly scale them up
        polyMatrix[i].translate(i*0.1, i*0, 0);
        polyMatrix[i].rotate(i*45, 1, 0, 0);
        polyMatrix[i].rotate((i%2)*90, 0, 1, 0);
        polyMatrix[i].rotate( (floor(i/2)*45), 0, 0, 1);
        polyMatrix[i].scale(1+i*0.5, 1+i*0.5, 1+i*0.5);
    }
    
    numPoly = NUM_POLY;
    
    sceneWindowMatrix.makeIdentityMatrix();
    sceneWindowMatrix.scale(RESOLUTION_SCENE_WIDTH / (float)RESOLUTION_WINDOW_WIDTH,
                            RESOLUTION_SCENE_HEIGHT / (float)RESOLUTION_WINDOW_HEIGHT,
                            1.0);
}
Example #21
//-------------------------- ICP test -----------------------------
void testTheIcp(const ofVec3f* pointsmap,ofVec3f* pointsmap_f)
{
	ofMatrix4x4 tm = ofMatrix4x4(0.98481,-0.17365,0,0,0.17365,0.98481,0,0,0,0,1,0,0,0,0,1);
	for (int i = 0; i < KINECT_HEIGHT; ++i)
		for (int j = 0; j < KINECT_WIDTH; ++j)
		{
			int num = i * KINECT_WIDTH + j;
			pointsmap_f[num] = pointsmap[num];
			if(pointsmap[num].z > 0)
			{
				ofVec4f newp = ofVec4f(pointsmap[num].x,pointsmap[num].y,pointsmap[num].z,1);
				ofVec4f newn = tm*newp;
				pointsmap_f[num].x = newn.x;
				pointsmap_f[num].y = newn.y;
				pointsmap_f[num].z = newn.z;
				//pointsmap_f[num].y += 0.1;
			}
		}
}
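The hard-coded values are cos 10° ≈ 0.98481 and sin 10° ≈ 0.17365, so tm is a rotation of about 10 degrees around the Z axis written in column-vector form. A parametric sketch that reproduces the same layout (the helper name is illustrative):

ofMatrix4x4 makeZRotation(float degrees) {
	float c = cosf(ofDegToRad(degrees));
	float s = sinf(ofDegToRad(degrees));
	return ofMatrix4x4(c, -s, 0, 0,
	                   s,  c, 0, 0,
	                   0,  0, 1, 0,
	                   0,  0, 0, 1);
}
// makeZRotation(10) matches the tm used in testTheIcp() above (to the precision given).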
Example #22
//--------------------------------------------------------------
void ofxGLWarper::setup(int _x, int _y, int _w, int _h){
    cout << "ofxGLWarper setup: " <<_x << " " <<_y << " " <<_w << " " <<_h << endl;
	ofUnregisterMouseEvents(this);
	/*
	corners[0].x = 0.0;
	corners[0].y = 0.0;
	
	corners[1].x = 1.0;
	corners[1].y = 0.0;
	
	corners[2].x = 1.0;
	corners[2].y = 1.0;
	
	corners[3].x = 0.0;
	corners[3].y = 1.0;
	//*/
    corners[0].x = _x;
	corners[0].y = _y;
	
	corners[1].x = _x + _w;
	corners[1].y = _y;
	
	corners[2].x = _x + _w;
	corners[2].y = _y + _h;
	
	corners[3].x = _x;
	corners[3].y = _y + _h;
    
	active=false;

	myMatrix = ofMatrix4x4(); // identity
	x=_x;
	y=_y;
	width=_w;
	height=_h;
	whichCorner = -1;
    cornerSelected = false;
    cornerSensibility = 0.5;
    bUseKeys = true;
}
Example #23
//--------------------------------------------------------------
void ofApp::setup() {
	ofSetOrientation(OF_ORIENTATION_90_LEFT);
	ofSetLogLevel(OF_LOG_VERBOSE);
	ofBackground(0, 0, 0);
	ofSetVerticalSync(true);
	ofSetFrameRate(60);

	planet.setUseVbo(false);
	planet.set(1500, 50);
	planet.setPosition(0, 0, 0);

	easycam.setDistance(20);
	cam.setPosition(0, 0, 0);

	ofxAccelerometer.setup();
	ofxRegisterAccelEvents(this);

	tracking.reset();
	invert = ofNode();
	node = ofNode();
	view = ofMatrix4x4();
}
Example #24
void ofApp::setupViewports(){
    
    viewportReal = ofRectangle((21*ofGetWindowWidth()/24)/2, 0, (21*ofGetWindowWidth()/24)/2, 8*ofGetWindowHeight()/8);
    viewportSim = ofRectangle(0, 0, (21*ofGetWindowWidth()/24)/2, 8*ofGetWindowHeight()/8);
    
    activeCam = 0;
    
    
    for(int i = 0; i < N_CAMERAS; i++){
        cams.push_back(new ofEasyCam());
        savedCamMats.push_back(ofMatrix4x4());
        viewportLabels.push_back("");
    }
    
    cams[0]->begin(viewportReal);
    cams[0]->end();
    cams[0]->enableMouseInput();
    
    
    cams[1]->begin(viewportSim);
    cams[1]->end();
    cams[1]->enableMouseInput();
    
}
Example #25
		//----------
		void SolveSet::solveCv() {
			if (this->inPoints.empty()) {
				throw(ofxRulr::Exception("Data set is empty!"));
			}

			//Estimate the transform.
			vector<uchar> inliers;
			cv::Mat transform(3, 4, CV_64F);

			auto startTime = chrono::system_clock::now();
			int ret = cv::estimateAffine3D(this->inPoints, this->outPoints, transform, inliers, this->parameters.cvSettings.ransacThreshold);
			auto endTime = chrono::system_clock::now();

			this->result.success = ret;
			this->result.totalTime = endTime - startTime;
			this->result.residual = 1.0f - (inliers.size() / (float)this->inPoints.size());
			auto m = transform.ptr<double>(0);
			this->result.transform = ofMatrix4x4(m[0], m[4], m[8],  0.0f,
												 m[1], m[5], m[9],  0.0f,
												 m[2], m[6], m[10], 0.0f,
												 m[3], m[7], m[11], 1.0f);

			this->completed = true;
		}
Example #26
void Dude::updateRenderer( ofShader & shader )
{
    // update the rendering matrices
    renderMats = animSys.getBoneMatrices();
    std::vector<float> boneLengths = animSys.getBoneLengths();
    
    for( int i = 0; i < animSys.getNumBones(); i++ )
    {
        renderMats[i].scale(1,1,boneLengths[i]);
        //renderMats[i].preTranslate(position);
        renderMats[i].invert();
    }
    
    // prepare rendering steer matrix
    renderSteerMatrix.identity();
    renderSteerMatrix.translate(position);
    renderSteerMatrix *= steerMatrix;
    //renderSteerMatrix.translate(-position);

    // set it up
    shader.setUniformMatrix4f("steerMatrix",ofMatrix4x4((float*)renderSteerMatrix.inverse()));
    shader.setUniform1f("blend_k",blend_k);
    shader.setUniformMatrix4f("box_mats", (ofMatrix4x4&) renderMats[0], renderMats.size());//
}
Example #27
//--------------------------------------------
// transformations
//our openGL wrappers
ofMatrix4x4 ofCairoRenderer::getCurrentMatrix(ofMatrixMode matrixMode_) const{
	ofLogWarning() << "getCurrentMatrix not yet implemented for Cairo Renderer.";
	return ofMatrix4x4();
}
Example #28
void NCKinectSkeletonRigged::resetNodesRotation() {
    for (int i=0;i<joints.size();++i) {
        joints[i]->localrotation = ofMatrix4x4();
        joints[i]->updateBone();
    }
}
Example #29
	void CamaraLucida::load_data(string kinect_calibration_filename, string proj_calibration_filename)
	{
		const char* kinect_calibration_filename_str = kinect_calibration_filename.c_str();
		const char* proj_calibration_filename_str = proj_calibration_filename.c_str();
		
		//	rgb
		
		rgb_int = (CvMat*)cvLoad(kinect_calibration_filename_str, NULL, "rgb_intrinsics");
		ofLog(OF_LOG_VERBOSE, "Camara Lucida \n rgb_intrinsics opencv (kinect_calibration.yml)");
		printM(rgb_int);
		
		calib.fx_rgb = (float)cvGetReal2D( rgb_int, 0, 0 );
		calib.fy_rgb = (float)cvGetReal2D( rgb_int, 1, 1 );
		calib.cx_rgb = (float)cvGetReal2D( rgb_int, 0, 2 );
		calib.cy_rgb = (float)cvGetReal2D( rgb_int, 1, 2 );
		
		CvMat* rgb_size = (CvMat*)cvLoad(kinect_calibration_filename_str, NULL, "rgb_size");
		calib.rgb_width = (int)cvGetReal2D( rgb_size, 0, 0 );
		calib.rgb_height = (int)cvGetReal2D( rgb_size, 0, 1 );
		cvReleaseMat(&rgb_size);
		
		convertKKopencv2opengl(rgb_int, calib.rgb_width, calib.rgb_height, calib.near, calib.far, rgb_KK);
		ofLog(OF_LOG_VERBOSE, "Camara Lucida \n rgb_intrinsics converted to opengl");
		printM(rgb_KK, 4, 4);
		
		
		//	depth
		
		depth_int = (CvMat*)cvLoad(kinect_calibration_filename_str, NULL, "depth_intrinsics");
		ofLog(OF_LOG_VERBOSE, "Camara Lucida \n depth_intrinsics opencv (kinect_calibration.yml)");
		printM(depth_int);
		
		calib.fx_d = (float)cvGetReal2D( depth_int, 0, 0 );
		calib.fy_d = (float)cvGetReal2D( depth_int, 1, 1 );
		calib.cx_d = (float)cvGetReal2D( depth_int, 0, 2 );
		calib.cy_d = (float)cvGetReal2D( depth_int, 1, 2 );
		
		CvMat* d_size = (CvMat*)cvLoad(kinect_calibration_filename_str, NULL, "depth_size");
		calib.depth_width = (int)cvGetReal2D( d_size, 0, 0 );
		calib.depth_height = (int)cvGetReal2D( d_size, 0, 1 );
		cvReleaseMat(&d_size);
		
		convertKKopencv2opengl(depth_int, calib.depth_width, calib.depth_height, calib.near, calib.far, depth_KK);
		ofLog(OF_LOG_VERBOSE, "Camara Lucida \n depth_intrinsics converted to opengl");
		printM(depth_KK, 4, 4);
		
		
		//	depth/rgb RT
		
		rgb_R = (CvMat*)cvLoad(kinect_calibration_filename_str, NULL, "R");
		ofLog(OF_LOG_VERBOSE, "Camara Lucida \n rgb_R opencv (kinect_calibration.yml)");
		printM(rgb_R);
		
		rgb_T = (CvMat*)cvLoad(kinect_calibration_filename_str, NULL, "T");
		ofLog(OF_LOG_VERBOSE, "Camara Lucida \n rgb_T opencv (kinect_calibration.yml)");
		printM(rgb_T);
		
		convertRTopencv2opengl(rgb_R, rgb_T, rgb_RT);
		ofLog(OF_LOG_VERBOSE, "Camara Lucida \n rgb_RT converted to opengl");
		printM(rgb_RT, 4, 4);
		
		//	T_rgb = ofVec3f(
		//		rgb_RT[12],	rgb_RT[13], rgb_RT[14] );
		calib.RT_rgb = ofMatrix4x4(
			 rgb_RT[0],	rgb_RT[1], rgb_RT[2],		rgb_RT[12],
			 rgb_RT[4],	rgb_RT[5], rgb_RT[6],		rgb_RT[13],
			 rgb_RT[8],	rgb_RT[9], rgb_RT[10],		rgb_RT[14],
			 0.,			0.,			0.,				1);
		//	R_rgb.preMultTranslate(-T_rgb);
		//	R_rgb = ofMatrix4x4::getTransposedOf(R_rgb);
		
		
		//	projector
		
		proj_int = (CvMat*)cvLoad(proj_calibration_filename_str, NULL, "proj_intrinsics");
		ofLog(OF_LOG_VERBOSE, "Camara Lucida \n proj_intrinsics opencv (projector_calibration.yml)");
		printM(proj_int);
		
		calib.fx_p = (float)cvGetReal2D( proj_int, 0, 0 );
		calib.fy_p = (float)cvGetReal2D( proj_int, 1, 1 );
		calib.cx_p = (float)cvGetReal2D( proj_int, 0, 2 );
		calib.cy_p = (float)cvGetReal2D( proj_int, 1, 2 );
		
		CvMat* p_size = (CvMat*)cvLoad(proj_calibration_filename_str, NULL, "proj_size");
		calib.proj_width = (int)cvGetReal2D( p_size, 0, 0 );
		calib.proj_height = (int)cvGetReal2D( p_size, 0, 1 );
		cvReleaseMat(&p_size);
		
		convertKKopencv2opengl(proj_int, calib.proj_width, calib.proj_height, calib.near, calib.far, proj_KK);
		ofLog(OF_LOG_VERBOSE, "Camara Lucida \n proj_intrinsics converted to opengl");
		printM(proj_KK, 4, 4);
		
		proj_R = (CvMat*)cvLoad(proj_calibration_filename_str, NULL, "R");
		ofLog(OF_LOG_VERBOSE, "Camara Lucida \n proj_R opencv (projector_calibration.yml)");
		printM(proj_R);
		
		proj_T = (CvMat*)cvLoad(proj_calibration_filename_str, NULL, "T");
		ofLog(OF_LOG_VERBOSE, "Camara Lucida \n proj_T opencv (projector_calibration.yml)");
		printM(proj_T);
		
		convertRTopencv2opengl(proj_R, proj_T, proj_RT);
		ofLog(OF_LOG_VERBOSE, "Camara Lucida \n proj_RT converted to opengl");
		printM(proj_RT, 4, 4);
	}
Example #30
ofMatrix4x4 NCAssimpModel::tooF(aiMatrix4x4 & m){
    return ofMatrix4x4(m[0][0],m[1][0],m[2][0],m[3][0],
                       m[0][1],m[1][1],m[2][1],m[3][1],
                       m[0][2],m[1][2],m[2][2],m[3][2],
                       m[0][3],m[1][3],m[2][3],m[3][3]);
}