bool world2img(cv::Mat rotateMat,cv::Mat tranVector,double focalLength,cv::Point3d wP, cv::Point2d &iP) { //CvMat *originalPos=cvCreateMat(3,1,CV_32FC1); cv::Mat originalPos(3,1,CV_64F); originalPos.at<double>(0,0)=wP.x; originalPos.at<double>(1,0)=wP.y; originalPos.at<double>(2,0)=wP.z; //cvmSet(originalPos,0,0,wP.x); //cvmSet(originalPos,1,0,wP.y); //cvmSet(originalPos,2,0,wP.z); cv::Mat rotationPos(3,1,CV_64F); rotationPos=rotateMat*originalPos; //CvMat *rotationPos=cvCreateMat(3,1,CV_32FC1); //旋转变换 cv::Mat translationPos(3,1,CV_64F); translationPos=rotationPos+tranVector; //CvMat *translationPos=cvCreateMat(3,1,CV_32FC1); //位移变换 //std::cout<<"test img2world:"<<std::endl; //std::cout<<translationPos.at<double>(0,0)<<", "<<translationPos.at<double>(1,0)<<", "<<translationPos.at<double>(2,0)<<std::endl; cv::Point2f imgPos; //透视变换 iP.x=0.5*width+translationPos.at<double>(0,0)*focalLength/translationPos.at<double>(2,0); iP.y=0.5*height+translationPos.at<double>(1,0)*focalLength/translationPos.at<double>(2,0); //iP.x=0.5*width+cvmGet(translationPos,0,0)*focalLength/cvmGet(translationPos,2,0); //iP.y=0.5*height+cvmGet(translationPos,1,0)*focalLength/cvmGet(translationPos,2,0); //if(cvmGet(translationPos,2,0)<=focalLength) if(translationPos.at<double>(2,0)<=focalLength) { //cvReleaseMat(&originalPos); //cvReleaseMat(&translationPos); //cvReleaseMat(&rotationPos); return false; } //cvReleaseMat(&originalPos); //cvReleaseMat(&translationPos); //cvReleaseMat(&rotationPos); return true; }
// Converts an Assimp mesh into our Mesh representation: copies positions
// (rescaled via moveGenericVertex) and normals, gathers face indices, then
// applies a vertical "face warp" that stretches the regions around the
// eye/chin rows so they line up with the texture landmarks.
//
// mesh:  source Assimp mesh (positions and normals are consumed).
// scene: enclosing Assimp scene (currently unused here; kept for interface).
// name:  mesh name (currently unused here; kept for interface).
//
// Returns the fully built Mesh (vertices + indices).
Mesh Model::processMesh(aiMesh *mesh, const aiScene *scene, std::string name)
{
    calcScalingFactors();

    std::vector<Vertex> vertices;
    std::vector<GLuint> indices;
    vertices.reserve(mesh->mNumVertices); // single allocation; count is known up front

    // Copy the (moved/rescaled) positions and the normals.
    for (GLuint a = 0; a < mesh->mNumVertices; a++) {
        Vertex vertex;
        glm::vec3 originalPos(mesh->mVertices[a].x, mesh->mVertices[a].y, mesh->mVertices[a].z);
        vertex.position = glm::vec4(moveGenericVertex(originalPos), 1.0f);
        vertex.normal = glm::vec4(mesh->mNormals[a].x, mesh->mNormals[a].y, mesh->mNormals[a].z, 1.0f);
        vertices.push_back(vertex);
    }

    // Collect all the indices from the faces of the mesh.
    for (GLuint a = 0; a < mesh->mNumFaces; a++) {
        const aiFace &face = mesh->mFaces[a]; // reference: avoid copying the face's index array per iteration
        for (GLuint b = 0; b < face.mNumIndices; b++) {
            indices.push_back(face.mIndices[b]);
        }
    }

    // Vertical warp setup. y is scaled upside down; maxY/minY were measured
    // in Blender for this specific model and scaled by the model's y factor.
    const float maxY = 2.62698f * m_fy;  // taken from blender
    const float minY = -1.50149f * m_fy; // taken from blender
    const float eyeY = m_ModelInfo.leftEye.y * m_fy; // eye is more than just the middle point
    const float chinY = m_ModelInfo.chin.y * m_fy;

    // Texture landmark rows, in percent of the texture height.
    const float top = m_FaceCoords.getPoint(FaceCoordinates3d::TextureLeftEye).y;
    const float bot = m_FaceCoords.getPoint(FaceCoordinates3d::TextureChin).y;

    // Stretch factors for the band above the eye row and the band below the
    // chin row. NOTE(review): assumes eyeY < chinY because y is flipped —
    // confirm against the model's coordinate convention.
    const float factor = ((eyeY - chinY) * (1 + top)) / (maxY - chinY);
    const float factorBot = ((eyeY - chinY) * (1 + bot)) / (eyeY - minY);

    // Apply the warp. The small 0.002 epsilon keeps vertices exactly on the
    // eye row from being pulled into the upper band.
    for (Vertex &vertex : vertices) {
        if (vertex.position.y < (eyeY - 0.002)) {
            vertex.position.y *= factor;
        } else if (vertex.position.y > chinY) {
            vertex.position.y *= factorBot;
        }
    }

    return Mesh(vertices, indices);
}