void Mesh::computeFaceGradients(Eigen::MatrixXd& gradients, const Eigen::VectorXd& u) const
{
    for (FaceCIter f = faces.begin(); f != faces.end(); f++) {
        // Gradient of the piecewise linear function u on this face:
        // grad u = (1/2A) * sum over edges e of u_opp * (N x e),
        // where u_opp is the value at the vertex opposite edge e
        Eigen::Vector3d gradient;
        gradient.setZero();

        Eigen::Vector3d normal = f->normal();
        normal.normalize();

        HalfEdgeCIter he = f->he;
        do {
            double ui = u(he->next->next->vertex->index);
            Eigen::Vector3d ei = he->next->vertex->position - he->vertex->position;

            gradient += ui * normal.cross(ei);
            he = he->next;
        } while (he != f->he);

        gradient /= (2.0 * f->area());
        gradient.normalize();

        // Store the negated, normalized gradient direction
        gradients.row(f->index) = -gradient;
    }
}
void Camera::normalize()
{
    /* Gram–Schmidt process to orthonormalise vectors
       http://en.wikipedia.org/wiki/Gram%E2%80%93Schmidt_process#The_Gram.E2.80.93Schmidt_process */
    double sc = scalingCoefficient();

    Eigen::Vector3d x = d->modelview.linear().col(0);
    Eigen::Vector3d y = d->modelview.linear().col(1);
    Eigen::Vector3d z = d->modelview.linear().col(2);

    y -= y.dot(x) / x.dot(x) * x;
    z -= z.dot(x) / x.dot(x) * x;
    z -= z.dot(y) / y.dot(y) * y;

    x.normalize();
    y.normalize();
    z.normalize();

    x *= sc;
    y *= sc;
    z *= sc;

    d->modelview.linear().col(0) = x;
    d->modelview.linear().col(1) = y;
    d->modelview.linear().col(2) = z;
}
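For reference, here is a minimal standalone sketch of the same Gram–Schmidt step on an arbitrary 3x3 basis; the function name is illustrative and it omits the scaling coefficient handled by the Camera class above.

#include <Eigen/Dense>

// Illustrative sketch: orthonormalise the columns of a 3x3 basis in place,
// using the same projection/normalisation order as the snippet above.
inline void gramSchmidt3(Eigen::Matrix3d& basis)
{
    Eigen::Vector3d x = basis.col(0);
    Eigen::Vector3d y = basis.col(1);
    Eigen::Vector3d z = basis.col(2);

    y -= y.dot(x) / x.dot(x) * x;   // remove the x component from y
    z -= z.dot(x) / x.dot(x) * x;   // remove the x component from z
    z -= z.dot(y) / y.dot(y) * y;   // remove the (updated) y component from z

    basis.col(0) = x.normalized();
    basis.col(1) = y.normalized();
    basis.col(2) = z.normalized();
}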
static Eigen::Vector3d mapToSphere(Eigen::Matrix4d invProjMatrix,
                                   Eigen::Vector2d const p,
                                   Eigen::Vector3d viewSphereCenter,
                                   double viewSpaceRadius)
{
    Eigen::Vector4d viewP = invProjMatrix * Eigen::Vector4d(p(0), p(1), -1.0, 1.0);
    //viewP(0) /= viewP(3);
    //viewP(1) /= viewP(3);
    //viewP(2) /= viewP(3);

    Eigen::Vector3d rayOrigin(0.0, 0.0, 0.0);
    Eigen::Vector3d rayDir(viewP(0), viewP(1), viewP(2));
    rayDir.normalize();

    // Ray-sphere intersection: solve t^2 + 2*bPrime*t + c = 0 (a == 1 for a unit direction)
    Eigen::Vector3d CO = rayOrigin - viewSphereCenter;
    double a = 1.0;
    double bPrime = CO.dot(rayDir);
    double c = CO.dot(CO) - viewSpaceRadius * viewSpaceRadius;
    double delta = bPrime * bPrime - a * c;

    if (delta < 0.0) {
        // No intersection: intersect the ray with the plane through the sphere centre
        // (z = viewSphereCenter.z) and push the result back onto the sphere surface
        double t = -CO.z() / rayDir.z();
        Eigen::Vector3d point = rayOrigin + t * rayDir;
        Eigen::Vector3d dir = point - viewSphereCenter;
        dir.normalize();
        return viewSphereCenter + dir * viewSpaceRadius;
    }

    double root[2] = { (-bPrime - sqrt(delta)) / a, (-bPrime + sqrt(delta)) / a };
    if (root[0] >= 0.0) {
        Eigen::Vector3d intersectionPoint = rayOrigin + root[0] * rayDir;
        return intersectionPoint;
    } else if (root[1] >= 0.0) {
        Eigen::Vector3d intersectionPoint = rayOrigin + root[1] * rayDir;
        return intersectionPoint;
    } else {
        // Both intersections lie behind the ray origin: fall back as above
        double t = -CO.z() / rayDir.z();
        Eigen::Vector3d point = rayOrigin + t * rayDir;
        Eigen::Vector3d dir = point - viewSphereCenter;
        dir.normalize();
        return viewSphereCenter + dir * viewSpaceRadius;
    }
}
Eigen::MatrixXd doubleLayer(const SphericalDiffuse<PurisimaIntegrator, ProfilePolicy> & gf,
                            const std::vector<Element> & e) const
{
    // The singular part is "integrated" as usual, while the nonsingular part is evaluated in full
    size_t mat_size = e.size();
    Eigen::MatrixXd D = Eigen::MatrixXd::Zero(mat_size, mat_size);
    for (size_t i = 0; i < mat_size; ++i) {
        // Fill diagonal
        double area = e[i].area();
        double radius = e[i].sphere().radius;
        // Diagonal of S inside the cavity
        double Sii_I = factor * std::sqrt(4 * M_PI / area);
        // Diagonal of D inside the cavity
        double Dii_I = -factor * std::sqrt(M_PI / area) * (1.0 / radius);
        // "Diagonal" of Coulomb singularity separation coefficient
        double coulomb_coeff = gf.coefficientCoulomb(e[i].center(), e[i].center());
        // "Diagonal" of the directional derivative of the Coulomb singularity separation coefficient
        double coeff_grad = gf.coefficientCoulombDerivative(e[i].normal(), e[i].center(), e[i].center())
                            / std::pow(coulomb_coeff, 2);
        // "Diagonal" of the directional derivative of the image Green's function
        double image_grad = gf.imagePotentialDerivative(e[i].normal(), e[i].center(), e[i].center());
        double eps_r2 = 0.0;
        pcm::tie(eps_r2, pcm::ignore) = gf.epsilon(e[i].center());
        D(i, i) = eps_r2 * (Dii_I / coulomb_coeff - Sii_I * coeff_grad + image_grad);

        Eigen::Vector3d source = e[i].center();
        for (size_t j = 0; j < mat_size; ++j) {
            // Fill off-diagonal
            Eigen::Vector3d probe = e[j].center();
            Eigen::Vector3d probeNormal = e[j].normal();
            probeNormal.normalize();
            if (i != j)
                D(i, j) = gf.kernelD(probeNormal, source, probe);
        }
    }
    return D;
}
double PairRelationDetector::RefPairModifier::operator() (Structure::Node *n1, Eigen::MatrixXd& m1,
                                                          Structure::Node *n2, Eigen::MatrixXd& m2)
{
    double deviation(-1.0), min_dist, mean_dist, max_dist;
    if (n1->id == n2->id)
        return deviation;

    // Reflect the second node's vertices across the plane bisecting the two node centres
    Eigen::Vector3d refCenter = (n1->center() + n2->center()) * 0.5;
    Eigen::Vector3d refNormal = n1->center() - n2->center();
    refNormal.normalize();

    Eigen::MatrixXd newverts2;
    reflect_points3d(m2, refCenter, refNormal, newverts2);

    distanceBetween(m1, newverts2, min_dist, mean_dist, max_dist);
    deviation = 2 * mean_dist / (n1->bbox().diagonal().norm() + n2->bbox().diagonal().norm());

    if (deviation > maxAllowDeviation_) {
        maxAllowDeviation_ = deviation;
    }
    return deviation;
}
bool
pcl::EnsensoGrabber::setExtrinsicCalibration (const double euler_angle, Eigen::Vector3d &rotation_axis,
                                              const Eigen::Vector3d &translation, const std::string target)
{
  if (!device_open_)
    return (false);

  if (rotation_axis != Eigen::Vector3d (0, 0, 0))  // Otherwise the vector becomes NaN
    rotation_axis.normalize ();
  try
  {
    NxLibItem calibParams = camera_[itmLink];
    calibParams[itmTarget].set (target);
    calibParams[itmRotation][itmAngle].set (euler_angle);
    calibParams[itmRotation][itmAxis][0].set (rotation_axis[0]);
    calibParams[itmRotation][itmAxis][1].set (rotation_axis[1]);
    calibParams[itmRotation][itmAxis][2].set (rotation_axis[2]);
    calibParams[itmTranslation][0].set (translation[0] * 1000.0);  // Convert to millimeters
    calibParams[itmTranslation][1].set (translation[1] * 1000.0);
    calibParams[itmTranslation][2].set (translation[2] * 1000.0);
  }
  catch (NxLibException &ex)
  {
    ensensoExceptionHandling (ex, "setExtrinsicCalibration");
    return (false);
  }
  return (true);
}
void ShapeTextureFeature::add( const Eigen::Matrix< double, 6, 1 >& p_src, const Eigen::Matrix< double, 6, 1 >& p_dst,
                               const Eigen::Vector3d& n_src, const Eigen::Vector3d& n_dst, float weight )
{
    // surflet pair relation as in "model globally match locally"
    const Eigen::Vector3d& p1 = p_src.block<3,1>(0,0);
    const Eigen::Vector3d& p2 = p_dst.block<3,1>(0,0);
    const Eigen::Vector3d& n1 = n_src;
    const Eigen::Vector3d& n2 = n_dst;

    Eigen::Vector3d d = p2 - p1;
    d.normalize();

    // Shape bins: dot products of the normals with the connecting direction (and with each other),
    // mapped from [-1,1] into the table index range
    const int s1 = std::min( (SHAPE_TEXTURE_TABLE_SIZE-1), std::max( 0, (int)round((SHAPE_TEXTURE_TABLE_SIZE-1.0) * 0.5 * (n1.dot( d ) + 1.0) ) ) );
    const int s2 = std::min( (SHAPE_TEXTURE_TABLE_SIZE-1), std::max( 0, (int)round((SHAPE_TEXTURE_TABLE_SIZE-1.0) * 0.5 * (n2.dot( d ) + 1.0) ) ) );
    const int s3 = std::min( (SHAPE_TEXTURE_TABLE_SIZE-1), std::max( 0, (int)round((SHAPE_TEXTURE_TABLE_SIZE-1.0) * 0.5 * (n1.dot( n2 ) + 1.0) ) ) );

    shape_.block<1,NUM_SHAPE_BINS>(0,0) += weight * ShapeTextureTable::instance()->shape_value_table_[0][s1];
    shape_.block<1,NUM_SHAPE_BINS>(1,0) += weight * ShapeTextureTable::instance()->shape_value_table_[1][s2];
    shape_.block<1,NUM_SHAPE_BINS>(2,0) += weight * ShapeTextureTable::instance()->shape_value_table_[2][s3];

    // Texture bins: per-channel colour differences mapped into the table index range
    const int c1 = std::min( (SHAPE_TEXTURE_TABLE_SIZE-1), std::max( 0, (int)round((SHAPE_TEXTURE_TABLE_SIZE-1.0) * 0.5  * ((p_dst(3,0) - p_src(3,0)) + 1.0) ) ) );
    const int c2 = std::min( (SHAPE_TEXTURE_TABLE_SIZE-1), std::max( 0, (int)round((SHAPE_TEXTURE_TABLE_SIZE-1.0) * 0.25 * ((p_dst(4,0) - p_src(4,0)) + 2.0) ) ) );
    const int c3 = std::min( (SHAPE_TEXTURE_TABLE_SIZE-1), std::max( 0, (int)round((SHAPE_TEXTURE_TABLE_SIZE-1.0) * 0.25 * ((p_dst(5,0) - p_src(5,0)) + 2.0) ) ) );

    texture_.block<1,NUM_TEXTURE_BINS>(0,0) += weight * ShapeTextureTable::instance()->texture_value_table_[0][c1];
    texture_.block<1,NUM_TEXTURE_BINS>(1,0) += weight * ShapeTextureTable::instance()->texture_value_table_[1][c2];
    texture_.block<1,NUM_TEXTURE_BINS>(2,0) += weight * ShapeTextureTable::instance()->texture_value_table_[2][c3];

    num_points_ += weight;
}
void cxy_CAD_helper::getNormal( geometry_msgs::Point const& v1
                              , geometry_msgs::Point const& v2
                              , geometry_msgs::Point const& v3
                              , geometry_msgs::Vector3 & normal)
{
    Eigen::Vector3d vec1, vec2;
    Eigen::Vector3d ev1, ev2, ev3;
    Eigen::Vector3d enormal;

    ev1 = Eigen::Vector3d(v1.x, v1.y, v1.z);
    ev2 = Eigen::Vector3d(v2.x, v2.y, v2.z);
    ev3 = Eigen::Vector3d(v3.x, v3.y, v3.z);

    vec1 = ev2 - ev1;
    vec2 = ev3 - ev1;

    try
    {
        enormal = vec1.cross(vec2);
        enormal.normalize();

        normal.x = enormal(0);
        normal.y = enormal(1);
        normal.z = enormal(2);
    }
    catch (...)
    {
        ROS_WARN_STREAM("cxy_CAD_helper::getNormal exception. Block 1.");
    }
}
/**
 * \ingroup GLVisualization
 * Apply the computed rotation matrix.
 * This method must be invoked inside the \code glutDisplayFunc() \endcode
 **/
void Arcball::applyRotationMatrix()
{
    glMultMatrixf(startMatrix);

    if (isRotating) {
        // Do some rotation according to start and current rotation vectors
        //cerr << currentRotationVector.transpose() << " " << startRotationVector.transpose() << endl;
        if ((currentRotationVector - startRotationVector).norm() > 1E-6) {
            Eigen::Vector3d rotationAxis = currentRotationVector.cross(startRotationVector);

            Eigen::Matrix3d currentMatrix;
            for (int i = 0; i < 3; i++) {
                for (int j = 0; j < 3; j++) {
                    currentMatrix(i, j) = (double)startMatrix[4 * i + j];
                }
            }

            rotationAxis = currentMatrix * rotationAxis;  // linear transformation
            rotationAxis.normalize();

            double val = currentRotationVector.dot(startRotationVector);
            if (val > 1.0 - 1E-10)
                val = 1.0;  // clamp to the valid acos domain (both rotation vectors are unit length)
            double rotationAngle = acos(val) * 180.0 / M_PI;

            // rotate around the current position
            glRotatef(rotationAngle * 2, -rotationAxis.x(), -rotationAxis.y(), -rotationAxis.z());
        }
    }
}
// draw a 3D arrow starting from pt along dir, the arrowhead is on the other end
void drawArrow3D(const Eigen::Vector3d& _pt, const Eigen::Vector3d& _dir,
                 const double _length, const double _thickness,
                 const double _arrowThickness)
{
    Eigen::Vector3d normDir = _dir;
    normDir.normalize();

    double arrowLength;
    if (_arrowThickness == -1)
        arrowLength = 4 * _thickness;
    else
        arrowLength = 2 * _arrowThickness;

    // draw the arrow body as a cylinder
    GLUquadricObj *c;
    c = gluNewQuadric();
    gluQuadricDrawStyle(c, GLU_FILL);
    gluQuadricNormals(c, GLU_SMOOTH);

    glPushMatrix();
    glTranslatef(_pt[0], _pt[1], _pt[2]);
    // rotate the cylinder's +z axis onto normDir
    glRotated(acos(normDir[2]) * 180 / M_PI, -normDir[1], normDir[0], 0);
    gluCylinder(c, _thickness, _thickness, _length - arrowLength, 16, 16);

    // draw the arrowhead as a cone
    glPushMatrix();
    glTranslatef(0, 0, _length - arrowLength);
    gluCylinder(c, arrowLength * 0.5, 0.0, arrowLength, 10, 10);
    glPopMatrix();

    glPopMatrix();
    gluDeleteQuadric(c);
}
//==============================================================================
Eigen::MatrixXd ContactConstraint::getTangentBasisMatrixODE(const Eigen::Vector3d& _n)
{
    // TODO(JS): Use mNumFrictionConeBases
    // Check if the number of bases is an even number.
    // bool isEvenNumBases = mNumFrictionConeBases % 2 ? true : false;

    Eigen::MatrixXd T(Eigen::MatrixXd::Zero(3, 2));

    // Pick an arbitrary vector to take the cross product of (in this case, Z-axis)
    Eigen::Vector3d tangent = mFirstFrictionalDirection.cross(_n);

    // TODO(JS): Modify following lines once _updateFirstFrictionalDirection() is implemented.
    // If they're too close, pick another tangent (use X-axis as arbitrary vector)
    if (tangent.norm() < DART_CONTACT_CONSTRAINT_EPSILON)
        tangent = Eigen::Vector3d::UnitX().cross(_n);

    tangent.normalize();

    // Rotate the tangent around the normal to compute bases.
    // Note: a possible speedup is in place for mNumDir % 2 = 0
    // Each basis and its opposite belong in the matrix, so we iterate half as many times
    T.col(0) = tangent;
    T.col(1) = Eigen::Quaterniond(Eigen::AngleAxisd(DART_PI_HALF, _n)) * tangent;
    return T;
}
Eigen::Vector3d QuaternionToExponentialMap(const Eigen::Quaterniond& quaternion)
{
    Eigen::Vector3d vec = quaternion.vec();
    if (vec.norm() < ITOMP_EPS)
        return Eigen::Vector3d::Zero();

    // Rotation angle and unit axis encoded by the quaternion
    double theta = 2.0 * std::acos(quaternion.w());
    vec.normalize();
    return theta * vec;
}
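For completeness, a minimal sketch of the inverse mapping (exponential-map vector back to a quaternion) using Eigen's AngleAxisd; the function name and epsilon threshold are illustrative and not part of the original code.

#include <Eigen/Geometry>

// Illustrative inverse of the function above: rebuild a unit quaternion from an
// axis-angle ("exponential map") vector.
Eigen::Quaterniond ExponentialMapToQuaternion(const Eigen::Vector3d& expMap)
{
    const double theta = expMap.norm();
    if (theta < 1e-12)
        return Eigen::Quaterniond::Identity();  // near-zero rotation
    return Eigen::Quaterniond(Eigen::AngleAxisd(theta, expMap / theta));
}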
template <typename PointInT, typename PointOutT> void
pcl::MovingLeastSquares<PointInT, PointOutT>::projectPointToMLSSurface (float &u_disp, float &v_disp,
                                                                        Eigen::Vector3d &u, Eigen::Vector3d &v,
                                                                        Eigen::Vector3d &plane_normal,
                                                                        Eigen::Vector3d &mean,
                                                                        float &curvature,
                                                                        Eigen::VectorXd &c_vec,
                                                                        int num_neighbors,
                                                                        PointOutT &result_point,
                                                                        pcl::Normal &result_normal) const
{
  double n_disp = 0.0f;
  double d_u = 0.0f, d_v = 0.0f;

  // HARDCODED 5*nr_coeff_ to guarantee that the computed polynomial had a proper point set basis
  if (polynomial_fit_ && num_neighbors >= 5*nr_coeff_ && pcl_isfinite (c_vec[0]))
  {
    // Compute the displacement along the normal using the fitted polynomial
    // and compute the partial derivatives needed for estimating the normal
    int j = 0;
    float u_pow = 1.0f, v_pow = 1.0f, u_pow_prev = 1.0f, v_pow_prev = 1.0f;
    for (int ui = 0; ui <= order_; ++ui)
    {
      v_pow = 1;
      for (int vi = 0; vi <= order_ - ui; ++vi)
      {
        // Compute displacement along normal
        n_disp += u_pow * v_pow * c_vec[j++];

        // Compute partial derivatives
        if (ui >= 1)
          d_u += c_vec[j-1] * ui * u_pow_prev * v_pow;
        if (vi >= 1)
          d_v += c_vec[j-1] * vi * u_pow * v_pow_prev;

        v_pow_prev = v_pow;
        v_pow *= v_disp;
      }
      u_pow_prev = u_pow;
      u_pow *= u_disp;
    }
  }

  result_point.x = static_cast<float> (mean[0] + u[0] * u_disp + v[0] * v_disp + plane_normal[0] * n_disp);
  result_point.y = static_cast<float> (mean[1] + u[1] * u_disp + v[1] * v_disp + plane_normal[1] * n_disp);
  result_point.z = static_cast<float> (mean[2] + u[2] * u_disp + v[2] * v_disp + plane_normal[2] * n_disp);

  Eigen::Vector3d normal = plane_normal - d_u * u - d_v * v;
  normal.normalize ();

  result_normal.normal_x = static_cast<float> (normal[0]);
  result_normal.normal_y = static_cast<float> (normal[1]);
  result_normal.normal_z = static_cast<float> (normal[2]);
  result_normal.curvature = curvature;
}
double EUTelGeometryTelescopeGeoDescription::FindRad(Eigen::Vector3d const & startPt, Eigen::Vector3d const & endPt)
{
    Eigen::Vector3d track = endPt - startPt;
    double length = track.norm();
    track.normalize();

    double snext;
    Eigen::Vector3d point;
    Eigen::Vector3d direction;
    double epsil = 0.00001;
    double rad = 0.;
    double propagatedDistance = 0;
    bool reachedEnd = false;
    TGeoMedium* med;

    gGeoManager->InitTrack(startPt(0), startPt(1), startPt(2), track(0), track(1), track(2));
    TGeoNode* nextnode = gGeoManager->GetCurrentNode();

    while (nextnode && !reachedEnd)
    {
        med = nullptr;
        if (nextnode)
            med = nextnode->GetVolume()->GetMedium();

        nextnode = gGeoManager->FindNextBoundaryAndStep(length);
        snext = gGeoManager->GetStep();

        if (propagatedDistance + snext >= length)
        {
            snext = length - propagatedDistance;
            reachedEnd = true;
        }

        // snext gets very small at a transition into the next node; in this case we manually
        // propagate a small (epsil) step in the direction of propagation. This introduces a
        // small systematic error that depends on the size of epsil.
        if (snext < 1.e-8)
        {
            const double* currDir = gGeoManager->GetCurrentDirection();
            const double* currPt = gGeoManager->GetCurrentPoint();
            direction(0) = currDir[0];
            direction(1) = currDir[1];
            direction(2) = currDir[2];
            point(0) = currPt[0];
            point(1) = currPt[1];
            point(2) = currPt[2];
            point = point + epsil * direction;

            gGeoManager->CdTop();
            nextnode = gGeoManager->FindNode(point(0), point(1), point(2));
            snext = epsil;
        }

        if (med)
        {
            // ROOT returns the rad length in cm while we use mm, therefore factor of 10
            double radlen = med->GetMaterial()->GetRadLen();
            if (radlen > 1.e-5 && radlen < 1.e10)
            {
                rad += snext / (radlen * 10);
            }
        }
        propagatedDistance += snext;
    }
    return rad;
}
//--------------------------------------------------------------------------------------------------
void Camera::updatePickLines()
{
    // Now draw lines from the pick points
    double halfVerticalAngle = 0.5 * Utilities::degToRad( mpVtkCamera->GetViewAngle() );
    double verticalLength = tan( halfVerticalAngle );
    double horizontalLength = verticalLength * (mImageWidth / mImageHeight);

    Eigen::Vector3d cameraPos;
    Eigen::Vector3d cameraAxisX;
    Eigen::Vector3d cameraAxisY;
    Eigen::Vector3d cameraAxisZ;
    mpVtkCamera->GetPosition( cameraPos[ 0 ], cameraPos[ 1 ], cameraPos[ 2 ] );
    mpVtkCamera->GetDirectionOfProjection( cameraAxisZ[ 0 ], cameraAxisZ[ 1 ], cameraAxisZ[ 2 ] );
    mpVtkCamera->GetViewUp( cameraAxisY[ 0 ], cameraAxisY[ 1 ], cameraAxisY[ 2 ] );
    cameraAxisX = cameraAxisY.cross( cameraAxisZ );

    vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
    vtkSmartPointer<vtkCellArray> lines = vtkSmartPointer<vtkCellArray>::New();

    for ( uint32_t pickPointIdx = 0; pickPointIdx < mPickPoints.size(); pickPointIdx++ )
    {
        const Eigen::Vector2d& p = mPickPoints[ pickPointIdx ];

        Eigen::Vector3d startPos = cameraPos;
        Eigen::Vector3d rayDir = cameraAxisZ
            - p[ 0 ]*horizontalLength*cameraAxisX
            - p[ 1 ]*verticalLength*cameraAxisY;
        rayDir.normalize();
        Eigen::Vector3d endPos = startPos + 2.0*rayDir;

        // Create the line
        points->InsertNextPoint( startPos.data() );
        points->InsertNextPoint( endPos.data() );

        vtkSmartPointer<vtkLine> line = vtkSmartPointer<vtkLine>::New();
        line->GetPointIds()->SetId( 0, 2*pickPointIdx );
        line->GetPointIds()->SetId( 1, 2*pickPointIdx + 1 );

        // Store the line
        lines->InsertNextCell( line );
    }

    // Create a polydata to store everything in
    vtkSmartPointer<vtkPolyData> linesPolyData = vtkSmartPointer<vtkPolyData>::New();

    // Add the points and lines to the dataset
    linesPolyData->SetPoints( points );
    linesPolyData->SetLines( lines );

    mpPickLinesMapper->SetInput( linesPolyData );
}
void randomUnitSphere(Eigen::Vector3d &sp)
{
    static boost::random::mt19937 rng(time(NULL));
    static boost::random::normal_distribution<> normal;

    // Sample a Gaussian vector and normalize it: the result is uniformly distributed
    // on the unit sphere. Reject near-zero vectors to avoid dividing by (almost) zero.
    do
    {
        sp(0) = normal(rng);
        sp(1) = normal(rng);
        sp(2) = normal(rng);
    }
    while (sp.norm() < 0.00001);
    sp.normalize();
}
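A minimal usage sketch for the sampler above; the main() wrapper and the sample count are illustrative only.

#include <iostream>
#include <Eigen/Core>

int main()
{
    // Draw a few random directions and confirm they come out unit length.
    for (int i = 0; i < 5; ++i)
    {
        Eigen::Vector3d dir;
        randomUnitSphere(dir);
        std::cout << dir.transpose() << "  |dir| = " << dir.norm() << std::endl;
    }
    return 0;
}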
Eigen::Vector3d HoleFinder::getRandomPoint()
{
    Q_D(HoleFinder);

    // Randomly select a hole
    const Hole &hole = d->getRandomHole();

    // Trim the radius of the sphere by 1.5 units. If the sphere is smaller than
    // 1.5 units, trim it to 0.5 units.
    const double rmax = (hole.radius >= d->minDist) ? hole.radius - d->minDist : 0.5;

    // Randomly select a point from a uniform distribution around the sphere. The radius
    // is chosen such that
    //
    //   P(r) [is prop. to] Area(r) = 4*pi*r^2, or
    //   P(r) = N * 4*pi*r^2
    //
    // We want P(r) to lie between [0,1] and r to be between [0, rmax].
    //
    // Lower boundary conditions are trivially satisfied:
    //
    //   P(0) = N * 4*pi*0^2 = 0
    //
    // Upper boundary conditions:
    //
    //   P(rmax) = 1 = N * 4*pi*rmax^2, or
    //   N = 1 / (4*pi*rmax^2)
    //
    // So P(r) simplifies to
    //
    //   P(r) = (1/(4*pi*rmax^2)) * 4*pi*r^2 = r^2 / (rmax^2)
    //
    // So we can pick a "p" value from a uniform distribution from [0,1] and
    // transform it to a random radius that is uniformly distributed throughout
    // the sphere by
    const double p = RANDDOUBLE();
    const double r = sqrt(p) * rmax;

    // A random unit vector representing the displacement:
    Eigen::Vector3d displacement (2.0 * RANDDOUBLE() - 1.0,
                                  2.0 * RANDDOUBLE() - 1.0,
                                  2.0 * RANDDOUBLE() - 1.0);
    displacement.normalize();

    // Put the two together
    displacement *= r;

    // Shift by the hole's origin
    const Eigen::Vector3d ret (hole.center + displacement);

    // Ta-da!
    return ret;
}
SpotLightParameterEstimationClass::InputForExponentCalculation*
SpotLightParameterEstimationClass::GetSpotLightExponentInputParameters()
{
    if (_inputSetter != nullptr) {
        return _inputSetter;
    }

    const LightEntityClass& light = _lightSystem->GetCurrentLight();
    double materialAlbedo = 0.5;

    // Light
    double lightIntensity = _lightSystem->GetCurrentLightIntensity();
    Eigen::Vector3d lightDirection = -light.GetDirectionVector();
    lightDirection.normalize();
    Eigen::Vector3d lightPosition = light.GetPosition();
    double innerconeangle = light.GetSpotLightInnerConeAngle() * M_PI / 180;
    double outerconeangle = light.GetSpotLightOuterConeAngle() * M_PI / 180;
    double gammaCorrection = light.GetGammaCorrection();
    double cosOfInnerConeAngle = cos(innerconeangle);
    double cosOfOuterConeAngle = cos(outerconeangle);

    // Geometry
    const GeometryEntityClass& geometryEntity = _geometrySystem->GetCurrentGeometry();
    // Point3D<double> vertex = geometryEntity.GetAVertex();
    std::vector<Eigen::Vector3d> vertexNormals = geometryEntity.GetVertexNormals();
    Eigen::Vector3d vnormal = vertexNormals[0];
    vnormal.normalize();

    // Image
    _imageWidth = (int)_imageSystem->GetCurrentImageWidth();
    _imageHeight = (int)_imageSystem->GetCurrentImageHeight();

    ReprojectionClass* reprojectionClass = new ReprojectionClass();
    std::vector<MapOFImageAndWorldPoints> reprojectedPoints;
    // Uncomment this if we are going to pass in Array<RGBA> of the pixels that we obtained from the circumference of the illumination
    //reprojectedPoints = reprojectionClass->ReprojectImagePixelsTo3DGeometry((_imageSystem->GetCurrentImage()).GetImage2DArrayPixels());
    reprojectedPoints = reprojectionClass->ReprojectImagePixelsTo3DGeometry();

    _inputSetter = new InputForExponentCalculation(_imageWidth, _imageHeight, lightIntensity,
                                                   lightDirection, lightPosition, gammaCorrection,
                                                   vnormal, cosOfInnerConeAngle, cosOfOuterConeAngle,
                                                   materialAlbedo, reprojectedPoints);
    delete reprojectionClass;
    return _inputSetter;
}
double Model::translateRandom(double maxTranslation, double minTranslation)
{
    std::uniform_real_distribution<double> rand_double_min_max(minTranslation, maxTranslation);
    double translation = rand_double_min_max(m_random);

    // Random direction scaled to the sampled translation length
    Eigen::Vector3d translationVec = Eigen::Vector3d::Random();
    translationVec.normalize();
    translationVec *= translation;

    for (unsigned int i = 0; i < m_pMesh->vertices().size(); i++)
        m_pMesh->vertices()[i].translate(translationVec[0], translationVec[1], translationVec[2]);

    analyzeMesh();
    return translation;
}
Eigen::Vector4d Face::plane() const
{
    Eigen::Vector3d a = he->vertex->position;
    Eigen::Vector3d n = normal();
    n.normalize();

    // Plane coefficients (n, -n.dot(a)): points x on the face satisfy n.dot(x) - n.dot(a) == 0
    Eigen::Vector4d p;
    p << n.x(), n.y(), n.z(), -n.dot(a);
    return p;
}
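A small sketch of how the returned coefficients could be used to evaluate the signed distance of a point from the face's plane; the helper name is illustrative and not part of the mesh class.

#include <Eigen/Core>

// Signed distance of a point from a plane stored as (nx, ny, nz, d) with d = -n.dot(a).
// Positive on the side the unit normal points toward.
inline double signedDistanceToPlane(const Eigen::Vector4d& plane, const Eigen::Vector3d& point)
{
    return plane.head<3>().dot(point) + plane(3);
}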
void Model::compute_NORMALS()
{
    // Reset all per-vertex normals
    mesh.normals_Vertices.fill( Eigen::Vector3d::Zero() );

    // Accumulate each triangle's unit normal onto its three vertices
    for (int tr = 0; tr < totalTriangles; tr++)
    {
        int vertexIndex_1 = mesh.triangles[tr].vertexID_1;
        int vertexIndex_2 = mesh.triangles[tr].vertexID_2;
        int vertexIndex_3 = mesh.triangles[tr].vertexID_3;

        Eigen::Vector3d VertexA;
        Eigen::Vector3d VertexB;
        Eigen::Vector3d VertexC;
        Eigen::Vector3d vect_1;
        Eigen::Vector3d vect_2;
        Eigen::Vector3d currNormal;

        VertexA << mesh.verticesWeighted[vertexIndex_1];   // 0
        VertexB << mesh.verticesWeighted[vertexIndex_2];   // 1
        VertexC << mesh.verticesWeighted[vertexIndex_3];   // 2

        vect_1 = VertexB - VertexA;   // 1 - 0
        vect_2 = VertexC - VertexA;   // 2 - 0

        currNormal = vect_1.cross(vect_2);
        currNormal.normalize();

        mesh.normals_Vertices[ vertexIndex_1 ] += currNormal;
        mesh.normals_Vertices[ vertexIndex_2 ] += currNormal;
        mesh.normals_Vertices[ vertexIndex_3 ] += currNormal;
    }

    // Renormalize the accumulated vertex normals
    for (int vvv = 0; vvv < totalVertices; vvv++)
        mesh.normals_Vertices[vvv].normalize();
}
void TetrahedralMesh::computeFaceNormals()
{
    std::vector<Eigen::Vector3d> tri(3);
    std::vector<Eigen::Vector3d> normals(4);
    face_normals_.resize(tets_.size());

    for (int i = 0; i < (int)tets_.size(); i++) {
        // Compute the unit normal of each of the four faces of tetrahedron i
        for (int j = 0; j < 4; j++) {
            tri[0] = points_[tets_[i][faces[j][0]]];
            tri[1] = points_[tets_[i][faces[j][1]]];
            tri[2] = points_[tets_[i][faces[j][2]]];

            Eigen::Vector3d n = (tri[1] - tri[0]).cross(tri[2] - tri[0]);
            n.normalize();
            normals[j] = n;
        }
        face_normals_[i] = normals;
    }
}
Eigen::Matrix3d MutualPoseEstimation::vrrotvec2mat(double p, Eigen::Vector3d r)
{
    // Rodrigues' rotation formula: rotation by angle p around the (normalized) axis r
    double s = sin(p);
    double c = cos(p);
    double t = 1 - c;

    r.normalize();
    Eigen::Vector3d n = r;

    double x = n[0];
    double y = n[1];
    double z = n[2];

    Eigen::Matrix3d m;
    m(0,0) = t*x*x + c;    m(0,1) = t*x*y - s*z;  m(0,2) = t*x*z + s*y;
    m(1,0) = t*x*y + s*z;  m(1,1) = t*y*y + c;    m(1,2) = t*y*z - s*x;
    m(2,0) = t*x*z - s*y;  m(2,1) = t*y*z + s*x;  m(2,2) = t*z*z + c;
    return m;
}
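Eigen can build the same rotation matrix directly from an angle-axis pair; a minimal sketch for comparison, with illustrative variable values.

#include <Eigen/Geometry>
#include <iostream>

int main()
{
    const double angle = 0.3;                 // radians
    const Eigen::Vector3d axis(1.0, 2.0, 2.0);

    // AngleAxisd expects a unit axis and yields the same matrix as the
    // hand-written Rodrigues formula above.
    Eigen::Matrix3d R = Eigen::AngleAxisd(angle, axis.normalized()).toRotationMatrix();
    std::cout << R << std::endl;
    return 0;
}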
bool
pcl::EnsensoGrabber::setExtrinsicCalibration (const double euler_angle, Eigen::Vector3d &rotation_axis,
                                              const Eigen::Vector3d &translation, const std::string target)
{
  if (!device_open_)
    return (false);

  if (rotation_axis != Eigen::Vector3d (0, 0, 0))  // Otherwise the vector becomes NaN
    rotation_axis.normalize ();
  try
  {
    NxLibItem calibParams = camera_[itmLink];
    calibParams[itmTarget].set (target);
    calibParams[itmRotation][itmAngle].set (euler_angle);

    /* FIXME: Bug in Ensenso API: http://ensenso.de/manual/index.html?about.htm see version 1.2.125
       The re-normalisation is still not working properly; when it does, use this code rather than the workaround:
       calibParams[itmRotation][itmAxis][0].set (rotation_axis[0]);
       calibParams[itmRotation][itmAxis][1].set (rotation_axis[1]);
       calibParams[itmRotation][itmAxis][2].set (rotation_axis[2]); */

    // Workaround: write the axis components as JSON strings
    std::string axis_x, axis_y, axis_z;
    axis_x = boost::lexical_cast<std::string> (rotation_axis[0]);
    axis_y = boost::lexical_cast<std::string> (rotation_axis[1]);
    axis_z = boost::lexical_cast<std::string> (rotation_axis[2]);

    calibParams[itmRotation][itmAxis][0].setJson (axis_x);
    calibParams[itmRotation][itmAxis][1].setJson (axis_y);
    calibParams[itmRotation][itmAxis][2].setJson (axis_z);
    // End of workaround

    calibParams[itmTranslation][0].set (translation[0] * 1000.0);  // Convert to millimeters
    calibParams[itmTranslation][1].set (translation[1] * 1000.0);
    calibParams[itmTranslation][2].set (translation[2] * 1000.0);
  }
  catch (NxLibException &ex)
  {
    ensensoExceptionHandling (ex, "setExtrinsicCalibration");
    return (false);
  }
  return (true);
}
std::vector<edge*> surface::getTrailingEdges()
{
    if (!LSflag)
    {
        std::vector<edge*> empty;
        return empty;
    }
    else if (trailingEdges.size() > 0)
    {
        return trailingEdges;
    }

    edge* TE;
    for (int i = 0; i < panels.size(); i++)
    {
        if (panels[i]->isTEpanel())
        {
            TE = panels[i]->getTrailingEdge();
            trailingEdges.push_back(TE);
        }
    }

    // Sort edges so that the streamlines can start tracing at one side of the surface and progress,
    // keeping track of neighboring streamlines.
    Eigen::Vector3d vec;
    vec = trailingEdges[0]->getVector();
    vec.normalize();

    if (vec(1) >= 0.01)
    {
        // Sort by Y direction (edges are primarily lateral)
        std::sort(trailingEdges.begin(), trailingEdges.end(),
                  [](edge* e1, edge* e2) { return e1->getMidPoint()(1) < e2->getMidPoint()(1); });
    }
    else if (vec(1) < 0.01 && vec(2) >= 0.01)
    {
        // Sort by Z direction (edges are primarily vertical)
        std::sort(trailingEdges.begin(), trailingEdges.end(),
                  [](edge* e1, edge* e2) { return e1->getMidPoint()(2) < e2->getMidPoint()(2); });
    }
    else
    {
        // Sort by X direction (edges are primarily streamwise)
        std::sort(trailingEdges.begin(), trailingEdges.end(),
                  [](edge* e1, edge* e2) { return e1->getMidPoint()(2) < e2->getMidPoint()(2); });
    }
    return trailingEdges;
}
/*! Returns off-diagonal elements of the matrix representation of the double layer operator by collocation
 *  \param[in] elements list of finite elements
 *  \param[in] kernD function for the evaluation of the off-diagonal of D
 */
Eigen::MatrixXd offDiagonalD(const std::vector<Element> & elements,
                             const integrator::KernelD & kernD) const
{
    size_t mat_size = elements.size();
    Eigen::MatrixXd D = Eigen::MatrixXd::Zero(mat_size, mat_size);
    for (size_t i = 0; i < mat_size; ++i) {
        Eigen::Vector3d source = elements[i].center();
        for (size_t j = 0; j < mat_size; ++j) {
            // Fill off-diagonal
            Eigen::Vector3d probe = elements[j].center();
            Eigen::Vector3d probeNormal = elements[j].normal();
            probeNormal.normalize();
            if (i != j)
                D(i, j) = kernD(probeNormal, source, probe);
        }
    }
    return D;
}
/*! Returns matrix representation of the double layer operator by collocation
 *  \param[in] elements list of finite elements
 *  \param[in] diagD functor for the evaluation of the diagonal of D
 *  \param[in] kernD function for the evaluation of the off-diagonal of D
 */
inline Eigen::MatrixXd doubleLayer(const std::vector<Element> & elements,
                                   const Diagonal & diagD,
                                   const KernelD & kernD)
{
    size_t mat_size = elements.size();
    Eigen::MatrixXd D = Eigen::MatrixXd::Zero(mat_size, mat_size);
    for (size_t i = 0; i < mat_size; ++i) {
        // Fill diagonal
        D(i, i) = diagD(elements[i]);
        Eigen::Vector3d source = elements[i].center();
        for (size_t j = 0; j < mat_size; ++j) {
            // Fill off-diagonal
            Eigen::Vector3d probe = elements[j].center();
            Eigen::Vector3d probeNormal = elements[j].normal();
            probeNormal.normalize();
            if (i != j)
                D(i, j) = kernD(probeNormal, source, probe);
        }
    }
    return D;
}
void NuTo::CollidableWallBase::VisualizationStatic(NuTo::Visualize::UnstructuredGrid& rVisualizer) const
{
    double size = 1.;
    if (*mBoxes.begin() == mOutsideBox)
        size = 2.;

    Eigen::Matrix<double, 4, 3> corners;

    // get some vector != mDirection
    Eigen::Vector3d random;
    random << 1, 0, 0;
    if (std::abs(random.dot(mDirection)) == 1)
    {
        random << 0, 1, 0;
    }

    Eigen::Vector3d transversal = random.cross(mDirection);
    Eigen::Vector3d transversal2 = transversal.cross(mDirection);

    // normalize to size/2
    transversal.normalize();
    transversal2.normalize();
    transversal *= size / 2;
    transversal2 *= size / 2;

    corners.row(0) = (mPosition + transversal + transversal2).transpose();
    corners.row(1) = (mPosition + transversal - transversal2).transpose();
    corners.row(2) = (mPosition - transversal - transversal2).transpose();
    corners.row(3) = (mPosition - transversal + transversal2).transpose();

    std::vector<int> cornerIndex(4);
    for (int i = 0; i < 4; ++i)
    {
        cornerIndex[i] = rVisualizer.AddPoint(corners.row(i));
    }
    int insertIndex = rVisualizer.AddCell(cornerIndex, eCellTypes::QUAD);
    rVisualizer.SetCellData(insertIndex, "Direction", mDirection);
}
void draw()
{
    glBegin(GL_LINES);
    for (EdgeCIter e = mesh.edges.begin(); e != mesh.edges.end(); e++) {
        int s = 3;
        Eigen::Vector3d v = e->he->vertex->position;
        Eigen::Vector3d u = e->he->flip->vertex->position - v;
        u.normalize();
        double dl = e->length() / (double)s;

        // Curvature values at the two endpoints of the edge
        double c = 0.0;
        double c2 = 0.0;
        if (curvature == 0) {
            c = e->he->vertex->gaussCurvature;
            c2 = e->he->flip->vertex->gaussCurvature;
        } else if (curvature == 1) {
            c = e->he->vertex->meanCurvature;
            c2 = e->he->flip->vertex->meanCurvature;
        } else {
            c = normalCurvatures[e->he->vertex->index];
            c2 = normalCurvatures[e->he->flip->vertex->index];
        }

        // Draw the edge as s segments, linearly interpolating the curvature color along it
        double dc = (c2 - c) / (double)s;
        for (int i = 0; i < s; i++) {
            if (c < 0) glColor4f(0.0, 0.0, fabs(c), 0.6);
            else       glColor4f(c, 0.0, 0.0, 0.6);
            glVertex3d(v.x(), v.y(), v.z());

            c += dc;
            if (c < 0) glColor4f(0.0, 0.0, fabs(c), 0.6);
            else       glColor4f(c, 0.0, 0.0, 0.6);

            v += u * dl;
            glVertex3d(v.x(), v.y(), v.z());
        }
    }
    glEnd();
}
Eigen::Vector3d Circuit::GetField(const Eigen::Vector3d& point)
{
    // Approximate the circular loop by m_segments straight current segments
    double segmentLength = 2.0 * NXGR_PI * m_radius / m_segments;
    double segmentCoeff = PERMIABILITY_OVER_4PI * segmentLength * m_current;

    Eigen::Vector3d B = Eigen::Vector3d::Zero();
    Eigen::Vector3d pos(0.0, 0.0, 0.0);
    Eigen::Vector3d up(0.0, 0.0, 1.0);

    double angle = 0.0;
    double angleInc = 2.0 * NXGR_PI / m_segments;

    for (int i = 0; i < m_segments; i++)
    {
        pos[0] = m_radius * cos(angle);
        pos[1] = m_radius * sin(angle);

        // Current flow direction is tangential to the loop
        Eigen::Vector3d flow = pos.cross(up);
        flow.normalize();

        Eigen::Vector3d segmentB = GetFieldFromPoint(point, pos, flow, segmentCoeff);
        B += segmentB;

        angle += angleInc;
    }
    return B;
}