bool EXPORT_API ProjectPointTriangleConstraintsJB(btVector3* p, btVector3* p0, btVector3* p1, btVector3* p2,
    float radius, float K1, float K2,
    btVector3* c, btVector3* c0, btVector3* c1, btVector3* c2, btVector3* debugOut)
{
    //btScalar massInv = 1.0f / mass;
    Eigen::Vector3f pp(p->x(), p->y(), p->z());
    Eigen::Vector3f pA(p0->x(), p0->y(), p0->z());
    Eigen::Vector3f pB(p1->x(), p1->y(), p1->z());
    Eigen::Vector3f pC(p2->x(), p2->y(), p2->z());

    Eigen::Vector3f corrA;
    Eigen::Vector3f corrB;
    Eigen::Vector3f corrC;
    Eigen::Vector3f corr;
    // The solver below does not fill this value; zero it so debugOut is not copied from uninitialized memory.
    Eigen::Vector3f debug = Eigen::Vector3f::Zero();

    const bool res = PBD::PositionBasedDynamics::solveTrianglePointDistConstraint(
        pp, 1, pA, 1, pB, 1, pC, 1, radius, K1, K2, corr, corrA, corrB, corrC);

    if (res)
    {
        c->setValue(corr.x(), corr.y(), corr.z());
        c0->setValue(corrA.x(), corrA.y(), corrA.z());
        c1->setValue(corrB.x(), corrB.y(), corrB.z());
        c2->setValue(corrC.x(), corrC.y(), corrC.z());
        debugOut->setValue(debug.x(), debug.y(), debug.z());
    }

    return res;
}
void WorldDownloadManager::cropCloud(const Eigen::Vector3f & min, const Eigen::Vector3f & max,
  typename pcl::PointCloud<PointT>::ConstPtr cloud, typename pcl::PointCloud<PointT>::Ptr out_cloud)
{
  const uint cloud_size = cloud->size();
  std::vector<bool> valid_points(cloud_size, true);

  // check the points
  for (uint i = 0; i < cloud_size; i++)
  {
    const PointT & pt = (*cloud)[i];
    if (pt.x > max.x() || pt.y > max.y() || pt.z > max.z() ||
        pt.x < min.x() || pt.y < min.y() || pt.z < min.z())
      valid_points[i] = false;
  }

  // discard invalid points
  out_cloud->clear();
  out_cloud->reserve(cloud_size);
  uint count = 0;

  for (uint i = 0; i < cloud_size; i++)
    if (valid_points[i])
    {
      out_cloud->push_back((*cloud)[i]);
      count++;
    }

  out_cloud->resize(count);
}
void X3DConverter::startShape(const std::array<float, 12>& matrix)
{
    // Finding axis/angle from matrix using Eigen for its bullet-proof implementation.
    Eigen::Transform<float, 3, Eigen::Affine> t;
    t.setIdentity();
    for (unsigned int i = 0; i < 3; i++)
    {
        for (unsigned int j = 0; j < 4; j++)
        {
            t(i, j) = matrix[i + j * 3];
        }
    }
    Eigen::Matrix3f rotationMatrix;
    Eigen::Matrix3f scaleMatrix;
    t.computeRotationScaling(&rotationMatrix, &scaleMatrix);
    Eigen::Quaternionf q;
    Eigen::AngleAxisf aa;
    q = rotationMatrix;
    aa = q;
    Eigen::Vector3f scale = scaleMatrix.diagonal();
    Eigen::Vector3f translation = t.translation();

    startNode(ID::Transform);
    m_writers.back()->setSFVec3f(ID::translation, translation.x(), translation.y(), translation.z());
    m_writers.back()->setSFRotation(ID::rotation, aa.axis().x(), aa.axis().y(), aa.axis().z(), aa.angle());
    m_writers.back()->setSFVec3f(ID::scale, scale.x(), scale.y(), scale.z());
    startNode(ID::Shape);
    startNode(ID::Appearance);
    startNode(ID::Material);
    m_writers.back()->setSFColor<vector<float> >(ID::diffuseColor, RVMColorHelper::color(m_materials.back()));
    endNode(ID::Material);   // Material
    endNode(ID::Appearance); // Appearance
}
bool targetViewpoint(const Eigen::Vector3f& rayo, const Eigen::Vector3f& target, const Eigen::Vector3f& down,
                     Eigen::Affine3f& transf)
{
  // uz: versor pointing toward the destination
  Eigen::Vector3f uz = target - rayo;
  if (std::abs(uz.norm()) < 1e-3)
  {
    std::cout << __FILE__ << "," << __LINE__ << ": target point on ray origin!" << std::endl;
    return false;
  }
  uz.normalize();
  //std::cout << "uz " << uz.transpose() << ", norm " << uz.norm() << std::endl;

  // ux: versor pointing toward the ground
  Eigen::Vector3f ux = down - down.dot(uz) * uz;
  if (std::abs(ux.norm()) < 1e-3)
  {
    std::cout << __FILE__ << "," << __LINE__ << ": ray to target toward ground direction!" << std::endl;
    return false;
  }
  ux.normalize();
  //std::cout << "ux " << ux.transpose() << ", norm " << ux.norm() << std::endl;

  Eigen::Vector3f uy = uz.cross(ux);
  //std::cout << "uy " << uy.transpose() << ", norm " << uy.norm() << std::endl;

  Eigen::Matrix3f rot;
  rot << ux.x(), uy.x(), uz.x(),
         ux.y(), uy.y(), uz.y(),
         ux.z(), uy.z(), uz.z();

  transf.setIdentity();
  transf.translate(rayo);
  transf.rotate(rot);
  //std::cout << __FILE__ << "\nrotation\n" << rot << "\ntranslation\n" << rayo << "\naffine\n" << transf.matrix() << std::endl;
  return true;
}
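// Minimal usage sketch for targetViewpoint (the origin/goal/down values below are illustrative
// assumptions, not taken from the original code): build a frame whose z axis looks from an origin
// toward a goal, using gravity as the "down" hint, and read back the viewing direction.
#include <Eigen/Geometry>
#include <iostream>

void targetViewpointExample()
{
  const Eigen::Vector3f origin(0.0f, 0.0f, 1.5f);  // assumed ray origin
  const Eigen::Vector3f goal(2.0f, 1.0f, 0.0f);    // assumed look-at target
  const Eigen::Vector3f down(0.0f, 0.0f, -1.0f);   // assumed gravity direction
  Eigen::Affine3f sensor_pose;
  if (targetViewpoint(origin, goal, down, sensor_pose))
  {
    // Column 2 of the linear part is uz, the unit vector from origin toward the goal.
    std::cout << "view dir: " << sensor_pose.linear().col(2).transpose() << std::endl;
  }
}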
void EXPORT_API ComputeInternalConstraintsJB(btVector3* predictedPositions, float* invMasses, bool* posLocks,
    Link* links, int linksCount, float Ks_prime, float mass)
{
    for (int i = 0; i < linksCount; i++)
    {
        Link link = links[i];
        if ((link.type == -1) || (posLocks[link.idA] && posLocks[link.idB]))
            continue;

        btScalar wA = posLocks[link.idA] ? 0.0f : invMasses[link.idA];
        btScalar wB = posLocks[link.idB] ? 0.0f : invMasses[link.idB];

        btVector3 predPosA = predictedPositions[link.idA];
        btVector3 predPosB = predictedPositions[link.idB];

        Eigen::Vector3f pA(predPosA.x(), predPosA.y(), predPosA.z());
        Eigen::Vector3f pB(predPosB.x(), predPosB.y(), predPosB.z());

        Eigen::Vector3f corrA;
        Eigen::Vector3f corrB;

        const bool res = PBD::PositionBasedDynamics::solveDistanceConstraint(
            pA, wA, pB, wB, link.restLength, Ks_prime, Ks_prime, corrA, corrB);

        if (res)
        {
            if (wA != 0.0f)
                predictedPositions[link.idA] += btVector3(corrA.x(), corrA.y(), corrA.z());
            if (wB != 0.0f)
                predictedPositions[link.idB] += btVector3(corrB.x(), corrB.y(), corrB.z());
        }
    }
}
void Kamikaze::updateEnemy(Level* currLev)
{
   // TODO: move toward the ship
   if (!dead) {
      Eigen::Vector3f shipLoc = currLev->ship->position;
      Eigen::Vector3f direction = shipLoc - position;
      direction.normalize();
      direction /= 1000;
      direction.z() -= currLev->ship->velocity.z();
      Eigen::Vector3f mousePos = Eigen::Vector3f(shipLoc.x(), shipLoc.y(), shipLoc.z());

      float rotateY = RADIANS_TO_DEGREES(atan((shipLoc.x() - position.x()) / (shipLoc.z() - position.z()))) - 90.0;
      float rotateX = -RADIANS_TO_DEGREES(atan((shipLoc.y() - position.y()) / (shipLoc.z() - position.z())));
      rotate.y() = rotateY;
      rotate.x() = rotateX;

      // if we're behind the ship, keep going in that direction
      if (direction.z() <= 0) {
         direction[2] *= -1;
         direction[2] += 0.2;
      }
      velocity = direction;
   }
}
template<typename PointT, typename LeafT, typename BranchT, typename OctreeT> int
pcl::octree::OctreePointCloud<PointT, LeafT, BranchT, OctreeT>::getApproxIntersectedVoxelCentersBySegment (
    const Eigen::Vector3f& origin, const Eigen::Vector3f& end,
    AlignedPointTVector &voxel_center_list, float precision)
{
  Eigen::Vector3f direction = end - origin;
  float norm = direction.norm ();
  direction.normalize ();

  const float step_size = static_cast<const float> (resolution_) * precision;
  // Ensure we get at least one step for the first voxel.
  const int nsteps = std::max (1, static_cast<int> (norm / step_size));

  OctreeKey prev_key;
  bool bkeyDefined = false;

  // Walk along the line segment with small steps.
  for (int i = 0; i < nsteps; ++i)
  {
    Eigen::Vector3f p = origin + (direction * step_size * static_cast<const float> (i));

    PointT octree_p;
    octree_p.x = p.x ();
    octree_p.y = p.y ();
    octree_p.z = p.z ();

    OctreeKey key;
    this->genOctreeKeyforPoint (octree_p, key);

    // Not a new key, still the same voxel.
    if ((key == prev_key) && (bkeyDefined))
      continue;

    prev_key = key;
    bkeyDefined = true;

    PointT center;
    genLeafNodeCenterFromOctreeKey (key, center);
    voxel_center_list.push_back (center);
  }

  OctreeKey end_key;
  PointT end_p;
  end_p.x = end.x ();
  end_p.y = end.y ();
  end_p.z = end.z ();
  this->genOctreeKeyforPoint (end_p, end_key);
  if (!(end_key == prev_key))
  {
    PointT center;
    genLeafNodeCenterFromOctreeKey (end_key, center);
    voxel_center_list.push_back (center);
  }

  return (static_cast<int> (voxel_center_list.size ()));
}
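// Hedged usage sketch for the method above (the 5 cm resolution and segment endpoints are assumed
// values, not from the original source): build an octree over a cloud and collect the approximate
// voxel centers intersected by a line segment.
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/octree/octree.h>

void octreeSegmentExample(pcl::PointCloud<pcl::PointXYZ>::ConstPtr cloud)
{
  pcl::octree::OctreePointCloud<pcl::PointXYZ> octree(0.05); // assumed 5 cm voxel resolution
  octree.setInputCloud(cloud);
  octree.addPointsFromInputCloud();

  pcl::octree::OctreePointCloud<pcl::PointXYZ>::AlignedPointTVector centers;
  const Eigen::Vector3f origin(0.0f, 0.0f, 0.0f);
  const Eigen::Vector3f end(1.0f, 0.0f, 0.0f);
  // precision 0.2 -> sampling step of 0.2 * voxel resolution along the segment
  octree.getApproxIntersectedVoxelCentersBySegment(origin, end, centers, 0.2f);
}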
Eigen::Vector3f lineNormalized(Eigen::Vector3f p0, Eigen::Vector3f p1)
{
	// Homogeneous line through p0 and p1; dividing by z would give the (a, b, 1) form,
	// but that intermediate result is currently unused.
	Eigen::Vector3f l = p0.cross(p1);
	l.x() = l.x() / l.z();
	l.y() = l.y() / l.z();
	l.z() = 1.0f;
	//return l;

	return p0.cross(p1).normalized();
}
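// Hedged sketch, not part of the original code: the cross product of two homogeneous 2D points
// p0 = (x0, y0, 1) and p1 = (x1, y1, 1) is the line l with l.dot(p) == 0 for every point p on it,
// so either scaling of l (unit norm, or z == 1) represents the same line. The sample points here
// are illustrative.
#include <Eigen/Dense>
#include <cassert>
#include <cmath>

void lineNormalizedExample()
{
	Eigen::Vector3f p0(0.0f, 0.0f, 1.0f);   // point (0,0) in homogeneous form
	Eigen::Vector3f p1(1.0f, 1.0f, 1.0f);   // point (1,1) in homogeneous form
	Eigen::Vector3f l = lineNormalized(p0, p1);
	Eigen::Vector3f mid(0.5f, 0.5f, 1.0f);  // the midpoint lies on the same line
	assert(std::abs(l.dot(mid)) < 1e-6f);
}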
void View::render ( void )
{
	::glClear ( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
	::glLoadIdentity();

	// model view.
	const Camera& camera = this->_model.getCamera();
	Eigen::Vector3f eye = camera.getEye();
	Eigen::Vector3f center = camera.getCenter();
	Eigen::Vector3f up = camera.getUpVector();
	::gluLookAt ( eye.x(), eye.y(), eye.z(),
	              center.x(), center.y(), center.z(),
	              up.x(), up.y(), up.z() );

	// light
	Light light = this->_model.getLight();
	this->setLight ( light );

	// draw mesh
	RenderingMode mode = this->_model.getPreference().getRenderingMode();
	if ( mode == WIRE ) {
		::glDisable ( GL_LIGHTING );
		::glPolygonMode ( GL_FRONT_AND_BACK, GL_LINE );
		const Color3f fg = this->_model.getPreference().getWireColor();
		::glColor3f ( fg.x(), fg.y(), fg.z() );
	}
	else if ( mode == SURFACE ) {
		::glEnable ( GL_LIGHTING );
		const Color3f fg = this->_model.getPreference().getSurfaceColor();
		::glPolygonMode ( GL_FRONT_AND_BACK, GL_FILL );

		GLfloat mat_ambient[4] = { fg.x(), fg.y(), fg.z(), 1.0 };
		GLfloat mat_diffuse[4] = { 0.8, 0.8, 0.8, 1.0 };
		GLfloat mat_specular[4] = { 0.2, 0.2, 0.2, 1.0 };
		GLfloat mat_shininess[1] = { 100.0f };
		::glMaterialfv ( GL_FRONT, GL_AMBIENT, mat_ambient );
		::glMaterialfv ( GL_FRONT, GL_DIFFUSE, mat_diffuse );
		::glMaterialfv ( GL_FRONT, GL_SPECULAR, mat_specular );
		::glMaterialfv ( GL_FRONT, GL_SHININESS, mat_shininess );

		GLfloat mat2_ambient[4] = { 1 - fg.x(), 1 - fg.y(), 1 - fg.z(), 1.0 };
		GLfloat mat2_diffuse[4] = { 0.8, 0.8, 0.8, 1.0 };
		GLfloat mat2_specular[4] = { 0.2, 0.2, 0.2, 1.0 };
		GLfloat mat2_shininess[1] = { 100.0f };
		::glMaterialfv ( GL_BACK, GL_AMBIENT, mat2_ambient );
		::glMaterialfv ( GL_BACK, GL_DIFFUSE, mat2_diffuse );
		::glMaterialfv ( GL_BACK, GL_SPECULAR, mat2_specular );
		::glMaterialfv ( GL_BACK, GL_SHININESS, mat2_shininess );
	}
	this->render_mesh();
	return;
}
int Simulator::computeObservations(Mocap_object* mo, Camera* cam, bool show_image)
{
  char img_name[100];
  if (show_image){
    sprintf(img_name, "Projection_%i", cam->id);
    cvNamedWindow(img_name, 1);
  }

  cam->obs.clear();
  Eigen::Affine3f c2w = cam->pose.inverse();

  for (uint i = 0; i < mo->points.size(); ++i){
    if (!mo->point_valid[i]) continue;

    Eigen::Vector3f c = mo->points[i];
    c = c2w * c;
    // ROS_INFO("pt in cam frame: %f %f %f", c.x(), c.y(), c.z());

    float x_px = c.x() / c.z() * cam->f_x + cam->c_x;
    float y_px = c.y() / c.z() * cam->f_y + cam->c_y;

    // point behind camera or not within field of view
    if (c.z() < 0 || x_px < 0 || y_px < 0 || x_px >= cam->c_width || y_px >= cam->c_height){
      continue;
    }

    cam->obs[i] = cvPoint2D32f(x_px, y_px);
  }

  if (show_image) {
    // print on image
    cvSet(img, cvScalar(0, 0, 0));
    for (Observations::iterator it = cam->obs.begin(); it != cam->obs.end(); ++it){
      cvCircle(img, cvPoint(it->second.x, it->second.y), 3, CV_RGB(0, 255, 0), 2);
    }
    // for (uint i=0; i<prj.size(); ++i){
    //   float x = prj[i].x;   float y = prj[i].y;
    //   float x_ = prj[(i+1)%prj.size()].x;   float y_ = prj[(i+1)%prj.size()].y;
    //   cvLine(img, cvPoint(x,y), cvPoint(x_,y_), CV_RGB(255,0,0), 2);
    // }
    cvShowImage(img_name, img);
    cvWaitKey(5);
  }

  return cam->obs.size();
}
template<typename PointInT, typename PointNT, typename PointOutT> void
pcl::BOARDLocalReferenceFrameEstimation<PointInT, PointNT, PointOutT>::randomOrthogonalAxis (
    Eigen::Vector3f const &axis, Eigen::Vector3f &rand_ortho_axis)
{
  if (!areEquals (axis.z (), 0.0f))
  {
    rand_ortho_axis.x () = (static_cast<float> (rand ()) / static_cast<float> (RAND_MAX)) * 2.0f - 1.0f;
    rand_ortho_axis.y () = (static_cast<float> (rand ()) / static_cast<float> (RAND_MAX)) * 2.0f - 1.0f;
    rand_ortho_axis.z () = -(axis.x () * rand_ortho_axis.x () + axis.y () * rand_ortho_axis.y ()) / axis.z ();
  }
  else if (!areEquals (axis.y (), 0.0f))
  {
    rand_ortho_axis.x () = (static_cast<float> (rand ()) / static_cast<float> (RAND_MAX)) * 2.0f - 1.0f;
    rand_ortho_axis.z () = (static_cast<float> (rand ()) / static_cast<float> (RAND_MAX)) * 2.0f - 1.0f;
    rand_ortho_axis.y () = -(axis.x () * rand_ortho_axis.x () + axis.z () * rand_ortho_axis.z ()) / axis.y ();
  }
  else if (!areEquals (axis.x (), 0.0f))
  {
    rand_ortho_axis.y () = (static_cast<float> (rand ()) / static_cast<float> (RAND_MAX)) * 2.0f - 1.0f;
    rand_ortho_axis.z () = (static_cast<float> (rand ()) / static_cast<float> (RAND_MAX)) * 2.0f - 1.0f;
    rand_ortho_axis.x () = -(axis.y () * rand_ortho_axis.y () + axis.z () * rand_ortho_axis.z ()) / axis.x ();
  }

  rand_ortho_axis.normalize ();

  // check if the computed x axis is orthogonal to the normal
  //assert(areEquals(rand_ortho_axis.dot(axis), 0.0f, 1E-6f));
}
pcl::PointCloud<pcl::PointNormal>::Ptr CloudFactory::createCube(const double size_, const Eigen::Vector3f &_center, const int _npoints)
{
	pcl::PointCloud<pcl::PointNormal>::Ptr cloud = pcl::PointCloud<pcl::PointNormal>::Ptr(new pcl::PointCloud<pcl::PointNormal>());

	int N = sqrt(_npoints / 6);

	double minX = _center.x() - size_ * 0.5;
	double minY = _center.y() - size_ * 0.5;
	double minZ = _center.z() - size_ * 0.5;
	double maxX = _center.x() + size_ * 0.5;
	double maxY = _center.y() + size_ * 0.5;
	double maxZ = _center.z() + size_ * 0.5;
	double step = size_ / N;

	// Generate faces fixed in X axis
	for (double y = minY; y <= maxY; y += step)
	{
		for (double z = minZ; z <= maxZ; z += step)
		{
			cloud->push_back(PointFactory::createPointNormal(minX, y, z, -1, 0, 0));
			cloud->push_back(PointFactory::createPointNormal(maxX, y, z, 1, 0, 0));
		}
	}

	// Generate faces fixed in Y axis
	for (double x = minX; x <= maxX; x += step)
	{
		for (double z = minZ; z <= maxZ; z += step)
		{
			cloud->push_back(PointFactory::createPointNormal(x, minY, z, 0, -1, 0));
			cloud->push_back(PointFactory::createPointNormal(x, maxY, z, 0, 1, 0));
		}
	}

	// Generate faces fixed in Z axis
	for (double x = minX; x <= maxX; x += step)
	{
		for (double y = minY; y <= maxY; y += step)
		{
			cloud->push_back(PointFactory::createPointNormal(x, y, minZ, 0, 0, -1));
			cloud->push_back(PointFactory::createPointNormal(x, y, maxZ, 0, 0, 1));
		}
	}

	return cloud;
}
void WorldDownloadManager::transformBoundingBoxAndExpand(const Eigen::Vector3f& bbox_min, const Eigen::Vector3f& bbox_max,
  const Eigen::Affine3f& transform, Eigen::Vector3f& bbox_min_out, Eigen::Vector3f& bbox_max_out)
{
  const int SIZE = 2;
  Eigen::Vector3f inv[SIZE];
  inv[0] = bbox_min;
  inv[1] = bbox_max;

  Eigen::Vector3f comb;

  for (uint ix = 0; ix < SIZE; ix++)
  {
    comb.x() = inv[ix].x();
    for (uint iy = 0; iy < SIZE; iy++)
    {
      comb.y() = inv[iy].y();
      for (uint iz = 0; iz < SIZE; iz++)
      {
        comb.z() = inv[iz].z();

        const Eigen::Vector3f t_comb = transform * comb;

        if (!ix && !iy && !iz) // first iteration
          bbox_min_out = bbox_max_out = t_comb;
        else
        {
          bbox_min_out = bbox_min_out.array().min(t_comb.array());
          bbox_max_out = bbox_max_out.array().max(t_comb.array());
        }
      }
    }
  }
}
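// Hedged usage sketch (the box extents and rotation are illustrative, and it assumes the method is
// callable on a WorldDownloadManager instance): transform an axis-aligned box by a rotation and get
// the axis-aligned box that contains all eight transformed corners, which is what the method above
// computes.
#include <Eigen/Geometry>
#include <cmath>
#include <iostream>

void transformBoundingBoxExample(WorldDownloadManager & manager)
{
  const Eigen::Vector3f bbox_min(-1.0f, -1.0f, -1.0f);
  const Eigen::Vector3f bbox_max(1.0f, 2.0f, 3.0f);
  const Eigen::Affine3f transform(Eigen::AngleAxisf(static_cast<float>(M_PI) / 4.0f, Eigen::Vector3f::UnitZ()));

  Eigen::Vector3f out_min, out_max;
  manager.transformBoundingBoxAndExpand(bbox_min, bbox_max, transform, out_min, out_max);
  // The result is larger along x/y than the input box because the output stays axis-aligned.
  std::cout << "expanded box: " << out_min.transpose() << " to " << out_max.transpose() << std::endl;
}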
bool CommandSubscriber::getForcedTfFrame(Eigen::Affine3f & result) const
{
  tf::StampedTransform transform;
  try
  {
    m_tf_listener.lookupTransform(m_forced_tf_frame_reference, m_forced_tf_frame_name, ros::Time(0), transform);
  }
  catch (const tf::TransformException & ex)
  {
    ROS_ERROR("kinfu: hint was forced by TF, but retrieval failed because: %s", ex.what());
    return false;
  }

  Eigen::Vector3f vec;
  vec.x() = transform.getOrigin().x();
  vec.y() = transform.getOrigin().y();
  vec.z() = transform.getOrigin().z();

  Eigen::Quaternionf quat;
  quat.x() = transform.getRotation().x();
  quat.y() = transform.getRotation().y();
  quat.z() = transform.getRotation().z();
  quat.w() = transform.getRotation().w();

  result.linear() = Eigen::AngleAxisf(quat).matrix();
  result.translation() = vec;

  return true;
}
static bool math_eigen_test_rotation_method_consistency(const Eigen::Quaternionf &q, const Eigen::Vector3f &v)
{
	bool success = true;

	assert_eigen_quaternion_is_normalized(q);

	// This is the same as computing the rotation by computing: q^-1*[0,v]*q, but cheaper
	Eigen::Vector3f v_rotated = eigen_vector3f_clockwise_rotate(q, v);

	// Make sure doing the matrix based rotation performs the same result
	{
		Eigen::Matrix3f m = eigen_quaternion_to_clockwise_matrix3f(q);
		Eigen::Vector3f v_test = m * v;

		success &= v_test.isApprox(v_rotated, k_normal_epsilon);
		assert(success);
	}

	// Make sure the Hamilton product style rotation matches
	{
		Eigen::Quaternionf v_as_quaternion = Eigen::Quaternionf(0.f, v.x(), v.y(), v.z());
		Eigen::Quaternionf q_inv = q.conjugate();
		Eigen::Quaternionf qinv_v = q_inv * v_as_quaternion;
		Eigen::Quaternionf qinv_v_q = qinv_v * q;

		success &=
			is_nearly_equal(qinv_v_q.w(), 0.f, k_normal_epsilon) &&
			is_nearly_equal(qinv_v_q.x(), v_rotated.x(), k_normal_epsilon) &&
			is_nearly_equal(qinv_v_q.y(), v_rotated.y(), k_normal_epsilon) &&
			is_nearly_equal(qinv_v_q.z(), v_rotated.z(), k_normal_epsilon);
		assert(success);
	}

	return success;
}
void RosKinectReceiver::run()
{
    ros::Rate rate(30);

    zmq::message_t msg;
    while (ros::ok())
    {
        subscriber_->recv(&msg);

        const int id = *(int *)msg.data();
        const Skeleton* skeleton = (Skeleton *)((int *)msg.data() + 1);

        for (int i = 0; i < Skeleton::getNumJoints(); i++)
        {
            const std::string& joint_name = skeleton->getJointName(i);
            const Eigen::Vector3f joint_position = skeleton->getJointPosition(i);

            tf::Transform transform;
            transform.setIdentity();
            transform.setOrigin( tf::Vector3(joint_position.x(), joint_position.y(), joint_position.z()) );

            // omit user id
            //broadcaster_.sendTransform( tf::StampedTransform(transform, ros::Time::now(), "kinect_depth_frame", joint_name + "_" + std::to_string(id)) );
            broadcaster_.sendTransform( tf::StampedTransform(transform, ros::Time::now(), "kinect_depth_frame", joint_name) );
        }

        rate.sleep();
    }
}
void NodeRendererEvent::render( const Eigen::Matrix4f& view, const Eigen::Matrix4f& model )
{
    glPushMatrix();
    // glMultMatrixf expects a pointer to 16 floats; evaluate the product and pass its data,
    // since an Eigen array expression does not convert to const GLfloat*.
    const Eigen::Matrix4f modelview = model * view;
    glMultMatrixf( modelview.data() );

    glColor3f( 1.0f, 1.0f, 0.0f );

    // render a filled ellipse
    glBegin( GL_POLYGON );
    for ( float step = 0.0f; step < 2.0f * M_PI; step += 0.1f )
    {
        float x = GEOM_WIDTH * sinf( step );
        float y = GEOM_HEIGHT * cosf( step );
        glVertex3f( x, y, 0.0f );
    }
    glEnd();

    // render the border
    if ( _isHighlighted )
    {
        Eigen::Vector3f col = RenderManager::get()->getHeighlightColour();
        glColor3f( col.x(), col.y(), col.z() );
    }
    else
    {
        glColor3f( 0.3f, 0.3f, 0.0f );
    }

    glLineWidth( 8.0f );
    glBegin( GL_LINE_LOOP );
    for ( float step = 0.0f; step < 2.0f * M_PI; step += 0.05f )
    {
        float x = ( GEOM_WIDTH + 0.9f ) * sinf( step );
        float y = ( GEOM_HEIGHT + 0.9f ) * cosf( step );
        glVertex3f( x, y, 0.0f );
    }
    glEnd();

    // render the node text
    glColor3f( 0.0, 0.0, 0.0 );
    std::string text( getText() );
    if ( text.length() )
    {
        Eigen::Vector3f tmin, tmax;
        RenderManager::get()->fontGetDims( text.c_str(), tmin, tmax );
        float xpos = tmax.x() - tmin.x();
        float ypos = tmax.y() - tmin.y();
        if ( getScale().x() ) xpos /= getScale().x();
        if ( getScale().y() ) ypos /= getScale().y();
        Eigen::Vector2f pos( -xpos / 2.0f, -ypos / 2.0f );
        RenderManager::get()->fontRender( text.c_str(), pos );
    }

    glPopMatrix();
}
bool VirtualTrackball::mouseMoved ( const MouseEvent* event )
{
	if ( !event->isLeftButtonPressed() ) return false;
	const Eigen::Vector3f p0 = this->_oldp;
	const Eigen::Vector3f p1 = this->project_on_sphere ( event->x(), event->y() );
	this->_oldp = p1;
	if ( ( p0 - p1 ).norm() < this->_eps ) return false; // do nothing

	// Something looked wrong here, so try a corrected version:
	// use the half angle between p0 and p1 when building the quaternion.
	float radian = std::acos ( p0.dot ( p1 ) ) * 0.5;
	const float cost = std::cos ( radian );
	const float sint = std::sin ( radian );
	//const float cost = p0.dot ( p1 );
	//const float sint = std::sqrt ( 1 - cost * cost );
	const Eigen::Vector3f axis = p0.cross ( p1 ).normalized();
	const Eigen::Quaternionf q ( -cost, sint * axis.x(), sint * axis.y(), sint * axis.z() );
	// reject NaNs
	if ( ( q.x() != q.x() ) || ( q.y() != q.y() ) || ( q.z() != q.z() ) || ( q.w() != q.w() ) ) return false;

	/*
	Eigen::Vector3f bmin, bmax;
	Mesh mesh;
	mesh = this->_model.getMesh();
	mesh.getBoundingBox ( bmin, bmax );
	Eigen::Vector3f mc = ( bmin + bmax ) * 0.5;
	Eigen::Vector3f c = Eigen::Matrix3f ( q ) * ( this->_model.getCamera().getCenter() - mc ) + mc;
	this->_model.setCameraPosition ( c.x(), c.y(), c.z() );
	*/
	//this->_model.addRotation ( q );
	return true;
}
void m2tq(Eigen::Vector3f& t, Eigen::Quaternionf& q, const Eigen::Matrix4f& m)
{
  // translation is the last column of the homogeneous matrix
  t.x() = m(0, 3);
  t.y() = m(1, 3);
  t.z() = m(2, 3);

  // rotation is the upper-left 3x3 block
  Eigen::Matrix3f R = m.block<3, 3>(0, 0);
  q = Eigen::Quaternionf(R);
}
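// Hedged usage sketch (the input pose is illustrative): decompose a homogeneous 4x4 pose into
// translation + quaternion with m2tq, rebuild the matrix, and check the round trip.
#include <Eigen/Geometry>
#include <cassert>

void m2tqExample()
{
  Eigen::Matrix4f m = Eigen::Matrix4f::Identity();
  m.block<3, 3>(0, 0) = Eigen::AngleAxisf(0.3f, Eigen::Vector3f::UnitY()).toRotationMatrix();
  m.block<3, 1>(0, 3) = Eigen::Vector3f(1.0f, 2.0f, 3.0f);

  Eigen::Vector3f t;
  Eigen::Quaternionf q;
  m2tq(t, q, m);

  Eigen::Matrix4f rebuilt = Eigen::Matrix4f::Identity();
  rebuilt.block<3, 3>(0, 0) = q.toRotationMatrix();
  rebuilt.block<3, 1>(0, 3) = t;
  assert(rebuilt.isApprox(m, 1e-5f));
}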
void EXPORT_API InitTetrasShapeMatchingJB(btVector3* initialPositions, float* invMasses, bool* posLocks,
	Tetrahedron* tetras, btVector3* restCMs, float* invRestMat, int tetrasCount)
{
	for (int i = 0; i < tetrasCount; i++)
	{
		Tetrahedron tetra = tetras[i];

		Eigen::Vector3f x1_0(initialPositions[tetra.idA].x(), initialPositions[tetra.idA].y(), initialPositions[tetra.idA].z());
		Eigen::Vector3f x2_0(initialPositions[tetra.idB].x(), initialPositions[tetra.idB].y(), initialPositions[tetra.idB].z());
		Eigen::Vector3f x3_0(initialPositions[tetra.idC].x(), initialPositions[tetra.idC].y(), initialPositions[tetra.idC].z());
		Eigen::Vector3f x4_0(initialPositions[tetra.idD].x(), initialPositions[tetra.idD].y(), initialPositions[tetra.idD].z());

		float w1 = posLocks[tetra.idA] ? 0.0f : invMasses[tetra.idA];
		float w2 = posLocks[tetra.idB] ? 0.0f : invMasses[tetra.idB];
		float w3 = posLocks[tetra.idC] ? 0.0f : invMasses[tetra.idC];
		float w4 = posLocks[tetra.idD] ? 0.0f : invMasses[tetra.idD];

		Eigen::Vector3f restCM;
		Eigen::Matrix3f A;

		Eigen::Vector3f x0[4] = { x1_0, x2_0, x3_0, x4_0 };
		float w[4] = { w1, w2, w3, w4 };
		Eigen::Vector3f corr[4];

		PBD::PositionBasedDynamics::initShapeMatchingRestInfo(x0, w, 4, restCM, A);

		restCMs[i] = btVector3(restCM.x(), restCM.y(), restCM.z());

		// store the 3x3 rest matrix in the upper-left of the i-th 4x4 float block
		invRestMat[16 * i + 0] = A(0, 0); invRestMat[16 * i + 1] = A(0, 1); invRestMat[16 * i + 2]  = A(0, 2);
		invRestMat[16 * i + 4] = A(1, 0); invRestMat[16 * i + 5] = A(1, 1); invRestMat[16 * i + 6]  = A(1, 2);
		invRestMat[16 * i + 8] = A(2, 0); invRestMat[16 * i + 9] = A(2, 1); invRestMat[16 * i + 10] = A(2, 2);
	}
}
void EXPORT_API InitShapeMatching(float* restPositions, float* invMasses, bool* posLocks, int pointsCount,
	float* restCMs, float* invRestMat)
{
	Eigen::Vector3f* X_0 = new Eigen::Vector3f[pointsCount];
	float* w = new float[pointsCount];

	for (int i = 0; i < pointsCount; i++)
	{
		X_0[i] = Eigen::Map<Eigen::Vector3f>(restPositions + (i * 4));
		w[i] = posLocks[i] ? 0.0f : invMasses[i];
	}

	Eigen::Vector3f restCM;
	Eigen::Matrix3f A;

	PBD::PositionBasedDynamics::initShapeMatchingRestInfo(X_0, w, pointsCount, restCM, A);

	restCMs[0] = restCM.x();
	restCMs[1] = restCM.y();
	restCMs[2] = restCM.z();
	restCMs[3] = 0.0f;

	invRestMat[0] = A(0, 0); invRestMat[1] = A(0, 1); invRestMat[2]  = A(0, 2);
	invRestMat[4] = A(1, 0); invRestMat[5] = A(1, 1); invRestMat[6]  = A(1, 2);
	invRestMat[8] = A(2, 0); invRestMat[9] = A(2, 1); invRestMat[10] = A(2, 2);

	delete[] X_0;
	delete[] w;
}
void Camera::setRotate3D(const Eigen::Vector3f& rot)
{
    // rotations about the Z, Y and X axes, composed as aaZ * aaY * aaX (Eigen::AngleAxis expects radians)
    Eigen::AngleAxis<float> aaZ(rot.z(), Eigen::Vector3f::UnitZ());
    Eigen::AngleAxis<float> aaY(rot.y(), Eigen::Vector3f::UnitY());
    Eigen::AngleAxis<float> aaX(rot.x(), Eigen::Vector3f::UnitX());
    rotation_future_ = aaZ * aaY * aaX;
    emit modified();
}
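// Hedged usage sketch (assumes the caller passes angles in radians, matching Eigen::AngleAxis):
// rotate the camera 90 degrees about the Z axis and leave the other two angles at zero.
#include <Eigen/Geometry>
#include <cmath>

void setRotate3DExample(Camera & camera)
{
    const float yaw = static_cast<float>(M_PI) * 0.5f;     // 90 degrees about Z
    camera.setRotate3D(Eigen::Vector3f(0.0f, 0.0f, yaw));  // (x, y, z) angles
}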
/**
 * Convert the global coordinates into camera space
 * Multiply by pose.inverse()
 * @param world_coordinate The 3D point in world space
 * @return camera_coordinate The 3D point in camera space
 */
Eigen::Vector3f Camera::world_to_camera( const Eigen::Vector3f & world_coordinate ) const
{
    using namespace Eigen;

    Vector4f world_h { world_coordinate.x(), world_coordinate.y(), world_coordinate.z(), 1.0f };
    Vector4f cam_h = m_pose_inverse * world_h;

    return cam_h.block(0, 0, 3, 1) / cam_h[3];
}
/**
 * Convert the camera coordinate into world space
 * Multiply by pose
 * @param camera_coordinate The 3D point in camera space
 * @return world_coordinate The 3D point in world space
 */
Eigen::Vector3f Camera::camera_to_world( const Eigen::Vector3f & camera_coordinate ) const
{
    using namespace Eigen;

    Vector4f cam_h { camera_coordinate.x(), camera_coordinate.y(), camera_coordinate.z(), 1.0f };
    Vector4f world_h = m_pose * cam_h;

    return world_h.block(0, 0, 3, 1) / world_h.w();
}
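// Hedged sanity-check sketch (assumes a Camera whose m_pose and m_pose_inverse are consistent; the
// test point is illustrative): mapping a point world -> camera -> world with the two methods above
// should return the original point.
#include <Eigen/Dense>
#include <cassert>

void cameraRoundTripExample(const Camera & camera)
{
    const Eigen::Vector3f world_point(0.5f, -1.0f, 2.0f);
    const Eigen::Vector3f cam_point = camera.world_to_camera(world_point);
    const Eigen::Vector3f back = camera.camera_to_world(cam_point);
    assert(back.isApprox(world_point, 1e-5f));
}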
void View::render_mesh ( void )
{
	const Mesh& mesh = this->_model.getMesh();
	::glBegin ( GL_TRIANGLES );
	for ( int i = 0; i < mesh.getNumFaces(); i++ ) {
		const Eigen::Vector3f nrm = mesh.getNormal ( i );
		::glNormal3f ( nrm.x(), nrm.y(), nrm.z() );
		for ( int j = 0; j < 3; j += 1 ) {
			const Eigen::Vector3f p = mesh.getPosition ( i, j );
			::glVertex3f ( p.x(), p.y(), p.z() );
		}
	}
	::glEnd();
	return;
}
void Task::alignPointcloudAsMLS(const base::Time& ts, const std::vector<base::Vector3d>& sample_pointcloud,
                                const envire::TransformWithUncertainty& body2odometry)
{
    if(sample_pointcloud.size() == 0)
        return;

    boost::shared_ptr<envire::Environment> pointcloud_env;
    pointcloud_env.reset(new envire::Environment());
    envire::MLSProjection* pointcloud_projection = new envire::MLSProjection();
    pointcloud_projection->useNegativeInformation(false);
    pointcloud_projection->useUncertainty(true);

    double grid_size = 200.0;
    double cell_resolution = 0.1;
    double grid_count = grid_size / cell_resolution;
    envire::MultiLevelSurfaceGrid* pointcloud_grid =
        new envire::MultiLevelSurfaceGrid(grid_count, grid_count, cell_resolution, cell_resolution,
                                          -0.5 * grid_size, -0.5 * grid_size);
    pointcloud_grid->getConfig().updateModel = envire::MLSConfiguration::KALMAN;
    pointcloud_grid->getConfig().gapSize = 0.5;
    pointcloud_grid->getConfig().thickness = 0.05;
    pointcloud_env->attachItem(pointcloud_grid, pointcloud_env->getRootNode());
    pointcloud_env->addOutput(pointcloud_projection, pointcloud_grid);

    // create envire pointcloud
    envire::Pointcloud* pc = new envire::Pointcloud();
    pc->vertices.reserve(sample_pointcloud.size());
    for(unsigned i = 0; i < sample_pointcloud.size(); i++)
        pc->vertices.push_back(sample_pointcloud[i]);

    // update mls
    envire::MLSGrid* grid = pointcloud_projection->getOutput<envire::MLSGrid*>();
    if(!grid)
    {
        RTT::log(RTT::Error) << "Missing mls grid in pointcloud environment." << RTT::endlog();
        return;
    }
    grid->clear();
    pointcloud_env->attachItem(pc, pointcloud_env->getRootNode());
    pointcloud_env->addInput(pointcloud_projection, pc);
    pointcloud_projection->updateAll();

    // remove inputs
    pointcloud_env->removeInput(pointcloud_projection, pc);
    pointcloud_env->detachItem(pc, true);

    // create pointcloud from mls_grid
    PCLPointCloudPtr pcl_pointcloud(new PCLPointCloud());
    createPointcloudFromMLS(pcl_pointcloud, grid);

    // create debug pointcloud
    aligned_cloud.points.clear();
    aligned_cloud.colors.clear();
    aligned_cloud.points.reserve(pcl_pointcloud->size());
    for(unsigned i = 0; i < pcl_pointcloud->size(); i++)
    {
        Eigen::Vector3f point = pcl_pointcloud->at(i).getVector3fMap();
        aligned_cloud.points.push_back(base::Vector3d(point.x(), point.y(), point.z()));
    }
    aligned_cloud.colors.resize(aligned_cloud.points.size(), base::Vector4d(1.0, 0.0, 0.0, 1.0));

    alignPointcloud(ts, pcl_pointcloud, body2odometry);
}
bool intersectWithCylinder(float radius, float m_x, float m_z, cv::Point2f pixel, const cv::Mat &P,
                           const cv::Mat projector_position, cv::Point3f& S, float& out_y, float& out_phi,
                           float& hit_angle)
{
  // get one point that projects on the given pixel:
  cv::Point3f point_on_ray;
  project3D(pixel, P, 1, point_on_ray);

  cv::Point3f center(projector_position.at<double>(0), projector_position.at<double>(1), projector_position.at<double>(2));

  bool intersects_with_cylinder = intersect(center, point_on_ray, m_x, m_z, radius, S);

  if (!intersects_with_cylinder) return false;

  // HACK!
  if (S.z > 0) return false;

  out_y = S.y;
  out_phi = atan2(S.x - m_x, -(S.z - m_z)) / M_PI * 180;

  // compute angle between (projector, S) and (Center, S)
  Eigen::Vector3f PS;
  PS.x() = projector_position.at<double>(0) - S.x;
  PS.y() = projector_position.at<double>(1) - S.y;
  PS.z() = projector_position.at<double>(2) - S.z;
  PS.normalize();

  Eigen::Vector3f MS;
  MS.x() = S.x - m_x;
  MS.y() = 0;
  MS.z() = S.z - m_z;
  MS.normalize();

  hit_angle = acos(PS.dot(MS)) / M_PI * 180;

  return true;
}
static void write_calibration_parameter(
    const Eigen::Vector3f &in_vector,
    PSMoveProtocol::FloatVector *out_vector)
{
    out_vector->set_i(in_vector.x());
    out_vector->set_j(in_vector.y());
    out_vector->set_k(in_vector.z());
}
static Eigen::Quaternionf angular_velocity_to_quaternion_derivative(
    const Eigen::Quaternionf &current_orientation,
    const Eigen::Vector3f &ang_vel)
{
    Eigen::Quaternionf omega = Eigen::Quaternionf(0.f, ang_vel.x(), ang_vel.y(), ang_vel.z());
    Eigen::Quaternionf quaternion_derivative = Eigen::Quaternionf(current_orientation.coeffs() * 0.5f) * omega;

    return quaternion_derivative;
}
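// Hedged usage sketch (the explicit Euler step and the dt parameter are illustrative choices, not
// taken from the original source): integrate the quaternion derivative over one time step and
// renormalize to keep a unit orientation.
#include <Eigen/Geometry>

static Eigen::Quaternionf integrate_orientation_step(
    const Eigen::Quaternionf &orientation,
    const Eigen::Vector3f &ang_vel,
    float dt)
{
    const Eigen::Quaternionf q_dot = angular_velocity_to_quaternion_derivative(orientation, ang_vel);

    // q_new = normalize(q + q_dot * dt), done on the coefficient vectors (x, y, z, w)
    Eigen::Quaternionf q_new(orientation.coeffs() + q_dot.coeffs() * dt);
    q_new.normalize();
    return q_new;
}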
void rmd::Publisher::publishPointCloud() const
{
  {
    std::lock_guard<std::mutex> lock(depthmap_->getRefImgMutex());

    const cv::Mat depth = depthmap_->getDepthmap();
    const cv::Mat convergence = depthmap_->getConvergenceMap();
    const cv::Mat ref_img = depthmap_->getReferenceImage();

    const float fx = depthmap_->getFx();
    const float fy = depthmap_->getFy();
    const float cx = depthmap_->getCx();
    const float cy = depthmap_->getCy();

    for(int y = 0; y < depth.rows; ++y)
    {
      for(int x = 0; x < depth.cols; ++x)
      {
        Eigen::Vector3f f((x - cx) / fx, (y - cy) / fy, 1.0);
        f.normalize();
        Eigen::Vector3f xyz = f * depth.at<float>(y, x);
        if(convergence.at<int>(y, x) != rmd::ConvergenceState::DIVERGED &&
           convergence.at<int>(y, x) != rmd::ConvergenceState::BORDER)
        {
          PointType p;
          p.x = xyz.x();
          p.y = xyz.y();
          p.z = xyz.z();
          uint8_t intensity = ref_img.at<uint8_t>(y, x);
          p.intensity = intensity;
          pc_->push_back(p);
        }
      }
    }
  }
  if (!pc_->empty())
  {
    if(nh_.ok())
    {
      uint64_t timestamp;
#if PCL_MAJOR_VERSION == 1 && PCL_MINOR_VERSION >= 7
      pcl_conversions::toPCL(ros::Time::now(), timestamp);
#else
      timestamp = ros::Time::now();
#endif
      pc_->header.frame_id = "/world";
      pc_->header.stamp = timestamp;
      pub_pc_.publish(pc_);
      std::cout << "INFO: publishing pointcloud, " << pc_->size() << " points" << std::endl;
    }
    pc_->clear();
  }
}