void HoleFinder::setTranslations(const Eigen::Vector3d &v1,
                                 const Eigen::Vector3d &v2,
                                 const Eigen::Vector3d &v3)
{
  Q_D(HoleFinder);
  d->v1 = v1;
  d->v2 = v2;
  d->v3 = v3;
  d->cartMat.col(0) = v1;
  d->cartMat.col(1) = v2;
  d->cartMat.col(2) = v3;
  d->fracMat = d->cartMat.inverse();

  double v1SqrNorm = v1.squaredNorm();
  double v2SqrNorm = v2.squaredNorm();
  double v3SqrNorm = v3.squaredNorm();
  d->v1Norm = sqrt(v1SqrNorm);
  d->v2Norm = sqrt(v2SqrNorm);
  d->v3Norm = sqrt(v3SqrNorm);
  d->cellVolume = fabs(v1.dot(v2.cross(v3)));

  DDEBUGOUT("setTranslations")
      QString("Cart matrix:\n %L1 %L2 %L3\n %L4 %L5 %L6\n %L7 %L8 %L9")
      .arg(d->cartMat(0,0), 10).arg(d->cartMat(0,1), 10).arg(d->cartMat(0,2), 10)
      .arg(d->cartMat(1,0), 10).arg(d->cartMat(1,1), 10).arg(d->cartMat(1,2), 10)
      .arg(d->cartMat(2,0), 10).arg(d->cartMat(2,1), 10).arg(d->cartMat(2,2), 10);
  DDEBUGOUT("setTranslations")
      QString("Frac matrix:\n %L1 %L2 %L3\n %L4 %L5 %L6\n %L7 %L8 %L9")
      .arg(d->fracMat(0,0), 10).arg(d->fracMat(0,1), 10).arg(d->fracMat(0,2), 10)
      .arg(d->fracMat(1,0), 10).arg(d->fracMat(1,1), 10).arg(d->fracMat(1,2), 10)
      .arg(d->fracMat(2,0), 10).arg(d->fracMat(2,1), 10).arg(d->fracMat(2,2), 10);

  int numCells = 27;
  d->points.reserve(numCells * d->numPoints);

  DDEBUGOUT("setTranslations") "VectorLengths:" << d->v1Norm << d->v2Norm << d->v3Norm;
  DDEBUGOUT("setTranslations") "CellVolume" << d->cellVolume;
  DDEBUGOUT("setTranslations") "NumPoints:" << d->numPoints;
  DDEBUGOUT("setTranslations") "NumCells:" << numCells;
  DDEBUGOUT("setTranslations") "Allocated points:" << d->points.capacity();
}
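// A minimal standalone sketch (plain Eigen, no HoleFinder/Qt types) of the
// Cartesian/fractional bookkeeping the method above caches: the cell vectors
// form the columns of cartMat, fracMat is its inverse, and the cell volume is
// the scalar triple product. The cell vectors below are arbitrary sample values.
#include <Eigen/Dense>
#include <cmath>
#include <iostream>

int main()
{
  Eigen::Vector3d v1(10, 0, 0), v2(2, 9, 0), v3(1, 1, 8);
  Eigen::Matrix3d cartMat;
  cartMat.col(0) = v1;
  cartMat.col(1) = v2;
  cartMat.col(2) = v3;
  Eigen::Matrix3d fracMat = cartMat.inverse();

  Eigen::Vector3d frac(0.25, 0.5, 0.75);   // fractional coordinates
  Eigen::Vector3d cart = cartMat * frac;   // to Cartesian
  std::cout << "round-trip error: " << (fracMat * cart - frac).norm() << "\n"; // ~0
  std::cout << "cell volume:      " << std::fabs(v1.dot(v2.cross(v3))) << "\n";
  return 0;
}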
// return closest point on line segment to the given point, and the distance
// between them.
double mesh_core::closestPointOnLine(
      const Eigen::Vector3d& line_a,
      const Eigen::Vector3d& line_b,
      const Eigen::Vector3d& point,
      Eigen::Vector3d& closest_point)
{
  Eigen::Vector3d ab = line_b - line_a;
  Eigen::Vector3d ab_norm = ab.normalized();
  Eigen::Vector3d ap = point - line_a;
  Eigen::Vector3d closest_point_rel = ab_norm.dot(ap) * ab_norm;

  double dp = ab.dot(closest_point_rel);
  if (dp < 0.0)
  {
    closest_point = line_a;
  }
  else if (dp > ab.squaredNorm())
  {
    closest_point = line_b;
  }
  else
  {
    closest_point = line_a + closest_point_rel;
  }

  return (closest_point - point).norm();
}
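// A short usage sketch of closestPointOnLine() above (assumes the mesh_core
// declaration is visible and linked): the query point projects beyond line_b,
// so the result clamps to the segment end.
#include <Eigen/Dense>
#include <iostream>

int main()
{
  Eigen::Vector3d a(0, 0, 0), b(1, 0, 0), p(2, 1, 0);
  Eigen::Vector3d closest;
  double d = mesh_core::closestPointOnLine(a, b, p, closest);
  std::cout << "closest: " << closest.transpose()   // (1, 0, 0)
            << "  distance: " << d << "\n";         // sqrt(2)
  return 0;
}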
std::pair<Eigen::Vector3d, double> resolveCenterOfPressure(Eigen::Vector3d torque,
                                                           Eigen::Vector3d force,
                                                           Eigen::Vector3d normal,
                                                           Eigen::Vector3d point_on_contact_plane)
{
  // TODO: implement multi-column version
  using namespace Eigen;

  // use std::abs so the double overload is selected (plain abs may bind to the int version)
  if (std::abs(normal.squaredNorm() - 1.0) > 1e-12) {
    mexErrMsgIdAndTxt("Drake:resolveCenterOfPressure:BadInputs",
                      "normal should be a unit vector");
  }

  Vector3d cop;
  double normal_torque_at_cop;

  double fz = normal.dot(force);
  bool cop_exists = std::abs(fz) > 1e-12;

  if (cop_exists) {
    auto torque_at_point_on_contact_plane =
        torque - point_on_contact_plane.cross(force);
    double normal_torque_at_point_on_contact_plane =
        normal.dot(torque_at_point_on_contact_plane);
    auto tangential_torque =
        torque_at_point_on_contact_plane - normal * normal_torque_at_point_on_contact_plane;
    cop = normal.cross(tangential_torque) / fz + point_on_contact_plane;
    auto torque_at_cop = torque - cop.cross(force);
    normal_torque_at_cop = normal.dot(torque_at_cop);
  }
  else {
    cop.setConstant(std::numeric_limits<double>::quiet_NaN());
    normal_torque_at_cop = std::numeric_limits<double>::quiet_NaN();
  }

  return std::pair<Vector3d, double>(cop, normal_torque_at_cop);
}
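// A hedged sanity check of resolveCenterOfPressure() above (assumes it is
// built in a context where mexErrMsgIdAndTxt is available, e.g. a MEX build):
// a purely normal force applied at a point of the contact plane should yield
// that point as the center of pressure and zero normal torque there.
#include <Eigen/Dense>
#include <iostream>

int main()
{
  Eigen::Vector3d normal(0, 0, 1);
  Eigen::Vector3d application_point(0.2, -0.1, 0.0);        // lies in the plane z = 0
  Eigen::Vector3d force = 10.0 * normal;                     // purely normal force
  Eigen::Vector3d torque = application_point.cross(force);   // torque about the origin
  auto result = resolveCenterOfPressure(torque, force, normal, Eigen::Vector3d::Zero());
  std::cout << "cop: " << result.first.transpose()           // ~ (0.2, -0.1, 0)
            << "  normal torque at cop: " << result.second << "\n";  // ~ 0
  return 0;
}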
// converts a compact quaternion (the 3D vector part of a unit quaternion with
// non-negative scalar part) to a rotation matrix
inline Eigen::Matrix3d fromCompactQuat(const Eigen::Vector3d& v)
{
  double w = 1 - v.squaredNorm();
  if (w < 0)
    return Eigen::Matrix3d::Identity();
  else
    w = sqrt(w);
  return Eigen::Quaterniond(w, v[0], v[1], v[2]).toRotationMatrix();
}
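// A round-trip check for fromCompactQuat() above: take a unit quaternion with
// a non-negative scalar part (the convention the compact form implies), keep
// only its vector part, and confirm the reconstructed rotation matrix matches.
#include <Eigen/Geometry>
#include <iostream>

int main()
{
  Eigen::Quaterniond q(Eigen::AngleAxisd(0.3, Eigen::Vector3d(1, 2, 3).normalized()));
  if (q.w() < 0) q.coeffs() *= -1.0;   // -q is the same rotation; enforce w >= 0
  Eigen::Matrix3d R = fromCompactQuat(q.vec());
  std::cout << "reconstruction error: "
            << (R - q.toRotationMatrix()).norm() << "\n";  // ~0
  return 0;
}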
void CylindricalNVecConstr::impl_compute(result_t& res, const argument_t& x) const
{
  pgdata_->x(x);
  sva::PTransformd pos = surfaceFrame_*pgdata_->mbc().bodyPosW[bodyIndex_];
  Eigen::Vector3d vec = (targetFrame_.translation() - pos.translation());
  double dot = vec.dot(pos.rotation().row(2));
  double sign = std::copysign(1., dot);
  res(0) = sign*std::pow(dot, 2) - vec.squaredNorm();
}
bool VSOArray::pointTelescopes(const Eigen::Vector3d& v)
{
  if(v.squaredNorm() == 0)
    return false;
  bool good = true;
  for(std::vector<VSOTelescope*>::iterator i = fTelescopes.begin();
      i != fTelescopes.end(); i++)
    good &= (*i)->pointTelescope(v);
  return good;
}
void NormalModeMeanSquareFluctuationCalculator::calculate_hessian_matrix(
    const Protein& protein,
    const ForceConstantSelector& kk_selector,
    const std::shared_ptr<ProteinSegment>& protein_segment)
{
  LOGD << "calculating hessian matrix";
  Eigen::VectorXd ave = protein_segment->displacement_vector();

  for (size_t ires = 0; ires < this->residue_count; ++ires) {
    for (size_t jres = 0; jres < ires; ++jres) {
      Eigen::Vector3d dr = ave.segment<3>(3 * ires) - ave.segment<3>(3 * jres);
      double range1_2 = dr.squaredNorm();
      double kk = kk_selector.select_force_constant(
          protein.get_secondary_structure_type_of_atom_pair(ires + 1, jres + 1),
          ires - jres,
          range1_2);

      // calnmodemfs.f:72
      for (size_t ixyz = 0; ixyz < 3; ixyz++) {
        for (size_t jxyz = 0; jxyz < 3; jxyz++) {
          size_t i = 3 * ires + ixyz;
          size_t j = 3 * ires + jxyz;
          this->hessian_matrix(i, j) += kk * dr(ixyz) * dr(jxyz) / range1_2;

          i = 3 * ires + ixyz;
          j = 3 * jres + jxyz;
          this->hessian_matrix(i, j) -= kk * dr(ixyz) * dr(jxyz) / range1_2;

          i = 3 * jres + ixyz;
          j = 3 * ires + jxyz;
          this->hessian_matrix(i, j) -= kk * dr(ixyz) * dr(jxyz) / range1_2;

          i = 3 * jres + ixyz;
          j = 3 * jres + jxyz;
          this->hessian_matrix(i, j) += kk * dr(ixyz) * dr(jxyz) / range1_2;
        }
      }
    }
  }

  // mass weighted
  // calmodemsfss.f:200
  for (size_t ires = 0; ires < this->residue_count; ++ires) {
    for (size_t ixyz = 0; ixyz < 3; ++ixyz) {
      size_t i = 3 * ires + ixyz;
      for (size_t jres = 0; jres < this->residue_count; ++jres) {
        for (size_t jxyz = 0; jxyz < 3; ++jxyz) {
          size_t j = 3 * jres + jxyz;
          this->hessian_matrix(i, j) /= sqrt(mass(ires) * mass(jres));
        }
      }
    }
  }
}
int collideSphereSphere(CollisionObject* o1, CollisionObject* o2,
                        const double& _r0, const Eigen::Isometry3d& c0,
                        const double& _r1, const Eigen::Isometry3d& c1,
                        CollisionResult& result)
{
  double r0 = _r0;
  double r1 = _r1;
  double rsum = r0 + r1;
  Eigen::Vector3d normal = c0.translation() - c1.translation();
  double normal_sqr = normal.squaredNorm();

  if ( normal_sqr > rsum * rsum )
  {
    return 0;
  }

  r0 /= rsum;
  r1 /= rsum;

  Eigen::Vector3d point = r1 * c0.translation() + r0 * c1.translation();
  double penetration;

  if (normal_sqr < DART_COLLISION_EPS)
  {
    normal.setZero();
    penetration = rsum;

    Contact contact;
    contact.collisionObject1 = o1;
    contact.collisionObject2 = o2;
    contact.point = point;
    contact.normal = normal;
    contact.penetrationDepth = penetration;
    result.addContact(contact);
    return 1;
  }

  normal_sqr = sqrt(normal_sqr);
  normal *= (1.0/normal_sqr);
  penetration = rsum - normal_sqr;

  Contact contact;
  contact.collisionObject1 = o1;
  contact.collisionObject2 = o2;
  contact.point = point;
  contact.normal = normal;
  contact.penetrationDepth = penetration;
  result.addContact(contact);
  return 1;
}
Eigen::Vector3d Circuit::GetFieldFromPoint( const Eigen::Vector3d& point,
                                            const Eigen::Vector3d& segmentCenter,
                                            const Eigen::Vector3d& flowDirection,
                                            double segmentCoeff )
{
  Eigen::Vector3d r = segmentCenter.array() - point.array();
  double rLengthSquared = r.squaredNorm();   // distance squared.
  Eigen::Vector3d rUnit = r.normalized();
  Eigen::Vector3d Vcrossr = flowDirection.cross(rUnit);
  Eigen::Vector3d B = segmentCoeff/rLengthSquared*Vcrossr.array();
  return B;
}
void IACTDetectorSphereCherenkovPhotonIntersectionFinder::
visit_cherenkov_photon(const calin::simulation::air_cherenkov_tracker::CherenkovPhoton& cherenkov_photon)
{
  calin::math::ray::Ray ray(cherenkov_photon.x0, cherenkov_photon.u0,
                            cherenkov_photon.t0, cherenkov_photon.epsilon);
  unsigned scope_id = 0;
  bool do_refraction = do_refraction_;
  double refraction_safety_margin = refraction_safety_margin_;
  for(auto& isphere : visitor_->detector_spheres()) {
    Eigen::Vector3d dx = cherenkov_photon.x0 - isphere.r0;
    double u0_dot_dx = cherenkov_photon.u0.dot(dx);
    double dr2 = dx.squaredNorm() - u0_dot_dx*u0_dot_dx;
    if(dr2 < SQR(isphere.radius + refraction_safety_margin)) {
      // we have a potential hit !
      if(do_refraction) {
        // Here we propagate to the observation plane associated with the
        // detector sphere, then refract the ray, then back-propagate to the
        // height of the photon emission so absorption can be calculated later
        atm_->propagate_ray_with_refraction(ray, isphere.iobs, /* time_reversal_ok = */ false);
        double reverse_propagate_dist = (cherenkov_photon.x0.z() - ray.z())/ray.uz();
        ray.propagate_dist(reverse_propagate_dist);
        do_refraction = false; // don't refract twice if we have overlapping telescopes
        refraction_safety_margin = 0.0;
        dx = cherenkov_photon.x0 - isphere.r0;
        u0_dot_dx = cherenkov_photon.u0.dot(dx);
        dr2 = dx.squaredNorm() - u0_dot_dx*u0_dot_dx;
        if(dr2 < SQR(isphere.radius)) {
          visitor_->process_ray(scope_id, ray, /* pe_weight = */ 1.0);
        }
      } else {
        visitor_->process_ray(scope_id, ray, /* pe_weight = */ 1.0);
      }
    }
    ++scope_id;
  }
}
void CEViewOptionsWidget::updateMillerPlane()
{
  // View into a Miller plane
  Camera *camera = m_glWidget->camera();
  Eigen::Transform3d modelView;
  modelView.setIdentity();

  // OK, so we want to rotate to look along the normal at the plane
  // So we convert <h k l> into a Cartesian normal
  Eigen::Matrix3d cellMatrix = m_ext->unconvertLength(m_ext->currentCellMatrix()).transpose();

  // Get miller indices:
  const Eigen::Vector3d millerIndices (static_cast<double>(ui.spin_mi_h->value()),
                                       static_cast<double>(ui.spin_mi_k->value()),
                                       static_cast<double>(ui.spin_mi_l->value()));

  // Check to see if we have 0,0,0
  // in which case, we do nothing
  if (millerIndices.squaredNorm() < 0.5)
    return;

  const Eigen::Vector3d normalVector ((cellMatrix * millerIndices).normalized());

  Eigen::Matrix3d rotation;
  rotation.row(2) = normalVector;
  rotation.row(0) = rotation.row(2).unitOrthogonal();
  rotation.row(1) = rotation.row(2).cross(rotation.row(0));

  // Translate camera to the center of the cell
  const Eigen::Vector3d cellDiagonal =
      cellMatrix.col(0) * m_glWidget->aCells() +
      cellMatrix.col(1) * m_glWidget->bCells() +
      cellMatrix.col(2) * m_glWidget->cCells();
  modelView.translate(-cellDiagonal*0.5);

  // Prerotate the camera to look down the specified normal
  modelView.prerotate(rotation);

  // Pretranslate in the negative Z direction
  modelView.pretranslate(Eigen::Vector3d(0.0, 0.0, -1.5 * cellDiagonal.norm()));

  camera->setModelview(modelView);

  // Call for a redraw
  m_glWidget->update();
}
/*!
Returns propagated state and max relative errors in kinetic
energy and magnetic moment of particle.
*/
template<class Stepper> std::tuple<state_t, double, double> trace_trajectory(
    state_t state,
    const double time_step
) {
    using std::fabs;
    using std::max;

    constexpr double propagation_time = 424.1; // seconds, 1/100 of doi above

    Particle_Propagator propagator(proton_charge_mass_ratio);

    const double
        initial_energy = 0.5 * proton_mass * state[1].squaredNorm(),
        initial_magnetic_moment
            = proton_mass
            * std::pow(state[1][1], 2)
            / (2 * get_earth_dipole(state[0]).norm());

    Stepper stepper;
    double nrj_error = 0, mom_error = 0;
    for (double time = 0; time < propagation_time; time += time_step) {
        stepper.do_step(propagator, state, time, time_step);

        const Eigen::Vector3d
            v(state[1]),
            b(get_earth_dipole(state[0])),
            v_perp_b(v - (v.dot(b) / b.squaredNorm()) * b);
        const double
            current_energy = 0.5 * proton_mass * state[1].squaredNorm(),
            current_magnetic_moment
                = proton_mass * v_perp_b.squaredNorm() / (2 * b.norm());

        nrj_error = max(
            nrj_error,
            fabs(current_energy - initial_energy) / initial_energy
        );
        mom_error = max(
            mom_error,
            fabs(current_magnetic_moment - initial_magnetic_moment)
                / initial_magnetic_moment
        );
    }

    return std::make_tuple(state, nrj_error, mom_error);
}
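// A small helper isolating the two invariants the loop above monitors: kinetic
// energy and the first adiabatic invariant mu = m*v_perp^2 / (2*B). Plain
// Eigen; the mass is passed in rather than using the test's proton_mass constant.
#include <Eigen/Dense>
#include <utility>

inline std::pair<double, double> energy_and_magnetic_moment(
    const Eigen::Vector3d& velocity,
    const Eigen::Vector3d& magnetic_field,
    const double mass
) {
    // component of the velocity perpendicular to the field
    const Eigen::Vector3d v_perp
        = velocity
        - (velocity.dot(magnetic_field) / magnetic_field.squaredNorm()) * magnetic_field;
    const double energy = 0.5 * mass * velocity.squaredNorm();
    const double moment = mass * v_perp.squaredNorm() / (2 * magnetic_field.norm());
    return std::make_pair(energy, moment);
}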
template <typename PointT> void
pcl::SampleConsensusModelCircle3D<PointT>::getDistancesToModel (
    const Eigen::VectorXf &model_coefficients, std::vector<double> &distances)
{
  // Check if the model is valid given the user constraints
  if (!isModelValid (model_coefficients))
  {
    distances.clear ();
    return;
  }
  distances.resize (indices_->size ());

  // Iterate through the 3d points and calculate the distances from them to the circle
  for (size_t i = 0; i < indices_->size (); ++i)
  // Calculate the distance from the point to the circle:
  // 1. calculate intersection point of the plane in which the circle lies and the
  //    line from the sample point with the direction of the plane normal (projected point)
  // 2. calculate the intersection point of the line from the circle center to the projected
  //    point with the circle
  // 3. calculate distance from corresponding point on the circle to the sample point
  {
    // what i have:
    // P : Sample Point
    Eigen::Vector3d P (input_->points[(*indices_)[i]].x,
                       input_->points[(*indices_)[i]].y,
                       input_->points[(*indices_)[i]].z);
    // C : Circle Center
    Eigen::Vector3d C (model_coefficients[0], model_coefficients[1], model_coefficients[2]);
    // N : Circle (Plane) Normal
    Eigen::Vector3d N (model_coefficients[4], model_coefficients[5], model_coefficients[6]);
    // r : Radius
    double r = model_coefficients[3];

    Eigen::Vector3d helper_vectorPC = P - C;
    // 1.1. get line parameter (note the negation: P_proj must satisfy (P_proj - C).dot (N) == 0)
    double lambda = -(helper_vectorPC.dot (N)) / N.squaredNorm ();
    // Projected Point on plane
    Eigen::Vector3d P_proj = P + lambda * N;
    Eigen::Vector3d helper_vectorP_projC = P_proj - C;

    // K : Point on Circle
    Eigen::Vector3d K = C + r * helper_vectorP_projC.normalized ();
    Eigen::Vector3d distanceVector = P - K;

    distances[i] = distanceVector.norm ();
  }
}
/*
 * @brief Convert the vector part of a quaternion to a
 *    full quaternion.
 * @note This function is useful to convert delta quaternion
 *    which is usually a 3x1 vector to a full quaternion.
 *    For more details, check Equation (238) and (239) in
 *    "Indirect Kalman Filter for 3D Attitude Estimation:
 *    A Tutorial for quaternion Algebra".
 */
inline Eigen::Vector4d smallAngleQuaternion(
    const Eigen::Vector3d& dtheta) {

  Eigen::Vector3d dq = dtheta / 2.0;
  Eigen::Vector4d q;
  double dq_square_norm = dq.squaredNorm();

  if (dq_square_norm <= 1) {
    q.head<3>() = dq;
    q(3) = std::sqrt(1-dq_square_norm);
  } else {
    q.head<3>() = dq;
    q(3) = 1;
    q = q / std::sqrt(1+dq_square_norm);
  }

  return q;
}
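// A hedged numerical check of smallAngleQuaternion() above: the result has
// unit norm by construction, and for a small rotation vector its vector part
// should be close to sin(|dtheta|/2) * dtheta_hat (the exact small-angle value).
#include <Eigen/Dense>
#include <cmath>
#include <iostream>

int main()
{
  Eigen::Vector3d dtheta(0.01, -0.02, 0.005);
  Eigen::Vector4d q = smallAngleQuaternion(dtheta);
  Eigen::Vector3d exact_vec = std::sin(dtheta.norm() / 2.0) * dtheta.normalized();
  std::cout << "norm: " << q.norm()                                  // 1
            << "  vec error: " << (q.head<3>() - exact_vec).norm()   // tiny (~1e-7)
            << "\n";
  return 0;
}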
int ParticleManagerTinkerToy::findClosestParticleIndex( double x, double y, bool fIgnoreLast )
{
  double distance = DBL_MAX;   // smallest squared distance seen so far
  int closestParticleIndex = -1;
  Eigen::Vector3d positionCurrent( x, y, 0 );
  int lastParticleIndex = fIgnoreLast ? m_particles.size() - 1 : m_particles.size();
  for (int i = 0; i < lastParticleIndex; i++) // Ignore last added particle
  {
    Eigen::Vector3d distVector = m_particles[i]->mPosition - positionCurrent;
    double distCur = distVector.squaredNorm();
    if ( distCur < distance )
    {
      distance = distCur;
      closestParticleIndex = i;
    }
  }
  return closestParticleIndex;
}
PlanarRangeVis2(Pose plane_pose,
                const Eigen::Vector3d& point,    // a point on the sampling plane
                const Eigen::Vector3d& normal,   // the plane normal
                LegController<n_joints>* controller,
                double samples_per_unit, double max_range)
    : controller(controller), point(point), normal(normal) {
  double d_r = 1.0/samples_per_unit;
  Eigen::Vector3d center = (point.dot(normal) / normal.squaredNorm()) * normal;
  /*
  double yaw = atan2(normal[1], normal[0]);
  double pitch = atan2(normal[2], sqrt(normal[0]*normal[0] + normal[1]*normal[1]));
  center_pose = Pose(center[0], center[1], center[2], yaw, pitch, 0);
  */
  center_pose = plane_pose;
  center_pose.x = center[0];
  center_pose.y = center[1];
  center_pose.z = center[2];

  double angles[n_joints];
  for (double x = -max_range; x < max_range; x += d_r) {
    for (double y = -max_range; y < max_range; y += d_r) {
      Eigen::Vector3d goal =
          Vector4dTo3d(center_pose.FromFrame(Eigen::Vector4d(0.0, x, y, 1.0)));
      plane_query_scores.push_back(controller->GetJointCommands(goal, angles));
      plane_query.push_back(goal);
    }
  }
}
Eigen::Vector3d Atom::CalculateElectricalForce(const Eigen::Vector3d& chargePosition,
                                               Real charge,
                                               shared_ptr<ForceVectors> forceVectorStorage)
{
  Eigen::Vector3d forceDirection = chargePosition.array() - m_position.array();
  Real rSquared = forceDirection.squaredNorm();
  Real forceMagnitude = K_VACUUM*charge*m_effectiveNuclearCharge*Square(ELECTRIC_CHARGE)/rSquared;

  // The force is in the direction of the charge.
  forceDirection.normalize();
  Eigen::Vector3d forceVector = forceDirection.array()*forceMagnitude;

  if(forceVectorStorage)
  {
    forceVectorStorage->AddForceVector(GetName(), forceVector);
  }

  // Add in forces due to sp orbitals.
  for(int i=0; i<m_spOrbitals.size(); i++)
  {
    shared_ptr<SpOrbital> orbital = m_spOrbitals[i];
    Eigen::Vector3d spForce = orbital->CalculateElectricalForce(chargePosition, charge, forceVectorStorage);
    forceVector.array() += spForce.array();
  }

  return forceVector;
}
void drawWorldPretty(const World& world, const Camera& camera,
                     const VisSettings& settings, SDL_Window* window){
  glEnable(GL_DEPTH_TEST);
  glFrontFace(GL_CCW);
  glEnable(GL_CULL_FACE);
  glClearColor(0.2, 0.2, 0.2, 1);
  glClear(GL_COLOR_BUFFER_BIT| GL_DEPTH_BUFFER_BIT);

  glMatrixMode(GL_PROJECTION);
  glLoadIdentity();
  int windowWidth, windowHeight;
  SDL_GetWindowSize(window, &windowWidth, &windowHeight);
  gluPerspective(45, static_cast<double>(windowWidth)/windowHeight, .5, 100);

  glMatrixMode(GL_MODELVIEW);
  glLoadIdentity();
  gluLookAt(camera.position.x(), camera.position.y(), camera.position.z(),
            camera.lookAt.x(), camera.lookAt.y(), camera.lookAt.z(),
            camera.up.x(), camera.up.y(), camera.up.z());

  glEnable (GL_BLEND);
  glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

  //avoid typing world.clusters, world.particles everywhere
  const auto& clusters = world.clusters;
  const auto& particles = world.particles;
  const int which_cluster = settings.which_cluster;

  if(!particles.empty()){
    glPointSize(10);

    if (!settings.drawColoredParticles) {
      if (settings.which_cluster == -1) {
        glColor4d(1,1,1,0.95);
      } else {
        glColor4d(1,1,1,0.2);
      }
      glEnableClientState(GL_VERTEX_ARRAY);
      glVertexPointer(3, GL_DOUBLE, sizeof(Particle), &(particles[0].position));
      glDrawArrays(GL_POINTS, 0, particles.size());
    } else {
      glBegin(GL_POINTS);
      for(auto& p : particles) {
        //find nearest cluster
        int min_cluster = p.clusters[0];
        auto com = clusters[min_cluster].worldCom;//computeNeighborhoodCOM(clusters[min_cluster]);
        Eigen::Vector3d dir = p.position - com;
        double min_sqdist = dir.squaredNorm();
        for (auto& cInd : p.clusters) {
          com = clusters[cInd].worldCom;//computeNeighborhoodCOM(clusters[cInd]);
          dir = p.position - com;
          double dist = dir.squaredNorm();
          if (dist < min_sqdist) {
            min_sqdist = dist;
            min_cluster = cInd;
          }
        }

        RGBColor rgb = HSLColor(2.0*acos(-1)*(min_cluster%12)/12.0, 0.7, 0.7).to_rgb();
        //RGBColor rgb = HSLColor(2.0*acos(-1)*min_cluster/clusters.size(), 0.7, 0.7).to_rgb();
        if ((which_cluster == -1 || min_cluster == which_cluster) &&
            clusters[min_cluster].members.size() > 1) {
            // sqrt(min_sqdist) < 0.55*clusters[min_cluster].renderWidth) {
          glColor4d(rgb.r, rgb.g, rgb.b, 0.95);
        } else {
          glColor4d(1,1,1,0.95);
        }
        //glPushMatrix();
        //glTranslated(p.position[0], p.position[1], p.position[2]);
        //utils::drawSphere(0.01, 4, 4);
        glVertex3dv(p.position.data());
        //glPopMatrix();
      }
      glEnd();
    }
    /*
    glPointSize(3);
    glColor3f(0,0,1);
    glVertexPointer(3, GL_DOUBLE, sizeof(Particle), &(particles[0].goalPosition));
    glDrawArrays(GL_POINTS, 0, particles.size());
    */
  }

  drawPlanesPretty(world.planes, world.movingPlanes,
                   world.twistingPlanes, world.tiltingPlanes,
                   world.elapsedTime);

  auto max_t = std::max_element(
      clusters.begin(), clusters.end(),
      [](const Cluster& a, const Cluster& b){
        return a.toughness < b.toughness;
      })->toughness;

  glEnable(GL_POLYGON_OFFSET_FILL);
  glPolygonOffset(1.0,1.0);

  //draw cluster spheres
  if(settings.drawClusters){
    glEnable(GL_CULL_FACE);
    glCullFace(GL_BACK);
    glMatrixMode(GL_MODELVIEW);
    for(auto&& pr : benlib::enumerate(clusters)){
      auto& c = pr.second;
      const auto i = pr.first;
      if (which_cluster == -1 || i == which_cluster) {
        glPushMatrix();

        //logic:
        //c.cg.c = original COM of cluster
        //c.worldCom = current COM in world space
        //c.restCom = current COM in rest space
        //c.worldCom - c.restCom = translation of center of mass rest to world
        //trans = location of c in world.
        //auto trans = cg.c + c.worldCom - c.restCom;
        //auto trans = c.worldCom - c.restCom;
        //glTranslated(trans(0), trans(1), trans(2));

        /*
        //step 3, translate from rest space origin to world
        glTranslated(c.worldCom(0), c.worldCom(1), c.worldCom(2));

        //step 2, rotate about rest-to-world transform
        Eigen::Matrix4d gl_rot = Eigen::Matrix4d::Identity();
        //push the rotation onto a 4x4 glMatrix
        //gl_rot.block<3,3>(0,0) << c.restToWorldTransform;
        auto polar = utils::polarDecomp(c.restToWorldTransform);
        gl_rot.block<3,3>(0,0) << polar.first;
        glMultMatrixd(gl_rot.data());

        //step 1, translate rest com to origin
        glTranslated(-c.restCom(0), -c.restCom(1), -c.restCom(2));
        */

        Eigen::Matrix4d vis_t = c.getVisTransform();
        glMultMatrixd(vis_t.data());

        RGBColor rgb = HSLColor(2.0*acos(-1)*(i%12)/12.0, 0.7, 0.7).to_rgb();
        //RGBColor rgb = HSLColor(2.0*acos(-1)*i/clusters.size(), 0.7, 0.7).to_rgb();
        if (settings.colorByToughness) {
          if (c.toughness == std::numeric_limits<double>::infinity()) {
            rgb = RGBColor(0.0, 0.0, 0.0);
          } else {
            auto factor = c.toughness/max_t;
            rgb = RGBColor(1.0-factor, factor, factor);
          }
        }

        glPolygonMode(GL_FRONT,GL_FILL);
        glColor4d(rgb.r, rgb.g, rgb.b, 0.6);
        //utils::drawSphere(c.cg.r, 10, 10);
        utils::drawClippedSphere(c.cg.r, 30, 30, c.cg.c, c.cg.planes);
        glPolygonMode(GL_FRONT,GL_LINE);
        glColor3d(0,0,0);
        utils::drawClippedSphere(c.cg.r, 30, 30, c.cg.c, c.cg.planes);

        glPopMatrix();
      }
    }
    glDisable(GL_CULL_FACE);
  }

  //draw fracture planes
  glMatrixMode(GL_MODELVIEW);
  if(settings.drawFracturePlanes){
    //glEnable(GL_CULL_FACE);
    //glCullFace(GL_BACK);
    for(auto&& pr : benlib::enumerate(clusters)){
      auto& c = pr.second;
      const auto i = pr.first;
      if (which_cluster == -1 || i == which_cluster) {
        glPushMatrix();
        auto& cg = c.cg;
        if (cg.planes.size() > 0) {
          //step 3, translate from rest space origin to world
          /*
          glTranslated(c.worldCom(0), c.worldCom(1), c.worldCom(2));

          //step 2, rotate about rest-to-world transform
          Eigen::Matrix4d gl_rot = Eigen::Matrix4d::Identity();
          //push the rotation onto a 4x4 glMatrix
          //gl_rot.block<3,3>(0,0) << c.restToWorldTransform;
          auto polar = utils::polarDecomp(c.restToWorldTransform);
          gl_rot.block<3,3>(0,0) << polar.first;
          glMultMatrixd(gl_rot.data());

          //step 1, translate rest com to origin
          glTranslated(-c.restCom(0), -c.restCom(1), -c.restCom(2));
          */

          if (settings.joshDebugFlag) {
            Eigen::Matrix4d vis_t = c.getVisTransform();
            glMultMatrixd(vis_t.data());
            RGBColor rgb = HSLColor(2.0*acos(-1)*(i%12)/12.0, 0.7, 0.7).to_rgb();
            for (auto &p : cg.planes) {
              glPolygonMode(GL_FRONT, GL_FILL);
              glColor4d(rgb.r, rgb.g, rgb.b, 0.5);
              //if rest com translated to origin (step 1 above)
              ////then we project from c.cg.c to make support point
              utils::drawPlane(p.first, p.second, c.cg.r, c.cg.c);
              glPolygonMode(GL_FRONT, GL_LINE);
              glColor3d(0,0,0);
              utils::drawPlane(p.first, p.second, c.cg.r, c.cg.c);
              //if rest com translated to origin (step 1 above)
            }
          } else {
            Eigen::Matrix4d vis_t = c.getVisTransform();
            //glMultMatrixd(vis_t.data());
            RGBColor rgb = HSLColor(2.0*acos(-1)*(i%12)/12.0, 0.7, 0.7).to_rgb();
            for (auto &p : cg.planes) {
              glPolygonMode(GL_FRONT, GL_FILL);
              glColor4d(rgb.r, rgb.g, rgb.b, 0.5);
              //if rest com translated to origin (step 1 above)
              ////then we project from c.cg.c to make support point
              Eigen::Vector4d norm4(p.first(0), p.first(1), p.first(2), 0);
              Eigen::Vector4d pt4(c.cg.c(0), c.cg.c(1), c.cg.c(2), 1);
              double d = norm4.dot(pt4) + p.second;
              pt4 = pt4 - d*norm4;
              norm4 = vis_t * norm4;
              pt4 = vis_t * pt4;
              Eigen::Vector3d norm3(norm4(0), norm4(1), norm4(2));
              Eigen::Vector3d pt3(pt4(0), pt4(1), pt4(2));
              d = norm3.dot(pt3);
              utils::drawPlane(norm3, -d, c.cg.r, pt3);
            }
            //only draw the plane outlines with the transform
            glMultMatrixd(vis_t.data());
            for (auto &p : cg.planes) {
              glPolygonMode(GL_FRONT, GL_LINE);
              glColor3d(0,0,0);
              utils::drawPlane(p.first, p.second, c.cg.r, c.cg.c);
              //if rest com translated to origin (step 1 above)
            }
          }
        }
        glPopMatrix();
      }
    }
    //glDisable(GL_CULL_FACE);
  }

  glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);

  if (settings.which_cluster != -1) {
    const auto& c = clusters[which_cluster];

    Eigen::Matrix3d Apq = world.computeApq(c);
    Eigen::Matrix3d A = Apq*c.aInv;
    if (world.nu > 0.0) A = A*c.Fp.inverse(); // plasticity

    //do the SVD here so we can handle fracture stuff
    Eigen::JacobiSVD<Eigen::Matrix3d> solver(A, Eigen::ComputeFullU | Eigen::ComputeFullV);

    Eigen::Matrix3d U = solver.matrixU(), V = solver.matrixV();
    Eigen::Vector3d sigma = solver.singularValues();
    std::cout << "sigma: " << sigma << std::endl;

    glColor4d(0,0,0, 0.9);
    glPointSize(5);
    if (!settings.drawColoredParticles) {
      glBegin(GL_POINTS);
      for(auto &member : c.members){
        glVertex3dv(particles[member.index].position.data());
      }
      glEnd();
    }
  }

  glColor3d(1, 0, 1);
  for(auto& projectile : world.projectiles){
    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();
    auto currentPosition = projectile.start + world.elapsedTime*projectile.velocity;
    glTranslated(currentPosition.x(), currentPosition.y(), currentPosition.z());
    utils::drawSphere(projectile.radius, 10, 10);
    glPopMatrix();
  }

  glColor3d(0,1,0);
  for(auto& cylinder : world.cylinders){
    utils::drawCylinder(cylinder.supportPoint, cylinder.normal, cylinder.radius);
  }

  glFlush();
  SDL_GL_SwapWindow(window);
}
void HoleFinderPrivate::mergeHoles()
{
  // Make copy, clear original, add merged holes back into original
  QVector<Hole> holeCopy (this->holes);
  const int numHoles = this->holes.size();
  this->holes.clear();
  this->holes.reserve(numHoles);

  // If the bit is on, it has not been merged. If off, it has been.
  QBitArray mask (holeCopy.size(), true);

  // Check each pair of unmerged holes. If one contains the other, merge them.
  QVector<Hole*> toMerge;
  toMerge.reserve(256); // Way bigger than we need, but certainly sufficient

  // Temp vars
  Eigen::Vector3d diffVec;

  // "i" indexes the "base" hole
  for (int i = 0; i < numHoles; ++i) {
    if (!mask.testBit(i))
      continue;
    mask.clearBit(i);
    Hole &hole_i = holeCopy[i];
    toMerge.clear();
    toMerge.reserve(256);
    toMerge.push_back(&hole_i);

    // "j" indexes the compared holes
    for (int j = i+1; j < numHoles; ++j) {
      if (!mask.testBit(j))
        continue;
      Hole &hole_j = holeCopy[j];
      diffVec = hole_j.center - hole_i.center;

      // Use the greater of the two radii
      const double rad = (hole_i.radius > hole_j.radius) ? hole_i.radius
                                                         : hole_j.radius;
      const double radSq = rad * rad;

      // Check periodic conditions
      // Convert diffVec to fractional units
      this->cartToFrac(&diffVec);
      // Adjust each component to range [-0.5, 0.5] (shortest representation)
      while (diffVec.x() < -0.5) ++diffVec.x();
      while (diffVec.y() < -0.5) ++diffVec.y();
      while (diffVec.z() < -0.5) ++diffVec.z();
      while (diffVec.x() >  0.5) --diffVec.x();
      while (diffVec.y() >  0.5) --diffVec.y();
      while (diffVec.z() >  0.5) --diffVec.z();
      // Back to cartesian
      this->fracToCart(&diffVec);

      // if j is within i's radius, add "j" to the merge list
      // and mark "j" as merged
      if (fabs(diffVec.x()) > rad ||
          fabs(diffVec.y()) > rad ||
          fabs(diffVec.z()) > rad ||
          fabs(diffVec.squaredNorm()) > radSq)
        continue; // no match

      // match:
      // Reset j's position to account for periodic wrap-around
      hole_j.center = hole_i.center + diffVec;
      mask.clearBit(j);
      toMerge.push_back(&hole_j);
    }

    if (toMerge.size() == 1)
      this->holes.push_back(hole_i);
    else
      this->holes.push_back(reduceHoles(toMerge));
  }
}
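// An equivalent, loop-free way to apply the minimum-image adjustment used
// above (a hedged alternative in plain Eigen): x - round(x) maps each
// fractional component into [-0.5, 0.5].
#include <Eigen/Dense>
#include <cmath>

inline void wrapToMinimumImage(Eigen::Vector3d& frac)
{
  frac.x() -= std::round(frac.x());
  frac.y() -= std::round(frac.y());
  frac.z() -= std::round(frac.z());
}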
void mesh_core::findSphereTouching3Points(
      Eigen::Vector3d& center,
      double& radius,
      const Eigen::Vector3d& a,
      const Eigen::Vector3d& b,
      const Eigen::Vector3d& c)
{
  Eigen::Vector3d ab = b - a;
  Eigen::Vector3d ac = c - a;
  Eigen::Vector3d n = ab.cross(ac);

  double ab_lensq = ab.squaredNorm();
  double ac_lensq = ac.squaredNorm();
  double n_lensq = n.squaredNorm();

  // points are colinear?
  if (n_lensq <= std::numeric_limits<double>::epsilon())
  {
    findSphereTouching3PointsColinear(
        center, radius,
        a, b, c,
        ab_lensq,
        (c - b).squaredNorm(),
        ac_lensq);
    if (g_verbose)
    {
      logInform("findSphereTouching3Points() COLINEAR QQQQ");
      logInform("     a = (%7.3f %7.3f %7.3f)", a.x(), a.y(), a.z());
      logInform("     b = (%7.3f %7.3f %7.3f)", b.x(), b.y(), b.z());
      logInform("     c = (%7.3f %7.3f %7.3f)", c.x(), c.y(), c.z());
      logInform("     s = (%7.3f %7.3f %7.3f) r=%7.3f",
                center.x(), center.y(), center.z(), radius);
    }
    return;
  }

  Eigen::Vector3d rel_center =
      (ac_lensq * n.cross(ab) + ab_lensq * ac.cross(n)) / (2.0 * n_lensq);
  radius = rel_center.norm();
  center = rel_center + a;

  if (g_verbose)
  {
    logInform("findSphereTouching3Points()");
    logInform("     a = (%7.3f %7.3f %7.3f)", a.x(), a.y(), a.z());
    logInform("     b = (%7.3f %7.3f %7.3f)", b.x(), b.y(), b.z());
    logInform("     c = (%7.3f %7.3f %7.3f)", c.x(), c.y(), c.z());
    logInform("     s = (%7.3f %7.3f %7.3f) r=%7.3f",
              center.x(), center.y(), center.z(), radius);
  }
}
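// A quick check of findSphereTouching3Points() above (assumes the mesh_core
// declaration is visible and g_verbose is false): the circumradius of an
// equilateral triangle with side s is s / sqrt(3).
#include <Eigen/Dense>
#include <cmath>
#include <iostream>

int main()
{
  const double s = 2.0;
  Eigen::Vector3d a(0, 0, 0), b(s, 0, 0), c(s / 2.0, s * std::sqrt(3.0) / 2.0, 0);
  Eigen::Vector3d center;
  double radius = 0.0;
  mesh_core::findSphereTouching3Points(center, radius, a, b, c);
  std::cout << "center: " << center.transpose()    // (1, ~0.577, 0)
            << "  radius: " << radius              // ~1.1547
            << "  expected: " << s / std::sqrt(3.0) << "\n";
  return 0;
}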
int main()
{
    using std::cos;
    using std::fabs;
    using std::max;
    using std::sin;

    const Eigen::Vector3d
        guiding_center_start{5 * earth_radius, 0, 0},
        field_at_start{get_earth_dipole(guiding_center_start)};

    const double gyroperiod
        = 2 * M_PI / fabs(proton_charge_mass_ratio) / field_at_start.norm();

    /*cout << "Energy (eV), pitch angle (deg), steps/gyroperiod: "
        "order of convergence, max relative error in energy\n";*/
    for (auto kinetic_energy: {1e4, 1e7}) { // in eV
    for (auto pitch_angle: {90.0, 30.0}) { // V from B, degrees

        const double particle_speed
            = sqrt(2 * kinetic_energy * proton_charge_mass_ratio);

        const Eigen::Vector3d initial_velocity{
            0,
            sin(pitch_angle / 180 * M_PI) * particle_speed,
            cos(pitch_angle / 180 * M_PI) * particle_speed
        };

        const double gyroradius
            = initial_velocity[1]
            / proton_charge_mass_ratio
            / field_at_start.norm();

        const Eigen::Vector3d
            offset_from_gc{gyroradius, 0, 0},
            initial_position{guiding_center_start + offset_from_gc};

        const double
            initial_energy = 0.5 * proton_mass * initial_velocity.squaredNorm(),
            initial_magnetic_moment
                = proton_mass
                * std::pow(initial_velocity[1], 2)
                / (2 * get_earth_dipole(initial_position).norm());

        for (size_t step_divisor = 1; step_divisor <= (1 << 11); step_divisor *= 2) {

            state_t state{{initial_position, initial_velocity}};
            const double time_step = gyroperiod / step_divisor;
            constexpr double propagation_time = 424.1; // seconds, 1/100 of doi above

            double nrj_error = 0, mom_error = 0;
            for (double time = 0; time < propagation_time; time += time_step) {
                std::tie(state[0], state[1]) = propagate(
                    state[0],
                    state[1],
                    Vector3d{0, 0, 0},
                    get_earth_dipole(state[0]),
                    proton_charge_mass_ratio,
                    time_step
                );

                const Eigen::Vector3d
                    v(state[1]),
                    b(get_earth_dipole(state[0])),
                    v_perp_b(v - (v.dot(b) / b.squaredNorm()) * b);
                const double
                    current_energy = 0.5 * proton_mass * state[1].squaredNorm(),
                    current_magnetic_moment
                        = proton_mass * v_perp_b.squaredNorm() / (2 * b.norm());

                nrj_error = max(
                    nrj_error,
                    fabs(current_energy - initial_energy) / initial_energy
                );
                mom_error = max(
                    mom_error,
                    fabs(current_magnetic_moment - initial_magnetic_moment)
                        / initial_magnetic_moment
                );
            }

            /*cout << kinetic_energy << " " << pitch_angle << " "
                << step_divisor << ": " << nrj_error << " " << mom_error << endl;*/

            if (
                not check_result(
                    kinetic_energy,
                    pitch_angle,
                    step_divisor,
                    nrj_error,
                    mom_error,
                    state[0]
                )
            ) {
                std::cerr << __FILE__ << " (" << __LINE__ << "): "
                    << "Unexpected failure in propagator." << std::endl;
                return EXIT_FAILURE;
            }
        }
    }}

    return EXIT_SUCCESS;
}
void CTrackerStereoMotionModel::_trackLandmarks( const cv::Mat& p_matImageLEFT,
                                                 const cv::Mat& p_matImageRIGHT,
                                                 const Eigen::Isometry3d& p_matTransformationEstimateWORLDtoLEFT,
                                                 const Eigen::Isometry3d& p_matTransformationEstimateParallelWORLDtoLEFT,
                                                 const CLinearAccelerationIMU& p_vecLinearAcceleration,
                                                 const Eigen::Vector3d& p_vecRotationTotal )
{
    //ds get images into triple channel mats (display only)
    cv::Mat matDisplayLEFT;
    cv::Mat matDisplayRIGHT;

    //ds get images to triple channel for colored display
    cv::cvtColor( p_matImageLEFT, matDisplayLEFT, cv::COLOR_GRAY2BGR );
    cv::cvtColor( p_matImageRIGHT, matDisplayRIGHT, cv::COLOR_GRAY2BGR );

    //ds get clean copies
    const cv::Mat matDisplayLEFTClean( matDisplayLEFT.clone( ) );
    const cv::Mat matDisplayRIGHTClean( matDisplayRIGHT.clone( ) );

    //ds compute motion scaling
    const double dMotionScaling = 1.0+100*p_vecRotationTotal.squaredNorm( );

    //ds refresh landmark states
    m_cMatcher.resetVisibilityActiveLandmarks( );

    //ds initial transformation
    Eigen::Isometry3d matTransformationWORLDtoLEFT( p_matTransformationEstimateWORLDtoLEFT );

    try
    {
        //ds get the optimized pose
        matTransformationWORLDtoLEFT = m_cMatcher.getPoseOptimizedSTEREO( m_uFrameCount,
                                                                          matDisplayLEFT,
                                                                          matDisplayRIGHT,
                                                                          p_matImageLEFT,
                                                                          p_matImageRIGHT,
                                                                          p_matTransformationEstimateWORLDtoLEFT,
                                                                          p_vecRotationTotal,
                                                                          dMotionScaling );
    }
    catch( const CExceptionPoseOptimization& p_cException )
    {
        std::printf( "<CTrackerStereoMotionModel>(_trackLandmarks) pose optimization failed: '%s' trying parallel transform\n", p_cException.what( ) );
        try
        {
            //ds get the optimized pose on constant motion
            matTransformationWORLDtoLEFT = m_cMatcher.getPoseOptimizedSTEREO( m_uFrameCount,
                                                                              matDisplayLEFT,
                                                                              matDisplayRIGHT,
                                                                              p_matImageLEFT,
                                                                              p_matImageRIGHT,
                                                                              p_matTransformationEstimateParallelWORLDtoLEFT,
                                                                              p_vecRotationTotal,
                                                                              dMotionScaling );
        }
        catch( const CExceptionPoseOptimization& p_cException )
        {
            //ds stick to the last estimate and adopt orientation
            std::printf( "<CTrackerStereoMotionModel>(_trackLandmarks) parallel pose optimization failed: '%s'\n", p_cException.what( ) );
            matTransformationWORLDtoLEFT = m_matTransformationWORLDtoLEFTLAST;
            m_uWaitKeyTimeoutMS = 0;
        }
    }

    //ds respective camera transform
    Eigen::Isometry3d matTransformationLEFTtoWORLD( matTransformationWORLDtoLEFT.inverse( ) );

    //ds estimate acceleration in current WORLD frame (necessary to filter gravity)
    const Eigen::Isometry3d matTransformationIMUtoWORLD( matTransformationLEFTtoWORLD*m_pCameraLEFT->m_matTransformationIMUtoCAMERA );
    const CLinearAccelerationWORLD vecLinearAccelerationWORLD( matTransformationIMUtoWORLD.linear( )*p_vecLinearAcceleration );
    const CLinearAccelerationWORLD vecLinearAccelerationWORLDFiltered( CIMUInterpolator::getLinearAccelerationFiltered( vecLinearAccelerationWORLD ) );

    //ds update acceleration reference
    m_vecLinearAccelerationFilteredLAST = matTransformationWORLDtoLEFT.linear( )*vecLinearAccelerationWORLDFiltered;
    CLogger::CLogLinearAcceleration::addEntry( m_uFrameCount, vecLinearAccelerationWORLD, vecLinearAccelerationWORLDFiltered );

    //ds current translation
    m_vecPositionLAST    = m_vecPositionCurrent;
    m_vecPositionCurrent = matTransformationLEFTtoWORLD.translation( );
    CLogger::CLogTrajectory::addEntry( m_uFrameCount, m_vecPositionCurrent, Eigen::Quaterniond( matTransformationLEFTtoWORLD.linear( ) ) );

    //ds set visible landmarks (including landmarks already detected in the pose optimization)
    const std::shared_ptr< const std::vector< const CMeasurementLandmark* > > vecMeasurements( m_cMatcher.getMeasurementsEpipolar( matDisplayLEFT,
                                                                                                                                   matDisplayRIGHT,
                                                                                                                                   m_uFrameCount,
                                                                                                                                   p_matImageLEFT,
                                                                                                                                   p_matImageRIGHT,
                                                                                                                                   matTransformationWORLDtoLEFT,
                                                                                                                                   matTransformationLEFTtoWORLD,
                                                                                                                                   p_vecRotationTotal,
                                                                                                                                   dMotionScaling ) );

    //ds compute landmark lost since last (negative if we see more landmarks than before)
    const UIDLandmark uNumberOfVisibleLandmarks = vecMeasurements->size( );
    const int32_t iLandmarksLost                = m_uNumberofVisibleLandmarksLAST-uNumberOfVisibleLandmarks;

    //ds if we lose more than 75% landmarks in one frame
    if( 0.75 < static_cast< double >( iLandmarksLost )/m_uNumberofVisibleLandmarksLAST )
    {
        std::printf( "<CTrackerStereoMotionModel>(_trackLandmarks) lost track (landmarks lost: %i), total delta: %f (%f %f %f)\n",
                     iLandmarksLost,
                     m_vecVelocityAngularFilteredLAST.squaredNorm( ),
                     m_vecVelocityAngularFilteredLAST.x( ),
                     m_vecVelocityAngularFilteredLAST.y( ),
                     m_vecVelocityAngularFilteredLAST.z( ) );
        //m_uWaitKeyTimeoutMS = 0;
    }

    //ds update reference
    m_uNumberofVisibleLandmarksLAST = uNumberOfVisibleLandmarks;

    //ds debug
    for( const CMeasurementLandmark* pMeasurement: *vecMeasurements )
    {
        //ds compute green brightness based on depth (further away -> darker)
        uint8_t uGreenValue = 255-std::sqrt( pMeasurement->vecPointXYZLEFT.z( ) )*20;
        cv::circle( matDisplayLEFT, pMeasurement->ptUVLEFT, 2, CColorCodeBGR( 0, uGreenValue, 0 ), -1 );
        cv::circle( matDisplayRIGHT, pMeasurement->ptUVRIGHT, 2, CColorCodeBGR( 0, uGreenValue, 0 ), -1 );

        //ds get 3d position in current camera frame
        const CPoint3DCAMERA vecXYZLEFT( matTransformationWORLDtoLEFT*pMeasurement->vecPointXYZWORLDOptimized );

        //ds also draw reprojections
        cv::circle( matDisplayLEFT, m_pCameraLEFT->getProjection( vecXYZLEFT ), 6, CColorCodeBGR( 0, uGreenValue, 0 ), 1 );
        cv::circle( matDisplayRIGHT, m_pCameraRIGHT->getProjection( vecXYZLEFT ), 6, CColorCodeBGR( 0, uGreenValue, 0 ), 1 );
    }

    //ds accumulate orientation
    m_vecCameraOrientationAccumulated += p_vecRotationTotal;

    //ds position delta
    const Eigen::Vector3d vecDelta( m_vecPositionCurrent-m_vecPositionKeyFrameLAST );

    //ds get norm
    m_dTranslationDeltaSquaredNormCurrent = vecDelta.squaredNorm( );

    bool bIsKeyFrameRegular = false;
    bool bIsKeyFrameSliding = false;

    //ds add a keyframe if translational delta is sufficiently high
    if( m_dTranslationDeltaForKeyFrameMetersL2 < m_dTranslationDeltaSquaredNormCurrent ||
        m_dAngleDeltaForOptimizationRadiansL2 < m_vecCameraOrientationAccumulated.squaredNorm( ) )
    {
        bIsKeyFrameRegular = true;
    }
    //ds check the sliding window
    else
    {
        //ds every nth frame is handled with the sliding window
        if( 0 == m_uFrameCount%m_uFrameToWindowRatio )
        {
            //ds get average position of current and last (ROBUSTNESS)
            const CPoint3DWORLD vecPositionAverage( ( m_vecPositionCurrent+m_vecPositionLAST )/2.0 );

            m_arrFramesSlidingWindow[m_uIndexSlidingWindow] = std::make_pair( vecPositionAverage, new CKeyFrame( 0,
                                                                                                                 matTransformationLEFTtoWORLD,
                                                                                                                 p_vecLinearAcceleration.normalized( ),
                                                                                                                 *vecMeasurements,
                                                                                                                 m_uFrameCount,
                                                                                                                 0 ) );

            //ds evaluate sliding window
            bIsKeyFrameSliding = _containsSlidingWindowKeyFrame( );

            //ds current slide index
            ++m_uIndexSlidingWindow;
        }
    }

    //ds check if set
    if( bIsKeyFrameRegular || bIsKeyFrameSliding )
    {
        //ds register to matcher
        m_cMatcher.setKeyFrameToVisibleLandmarks( );

        //ds current "id"
        const UIDKeyFrame iIDKeyFrameCurrent = m_vecKeyFrames->size( );

        //ds add the keyframe depending on selection mode
        if( bIsKeyFrameRegular )
        {
            //ds create new frame
            m_vecKeyFrames->push_back( new CKeyFrame( iIDKeyFrameCurrent,
                                                      matTransformationLEFTtoWORLD,
                                                      p_vecLinearAcceleration.normalized( ),
                                                      *vecMeasurements,
                                                      m_uFrameCount,
                                                      _getLoopClosureKeyFrame( iIDKeyFrameCurrent, matTransformationLEFTtoWORLD, matDisplayLEFT ) ) );

            //ds check if optimization is required
            if( m_uIDDeltaKeyFrameForOptimization < iIDKeyFrameCurrent-m_uIDProcessedKeyFrameLAST )
            {
                //ds optimize the segment
                m_cOptimizer.optimizeContinuous( m_uIDProcessedKeyFrameLAST, iIDKeyFrameCurrent );
                assert( m_vecKeyFrames->back( )->bIsOptimized );

                //ds update transformations with optimized ones
                m_uIDProcessedKeyFrameLAST         = iIDKeyFrameCurrent+1;
                matTransformationLEFTtoWORLD       = m_vecKeyFrames->back( )->matTransformationLEFTtoWORLD;
                m_vecPositionCurrent               = matTransformationLEFTtoWORLD.translation( );
                matTransformationWORLDtoLEFT       = matTransformationLEFTtoWORLD.inverse( );
                m_matTransformationWORLDtoLEFTLAST = matTransformationWORLDtoLEFT;
                //m_uWaitKeyTimeoutMS = 0;
            }
        }
        else
        {
            //ds add a copy of the center frame of the window (at position 0,1,2,3,[4],5,6,7,8
            m_vecKeyFrames->push_back( new CKeyFrame( iIDKeyFrameCurrent,
                                                      m_arrFramesSlidingWindow[m_uIndexSlidingWindow-5].second,
                                                      _getLoopClosureKeyFrame( iIDKeyFrameCurrent, m_arrFramesSlidingWindow[m_uIndexSlidingWindow-5].second->matTransformationLEFTtoWORLD, matDisplayLEFT ) ) );

            //ds scene viewer
            m_bIsFrameAvailableSlidingWindow = true;
        }

        //ds update references
        m_uIDFrameOfKeyFrameLAST          = m_vecKeyFrames->back( )->uFrame;
        m_vecPositionKeyFrameLAST         = m_vecPositionCurrent;
        m_vecCameraOrientationAccumulated = Eigen::Vector3d::Zero( );

        //ds update scene in viewer
        m_prFrameLEFTtoWORLD = std::pair< bool, Eigen::Isometry3d >( bIsKeyFrameRegular, matTransformationLEFTtoWORLD );
        m_bIsFrameAvailable  = true;
    }
    else
    {
        //ds update scene in viewer: no keyframe
        m_prFrameLEFTtoWORLD = std::pair< bool, Eigen::Isometry3d >( false, matTransformationLEFTtoWORLD );
        m_bIsFrameAvailable  = true;
    }

    //ds check if we have to detect new landmarks
    if( m_uVisibleLandmarksMinimum > m_uNumberofVisibleLandmarksLAST )
    {
        //ds clean the lower display
        cv::hconcat( matDisplayLEFTClean, matDisplayRIGHTClean, m_matDisplayLowerReference );

        //ds detect landmarks
        const std::shared_ptr< std::vector< CLandmark* > > vecNewLandmarks( _getNewLandmarks( m_uFrameCount,
                                                                                              m_matDisplayLowerReference,
                                                                                              p_matImageLEFT,
                                                                                              p_matImageRIGHT,
                                                                                              matTransformationWORLDtoLEFT,
                                                                                              matTransformationLEFTtoWORLD,
                                                                                              p_vecRotationTotal ) );

        //ds all visible in this frame
        m_uNumberofVisibleLandmarksLAST += vecNewLandmarks->size( );

        //ds add to permanent reference holder
        m_vecLandmarks->insert( m_vecLandmarks->end( ), vecNewLandmarks->begin( ), vecNewLandmarks->end( ) );

        //ds add this measurement point to the epipolar matcher
        m_cMatcher.addDetectionPoint( matTransformationLEFTtoWORLD, vecNewLandmarks );
    }

    //ds build display mat
    cv::Mat matDisplayUpper = cv::Mat( m_pCameraSTEREO->m_uPixelHeight, 2*m_pCameraSTEREO->m_uPixelWidth, CV_8UC3 );
    cv::hconcat( matDisplayLEFT, matDisplayRIGHT, matDisplayUpper );
    _drawInfoBox( matDisplayUpper );
    cv::Mat matDisplayComplete = cv::Mat( 2*m_pCameraSTEREO->m_uPixelHeight, 2*m_pCameraSTEREO->m_uPixelWidth, CV_8UC3 );
    cv::vconcat( matDisplayUpper, m_matDisplayLowerReference, matDisplayComplete );

    //ds display
    cv::imshow( "stereo matching", matDisplayComplete );

    //ds if there was a keystroke
    int iLastKeyStroke( cv::waitKey( m_uWaitKeyTimeoutMS ) );
    if( -1 != iLastKeyStroke )
    {
        //ds user input - reset frame rate counting
        m_uFramesCurrentCycle = 0;

        //ds evaluate keystroke
        switch( iLastKeyStroke )
        {
            case CConfigurationOpenCV::KeyStroke::iEscape:
            {
                _shutDown( );
                return;
            }
            case CConfigurationOpenCV::KeyStroke::iBackspace:
            {
                if( 0 < m_uWaitKeyTimeoutMS )
                {
                    //ds switch to stepwise mode
                    m_uWaitKeyTimeoutMS = 0;
                    std::printf( "<CTrackerStereoMotionModel>(_trackLandmarks) switched to stepwise mode\n" );
                }
                else
                {
                    //ds switch to benchmark mode
                    m_uWaitKeyTimeoutMS = 1;
                    std::printf( "<CTrackerStereoMotionModel>(_trackLandmarks) switched back to benchmark mode\n" );
                }
                break;
            }
            default:
            {
                break;
            }
        }
    }
    else
    {
        _updateFrameRateForInfoBox( );
    }

    //ds update references
    ++m_uFrameCount;
    m_matTransformationLEFTLASTtoLEFTNOW = matTransformationWORLDtoLEFT*m_matTransformationWORLDtoLEFTLAST.inverse( );
    m_matTransformationWORLDtoLEFTLAST   = matTransformationWORLDtoLEFT;
}