// Exercises the tuple move-assignment operator for tuples whose element
// types match the source exactly (homogenous tuples), counting
// UserDefined special-member calls to verify that moves (not copies)
// are performed.
static void test_homo_move_assign ()
{
    rw_info (0, __FILE__, __LINE__, "move assignment operator (homogenous tuples)");

    // empty tuple: move assignment must at least compile and run
    std::tuple<> et1, et2;
    et2 = std::move (et1);
    _RWSTD_UNUSED (et2);

    int i = std::rand ();
    std::tuple<int> it1 (i);
    std::tuple<int> it2;
    it2 = std::move (it1);
    test (__LINE__, it2, i);

    // move assignment ill-formed for constant element types

    // nested tuple element
    std::tuple<std::tuple<int> > nt1 (it2);
    std::tuple<std::tuple<int> > nt2;
    nt2 = std::move (nt1);
    test (__LINE__, nt2, it2);

    // heterogenous pair of elements
    std::tuple<long, const char*> pt1 (1234567890L, "string");
    std::tuple<long, const char*> pt2;
    pt2 = std::move (pt1);
    test (__LINE__, pt2, 1234567890L, (const char*) "string");

    // user-defined element: exactly one move-assign must occur
    const UserDefined ud (i);
    std::tuple<UserDefined> ut1 (ud);
    std::tuple<UserDefined> ut2;
    UserDefined::reset ();
    ut2 = std::move (ut1);
    ++UserDefined::expect.move_asgn;
    test (__LINE__, ut2, ud);

    // big mixed tuple: still exactly one UserDefined move-assign
    std::tuple<bool, char, int, double, void*, UserDefined>
        bt1 (true, 'a', i, 3.14159, (void* const) &i, ud);
    std::tuple<bool, char, int, double, void*, UserDefined> bt2;
    UserDefined::reset ();
    bt2 = std::move (bt1);
    ++UserDefined::expect.move_asgn;
    test (__LINE__, bt2, true, 'a', i, 3.14159, (void* const) &i, ud);
}
// Constructs a triangle polygon from three 2-D points.
// If the points do not pass validateGeom() (presumably degenerate input
// such as coincident/collinear points -- confirm against validateGeom),
// the geometry is left empty (no exterior ring).
QgsTriangle::QgsTriangle( const QPointF p1, const QPointF p2, const QPointF p3 )
  : QgsPolygonV2()
{
  mWkbType = QgsWkbTypes::Triangle;
  QgsPointV2 pt1( p1 );
  QgsPointV2 pt2( p2 );
  QgsPointV2 pt3( p3 );
  if ( !validateGeom( pt1, pt2, pt3 ) )
  {
    // invalid input: keep the default (empty) polygon
    return;
  }
  QVector< double > x;
  x << p1.x() << p2.x() << p3.x();
  QVector< double > y;
  y << p1.y() << p2.y() << p3.y();
  QgsLineString *ext = new QgsLineString( x, y );
  // NOTE(review): setExteriorRing() is assumed to take ownership of
  // ext -- verify against QgsCurvePolygon's contract.
  setExteriorRing( ext );
}
void CGuideRectODL::GetTopPointList( std::vector<gp_Pnt>& arrPoint ) { m_arrTopPoint.clear(); //如果开始点大于结束点 Gdiplus::REAL fX0 = m_rtArea.GetLeft(); Gdiplus::REAL fY0 = m_rtArea.GetTop(); Gdiplus::REAL fX1 = m_rtArea.GetRight(); Gdiplus::REAL fY1 = m_rtArea.GetBottom(); gp_Pnt pt0(fX0, 0, fY0); gp_Pnt pt1(fX1, 0, fY0); gp_Pnt pt2(fX1, 0, fY1); gp_Pnt pt3(fX0, 0, fY1); m_arrTopPoint.push_back(pt0); m_arrTopPoint.push_back(pt1); m_arrTopPoint.push_back(pt2); m_arrTopPoint.push_back(pt3); m_arrTopPoint.push_back(pt0); arrPoint = m_arrTopPoint; }
bool CCoordTransService::Transform (double *pX, double *pY) { if (m_fIsInitialized) { try { CCSPoint pt1 (*pX, *pY); THROW_FAILED_HRESULT(m_CTF -> Transform (&pt1, TRANSDIRECTION_FORWARD)); THROW_FAILED_HRESULT(pt1 -> get_X (pX)); THROW_FAILED_HRESULT(pt1 -> get_Y (pY)); return true; } catch (_com_error &e) { ShowError (_COM_ERROR(e), IDS_CSSERRORCAPTION, g_cbTransform); return false; } } ShowError (TRIAS02_E_CSSNOTINITIALIZED, IDS_CSSERRORCAPTION, g_cbTransform); return false; }
bool CCoordTransService::InvTransformEx (double *pX, double *pY, LPVOID pData) { if (m_fIsInitialized) { try { CCSPoint pt1 (*pX, *pY); THROW_FAILED_HRESULT(m_CTF -> TransformEx (&pt1, TRANSDIRECTION_INVERSE, (IDispatch *)pData)); THROW_FAILED_HRESULT(pt1 -> get_X (pX)); THROW_FAILED_HRESULT(pt1 -> get_Y (pY)); return true; } catch (_com_error &e) { ShowError (_COM_ERROR(e), IDS_CSSERRORCAPTION, g_cbInvTransformEx); return false; } } ShowError (TRIAS02_E_CSSNOTINITIALIZED, IDS_CSSERRORCAPTION, g_cbInvTransformEx); return false; }
// Builds the envelope surface tree of the building by extruding its 2-D
// footprint: a footprint surface at z=0, a roof surface at z=_height and
// one wall quad per footprint edge. The caller owns the returned Surface.
Surface* Building::extrude_envelope() const
{
    Surface* envelope = new Surface(SurfaceType::Envelope,0,_length,_width,_height);
    OGRLinearRing* ringFt = _footprint->getExteriorRing();
    // normalise ring orientation to counter-clockwise before walking edges
    if(ringFt->isClockwise())
        adjust_winding_ccw(ringFt);
    envelope->addChild(new Surface(SurfaceType::Footprint,_footprint,_length,_width,0.));
    //extrude roof: footprint ring lifted to z=_height
    OGRLinearRing* ringRoof = new OGRLinearRing;
    for(int i=0; i<ringFt->getNumPoints(); i++)
        ringRoof->addPoint(ringFt->getX(i),ringFt->getY(i),_height);
    OGRPolygon plyRoof;
    plyRoof.addRingDirectly(ringRoof);   // plyRoof takes ownership of ringRoof
    envelope->addChild(new Surface(SurfaceType::Roof,&plyRoof,_length,_width,0.));
    //extrude walls: one closed vertical quad per consecutive edge
    for(int i=0; i<ringFt->getNumPoints()-1; i++)
    {
        OGRLinearRing* ringWall = new OGRLinearRing;
        ringWall->addPoint(ringFt->getX(i),ringFt->getY(i),0);
        ringWall->addPoint(ringFt->getX(i+1),ringFt->getY(i+1),0);
        ringWall->addPoint(ringFt->getX(i+1),ringFt->getY(i+1),_height);
        ringWall->addPoint(ringFt->getX(i),ringFt->getY(i),_height);
        ringWall->addPoint(ringFt->getX(i),ringFt->getY(i),0);
        OGRPolygon plyWall;
        plyWall.addRingDirectly(ringWall);
        // wall length = distance between the two base corners
        OGRPoint pt1(ringFt->getX(i),ringFt->getY(i),0);
        OGRPoint pt2(ringFt->getX(i+1),ringFt->getY(i+1),0);
        // NOTE(review): plyWall/pt1/pt2 are stack objects going out of
        // scope each iteration -- Wall/Surface are assumed to copy the
        // geometry rather than retain these pointers; verify.
        envelope->addChild(new Wall(&plyWall,pt1.Distance(&pt2),_height,&pt1,&pt2));
    }
    return envelope;
}
static void test_homo_move_ctor () { rw_info (0, __FILE__, __LINE__, "move constructor (homogenous tuples)"); std::tuple<> et (std::tuple<> ()); _RWSTD_UNUSED (et); const int ci = std::rand (); std::tuple<int> it1 (ci); std::tuple<int> it2 (std::move (it1)); test (__LINE__, it2, ci); std::tuple<const int> ct1 (ci); std::tuple<const int> ct2 = std::move (ct1); test (__LINE__, ct2, ci); std::tuple<std::tuple<int> > nt1 (it1); std::tuple<std::tuple<int> > nt2 = std::move (nt1); test (__LINE__, nt2, it1); std::tuple<long, const char*> pt1 (1234567890L, "string"); std::tuple<long, const char*> pt2 (std::move (pt1)); test (__LINE__, pt2, 1234567890L, (const char*) "string"); const UserDefined ud (ci); std::tuple<UserDefined> ut1 (ud); UserDefined::reset (); std::tuple<UserDefined> ut2 (std::move (ut1)); ++UserDefined::expect.move_ctor; test (__LINE__, ut2, ud); std::tuple<bool, char, int, double, void*, UserDefined> bt1 (true, 'a', ci, 3.14159, (void*) &ci, ud); ++UserDefined::expect.copy_ctor; std::tuple<bool, char, int, double, void*, UserDefined> bt2 (std::move (bt1)); ++UserDefined::expect.move_ctor; test (__LINE__, bt2, true, 'a', ci, 3.14159, (void*) &ci, ud); }
// Draws the 2-D B-spline on img as nb_pts-1 straight segments, sampling
// the curve uniformly over its parameter interval [inf, sup] and mapping
// each sample into image coordinates through the given affine frame.
// With nb_pts <= 1 the loop body never runs and nothing is drawn.
void displayopencv::DisplayBspline(const mathtools::application::Bspline<2> &bspline, cv::Mat &img, const mathtools::affine::Frame<2>::Ptr frame, const cv::Scalar &color, const unsigned int &nb_pts)
{
	double inf = bspline.getInfBound(), sup = bspline.getSupBound();
	// first sample at the lower parameter bound
	Eigen::Vector2d prev = mathtools::affine::Point<2>(bspline(inf)).getCoords(frame);
	for(unsigned int i=1;i<nb_pts;i++)
	{
		// uniform parameter in [inf, sup]; i == nb_pts-1 lands exactly on sup
		double t = inf + (double)i*(sup-inf)/(double)(nb_pts-1);
		Eigen::Vector2d curr = mathtools::affine::Point<2>(bspline(t)).getCoords(frame);
		cv::Point pt1(prev.x(), prev.y()), pt2(curr.x(), curr.y());
		prev=curr;
		cv::line(img,pt1,pt2,color);
	}
}
// Marks point p on img with a circle of radius r plus an 'X' (two
// diagonal lines), drawn with value l. Points outside the image bounds
// are silently ignored.
void mark(cv::Mat& img, PPoint2d &p, unsigned char l)
{
	int r = 10;   // marker radius in pixels
	double x = p.x();
	double y = p.y();
	cv::Point2d pt(x, y);
	// endpoints of the two diagonals of the 'X'
	cv::Point2d pt1(x - r / 2, y - r / 2);
	cv::Point2d pt2(x + r / 2, y + r / 2);
	cv::Point2d pt3(x - r / 2, y + r / 2);
	cv::Point2d pt4(x + r / 2, y - r / 2);
	cout << p << endl;
	// BUG FIX: x is a column coordinate and must be checked against
	// img.cols, y is a row coordinate and must be checked against
	// img.rows -- the original comparison had them swapped, which
	// accepted/rejected the wrong points on non-square images.
	if (x >= 0 && y >= 0 && x <= img.cols && y <= img.rows)
	{
		cv::circle(img, pt, r, l);
		cv::line(img, pt1, pt2, l);
		cv::line(img, pt3, pt4, l);
	}
}
// Verifies FixtureGroup::swap(): swapping two occupied points exchanges
// their heads, and swapping an occupied point with an empty one moves
// the head to the empty point (and back).
void FixtureGroup_Test::swap()
{
    FixtureGroup grp(m_doc);
    grp.setSize(QSize(4, 4));
    // fill the 4x4 grid with 16 single-channel fixtures
    for (quint32 id = 0; id < 16; id++)
    {
        Fixture* fxi = new Fixture(m_doc);
        fxi->setChannels(1);
        m_doc->addFixture(fxi);
        grp.assignFixture(fxi->id());
    }
    QCOMPARE(grp.headList().size(), 16);

    QLCPoint pt1(0, 0);   // holds fixture 0
    QLCPoint pt2(2, 1);   // holds fixture 6
    QVERIFY(grp.headHash().contains(pt1) == true);
    QVERIFY(grp.headHash().contains(pt2) == true);
    QCOMPARE(grp.headHash()[pt1], GroupHead(0, 0));
    QCOMPARE(grp.headHash()[pt2], GroupHead(6, 0));

    // Switch places with two fixtures
    grp.swap(pt1, pt2);
    QVERIFY(grp.headHash().contains(pt1) == true);
    QVERIFY(grp.headHash().contains(pt2) == true);
    QCOMPARE(grp.headHash()[pt1], GroupHead(6, 0));
    QCOMPARE(grp.headHash()[pt2], GroupHead(0, 0));

    // Switch places with a fixture and an empty point
    pt2 = QLCPoint(500, 500);
    grp.swap(pt1, pt2);
    QVERIFY(grp.headHash().contains(pt1) == false);
    QVERIFY(grp.headHash().contains(pt2) == true);
    QCOMPARE(grp.headHash()[pt2], GroupHead(6, 0));

    // ...and back again
    grp.swap(pt1, pt2);
    QVERIFY(grp.headHash().contains(pt1) == true);
    QVERIFY(grp.headHash().contains(pt2) == false);
    QCOMPARE(grp.headHash()[pt1], GroupHead(6, 0));
}
// Exercises Are_mergeable_2 of the conic polycurve traits: c1 and c2
// share the endpoint (10,1) and are expected mergeable, while c1 and c3
// do not meet and are expected non-mergeable.
void check_are_mergable()
{
  Polycurve_conic_traits_2 traits;
  Polycurve_conic_traits_2::Construct_x_monotone_curve_2
    construct_x_monotone_curve_2 =
    traits.construct_x_monotone_curve_2_object();
  Polycurve_conic_traits_2::Are_mergeable_2 are_mergeable_2 =
    traits.are_mergeable_2_object();

  //create a curve
  Rat_point_2 ps1(1, 10);
  Rat_point_2 pmid1(5, 4);
  Rat_point_2 pt1(10, 1);
  Conic_curve_2 c1(ps1, pmid1, pt1);

  // second curve starting where c1 ends
  Rat_point_2 ps2(10, 1);
  Rat_point_2 pmid2(15, 14);
  Rat_point_2 pt2(20, 20);
  Conic_curve_2 c2(ps2, pmid2, pt2);

  // third curve, disjoint from c1
  Rat_point_2 ps3(Rational(1,4), 4);
  Rat_point_2 pmid3(Rational(3,2), 2);
  Rat_point_2 pt3(2, Rational(1,3));
  Conic_curve_2 c3(ps3, pmid3, pt3);

  //construct x-monotone curve(compatible with polyline class)
  Polycurve_conic_traits_2::X_monotone_curve_2 polyline_xmc1 =
    construct_x_monotone_curve_2(c1);
  Polycurve_conic_traits_2::X_monotone_curve_2 polyline_xmc2 =
    construct_x_monotone_curve_2(c2);
  Polycurve_conic_traits_2::X_monotone_curve_2 polyline_xmc3 =
    construct_x_monotone_curve_2(c3);

  bool result = are_mergeable_2(polyline_xmc1, polyline_xmc2);
  std::cout << "Are_mergable:: Mergable x-monotone polycurves are Computed as: "
            << ((result)? "Mergable" : "Not-Mergable") << std::endl;

  result = are_mergeable_2(polyline_xmc1, polyline_xmc3);
  std::cout << "Are_mergable:: Non-Mergable x-monotone polycurves are Computed as: "
            << ((result)? "Mergable" : "Not-Mergable") << std::endl;
}
// Projects the four corners of the template rectangle through the given
// homography (with perspective divide) and draws the resulting
// quadrilateral outline on the image.
void DrawResult(IplImage* image, windage::Matrix3 homography, CvScalar color = CV_RGB(255, 0, 0), int thickness = 1)
{
	windage::Vector3 corners[4] = {
		windage::Vector3(0.0, 0.0, 1.0),
		windage::Vector3(TEMPLATE_WIDTH, 0.0, 1.0),
		windage::Vector3(TEMPLATE_WIDTH, TEMPLATE_HEIGHT, 1.0),
		windage::Vector3(0.0, TEMPLATE_HEIGHT, 1.0),
	};

	// warp each corner and normalise by the homogeneous coordinate
	CvPoint projected[4];
	for(int i = 0; i < 4; i++)
	{
		windage::Vector3 warped = homography * corners[i];
		warped /= warped.z;
		projected[i] = cvPoint((int)warped.x, (int)warped.y);
	}

	// connect consecutive corners, closing the loop back to the first
	for(int i = 0; i < 4; i++)
	{
		cvLine(image, projected[i], projected[(i + 1) % 4], color, thickness);
	}
}
// forward the message to the appropriate item bool wxListBox::MSWOnDraw(WXDRAWITEMSTRUCT *item) { // only owner-drawn control should receive this message wxCHECK( ((m_windowStyle & wxLB_OWNERDRAW) == wxLB_OWNERDRAW), false ); DRAWITEMSTRUCT *pStruct = (DRAWITEMSTRUCT *)item; // the item may be -1 for an empty listbox if ( pStruct->itemID == (UINT)-1 ) return false; wxListBoxItem *pItem = (wxListBoxItem *)m_aItems[pStruct->itemID]; wxDCTemp dc((WXHDC)pStruct->hDC); wxPoint pt1(pStruct->rcItem.left, pStruct->rcItem.top); wxPoint pt2(pStruct->rcItem.right, pStruct->rcItem.bottom); wxRect rect(pt1, pt2); return pItem->OnDrawItem(dc, rect, (wxOwnerDrawn::wxODAction)pStruct->itemAction, (wxOwnerDrawn::wxODStatus)pStruct->itemState); }
// Inverse-transforms iNum coordinate pairs in place, passing the
// caller-supplied pData (an IDispatch) through to TransformEx. Stops at
// the first COM failure, reports it and returns false; returns false
// immediately when the service is uninitialised.
bool CCoordTransService::InvTransformEx (DoublePair *pPoints, int iNum, LPVOID pData)
{
	if (!m_fIsInitialized) {
		ShowError (TRIAS02_E_CSSNOTINITIALIZED, IDS_CSSERRORCAPTION, g_cbInvTransformEx);
		return false;
	}
	try {
		for (int ix = 0; ix < iNum; ix++) {
			CCSPoint point (pPoints[ix].X(), pPoints[ix].Y());
			THROW_FAILED_HRESULT(m_CTF -> TransformEx (&point, TRANSDIRECTION_INVERSE, (IDispatch *)pData));
			// write the transformed coordinates back into the array
			THROW_FAILED_HRESULT(point -> get_X (&pPoints[ix].X()));
			THROW_FAILED_HRESULT(point -> get_Y (&pPoints[ix].Y()));
		}
		return true;
	}
	catch (_com_error &e) {
		ShowError (_COM_ERROR(e), IDS_CSSERRORCAPTION, g_cbInvTransformEx);
		return false;
	}
}
void check_is_vertical() { Polycurve_conic_traits_2 traits; Polycurve_conic_traits_2::Construct_x_monotone_curve_2 construct_x_monotone_curve_2 = traits.construct_x_monotone_curve_2_object(); Polycurve_conic_traits_2::Is_vertical_2 is_vertical = traits.is_vertical_2_object(); //create a curve Rat_point_2 ps1(1, 10); Rat_point_2 pmid1(5, 4); Rat_point_2 pt1(10, 1); Conic_curve_2 c1(ps1, pmid1, pt1); //make x-monotone curve Polycurve_conic_traits_2::X_monotone_curve_2 polyline_xmc1 = construct_x_monotone_curve_2(c1); bool result = is_vertical(polyline_xmc1); std::cout << "Is_verticle:: Expected first result is not vertivle: Computed: " << ((result)? "vertical" : "not vertical") << std::endl; }
bool CCoordTransService::CoordTransDistanceEx ( const DoublePair *pP1, const DoublePair *pP2, double &dX, double &dY) { if (m_fIsInitialized) { try { CCSPoint pt1 (pP1 -> Width(), pP1 -> Height()); CCSPoint pt2 (pP2 -> Width(), pP2 -> Height()); W_DGMPoint outPt; THROW_FAILED_HRESULT(m_CTF -> DistancePoint(&pt1, &pt2, outPt.ppi())); THROW_FAILED_HRESULT(outPt -> get_X (&dX)); THROW_FAILED_HRESULT(outPt -> get_Y (&dY)); return true; } catch (_com_error &e) { ShowError (_COM_ERROR(e), IDS_CSSERRORCAPTION, g_cbCoordTransDistanceEx); return false; } } ShowError (TRIAS02_E_CSSNOTINITIALIZED, IDS_CSSERRORCAPTION, g_cbCoordTransDistanceEx); return false; }
// Point-in-AABB queries: an empty (default) box contains nothing; a
// valid box contains interior points both inclusively and exclusively,
// and boundary points only inclusively.
void AABoxContainTest::testIsInVolumePt()
{
   // Test empty box
   gmtl::AABoxf box;
   gmtl::Point3f origin;
   CPPUNIT_ASSERT(! gmtl::isInVolume(box, origin));

   // Test valid box with pt outside
   gmtl::AABoxf box2(gmtl::Point3f(-1,-1,-1), gmtl::Point3f(1,1,1));
   gmtl::Point3f pt1(2,2,2);
   CPPUNIT_ASSERT(! gmtl::isInVolume(box2, pt1));
   CPPUNIT_ASSERT(! gmtl::isInVolumeExclusive(box2, pt1));

   //Test valid box with pt inside
   CPPUNIT_ASSERT(gmtl::isInVolume(box2, origin));
   CPPUNIT_ASSERT(gmtl::isInVolumeExclusive(box2, origin));

   //Test valid box with pt on surface: inclusive yes, exclusive no
   gmtl::Point3f pt_on_surf(1,0,0);
   CPPUNIT_ASSERT(gmtl::isInVolume(box2, pt_on_surf));
   CPPUNIT_ASSERT(!gmtl::isInVolumeExclusive(box2, pt_on_surf));
}
void check_construct_opposite() { Polycurve_conic_traits_2 traits; Polycurve_conic_traits_2::Construct_x_monotone_curve_2 construct_x_monotone_curve_2 = traits.construct_x_monotone_curve_2_object(); Polycurve_conic_traits_2::Construct_opposite_2 construct_opposite_2 = traits.construct_opposite_2_object(); //create a curve Rat_point_2 ps1(1, 10); Rat_point_2 pmid1(5, 4); Rat_point_2 pt1(10, 1); Conic_curve_2 c1(ps1, pmid1, pt1); //construct x-monotone curve (compatible with polyline class) Polycurve_conic_traits_2::X_monotone_curve_2 polyline_xmc1 = construct_x_monotone_curve_2(c1); Polycurve_conic_traits_2::X_monotone_curve_2 polyline_opposite_curve = construct_opposite_2(polyline_xmc1); std::cout<< "Construct_opposite_2:: Opposite curve created"; }
// Linear interpolation of nodal values over a 10x10 square surface at
// z=0: values ramp 0..100 left-to-right, so f(x,y,0) == 10*x inside the
// domain; outside the domain (in x, y or z) the interpolator returns
// the configured fallback, numeric_limits<double>::max().
TEST(NumLib, SpatialFunctionInterpolationSurface)
{
    // create geometry
    GeoLib::Point pt1(0.0, 0.0, 0.0);
    GeoLib::Point pt2(10.0, 0.0, 0.0);
    GeoLib::Point pt3(10.0, 10.0, 0.0);
    GeoLib::Point pt4(0.0, 10.0, 0.0);
    std::vector<GeoLib::Point*> pnts = {&pt1, &pt2, &pt3, &pt4};
    GeoLib::Polyline ply0(pnts);
    ply0.addPoint(0);
    ply0.addPoint(1);
    ply0.addPoint(2);
    ply0.addPoint(3);
    ply0.addPoint(0);   // close the polyline
    std::unique_ptr<GeoLib::Surface> sfc1(GeoLib::Surface::createSurface(ply0));

    // define a function: 0 at x=0 edge, 100 at x=10 edge
    const std::vector<std::size_t> vec_point_ids = {{0, 1, 2, 3}};
    const std::vector<double> vec_point_values = {{0., 100., 100., 0.}};
    NumLib::LinearInterpolationOnSurface interpolate(*sfc1, vec_point_ids, vec_point_values, std::numeric_limits<double>::max());

    // normal: value depends only on x
    for (unsigned k=0; k<10; k++) {
        ASSERT_DOUBLE_EQ(k*10., interpolate(GeoLib::Point(k,0,0)));
        ASSERT_DOUBLE_EQ(k*10., interpolate(GeoLib::Point(k,5,0)));
        ASSERT_DOUBLE_EQ(k*10., interpolate(GeoLib::Point(k,10,0)));
    }

    // failure
    // x, y outside the square
    ASSERT_DOUBLE_EQ(std::numeric_limits<double>::max(), interpolate(GeoLib::Point(-1,-1,0)));
    ASSERT_DOUBLE_EQ(std::numeric_limits<double>::max(), interpolate(GeoLib::Point(11,-1,0)));
    ASSERT_DOUBLE_EQ(std::numeric_limits<double>::max(), interpolate(GeoLib::Point(11,11,0)));
    ASSERT_DOUBLE_EQ(std::numeric_limits<double>::max(), interpolate(GeoLib::Point(-1,11,0)));
    // z off the surface plane
    ASSERT_DOUBLE_EQ(std::numeric_limits<double>::max(), interpolate(GeoLib::Point(0,0,1)));
    ASSERT_DOUBLE_EQ(std::numeric_limits<double>::max(), interpolate(GeoLib::Point(0,0,-1)));
}
// Draws the cell outline (plus one diagonal between vertex 0 and the
// "half-way" vertex) for every blocking instance on the given layer, in
// the renderer's configured color.
void BlockingInfoRenderer::render(Camera* cam, Layer* layer, RenderList& instances)
{
	CellGrid* cg = layer->getCellGrid();
	if (!cg) {
		FL_WARN(_log, "No cellgrid assigned to layer, cannot draw grid");
		return;
	}
	Rect cv = cam->getViewPort();
	RenderList::const_iterator instance_it = instances.begin();
	for (;instance_it != instances.end(); ++instance_it) {
		Instance* instance = (*instance_it)->instance;
		// only instances that block (both object- and instance-level)
		if (!instance->getObject()->isBlocking() || !instance->isBlocking()) {
			continue;
		}
		// cell corner points in model space for this instance's cell
		std::vector<ExactModelCoordinate> vertices;
		cg->getVertices(vertices, instance->getLocationRef().getLayerCoordinates());
		std::vector<ExactModelCoordinate>::const_iterator it = vertices.begin();
		int halfind = vertices.size() / 2;
		ScreenPoint firstpt = cam->toScreenCoordinates(cg->toMapCoordinates(*it));
		Point pt1(firstpt.x, firstpt.y);
		Point pt2;
		++it;
		// connect consecutive vertices...
		for (; it != vertices.end(); it++) {
			ScreenPoint pts = cam->toScreenCoordinates(cg->toMapCoordinates(*it));
			pt2.x = pts.x;
			pt2.y = pts.y;
			Point cpt1 = pt1;
			Point cpt2 = pt2;
			m_renderbackend->drawLine(cpt1, cpt2, m_color.r, m_color.g, m_color.b);
			pt1 = pt2;
		}
		// ...and close the loop back to the first vertex
		m_renderbackend->drawLine(pt2, Point(firstpt.x, firstpt.y), m_color.r, m_color.g, m_color.b);
		// diagonal from vertex 0 to the opposite (half-index) vertex
		ScreenPoint spt1 = cam->toScreenCoordinates(cg->toMapCoordinates(vertices[0]));
		Point pt3(spt1.x, spt1.y);
		ScreenPoint spt2 = cam->toScreenCoordinates(cg->toMapCoordinates(vertices[halfind]));
		Point pt4(spt2.x, spt2.y);
		m_renderbackend->drawLine(pt3, pt4, m_color.r, m_color.g, m_color.b);
	}
}
// Finishes a rectangle drag: sets the shape's rectangle from the start
// point and the snapped end point (constrained to a square centered on
// the start point for kMgSquare shapes), and commits the shape only if
// both dimensions exceed a minimum on-screen size.
bool MgCmdDrawRect::touchEnded(const MgMotion* sender)
{
    Point2d pt1(m_startPt);
    Point2d pt2(snapPoint(sender));
    MgBaseRect* shape = (MgBaseRect*)dynshape()->shape();

    if (shape->getFlag(kMgSquare)) {
        // square side = larger of the x/y drag extents, box centered
        // on the start point
        float len = (float)mgMax(fabs(pt2.x - pt1.x), fabs(pt2.y - pt1.y));
        Box2d rect(m_startPt, 2.f * len, 0);
        pt1 = rect.leftTop();
        pt2 = rect.rightBottom();
    }
    shape->setRect(pt1, pt2);
    dynshape()->shape()->update();

    // discard rectangles smaller than 5 mm (converted to model units)
    float minDist = mgDisplayMmToModel(5, sender);
    if (shape->getWidth() > minDist && shape->getHeight() > minDist) {
        addRectShape(sender);
    }
    return _touchEnded(sender);
}
// Draws the segments (x1[i],y1[i])-(x2[i],y2[i]) on the painter,
// optionally clipping each segment to *clip. When autoexpand is set the
// clip rectangle is grown by the current pen width so line caps at the
// boundary are not cut off. Arrays of unequal length are truncated to
// the shortest; an empty set draws nothing.
void plotLinesToPainter(QPainter& painter,
			const Numpy1DObj& x1, const Numpy1DObj& y1,
			const Numpy1DObj& x2, const Numpy1DObj& y2,
			const QRectF* clip, bool autoexpand)
{
  // number of complete segments available in all four arrays
  const int maxsize = min(x1.dim, x2.dim, y1.dim, y2.dim);

  // BUG FIX: clipcopy was only initialised when autoexpand was true, so
  // clip != 0 && !autoexpand clipped against a null rectangle and
  // silently dropped every line. Copy the clip rect whenever clipping
  // is requested, then expand it only if asked. (Inner locals also no
  // longer shadow the x1/y1/x2/y2 parameters.)
  QRectF clipcopy;
  if ( clip != 0 )
    {
      qreal cx1, cy1, cx2, cy2;
      clip->getCoords(&cx1, &cy1, &cx2, &cy2);
      clipcopy.setCoords(cx1, cy1, cx2, cy2);
      if ( autoexpand )
	{
	  // expand rectangle by line width
	  const qreal lw = painter.pen().widthF();
	  clipcopy.adjust(-lw, -lw, lw, lw);
	}
    }

  if( maxsize != 0 )
    {
      QVector<QLineF> lines;
      for(int i = 0; i < maxsize; ++i)
	{
	  QPointF pt1(x1(i), y1(i));
	  QPointF pt2(x2(i), y2(i));
	  if( clip != 0 )
	    {
	      // clipLine may shorten pt1/pt2 in place; skip fully
	      // clipped segments
	      if( clipLine(clipcopy, pt1, pt2) )
		lines << QLineF(pt1, pt2);
	    }
	  else
	    lines << QLineF(pt1, pt2);
	}
      painter.drawLines(lines);
    }
}
// Creates the sample block definition (DYNBLKSAMP_BLOCKNAME) containing
// four AcDbLine entities forming a 2x2 square, and registers it in the
// working database's block table.
void createBlock()
{
    AcDbBlockTableRecord* pBTR = new AcDbBlockTableRecord();
    pBTR->setName(DYNBLKSAMP_BLOCKNAME);
    AcDbSymbolTable* pBT = NULL;
    acdbHostApplicationServices()->workingDatabase()->getBlockTable(pBT, AcDb::kForWrite);
    // NOTE(review): pBT->add() is assumed to hand pBTR over to the
    // database (standard ObjectARX ownership) -- verify.
    pBT->add(pBTR);
    pBT->close();

    // bottom edge: (0,0) -> (2,0)
    AcGePoint3d pt1(0.0, 0.0, 0.0);
    AcGePoint3d pt2(2.0, 0.0, 0.0);
    AcDbLine* pLine = new AcDbLine(pt1, pt2);
    pBTR->appendAcDbEntity(pLine);
    pLine->close();

    // right edge: (2,0) -> (2,2)
    pt1 = pt2;
    pt2.y = 2.0;
    pLine = new AcDbLine(pt1, pt2);
    pBTR->appendAcDbEntity(pLine);
    pLine->close();

    // top edge: (2,2) -> (0,2)
    pt1 = pt2;
    pt2.x = 0.0;
    pLine = new AcDbLine(pt1, pt2);
    pBTR->appendAcDbEntity(pLine);
    pLine->close();

    // left edge: (0,2) -> (0,0), closing the square
    pt1 = pt2;
    pt2.y = 0.0;
    pLine = new AcDbLine(pt1, pt2);
    pBTR->appendAcDbEntity(pLine);
    pLine->close();

    pBTR->close();
}
// Outlines every selected cell location that belongs to the given layer
// by connecting the cell's grid vertices (closing the loop back to the
// first vertex) in the renderer's configured color.
void CellSelectionRenderer::render(Camera* cam, Layer* layer, RenderList& instances)
{
	if (m_locations.empty()) {
		return;
	}
	std::vector<Location>::const_iterator locit = m_locations.begin();
	for (; locit != m_locations.end(); locit++) {
		const Location loc = *locit;
		// only draw selections that live on the layer being rendered
		if (layer != loc.getLayer()) {
			continue;
		}
		CellGrid* cg = layer->getCellGrid();
		if (!cg) {
			FL_WARN(_log, "No cellgrid assigned to layer, cannot draw selection");
			continue;
		}
		// cell corner points in model space
		std::vector<ExactModelCoordinate> vertices;
		cg->getVertices(vertices, loc.getLayerCoordinates());
		std::vector<ExactModelCoordinate>::const_iterator it = vertices.begin();
		ScreenPoint firstpt = cam->toScreenCoordinates(cg->toMapCoordinates(*it));
		Point pt1(firstpt.x, firstpt.y);
		Point pt2;
		++it;
		// connect consecutive vertices...
		for (; it != vertices.end(); it++) {
			ScreenPoint pts = cam->toScreenCoordinates(cg->toMapCoordinates(*it));
			pt2.x = pts.x;
			pt2.y = pts.y;
			Point cpt1 = pt1;
			Point cpt2 = pt2;
			m_renderbackend->drawLine(cpt1, cpt2, m_color.r, m_color.g, m_color.b);
			pt1 = pt2;
		}
		// ...and close the loop back to the first vertex
		m_renderbackend->drawLine(pt2, Point(firstpt.x, firstpt.y), m_color.r, m_color.g, m_color.b);
	}
}
// Cube primitive: builds a 10x10x10 axis-aligned cube as a six-face
// Poly, computes its normals and registers/bounds it with the scene.
Cube::Cube(){
	geoType = CUBE_GEO;
	p = new Poly();
	// the eight corners of the cube [0,10]^3
	Point pt1(0,0,0);
	Point pt2(0,0,10);
	Point pt3(10,0,10);
	Point pt4(10,0,0);
	Point pt5(10,10,0);
	Point pt6(10,10,10);
	Point pt7(0,10,10);
	Point pt8(0,10,0);
	// NOTE(review): the original side labels did not match the corner
	// coordinates (e.g. "bottom" was the x==0 face); faces are labeled
	// here by the coordinate plane they lie in.
	p->addFace(pt1, pt2, pt7, pt8); // x == 0 face
	p->addFace(pt3, pt4, pt5, pt6); // x == 10 face
	p->addFace(pt1, pt2, pt3, pt4); // y == 0 face
	p->addFace(pt5, pt6, pt7, pt8); // y == 10 face
	p->addFace(pt1, pt4, pt5, pt8); // z == 0 face
	p->addFace(pt2, pt3, pt6, pt7); // z == 10 face
	p->computeNormals();
	this->addPoly(p); // this adds it to scene
	p->bound();
	this->bound();
}
// createPolygonTrees() must nest the three trees correctly: _p0 and _p1
// both lie inside _p2 (and not inside each other), so the result is a
// single root tree (_p2) with two children.
TEST_F(CreatePolygonTreesTest, P0AndP1AndP2)
{
    GeoLib::SimplePolygonTree *pt0(new GeoLib::SimplePolygonTree(_p0, nullptr));
    GeoLib::SimplePolygonTree *pt1(new GeoLib::SimplePolygonTree(_p1, nullptr));
    GeoLib::SimplePolygonTree *pt2(new GeoLib::SimplePolygonTree(_p2, nullptr));

    std::list<GeoLib::SimplePolygonTree*> pt_list;
    pt_list.push_back(pt0);
    pt_list.push_back(pt1);
    pt_list.push_back(pt2);
    ASSERT_EQ(3u, pt_list.size());

    // precondition: only _p2 contains the other two polygons
    ASSERT_FALSE(_p0->isPolylineInPolygon(*_p1));
    ASSERT_FALSE(_p0->isPolylineInPolygon(*_p2));
    ASSERT_FALSE(_p1->isPolylineInPolygon(*_p0));
    ASSERT_FALSE(_p1->isPolylineInPolygon(*_p2));
    ASSERT_TRUE(_p2->isPolylineInPolygon(*_p0));
    ASSERT_TRUE(_p2->isPolylineInPolygon(*_p1));

    createPolygonTrees(pt_list);
    // one root remains, holding the two nested polygons as children
    ASSERT_EQ(1u, pt_list.size());
    ASSERT_EQ(2u, (*(pt_list.begin()))->getNChilds());
    std::for_each(pt_list.begin(), pt_list.end(), std::default_delete<GeoLib::SimplePolygonTree>());
}
// Exercises tuple operator< for empty, one-, two- and six-element
// tuples: irreflexivity (!(t < t)) and lexicographic element-wise
// ordering.
static void test_lt ()
{
    rw_info (0, __FILE__, __LINE__, "operator<");

    // empty tuples compare equal, never less
    std::tuple<> nt1, nt2;
    TEST (!(nt1 < nt1));
    TEST (!(nt1 < nt2));

    std::tuple<int> it1 (1), it2 (2);
    TEST (!(it1 < it1));
    TEST (it1 < it2);

    UserDefined ud1 (1), ud2 (2);
    std::tuple<UserDefined> ut1 (ud1), ut2 (ud2);
    TEST (!(ut1 < ut1));
    TEST (ut1 < ut2);

    // lexicographic: first element decides...
    std::tuple<long, const char*> pt1 (1234L, "string");
    TEST (!(pt1 < pt1));
    std::tuple<long, const char*> pt2 (1235L, "string");
    TEST (pt1 < pt2);
    // ...then the second ("string" < "strings" by pointer/element compare)
    std::tuple<long, const char*> pt3 (1234L, "strings");
    TEST (pt1 < pt3);

    // six-element tuples differing in one middle or trailing element
    std::tuple<bool, char, int, double, void*, UserDefined>
        bt1 (true, 'a', 255, 3.14159, &nt1, ud1),
        bt2 (true, 'a', 256, 3.14159, &nt1, ud1),
        bt3 (true, 'a', 255, 3.14159, &nt1, ud2);
    rw_assert (!(bt1 < bt1), __FILE__, __LINE__,
               "bt1 < bt1, got true, expected false");
    rw_assert (bt1 < bt2, __FILE__, __LINE__,
               "bt1 < bt2, got false, expected true");
    rw_assert (bt1 < bt3, __FILE__, __LINE__,
               "bt1 < bt3, got false, expected true");
}
// Stereo image-pair callback: feeds libviso2 with the new frames,
// recovers the inter-frame camera motion and publishes it as an
// odometry transform (plus, optionally, a point cloud). When the
// odometer fails, publishes an identity transform with "bad" covariance
// and flags the odometer as lost so the next frame only re-seeds it.
void imageCallback(
    const sensor_msgs::ImageConstPtr& l_image_msg,
    const sensor_msgs::ImageConstPtr& r_image_msg,
    const sensor_msgs::CameraInfoConstPtr& l_info_msg,
    const sensor_msgs::CameraInfoConstPtr& r_info_msg)
{
  bool first_run = false;
  // create odometer if not exists
  if (!visual_odometer_)
  {
    first_run = true;
#if(DEBUG)
    // keep copies of the first pair for the debug match visualisation
    cv_bridge::CvImageConstPtr l_cv_ptr, r_cv_ptr;
    l_cv_ptr = cv_bridge::toCvShare(l_image_msg, sensor_msgs::image_encodings::MONO8);
    r_cv_ptr = cv_bridge::toCvShare(r_image_msg, sensor_msgs::image_encodings::MONO8);
    last_l_image_ = l_cv_ptr->image;
    last_r_image_ = r_cv_ptr->image;
#endif
    initOdometer(l_info_msg, r_info_msg);
  }

  // convert images if necessary
  uint8_t *l_image_data, *r_image_data;
  int l_step, r_step;
  cv_bridge::CvImageConstPtr l_cv_ptr, r_cv_ptr;
  l_cv_ptr = cv_bridge::toCvShare(l_image_msg, sensor_msgs::image_encodings::MONO8);
  l_image_data = l_cv_ptr->image.data;
  l_step = l_cv_ptr->image.step[0];
  r_cv_ptr = cv_bridge::toCvShare(r_image_msg, sensor_msgs::image_encodings::MONO8);
  r_image_data = r_cv_ptr->image.data;
  r_step = r_cv_ptr->image.step[0];

  // both images must have identical geometry for libviso2
  ROS_ASSERT(l_step == r_step);
  ROS_ASSERT(l_image_msg->width == r_image_msg->width);
  ROS_ASSERT(l_image_msg->height == r_image_msg->height);

  int32_t dims[] = {l_image_msg->width, l_image_msg->height, l_step};
  // on first run or when odometer got lost, only feed the odometer with
  // images without retrieving data
  if (first_run || got_lost_)
  {
    visual_odometer_->process(l_image_data, r_image_data, dims);
    got_lost_ = false;
  }
  else
  {
    bool success = visual_odometer_->process(
        l_image_data, r_image_data, dims, last_motion_small_);
    if (success)
    {
      // odometer reports pose delta; invert to get camera motion
      Matrix motion = Matrix::inv(visual_odometer_->getMotion());
      ROS_DEBUG("Found %i matches with %i inliers.",
                visual_odometer_->getNumberOfMatches(),
                visual_odometer_->getNumberOfInliers());
      ROS_DEBUG_STREAM("libviso2 returned the following motion:\n" << motion);
      Matrix camera_motion;
      // if image was replaced due to small motion we have to subtract the
      // last motion to get the increment
      if (last_motion_small_)
      {
        camera_motion = Matrix::inv(reference_motion_) * motion;
      }
      else
      {
        // image was not replaced, report full motion from odometer
        camera_motion = motion;
      }
      reference_motion_ = motion; // store last motion as reference

      std::cout<< camera_motion << "\n" <<std::endl;
#if (USE_MOVEMENT_CONSTRAIN)
      // Euler angles extracted from the rotation part of camera_motion
      double deltaRoll = atan2(camera_motion.val[2][1], camera_motion.val[2][2]);
      double deltaPitch = asin(-camera_motion.val[2][0]);
      double deltaYaw = atan2(camera_motion.val[1][0], camera_motion.val[0][0]);
      double tanRoll = camera_motion.val[2][1] / camera_motion.val[2][2];
      double tanPitch = tan(deltaPitch);
      printf("deltaroll is %lf\n", deltaRoll);
      printf("deltaPitch is %lf\n", deltaPitch);
      printf("deltaYaw is %lf\n", deltaYaw);

      // translation part of the motion
      double deltaX = camera_motion.val[0][3];
      double deltaY = camera_motion.val[1][3];
      double deltaZ = camera_motion.val[2][3];
      printf("dx %lf, dy %lf, dz %lf, tanRoll %lf tanPitch %lf\n",deltaX, deltaY, deltaZ, tanRoll, tanPitch);
      // clamp the vertical translation to what the roll angle allows
      if(deltaY > 0 && deltaY > tanRoll * deltaZ)
      {
        camera_motion.val[1][3] = tanRoll * deltaZ;
        //printf("dy %lf deltaY, dynew %lf\n", deltaY,camera_motion.val[2][3]);
      }
      else if(deltaY < 0 && deltaY < -tanRoll * deltaZ)
      {
        camera_motion.val[1][3] = -tanRoll * deltaZ;
        //printf("dy %lf deltaY, dynew %lf\n", deltaY,camera_motion.val[2][3]);
      }
      /*if(deltaX > 0 && deltaX > tanPitch * deltaZ) { camera_motion.val[0][3] = tanPitch * deltaZ; printf("dx %lf, dxnew %lf\n", deltaX,camera_motion.val[1][3]); } else if(deltaX < 0 && deltaX < -tanPitch * deltaZ) { camera_motion.val[0][3] = -tanPitch * deltaZ; printf("dx %lf, dxnew %lf\n", deltaX,camera_motion.val[1][3]); }*/
      /* if(deltaPitch > 0) { deltaPitch = deltaPitch+fabs(deltaRoll)+fabs(deltaYaw); } else { deltaPitch = deltaPitch-fabs(deltaRoll)-fabs(deltaYaw); }*/
      deltaPitch = deltaPitch+deltaYaw;
#endif

      // calculate current feature flow
      std::vector<Matcher::p_match> matches = visual_odometer_->getMatches();
      std::vector<int> inlier_indices = visual_odometer_->getInlierIndices();
#if(DEBUG)
      // stack previous and current left images and draw the matches
      cv::Mat img1 = l_cv_ptr->image;
      cv::Mat img2 = r_cv_ptr->image;
      cv::Size size(last_l_image_.cols, last_l_image_.rows+last_r_image_.rows);
      cv::Mat outImg(size,CV_MAKETYPE(img1.depth(), 3));
      cv::Mat outImg1 = outImg( cv::Rect(0, 0, last_l_image_.cols, last_l_image_.rows) );
      cv::Mat outImg2 = outImg( cv::Rect(0, last_l_image_.rows, img1.cols, img1.rows) );
      if( last_l_image_.type() == CV_8U )
        cvtColor( last_l_image_, outImg1, CV_GRAY2BGR );
      else
        last_l_image_.copyTo( outImg1 );
      if( img1.type() == CV_8U )
        cvtColor( img1, outImg2, CV_GRAY2BGR );
      else
        img1.copyTo( outImg2 );
      for (size_t i = 0; i < matches.size(); ++i)
      {
        // previous-frame point on top, current-frame point shifted below
        cv::Point pt1(matches[i].u1p,matches[i].v1p);
        cv::Point pt2(matches[i].u1c,matches[i].v1c + last_l_image_.rows);
        if(pt1.y > 239)
          cv::line(outImg, pt1, pt2, cv::Scalar(255,0,0));
        //else
        //cv::line(outImg, pt1, pt2, cv::Scalar(0,255,0));
      }
      cv::imshow("matching image", outImg);
      cv::waitKey(10);
      last_l_image_ = img1;
      last_r_image_ = img2;
#endif
      double feature_flow = computeFeatureFlow(matches);
      last_motion_small_ = (feature_flow < motion_threshold_);
      ROS_DEBUG_STREAM("Feature flow is " << feature_flow
                       << ", marking last motion as "
                       << (last_motion_small_ ? "small." : "normal."));

      // NOTE(review): deltaPitch is declared inside the
      // USE_MOVEMENT_CONSTRAIN block above; with that macro defined to 0
      // this rotation matrix would not compile -- confirm the build
      // always enables the constraint.
      // pitch-only rotation about the camera Y axis
      btMatrix3x3 rot_mat(
          cos(deltaPitch), 0, sin(deltaPitch),
          0, 1, 0,
          -sin(deltaPitch), 0, cos(deltaPitch));
      /*btMatrix3x3 rot_mat( camera_motion.val[0][0], camera_motion.val[0][1], camera_motion.val[0][2], camera_motion.val[1][0], camera_motion.val[1][1], camera_motion.val[1][2], camera_motion.val[2][0], camera_motion.val[2][1], camera_motion.val[2][2]);*/
      btVector3 t(camera_motion.val[0][3], camera_motion.val[1][3], camera_motion.val[2][3]);
      //rotation
      /*double delta_yaw = 0; double delta_pitch = 0; double delta_roll = 0; delta_yaw = delta_yaw*M_PI/180.0; delta_pitch = delta_pitch*M_PI/180.0; delta_roll = delta_roll*M_PI/180.0; //btMatrix3x3 initialTrans; Eigen::Matrix4f initialTrans = Eigen::Matrix4f::Identity(); initialTrans(0,0) = cos(delta_pitch)*cos(delta_yaw); initialTrans(0,1) = -cos(delta_roll)*sin(delta_yaw) + sin(delta_roll)*sin(delta_pitch)*cos(delta_yaw); initialTrans(0,2) = sin(delta_roll)*sin(delta_yaw) + cos(delta_roll)*sin(delta_pitch)*cos(delta_yaw); initialTrans(1,0) = cos(delta_pitch)*sin(delta_yaw); initialTrans(1,1) = cos(delta_roll)*cos(delta_yaw) + sin(delta_roll)*sin(delta_pitch)*sin(delta_yaw); initialTrans(1,2) = -sin(delta_roll)*cos(delta_yaw) + cos(delta_roll)*sin(delta_pitch)*sin(delta_yaw); initialTrans(2,0) = -sin(delta_pitch); initialTrans(2,1) = sin(delta_roll)*cos(delta_pitch); initialTrans(2,2) = cos(delta_roll)*cos(delta_pitch); btMatrix3x3 rot_mat( initialTrans(0,0), initialTrans(0,1), initialTrans(0,2), initialTrans(1,0), initialTrans(1,1), initialTrans(1,2), initialTrans(2,0), initialTrans(2,1), initialTrans(2,2)); btVector3 t(0.0, 0.00, 0.01);*/

      tf::Transform delta_transform(rot_mat, t);
      setPoseCovariance(STANDARD_POSE_COVARIANCE);
      setTwistCovariance(STANDARD_TWIST_COVARIANCE);
      integrateAndPublish(delta_transform, l_image_msg->header.stamp);

      if (point_cloud_pub_.getNumSubscribers() > 0)
      {
        computeAndPublishPointCloud(l_info_msg, l_image_msg, r_info_msg, matches, inlier_indices);
      }
    }
    else
    {
      // odometer failed: publish identity with "bad" covariance and
      // mark the odometer lost so the next frame re-seeds it
      setPoseCovariance(BAD_COVARIANCE);
      setTwistCovariance(BAD_COVARIANCE);
      tf::Transform delta_transform;
      delta_transform.setIdentity();
      integrateAndPublish(delta_transform, l_image_msg->header.stamp);
      ROS_DEBUG("Call to VisualOdometryStereo::process() failed.");
      ROS_WARN_THROTTLE(1.0, "Visual Odometer got lost!");
      got_lost_ = true;
    }
  }
}
// Runs probabilistic Hough line detection on a monochrome input image,
// publishes the detected segments as a jsk_perception::LineArray and a
// debug image with the segments drawn in.
void do_work(const sensor_msgs::ImageConstPtr& msg, const std::string input_frame_from_msg)
{
  // Transform the image.
  try
  {
    // Convert the image into something opencv can handle.
    cv::Mat in_image = cv_bridge::toCvShare(msg, msg->encoding)->image;
    if (msg->encoding != enc::MONO8) {
      ROS_ERROR("Hough Lines assume monochrome image for input");
      return;
    }
    // color copy of the input used as the debug drawing canvas
    cv::Mat out_image;
    cvtColor(in_image, out_image, CV_GRAY2BGR);

    // Do the work
    ROS_INFO_STREAM("Hough Lines : rho:" << _rho << ", tehta:" << _theta << ", threshold:" << _threshold << ", minLineLength:" << _minLineLength << ", maxLineGap" << _maxLineGap);
#if 1
    // probabilistic Hough: returns finite segments (x1,y1,x2,y2)
    std::vector<cv::Vec4i> lines;
    cv::HoughLinesP( in_image, lines, _rho, _theta*CV_PI/180 , _threshold, _minLineLength, _maxLineGap );
    jsk_perception::LineArray out_lines;
    out_lines.header = msg->header;
    out_lines.lines.resize(lines.size());
    for( size_t i = 0; i < lines.size(); i++ )
    {
      out_lines.lines[i].x1 = lines[i][0];
      out_lines.lines[i].y1 = lines[i][1];
      out_lines.lines[i].x2 = lines[i][2];
      out_lines.lines[i].y2 = lines[i][3];
      cv::line( out_image, cv::Point(lines[i][0], lines[i][1]), cv::Point(lines[i][2], lines[i][3]), cv::Scalar(255,0,0), 3, 8 );
    }
#else
    // classic Hough variant (disabled): returns infinite lines
    // (rho, theta) that are converted to long display segments
    std::vector<cv::Vec2f> lines;
    cv::HoughLines( in_image, lines, _rho, _theta, _threshold*CV_PI/180, _minLineLength, _maxLineGap );
    jsk_perception::LineArray out_lines;
    out_lines.header = msg->header;
    out_lines.lines.resize(lines.size());
    for( size_t i = 0; i < lines.size(); i++ )
    {
      float rho = lines[i][0];
      float theta = lines[i][1];
      double a = cos(theta), b = sin(theta);
      double x0 = a*rho, y0 = b*rho;
      cv::Point pt1(cvRound(x0 + 1000*(-b)), cvRound(y0 + 1000*(a)));
      cv::Point pt2(cvRound(x0 - 1000*(-b)), cvRound(y0 - 1000*(a)));
      cv::line( out_image, pt1, pt2, cv::Scalar(0,0,255), 3, 8 );
    }
#endif
    // Publish the image.
    // NOTE(review): out_image was produced with CV_GRAY2BGR but is
    // published with the RGB8 encoding -- red/blue channels appear
    // swapped to subscribers; confirm which encoding is intended.
    sensor_msgs::Image::Ptr out_img = cv_bridge::CvImage(msg->header, enc::RGB8, out_image).toImageMsg();
    out_pub_.publish(out_lines);
    img_pub_.publish(out_img);
  }
  catch (cv::Exception &e)
  {
    ROS_ERROR("Image processing error: %s %s %s %i", e.err.c_str(), e.func.c_str(), e.file.c_str(), e.line);
  }
}
// Samples the piecewise-linear transfer function defined by m_Points
// into 'output': one entry per m_InputValueStep over
// [m_InputValueMin, m_InputValueMax]. Interpolated values are clamped
// to [m_OutputValueMin, m_OutputValueMax]; input positions before the
// first / after the last control point are padded with that point's
// value. Leaves 'output' empty when there are no control points or the
// range/step yields a single sample.
void CBCGPTransferFunction::Calculate(CArray<double, double>& output) const
{
	output.RemoveAll();

	int size = (int)m_Points.GetSize();
	if (size == 0)
	{
		return;
	}

	// number of samples across the input range, endpoints included
	int count = (int)fabs((m_InputValueMax - m_InputValueMin) / m_InputValueStep) + 1;
	if (count == 1)
	{
		return;
	}

	output.SetSize(count);

	// pad everything before the first control point with its value
	XPoint pt(m_Points[0]);
	if (m_InputValueMin < pt.m_Point)
	{
		for (int i = 0; i <= GetIndex(pt.m_Point); i++)
		{
			output[i] = pt.m_Value;
		}
	}
	// pad everything after the last control point with its value
	if (size > 1)
	{
		pt = m_Points[size - 1];
	}
	if (pt.m_Point < m_InputValueMax)
	{
		for (int i = GetIndex(pt.m_Point); i < count; i++)
		{
			output[i] = pt.m_Value;
		}
	}

	// linear
	size = (int)m_Points.GetSize ();
	if (size < 2)
	{
		return;
	}

	// interpolate each segment [k-1, k], walking from the last segment
	// down to the first
	for(long k = size - 1; k >= 1; k--)
	{
		CArray<double, double> points;

		XPoint pt1(m_Points[k - 1]);
		XPoint pt2(m_Points[k]);

		int index1 = GetIndex(pt1.m_Point);
		int index2 = GetIndex(pt2.m_Point);

		// per-sample increment across the segment
		double dY = (pt2.m_Value - pt1.m_Value) / (double)(index2 - index1);
		double value = pt1.m_Value;

		for(int i = index1; i <= index2; i++)
		{
			points.Add(bcg_clamp(value, m_OutputValueMin, m_OutputValueMax));
			value += dY;
		}

		// segments covering fewer than three samples are already
		// represented by their endpoints
		if(points.GetSize() <= 2)
		{
			continue;
		}

		int kInsert = index1;
		for(int kk = 0; kk <= points.GetSize() - 1; kk++)
		{
			output[kInsert++] = points[kk];
		}
	}
}