Exemplo n.º 1
0
static void
test_homo_move_ctor ()
{
    // Exercises tuple move construction between tuples of identical element
    // types ("homogenous"), from the empty tuple up to a six-element mixed
    // tuple containing the instrumented UserDefined class type.
    rw_info (0, __FILE__, __LINE__,
             "move constructor (homogenous tuples)");

    std::tuple<> et (std::tuple<> ()); _RWSTD_UNUSED (et);

    const int ci = std::rand ();

    std::tuple<int> it1 (ci);
    std::tuple<int> it2 (std::move (it1));
    test (__LINE__, it2, ci);

    std::tuple<const int> ct1 (ci);
    std::tuple<const int> ct2 = std::move (ct1);
    test (__LINE__, ct2, ci);

    // moving from a tuple<int> leaves the int value intact, so it1 is
    // still usable as the reference value here
    std::tuple<std::tuple<int> > nt1 (it1);
    std::tuple<std::tuple<int> > nt2 = std::move (nt1);
    test (__LINE__, nt2, it1);

    std::tuple<long, const char*> pt1 (1234567890L, "string");
    std::tuple<long, const char*> pt2 (std::move (pt1));
    test (__LINE__, pt2, 1234567890L, (const char*) "string");

    // UserDefined counts special-member invocations; the expected counters
    // are bumped by hand before each check
    const UserDefined ud (ci);
    std::tuple<UserDefined> ut1 (ud);
    UserDefined::reset ();
    std::tuple<UserDefined> ut2 (std::move (ut1));
    ++UserDefined::expect.move_ctor;
    test (__LINE__, ut2, ud);

    std::tuple<bool, char, int, double, void*, UserDefined>
        bt1 (true, 'a', ci, 3.14159, (void*) &ci, ud);
    ++UserDefined::expect.copy_ctor;
    std::tuple<bool, char, int, double, void*, UserDefined>
        bt2 (std::move (bt1));
    ++UserDefined::expect.move_ctor;
    test (__LINE__, bt2, true, 'a', ci, 3.14159, (void*) &ci, ud);
}
Exemplo n.º 2
0
	void mark(cv::Mat& img, PPoint2d &p, unsigned char l) {
		// Draw a circle with an X through it at point p, using gray level l.
		int r = 10;  // marker radius in pixels
		double x = p.x();
		double y = p.y();
		cv::Point2d pt(x, y);
		cv::Point2d pt1(x - r / 2, y - r / 2);
		cv::Point2d pt2(x + r / 2, y + r / 2);
		cv::Point2d pt3(x - r / 2, y + r / 2);
		cv::Point2d pt4(x + r / 2, y - r / 2);

		cout << p << endl;
		// Bounds check: x is the column coordinate and y the row coordinate,
		// so x must be tested against cols and y against rows (the original
		// had the two swapped, rejecting valid points on non-square images).
		if (x >= 0 && y >= 0 && x <= img.cols && y <= img.rows) {
			cv::circle(img, pt, r, l);
			cv::line(img, pt1, pt2, l);
			cv::line(img, pt3, pt4, l);
		}
	}
void displayopencv::DisplayBspline(const mathtools::application::Bspline<2> &bspline, cv::Mat &img, const mathtools::affine::Frame<2>::Ptr frame, const cv::Scalar &color, const unsigned int &nb_pts)
{
	// Sample the B-spline at nb_pts evenly spaced parameter values and draw
	// the resulting polyline on img, segment by segment.
	const double t_min = bspline.getInfBound();
	const double t_max = bspline.getSupBound();

	Eigen::Vector2d previous = mathtools::affine::Point<2>(bspline(t_min)).getCoords(frame);

	for(unsigned int sample = 1; sample < nb_pts; sample++)
	{
		const double t = t_min + (double)sample * (t_max - t_min) / (double)(nb_pts - 1);

		const Eigen::Vector2d current = mathtools::affine::Point<2>(bspline(t)).getCoords(frame);

		const cv::Point seg_start(previous.x(), previous.y());
		const cv::Point seg_end(current.x(), current.y());

		cv::line(img, seg_start, seg_end, color);

		previous = current;
	}
}
Exemplo n.º 4
0
void check_are_mergable()
{
  // Builds three conic arcs and checks Are_mergeable_2 on their x-monotone
  // polycurve wrappers: c1 and c2 share the endpoint (10,1) and are expected
  // to be mergeable; c1 and c3 are disjoint and are not.
  Polycurve_conic_traits_2 traits;
  Polycurve_conic_traits_2::Construct_x_monotone_curve_2
    construct_x_monotone_curve_2 = traits.construct_x_monotone_curve_2_object();
  Polycurve_conic_traits_2::Are_mergeable_2  are_mergeable_2 =
    traits.are_mergeable_2_object();

   //create a curve through three rational points (start, mid, end)
  Rat_point_2 ps1(1, 10);
  Rat_point_2 pmid1(5, 4);
  Rat_point_2 pt1(10, 1);
  Conic_curve_2 c1(ps1, pmid1, pt1);

  // c2 starts exactly where c1 ends
  Rat_point_2 ps2(10, 1);
  Rat_point_2 pmid2(15, 14);
  Rat_point_2 pt2(20, 20);
  Conic_curve_2 c2(ps2, pmid2, pt2);

  // c3 shares no endpoint with c1
  Rat_point_2 ps3(Rational(1,4), 4);
  Rat_point_2 pmid3(Rational(3,2), 2);
  Rat_point_2 pt3(2, Rational(1,3));
  Conic_curve_2 c3(ps3, pmid3, pt3);

  //construct x-monotone curve(compatible with polyline class)
   Polycurve_conic_traits_2::X_monotone_curve_2 polyline_xmc1 =
     construct_x_monotone_curve_2(c1);
   Polycurve_conic_traits_2::X_monotone_curve_2 polyline_xmc2 =
     construct_x_monotone_curve_2(c2);
   Polycurve_conic_traits_2::X_monotone_curve_2 polyline_xmc3 =
     construct_x_monotone_curve_2(c3);

  bool result = are_mergeable_2(polyline_xmc1, polyline_xmc2);
  std::cout << "Are_mergable:: Mergable x-monotone polycurves are Computed as: "
            << ((result)? "Mergable" : "Not-Mergable") << std::endl;

  result = are_mergeable_2(polyline_xmc1, polyline_xmc3);
  std::cout << "Are_mergable:: Non-Mergable x-monotone polycurves are Computed as: "
            << ((result)? "Mergable" : "Not-Mergable") << std::endl;
}
Exemplo n.º 5
0
void FixtureGroup_Test::swap()
{
    // Verifies FixtureGroup::swap() for three cases: two occupied points,
    // an occupied point with an empty one, and the reverse of that swap.
    FixtureGroup grp(m_doc);
    grp.setSize(QSize(4, 4));
    // fill the whole 4x4 grid with 16 single-channel fixtures
    for (quint32 id = 0; id < 16; id++)
    {
        Fixture* fxi = new Fixture(m_doc);
        fxi->setChannels(1);
        m_doc->addFixture(fxi);
        grp.assignFixture(fxi->id());
    }
    QCOMPARE(grp.headList().size(), 16);

    // pt2 = (2,1) holds the 7th assigned head, hence GroupHead(6, 0)
    QLCPoint pt1(0, 0);
    QLCPoint pt2(2, 1);
    QVERIFY(grp.headHash().contains(pt1) == true);
    QVERIFY(grp.headHash().contains(pt2) == true);
    QCOMPARE(grp.headHash()[pt1], GroupHead(0, 0));
    QCOMPARE(grp.headHash()[pt2], GroupHead(6, 0));

    // Switch places with two fixtures
    grp.swap(pt1, pt2);
    QVERIFY(grp.headHash().contains(pt1) == true);
    QVERIFY(grp.headHash().contains(pt2) == true);
    QCOMPARE(grp.headHash()[pt1], GroupHead(6, 0));
    QCOMPARE(grp.headHash()[pt2], GroupHead(0, 0));

    // Switch places with a fixture and an empty point
    pt2 = QLCPoint(500, 500);
    grp.swap(pt1, pt2);
    QVERIFY(grp.headHash().contains(pt1) == false);
    QVERIFY(grp.headHash().contains(pt2) == true);
    QCOMPARE(grp.headHash()[pt2], GroupHead(6, 0));

    // ...and back again
    grp.swap(pt1, pt2);
    QVERIFY(grp.headHash().contains(pt1) == true);
    QVERIFY(grp.headHash().contains(pt2) == false);
    QCOMPARE(grp.headHash()[pt1], GroupHead(6, 0));
}
Exemplo n.º 6
0
void test_wait_for_either_of_four_futures_4()
{
    // Only the fourth task is actually run, so wait_for_any() must return
    // index 3, and only f4 becomes ready (make_int_slowly's result, 42).
    boost::packaged_task<int> pt(make_int_slowly);
    boost::unique_future<int> f1(pt.get_future());
    boost::packaged_task<int> pt2(make_int_slowly);
    boost::unique_future<int> f2(pt2.get_future());
    boost::packaged_task<int> pt3(make_int_slowly);
    boost::unique_future<int> f3(pt3.get_future());
    boost::packaged_task<int> pt4(make_int_slowly);
    boost::unique_future<int> f4(pt4.get_future());
    
    // NOTE(review): temporary boost::thread object -- relies on the Boost
    // version's destructor semantics (detach) to keep the task running;
    // confirm against the Boost release this targets.
    boost::thread(::cast_to_rval(pt4));
    
    unsigned const future=boost::wait_for_any(f1,f2,f3,f4);
    
    BOOST_CHECK(future==3);
    BOOST_CHECK(!f1.is_ready());
    BOOST_CHECK(!f2.is_ready());
    BOOST_CHECK(!f3.is_ready());
    BOOST_CHECK(f4.is_ready());
    BOOST_CHECK(f4.get()==42);
}
Exemplo n.º 7
0
// Computes, via the coordinate-transformation COM service, the X and Y
// distance components between two points.  Returns false (after showing an
// error) when the service is not initialized or any COM call fails.
bool CCoordTransService::CoordTransDistanceEx (
	const DoublePair *pP1, const DoublePair *pP2, double &dX, double &dY)
{
	if (m_fIsInitialized) {
		try {
		// wrap the inputs in COM point objects
		CCSPoint pt1 (pP1 -> Width(), pP1 -> Height());
		CCSPoint pt2 (pP2 -> Width(), pP2 -> Height());
		W_DGMPoint outPt;

			// THROW_FAILED_HRESULT converts a failed HRESULT into _com_error
			THROW_FAILED_HRESULT(m_CTF -> DistancePoint(&pt1, &pt2, outPt.ppi()));
			THROW_FAILED_HRESULT(outPt -> get_X (&dX));
			THROW_FAILED_HRESULT(outPt -> get_Y (&dY));
			return true;

		} catch (_com_error &e) {
			ShowError (_COM_ERROR(e), IDS_CSSERRORCAPTION, g_cbCoordTransDistanceEx);
			return false;
		}
	}
	ShowError (TRIAS02_E_CSSNOTINITIALIZED, IDS_CSSERRORCAPTION, g_cbCoordTransDistanceEx);
	return false;
}
Exemplo n.º 8
0
// forward the message to the appropriate item:
// handles WM_DRAWITEM for an owner-drawn listbox by converting the Windows
// DRAWITEMSTRUCT into a wxDCTemp + wxRect and delegating to the item's
// OnDrawItem().
bool wxListBox::MSWOnDraw(WXDRAWITEMSTRUCT *item)
{
    // only owner-drawn control should receive this message
    wxCHECK( ((m_windowStyle & wxLB_OWNERDRAW) == wxLB_OWNERDRAW), false );

    DRAWITEMSTRUCT *pStruct = (DRAWITEMSTRUCT *)item;

    // the item may be -1 for an empty listbox
    if ( pStruct->itemID == (UINT)-1 )
        return false;

    wxListBoxItem *pItem = (wxListBoxItem *)m_aItems[pStruct->itemID];

    // wrap the HDC supplied by Windows and the item rectangle
    wxDCTemp dc((WXHDC)pStruct->hDC);
    wxPoint pt1(pStruct->rcItem.left, pStruct->rcItem.top);
    wxPoint pt2(pStruct->rcItem.right, pStruct->rcItem.bottom);
    wxRect rect(pt1, pt2);

    return pItem->OnDrawItem(dc, rect,
                             (wxOwnerDrawn::wxODAction)pStruct->itemAction,
                             (wxOwnerDrawn::wxODStatus)pStruct->itemState);
}
Exemplo n.º 9
0
void DrawResult(IplImage* image, windage::Matrix3 homography, CvScalar color = CV_RGB(255, 0, 0), int thickness = 1)
{
	// Project the four template corners through the homography and draw the
	// resulting quadrilateral on the image.
	windage::Vector3 corners[4] = {
		windage::Vector3(0.0, 0.0, 1.0),
		windage::Vector3(TEMPLATE_WIDTH, 0.0, 1.0),
		windage::Vector3(TEMPLATE_WIDTH, TEMPLATE_HEIGHT, 1.0),
		windage::Vector3(0.0, TEMPLATE_HEIGHT, 1.0),
	};

	CvPoint projected[4];
	for(int i = 0; i < 4; i++)
	{
		windage::Vector3 mapped = homography * corners[i];
		mapped /= mapped.z;	// back to inhomogeneous image coordinates
		projected[i] = cvPoint((int)mapped.x, (int)mapped.y);
	}

	// connect consecutive corners, wrapping back to the first
	for(int i = 0; i < 4; i++)
		cvLine(image, projected[i], projected[(i + 1) % 4], color, thickness);
}
void tst_QNetworkConfiguration::invalidPoint()
{
    // A default-constructed QNetworkConfiguration must be invalid: no name,
    // Invalid type, no state bits set, no roaming.
    QNetworkConfiguration pt;

    QVERIFY(pt.name().isEmpty());
    QVERIFY(!pt.isValid());
    QVERIFY(pt.type() == QNetworkConfiguration::Invalid);
    QVERIFY(!(pt.state() & QNetworkConfiguration::Defined));
    QVERIFY(!(pt.state() & QNetworkConfiguration::Discovered));
    QVERIFY(!(pt.state() & QNetworkConfiguration::Active));
    QVERIFY(!pt.isRoamingAvailable());

    // A copy of an invalid configuration must be equally invalid.
    QNetworkConfiguration pt2(pt);
    QVERIFY(pt2.name().isEmpty());
    QVERIFY(!pt2.isValid());
    QVERIFY(pt2.type() == QNetworkConfiguration::Invalid);
    QVERIFY(!(pt2.state() & QNetworkConfiguration::Defined));
    QVERIFY(!(pt2.state() & QNetworkConfiguration::Discovered));
    QVERIFY(!(pt2.state() & QNetworkConfiguration::Active));
    QVERIFY(!pt2.isRoamingAvailable());

}
Exemplo n.º 11
0
TEST(NumLib, SpatialFunctionInterpolationSurface)
{
	// Interpolation over the square surface [0,10]x[0,10] with node values
	// 0 at x=0 and 100 at x=10: the interpolated value at x=k is 10*k for
	// any y on the surface; points off the surface yield the max() default.
	// create geometry
	GeoLib::Point pt1(0.0, 0.0, 0.0);
	GeoLib::Point pt2(10.0, 0.0, 0.0);
	GeoLib::Point pt3(10.0, 10.0, 0.0);
	GeoLib::Point pt4(0.0, 10.0, 0.0);
	std::vector<GeoLib::Point*> pnts = {&pt1, &pt2, &pt3, &pt4};
	GeoLib::Polyline ply0(pnts);
	// closed polyline around the square (ends back at point 0)
	ply0.addPoint(0);
	ply0.addPoint(1);
	ply0.addPoint(2);
	ply0.addPoint(3);
	ply0.addPoint(0);
	std::unique_ptr<GeoLib::Surface>  sfc1(GeoLib::Surface::createSurface(ply0));

	// define a function: per-point values, max() as out-of-surface default
	const std::vector<std::size_t> vec_point_ids = {{0, 1, 2, 3}};
	const std::vector<double> vec_point_values = {{0., 100., 100., 0.}};
	NumLib::LinearInterpolationOnSurface interpolate(*sfc1, vec_point_ids, vec_point_values, std::numeric_limits<double>::max());

	// normal
	for (unsigned k=0; k<10; k++) {
		ASSERT_DOUBLE_EQ(k*10., interpolate(GeoLib::Point(k,0,0)));
		ASSERT_DOUBLE_EQ(k*10., interpolate(GeoLib::Point(k,5,0)));
		ASSERT_DOUBLE_EQ(k*10., interpolate(GeoLib::Point(k,10,0)));
	}

	// failure
	// x, y
	ASSERT_DOUBLE_EQ(std::numeric_limits<double>::max(), interpolate(GeoLib::Point(-1,-1,0)));
	ASSERT_DOUBLE_EQ(std::numeric_limits<double>::max(), interpolate(GeoLib::Point(11,-1,0)));
	ASSERT_DOUBLE_EQ(std::numeric_limits<double>::max(), interpolate(GeoLib::Point(11,11,0)));
	ASSERT_DOUBLE_EQ(std::numeric_limits<double>::max(), interpolate(GeoLib::Point(-1,11,0)));
	// z
	ASSERT_DOUBLE_EQ(std::numeric_limits<double>::max(), interpolate(GeoLib::Point(0,0,1)));
	ASSERT_DOUBLE_EQ(std::numeric_limits<double>::max(), interpolate(GeoLib::Point(0,0,-1)));
}
Exemplo n.º 12
0
// Move the image: call from OnMouseMove. Pt is in window client coordinates if window
// is non-NULL, or in screen coordinates if NULL.
bool wxGenericDragImage::Move(const wxPoint& pt)
{
    wxASSERT_MSG( (m_windowDC != NULL), wxT("No window DC in wxGenericDragImage::Move()") );

    // Work in screen coordinates when dragging full-screen.
    wxPoint pt2(pt);
    if (m_fullScreen)
        pt2 = m_window->ClientToScreen(pt);

    // Erase at old position, then show at the current position
    wxPoint oldPos = m_position;

    // Only erase the previous image if something was actually drawn there.
    bool eraseOldImage = (m_isDirty && m_isShown);

    if (m_isShown)
        RedrawImage(oldPos - m_offset, pt2 - m_offset, eraseOldImage, true);

    m_position = pt2;

    // Once shown and moved, the background is dirty for the next call.
    if (m_isShown)
        m_isDirty = true;

    return true;
}
Exemplo n.º 13
0
bool MgCmdDrawRect::touchEnded(const MgMotion* sender)
{
    // Finish the rubber-band rectangle: corner 1 is where the touch began,
    // corner 2 is the (snapped) lift point.
    Point2d pt1(m_startPt);
    Point2d pt2(snapPoint(sender));
    MgBaseRect* shape = (MgBaseRect*)dynshape()->shape();
    
    // For square shapes, derive a square from the start point using the
    // larger of the two drag extents (Box2d start/size ctor -- confirm
    // whether m_startPt is treated as center or corner).
    if (shape->getFlag(kMgSquare)) {
        float len = (float)mgMax(fabs(pt2.x - pt1.x), fabs(pt2.y - pt1.y));
        Box2d rect(m_startPt, 2.f * len, 0);
        pt1 = rect.leftTop();
        pt2 = rect.rightBottom();
    }
    shape->setRect(pt1, pt2);
    dynshape()->shape()->update();

    // Reject degenerate rectangles smaller than ~5 mm on screen.
    float minDist = mgDisplayMmToModel(5, sender);

    if (shape->getWidth() > minDist && shape->getHeight() > minDist) {
        addRectShape(sender);
    }

    return _touchEnded(sender);
}
Exemplo n.º 14
0
void plotLinesToPainter(QPainter& painter,
			const Numpy1DObj& x1, const Numpy1DObj& y1,
			const Numpy1DObj& x2, const Numpy1DObj& y2,
			const QRectF* clip, bool autoexpand)
{
  // Draw the line segments (x1[i],y1[i]) -> (x2[i],y2[i]), optionally
  // clipped to *clip.  With autoexpand the clip rectangle is grown by the
  // pen width so line caps at the border are not cut off.
  const int maxsize = min(x1.dim, x2.dim, y1.dim, y2.dim);

  // if autoexpand, expand rectangle by line width
  QRectF clipcopy;
  if ( clip != 0 && autoexpand )
    {
      const qreal lw = painter.pen().widthF();
      // renamed from x1/y1/x2/y2 so they no longer shadow the parameters
      // of the same names (a latent source of confusion and -Wshadow noise)
      qreal cx1, cy1, cx2, cy2;
      clip->getCoords(&cx1, &cy1, &cx2, &cy2);
      clipcopy.setCoords(cx1, cy1, cx2, cy2);
      clipcopy.adjust(-lw, -lw, lw, lw);
    }

  if( maxsize != 0 )
    {
      QVector<QLineF> lines;
      for(int i = 0; i < maxsize; ++i)
	{
	  QPointF pt1(x1(i), y1(i));
	  QPointF pt2(x2(i), y2(i));
	  if( clip != 0 )
	    {
	      // clipLine trims pt1/pt2 in place and reports whether any part
	      // of the segment lies inside the clip rectangle
	      if( clipLine(clipcopy, pt1, pt2) )
		lines << QLineF(pt1, pt2);
	    }
	  else
	    lines << QLineF(pt1, pt2);
	}

      painter.drawLines(lines);
    }
}
Exemplo n.º 15
0
static void
test_lt ()
{
    // Exercises tuple operator<: irreflexivity (t < t is false) and
    // lexicographic ordering -- the first unequal element decides.
    rw_info (0, __FILE__, __LINE__, "operator<");

    std::tuple<> nt1, nt2;
    TEST (!(nt1 < nt1));
    TEST (!(nt1 < nt2));

    std::tuple<int> it1 (1), it2 (2);
    TEST (!(it1 < it1));
    TEST (it1 < it2);

    UserDefined ud1 (1), ud2 (2);
    std::tuple<UserDefined> ut1 (ud1), ut2 (ud2);
    TEST (!(ut1 < ut1));
    TEST (ut1 < ut2);

    std::tuple<long, const char*> pt1 (1234L, "string");
    TEST (!(pt1 < pt1));
    // differs in the first element only
    std::tuple<long, const char*> pt2 (1235L, "string");
    TEST (pt1 < pt2);
    // first elements equal, second decides
    // NOTE(review): comparing const char* elements compares pointers, not
    // contents -- the expected result depends on literal placement; confirm.
    std::tuple<long, const char*> pt3 (1234L, "strings");
    TEST (pt1 < pt3);

    // bt2 differs from bt1 in the int element, bt3 in the UserDefined one
    std::tuple<bool, char, int, double, void*, UserDefined>
        bt1 (true, 'a', 255, 3.14159, &nt1, ud1),
        bt2 (true, 'a', 256, 3.14159, &nt1, ud1),
        bt3 (true, 'a', 255, 3.14159, &nt1, ud2);
    rw_assert (!(bt1 < bt1), __FILE__, __LINE__,
               "bt1 < bt1, got true, expected false");
    rw_assert (bt1 < bt2, __FILE__, __LINE__,
               "bt1 < bt2, got false, expected true");
    rw_assert (bt1 < bt3, __FILE__, __LINE__,
               "bt1 < bt3, got false, expected true");
}
Exemplo n.º 16
0
TEST_F(CreatePolygonTreesTest, P0AndP1AndP2)
{
	// _p2 contains both _p0 and _p1, so createPolygonTrees() should merge
	// the three root nodes into one tree whose root has two children.
	GeoLib::SimplePolygonTree *pt0(new GeoLib::SimplePolygonTree(_p0, nullptr));
	GeoLib::SimplePolygonTree *pt1(new GeoLib::SimplePolygonTree(_p1, nullptr));
	GeoLib::SimplePolygonTree *pt2(new GeoLib::SimplePolygonTree(_p2, nullptr));

	std::list<GeoLib::SimplePolygonTree*> pt_list;
	pt_list.push_back(pt0);
	pt_list.push_back(pt1);
	pt_list.push_back(pt2);
	ASSERT_EQ(3u, pt_list.size());

	// containment relations that drive the expected tree shape
	ASSERT_FALSE(_p0->isPolylineInPolygon(*_p1));
	ASSERT_FALSE(_p0->isPolylineInPolygon(*_p2));
	ASSERT_FALSE(_p1->isPolylineInPolygon(*_p0));
	ASSERT_FALSE(_p1->isPolylineInPolygon(*_p2));
	ASSERT_TRUE(_p2->isPolylineInPolygon(*_p0));
	ASSERT_TRUE(_p2->isPolylineInPolygon(*_p1));

	createPolygonTrees(pt_list);
	ASSERT_EQ(1u, pt_list.size());
	ASSERT_EQ(2u, (*(pt_list.begin()))->getNChilds());
	// delete the remaining root; child nodes are presumably released by the
	// tree's destructor -- confirm SimplePolygonTree ownership semantics
	std::for_each(pt_list.begin(), pt_list.end(), std::default_delete<GeoLib::SimplePolygonTree>());
}
Exemplo n.º 17
0
Cube::Cube(){
    // Axis-aligned 10x10x10 cube at the origin, built from six quad faces.
    // Corner coordinates are (x, y, z).
    geoType = CUBE_GEO;
    p = new Poly();
    Point pt1(0,0,0);
    Point pt2(0,0,10);
    Point pt3(10,0,10);
    Point pt4(10,0,0);
    Point pt5(10,10,0);
    Point pt6(10,10,10);
    Point pt7(0,10,10);
    Point pt8(0,10,0);

    // NOTE(review): the original labels (bottom/right/front/top/left/back)
    // did not match the coordinates; each face is named here by the
    // coordinate its four corners actually share.
    p->addFace(pt1, pt2, pt7, pt8); // x = 0 face
    p->addFace(pt3, pt4, pt5, pt6); // x = 10 face
    p->addFace(pt1, pt2, pt3, pt4); // y = 0 face
    p->addFace(pt5, pt6, pt7, pt8); // y = 10 face
    p->addFace(pt1, pt4, pt5, pt8); // z = 0 face
    p->addFace(pt2, pt3, pt6, pt7); // z = 10 face

    p->computeNormals();
    this->addPoly(p); // this adds it to scene
    p->bound();
    this->bound();
}
void createBlock()
{
	// Creates a block-table record named DYNBLKSAMP_BLOCKNAME containing
	// four AcDbLine entities that form the closed square (0,0)-(2,2).
	// The raw `new` objects are handed over to the drawing database by
	// add()/appendAcDbEntity(); close() releases this function's reference.
	AcDbBlockTableRecord* pBTR = new AcDbBlockTableRecord();
	pBTR->setName(DYNBLKSAMP_BLOCKNAME);
	AcDbSymbolTable* pBT = NULL;
	acdbHostApplicationServices()->workingDatabase()->getBlockTable(pBT, AcDb::kForWrite);
	pBT->add(pBTR);
	pBT->close();	
	
	// bottom edge: (0,0) -> (2,0)
	AcGePoint3d pt1(0.0, 0.0, 0.0);
	AcGePoint3d pt2(2.0, 0.0, 0.0);
	AcDbLine* pLine = new AcDbLine(pt1, pt2);
	pBTR->appendAcDbEntity(pLine);
	pLine->close();

	// right edge: (2,0) -> (2,2)
	pt1 = pt2;
	pt2.y = 2.0;
	pLine = new AcDbLine(pt1, pt2);
	pBTR->appendAcDbEntity(pLine);
	pLine->close();

	// top edge: (2,2) -> (0,2)
	pt1 = pt2;
	pt2.x = 0.0;
	pLine = new AcDbLine(pt1, pt2);
	pBTR->appendAcDbEntity(pLine);
	pLine->close();

	// left edge: (0,2) -> (0,0)
	pt1 = pt2;
	pt2.y = 0.0;
	pLine = new AcDbLine(pt1, pt2);
	pBTR->appendAcDbEntity(pLine);
	pLine->close();
	
	pBTR->close();
}
void CBCGPTransferFunction::Calculate(CArray<double, double>& output) const
{
	// Rasterizes the transfer function defined by m_Points into `output`,
	// one sample per m_InputValueStep over [m_InputValueMin, m_InputValueMax]:
	// constant extrapolation outside the first/last control point, linear
	// interpolation (clamped to the output range) between control points.
	output.RemoveAll();

	int size = (int)m_Points.GetSize();
	if (size == 0)
	{
		return;
	}

	int count = (int)fabs((m_InputValueMax - m_InputValueMin) / m_InputValueStep) + 1;
	if (count == 1)
	{
		return;
	}

	output.SetSize(count);

	// fill [min, first point] with the first point's value
	XPoint pt(m_Points[0]);
	if (m_InputValueMin < pt.m_Point)
	{
		for (int i = 0; i <= GetIndex(pt.m_Point); i++)
		{
			output[i] = pt.m_Value;
		}
	}

	if (size > 1)
	{
		pt = m_Points[size - 1];
	}

	// fill [last point, max] with the last point's value
	if (pt.m_Point < m_InputValueMax)
	{
		for (int i = GetIndex(pt.m_Point); i < count; i++)
		{
			output[i] = pt.m_Value;
		}
	}

	// linear
	size = (int)m_Points.GetSize ();
	if (size < 2)
	{
		return;
	}

	// interpolate each control-point segment, last pair first
	for(long k = size - 1; k >= 1; k--)
	{
		CArray<double, double> points;

		XPoint pt1(m_Points[k - 1]);
		XPoint pt2(m_Points[k]);

		int index1 = GetIndex(pt1.m_Point);
		int index2 = GetIndex(pt2.m_Point);
		// NOTE(review): divides by zero if two control points map to the
		// same sample index -- confirm m_Points are strictly increasing.
		double dY = (pt2.m_Value - pt1.m_Value) / (double)(index2 - index1);
		double value = pt1.m_Value;
		for(int i = index1; i <= index2; i++)
		{
			points.Add(bcg_clamp(value, m_OutputValueMin, m_OutputValueMax));
			value += dY;
		}

		// segments shorter than three samples are already covered
		if(points.GetSize() <= 2)
		{
			continue;
		}

		int kInsert = index1;
		for(int kk = 0; kk <= points.GetSize() - 1; kk++)
		{
			output[kInsert++] = points[kk];
		}
	}
}
Exemplo n.º 20
0
void Engine::update_world(double start_time, double delta_time)
{
	frame_stop = start_time + delta_time;

	double start = stopwatch();
	
	bucVec keys;
	bucVec new_keys;
	bucVec obsolete_keys;
	

	//Update the hashboxes for all dynamic objects
	for(dyn_iterator dyn_it = wr->begin_dyn();dyn_it != wr->end_dyn(); ++dyn_it){
	
		reaper::object::phys::ObjectAccessor &phys_acc = dyn_it->get_physics();
		
		const double len = length(phys_acc.vel)*delta_time;

		const double ext1 = phys_acc.radius + len + 
			 phys_acc.max_acc * delta_time* delta_time / 2;
		
		const double ext2 = ext1 - 2* len;

		const Point &pos = dyn_it->get_pos(); 
		
		Point pt1( pos + norm(phys_acc.vel)*ext1 );
		Point pt2( pos - norm(phys_acc.vel)*ext2 );

		const Point p1(min(pt1.x,pt2.x),min(pt1.y,pt2.y), min(pt1.z,pt2.z) );
		const Point p2(max(pt1.x,pt2.x), max(pt1.y,pt2.y), max(pt1.z,pt2.z) );

		the_grid.calc_key(p1,p2,keys);		
		
		bucVec& current_k = old_keys[dyn_it->get_id()];
				
		for(int i = 0;i < keys.size(); ++i){
			bucVec::const_iterator p = 
				find(current_k.begin(), current_k.end(), keys[i] );
			
			if(p == current_k.end() )
				new_keys.push_back(keys[i]);
		}
		
		for(int i = 0;i < current_k.size(); ++i){
			bucVec::const_iterator p2 = 
				find(keys.begin(), keys.end(), current_k[i] );
			
			if(p2 == keys.end() )
				obsolete_keys.push_back(keys[i]);
		}

		for(int i = 0;i < new_keys.size();++i){
			collision_table.insert(dyn_it->get_id(),new_keys[i]);
			const set<int>& ids = collision_table.get_ids(new_keys[i]);
			for(std::set<int>::const_iterator j = ids.begin();j != ids.end(); ++j){
				int res = close_pairs.increase(dyn_it->get_id(),*j);
				if(res == 1){
					Pair* pr;
					SillyPtr si = wr->lookup_si(*j);
					StaticPtr st = wr->lookup_st(*j);
					DynamicPtr dyn = wr->lookup_dyn(*j);
					ShotPtr sh = wr->lookup_shot(*j);
					if( si.valid() ){
						pr = new SillyDynPair(si,*dyn_it);
					}
					else if( st.valid() )
						pr = new StaticDynPair(st,*dyn_it);
					else if( dyn.valid() )
						pr = new DynDynPair(dyn,*dyn_it);
					else if( sh.valid() )
						pr = new ShotDynPair(sh,*dyn_it);
					pr->calc_lower_bound();
					prio_queue.insert(pr);
				}
			}
		}
		
		
		
		for(int i = 0;i < obsolete_keys.size();++i){
			collision_table.remove(dyn_it->get_id(),obsolete_keys[i]);
			const set<int> ids = collision_table.get_ids(new_keys[i]);
			for(std::set<int>::const_iterator j = ids.begin();j != ids.end(); ++j){
				close_pairs.decrease(dyn_it->get_id(),*j);
			}
		}

		

		new_keys.clear();
		obsolete_keys.clear();

	}

	double mid = stopwatch();
	
	if(!prio_queue.empty()) {
		while( !prio_queue.empty() && prio_queue.top()->get_lower_bound() < frame_stop ) {
			
			//Now check if we want to simulate a collision between objects                                 
			Pair* pair = prio_queue.top();
			
			double dt = pair->get_lower_bound() - pair->get_sim_time();
			
			if(dt < 0)
				cout << "Neg dt!" << endl;

			//the object will be simulated this far when the loop has ended

			//prio_queue.pop();
			//Pair simulate simulates until lower_bound                                   
			pair->simulate( pair->get_lower_bound() );                                        
			
			if(pair->check_distance() < Collision_Distance ) {                                                
				if (pair->collide( *(pair->get_collision()) ) ) 
					prio_queue.update_if(tstId(pair->get_id1(), pair->get_id2() ));
			}
						
			if( to_insert(frame_stop,*pair) ) {
				pair->calc_lower_bound();
				prio_queue.update(pair);
				
			}
			else{
				prio_queue.pop();
				delete pair;
			}
	

		}

	}
		

	double mid2 = stopwatch();

	//Now simulate all objects until framestop
	//Simulate all aobjekt so that they have a simtime of end of fram
	
	
	for(dyn_iterator i(wr->begin_dyn()); i != wr->end_dyn(); ++i){
		double dt = frame_stop - i->get_sim_time();
		if(dt < 0 ) 
			cout << "Negative dt!" << dt <<endl;
		i->simulate(dt);
	}

	double end = stopwatch();

	cout << "Timing info Hash: " << mid - start << " treap: " << mid2 - mid << endl;

	/*
	for(st_iterator i(wr->begin_st()); i != wr->end_st(); ++i){
		double dt = frame_stop - i->get_sim_time();
		i->simulate(dt);
	}
	
	for(shot_iterator i(wr->begin_shot()); i != wr->end_shot(); ++i){
		double dt = frame_stop - i->get_sim_time();
		i->simulate(dt);
	}

	*/
	
	
}
void CAutomaticBreakingDeviceTABFallOffDetector::DetectError(vector<SErrorInfoArray>& errorInfoArray,
	vector<Mat>& resultImageArray, /*vector<PositionOfComponents>& positionOfComponents, */int statues /*= 0*/)
{
	// Template-matches the pull ring (TAB) of the derailment automatic
	// braking device in four camera views.  With statues == 1 each match is
	// additionally verified by a Hough-line angle test (fall-off error);
	// with statues == 0 a "lost" error is reported for views where fewer
	// than two rings were found in total.

	// Flip views 2 and 3 so all four carriages face the same direction
	flip(m_srcImageArray[2], m_srcImageArray[2], 1);
	flip(m_srcImageArray[3], m_srcImageArray[3], 1);
	flip(resultImageArray[m_interstedIndex[2]], resultImageArray[m_interstedIndex[2]], 1);
	flip(resultImageArray[m_interstedIndex[3]], resultImageArray[m_interstedIndex[3]], 1);



	int count = 0;								// number of pull rings found
	int flag[4] = {0,0,0,0};
	for (int i = 0; i <4; ++i)
	{
		// search ROI: left quarter (plus margin) of the middle 3/5 rows
		Rect roiRect;
		roiRect.x = 0;
		roiRect.y = m_srcImageArray[i].rows/5;
		roiRect.width = m_srcImageArray[i].cols/4+60;
		roiRect.height = m_srcImageArray[i].rows/5*3;

		float maxMatchVal = -1.0;
		Point matchPoint;
		int templateIndex = 0;

		// find the template with the best normalized-correlation score
		for (int j = 0; j != m_templateImageArray.size(); ++j)
		{
			Mat compareResult;																// template-match result matrix
			// ROI in which the device is searched for
			Mat searchTruckLiveLever_ROI;

			if (!IsValidRectInMat(m_srcImageArray[i], roiRect))
			{
				continue;
			}

			searchTruckLiveLever_ROI = m_srcImageArray[i](roiRect);
			compareResult.create(searchTruckLiveLever_ROI.rows - m_templateImageArray[j].rows + 1, 
				searchTruckLiveLever_ROI.cols - m_templateImageArray[j].cols + 1, CV_32FC1);

			matchTemplate(searchTruckLiveLever_ROI, m_templateImageArray[j], compareResult, CV_TM_CCOEFF_NORMED);	// template matching

			double minVal,maxVal;															// min / max values in the result
			Point minLoc;																	// location of the minimum
			Point matchLoc;																	// location of the best match
			minMaxLoc(compareResult, &minVal, &maxVal, &minLoc, &matchLoc, cv::Mat());		// find the best-matching point
			if (maxVal > maxMatchVal)
			{
				maxMatchVal = maxVal;
				matchPoint.x = matchLoc.x + roiRect.x;
				matchPoint.y =roiRect.y+matchLoc.y;
				templateIndex = j;
			}
		}

		// treat as a match when the best score is high enough
		// NOTE(review): the original comment said 0.6 but the code uses 0.45
		if (maxMatchVal >= 0.45)
		{
			++count;
			flag[i] = 1;
			if (statues == 1)
			{
				// crop the approximate ring location for the angle check
				Mat ROI= m_srcImageArray[i](cv::Rect(matchPoint.x, matchPoint.y, m_templateImageArray[templateIndex].cols, m_templateImageArray[templateIndex].rows));
				Mat ROICanny= m_srcImageArray[i](cv::Rect(matchPoint.x, matchPoint.y, m_templateImageArray[templateIndex].cols, m_templateImageArray[templateIndex].rows)).clone();
				Canny(ROICanny, ROICanny,20, 120);		
				vector<Vec4i> lines;		
				HoughLinesP(ROICanny,lines,1,CV_PI/180,10,30, 150);  
				std::vector<cv::Vec4i>::const_iterator it= lines.begin();
				if (lines.size()==0)
				{
					// no lines detected: cannot judge the angle, accept match
				}
				else
				{
					// find the line with the largest horizontal extent and
					// compute its slope angle in degrees
					double tanb,tanc,tanResult,tanResult2,tanResult3;
					tanc=0;
					tanb=0;

					while (it!=lines.end()) {       
						cv::Point pt1((*it)[0]+matchPoint.x,(*it)[1]+matchPoint.y);        
						cv::Point pt2((*it)[2]+matchPoint.x,(*it)[3]+matchPoint.y);
						double m_i=(*it)[0]-(*it)[2];
						double m_i2=abs(m_i);
						if(m_i2>tanc){
							tanc=m_i2;
							tanb=abs((*it)[1]-(*it)[3]);
						}     
						++it;   
					}
					tanResult=tanb/tanc;
					tanResult2 = atan(tanResult);
					tanResult3=tanResult2*180.0/3.141592654;
					if(tanResult3<=30){
						// angle <= 30 degrees: ring is in place, no error
					}
					else
					{
						// steep line: ring has fallen off -- report the error
						SErrorInfoArray sei;
						sei.errorMask |= DEMO_ERROR_DERAILMENT_AUTOMATIC_BREAKING_DEVICE_TAB_FALLOFF;
						sei.realErrorMask |= DEMO_ERROR_DERAILMENT_AUTOMATIC_BREAKING_DEVICE_TAB_FALLOFF;
						sei.errorImageFile = m_srcImageFileArray[i];
						// NOTE(review): every rectangle() below draws on
						// resultImageArray[m_interstedIndex[0]] even for
						// cases 1-3, and some rectangles do not match the
						// sei positions -- looks like copy-paste; confirm.
						switch (i)
						{
						case 0:
							sei.leftPos = 0;
							sei.rightPos = 300;
							sei.topPos = 500;
							sei.bottomPos = 800;
							sei.confidence = 61;
							sei.errorImageIndex = m_interstedIndex[0];
							rectangle(resultImageArray[m_interstedIndex[0]], Point(0,500),Point(300,800),Scalar(0,0,255),2);
							break;
						case 1:
							sei.leftPos = 1100;
							sei.rightPos = 1399;
							sei.topPos = 500;
							sei.bottomPos = 900;
							sei.confidence = 67;
							sei.errorImageIndex = m_interstedIndex[1];
							rectangle(resultImageArray[m_interstedIndex[0]], Point(1100,500),Point(1399,900),Scalar(0,0,255),2);
							break;
						case 2:
							sei.leftPos = 1100;
							sei.rightPos = 1399;
							sei.topPos = 500;
							sei.bottomPos = 900;
							sei.confidence = 62;
							sei.errorImageIndex = m_interstedIndex[2];
							rectangle(resultImageArray[m_interstedIndex[0]], Point(0,500),Point(300,800),Scalar(0,0,255),2);
							break;
						case 3:
							sei.leftPos = 0;
							sei.rightPos = 300;
							sei.topPos = 500;
							sei.bottomPos = 800;
							sei.confidence = 63;
							sei.errorImageIndex = m_interstedIndex[3];
							rectangle(resultImageArray[m_interstedIndex[0]], Point(1100,500),Point(1399,900),Scalar(0,0,255),2);
							break;
						}
						errorInfoArray.push_back(sei);
						continue;
					}
				}
			}
		}
	}

	// statues == 0: fewer than two rings found -> report "lost" for the
	// side whose paired views are missing the ring
	if (count < 2 && statues == 0)
	{
		if (flag[0] == 1 || flag[2] == 1)
		{
			if (flag[0] == 0)
			{

				SErrorInfoArray sei;
				sei.errorMask |= DEMO_ERROR_DERAILMENT_AUTOMATIC_BREAKING_DEVICE_TAB_LOST;
				sei.realErrorMask |= DEMO_ERROR_DERAILMENT_AUTOMATIC_BREAKING_DEVICE_TAB_LOST;
				sei.errorImageFile = m_srcImageFileArray[0];
				sei.leftPos = 0;
				sei.rightPos = 300;
				sei.topPos = 500;
				sei.bottomPos = 800;
				sei.confidence = 61;
				sei.errorImageIndex = m_interstedIndex[0];
				rectangle(resultImageArray[m_interstedIndex[0]], Point(0,500),Point(300,800),Scalar(0,0,255),2);
				sei.errorImageIndex = m_interstedIndex[0];
				errorInfoArray.push_back(sei);
			}
			if (flag[2] == 0)
			{

				SErrorInfoArray sei;
				sei.errorMask |= DEMO_ERROR_DERAILMENT_AUTOMATIC_BREAKING_DEVICE_TAB_LOST;
				sei.realErrorMask |= DEMO_ERROR_DERAILMENT_AUTOMATIC_BREAKING_DEVICE_TAB_LOST;
				sei.errorImageFile = m_srcImageFileArray[1];
				sei.leftPos = 1100;
				sei.rightPos = 1399;
				sei.topPos = 500;
				sei.bottomPos = 900;
				// NOTE(review): errorImageIndex is assigned twice with
				// different values in this branch -- confirm which is meant
				sei.errorImageIndex = m_interstedIndex[2];
				rectangle(resultImageArray[m_interstedIndex[0]], Point(0,500),Point(300,800),Scalar(0,0,255),2);
				sei.confidence = 67;
				sei.errorImageIndex = m_interstedIndex[1];
				errorInfoArray.push_back(sei);
			}
		}
		else
		{
			if (flag[1] == 0)
			{
				SErrorInfoArray sei;
				sei.errorMask |= DEMO_ERROR_DERAILMENT_AUTOMATIC_BREAKING_DEVICE_TAB_LOST;
				sei.realErrorMask |= DEMO_ERROR_DERAILMENT_AUTOMATIC_BREAKING_DEVICE_TAB_LOST;
				sei.errorImageFile = m_srcImageFileArray[2];
				sei.leftPos = 1100;
				sei.rightPos = 1399;
				sei.topPos = 500;
				sei.bottomPos = 900;
				sei.errorImageIndex = m_interstedIndex[1];
				rectangle(resultImageArray[m_interstedIndex[0]], Point(1100,500),Point(1399,900),Scalar(0,0,255),2);
				sei.confidence = 62;
				sei.errorImageIndex = m_interstedIndex[2];
				errorInfoArray.push_back(sei);
			}
			if (flag[3] == 0)
			{
				SErrorInfoArray sei;
				sei.errorMask |= DEMO_ERROR_DERAILMENT_AUTOMATIC_BREAKING_DEVICE_TAB_LOST;
				sei.realErrorMask |= DEMO_ERROR_DERAILMENT_AUTOMATIC_BREAKING_DEVICE_TAB_LOST;
				sei.errorImageFile = m_srcImageFileArray[3];
				sei.leftPos = 0;
				sei.rightPos = 300;
				sei.topPos = 500;
				sei.bottomPos = 800;
				sei.errorImageIndex = m_interstedIndex[3];
				rectangle(resultImageArray[m_interstedIndex[0]], Point(1100,500),Point(1399,900),Scalar(0,0,255),2);
				sei.confidence = 63;
				sei.errorImageIndex = m_interstedIndex[3];
				errorInfoArray.push_back(sei);
			}
		}
	}

	// Flip the views back to their original orientation
	flip(m_srcImageArray[2], m_srcImageArray[2], 1);
	flip(m_srcImageArray[3], m_srcImageArray[3], 1);
	flip(resultImageArray[m_interstedIndex[2]], resultImageArray[m_interstedIndex[2]], 1);
	flip(resultImageArray[m_interstedIndex[3]], resultImageArray[m_interstedIndex[3]], 1);
}
Exemplo n.º 22
0
	void CDuiHeaderCtrl::OnMouseMove( UINT nFlags,CPoint pt )
	{
		// Handles mouse movement over the header: with the left button down
		// it either drags a column (item swap) or resizes one; otherwise it
		// updates the hover highlight.
		if(m_bDragging || nFlags&MK_LBUTTON)
		{
			if(!m_bDragging)
			{
				// first move with the button down: enter dragging mode and,
				// if over an item, create the translucent drag image
				m_bDragging=TRUE;
				if(IsItemHover(m_dwHitTest) && m_bItemSwapEnable)
				{
					m_dwDragTo=m_dwHitTest;
					CRect rcItem=GetItemRect(LOWORD(m_dwHitTest));
					DrawDraggingState(m_dwDragTo);
					m_hDragImg=CreateDragImage(LOWORD(m_dwHitTest));
					CPoint pt=m_ptClick-rcItem.TopLeft();
					CDragWnd::BeginDrag(m_hDragImg,pt,0,128,LWA_ALPHA|LWA_COLORKEY);
				}
			}
			if(IsItemHover(m_dwHitTest))
			{
				if(m_bItemSwapEnable)
				{
					// dragging a column: track the drop target and move the
					// drag window (y is pinned to the original click)
					DWORD dwDragTo=HitTest(pt);
					CPoint pt2(pt.x,m_ptClick.y);
					ClientToScreen(GetContainer()->GetHostHwnd(),&pt2);
					if(IsItemHover(dwDragTo) && m_dwDragTo!=dwDragTo)
					{
						m_dwDragTo=dwDragTo;
						DUITRACE(_T("\n!!! dragto %d"),LOWORD(dwDragTo));
						DrawDraggingState(dwDragTo);
					}
					CDragWnd::DragMove(pt2);
				}
			}else if(m_dwHitTest!=-1)
			{// adjusting the column width
				int cxNew=m_nAdjItemOldWidth+pt.x-m_ptClick.x;
				if(cxNew<0) cxNew=0;
				m_arrItems[LOWORD(m_dwHitTest)].cx=cxNew;
				NotifyInvalidate();
				GetContainer()->DuiUpdateWindow();// update the window immediately
				// send the width-changing notification
				DUINMHDSIZECHANGING	nm;
				nm.hdr.hDuiWnd=m_hDuiWnd;
				nm.hdr.code=DUINM_HDSIZECHANGING;
				nm.hdr.idFrom=GetCmdID();
				nm.hdr.pszNameFrom=GetName();
				nm.nWidth=cxNew;
				DuiNotify((LPDUINMHDR)&nm);
			}
		}else
		{
			// no button held: update the hover state, repainting the items
			// that lost/gained the highlight (sortable headers only)
			DWORD dwHitTest=HitTest(pt);
			if(dwHitTest!=m_dwHitTest)
			{
				if(m_bSortHeader)
				{
					if(IsItemHover(m_dwHitTest))
					{
						WORD iHover=LOWORD(m_dwHitTest);
						m_arrItems[iHover].state=0;
						RedrawItem(iHover);
					}
					if(IsItemHover(dwHitTest))
					{
						WORD iHover=LOWORD(dwHitTest);
						m_arrItems[iHover].state=1;//hover
						RedrawItem(iHover);
					}
				}
				m_dwHitTest=dwHitTest;
			}
		}
		
	}
Exemplo n.º 23
0
int main( int nArgc, char ** papszArgv )
{
    // register drivers
    GDALAllRegister();

    if( nArgc < 2 )
        return EXIT_FAILURE;

    double dfaCornersX[5] = {0};
    double dfaCornersY[5] = {0};
    CPLString sFileName;

    // parse input values
    for( int iArg = 1; iArg < nArgc; iArg++ )
    {
        if( EQUAL(papszArgv[iArg],"-nw"))
        {
            CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(2);
            const char* pszCoord = papszArgv[++iArg];
            dfaCornersY[1] = CPLAtofM(pszCoord);
            pszCoord = papszArgv[++iArg];
            dfaCornersX[1] = CPLAtofM(pszCoord);
        }
        else if( EQUAL(papszArgv[iArg],"-ne"))
        {
            CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(2);
            const char* pszCoord = papszArgv[++iArg];
            dfaCornersY[2] = CPLAtofM(pszCoord);
            pszCoord = papszArgv[++iArg];
            dfaCornersX[2] = CPLAtofM(pszCoord);
        }
        else if( EQUAL(papszArgv[iArg],"-se"))
        {
            CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(2);
            const char* pszCoord = papszArgv[++iArg];
            dfaCornersY[3] = CPLAtofM(pszCoord);
            pszCoord = papszArgv[++iArg];
            dfaCornersX[3] = CPLAtofM(pszCoord);
        }
        else if( EQUAL(papszArgv[iArg],"-sw"))
        {
            CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(2);
            const char* pszCoord = papszArgv[++iArg];
            dfaCornersY[4] = CPLAtofM(pszCoord);
            pszCoord = papszArgv[++iArg];
            dfaCornersX[4] = CPLAtofM(pszCoord);
        }
        else if( EQUAL(papszArgv[iArg],"-c"))
        {
            CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(2);
            const char* pszCoord = papszArgv[++iArg];
            dfaCornersY[0] = CPLAtofM(pszCoord);
            pszCoord = papszArgv[++iArg];
            dfaCornersX[0] = CPLAtofM(pszCoord);
        }
        else if(sFileName.empty())
            sFileName = papszArgv[iArg];
    }

    OGRSpatialReference oOGRSpatialReference(SRS_WKT_WGS84);
    int nZoneNo = ceil( (180.0 + dfaCornersX[0]) / 6.0 );
    OGRSpatialReference oDstSpatialReference(SRS_WKT_WGS84);
    oDstSpatialReference.SetUTM(nZoneNo, dfaCornersY[0] > 0);

    // transform coordinates from WGS84 to UTM
    OGRCoordinateTransformation *poCT = OGRCreateCoordinateTransformation( &oOGRSpatialReference, &oDstSpatialReference);
    if(!poCT)
    {
        Usage("get coordinate transformation failed");
        return EXIT_FAILURE;
    }

    int nResult = poCT->Transform(5, dfaCornersX, dfaCornersY, NULL);
    if(!nResult)
    {
        Usage("transformation failed");
        return EXIT_FAILURE;
    }

    // open input dataset
    GDALDataset *poSrcDataset = (GDALDataset *) GDALOpen( sFileName, GA_ReadOnly ); // GA_Update
    char* pszSpaRefDef = NULL;
    if( oDstSpatialReference.exportToWkt(&pszSpaRefDef) != OGRERR_NONE)
    {
        CPLFree( pszSpaRefDef );
        GDALClose( (GDALDatasetH) poSrcDataset );
        return EXIT_FAILURE;
    }

    // search point along image
    // add GCP to opened raster
    OGRPoint ptCenter(dfaCornersX[0], dfaCornersY[0]);
    OGRPoint pt1(dfaCornersX[1], dfaCornersY[1]); // NW Cormer
    OGRPoint pt2(dfaCornersX[2], dfaCornersY[2]); // NE Corner
    OGRPoint pt3(dfaCornersX[3], dfaCornersY[3]); // SE Corner
    OGRPoint pt4(dfaCornersX[4], dfaCornersY[4]); // SW Corner
    int nGCPCount = 0;
    OGREnvelope DstEnv;
    GDAL_GCP *paGSPs = PrepareGCP(sFileName, &pt1, &pt2, &pt3, &pt4, &ptCenter, oDstSpatialReference, poSrcDataset->GetRasterXSize(), poSrcDataset->GetRasterYSize(), nGCPCount, DstEnv);

    if(poSrcDataset->SetGCPs(nGCPCount, paGSPs, pszSpaRefDef) != CE_None)
    {
        Usage( "Set GCPs failed" );
        return EXIT_FAILURE;
    }

    // create warper
    char **papszTO = NULL;
    papszTO = CSLSetNameValue( papszTO, "METHOD", "GCP_TPS" );
    papszTO = CSLSetNameValue( papszTO, "NUM_THREADS", "4" );
    papszTO = CSLSetNameValue( papszTO, "DST_SRS", pszSpaRefDef );
    papszTO = CSLSetNameValue( papszTO, "SRC_SRS", pszSpaRefDef );
    papszTO = CSLSetNameValue( papszTO, "INSERT_CENTER_LONG", "FALSE" );

    GDALDriver *poOutputDriver = (GDALDriver *) GDALGetDriverByName( "GTiff" );
    CPLSetConfigOption( "CHECK_WITH_INVERT_PROJ", "TRUE" );
    void* hTransformArg = GDALCreateGenImgProjTransformer2( poSrcDataset, NULL, papszTO );
    GDALTransformerInfo* psInfo = (GDALTransformerInfo*)hTransformArg;

    double adfThisGeoTransform[6];
    double adfExtent[4];
    int nThisPixels, nThisLines;

    // suggest the raster output size
    if( GDALSuggestedWarpOutput2( poSrcDataset, psInfo->pfnTransform, hTransformArg, adfThisGeoTransform, &nThisPixels, &nThisLines, adfExtent, 0 ) != CE_None )
    {
        Usage( "Suggest Output failed" );
        return EXIT_FAILURE;
    }

    adfThisGeoTransform[0] = DstEnv.MinX;
    adfThisGeoTransform[3] = DstEnv.MaxY;

    int nPixels = (int) ((DstEnv.MaxX - DstEnv.MinX) / adfThisGeoTransform[1] + 0.5);
    int nLines = (int) ((DstEnv.MaxY - DstEnv.MinY) / -adfThisGeoTransform[5] + 0.5);

    GDALSetGenImgProjTransformerDstGeoTransform( hTransformArg, adfThisGeoTransform);

    // create new raster
    CPLString sOutputRasterPath = CPLResetExtension(sFileName, "tif");
    GDALDataset  *poDstDataset = poOutputDriver->Create(sOutputRasterPath, nPixels, nLines, poSrcDataset->GetRasterCount(), GDT_Byte, NULL );
    if( NULL == poDstDataset )
    {
        Usage( "Create Output failed" );
        return EXIT_FAILURE;
    }
    poDstDataset->SetProjection( pszSpaRefDef );
    poDstDataset->SetGeoTransform( adfThisGeoTransform );

#ifdef APRROX_MAXERROR
    hTransformArg = GDALCreateApproxTransformer( GDALGenImgProjTransform,  hTransformArg, APRROX_MAXERROR);
    GDALTransformerFunc pfnTransformer = GDALApproxTransform;
    GDALApproxTransformerOwnsSubtransformer(hTransformArg, TRUE);
#else
    GDALTransformerFunc pfnTransformer = GDALGenImgProjTransform;
#endif // APRROX_MAXERROR

    // warp
    GDALWarpOptions *psWO = GDALCreateWarpOptions();

    psWO->eWorkingDataType = GDT_Byte;
    psWO->eResampleAlg = GRA_NearestNeighbour;

    psWO->hSrcDS = poSrcDataset;
    psWO->hDstDS = poDstDataset;

    psWO->pfnTransformer = pfnTransformer;
    psWO->pTransformerArg = hTransformArg;

    psWO->pfnProgress = GDALTermProgress;
    psWO->nBandCount = poSrcDataset->GetRasterCount();

    psWO->panSrcBands = (int *) CPLMalloc(psWO->nBandCount*sizeof(int));
    psWO->panDstBands = (int *) CPLMalloc(psWO->nBandCount*sizeof(int));

    for(int i = 0; i < psWO->nBandCount; ++i )
    {
        psWO->panSrcBands[i] = i+1;
        psWO->panDstBands[i] = i+1;
    }

    GDALWarpOperation oWO;
    if( oWO.Initialize( psWO ) == CE_None )
    {
#ifdef MULTI
        if( oWO.ChunkAndWarpMulti( 0, 0, poDstDataset->GetRasterXSize(), poDstDataset->GetRasterYSize() ) != CE_None)
#else //MULTI
        if( oWO.ChunkAndWarpImage( 0, 0, poDstDataset->GetRasterXSize(), poDstDataset->GetRasterYSize() ) != CE_None)
#endif //MULTI
        {
            const char* err = CPLGetLastErrorMsg();
            Usage( CPLSPrintf("Warp failed.%s", err) );
            return EXIT_FAILURE;
        }
    }

    // cleanup
    GDALDestroyWarpOptions( psWO );
    CSLDestroy( papszTO );

    CPLFree( pszSpaRefDef );
    GDALClose( (GDALDatasetH) poSrcDataset );
    GDALClose( (GDALDatasetH) poDstDataset );

    GDALDestroyDriverManager();

    return EXIT_SUCCESS;
}
Exemplo n.º 24
0
    // Detects line segments in a monochrome input image with the Hough
    // transform, publishes them as a LineArray message, and publishes a color
    // debug image with the segments drawn on top.
    // Non-MONO8 input is rejected with an error and no output is published.
    void do_work(const sensor_msgs::ImageConstPtr& msg, const std::string input_frame_from_msg)
    {
        // Transform the image.
        try
            {
                // Convert the image into something opencv can handle.
                cv::Mat in_image = cv_bridge::toCvShare(msg, msg->encoding)->image;

                // Hough detection only operates on single-channel input.
                if (msg->encoding != enc::MONO8) {
                    ROS_ERROR("Hough Lines assume monochrome image for input");
                    return;
                }

                // Color copy of the input, used as the debug drawing canvas.
                // NOTE(review): converted with CV_GRAY2BGR but published below
                // with encoding RGB8 — red/blue channels will appear swapped
                // to subscribers; confirm whether this is intended.
                cv::Mat out_image;
                cvtColor(in_image, out_image, CV_GRAY2BGR);

                // Do the work
                ROS_INFO_STREAM("Hough Lines : rho:" << _rho << ", tehta:" << _theta << ", threshold:" <<  _threshold << ", minLineLength:" <<  _minLineLength << ", maxLineGap" <<  _maxLineGap);
#if 1
                // Probabilistic Hough: yields finite segments (x1,y1)-(x2,y2).
                // _theta is given in degrees and converted to radians here.
                std::vector<cv::Vec4i> lines;
                cv::HoughLinesP( in_image, lines, _rho, _theta*CV_PI/180 , _threshold, _minLineLength, _maxLineGap );
                jsk_perception::LineArray out_lines;
                out_lines.header = msg->header;
                out_lines.lines.resize(lines.size());
                for( size_t i = 0; i < lines.size(); i++ )
                    {
                        out_lines.lines[i].x1 = lines[i][0];
                        out_lines.lines[i].y1 = lines[i][1];
                        out_lines.lines[i].x2 = lines[i][2];
                        out_lines.lines[i].y2 = lines[i][3];
                        cv::line( out_image, cv::Point(lines[i][0], lines[i][1]),
                                  cv::Point(lines[i][2], lines[i][3]), cv::Scalar(255,0,0), 3, 8 );
                    }
#else
                // Standard Hough: yields infinite lines in (rho, theta) form,
                // drawn by extending 1000px either side of the closest point.
                // NOTE(review): compiled out; the deg->rad conversion here is
                // applied to _threshold instead of _theta — looks transposed.
                std::vector<cv::Vec2f> lines;
                cv::HoughLines( in_image, lines, _rho, _theta, _threshold*CV_PI/180, _minLineLength, _maxLineGap );
                jsk_perception::LineArray out_lines;
                out_lines.header = msg->header;
                out_lines.lines.resize(lines.size());
                for( size_t i = 0; i < lines.size(); i++ )
                    {
                        float rho = lines[i][0];
                        float theta = lines[i][1];
                        double a = cos(theta), b = sin(theta);
                        double x0 = a*rho, y0 = b*rho;
                        cv::Point pt1(cvRound(x0 + 1000*(-b)),
                                      cvRound(y0 + 1000*(a)));
                        cv::Point pt2(cvRound(x0 - 1000*(-b)),
                                      cvRound(y0 - 1000*(a)));
                        cv::line( out_image, pt1, pt2, cv::Scalar(0,0,255), 3, 8 );
                    }
#endif

                // Publish the image.
                sensor_msgs::Image::Ptr out_img = cv_bridge::CvImage(msg->header, enc::RGB8, out_image).toImageMsg();
                out_pub_.publish(out_lines);
                img_pub_.publish(out_img);
            }
        catch (cv::Exception &e)
            {
                ROS_ERROR("Image processing error: %s %s %s %i", e.err.c_str(), e.func.c_str(), e.file.c_str(), e.line);
            }
    }
Exemplo n.º 25
0
/**
 * Overrides drawing of subentities. This is only ever called for solid fills.
 *
 * Non-solid hatches delegate drawing to each contained entity. For solid
 * hatches, a QPainterPath is assembled from all loops: lines and arcs are
 * accumulated as polygon points (with duplicate-point suppression at the
 * joins), circles and full ellipses are added as exact path ellipses, and
 * every closed polygon is added to the path, which is then filled with the
 * current pen color.
 */
void RS_Hatch::draw(RS_Painter* painter, RS_GraphicView* view, double& /*patternOffset*/) {

    if (!data.solid) {
        // Pattern hatch: just draw each subentity.
		for(auto se: entities){

            view->drawEntity(painter,se);
        }
        return;
    }

    //area of solid fill. Use polygon approximation, except trivial cases
    QPainterPath path;
    QList<QPolygon> paClosed;  // completed closed polygons
    QPolygon pa;               // polygon currently being accumulated
//    QPolygon jp;   // jump points

    // One-time contour optimization so loop edges run head-to-tail.
    if (needOptimization==true) {
		for(auto l: entities){

            if (l->rtti()==RS2::EntityContainer) {
                RS_EntityContainer* loop = (RS_EntityContainer*)l;

                loop->optimizeContours();
            }
        }
        needOptimization = false;
    }

    // loops:
	for(auto l: entities){
        l->setLayer(getLayer());

        if (l->rtti()==RS2::EntityContainer) {
            RS_EntityContainer* loop = (RS_EntityContainer*)l;

            // edges:
			for(auto e: *loop){

                e->setLayer(getLayer());
                switch (e->rtti()) {
                case RS2::EntityLine: {
                    // Append both endpoints in GUI coordinates; skip the start
                    // point when it coincides with the last accumulated point.
                    QPoint pt1(RS_Math::round(view->toGuiX(e->getStartpoint().x)),
                               RS_Math::round(view->toGuiY(e->getStartpoint().y)));
                    QPoint pt2(RS_Math::round(view->toGuiX(e->getEndpoint().x)),
                               RS_Math::round(view->toGuiY(e->getEndpoint().y)));

//                    if (! (pa.size()>0 && (pa.last() - pt1).manhattanLength()<=2)) {
//                        jp<<pt1;
//                    }
                    if(pa.size() && (pa.last()-pt1).manhattanLength()>=1)
                        pa<<pt1;
                    pa<<pt2;
                }
                    break;

                case RS2::EntityArc: {
//                    QPoint pt1(RS_Math::round(view->toGuiX(e->getStartpoint().x)),
//                               RS_Math::round(view->toGuiY(e->getStartpoint().y)));
//                    if (! (pa.size()>0 && (pa.last() - pt1).manhattanLength()<=2)) {
//                        jp<<pt1;
//                    }

                    // Approximate the arc as a polygon; drop its first point
                    // if it duplicates the last accumulated point.
                    QPolygon pa2;
                    RS_Arc* arc=static_cast<RS_Arc*>(e);

                    painter->createArc(pa2, view->toGui(arc->getCenter()),
                                       view->toGuiDX(arc->getRadius())
                                       ,arc->getAngle1(),arc->getAngle2(),arc->isReversed());
                    if(pa.size() &&pa2.size()&&(pa.last()-pa2.first()).manhattanLength()<1)
                        pa2.remove(0,1);
                    pa<<pa2;

                }
                    break;

                case RS2::EntityCircle: {
                    // Full circles go straight into the path as ellipses
                    // (exact on Qt >= 4.4), bypassing the polygon buffer.
                    RS_Circle* circle = static_cast<RS_Circle*>(e);
//                    QPoint pt1(RS_Math::round(view->toGuiX(circle->getCenter().x+circle->getRadius())),
//                               RS_Math::round(view->toGuiY(circle->getCenter().y)));
//                    if (! (pa.size()>0 && (pa.last() - pt1).manhattanLength()<=2)) {
//                        jp<<pt1;
//                    }

                    RS_Vector c=view->toGui(circle->getCenter());
                    double r=view->toGuiDX(circle->getRadius());
#if QT_VERSION >= 0x040400
                    path.addEllipse(QPoint(c.x,c.y),r,r);
#else
                    path.addEllipse(c.x - r, c.y + r, 2.*r, 2.*r);
//                    QPolygon pa2;
//                    painter->createArc(pa2, view->toGui(circle->getCenter()),
//                                       view->toGuiDX(circle->getRadius()),
//                                       0.0,
//                                       2*M_PI,
//                                       false);
//                    pa<<pa2;
#endif
                }
                    break;
                case RS2::EntityEllipse:
                // Elliptic arcs join the polygon buffer like circular arcs;
                // full ellipses form their own closed subpath.
                if(static_cast<RS_Ellipse*>(e)->isArc()) {
                    QPolygon pa2;
                    auto ellipse=static_cast<RS_Ellipse*>(e);

                    painter->createEllipse(pa2,
                                           view->toGui(ellipse->getCenter()),
                                           view->toGuiDX(ellipse->getMajorRadius()),
                                           view->toGuiDX(ellipse->getMinorRadius()),
                                           ellipse->getAngle()
                                           ,ellipse->getAngle1(),ellipse->getAngle2(),ellipse->isReversed()
                                           );
//                    qDebug()<<"ellipse: "<<ellipse->getCenter().x<<","<<ellipse->getCenter().y;
//                    qDebug()<<"ellipse: pa2.size()="<<pa2.size();
//                    qDebug()<<"ellipse: pa2="<<pa2;
                    if(pa.size() && pa2.size()&&(pa.last()-pa2.first()).manhattanLength()<1)
                        pa2.remove(0,1);
                    pa<<pa2;
                }else{
                    QPolygon pa2;
                    auto ellipse=static_cast<RS_Ellipse*>(e);
                    painter->createEllipse(pa2,
                                           view->toGui(ellipse->getCenter()),
                                           view->toGuiDX(ellipse->getMajorRadius()),
                                           view->toGuiDX(ellipse->getMinorRadius()),
                                           ellipse->getAngle(),
                                           ellipse->getAngle1(), ellipse->getAngle2(),
                                           ellipse->isReversed()
                                           );
                    path.addPolygon(pa2);
                }
                    break;
                default:
                    break;
                }
//                qDebug()<<"pa="<<pa;
                // When the accumulated polygon closes on itself, bank it and
                // start a new one.
                if( pa.size()>2 && pa.first() == pa.last()) {
                    paClosed<<pa;
                    pa.clear();
                }

            }

        }
    }
    // Force-close any leftover open polygon by repeating its first point.
    if(pa.size()>2){
        pa<<pa.first();
        paClosed<<pa;
    }

    for(auto& p:paClosed){
        path.addPolygon(p);
    }

    //bug#474, restore brush after solid fill
    const QBrush brush(painter->brush());
    const RS_Pen pen=painter->getPen();
    painter->setBrush(pen.getColor());
    painter->disablePen();
    painter->drawPath(path);
    painter->setBrush(brush);
    painter->setPen(pen);


}
Exemplo n.º 26
0
// Renders a live bar chart of every sink's queue size (with the sink's name
// and its min-tree rate) onto a freshly allocated video frame, stamps the
// pipeline uptime in the top-right corner, appends the frame to the input
// frameset and forwards the frameset downstream.
void VideoPipelineStats::ProcessFrame(FrameSetPtr input, list<FrameSetPtr>* output) {
  const int num_bins = sinks_.size();
  const int kBorder = 40;
  // Vertical scale chosen so the maximum queue height leaves ~30% headroom.
  const float bar_scale = (options_.frame_height - kBorder) /
    (options_.max_queue_height * 1.3);

  // Fresh frame acting as the drawing canvas.
  shared_ptr<VideoFrame> stats_frame(new VideoFrame(options_.frame_width,
                                                    options_.frame_height,
                                                    3,
                                                    frame_width_step_));

  cv::Mat canvas;
  stats_frame->MatView(&canvas);
  canvas.setTo(180);  // light-gray background

  // One slot per sink, plus an empty slot of padding on each side.
  const int slot_width = canvas.cols / (num_bins + 2);
  for (int bin = 0; bin < num_bins; ++bin) {
    const int queue_size = sinks_[bin].first->GetQueueSize();
    const int center_col = (bin + 1) * slot_width;
    const int bar_top = static_cast<int>(
        options_.frame_height - kBorder - queue_size * bar_scale);
    const int text_x = center_col - slot_width / 3;

    // Filled bar (negative thickness fills the rectangle).
    cv::rectangle(canvas,
                  cv::Point(text_x, bar_top),
                  cv::Point(center_col + slot_width / 3, canvas.rows - kBorder),
                  CV_RGB(0, 0, 200),
                  -2);

    // Queue size just below the chart baseline.
    cv::Point caption(text_x, options_.frame_height - kBorder / 3 - 10);
    cv::putText(canvas,
                base::StringPrintf("%02d", queue_size),
                caption,
                cv::FONT_HERSHEY_SIMPLEX,
                0.5,
                CV_RGB(0, 0, 0),
                2);

    // Sink name underneath the queue size.
    caption.y += 15;
    cv::putText(canvas,
                sinks_[bin].second,
                caption,
                cv::FONT_HERSHEY_SIMPLEX,
                0.4,
                CV_RGB(0, 0, 0),
                1);

    // Min-tree rate along the top of the frame.
    caption.y = kBorder / 3;
    cv::putText(canvas,
                base::StringPrintf("%3.1f", sinks_[bin].first->MinTreeRate()),
                caption,
                cv::FONT_HERSHEY_SIMPLEX,
                0.4,
                CV_RGB(0, 0, 0),
                1);
  }

  // Uptime (seconds) since construction, top-right corner.
  const boost::posix_time::ptime now =
      boost::posix_time::microsec_clock::local_time();
  const float secs_passed = boost::posix_time::time_period(
      start_time_, now).length().total_milliseconds() * 1.e-3f;
  cv::putText(canvas,
              base::StringPrintf("up: %5.1f", secs_passed),
              cv::Point(options_.frame_width - 75, kBorder / 3),
              cv::FONT_HERSHEY_SIMPLEX,
              0.4,
              CV_RGB(0, 0, 0),
              1);

  // Hand the rendered frame downstream.
  input->push_back(stats_frame);
  output->push_back(input);
}
Exemplo n.º 27
0
// Adds sub-pixel resolution EdgeOffsets for the outline if the supplied
// pix is 8-bit. Does nothing otherwise.
// Operation: Consider the following near-horizontal line:
// _________
//          |________
//                   |________
// At *every* position along this line, the gradient direction will be close
// to vertical. Extrapoaltion/interpolation of the position of the threshold
// that was used to binarize the image gives a more precise vertical position
// for each horizontal step, and the conflict in step direction and gradient
// direction can be used to ignore the vertical steps.
void C_OUTLINE::ComputeEdgeOffsets(int threshold, Pix* pix) {
  if (pixGetDepth(pix) != 8) return;
  const l_uint32* data = pixGetData(pix);
  int wpl = pixGetWpl(pix);
  int width = pixGetWidth(pix);
  int height = pixGetHeight(pix);
  // NOTE(review): COUT_INVERSE appears to mark inverse (white-on-black)
  // outlines; it flips the expected sign of the grey difference below.
  bool negative = flag(COUT_INVERSE);
  // Replace any previous offsets with a fresh array, one entry per step.
  delete [] offsets;
  offsets = new EdgeOffset[stepcount];
  ICOORD pos = start;
  ICOORD prev_gradient;
  // Note: pixel rows are addressed top-down, hence the height - y flips.
  ComputeGradient(data, wpl, pos.x(), height - pos.y(), width, height,
                  &prev_gradient);
  for (int s = 0; s < stepcount; ++s) {
    ICOORD step_vec = step(s);
    TPOINT pt1(pos);
    pos += step_vec;
    TPOINT pt2(pos);
    ICOORD next_gradient;
    ComputeGradient(data, wpl, pos.x(), height - pos.y(), width, height,
                    &next_gradient);
    // Use the sum of the prev and next as the working gradient.
    ICOORD gradient = prev_gradient + next_gradient;
    // best_diff will be manipulated to be always positive.
    int best_diff = 0;
    // offset will be the extrapolation of the location of the greyscale
    // threshold from the edge with the largest difference, relative to the
    // location of the binary edge.
    int offset = 0;
    // A horizontal step is only trusted when the gradient is dominantly
    // vertical (and vice versa); otherwise the step is ignored entirely.
    if (pt1.y == pt2.y && abs(gradient.y()) * 2 >= abs(gradient.x())) {
      // Horizontal step. diff_sign == 1 indicates black above.
      int diff_sign = (pt1.x > pt2.x) == negative ? 1 : -1;
      int x = MIN(pt1.x, pt2.x);
      int y = height - pt1.y;
      int best_sum = 0;
      int best_y = y;
      EvaluateVerticalDiff(data, wpl, diff_sign, x, y, height,
                           &best_diff, &best_sum, &best_y);
      // Find the strongest edge: scan away from y in both directions while
      // the vertical difference keeps qualifying.
      int test_y = y;
      do {
        ++test_y;
      } while (EvaluateVerticalDiff(data, wpl, diff_sign, x, test_y, height,
                                    &best_diff, &best_sum, &best_y));
      test_y = y;
      do {
        --test_y;
      } while (EvaluateVerticalDiff(data, wpl, diff_sign, x, test_y, height,
                                    &best_diff, &best_sum, &best_y));
      // Extrapolate from the strongest edge: distance of the threshold from
      // the edge midpoint, plus the whole-pixel displacement to best_y,
      // everything scaled by best_diff (the stored pixel_diff denominator).
      offset = diff_sign * (best_sum / 2 - threshold) +
          (y - best_y) * best_diff;
    } else if (pt1.x == pt2.x && abs(gradient.x()) * 2 >= abs(gradient.y())) {
      // Vertical step. diff_sign == 1 indicates black on the left.
      int diff_sign = (pt1.y > pt2.y) == negative ? 1 : -1;
      int x = pt1.x;
      int y = height - MAX(pt1.y, pt2.y);
      const l_uint32* line = pixGetData(pix) + y * wpl;
      int best_sum = 0;
      int best_x = x;
      EvaluateHorizontalDiff(line, diff_sign, x, width,
                             &best_diff, &best_sum, &best_x);
      // Find the strongest edge (horizontal scan, mirror of the case above).
      int test_x = x;
      do {
        ++test_x;
      } while (EvaluateHorizontalDiff(line, diff_sign, test_x, width,
                                      &best_diff, &best_sum, &best_x));
      test_x = x;
      do {
        --test_x;
      } while (EvaluateHorizontalDiff(line, diff_sign, test_x, width,
                                      &best_diff, &best_sum, &best_x));
      offset = diff_sign * (threshold - best_sum / 2) +
          (best_x - x) * best_diff;
    }
    // Presumably offset_numerator / pixel_diff is the sub-pixel offset;
    // confirm against the EdgeOffset consumers.
    offsets[s].offset_numerator =
        static_cast<inT8>(ClipToRange(offset, -MAX_INT8, MAX_INT8));
    offsets[s].pixel_diff = static_cast<uinT8>(ClipToRange(best_diff, 0 ,
                                                           MAX_UINT8));
    if (negative) gradient = -gradient;
    // Compute gradient angle quantized to 256 directions, rotated by 64 (pi/2)
    // to convert from gradient direction to edge direction.
    offsets[s].direction =
        Modulo(FCOORD::binary_angle_plus_pi(gradient.angle()) + 64, 256);
    prev_gradient = next_gradient;
  }
}
Exemplo n.º 28
0
// Builds a VTK line set from the given 2D contours (placed at z = 0) and
// hands it to the 3D display routine. Each contour is treated as closed
// (last point connects back to the first). Consecutive edges alternate
// between red and blue so individual segments are distinguishable, and
// edge index 150 of a contour is highlighted in cyan.
void BaseVH::displayContour3D( std::vector< std::vector< cv::Point2f > > contours )
{
  vtkSmartPointer< vtkPolyData > edgePolyData = vtkSmartPointer< vtkPolyData >::New();
  vtkSmartPointer< vtkUnsignedCharArray > colors = vtkSmartPointer< vtkUnsignedCharArray >::New();

  colors->SetName("colors");
  colors->SetNumberOfComponents(3);

  // One edge per contour point, since every contour wraps around.
  int totalEdges = 0;
  for( int ci = 0; ci < contours.size(); ci++ )
  {
    totalEdges += contours[ ci ].size();
  }

  vtkSmartPointer< vtkPoints > points = vtkSmartPointer< vtkPoints >::New();
  points->Allocate( 2 * totalEdges );   // two endpoints per edge
  edgePolyData->Allocate( totalEdges ); // one VTK_LINE cell per edge

  vtkSmartPointer< vtkIdList > cell = vtkSmartPointer< vtkIdList >::New();

  // First pass: pre-compute per-edge colors. The red/blue toggle deliberately
  // carries over from one contour to the next.
  std::vector< std::vector< cv::Vec3f > > edgeColors( contours.size() );
  bool useBlue = false;

  for( int ci = 0; ci < contours.size(); ci++ )
  {
    for( int ei = 0; ei < contours[ ci ].size(); ei++ )
    {
      if( ei == 150 )
      {
        // Highlighted edge; note it does not flip the toggle.
        edgeColors[ ci ].push_back( cv::Vec3f( 0 , 255 , 255 ) );
        continue;
      }

      if( useBlue )
        edgeColors[ ci ].push_back( cv::Vec3f( 0 , 0 , 255 ) );
      else
        edgeColors[ ci ].push_back( cv::Vec3f( 255 , 0 , 0 ) );

      useBlue = !useBlue;
    }
  }

  // Second pass: emit the geometry and the matching color per cell.
  for( int ci = 0; ci < contours.size(); ci++ )
  {
    for( int ei = 0; ei < contours[ ci ].size(); ei++ )
    {
      const cv::Point2f &startPt = contours[ ci ][ ei ];
      // Wrap around so the final edge closes the contour.
      const cv::Point2f &endPt = contours[ ci ][ ( ei + 1 ) % contours[ ci ].size() ];

      Eigen::Vector3d p1( startPt.x , startPt.y , 0 );
      Eigen::Vector3d p2( endPt.x , endPt.y , 0 );

      int id1 = points->InsertNextPoint( p1.data() );
      int id2 = points->InsertNextPoint( p2.data() );

      cell->Reset();
      cell->InsertNextId( id1 );
      cell->InsertNextId( id2 );
      edgePolyData->InsertNextCell( VTK_LINE , cell );

      const unsigned char rgb[] = { (unsigned char) edgeColors[ ci ][ ei ][ 0 ] ,
                                    (unsigned char) edgeColors[ ci ][ ei ][ 1 ] ,
                                    (unsigned char) edgeColors[ ci ][ ei ][ 2 ] };

      colors->InsertNextTupleValue( rgb );
    }
  }

  edgePolyData->SetPoints( points );
  edgePolyData->GetCellData()->SetScalars( colors );

  tr::Display3DRoutines::displayPolyData( edgePolyData );
}
Exemplo n.º 29
0
  void imageCallback(
      const sensor_msgs::ImageConstPtr& l_image_msg,
      const sensor_msgs::ImageConstPtr& r_image_msg,
      const sensor_msgs::CameraInfoConstPtr& l_info_msg,
      const sensor_msgs::CameraInfoConstPtr& r_info_msg)
  {
 
    bool first_run = false;
    // create odometer if not exists
    if (!visual_odometer_)
    {
      first_run = true;
#if(DEBUG)
      cv_bridge::CvImageConstPtr l_cv_ptr, r_cv_ptr;
      l_cv_ptr = cv_bridge::toCvShare(l_image_msg, sensor_msgs::image_encodings::MONO8);
      r_cv_ptr = cv_bridge::toCvShare(r_image_msg, sensor_msgs::image_encodings::MONO8);
      last_l_image_ = l_cv_ptr->image;
      last_r_image_ = r_cv_ptr->image;
#endif
      initOdometer(l_info_msg, r_info_msg);
    }

    // convert images if necessary
    uint8_t *l_image_data, *r_image_data;
    int l_step, r_step;
    cv_bridge::CvImageConstPtr l_cv_ptr, r_cv_ptr;
    l_cv_ptr = cv_bridge::toCvShare(l_image_msg, sensor_msgs::image_encodings::MONO8);
    l_image_data = l_cv_ptr->image.data;
    l_step = l_cv_ptr->image.step[0];
    r_cv_ptr = cv_bridge::toCvShare(r_image_msg, sensor_msgs::image_encodings::MONO8);
    r_image_data = r_cv_ptr->image.data;
    r_step = r_cv_ptr->image.step[0];

    ROS_ASSERT(l_step == r_step);
    ROS_ASSERT(l_image_msg->width == r_image_msg->width);
    ROS_ASSERT(l_image_msg->height == r_image_msg->height);

    int32_t dims[] = {l_image_msg->width, l_image_msg->height, l_step};
    // on first run or when odometer got lost, only feed the odometer with 
    // images without retrieving data
    if (first_run || got_lost_)
    {
      visual_odometer_->process(l_image_data, r_image_data, dims);
      got_lost_ = false;
    }
    else
    {
      bool success = visual_odometer_->process(
          l_image_data, r_image_data, dims, last_motion_small_);
      if (success)
      {
        Matrix motion = Matrix::inv(visual_odometer_->getMotion());
        ROS_DEBUG("Found %i matches with %i inliers.", 
                  visual_odometer_->getNumberOfMatches(),
                  visual_odometer_->getNumberOfInliers());
        ROS_DEBUG_STREAM("libviso2 returned the following motion:\n" << motion);
        Matrix camera_motion;
        // if image was replaced due to small motion we have to subtract the 
        // last motion to get the increment
        if (last_motion_small_)
        {
          camera_motion = Matrix::inv(reference_motion_) * motion;
        }
        else
        {
          // image was not replaced, report full motion from odometer
          camera_motion = motion;
        }
        reference_motion_ = motion; // store last motion as reference
        std::cout<< camera_motion << "\n" <<std::endl;
#if (USE_MOVEMENT_CONSTRAIN)
        double deltaRoll = atan2(camera_motion.val[2][1], camera_motion.val[2][2]);
        double deltaPitch = asin(-camera_motion.val[2][0]);
        double deltaYaw = atan2(camera_motion.val[1][0], camera_motion.val[0][0]);
        double tanRoll = camera_motion.val[2][1] / camera_motion.val[2][2];
        double tanPitch = tan(deltaPitch);
        printf("deltaroll is %lf\n", deltaRoll);
        printf("deltaPitch is %lf\n", deltaPitch);
        printf("deltaYaw is %lf\n", deltaYaw);
        double deltaX = camera_motion.val[0][3];
        double deltaY = camera_motion.val[1][3];
        double deltaZ = camera_motion.val[2][3];
        printf("dx %lf, dy %lf, dz %lf, tanRoll %lf tanPitch %lf\n",deltaX, deltaY, deltaZ, tanRoll, tanPitch);
        if(deltaY > 0 && deltaY > tanRoll * deltaZ)
        {
           camera_motion.val[1][3] = tanRoll * deltaZ;
          //printf("dy %lf deltaY, dynew %lf\n", deltaY,camera_motion.val[2][3]);
        }
        else if(deltaY < 0 && deltaY < -tanRoll * deltaZ)
        {
           camera_motion.val[1][3] = -tanRoll * deltaZ;
          //printf("dy %lf deltaY, dynew %lf\n", deltaY,camera_motion.val[2][3]);
        }

        /*if(deltaX > 0 && deltaX > tanPitch * deltaZ)
        {
           camera_motion.val[0][3] = tanPitch * deltaZ;
          printf("dx %lf, dxnew %lf\n", deltaX,camera_motion.val[1][3]);
        }
        else if(deltaX < 0 && deltaX < -tanPitch * deltaZ)
        {
           camera_motion.val[0][3] = -tanPitch * deltaZ;
          printf("dx %lf, dxnew %lf\n", deltaX,camera_motion.val[1][3]);
        }*/
        /*
        if(deltaPitch > 0)
        {
          deltaPitch = deltaPitch+fabs(deltaRoll)+fabs(deltaYaw);
        }
        else
        {
          deltaPitch = deltaPitch-fabs(deltaRoll)-fabs(deltaYaw);
        }*/
        deltaPitch = deltaPitch+deltaYaw;
#endif
        // calculate current feature flow
        std::vector<Matcher::p_match> matches = visual_odometer_->getMatches();
        std::vector<int> inlier_indices = visual_odometer_->getInlierIndices();
#if(DEBUG)
        cv::Mat img1 = l_cv_ptr->image;
        cv::Mat img2 = r_cv_ptr->image;
        cv::Size size(last_l_image_.cols, last_l_image_.rows+last_r_image_.rows);
        cv::Mat outImg(size,CV_MAKETYPE(img1.depth(), 3));
        cv::Mat outImg1 = outImg( cv::Rect(0, 0, last_l_image_.cols, last_l_image_.rows) );
        cv::Mat outImg2 = outImg( cv::Rect(0, last_l_image_.rows, img1.cols, img1.rows) );
        
        if( last_l_image_.type() == CV_8U )
          cvtColor( last_l_image_, outImg1, CV_GRAY2BGR );
        else
          last_l_image_.copyTo( outImg1 );
        
        if( img1.type() == CV_8U )
          cvtColor( img1, outImg2, CV_GRAY2BGR );
        else
          img1.copyTo( outImg2 );
        for (size_t i = 0; i < matches.size(); ++i)
        {
          cv::Point pt1(matches[i].u1p,matches[i].v1p);
          cv::Point pt2(matches[i].u1c,matches[i].v1c + last_l_image_.rows);
          if(pt1.y > 239)
            cv::line(outImg, pt1, pt2, cv::Scalar(255,0,0));
          //else
            //cv::line(outImg, pt1, pt2, cv::Scalar(0,255,0));
        }
        cv::imshow("matching image", outImg);
    cv::waitKey(10);
        



        last_l_image_ = img1;
        last_r_image_ = img2;
#endif
        double feature_flow = computeFeatureFlow(matches);
        last_motion_small_ = (feature_flow < motion_threshold_);
        ROS_DEBUG_STREAM("Feature flow is " << feature_flow 
            << ", marking last motion as " 
            << (last_motion_small_ ? "small." : "normal."));

        btMatrix3x3 rot_mat(
          cos(deltaPitch), 0, sin(deltaPitch),
          0,               1,           0, 
          -sin(deltaPitch), 0, cos(deltaPitch));
        /*btMatrix3x3 rot_mat(
          camera_motion.val[0][0], camera_motion.val[0][1], camera_motion.val[0][2],
          camera_motion.val[1][0], camera_motion.val[1][1], camera_motion.val[1][2],
          camera_motion.val[2][0], camera_motion.val[2][1], camera_motion.val[2][2]);*/
        btVector3 t(camera_motion.val[0][3], camera_motion.val[1][3], camera_motion.val[2][3]);
   //rotation
   /*double delta_yaw = 0;
   double delta_pitch = 0;
   double delta_roll = 0;
   delta_yaw = delta_yaw*M_PI/180.0;
   delta_pitch = delta_pitch*M_PI/180.0;
   delta_roll = delta_roll*M_PI/180.0;
   //btMatrix3x3 initialTrans;
    Eigen::Matrix4f initialTrans = Eigen::Matrix4f::Identity();
   initialTrans(0,0) = cos(delta_pitch)*cos(delta_yaw); 
   initialTrans(0,1) = -cos(delta_roll)*sin(delta_yaw) + sin(delta_roll)*sin(delta_pitch)*cos(delta_yaw);
   initialTrans(0,2) = sin(delta_roll)*sin(delta_yaw) + cos(delta_roll)*sin(delta_pitch)*cos(delta_yaw);
   initialTrans(1,0) = cos(delta_pitch)*sin(delta_yaw);
   initialTrans(1,1) = cos(delta_roll)*cos(delta_yaw) + sin(delta_roll)*sin(delta_pitch)*sin(delta_yaw);
   initialTrans(1,2) = -sin(delta_roll)*cos(delta_yaw) + cos(delta_roll)*sin(delta_pitch)*sin(delta_yaw);
   initialTrans(2,0) = -sin(delta_pitch);
   initialTrans(2,1) = sin(delta_roll)*cos(delta_pitch);
   initialTrans(2,2) = cos(delta_roll)*cos(delta_pitch);
        btMatrix3x3 rot_mat(
          initialTrans(0,0), initialTrans(0,1), initialTrans(0,2),
          initialTrans(1,0), initialTrans(1,1), initialTrans(1,2),
          initialTrans(2,0), initialTrans(2,1), initialTrans(2,2));
        btVector3 t(0.0, 0.00, 0.01);*/
        tf::Transform delta_transform(rot_mat, t);

        setPoseCovariance(STANDARD_POSE_COVARIANCE);
        setTwistCovariance(STANDARD_TWIST_COVARIANCE);

        integrateAndPublish(delta_transform, l_image_msg->header.stamp);

        if (point_cloud_pub_.getNumSubscribers() > 0)
        {
          computeAndPublishPointCloud(l_info_msg, l_image_msg, r_info_msg, matches, inlier_indices);
        }
      }
      else
      {
        setPoseCovariance(BAD_COVARIANCE);
        setTwistCovariance(BAD_COVARIANCE);
        tf::Transform delta_transform;
        delta_transform.setIdentity();
        integrateAndPublish(delta_transform, l_image_msg->header.stamp);

        ROS_DEBUG("Call to VisualOdometryStereo::process() failed.");
        ROS_WARN_THROTTLE(1.0, "Visual Odometer got lost!");
        got_lost_ = true;
      }
    }
  }
int mitkSymmetricForcesDemonsRegistrationTest(int /*argc*/, char* /*argv*/[])
{
  //Create Image out of nowhere
  mitk::Image::Pointer image;
  mitk::PixelType pt(typeid(int));
  unsigned int dim[]={100,100,20};

  std::cout << "Creating image: ";
  image = mitk::Image::New();
  //image->DebugOn();
  image->Initialize(mitk::PixelType(typeid(int)), 3, dim);
  int *p = (int*)image->GetData();

  int size = dim[0]*dim[1]*dim[2];
  int i;
  for(i=0; i<size; ++i, ++p)
    *p=i;
  std::cout<<"[PASSED]"<<std::endl;

  //Create second Image out of nowhere
  mitk::Image::Pointer image2;
  mitk::PixelType pt2(typeid(int));
  unsigned int dim2[]={100,100,20};

  std::cout << "Creating image: ";
  image2 = mitk::Image::New();
  //image->DebugOn();
  image2->Initialize(mitk::PixelType(typeid(int)), 3, dim2);
  int *p2 = (int*)image2->GetData();

  int size2 = dim2[0]*dim2[1]*dim2[2];
  int i2;
  for(i2=0; i2<size2; ++i2, ++p2)
    *p2=i2;
  std::cout<<"[PASSED]"<<std::endl;

  std::cout << "Constructor: ";
  mitk::SymmetricForcesDemonsRegistration::Pointer symmetricForcesDemonsRegistration = mitk::SymmetricForcesDemonsRegistration::New(); 
  std::cout<<"[PASSED]"<<std::endl;

  std::cout << "Set Reference Image: ";
  symmetricForcesDemonsRegistration->SetReferenceImage(image);
  std::cout<<"[PASSED]"<<std::endl;

  std::cout << "Set Moving Image: ";
  symmetricForcesDemonsRegistration->SetInput(image2);
  std::cout<<"[PASSED]"<<std::endl;

  std::cout << "Set number of iterations: ";
  symmetricForcesDemonsRegistration->SetNumberOfIterations(5);
  std::cout<<"[PASSED]"<<std::endl;

  std::cout << "Set standard deviation: ";
  symmetricForcesDemonsRegistration->SetStandardDeviation(1.0);
  std::cout<<"[PASSED]"<<std::endl;

  std::cout << "Set save deformation field: ";
  symmetricForcesDemonsRegistration->SetSaveDeformationField(false);
  std::cout<<"[PASSED]"<<std::endl;

  std::cout << "Set deformation field file name: ";
  symmetricForcesDemonsRegistration->SetDeformationFieldFileName("TestField.mhd");
  std::cout<<"[PASSED]"<<std::endl;

  std::cout << "Set save result image: ";
  symmetricForcesDemonsRegistration->SetSaveResult(false);
  std::cout<<"[PASSED]"<<std::endl;

  std::cout << "Set result image file name: ";
  symmetricForcesDemonsRegistration->SetResultFileName("TestResult.mhd");
  std::cout<<"[PASSED]"<<std::endl;

  std::cout << "Perform registration: ";
  symmetricForcesDemonsRegistration->Update();
  std::cout<<"[PASSED]"<<std::endl;

  std::cout << "Get the result image: ";
  mitk::Image::Pointer resultImage = symmetricForcesDemonsRegistration->GetOutput();
  std::cout<<"[PASSED]"<<std::endl;

  std::cout << "Get the deformation field: ";
  itk::Image<class itk::Vector<float, 3>,3>::Pointer deformationField = symmetricForcesDemonsRegistration->GetDeformationField();
  std::cout<<"[PASSED]"<<std::endl;

  return EXIT_SUCCESS;
}