Example #1
0
int 
main (int argc, char ** argv)
{
  // First command-line argument (device id or option), empty if absent.
  std::string arg = (argc > 1) ? std::string (argv[1]) : std::string ();

  if (arg == "-h" || arg == "--help")
  {
    usage (argv);
    return 1;
  }

  pcl::OpenNIGrabber grabber ("");
  // Pick the point type according to what the device can stream.
  const bool has_rgb =
    grabber.providesCallback<pcl::OpenNIGrabber::sig_cb_openni_point_cloud_rgb> ();
  if (has_rgb)
  {
    PCL_INFO ("PointXYZRGB mode enabled.\n");
    OpenNIIntegralImageNormalEstimation<pcl::PointXYZRGB> v ("");
    v.run ();
  }
  else
  {
    PCL_INFO ("PointXYZ mode enabled.\n");
    OpenNIIntegralImageNormalEstimation<pcl::PointXYZ> v ("");
    v.run ();
  }

  return (0);
}
Example #2
0
int 
main (int argc, char ** argv)
{
  // Print usage and stop when help is requested.  Previously execution
  // fell through after usage() and started the grabber anyway.
  if (pcl::console::find_argument (argc, argv, "-h") != -1)
  {
    usage (argv);
    return 1;
  }

  // Pass-through filter range on 'field_name' (default: z in [0, 5]).
  float min_v = 0.0f, max_v = 5.0f;
  pcl::console::parse_2x_arguments (argc, argv, "-minmax", min_v, max_v);
  std::string field_name ("z");
  pcl::console::parse_argument (argc, argv, "-field", field_name);
  PCL_INFO ("Filtering data on %s between %f -> %f.\n", field_name.c_str (), min_v, max_v);

  // Voxel grid leaf size, one value per axis.
  float leaf_x = 0.01f, leaf_y = 0.01f, leaf_z = 0.01f;
  pcl::console::parse_3x_arguments (argc, argv, "-leaf", leaf_x, leaf_y, leaf_z);
  PCL_INFO ("Using %f, %f, %f as a leaf size for VoxelGrid.\n", leaf_x, leaf_y, leaf_z);

  // Choose the point type according to what the device can stream.
  pcl::OpenNIGrabber grabber ("");
  if (grabber.providesCallback<pcl::OpenNIGrabber::sig_cb_openni_point_cloud_rgba> ())
  {
    OpenNIVoxelGrid<pcl::PointXYZRGBA> v ("", field_name, min_v, max_v, leaf_x, leaf_y, leaf_z);
    v.run ();
  }
  else
  {
    OpenNIVoxelGrid<pcl::PointXYZ> v ("", field_name, min_v, max_v, leaf_x, leaf_y, leaf_z);
    v.run ();
  }

  return (0);
}
int
main (int argc, char ** argv)
{
    // A device id argument is mandatory.
    if (argc < 2)
    {
        usage (argv);
        return 1;
    }

    const std::string device (argv[1]);

    if (device == "-h" || device == "--help")
    {
        usage (argv);
        return 1;
    }

    // Plane segmentation distance threshold (overridable via -thresh).
    double threshold = 0.05;
    pcl::console::parse_argument (argc, argv, "-thresh", threshold);

    // Choose the point type according to what the device can stream.
    pcl::OpenNIGrabber grabber (device);
    const bool has_rgba =
        grabber.providesCallback<pcl::OpenNIGrabber::sig_cb_openni_point_cloud_rgba> ();
    if (has_rgba)
    {
        OpenNIPlanarSegmentation<pcl::PointXYZRGBA> v (device, threshold);
        v.run ();
    }
    else
    {
        OpenNIPlanarSegmentation<pcl::PointXYZ> v (device, threshold);
        v.run ();
    }

    return (0);
}
int 
main (int argc, char ** argv)
{
  // A device id argument is mandatory.
  if (argc < 2)
  {
    usage (argv);
    return 1;
  }

  const std::string device_id (argv[1]);

  if (device_id == "-h" || device_id == "--help")
  {
    usage (argv);
    return 1;
  }

  // Uniform sampling leaf size (overridable via -leaf).
  double leaf_res = 0.05;
  pcl::console::parse_argument (argc, argv, "-leaf", leaf_res);
  PCL_INFO ("Using %f as a leaf size for UniformSampling.\n", leaf_res);

  // NOTE(review): this grabber instance is never handed to the viewer
  // below; presumably constructing it validates the device id — confirm
  // before removing it.
  pcl::OpenNIGrabber grabber (device_id);
  OpenNIUniformSampling v (device_id, leaf_res);
  v.run ();

  return (0);
}
Example #5
0
MetadataLookupList MetadataDownload::runGrabber(QString cmd, QStringList args,
                                                MetadataLookup* lookup,
                                                bool passseas)
{
    // Run the grabber 'cmd' with 'args', parse its XML output and return
    // one MetadataLookup per <item> element (empty list on no output).
    MythSystem grabber(cmd, args, kMSNoRunShell | kMSStdOut | kMSBuffered);
    MetadataLookupList list;

    LOG(VB_GENERAL, LOG_INFO, QString("Running Grabber: %1 %2")
        .arg(cmd).arg(args.join(" ")));

    grabber.Run();
    grabber.Wait();

    QByteArray result = grabber.ReadAll();
    if (result.isEmpty())
        return list;

    QDomDocument doc;
    doc.setContent(result, true);
    QDomElement root = doc.documentElement();

    // Each <item> child of the document root is one lookup result.
    for (QDomElement item = root.firstChildElement("item");
         !item.isNull();
         item = item.nextSiblingElement("item"))
    {
        list.append(ParseMetadataItem(item, lookup, passseas));
    }

    return list;
}
Example #6
0
MetaGrabberScript::MetaGrabberScript(const QString &path) :
    m_type(kGrabberInvalid), m_version(0.0), m_valid(false)
{
    // Probe the grabber script at 'path' with "-v" and parse its version
    // XML; m_valid stays false if any step fails.
    if (path.isEmpty())
        return;

    // Relative paths are resolved against the shared metadata directory.
    m_fullcommand = path;
    if (path[0] != '/')
        m_fullcommand.prepend(QString("%1metadata").arg(GetShareDir()));

    // NOTE(review): the probe runs the raw 'path' (via the shell), not
    // m_fullcommand — verify that a relative grabber path is still found.
    MythSystemLegacy grabber(path, QStringList() << "-v",
                             kMSRunShell | kMSStdOut);
    grabber.Run();
    if (grabber.Wait() != GENERIC_EXIT_OK)
        return; // script failed to run

    QByteArray result = grabber.ReadAll();
    if (result.isEmpty())
        return; // script produced no output

    QDomDocument doc;
    doc.setContent(result, true);
    QDomElement root = doc.documentElement();
    if (root.isNull())
        return; // output was not valid XML

    ParseGrabberVersion(root);
    if (m_name.isEmpty())
        return; // XML lacked the expected fields

    m_valid = true;
}
Example #7
0
void MainWindowActions::shootOneLayout(int index)
{
	// Grab an off-screen image of the layout widget at 'index' and save it
	// with a "_layout<index>" suffix.
	QWidget* widget = mServices->view()->getLayoutWidget(index);
	ViewCollectionWidget* vcWidget = dynamic_cast<ViewCollectionWidget*>(widget);
	// Guard the downcast like grabSecondaryLayout() does: previously a null
	// result was passed straight into the image writer.
	if (!vcWidget)
		return;

	ViewCollectionImageWriter grabber(vcWidget);
	QImage pm = ViewCollectionImageWriter::vtkImageData2QImage(grabber.grab());
	mScreenShotWriter->save(pm, QString("_layout%1").arg(index));
}
QImage ScreenVideoProvider::grabSecondaryLayout()
{
    // Render the secondary layout into a QImage; a default-constructed
    // (null) image is returned when no secondary layout exists.
    ViewCollectionWidget* layout = this->getSecondaryLayoutWidget();
    if (!layout)
        return QImage();

    ViewCollectionImageWriter writer(layout);
    QImage image = ViewCollectionImageWriter::vtkImageData2QImage(writer.grab());
    return image;
}
void WiimoteSettingsPage::AddAction() {
  // Release the wiimotedev interface while the modal grabber dialog
  // listens for the shortcut itself, then re-enable it afterwards.
  emit SetWiimotedevInterfaceActived(false);

  WiimoteShortcutGrabber grabber(0, ui_->wiimotedev_device->value(), this);
  connect(&grabber, SIGNAL(AddShortcut(quint64, quint32)), this,
          SLOT(AddShortcut(quint64, quint32)), Qt::QueuedConnection);
  grabber.exec();

  emit SetWiimotedevInterfaceActived(true);
  ui_->list->sortItems(1, Qt::AscendingOrder);
}
Example #10
0
void MSOptionPopupMenu::buttonPress(const XEvent *event_)
{
  // Record when the menu was last shown: zero if this menu already owns
  // the grab, otherwise the timestamp of this button press.  Then let
  // the base class process the press.
  if (grabber() != this)
    lastShowTime(event_->xbutton.time);
  else
    lastShowTime(0);
  MSMenu::buttonPress(event_);
}
int
main (int argc, char ** argv)
{
  if (argc < 2)
  {
    usage (argv);
    return 1;
  }

  std::string arg (argv[1]);

  if (arg == "--help" || arg == "-h")
  {
    usage (argv);
    return 1;
  }

  unsigned char red = 0, green = 0, blue = 0;
  int rr, gg, bb;
  // Radius of the RGB-space sphere to keep.  sqrt(3 * 255^2) ~= 442
  // covers every color.  This was previously an unsigned char, which
  // silently truncated 442 to 186 and did NOT pass "all colors".
  unsigned int radius = 442; // all colors!
  int rad;

  if (pcl::console::parse_3x_arguments (argc, argv, "-rgb", rr, gg, bb, true) != -1 )
  {
    cout << "-rgb present" << endl;
    int idx = pcl::console::parse_argument (argc, argv, "-radius", rad);
    if (idx != -1)
    {
      if (rad > 0)
        radius = (unsigned int) rad;
    }
    // Clamp each channel to a valid byte before narrowing.
    if (rr >= 0 && rr < 256)
      red = (unsigned char) rr;
    if (gg >= 0 && gg < 256)
      green = (unsigned char) gg;
    if (bb >= 0 && bb < 256)
      blue = (unsigned char) bb;
  }
  
  pcl::OpenNIGrabber grabber (arg);
  
  // The passthrough demo needs a color stream.
  if (grabber.providesCallback<pcl::OpenNIGrabber::sig_cb_openni_point_cloud_rgba> ())
  {
    OpenNIPassthrough<pcl::PointXYZRGBA> v (grabber, red, green, blue, radius);
    v.run ();
  }
  else
  {
    cout << "device does not provide rgb stream" << endl;
  }

  return (0);
}
Example #12
0
bool MetaGrabberScript::Test(void)
{
    // Only a successfully-parsed grabber with a known command can be tested.
    if (!m_valid || m_fullcommand.isEmpty())
        return false;

    // Ask the script to self-test via its "-t" option and report whether
    // it exited cleanly.
    QStringList args;
    args << "-t";
    MythSystemLegacy grabber(m_fullcommand, args, kMSStdOut);
    grabber.Run();

    return grabber.Wait() == GENERIC_EXIT_OK;
}
Example #13
0
int main()
	{
	// Grab the desktop and write it out as test.png (8-bit RGB PNG).
	PngCRC crc;
	// Fixed 8-byte PNG file signature.
	unsigned char signature[8]={137,80,78,71,13,10,26,10};
	
	// IEND: empty chunk terminating the file.
	char IEND[CHUNK_BASE_SIZE];
	Chunk_sizeSet(IEND,0);
	Chunk_IDSet(IEND,0x49454e44);
	Chunk_CRCSet(IEND,crc,0);
	
	FrameGrabber grabber(GetDesktopWindow());
	Image img(grabber);
	grabber.grab();
	
	// IHDR: header describing an 8-bit, non-interlaced RGB image.
	char IHDR[CHUNK_BASE_SIZE+IHDR_SIZE];
	Chunk_sizeSet(IHDR,IHDR_SIZE);
	Chunk_IDSet(IHDR,0x49484452);
	IHDR_widthSet(IHDR,grabber.widthGet());
	IHDR_heightSet(IHDR,grabber.heightGet());
	IHDR_bitDepthSet(IHDR,8);
	IHDR_colorTypeSet(IHDR,IHDR_COLORTYPE_RGB);
	IHDR_compressionMethodSet(IHDR,0);
	IHDR_filterMethodSet(IHDR,0);
	IHDR_interlaceMethodSet(IHDR,0);
	Chunk_CRCSet(IHDR,crc,IHDR_SIZE);	
	
	// One filter byte per row plus 3 bytes per pixel (+4 bytes of slack,
	// as in the original sizing).
	size_t size_uncompressed=4+(1+3*grabber.widthGet())*grabber.heightGet();
	char* img_png_uncompressed=(char*)malloc(size_uncompressed);
	char* IDAT=(char*)malloc(size_uncompressed+CHUNK_BASE_SIZE);
	// Fixed: allocation failures previously went unchecked.
	if(!img_png_uncompressed || !IDAT)
		{
		fprintf(stderr,"out of memory\n");
		free(img_png_uncompressed);
		free(IDAT);
		return 1;
		}
	filterApply(img.rowsGet(),img_png_uncompressed,grabber.widthGet(),grabber.heightGet());
	
	uint32_t size_idat=compress(img_png_uncompressed,IDAT+CHUNK_OFFSET_DATA,size_uncompressed);
	// Fixed: the uncompressed scratch buffer was previously leaked.
	free(img_png_uncompressed);
	Chunk_sizeSet(IDAT,size_idat);
	Chunk_IDSet(IDAT,0x49444154);
	printf("%u\n",size_idat);
	Chunk_CRCSet(IDAT,crc,size_idat);
	
	FILE* file_out=fopen("test.png","wb");
	// Fixed: a failed fopen previously fed NULL straight into fwrite.
	if(!file_out)
		{
		fprintf(stderr,"cannot open test.png for writing\n");
		free(IDAT);
		return 1;
		}
	fwrite(signature,1,sizeof(signature),file_out);
	fwrite(IHDR,1,sizeof(IHDR),file_out);
	// NOTE(review): only size_idat bytes of the IDAT buffer are written,
	// not size_idat+CHUNK_BASE_SIZE — confirm against the chunk layout
	// macros whether the header/CRC bytes are meant to be included.
	fwrite(IDAT,1,size_idat,file_out);
	fwrite(IEND,1,sizeof(IEND),file_out);
	free(IDAT);
	fclose(file_out);

	return 0;
	}
Example #14
0
bool MetadataDownload::runGrabberTest(const QString &grabberpath)
{
    // Run the grabber with "-t" (self test) and report whether it
    // exited with status 0.
    QStringList args;
    args.append("-t");

    MythSystem grabber(grabberpath, args, kMSNoRunShell | kMSStdOut | kMSBuffered);
    grabber.Run();

    return grabber.Wait() == 0;
}
int
main (int argc, char ** argv)
{
  if (argc < 2)
  {
    usage (argv);
    return 1;
  }

  std::string arg (argv[1]);

  if (arg == "--help" || arg == "-h")
  {
    usage (argv);
    return 1;
  }

  // Command line parsing
  double search_radius = default_search_radius;
  double sqr_gauss_param = default_sqr_gauss_param;
  bool sqr_gauss_param_set = true;
  int polynomial_order = default_polynomial_order;
  bool use_polynomial_fit = default_use_polynomial_fit;

  pcl::console::parse_argument (argc, argv, "-search_radius", search_radius);
  if (pcl::console::parse_argument (argc, argv, "-sqr_gauss_param", sqr_gauss_param) == -1)
    sqr_gauss_param_set = false;
  // An explicitly supplied polynomial order implies polynomial fitting.
  // The test was previously inverted (== -1), which enabled fitting only
  // when the option was ABSENT.
  if (pcl::console::parse_argument (argc, argv, "-polynomial_order", polynomial_order) != -1 )
    use_polynomial_fit = true;
  pcl::console::parse_argument (argc, argv, "-use_polynomial_fit", use_polynomial_fit);

  // Choose the point type according to what the device can stream.
  pcl::OpenNIGrabber grabber (arg);
  if (grabber.providesCallback<pcl::OpenNIGrabber::sig_cb_openni_point_cloud_rgba> ())
  {
    OpenNISmoothing<pcl::PointXYZRGBA> v (search_radius, sqr_gauss_param_set, sqr_gauss_param, 
                                          use_polynomial_fit, polynomial_order, arg);
    v.run ();
  }
  else
  {
    OpenNISmoothing<pcl::PointXYZ> v (search_radius, sqr_gauss_param_set, sqr_gauss_param,
                                      use_polynomial_fit, polynomial_order, arg);
    v.run ();
  }

  return (0);
}
Example #16
0
int main()
{
	// Open the default OpenNI device in its default depth and image modes
	// and display the image stream in a viewer window.
	std::string device_id ("");
	pcl::OpenNIGrabber::Mode image_mode = pcl::OpenNIGrabber::OpenNI_Default_Mode;

	pcl::OpenNIGrabber grabber (device_id, pcl::OpenNIGrabber::OpenNI_Default_Mode, image_mode);

//	grabber.start();

	image_viewer v(grabber);

	v.run();

	// NOTE(review): this busy-wait keeps the process alive, presumably
	// because v.run() returns immediately — confirm, and consider a
	// sleeping wait instead of spinning a core at 100%.
	while (1)
	{
	}
	
	return 0;
}
Example #17
0
void Event::handle() {
    // Dispatch this X event.  Key presses may be consumed by the snapshot
    // hook; all other events go to the grabber first, then the handler.
    Handler* h = nil;
    if (rep()->xevent_.type == KeyPress) {
	if (ivoc_snapshot_ && (*ivoc_snapshot_)(this)) {
	    return;
	}
    } else {
	h = grabber();
    }
    if (h == nil) {
	h = handler();
    }
    if (h == nil) {
	return;
    }
    // Defer resource reclamation while the handler runs, and keep the
    // handler alive across its own callback.
    boolean b = Resource::defer(true);
    h->ref();
    h->event(*this);
    h->unref();
    Resource::flush();
    Resource::defer(b);
}
Example #18
0
// TODO
// using the MetadataLookup object as both argument input, and parsed output,
// is clumsy. break the inputs out into a separate object, and spawn a new
// MetadataLookup object in ParseMetadataItem, rather than requiring an
// existing one to reuse.
MetadataLookupList MetaGrabberScript::RunGrabber(const QStringList &args,
                        MetadataLookup *lookup, bool passseas)
{
    // Run this grabber script with 'args', parse its XML output, and
    // return one MetadataLookup per <item> element.  Returns an empty
    // list if the script fails or produces no output.
    MythSystemLegacy grabber(m_fullcommand, args, kMSStdOut);
    MetadataLookupList list;

    LOG(VB_GENERAL, LOG_INFO, QString("Running Grabber: %1 %2")
        .arg(m_fullcommand).arg(args.join(" ")));

    grabber.Run();
    if (grabber.Wait() != GENERIC_EXIT_OK)
        return list;

    QByteArray result = grabber.ReadAll();
    if (!result.isEmpty())
    {
        QDomDocument doc;
        doc.setContent(result, true);
        QDomElement root = doc.documentElement();
        QDomElement item = root.firstChildElement("item");

        while (!item.isNull())
        {
            MetadataLookup *tmp = ParseMetadataItem(item, lookup, passseas);
            // Prefix inetref/collectionref with this grabber's command so
            // refs from different grabbers cannot collide.
            tmp->SetInetref(QString("%1_%2").arg(m_command)
                                            .arg(tmp->GetInetref()));
            if (!tmp->GetCollectionref().isEmpty())
            {
                tmp->SetCollectionref(QString("%1_%2").arg(m_command)
                                .arg(tmp->GetCollectionref()));
            }
            list.append(tmp);
            // MetadataLookup is to be owned by the list
            tmp->DecrRef();
            item = item.nextSiblingElement("item");
        }
    }
    return list;
}
Example #19
0
int main (int argc, char ** argv)
{
  QApplication app(argc, argv);

  // Earlier revisions drove the demo from a sorted directory of PCD files:
  //   std::vector<std::string> pcd_files;
  //   (fill from a boost::filesystem::directory_iterator, then sort)
  //   pcl::PCDGrabber<pcl::PointXYZRGB> grabber(pcd_files, 5.0, false);
  // Kept here for reference.

  // Stream live frames from OpenNI device "#1" into the demo window.
  pcl::OpenNIGrabber grabber("#1");
  OrganizedSegmentationDemo seg_demo (grabber);
  seg_demo.show();
  return (app.exec());
}
Example #20
0
int
main (int argc, char ** argv)
{
  // First command-line argument, empty if absent.
  std::string arg;
  if (argc > 1)
    arg = argv[1];

  // Refuse to run on help request or when no device is connected.
  openni_wrapper::OpenNIDriver& driver = openni_wrapper::OpenNIDriver::getInstance ();
  if (arg == "-h" || arg == "--help" || driver.getNumberDevices () == 0)
  {
    usage (argv);
    return 1;
  }

  std::cout << "Press following keys to switch to the different integral image normal estimation methods:\n"
            << "<1> COVARIANCE_MATRIX method\n"
            << "<2> AVERAGE_3D_GRADIENT method\n"
            << "<3> AVERAGE_DEPTH_CHANGE method\n"
            << "<4> SIMPLE_3D_GRADIENT method\n"
            << "<Q,q> quit\n\n";

  // Pick the point type according to what the device can stream.
  pcl::OpenNIGrabber grabber ("");
  if (grabber.providesCallback<pcl::OpenNIGrabber::sig_cb_openni_point_cloud_rgba> ())
  {
    PCL_INFO ("PointXYZRGBA mode enabled.\n");
    OpenNIIntegralImageNormalEstimation<pcl::PointXYZRGBA> v ("");
    v.run ();
  }
  else
  {
    PCL_INFO ("PointXYZ mode enabled.\n");
    OpenNIIntegralImageNormalEstimation<pcl::PointXYZ> v ("");
    v.run ();
  }

  return (0);
}
Example #21
0
int main( int argc, char** argv )
{
    ::setenv( "PYLON_ROOT", STRINGIZED( BASLER_PYLON_DIR ), 0 );
    ::setenv( "GENICAM_ROOT_V2_1", STRINGIZED( BASLER_PYLON_GENICAM_DIR ), 0 );
    try
    {
        unsigned int id;
        std::string address;
        std::string setAttributes;
        unsigned int discard;
        boost::program_options::options_description description( "options" );
        unsigned int packet_size;
        unsigned int exposure;
        unsigned int gain;
        unsigned int offset_x;
        unsigned int offset_y;
        unsigned int width;
        unsigned int height;
        std::string frame_trigger;
        std::string line_trigger;
        unsigned int line_rate;
        double timeout_seconds;
        description.add_options()
            ( "help,h", "display help message" )
            ( "address", boost::program_options::value< std::string >( &address ), "camera ip address; default: connect to the first available camera" )
            ( "discard", "discard frames, if cannot keep up; same as --buffer=1" )
            ( "buffer", boost::program_options::value< unsigned int >( &discard )->default_value( 0 ), "maximum buffer size before discarding frames, default: unlimited" )
            ( "list-cameras", "output camera list and exit" )
            ( "fields,f", boost::program_options::value< std::string >( &options.fields )->default_value( "t,rows,cols,type" ), "header fields, possible values: t,rows,cols,type,size,counters" )
            ( "image-type", boost::program_options::value< std::string >( &options.type )->default_value( "3ub" ), "image type, e.g. '3ub'; also see --long-help for details" )
            ( "offset-x", boost::program_options::value< unsigned int >( &offset_x )->default_value( 0 ), "offset in pixels in the line" )
            ( "offset-y", boost::program_options::value< unsigned int >( &offset_y )->default_value( 0 ), "offset in lines in the frame" )
            ( "width", boost::program_options::value< unsigned int >( &width )->default_value( std::numeric_limits< unsigned int >::max() ), "line width in pixels; default: max" )
            ( "height", boost::program_options::value< unsigned int >( &height )->default_value( std::numeric_limits< unsigned int >::max() ), "number of lines in frame (in chunk mode always 1); default: max" )
            ( "frame-trigger", boost::program_options::value< std::string >( &frame_trigger ), "'line1', 'line2', 'line3', 'encoder'" ) //; if absent while --line-trigger present, same as --line-trigger" )
            ( "line-trigger", boost::program_options::value< std::string >( &line_trigger ), "'line1', 'line2', 'line3', 'encoder'" )
            ( "line-rate", boost::program_options::value< unsigned int >( &line_rate ), "line aquisition rate" )
            ( "encoder-ticks", boost::program_options::value< unsigned int >( &encoder_ticks ), "number of encoder ticks until the counter resets (reused for line number in frame in chunk mode)" )
            ( "header-only", "output header only" )
            ( "no-header", "output image data only" )
            ( "packet-size", boost::program_options::value< unsigned int >( &packet_size ), "mtu size on camera side, should be not greater than your lan and network interface set to" )
            ( "exposure", boost::program_options::value< unsigned int >( &exposure )->default_value( 100 ), "exposure" )
            ( "gain", boost::program_options::value< unsigned int >( &gain )->default_value( 100 ), "gain" )
            ( "timeout", boost::program_options::value< double >( &timeout_seconds )->default_value( 3.0 ), " frame acquisition timeout" )
            ( "test-colour", "output colour test image" )
            ( "verbose,v", "be more verbose" );
        boost::program_options::variables_map vm;
        boost::program_options::store( boost::program_options::parse_command_line( argc, argv, description), vm );
        boost::program_options::parsed_options parsed = boost::program_options::command_line_parser(argc, argv).options( description ).allow_unregistered().run();
        boost::program_options::notify( vm );
        if ( vm.count( "help" ) || vm.count( "long-help" ) )
        {
            std::cerr << "acquire images from a basler camera (for now gige only)" << std::endl;
            std::cerr << "output to stdout as serialized cv::Mat" << std::endl;
            std::cerr << std::endl;
            std::cerr << "usage: basler-cat [<options>] [<filters>]" << std::endl;
            std::cerr << std::endl;
            std::cerr << description << std::endl;
            if( vm.count( "long-help" ) )
            {
                std::cerr << std::endl;
                std::cerr << snark::cv_mat::filters::usage() << std::endl;
                std::cerr << std::endl;
                std::cerr << snark::cv_mat::serialization::options::type_usage() << std::endl;
            }
            std::cerr << std::endl;
            std::cerr << "note: there is a glitch or a subtle feature in basler line camera:" << std::endl;
            std::cerr << "      - power-cycle camera" << std::endl;
            std::cerr << "      - view colour images: it works" << std::endl;
            std::cerr << "      - view grey-scale images: it works" << std::endl;
            std::cerr << "      - view colour images: it still displays grey-scale" << std::endl;
            std::cerr << "      even in their native viewer you need to set colour image" << std::endl;
            std::cerr << "      repeatedly and with pure luck it works, but we have not" << std::endl;
            std::cerr << "      managed to do it in software; the remedy: power-cycle the camera" << std::endl;
            std::cerr << std::endl;
            return 1;
        }
        verbose = vm.count( "verbose" );
        if( verbose )
        {
            std::cerr << "basler-cat: PYLON_ROOT=" << ::getenv( "PYLON_ROOT" ) << std::endl;
            std::cerr << "basler-cat: GENICAM_ROOT_V2_1=" << ::getenv( "GENICAM_ROOT_V2_1" ) << std::endl;
            std::cerr << "basler-cat: initializing camera..." << std::endl;
        }
        Pylon::PylonAutoInitTerm auto_init_term;
        Pylon::CTlFactory& factory = Pylon::CTlFactory::GetInstance();
        Pylon::ITransportLayer* transport_layer( Pylon::CTlFactory::GetInstance().CreateTl( Pylon::CBaslerGigECamera::DeviceClass() ) );        
        if( !transport_layer )
        { 
            std::cerr << "basler-cat: failed to create transport layer" << std::endl;
            std::cerr << "            most likely PYLON_ROOT and GENICAM_ROOT_V2_1 environment variables not set" << std::endl;
            std::cerr << "            point them to your pylon installation, e.g:" << std::endl;
            std::cerr << "            export PYLON_ROOT=/opt/pylon" << std::endl;
            std::cerr << "            export GENICAM_ROOT_V2_1=/opt/pylon/genicam" << std::endl;
            return 1;
        }
        if( vm.count( "list-cameras" ) )
        {
            Pylon::DeviceInfoList_t devices;
            factory.EnumerateDevices( devices );
            for( unsigned int i = 0; i < devices.size(); ++i ) { std::cerr << devices[i].GetFullName() << std::endl; }
            return 0;
        }
        timeout = timeout_seconds * 1000.0;
        std::string filters = comma::join( boost::program_options::collect_unrecognized( parsed.options, boost::program_options::include_positional ), ';' );
        options.header_only = vm.count( "header-only" );
        options.no_header = vm.count( "no-header" );
        csv = comma::csv::options( argc, argv );
        bool chunk_mode =    csv.has_field( "counters" ) // quick and dirty
                          || csv.has_field( "adjusted-t" )
                          || csv.has_field( "line" )
                          || csv.has_field( "line-count" )
                          || csv.has_field( "ticks" )
                          || csv.has_field( "counters/adjusted-t" )
                          || csv.has_field( "counters/line" )
                          || csv.has_field( "counters/line-count" )
                          || csv.has_field( "counters/ticks" );
        if( chunk_mode )
        {
            if( vm.count( "encoder-ticks" ) == 0 ) { std::cerr << "basler-cat: chunk mode, please specify --encoder-ticks" << std::endl; return 1; }
            if( !filters.empty() ) { std::cerr << "basler-cat: chunk mode, cannot handle filters; use: basler-cat | cv-cat <filters> instead" << std::endl; return 1; }
            if( height != 1 && height != std::numeric_limits< unsigned int >::max() ) { std::cerr << "basler-cat: only --height=1 implemented in chunk mode" << std::endl; return 1; }
            height = 1;
            std::vector< std::string > v = comma::split( csv.fields, ',' );
            std::string format;
            for( unsigned int i = 0; i < v.size(); ++i )
            {
                if( v[i] == "t" ) { v[i] = "header/" + v[i]; format += "t"; }
                else if( v[i] == "rows" || v[i] == "cols" || v[i] == "size" || v[i] == "type" ) { v[i] = "header/" + v[i]; format += "ui"; }
                else if( v[i] == "adjusted-t" ) { v[i] = "counters/" + v[i]; format += "t"; }
                else if( v[i] == "line-count" || v[i] == "ticks" ) { v[i] = "counters/" + v[i]; format += "ul"; }
                else if( v[i] == "line" ) { v[i] = "counters/" + v[i]; format += "ui"; }
                else { std::cerr << "basler-cat: expected field, got '" << v[i] << "'" << std::endl; return 1; }
            }
            csv.fields = comma::join( v, ',' );
            csv.full_xpath = true;
            csv.format( format );
        }
        if( !vm.count( "buffer" ) && vm.count( "discard" ) ) { discard = 1; }
        Pylon::CBaslerGigECamera camera;
        if( vm.count( "address" ) )
        {
            Pylon::CBaslerGigEDeviceInfo info;
            info.SetIpAddress( address.c_str() );
            camera.Attach( factory.CreateDevice( info ) );
        }
        else
        {
            Pylon::DeviceInfoList_t devices;
            factory.EnumerateDevices( devices );
            if( devices.empty() ) { std::cerr << "basler-cat: no camera found" << std::endl; return 1; }
            std::cerr << "basler-cat: will connect to the first of " << devices.size() << " found device(s):" << std::endl;
            for( unsigned int i = 0; i < devices.size(); ++i ) { std::cerr << "    " << devices[i].GetFullName() << std::endl; }
            camera.Attach( transport_layer->CreateDevice( devices[0] ) );
        }
        if( verbose ) { std::cerr << "basler-cat: initialized camera" << std::endl; }
        if( verbose ) { std::cerr << "basler-cat: opening camera " << camera.GetDevice()->GetDeviceInfo().GetFullName() << "..." << std::endl; }
        camera.Open();
        if( verbose ) { std::cerr << "basler-cat: opened camera " << camera.GetDevice()->GetDeviceInfo().GetFullName() << std::endl; }
        Pylon::CBaslerGigECamera::StreamGrabber_t grabber( camera.GetStreamGrabber( 0 ) );
        grabber.Open();
        unsigned int channels;
        switch( options.get_header().type ) // quick and dirty
        {
            case CV_8UC1:
                channels = set_pixel_format_( camera, Basler_GigECamera::PixelFormat_Mono8 );
                break;
            case CV_8UC3:
                channels = set_pixel_format_( camera, Basler_GigECamera::PixelFormat_RGB8Packed );
                break;
            default:
                std::cerr << "basler-cat: type \"" << options.type << "\" not implemented or not supported by camera" << std::endl;
                return 1;
        }
        unsigned int max_width = camera.Width.GetMax();
        if( offset_x >= max_width ) { std::cerr << "basler-cat: expected --offset-x less than " << max_width << ", got " << offset_x << std::endl; return 1; }
        camera.OffsetX.SetValue( offset_x );
        width = ( ( unsigned long long )( offset_x ) + width ) < max_width ? width : max_width - offset_x;
        camera.Width.SetValue( width );
        unsigned int max_height = camera.Height.GetMax();
        //if( height < 512 ) { std::cerr << "basler-cat: expected height greater than 512, got " << height << std::endl; return 1; }
        
        // todo: is the colour line 2098 * 3 or ( 2098 / 3 ) * 3 ?
        //offset_y *= channels;
        //height *= channels;
        
        if( offset_y >= max_height ) { std::cerr << "basler-cat: expected --offset-y less than " << max_height << ", got " << offset_y << std::endl; return 1; }
        camera.OffsetY.SetValue( offset_y );
        height = ( ( unsigned long long )( offset_y ) + height ) < max_height ? height : ( max_height - offset_y );
        camera.Height.SetValue( height );
        if( verbose ) { std::cerr << "basler-cat: set width,height to " << width << "," << height << std::endl; }
        if( vm.count( "packet-size" ) ) { camera.GevSCPSPacketSize.SetValue( packet_size ); }
        // todo: giving up... the commented code throws, but failure to stop acquisition, if active
        //       seems to lead to the following scenario:
        //       - power-cycle camera
        //       - view colour images: it works
        //       - view grey-scale images: it works
        //       - view colour images: it still displays grey-scale
        //if( verbose ) { std::cerr << "basler-cat: getting aquisition status... (frigging voodoo...)" << std::endl; }
        //GenApi::IEnumEntry* acquisition_status = camera.AcquisitionStatusSelector.GetEntry( Basler_GigECamera::AcquisitionStatusSelector_AcquisitionActive );
        //if( acquisition_status && GenApi::IsAvailable( acquisition_status ) && camera.AcquisitionStatus() )
        //{
        //    if( verbose ) { std::cerr << "basler-cat: stopping aquisition..." << std::endl; }
        //    camera.AcquisitionStop.Execute();
        //    if( verbose ) { std::cerr << "basler-cat: aquisition stopped" << std::endl; }
        //}
        
        // todo: a hack for now
        GenApi::IEnumEntry* acquisitionStart = camera.TriggerSelector.GetEntry( Basler_GigECamera::TriggerSelector_AcquisitionStart );
        if( acquisitionStart && GenApi::IsAvailable( acquisitionStart ) )
        {
            camera.TriggerSelector.SetValue( Basler_GigECamera::TriggerSelector_AcquisitionStart );
            camera.TriggerMode.SetValue( frame_trigger.empty() ? Basler_GigECamera::TriggerMode_Off : Basler_GigECamera::TriggerMode_On );
        }
        GenApi::IEnumEntry* frameStart = camera.TriggerSelector.GetEntry( Basler_GigECamera::TriggerSelector_FrameStart );
        if( frameStart && GenApi::IsAvailable( frameStart ) )
        {
            //if( frame_trigger.empty() ) { frame_trigger = line_trigger; }
            if( frame_trigger.empty() )
            {
                camera.TriggerSelector.SetValue( Basler_GigECamera::TriggerSelector_FrameStart );
                camera.TriggerMode.SetValue( Basler_GigECamera::TriggerMode_Off );
            }
            else
            {
                camera.TriggerSelector.SetValue( Basler_GigECamera::TriggerSelector_FrameStart );
                camera.TriggerMode.SetValue( Basler_GigECamera::TriggerMode_On );
                Basler_GigECamera::TriggerSourceEnums t;
                if( frame_trigger == "line1" ) { camera.TriggerSource.SetValue( Basler_GigECamera::TriggerSource_Line1 ); }
                if( frame_trigger == "line2" ) { camera.TriggerSource.SetValue( Basler_GigECamera::TriggerSource_Line2 ); }
                if( frame_trigger == "line3" ) { camera.TriggerSource.SetValue( Basler_GigECamera::TriggerSource_Line3 ); }
                else if( frame_trigger == "encoder" ) { camera.TriggerSource.SetValue( Basler_GigECamera::TriggerSource_ShaftEncoderModuleOut ); }
                else { std::cerr << "basler-cat: frame trigger '" << frame_trigger << "' not implemented or invalid" << std::endl; return 1; }
                camera.TriggerActivation.SetValue( Basler_GigECamera::TriggerActivation_RisingEdge );
                camera.TriggerSelector.SetValue( Basler_GigECamera::TriggerSelector_LineStart );
                camera.TriggerMode.SetValue( Basler_GigECamera::TriggerMode_On );
                camera.TriggerActivation.SetValue( Basler_GigECamera::TriggerActivation_RisingEdge );
                if( frame_trigger == "encoder" )
                {
                    // todo: make configurable
                    camera.ShaftEncoderModuleLineSelector.SetValue( Basler_GigECamera::ShaftEncoderModuleLineSelector_PhaseA );
                    camera.ShaftEncoderModuleLineSource.SetValue( Basler_GigECamera::ShaftEncoderModuleLineSource_Line1 );
                    camera.ShaftEncoderModuleLineSelector.SetValue( Basler_GigECamera::ShaftEncoderModuleLineSelector_PhaseB );
                    camera.ShaftEncoderModuleLineSource.SetValue( Basler_GigECamera::ShaftEncoderModuleLineSource_Line2 );
                    camera.ShaftEncoderModuleCounterMode.SetValue( Basler_GigECamera::ShaftEncoderModuleCounterMode_FollowDirection );
                    camera.ShaftEncoderModuleMode.SetValue( Basler_GigECamera::ShaftEncoderModuleMode_ForwardOnly );
                    camera.ShaftEncoderModuleCounterMax.SetValue( encoder_ticks - 1 );
                    /// @todo compensate for mechanical jitter, if needed
                    ///       see Runner_Users_manual.pdf, 8.3, Case 2
                    camera.ShaftEncoderModuleReverseCounterMax.SetValue( 0 );
                    camera.ShaftEncoderModuleCounterReset.Execute();
                    camera.ShaftEncoderModuleReverseCounterReset.Execute();
                }
            }
        }
        GenApi::IEnumEntry* lineStart = camera.TriggerSelector.GetEntry( Basler_GigECamera::TriggerSelector_LineStart );
        if( lineStart && GenApi::IsAvailable( lineStart ) )
        {
            if( line_trigger.empty() )
            {
                camera.TriggerSelector.SetValue( Basler_GigECamera::TriggerSelector_LineStart );
                camera.TriggerMode.SetValue( Basler_GigECamera::TriggerMode_Off );
            }
            else
            {
                camera.TriggerSelector.SetValue( Basler_GigECamera::TriggerSelector_LineStart );
                camera.TriggerMode.SetValue( Basler_GigECamera::TriggerMode_On );
                Basler_GigECamera::TriggerSourceEnums t;
                if( line_trigger == "line1" ) { camera.TriggerSource.SetValue( Basler_GigECamera::TriggerSource_Line1 ); }
                else if( line_trigger == "line2" ) { camera.TriggerSource.SetValue( Basler_GigECamera::TriggerSource_Line2 ); }
                else if( line_trigger == "line3" ) { camera.TriggerSource.SetValue( Basler_GigECamera::TriggerSource_Line3 ); }
                else if( line_trigger == "encoder" ) { camera.TriggerSource.SetValue( Basler_GigECamera::TriggerSource_ShaftEncoderModuleOut ); }
                else { std::cerr << "basler-cat: line trigger '" << line_trigger << "' not implemented or invalid" << std::endl; return 1; }
                camera.TriggerActivation.SetValue( Basler_GigECamera::TriggerActivation_RisingEdge );
                camera.TriggerSelector.SetValue( Basler_GigECamera::TriggerSelector_LineStart );
                camera.TriggerMode.SetValue( Basler_GigECamera::TriggerMode_On );
                camera.TriggerActivation.SetValue( Basler_GigECamera::TriggerActivation_RisingEdge );
            }
        }
        if( chunk_mode )
        {
            std::cerr << "basler-cat: setting chunk mode..." << std::endl;
            if( !GenApi::IsWritable( camera.ChunkModeActive ) ) { std::cerr << "basler-cat: camera does not support chunk features" << std::endl; camera.Close(); return 1; }
            camera.ChunkModeActive.SetValue( true );
            camera.ChunkSelector.SetValue( Basler_GigECameraParams::ChunkSelector_Framecounter );
            camera.ChunkEnable.SetValue( true );
            camera.ChunkSelector.SetValue( Basler_GigECameraParams::ChunkSelector_Timestamp );
            camera.ChunkEnable.SetValue( true );
            camera.ChunkSelector.SetValue( Basler_GigECameraParams::ChunkSelector_LineTriggerIgnoredCounter );
            camera.ChunkEnable.SetValue( true );
            camera.ChunkSelector.SetValue( Basler_GigECameraParams::ChunkSelector_FrameTriggerIgnoredCounter );
            camera.ChunkEnable.SetValue( true );
            camera.ChunkSelector.SetValue( Basler_GigECameraParams::ChunkSelector_LineTriggerEndToEndCounter );
            camera.ChunkEnable.SetValue( true );
            camera.ChunkSelector.SetValue( Basler_GigECameraParams::ChunkSelector_FrameTriggerCounter );
            camera.ChunkEnable.SetValue( true );
            camera.ChunkSelector.SetValue( Basler_GigECameraParams::ChunkSelector_FramesPerTriggerCounter );
            camera.ChunkEnable.SetValue( true );
            parser = camera.CreateChunkParser();
            if( !parser ) { std::cerr << "basler-cat: failed to create chunk parser" << std::endl; camera.Close(); return 1; }
            std::cerr << "basler-cat: set chunk mode" << std::endl;
        }
        camera.ExposureMode.SetValue( Basler_GigECamera::ExposureMode_Timed );
        if( vm.count( "exposure" ) ) { camera.ExposureTimeRaw.SetValue( exposure ); } // todo? auto exposure (see ExposureAutoEnums)
        if( vm.count( "gain" ) )
        { 
            camera.GainSelector.SetValue( Basler_GigECamera::GainSelector_All );
            camera.GainRaw.SetValue( gain );
            if( channels == 3 ) // todo: make configurable; also is not setting all not enough?
            {
                camera.GainSelector.SetValue( Basler_GigECamera::GainSelector_Red );
                camera.GainRaw.SetValue( gain );
                camera.GainSelector.SetValue( Basler_GigECamera::GainSelector_Green );
                camera.GainRaw.SetValue( gain );
                camera.GainSelector.SetValue( Basler_GigECamera::GainSelector_Blue );
                camera.GainRaw.SetValue( gain );
            }
        }
        if( vm.count( "line-rate" ) ) { camera.AcquisitionLineRateAbs.SetValue( line_rate ); }
        if( vm.count( "test-colour" ) ) { camera.TestImageSelector.SetValue( Basler_GigECamera::TestImageSelector_Testimage6 ); }
        else { camera.TestImageSelector.SetValue( Basler_GigECamera::TestImageSelector_Off ); }
        unsigned int payload_size = camera.PayloadSize.GetValue();
        if( verbose )
        { 
            std::cerr << "basler-cat: camera mtu size: " << camera.GevSCPSPacketSize.GetValue() << std::endl;
            std::cerr << "basler-cat: exposure: " << camera.ExposureTimeRaw.GetValue() << std::endl;
            std::cerr << "basler-cat: payload size: " << payload_size << std::endl;
        }
        std::vector< std::vector< char > > buffers( 2 ); // todo? make number of buffers configurable
        for( std::size_t i = 0; i < buffers.size(); ++i ) { buffers[i].resize( payload_size ); }
        grabber.MaxBufferSize.SetValue( buffers[0].size() );
        grabber.SocketBufferSize.SetValue( 127 );
        if( verbose )
        { 
            std::cerr << "basler-cat: socket buffer size: " << grabber.SocketBufferSize.GetValue() << std::endl;
            std::cerr << "basler-cat: max buffer size: " << grabber.MaxBufferSize.GetValue() << std::endl;
        }
        grabber.MaxNumBuffer.SetValue( buffers.size() ); // todo: use --buffer value for number of buffered images
        grabber.PrepareGrab(); // image size now must not be changed until FinishGrab() is called.
        std::vector< Pylon::StreamBufferHandle > buffer_handles( buffers.size() );
        for( std::size_t i = 0; i < buffers.size(); ++i )
        { 
            buffer_handles[i] = grabber.RegisterBuffer( &buffers[i][0], buffers[i].size() );
            grabber.QueueBuffer( buffer_handles[i], NULL );
        }
        if( chunk_mode )
        {
            snark::tbb::bursty_reader< ChunkPair > read( boost::bind( &capture_< ChunkPair >, boost::ref( camera ), boost::ref( grabber ) ), discard );
            tbb::filter_t< ChunkPair, void > write( tbb::filter::serial_in_order, boost::bind( &write_, _1 ) );
            snark::tbb::bursty_pipeline< ChunkPair > pipeline;
            camera.AcquisitionMode.SetValue( Basler_GigECamera::AcquisitionMode_Continuous );
            camera.AcquisitionStart.Execute(); // continuous acquisition mode        
            if( verbose ) { std::cerr << "basler-cat: running in chunk mode..." << std::endl; }
            pipeline.run( read, write );
            if( verbose ) { std::cerr << "basler-cat: shutting down..." << std::endl; }
            camera.AcquisitionStop();
            camera.DestroyChunkParser( parser );
        }
        else
        {
            snark::cv_mat::serialization serialization( options );
            snark::tbb::bursty_reader< Pair > reader( boost::bind( &capture_< Pair >, boost::ref( camera ), boost::ref( grabber ) ), discard );
            snark::imaging::applications::pipeline pipeline( serialization, filters, reader );
            camera.AcquisitionMode.SetValue( Basler_GigECamera::AcquisitionMode_Continuous );
            camera.AcquisitionStart.Execute(); // continuous acquisition mode        
            if( verbose ) { std::cerr << "basler-cat: running..." << std::endl; }
            pipeline.run();
            if( verbose ) { std::cerr << "basler-cat: shutting down..." << std::endl; }
            camera.AcquisitionStop();
        }
        if( verbose ) { std::cerr << "basler-cat: acquisition stopped" << std::endl; }
        is_shutdown = true;
        while( !done ) { boost::thread::sleep( boost::posix_time::microsec_clock::universal_time() + boost::posix_time::milliseconds( 100 ) ); }
        grabber.FinishGrab();
        Pylon::GrabResult result;
        while( grabber.RetrieveResult( result ) ); // get all buffers back
        for( std::size_t i = 0; i < buffers.size(); ++i ) { grabber.DeregisterBuffer( buffer_handles[i] ); }
        grabber.Close();
        camera.Close();
        if( verbose ) { std::cerr << "basler-cat: done" << std::endl; }
        return 0;
    }
    catch( std::exception& ex ) { std::cerr << "basler-cat: " << ex.what() << std::endl; }
    catch( ... ) { std::cerr << "basler-cat: unknown exception" << std::endl; }
    return 1;
}
Example #22
0
int main()
{
  try {
#if defined(VISP_HAVE_OGRE) 
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || (VISP_HAVE_OPENCV_VERSION >= 0x020100)

	// Image to stock gathered data
    // Here we acquire a color image. The consequence will be that
    // the background texture used in Ogre renderer will be also in color.
    vpImage<vpRGBa> I;

    // Now we try to find an available framegrabber
#if defined(VISP_HAVE_V4L2)
    // Video for linux 2 grabber
    vpV4l2Grabber grabber;
	grabber.open(I);
    grabber.acquire(I);
#elif defined(VISP_HAVE_DC1394)
    // libdc1394-2
    vp1394TwoGrabber grabber;
	grabber.open(I);
    grabber.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
    // OpenCV to gather images
    cv::VideoCapture grabber(0); // open the default camera
    if(!grabber.isOpened()) { // check if we succeeded
      std::cout << "Failed to open the camera" << std::endl;
      return -1;
    }
    cv::Mat frame;
    grabber >> frame; // get a new frame from camera
    vpImageConvert::convert(frame, I);
#endif

    // Parameters of our camera
    double px = 565;
    double py = 565;
    double u0 = I.getWidth() / 2;
    double v0 = I.getHeight() / 2;
    vpCameraParameters cam(px,py,u0,v0);
    // The matrix with our pose
    // Defines the pose of the object in the camera frame
    vpHomogeneousMatrix cMo;

    // Our object
    // A simulator with the camera parameters defined above,
    // a grey level background image and of the good size
    vpAROgre ogre(cam, I.getWidth(), I.getHeight());
    // Initialisation
    // Here we load the requested plugins specified in the "plugins.cfg" file
    // and the resources specified in the "resources.cfg" file
    // These two files can be found respectively in ViSP_HAVE_OGRE_PLUGINS_PATH
    // and ViSP_HAVE_OGRE_RESOURCES_PATH folders
    ogre.init(I);

    // Create a basic scene
    // -----------------------------------
    //  	      Loading things
    // -----------------------------------
    //  As you will see in section 5, our
    //  application knows locations where
    //  it can search for medias.
    //  Here we use a mesh included in
    //  the installation files : a robot.
    // -----------------------------------
    // Here we load the "robot.mesh" model that is found thanks to the resources locations
    // specified in the "resources.cfg" file
    ogre.load("Robot", "robot.mesh");
    ogre.setPosition("Robot", vpTranslationVector(-0.3, 0.2, 0));
    ogre.setScale("Robot", 0.001f,0.001f,0.001f);
    ogre.setRotation("Robot", vpRotationMatrix(vpRxyzVector(M_PI, 0, 0)));

    cMo[2][3] = 1.; // Z = 1 meter

    std::cout << "cMo:\n" << cMo << std::endl;

    // Rendering loop, ended with on escape
    while(ogre.continueRendering()){
      // Acquire a new image
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394)
      grabber.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
	  grabber >> frame;
      vpImageConvert::convert(frame, I);
#endif
      //Pose computation
      // ...
      // cMo updated
      // Display the robot at the position specified by cMo with vpAROgre
      ogre.display(I,cMo);
    }
#else
    std::cout << "You need an available framegrabber to run this example" << std::endl;
#endif
#else
    std::cout << "You need Ogre3D to run this example" << std::endl;
#endif
    return 0;
  }
  catch(vpException e) {
    std::cout << "Catch an exception: " << e << std::endl;
    return 1;
  }
  catch(...) {
    std::cout << "Catch an exception " << std::endl;
    return 1;
  }
}
Example #23
0
int
main(int argc, char ** argv)
{
  std::string arg("");
  pcl::OpenNIGrabber::Mode depth_mode = pcl::OpenNIGrabber::OpenNI_Default_Mode;
  pcl::OpenNIGrabber::Mode image_mode = pcl::OpenNIGrabber::OpenNI_Default_Mode;

  if (argc >= 2)
  {
    arg = argv[1];

    if (arg == "--help" || arg == "-h")
    {
      usage(argv);
      return 1;
    }
    else if (arg == "-l")
    {
      if (argc >= 3)
      {
        pcl::OpenNIGrabber grabber(argv[2]);
        boost::shared_ptr<openni_wrapper::OpenNIDevice> device = grabber.getDevice();
        cout << "Supported depth modes for device: " << device->getVendorName() << " , " << device->getProductName() << endl;
        std::vector<std::pair<int, XnMapOutputMode > > modes = grabber.getAvailableDepthModes();
        for (std::vector<std::pair<int, XnMapOutputMode > >::const_iterator it = modes.begin(); it != modes.end(); ++it)
        {
          cout << it->first << " = " << it->second.nXRes << " x " << it->second.nYRes << " @ " << it->second.nFPS << endl;
        }

        cout << endl << "Supported image modes for device: " << device->getVendorName() << " , " << device->getProductName() << endl;
        modes = grabber.getAvailableImageModes();
        for (std::vector<std::pair<int, XnMapOutputMode > >::const_iterator it = modes.begin(); it != modes.end(); ++it)
        {
          cout << it->first << " = " << it->second.nXRes << " x " << it->second.nYRes << " @ " << it->second.nFPS << endl;
        }
        return 0;
      }
      else
      {
        openni_wrapper::OpenNIDriver& driver = openni_wrapper::OpenNIDriver::getInstance();
        if (driver.getNumberDevices() > 0)
        {
          for (unsigned deviceIdx = 0; deviceIdx < driver.getNumberDevices(); ++deviceIdx)
          {
            cout << "Device: " << deviceIdx + 1 << ", vendor: " << driver.getVendorName(deviceIdx) << ", product: " << driver.getProductName(deviceIdx)
              << ", connected: " << (int) driver.getBus(deviceIdx) << " @ " << (int) driver.getAddress(deviceIdx) << ", serial number: \'" << driver.getSerialNumber(deviceIdx) << "\'" << endl;
          }

        }
        else
          cout << "No devices connected." << endl;
        
        cout <<"Virtual Devices available: ONI player" << endl;
        return 0;
      }
    }

    if (argc >= 3)
    {
      depth_mode = (pcl::OpenNIGrabber::Mode) atoi(argv[2]);
      if (argc == 4)
      {
        image_mode = (pcl::OpenNIGrabber::Mode) atoi(argv[3]);
      }
    }
  }
  else
  {
    openni_wrapper::OpenNIDriver& driver = openni_wrapper::OpenNIDriver::getInstance();
    if (driver.getNumberDevices() > 0)
      cout << "Device Id not set, using first device." << endl;
  }

  pcl::OpenNIGrabber grabber(arg, depth_mode, image_mode);
  if (grabber.providesCallback<pcl::OpenNIGrabber::sig_cb_openni_point_cloud_rgb > ())
  {
    SimpleOpenNIViewer<pcl::PointXYZRGB> v(grabber);
    v.run();
  }
  else
  {
    SimpleOpenNIViewer<pcl::PointXYZI> v(grabber);
    v.run();
  }

  return (0);
}
// Auto-detect and configure every frame grabber used by this application.
// Probes /dev/video0, /dev/video1, ... in order, creating and hand-configuring
// a V4L2grabber for each device node that exists, then unconditionally adds
// one XCgrabber for the "Imagize" camera. Each grabber is registered as a
// subcomponent of 'manager' and appended to the file-level 'grabbers' list.
// The parameter sequences below are order-dependent: auto controls are toggled
// on then off, and manual values are first set 1-off and then set to the real
// value, to force the hardware to actually apply each setting.
void setupGrabbers(ModelManager& manager)
{
  size_t cameraID = 0; // index into focusval[] for per-camera focus settings
  while(true)
  {
    // Probe the next /dev/videoN node; stop scanning at the first absent one.
    // (grabbers.size() tracks cameraID here, since each iteration appends
    // exactly one grabber to 'grabbers' below.)
    std::string dev = sformat("/dev/video%" ZU , grabbers.size());
    LINFO("Trying to open %s", dev.c_str());
    int fd = open(dev.c_str(), O_RDONLY);
    if (fd == -1) { LINFO("%s not found -- Skipping.", dev.c_str()); break; } else close(fd);

    // instantiate and configure a grabber:
    nub::ref<V4L2grabber> grabber(new V4L2grabber(manager, dev, dev));
    
    // do not export any command-line option to avoid clashes among grabber instances:
    grabber->forgetExports();

    // let's set everything by hand here to ensure consistency:
    grabber->setModelParamVal("FrameGrabberDevice", dev);

    // basic capture setup: 2 buffers, streaming 720p YUYV on channel 0
    grabber->setModelParamVal("FrameGrabberNbuf", 2);
    grabber->setModelParamVal("FrameGrabberStreaming", true); 
    grabber->setModelParamVal("FrameGrabberByteSwap", false);
    grabber->setModelParamVal("FrameGrabberDims", Dims(1280,720));
    grabber->setModelParamVal("FrameGrabberMode", VIDFMT_YUYV);
    grabber->setModelParamVal("FrameGrabberChannel", 0);

    // to make sure we will force the hardware to set its values, first set some trash values, then set the real values:
    // turn all auto controls to on:
    grabber->setModelParamVal("FrameGrabberWhiteBalTempAuto", true);
    grabber->setModelParamVal("FrameGrabberPowerLineFreq", 2);
    grabber->setModelParamVal("FrameGrabberBacklightComp", 1);
    grabber->setModelParamVal("FrameGrabberFocusAuto", true);
 
    // now turn all auto controls to off:
    grabber->setModelParamVal("FrameGrabberWhiteBalTempAuto", false);
    grabber->setModelParamVal("FrameGrabberPowerLineFreq", 0);
    grabber->setModelParamVal("FrameGrabberBacklightComp", 0);
    grabber->setModelParamVal("FrameGrabberExposureAuto", 3); // that's still auto
    grabber->setModelParamVal("FrameGrabberFocusAuto", false);
    grabber->setModelParamVal("FrameGrabberExposureAuto", 1); // now we are full manual for exposure
 
    // set all manual settings 1-off from what we want:
    // NOTE(review): focusval[cameraID] is indexed by discovery order; confirm
    // focusval has an entry for every camera that can appear, or this reads
    // out of bounds.
    grabber->setModelParamVal("FrameGrabberBrightness", 134);
    grabber->setModelParamVal("FrameGrabberContrast", 6);
    grabber->setModelParamVal("FrameGrabberSaturation", 84);
    grabber->setModelParamVal("FrameGrabberWhiteBalTemp", 3101);
    grabber->setModelParamVal("FrameGrabberExposureAbs", 39);
    grabber->setModelParamVal("FrameGrabberSharpness", 26);
    grabber->setModelParamVal("FrameGrabberFocus", focusval[cameraID] + 1);
    grabber->setModelParamVal("FrameGrabberZoom", 1);
 
    // and now the real values:
    grabber->setModelParamVal("FrameGrabberPowerLineFreq", 0); // should be set already but just in case...
    grabber->setModelParamVal("FrameGrabberBacklightComp", 0);
    grabber->setModelParamVal("FrameGrabberExposureAuto", 1);
    grabber->setModelParamVal("FrameGrabberWhiteBalTempAuto", false);
 
    grabber->setModelParamVal("FrameGrabberBrightness", 133);
    grabber->setModelParamVal("FrameGrabberContrast", 5);
    grabber->setModelParamVal("FrameGrabberSaturation", 83);
    grabber->setModelParamVal("FrameGrabberWhiteBalTemp", 3100);
    grabber->setModelParamVal("FrameGrabberSharpness", 25);
    grabber->setModelParamVal("FrameGrabberExposureAbs", 156);
    grabber->setModelParamVal("FrameGrabberFocusAuto", false);
    grabber->setModelParamVal("FrameGrabberFocus", focusval[cameraID]);
    grabber->setModelParamVal("FrameGrabberZoom", 0);
    
    // keep track of it:
    manager.addSubComponent(grabber);
    grabbers.push_back(grabber);
    LINFO("Added V4L2grabber for %s", dev.c_str());

    ++cameraID;
  }

  // Now look for the Imagize cam:

  // instantiate and configure a grabber:
  nub::ref<XCgrabber> grabber(new XCgrabber(manager, "Imagize", "Imagize"));
    
  // do not export any command-line option to avoid clashes among grabber instances:
  grabber->forgetExports();

  // let's set everything by hand here to ensure consistency:
  // NOTE(review): hard-coded machine-specific format file path -- confirm it
  // is valid on the deployment host.
  grabber->setModelParamString("XCGrabberFormatFile", "/home/ilab24/xcap/data/imagize-works.xsetup");
    grabber->setModelParamVal("FrameGrabberMode", VIDFMT_RGB24);
  grabber->setModelParamVal("FrameGrabberNbuf", 2);
  grabber->setModelParamVal("FrameGrabberDims", Dims(1280,1024));

  // keep track of it:
  manager.addSubComponent(grabber);
  grabbers.push_back(grabber);
  LINFO("Added XCgrabber for Imagize camera");
}
Example #25
0
int
main(int argc, char ** argv)
{
  // Usage: [device-id | -l [device-id] | -h] [-imagemode N]
  std::string device_id ("");
  pcl::OpenNIGrabber::Mode image_mode = pcl::OpenNIGrabber::OpenNI_Default_Mode;

  if (argc < 2)
  {
    // No device argument: fall back to the first connected device, if any.
    openni_wrapper::OpenNIDriver& driver = openni_wrapper::OpenNIDriver::getInstance ();
    if (driver.getNumberDevices () > 0)
      cout << "Device Id not set, using first device." << endl;
  }
  else
  {
    device_id = argv[1];

    if (device_id == "--help" || device_id == "-h")
    {
      usage (argv);
      return 0;
    }

    if (device_id == "-l")
    {
      if (argc < 3)
      {
        // No device given: enumerate everything the driver can see.
        openni_wrapper::OpenNIDriver& driver = openni_wrapper::OpenNIDriver::getInstance ();
        unsigned num_devices = driver.getNumberDevices ();
        if (num_devices == 0)
          cout << "No devices connected." << endl;
        else
          for (unsigned device_idx = 0; device_idx < num_devices; ++device_idx)
            cout << "Device: " << device_idx + 1 << ", vendor: " << driver.getVendorName (device_idx) << ", product: " << driver.getProductName (device_idx)
              << ", connected: " << driver.getBus (device_idx) << " @ " << driver.getAddress (device_idx) << ", serial number: \'" << driver.getSerialNumber (device_idx) << "\'" << endl;

        cout <<"Virtual Devices available: ONI player" << endl;
      }
      else
      {
        // List the image modes supported by the given device, if it has
        // an image stream at all.
        pcl::OpenNIGrabber grabber (argv[2]);
        boost::shared_ptr<openni_wrapper::OpenNIDevice> device = grabber.getDevice ();
        if (device->hasImageStream ())
        {
          std::vector<std::pair<int, XnMapOutputMode> > modes = grabber.getAvailableImageModes ();
          cout << endl << "Supported image modes for device: " << device->getVendorName () << " , " << device->getProductName () << endl;
          for (std::size_t i = 0; i < modes.size (); ++i)
            cout << modes[i].first << " = " << modes[i].second.nXRes << " x " << modes[i].second.nYRes << " @ " << modes[i].second.nFPS << endl;
        }
      }
      return (0);
    }
  }

  // Optional explicit image mode from the command line.
  unsigned mode;
  if (pcl::console::parse (argc, argv, "-imagemode", mode) != -1)
    image_mode = pcl::OpenNIGrabber::Mode (mode);

  pcl::OpenNIGrabber grabber (device_id, pcl::OpenNIGrabber::OpenNI_Default_Mode, image_mode);
  SimpleOpenNIViewer v (grabber);
  v.run ();

  return (0);
}
Example #26
0
int main()
{
  try {
#if defined(VISP_HAVE_OGRE) 
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || (VISP_HAVE_OPENCV_VERSION >= 0x020100)

    // Image to store gathered data
    // Here we acquire a grey level image. The consequence will be that
    // the background texture used in Ogre renderer will be also in grey
    // level.
    vpImage<unsigned char> I;

    // Now we try to find an available framegrabber
#if defined(VISP_HAVE_V4L2)
    // Video for linux 2 grabber
    vpV4l2Grabber grabber;
    // Open frame grabber
    // Here we acquire an image from an available framegrabber to update
    // the image size
    grabber.open(I);
    grabber.acquire(I);
#elif defined(VISP_HAVE_DC1394)
    // libdc1394-2
    vp1394TwoGrabber grabber;
    // Open frame grabber
    // Here we acquire an image from an available framegrabber to update
    // the image size
    grabber.open(I);
    grabber.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
    // OpenCV to gather images
    cv::VideoCapture grabber(0); // open the default camera
    if(!grabber.isOpened()) { // check if we succeeded
      std::cout << "Failed to open the camera" << std::endl;
      return -1;
    }
    cv::Mat frame;
    grabber >> frame; // get a new frame from camera
    vpImageConvert::convert(frame, I);
#endif

    // Parameters of our camera
    double px = 565;
    double py = 565;
    double u0 = I.getWidth() / 2;
    double v0 = I.getHeight() / 2;
    vpCameraParameters cam(px,py,u0,v0);
    // The matrix with our pose
    vpHomogeneousMatrix cMo;
    cMo[2][3] = 0.5; // Z = 0.5 meter

    // Our object
    vpAROgreAdvanced ogre(cam, I.getWidth(), I.getHeight());
    // Initialisation
    ogre.init(I);

    // Rendering loop
    while(ogre.continueRendering()){
      // Acquire a new image
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394)
      grabber.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
      grabber >> frame;
      vpImageConvert::convert(frame, I);
#endif
      // Pose computation
      // ...
      // cMo updated
      // Display with vpAROgre
      ogre.display(I, cMo);
    }
#else
    std::cout << "You need an available framegrabber to run this example" << std::endl;
#endif
#else
    std::cout << "You need Ogre3D to run this example" << std::endl;
#endif
    return 0;
  }
  catch(vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
    return 1;
  }
  catch(...) {
    std::cout << "Catch an exception " << std::endl;
    return 1;
  }
}