OpenNi2Video::OpenNi2Video(const std::string& device_uri)
{
    // Bring up the OpenNI runtime before touching any hardware.
    InitialiseOpenNI();

    // Register the device named by the URI, then attach the two default
    // streams (1mm depth + RGB) at 30Hz with the device's native resolution.
    const int device_id = AddDevice(device_uri);
    AddStream(OpenNiStreamMode(OpenNiDepth_1mm, ImageDim(), 30, device_id));
    AddStream(OpenNiStreamMode(OpenNiRgb,       ImageDim(), 30, device_id));

    // Negotiate the stream modes with the device and begin capture.
    SetupStreamModes();
    Start();
}
std::shared_ptr<CameraDriverInterface> GetDevice(const Uri& uri) { std::string sFormat = uri.properties.Get<std::string>("fmt", "MONO8"); std::string sRange = uri.properties.Get<std::string>("range", "1"); ImageDim dims = uri.properties.Get<ImageDim>("size", ImageDim(0, 0)); int channel = uri.properties.Get<int>("channel", -1); double dRange; if(sRange == "ir") dRange = 1023; // OpenNi uses the 10 l.s.bits only (range [0, 1023]) else if(sRange == "ir2") dRange = 20000; // libfreenect2 uses this value else if(sRange == "depth") dRange = 4500; // max range (mm) of asus xtion pro live else { dRange = strtod(sRange.c_str(), nullptr); if(dRange == 0.) dRange = 1.; } const Uri input_uri = Uri(uri.url); // Create input camera std::shared_ptr<CameraDriverInterface> Input = DeviceRegistry<hal::CameraDriverInterface>::I().Create(input_uri); ConvertDriver* pDriver = new ConvertDriver( Input, sFormat, dRange, dims, channel); return std::shared_ptr<CameraDriverInterface>( pDriver ); }
// Construct a Pleora video source from generic URI parameters.
// Recognised keys (model/sn/idx/size/pos/roi) select and configure the
// device; any other key is forwarded verbatim as a GenICam parameter.
PleoraVideo::PleoraVideo(const Params& p)
    : size_bytes(0), lPvSystem(0), lDevice(0), lStream(0), lDeviceParams(0),
      lStart(0), lStop(0), lTemperatureCelcius(0), getTemp(false),
      lStreamParams(0), validGrabbedBuffers(0)
{
    std::string serial_number;
    std::string model_name;
    int index = 0;
    const size_t buffer_count = PleoraVideo::DEFAULT_BUFFER_COUNT;
    Params device_params;

    // Translate the generic URI parameters into device parameters.
    for(const auto& param : p.params) {
        const std::string& key = param.first;
        if(key == "model") {
            model_name = param.second;
        }else if(key == "sn") {
            serial_number = param.second;
        }else if(key == "idx") {
            index = p.Get<int>("idx", 0);
        }else if(key == "size") {
            const ImageDim dim = p.Get<ImageDim>("size", ImageDim(0,0));
            device_params.Set("Width" , dim.x);
            device_params.Set("Height" , dim.y);
        }else if(key == "pos") {
            const ImageDim pos = p.Get<ImageDim>("pos", ImageDim(0,0));
            device_params.Set("OffsetX" , pos.x);
            device_params.Set("OffsetY" , pos.y);
        }else if(key == "roi") {
            const ImageRoi roi = p.Get<ImageRoi>("roi", ImageRoi(0,0,0,0));
            device_params.Set("Width" , roi.w);
            device_params.Set("Height" , roi.h);
            device_params.Set("OffsetX", roi.x);
            device_params.Set("OffsetY", roi.y);
        }else{
            // Unrecognised keys are passed straight through to the device.
            device_params.Set(key, param.second);
        }
    }

    // Empty model/serial strings mean "match any device".
    InitDevice(model_name.empty() ? 0 : model_name.c_str(),
               serial_number.empty() ? 0 : serial_number.c_str(), index);
    SetDeviceParams(device_params);
    InitStream();
    InitPangoStreams();
    InitPangoDeviceProperties();
    InitBuffers(buffer_count);
}
std::shared_ptr<CameraDriverInterface> GetDevice(const Uri& uri) { float fps = uri.properties.Get<float>("fps", 0); int exp = uri.properties.Get<int>("exp", 0); float gain = uri.properties.Get<float>("gain", 0.5); std::string mode = uri.properties.Get<std::string>("mode", "MONO8"); ImageDim dims = uri.properties.Get<ImageDim>("size", ImageDim(640,480)); ImageRoi ROI = uri.properties.Get<ImageRoi>("roi", ImageRoi(0,0,0,0)); int sync = uri.properties.Get<int>("sync", 0); int binning = uri.properties.Get<int>("binning", 0); int bus_cams = uri.properties.Get<int>("bus_cams", 0); std::vector<std::string> vector_ids; printf("bus_cams: 0x%x\n", bus_cams); while(true) { std::stringstream ss; ss << "id" << vector_ids.size(); const std::string key = ss.str(); if(!uri.properties.Contains(key)) { break; } vector_ids.push_back(uri.properties.Get<std::string>(key, "")); } if(ROI.w == 0 && ROI.h == 0) { ROI.w = dims.x; ROI.h = dims.y; } XI_IMG_FORMAT xi_mode; if (mode == "RAW8") { xi_mode = XI_RAW8; } else if (mode == "RAW16") { xi_mode = XI_RAW16; } else if (mode == "MONO16") { xi_mode = XI_MONO16; } else if (mode == "RGB24") { xi_mode = XI_RGB24; } else if (mode == "RGB32") { xi_mode = XI_RGB32; }else { xi_mode = XI_MONO8; } XimeaDriver* pDriver = new XimeaDriver(vector_ids, fps, exp, gain, xi_mode, ROI, sync, binning, bus_cams); return std::shared_ptr<CameraDriverInterface>(pDriver); }
std::shared_ptr<CameraDriverInterface> GetDevice(const Uri& uri) { ImageDim Dims = uri.properties.Get("size", ImageDim(1920,1080)); bool bRGB = uri.properties.Get("rgb", true); bool bDepth = uri.properties.Get("depth", true); bool bIR = uri.properties.Get("ir", false); bool bColor = uri.properties.Get("color", true); bool bAlign = uri.properties.Get("align", false); std::string sPipeline = uri.properties.Get("pipeline", std::string("opencl")); Freenect2Driver* pDriver = new Freenect2Driver( Dims.x, Dims.y, bRGB, bDepth, bIR, bColor, bAlign,sPipeline ); return std::shared_ptr<CameraDriverInterface>( pDriver ); }
std::shared_ptr<CameraDriverInterface> GetDevice(const Uri& uri) { ImageDim dims = uri.properties.Get("size", ImageDim(640, 480)); bool capture_color = uri.properties.Get("rgb", true); bool capture_depth = uri.properties.Get("depth", true); bool capture_ir0 = uri.properties.Get("ir0", false); bool capture_ir1 = uri.properties.Get("ir1", false); double exposure = uri.properties.Get("exposure", 0.0); double gain = uri.properties.Get("gain", 64); double emitter = uri.properties.Get("emitter", 0.462); int frame_rate = uri.properties.Get("fps", 30); std::vector<std::string> ids; while (true) { std::stringstream stream; stream << "id" << ids.size(); const std::string key = stream.str(); if (!uri.properties.Contains(key)) break; ids.push_back(uri.properties.Get<std::string>(key, "")); } std::shared_ptr<RealSense2Driver> driver = std::make_shared<RealSense2Driver>(dims.x, dims.y, frame_rate, capture_color, capture_depth, capture_ir0, capture_ir1, ids); if (capture_color) { const int channel = capture_ir0 + capture_ir1; driver->SetExposure(exposure, channel); driver->SetGain(gain, channel); } for (size_t i = 0; i < driver->NumDevices(); ++i) { driver->SetEmitter(i, emitter); } return driver; }
//----------------------------------------------------------------------------// BaseDim* ImageDim::clone() const { return CEGUI_NEW_AO ImageDim(*this); }