Beispiel #1
0
// Refresh `device_properties["openni"]` with the current state of every
// attached device and every configured stream, as two JSON arrays
// ("devices" and "streams"). Properties that a device/stream does not
// support are silently omitted.
void OpenNi2Video::UpdateProperties()
{
    json::value& jsopenni = device_properties["openni"];

    // --- Per-device properties -------------------------------------------
    json::value& jsdevices = jsopenni["devices"];
    jsdevices = json::value(json::array_type,false);
    jsdevices.get<json::array>().resize(numDevices);
    for (size_t i=0; i<numDevices; ++i) {
      json::value& jsdevice = jsdevices[i];
// Query `param` from devices[i]; record it under its stringized name only
// when the query succeeds, so unsupported properties leave no key behind.
#define SET_PARAM(param_type, param) \
      { \
        param_type val; \
        if(devices[i].getProperty(param, &val) == openni::STATUS_OK) { \
          jsdevice[#param] = val; \
        } \
      }
      SET_PARAM( unsigned long long, XN_MODULE_PROPERTY_USB_INTERFACE );
      SET_PARAM( bool,  XN_MODULE_PROPERTY_MIRROR );
      // Serial number is fetched into a fixed buffer; the return status is
      // ignored here, so on failure the JSON may hold stale stack bytes —
      // NOTE(review): consider checking STATUS_OK as SET_PARAM does.
      char serialNumber[1024];
      devices[i].getProperty(ONI_DEVICE_PROPERTY_SERIAL_NUMBER, &serialNumber);
      jsdevice["ONI_DEVICE_PROPERTY_SERIAL_NUMBER"] = std::string(serialNumber);
#undef SET_PARAM
    }

    // --- Per-stream properties -------------------------------------------
    json::value& stream = jsopenni["streams"];
    stream = json::value(json::array_type,false);
    stream.get<json::array>().resize(Streams().size());
    for(unsigned int i=0; i<Streams().size(); ++i) {
        if(sensor_type[i].sensor_type != OpenNiUnassigned)
        {
// Same pattern as above, but querying video_stream[i] into jsstream.
#define SET_PARAM(param_type, param) \
            {\
                param_type val; \
                if(video_stream[i].getProperty(param, &val) == openni::STATUS_OK) { \
                    jsstream[#param] = val; \
                } \
            }

            json::value& jsstream = stream[i];
            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_INPUT_FORMAT );
            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_CROPPING_MODE );

            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_CLOSE_RANGE );
            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_WHITE_BALANCE_ENABLED );
            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_GAIN );
            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_HOLE_FILTER );
            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_REGISTRATION_TYPE );
            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_CONST_SHIFT );
            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_PIXEL_SIZE_FACTOR );
            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_MAX_SHIFT );
            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_PARAM_COEFF );
            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_SHIFT_SCALE );
            SET_PARAM( unsigned long long, XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE );
            SET_PARAM( double, XN_STREAM_PROPERTY_ZERO_PLANE_PIXEL_SIZE );
            SET_PARAM( double, XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE );
            SET_PARAM( double, XN_STREAM_PROPERTY_DCMOS_RCMOS_DISTANCE );
#undef SET_PARAM
        }
    }
}
// Query the Perforce server for its stream list and return it keyed by
// the "Stream" field of each record. Returns an empty map on command
// failure or empty output.
PerforceDepotView::Streams PerforceDepotView::GetStreams()
{
	std::string cmd = "streams";
	auto results = RunCommand(cmd);
	if (!results || results->hasErrors())
		return Streams();

	std::string output = results->output();
	if (output.empty())
		return Streams();

	Streams streams;
	const std::string delimiter = "\n\n";

	// Walk the output one blank-line-delimited record at a time.
	// A trailing record without a closing "\n\n" is intentionally ignored,
	// matching the record format the server emits.
	for (size_t pos = 0, next = output.find(delimiter);
	     pos != output.length() && next != std::string::npos;
	     pos = next + delimiter.size(), next = output.find(delimiter, pos))
	{
		const std::string record = output.substr(pos, next - pos);
		if (record.empty())
			continue;

		auto fields = ParseResults(record);
		const auto key = fields.find("Stream");
		if (key != fields.end())
			streams[key->second] = std::move(fields);
	}
	return streams;
}
Beispiel #3
0
// Toggle the close-range depth mode on every stream, best-effort:
// streams that do not support the property simply reject it.
void OpenNi2Video::SetDepthCloseRange(bool enable)
{
    const size_t num_streams = Streams().size();
    for(size_t s = 0; s < num_streams; ++s) {
        video_stream[s].setProperty(XN_STREAM_PROPERTY_CLOSE_RANGE, enable);
    }
}
Beispiel #4
0
// Enable or disable horizontal mirroring on every stream, best-effort:
// return codes are deliberately ignored.
void OpenNi2Video::SetMirroring(bool enable)
{
    const size_t num_streams = Streams().size();
    for(size_t s = 0; s < num_streams; ++s) {
        video_stream[s].setMirroringEnabled(enable);
    }
}
Beispiel #5
0
// Toggle the depth hole filter on every stream, best-effort: failures
// are deliberately ignored.
void OpenNi2Video::SetDepthHoleFilter(bool enable)
{
    const size_t num_streams = Streams().size();
    for(size_t s = 0; s < num_streams; ++s) {
        video_stream[s].setProperty(XN_STREAM_PROPERTY_HOLE_FILTER, enable);
        // NOTE(review): gain is forced to 50 as a side effect of toggling
        // the hole filter — confirm this coupling is intentional.
        video_stream[s].setProperty(XN_STREAM_PROPERTY_GAIN,50);
    }
}
Beispiel #6
0
void OpenNi2Video::SetAutoWhiteBalance(bool enable)
{
    // Set this property on all streams exposing CameraSettings
    for(unsigned int i=0; i<Streams().size(); ++i) {
        openni::CameraSettings* cam = video_stream[i].getCameraSettings();
        if(cam) cam->setAutoWhiteBalanceEnabled(enable);
    }
}
Beispiel #7
0
void DebayerVideo::ProcessStreams(unsigned char* out, const unsigned char *in)
{
    for(size_t s=0; s<streams.size(); ++s) {
        const StreamInfo& stin = videoin[0]->Streams()[s];

        if(stin.PixFormat().bpp == 8) {
            Image<unsigned char> img_in  = stin.StreamImage(in);
            Image<unsigned char> img_out = Streams()[s].StreamImage(out);
            ProcessImage(img_out, img_in, methods[s], tile);
        }else if(stin.PixFormat().bpp == 16){
            Image<uint16_t> img_in = stin.StreamImage(in).Reinterpret<uint16_t>();
            Image<uint16_t> img_out = Streams()[s].StreamImage(out).Reinterpret<uint16_t>();
            ProcessImage(img_out, img_in, methods[s], tile);
        }else{
            throw std::runtime_error("debayer: unhandled format combination: " + stin.PixFormat().format );
        }
    }
}
Beispiel #8
0
void MergeVideo::CopyBuffer(unsigned char* dst_bytes, unsigned char* src_bytes)
{
    Image<unsigned char> dst_image = Streams()[0].StreamImage(dst_bytes);
    const size_t dst_pix_bytes = Streams()[0].PixFormat().bpp / 8;

    for(size_t i=0; i < stream_pos.size(); ++i) {
        const StreamInfo& src_stream = src->Streams()[i];
        const Image<unsigned char> src_image = src_stream.StreamImage(src_bytes);
        const Point& p = stream_pos[i];
        for(size_t y=0; y < src_stream.Height(); ++y) {
            // Copy row from src to dst
            std::memcpy(
                dst_image.RowPtr(y + p.y) + p.x * dst_pix_bytes,
                src_image.RowPtr(y), src_stream.RowBytes()
            );
        }
    }
}
Beispiel #9
0
// Read one frame from every assigned, valid stream and pack the frames
// back-to-back into `image`. Per-frame device timestamps are recorded in
// streams_properties, and current_frame_index tracks stream 0's frame.
// Returns true when the last stream processed reported STATUS_OK.
// NOTE(review): `rc` is overwritten on every iteration, so an error in an
// earlier stream can be masked by a later success — confirm intended.
bool OpenNi2Video::GrabNext( unsigned char* image, bool /*wait*/ )
{
    unsigned char* out_img = image;

    openni::Status rc = openni::STATUS_OK;

    for(unsigned int i=0; i<Streams().size(); ++i) {
        // Unassigned slots contribute no data to the output buffer.
        if(sensor_type[i].sensor_type == OpenNiUnassigned) {
            rc = openni::STATUS_OK;
            continue;
        }

        if(!video_stream[i].isValid()) {
            rc = openni::STATUS_NO_DEVICE;
            continue;
        }

        // When IR and RGB are multiplexed, the stream must be started and
        // stopped around each read.
        if(use_ir_and_rgb) video_stream[i].start();

        rc = video_stream[i].readFrame(&video_frame[i]);
        if(rc != openni::STATUS_OK) {
            pango_print_error("Error reading frame:\n%s", openni::OpenNI::getExtendedError() );
        }

        // Debug path: collapse RGB to 8-bit grey. Compile-time disabled.
        const bool toGreyscale = false;
        if(toGreyscale) {
            const int w = streams[i].Width();
            const int h = streams[i].Height();

            openni::RGB888Pixel* pColour = (openni::RGB888Pixel*)video_frame[i].getData();
            // Use a distinct index so the stream index `i` is not shadowed.
            for(int p = 0; p < w*h; p++){
                openni::RGB888Pixel rgb = pColour[p];
                int grey = ((int)(rgb.r&0xFF) +  (int)(rgb.g&0xFF) + (int)(rgb.b&0xFF))/3;
                grey = std::min(255,std::max(0,grey));
                out_img[p] = grey;
            }
        }else{
            memcpy(out_img, video_frame[i].getData(), streams[i].SizeBytes());
        }

        // update frame properties
        (*streams_properties)[i]["devtime_us"] = video_frame[i].getTimestamp();

        if(use_ir_and_rgb) video_stream[i].stop();

        out_img += streams[i].SizeBytes();
    }

    current_frame_index = video_frame[0].getFrameIndex();

    return rc == openni::STATUS_OK;
}
// Grab a frame into `buffer` — the newest available frame when `newest`
// is set, otherwise the next queued one — and, on success, populate
// `images` with per-stream Image views into that buffer.
// Throws VideoException when no video source is open.
bool VideoInput::Grab( unsigned char* buffer, std::vector<Image<unsigned char> >& images, bool wait, bool newest)
{
    if( !video_src ) throw VideoException("No video source open");

    const bool success = newest ? GrabNewest(buffer, wait)
                                : GrabNext(buffer, wait);

    if(success) {
        // Rebuild the view list; each entry aliases `buffer`, not a copy.
        images.clear();
        for(size_t s=0; s < Streams().size(); ++s) {
            images.push_back(Streams()[s].StreamImage(buffer));
        }
    }

    return success;
}
Beispiel #11
0
//! Implement VideoInput::GrabNext()
bool DebayerVideo::GrabNext( unsigned char* image, bool wait )
{    
    if(videoin[0]->GrabNext(buffer,wait)) {
        for(size_t s=0; s<streams.size(); ++s) {
            Image<unsigned char> img_in  = videoin[0]->Streams()[s].StreamImage(buffer);
            Image<unsigned char> img_out = Streams()[s].StreamImage(image);

#ifdef HAVE_DC1394
            dc1394_bayer_decoding_8bit(
                img_in.ptr, img_out.ptr, img_in.w, img_in.h,
                (dc1394color_filter_t)tile, (dc1394bayer_method_t)method
            );
#else
            // use our simple debayering instead
            DownsampleDebayer(img_out, img_in, tile);
#endif
        }
        return true;
    }else{
        return false;
    }
}
Beispiel #12
0
void DebayerVideo::ProcessStreams(unsigned char* out, const unsigned char *in)
{
    for(size_t s=0; s<streams.size(); ++s) {
        const StreamInfo& stin = videoin[0]->Streams()[s];
        Image<unsigned char> img_in  = stin.StreamImage(in);
        Image<unsigned char> img_out = Streams()[s].StreamImage(out);

        if(methods[s] == BAYER_METHOD_NONE) {
            const size_t num_bytes = std::min(img_in.w, img_out.w) * stin.PixFormat().bpp / 8;
            for(size_t y=0; y < img_out.h; ++y) {
                std::memcpy(img_out.RowPtr((int)y), img_in.RowPtr((int)y), num_bytes);
            }
        }else if(stin.PixFormat().bpp == 8) {
            ProcessImage(img_out, img_in, methods[s], tile);
        }else if(stin.PixFormat().bpp == 16){
            Image<uint16_t> img_in16  = img_in.UnsafeReinterpret<uint16_t>();
            Image<uint16_t> img_out16 = img_out.UnsafeReinterpret<uint16_t>();
            ProcessImage(img_out16, img_in16, methods[s], tile);
        }else {
            throw std::runtime_error("debayer: unhandled format combination: " + stin.PixFormat().format );
        }
    }
}
Beispiel #13
0
// Halt capture on every configured stream.
void OpenNi2Video::Stop()
{
    const size_t num_streams = Streams().size();
    for(size_t s = 0; s < num_streams; ++s) {
        video_stream[s].stop();
    }
}