void DisplayChannel::streams_time()
{
    _next_timer_time = 0;
    Lock lock(_streams_lock);
    uint32_t mm_time = get_client().get_mm_time();
    uint32_t next_time = 0;
    VideoStream* stream = _active_streams;
    while (stream) {
        uint32_t next_frame_time;
        if ((next_frame_time = stream->handle_timer_update(mm_time))) {
            if (!next_time || int(next_frame_time - next_time) < 0) {
                next_time = next_frame_time;
            }
        }
        stream = stream->next;
    }
    Lock timer_lock(_timer_lock);
    mm_time = get_client().get_mm_time();
    next_time = mm_time + 15;
    if (next_time && (!_next_timer_time || int(next_time - _next_timer_time) < 0)) {
        get_client().activate_interval_timer(*_streams_timer, MAX(int(next_time - mm_time), 0));
        _next_timer_time = next_time;
    } else if (!_next_timer_time) {
        get_client().deactivate_interval_timer(*_streams_timer);
    }
    timer_lock.unlock();
    lock.unlock();
    Platform::yield();
}
Example #2
OniStatus Recorder::attachStream(VideoStream& stream, OniBool allowLossyCompression)
{
    if (m_wasStarted)
    {
        return ONI_STATUS_ERROR;
    }

    xnl::LockGuard<AttachedStreams> guard(m_streams);
    VideoStream* pStream = &stream;
    if (m_streams.Find(pStream) == m_streams.End())
    {
        if (ONI_STATUS_OK == pStream->addRecorder(*this))
        {
            m_streams[pStream].nodeId                    = ++m_maxId;
            m_streams[pStream].pCodec                    = NULL;
            m_streams[pStream].allowLossyCompression     = allowLossyCompression;
            m_streams[pStream].frameId                   = 0;
            m_streams[pStream].lastOutputTimestamp       = 0;
            m_streams[pStream].lastInputTimestamp        = 0;
            m_streams[pStream].lastNewDataRecordPosition = 0;
            m_streams[pStream].dataIndex.Clear();
            send(Message::MESSAGE_ATTACH, pStream);
            return ONI_STATUS_OK;
        }
    }
    return ONI_STATUS_ERROR;
}
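This internal method backs the public openni::Recorder API. A minimal public-side sketch (the .oni path is a placeholder, and the depth stream is assumed created and started as in the later examples):

openni::Recorder recorder;
if (recorder.create("capture.oni") == openni::STATUS_OK)
{
    recorder.attach(depth, false);  // routes to Recorder::attachStream(); must happen before start()
    recorder.start();
    // ... frames from the attached stream are written to capture.oni ...
    recorder.stop();
    recorder.destroy();
}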
Example #3
OniStatus Device::close()
{
	--m_openCount;

	if (m_openCount == 0)
	{
		while(m_streams.Begin() != m_streams.End())
		{
			VideoStream* pStream = *m_streams.Begin();
			pStream->stop();
			m_streams.Remove(pStream);
		}
		
		for (int i = 0; i < MAX_SENSORS_PER_DEVICE; ++i)
		{
			if (m_sensors[i] != NULL)
			{
				XN_DELETE(m_sensors[i]);
				m_sensors[i] = NULL;
			}
		}

		if (m_deviceHandle != NULL)
		{
			m_driverHandler.deviceClose(m_deviceHandle);
		}
		m_deviceHandle = NULL;
	}

	return ONI_STATUS_OK;
}
Example #4
  virtual ~SensorStreamManager()
  {
    stream_.removeNewFrameListener(this);
    stream_.stop();
    stream_.destroy();

    publisher_.shutdown();
  }
Example #5
void* VideoStream::ReadVideoFrame(void *arg)
{
    VideoStream *h = (VideoStream*)arg;

    //Read the video frame by frame
    h->PlayImageSlots();
    return NULL;
}
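A sketch of how this helper would be handed to a worker thread, assuming ReadVideoFrame is declared static so its signature matches pthread's start routine:

VideoStream stream;            // assumed to be opened/configured elsewhere
pthread_t tid;
// The helper casts the argument back to VideoStream* and loops inside PlayImageSlots()
pthread_create(&tid, NULL, VideoStream::ReadVideoFrame, &stream);
pthread_join(tid, NULL);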
int kinect_init()
{
    Status rc = OpenNI::initialize();
    if (rc != STATUS_OK)
    {
        printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
        return 1;
    }

    rc = device.open(ANY_DEVICE);
    if (rc != STATUS_OK)
    {
        printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
        return 2;
    }

    if (device.getSensorInfo(SENSOR_DEPTH) != NULL)
    {
        rc = depth.create(device, SENSOR_DEPTH);
        if (rc != STATUS_OK)
        {
            printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
            return 3;
        }

        const SensorInfo* sinfo = device.getSensorInfo(SENSOR_DEPTH);
        const Array<VideoMode>& modes = sinfo->getSupportedVideoModes();

        for (int i=0; i<modes.getSize(); i++) {
            printf("%i: %ix%i, %i fps, %i format\n",
                i,
                modes[i].getResolutionX(),
                modes[i].getResolutionY(),
                modes[i].getFps(),
                modes[i].getPixelFormat()
                );
        }

        //rc = depth.setVideoMode(modes[0]); // 320x240, 30fps, format: 100
        rc = depth.setVideoMode(modes[4]); // 640x480, 30fps, format: 100

        if (rc != openni::STATUS_OK) {
            printf("Failed to set depth resolution\n");
            return -1;
        }
    }

    rc = depth.start();
    if (rc != STATUS_OK)
    {
        printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
        return 4;
    }

    return 0;
}
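A minimal caller for the initializer above, reusing the same global device and depth objects; the non-zero return codes map to the failure points inside kinect_init():

int main()
{
    if (kinect_init() != 0)
        return 1;

    VideoFrameRef frame;
    if (depth.readFrame(&frame) == STATUS_OK)
        printf("Got a %ix%i depth frame\n", frame.getWidth(), frame.getHeight());

    depth.stop();
    depth.destroy();
    device.close();
    OpenNI::shutdown();
    return 0;
}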
Example #7
	__declspec(dllexport) Status VideoStream_create(VideoStream*& vs, Device* device, SensorType sensorType)
	{
		 VideoStream* vsl = new VideoStream();
		 Status status = vsl->create(*device, sensorType);
		 if (status == STATUS_OK)
			 vs = vsl;
		 else
			 VideoStream_destroy(vsl);
		 return status;
	}
int readOpenNiColorAndDepth(VideoStream &color , VideoStream &depth,VideoFrameRef &colorFrame,VideoFrameRef &depthFrame)
{
  #if USE_WAITFORANYSTREAM_TO_GRAB
   #warning "Please turn #define USE_WAITFORANYSTREAM_TO_GRAB 0"
   #warning "This is a bad idea taken from OpenNI2/Samples/SimpleViewer , we dont just want to update 'any' frame we really want to snap BOTH and do that sequentially"
   #warning "It is better to sequencially grab them instead of waiting for any stream a couple of times "
   openni::VideoStream** m_streams = new openni::VideoStream*[2];
   m_streams[0] = &depth;
   m_streams[1] = &color;


  unsigned char haveDepth=0, haveColor=0;
  int changedIndex;
  while ((!haveDepth) || (!haveColor))
  {
    openni::Status rc = openni::OpenNI::waitForAnyStream(m_streams, 2, &changedIndex);
    if (rc != openni::STATUS_OK)
    {
      fprintf(stderr,"Wait failed\n");
      delete[] m_streams;
      return 0;
    }

    switch (changedIndex)
    {
    case 0:
      depth.readFrame(&depthFrame);
      haveDepth=1;
      break;
    case 1:
      color.readFrame(&colorFrame);
      haveColor=1;
      break;
    default:
      printf("Error in wait\n");
      delete[] m_streams;
      return 0;
    }
  }

  delete[] m_streams;
  return 1;
  #else
    //Using serial frame grabbing
    readFrameBlocking(depth,depthFrame,MAX_TRIES_FOR_EACH_FRAME); // depth.readFrame(&depthFrame);
    readFrameBlocking(color,colorFrame,MAX_TRIES_FOR_EACH_FRAME); // color.readFrame(&colorFrame);

    if(depthFrame.isValid() && colorFrame.isValid()) { return 1; }

    fprintf(stderr,"Depth And Color frames are wrong!\n");
  #endif
    return 0;

}
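A hypothetical capture loop over this grabber; keepRunning stands in for whatever stop condition the application uses, and the color/depth streams are assumed created and started beforehand:

VideoFrameRef colorFrame, depthFrame;
while (keepRunning)
{
    if (!readOpenNiColorAndDepth(color, depth, colorFrame, depthFrame))
        break;  // wait or read failed
    // colorFrame and depthFrame now hold one matched pair
}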
Example #9
  virtual bool tryConfigureVideoMode(VideoMode& mode)
  {
    bool result = true;
    VideoMode old = stream_.getVideoMode();

    if(stream_.setVideoMode(mode) != STATUS_OK)
    {
      ROS_ERROR_STREAM_COND(stream_.setVideoMode(old) != STATUS_OK, "Failed to recover old video mode!");
      result = false;
    }

    return result;
  }
Example #10
/**************************************
* startReceivingVideo
*	Function helper for thread
**************************************/
void* VideoStream::startReceivingVideo(void *par)
{
	Log("RecVideoThread [%p]\n",pthread_self());

	//Get the object
	VideoStream *conf = (VideoStream *)par;

	//Block the signals
	blocksignals();

	//And run
	conf->RecVideo();
	//Exit
	return NULL;
}
Example #11
/**************************************
* startSendingVideo
*	Function helper for thread
**************************************/
void* VideoStream::startSendingVideo(void *par)
{
	Log("SendVideoThread [%p]\n",pthread_self());

	//Get the object
	VideoStream *conf = (VideoStream *)par;

	//Block the signals
	blocksignals();

	//And run the function
	conf->SendVideo();
	//Exit
	return NULL;
}
void DisplayChannel::handle_stream_data(RedPeer::InMessage* message)
{
    SpiceMsgDisplayStreamData* stream_data = (SpiceMsgDisplayStreamData*)message->data();
    VideoStream* stream;

    if (stream_data->id >= _streams.size() || !(stream = _streams[stream_data->id])) {
        THROW("invalid stream");
    }

    if (message->size() < sizeof(SpiceMsgDisplayStreamData) + stream_data->data_size) {
        THROW("access violation");
    }

    stream->push_data(stream_data->multi_media_time, stream_data->data_size, stream_data->data);
}
void DisplayChannel::on_streams_trigger()
{
#ifndef RED64
    Lock lock(_mark_lock);
#endif
    uint64_t update_mark = _update_mark;
#ifndef RED64
    lock.unlock();
#endif
    VideoStream* stream = _active_streams;
    while (stream) {
        stream->handle_update_mark(update_mark);
        stream = stream->next;
    }
}
Example #14
 virtual void onSubscriptionChanged(const image_transport::SingleSubscriberPublisher& topic)
 {
   if(topic.getNumSubscribers() > 0)
   {
     if(!running_ && stream_.start() == STATUS_OK)
     {
       running_ = true;
     }
   }
   else
   {
     stream_.stop();
     running_ = false;
   }
 }
void XtionDepthDriverImpl::onNewFrame(VideoStream &stream)
{
  VideoFrameRef ref;
  stream.readFrame(&ref);
  _lastCaptured = XtionDepthImage(ref.getData(), ref.getDataSize(),
    ref.getWidth(), ref.getHeight(), 0, this);
}
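For onNewFrame() to fire, the driver has to register itself as a frame listener on the stream; a sketch, assuming XtionDepthDriverImpl derives from openni::VideoStream::NewFrameListener:

openni::VideoStream depthStream;
// ... depthStream.create(device, SENSOR_DEPTH) and depthStream.start() as in the other examples ...
depthStream.addNewFrameListener(&driverImpl);  // onNewFrame() now runs once per captured frame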
Example #16
static void
VDADecoderCallback (void *decompressionOutputRefCon, CFDictionaryRef frameInfo, OSStatus status, uint32_t infoFlags, CVImageBufferRef imageBuffer)
{
	MoonVDADecoder *decoder = (MoonVDADecoder *) decompressionOutputRefCon;
	VideoStream *vs = (VideoStream *) decoder->GetStream ();

	// FIXME: Is this always 1 thread? Can we optimize this?
	decoder->GetDeployment ()->RegisterThread ();

	Deployment::SetCurrent (decoder->GetDeployment ());

	if (imageBuffer == NULL) {
		return;
	}

	OSType format_type = CVPixelBufferGetPixelFormatType (imageBuffer);
	if (format_type != kCVPixelFormatType_422YpCbCr8) {
		g_warning ("Mismatched format in VDA");
		return;
	}

	MediaFrame *mf = (MediaFrame *) CFDictionaryGetValue (frameInfo, CFSTR ("MoonMediaFrame"));

	mf->AddState (MediaFrameVUY2);
	mf->FreeBuffer ();
	mf->SetBufLen (0);

	mf->srcSlideY = 0;
	mf->srcSlideH = vs->GetHeight ();

	mf->width = vs->GetWidth ();
	mf->height = vs->GetHeight ();

	CVPixelBufferLockBaseAddress (imageBuffer, 0);

	mf->data_stride [0] = (uint8_t *) CVPixelBufferGetBaseAddress (imageBuffer);
	mf->srcStride [0] = CVPixelBufferGetBytesPerRow (imageBuffer);

	mf->AddState (MediaFrameDecoded);

	mf->decoder_specific_data = imageBuffer;
	CVPixelBufferRetain (imageBuffer);

	decoder->ReportDecodeFrameCompleted (mf);

	mf->unref ();
}
Example #17
int main(void) {
	VideoStream *videoStream = new VideoStream();
	DifferenceImage *differenceImage = new DifferenceImage();
	HeatImage *heatImage = new HeatImage();

	Mat *image = NULL;
	Mat *bgImage = NULL;
	Mat *diffImage = NULL;
	Mat *htImage = NULL;

	if (DEBUG == true) {
		namedWindow("actImage", CV_WINDOW_AUTOSIZE);
		namedWindow("bgImage", CV_WINDOW_AUTOSIZE);
	}

	namedWindow("diffImage", CV_WINDOW_AUTOSIZE);
	namedWindow("heatImage", CV_WINDOW_AUTOSIZE);

	bgImage = videoStream->getFrame();

	while (true) {
		image = videoStream->getFrame();

		differenceImage->generate(image, bgImage);
		diffImage = differenceImage->get();

		heatImage->generate(diffImage);
		htImage = heatImage->get();

		if (DEBUG == true) {
			imshow("actImage", *image);
			imshow("bgImage", *image);
		}

		imshow("diffImage", *diffImage);
		imshow("heatImage", *htImage);
		waitKey(1);

		heatImage->degenerate();

		delete bgImage;
		bgImage = image;
	}
	delete image;

	return 0;
}
Example #18
  virtual void onNewFrame(VideoStream& stream)
  {
    ros::Time ts = ros::Time::now();

    VideoFrameRef frame;
    stream.readFrame(&frame);

    sensor_msgs::Image::Ptr img(new sensor_msgs::Image);
    sensor_msgs::CameraInfo::Ptr info(new sensor_msgs::CameraInfo);

    double scale = double(frame.getWidth()) / double(1280);

    info->header.stamp = ts;
    info->header.frame_id = frame_id_;
    info->width = frame.getWidth();
    info->height = frame.getHeight();
    info->K.assign(0);
    info->K[0] = 1050.0 * scale;
    info->K[4] = 1050.0 * scale;
    info->K[2] = frame.getWidth() / 2.0 - 0.5;
    info->K[5] = frame.getHeight() / 2.0 - 0.5;
    info->P.assign(0);
    info->P[0] = 1050.0 * scale;
    info->P[5] = 1050.0 * scale;
    info->P[2] = frame.getWidth() / 2.0 - 0.5;
    info->P[6] = frame.getHeight() / 2.0 - 0.5;

    switch(frame.getVideoMode().getPixelFormat())
    {
    case PIXEL_FORMAT_GRAY8:
      img->encoding = sensor_msgs::image_encodings::MONO8;
      break;
    case PIXEL_FORMAT_GRAY16:
      img->encoding = sensor_msgs::image_encodings::MONO16;
      break;
    case PIXEL_FORMAT_YUV422:
      img->encoding = sensor_msgs::image_encodings::YUV422;
      break;
    case PIXEL_FORMAT_RGB888:
      img->encoding = sensor_msgs::image_encodings::RGB8;
      break;
    case PIXEL_FORMAT_SHIFT_9_2:
    case PIXEL_FORMAT_DEPTH_1_MM:
      img->encoding = sensor_msgs::image_encodings::TYPE_16UC1;
      break;
    default:
      ROS_WARN("Unknown OpenNI pixel format!");
      break;
    }
    img->header.stamp = ts;
    img->header.frame_id = frame_id_;
    img->height = frame.getHeight();
    img->width = frame.getWidth();
    img->step = frame.getStrideInBytes();
    img->data.resize(frame.getDataSize());
    std::copy(static_cast<const uint8_t*>(frame.getData()), static_cast<const uint8_t*>(frame.getData()) + frame.getDataSize(), img->data.begin());

    publish(img, info);
  }
Example #19
OniStatus Recorder::record(VideoStream& stream, OniFrame& aFrame)
{
    if (!m_started)
    {
        return ONI_STATUS_ERROR;
    }
    xnl::LockGuard< AttachedStreams > guard(m_streams);
    VideoStream* pStream = &stream;
    if (m_streams.Find(pStream) == m_streams.End())
    {
        return ONI_STATUS_BAD_PARAMETER;
    }
    OniFrame* pFrame = &aFrame;
    pStream->frameAddRef(pFrame);
    send(Message::MESSAGE_RECORD, pStream, pFrame);
    return ONI_STATUS_OK;
}
Example #20
  virtual bool beginConfigure()
  {
    was_running_ = running_;
    if(was_running_) stream_.stop();
    running_ = false;

    return true;
  }
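Together with tryConfigureVideoMode() from Example #9, this looks like the bracket around a reconfiguration; a sketch of the presumed sequence, where endConfigure() is an assumed counterpart that restarts the stream if was_running_ is set:

manager.beginConfigure();             // stops the stream, remembers whether it was running
manager.tryConfigureVideoMode(mode);  // applies the new mode, rolls back on failure
manager.endConfigure();               // assumed: restarts the stream when it was running before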
void DisplayChannel::handle_stream_clip(RedPeer::InMessage* message)
{
    SpiceMsgDisplayStreamClip* clip_data = (SpiceMsgDisplayStreamClip*)message->data();
    VideoStream* stream;
    uint32_t num_clip_rects;
    SpiceRect* clip_rects;

    if (clip_data->id >= _streams.size() || !(stream = _streams[clip_data->id])) {
        THROW("invalid stream");
    }

    if (message->size() < sizeof(SpiceMsgDisplayStreamClip)) {
        THROW("access violation");
    }
    set_clip_rects(clip_data->clip, num_clip_rects, clip_rects);
    Lock lock(_streams_lock);
    stream->set_clip(clip_data->clip.type, num_clip_rects, clip_rects);
}
Example #22
OniStatus Context::createStream(OniDeviceHandle device, OniSensorType sensorType, OniStreamHandle* pStream)
{

	// Create the stream.
	Device* pDevice = device->pDevice;
	VideoStream* pMyStream = pDevice->createStream(sensorType);
	if (pMyStream == NULL)
	{
		m_errorLogger.Append("Context: Couldn't create stream from device:%08x, source: %d", device, sensorType);
		return ONI_STATUS_ERROR;
	}

	pMyStream->setNewFrameCallback(newFrameCallback, this);

	// Create stream frame holder and connect it to the stream.
	StreamFrameHolder* pFrameHolder = XN_NEW(StreamFrameHolder, m_frameManager, pMyStream);
	if (pFrameHolder == NULL)
	{
		m_errorLogger.Append("Context: Couldn't create stream frame holder from device:%08x, source: %d", device, sensorType);
		XN_DELETE(pMyStream);
		return ONI_STATUS_ERROR;
	}
	pMyStream->setFrameHolder(pFrameHolder);

	// Create handle object.
	_OniStream* pStreamHandle = XN_NEW(_OniStream);
	if (pStreamHandle == NULL)
	{
		m_errorLogger.Append("Couldn't allocate memory for StreamHandle");
		XN_DELETE(pFrameHolder);
		pFrameHolder = NULL;
		XN_DELETE(pMyStream);
		pMyStream = NULL;
		return ONI_STATUS_ERROR;
	}
	*pStream = pStreamHandle;
	pStreamHandle->pStream = pMyStream;

	m_cs.Lock();
	m_streams.AddLast(pMyStream);
	m_cs.Unlock();

	return ONI_STATUS_OK;
}
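This is the implementation behind the corresponding C API entry point; a hypothetical public-side call, assuming a device handle obtained via oniDeviceOpen():

OniStreamHandle stream;
if (oniCreateStream(device, ONI_SENSOR_DEPTH, &stream) == ONI_STATUS_OK)
{
    oniStreamStart(stream);
    // ... read frames ...
    oniStreamDestroy(stream);
}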
Example #23
  SensorStreamManager(ros::NodeHandle& nh, Device& device, SensorType type, std::string name, std::string frame_id, VideoMode& default_mode) :
    device_(device),
    default_mode_(default_mode),
    name_(name),
    frame_id_(frame_id),
    running_(false),
    nh_(nh, name_),
    it_(nh_),
    camera_info_manager_(nh_)
  {
    assert(device_.hasSensor(type));

    callback_ = boost::bind(&SensorStreamManager::onSubscriptionChanged, this, _1);
    publisher_ = it_.advertiseCamera("image_raw", 1, callback_, callback_);

    ROS_ERROR_STREAM_COND(stream_.create(device_, type) != STATUS_OK, "Failed to create stream '" << toString(type) << "'!");
    stream_.addNewFrameListener(this);
    ROS_ERROR_STREAM_COND(stream_.setVideoMode(default_mode_) != STATUS_OK, "Failed to set default video mode for stream '" << toString(type) << "'!");
  }
Example #24
int main()
{
	Status rc = OpenNI::initialize();
	if (rc != STATUS_OK)
	{
		printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
		return 1;
	}

	OpenNIEventListener eventPrinter;
	OpenNI::addListener(&eventPrinter);

	Device device;
	rc = device.open(ANY_DEVICE);
	if (rc != STATUS_OK)
	{
		printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
		return 2;
	}

	VideoStream depth;

	if (device.getSensorInfo(SENSOR_DEPTH) != NULL)
	{
		rc = depth.create(device, SENSOR_DEPTH);
		if (rc != STATUS_OK)
		{
			printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
		}
	}
	rc = depth.start();
	if (rc != STATUS_OK)
	{
		printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
	}


	PrintCallback depthPrinter;

	// Register to new frame
	depth.addListener(&depthPrinter);

	// Wait while we're getting frames through the printer
	while (!wasKeyboardHit())
	{
		Sleep(100);
	}

	depth.removeListener(&depthPrinter);


	depth.stop();
	depth.destroy();
	device.close();
	OpenNI::shutdown();

	return 0;
}
Example #25
	void WriteProperties( VideoStream& rStream )
	{
		for_each( vProperties.begin(), vProperties.end(), [&rStream]( CProperty& rProp ){
			int iSize = rProp.vData.size();
			if( rStream.setProperty( rProp.iIdx, rProp.vData.data(), iSize ) != STATUS_OK )
			{
				cerr << "Property " << rProp.sName << " write fail" << endl;
			}
		} );
	}
Example #26
const short unsigned int *wb_kinect_get_range_image_mm(WbDeviceTag tag) {
  if(kinectenable) {
    int i;
    int changedStreamDummy;
    VideoStream* pStream = &depth;

    rc = OpenNI::waitForAnyStream(&pStream, 1, &changedStreamDummy, SAMPLE_READ_WAIT_TIMEOUT);

    if (rc != STATUS_OK)
    {
      printf("Wait failed! (timeout is %d ms)\n%s\n", SAMPLE_READ_WAIT_TIMEOUT, OpenNI::getExtendedError());
      return NULL;

    }

    rc = depth.readFrame(&frame);

    if (rc != STATUS_OK)
    {
      printf("Read failed!\n%s\n", OpenNI::getExtendedError());
      return NULL;

    }

    if (frame.getVideoMode().getPixelFormat() != PIXEL_FORMAT_DEPTH_1_MM && frame.getVideoMode().getPixelFormat() != PIXEL_FORMAT_DEPTH_100_UM)
    {
      printf("Unexpected frame format\n");
      return NULL;
    }

    DepthPixel* pDepth = (DepthPixel*)frame.getData();
    height_pixels = frame.getHeight();
    width_pixels = frame.getWidth();

    if (width_pixels>160){
    //Fix blind column
    	for(i=0;i<height_pixels;i++){
          pDepth[i*width_pixels + 0] = pDepth[i*width_pixels + 3]; 
          pDepth[i*width_pixels + 1] = pDepth[i*width_pixels + 3];  
          pDepth[i*width_pixels + 2] = pDepth[i*width_pixels + 3];  
    	}
    } else {
        for(i=0;i<height_pixels;i++){
          pDepth[i*width_pixels + 0] = pDepth[i*width_pixels + 1];
        }
    }

    return pDepth;
  }
  // kinect not enabled
  fprintf(stderr, "2.Please enable the kinect before using wb_kinect_get_range_image()\n");
  return NULL;
}
Example #27
OniStatus Recorder::detachStream(VideoStream& stream)
{
    xnl::LockGuard<AttachedStreams> guard(m_streams);
    VideoStream* pStream = &stream;
    if (m_streams.End() != m_streams.Find(pStream))
    {
        stream.removeRecorder(*this);
        send(Message::MESSAGE_DETACH, pStream);
        return ONI_STATUS_OK;
    }
    return ONI_STATUS_BAD_PARAMETER;
}
Example #28
void
MoonVDADecoder::OpenDecoderAsyncInternal ()
{
	IMediaStream *stream = GetStream ();
	VideoStream *vs = (VideoStream *) stream;
	int format = 'avc1';

	CFDataRef avcCData = CFDataCreate (kCFAllocatorDefault, (const uint8_t*) stream->GetRawExtraData (), stream->GetRawExtraDataSize ());
	OSStatus status = CreateDecoder ((SInt32) vs->GetHeight (), (SInt32) vs->GetWidth (), (OSType) format, avcCData);

	if (avcCData) CFRelease (avcCData);

	if (status == kVDADecoderNoErr) {
		SetPixelFormat (MoonPixelFormat422YpCbCr8);

		ReportOpenDecoderCompleted ();
	} else {
		char *str = g_strdup_printf ("MoonVDADecoder failed to open codec (result: %d)", status);
		ReportErrorOccurred (str);
		g_free (str);
	}
}
Example #29
int readFrameBlocking(VideoStream &stream,VideoFrameRef &frame , unsigned int max_tries)
{
  unsigned int tries_for_frame = 0 ;
  while (  tries_for_frame < max_tries  )
          {
            stream.readFrame(&frame);
	        if (frame.isValid()) { return 1; }
	        ++tries_for_frame;
          }

  if (!frame.isValid()) { fprintf(stderr,"Could not get a valid frame even after %u tries \n",max_tries); return 0; }
  return (tries_for_frame<max_tries);
}
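A one-off usage sketch; the retry budget is an assumed value (the MAX_TRIES_FOR_EACH_FRAME macro used by the grabber in Example #7 is presumably defined along these lines):

VideoFrameRef frame;
const unsigned int maxTries = 10;  // assumed; tune for the device's frame rate
if (readFrameBlocking(depth, frame, maxTries))
    printf("Depth frame %ix%i\n", frame.getWidth(), frame.getHeight());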
Example #30
void wb_kinect_enable(WbDeviceTag tag, int colorMS, int rangeMS, bool advancedModel) {
  rc = OpenNI::initialize();
  if (rc != STATUS_OK)
  {
    printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
    return;
  }

  rc = device.open(ANY_DEVICE);
  if (rc != STATUS_OK)
  {
    printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
    return;
  }

  if (device.getSensorInfo(SENSOR_DEPTH) != NULL)
  {
    rc = depth.create(device, SENSOR_DEPTH);
    if (rc != STATUS_OK)
    {
      printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
      return;
    }
  }

  rc = depth.start();
  if (rc != STATUS_OK)
  {
    printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
    return;
  }

  // Do one read to update the width and height.
  kinectenable = true;
  wb_kinect_get_range_image_mm(tag);
}