static int dc1394_read_packet(AVFormatContext *c, AVPacket *pkt)
{
    struct dc1394_data *dc1394 = c->priv_data;
    int res;

    /* discard stale frame */
    if (dc1394->current_frame++) {
        if (dc1394_dma_done_with_buffer(&dc1394->camera) != DC1394_SUCCESS)
            av_log(c, AV_LOG_ERROR, "failed to release frame %d\n", dc1394->current_frame);
    }

    res = dc1394_dma_single_capture(&dc1394->camera);

    if (res == DC1394_SUCCESS) {
        dc1394->packet.data = (uint8_t *)(dc1394->camera.capture_buffer);
        dc1394->packet.pts = (dc1394->current_frame * 1000000) / dc1394->fps;
        res = dc1394->packet.size;
    } else {
        av_log(c, AV_LOG_ERROR, "DMA capture failed\n");
        dc1394->packet.data = NULL;
        res = -1;
    }

    *pkt = dc1394->packet;
    return res;
}
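All of the examples on this page share the same acquire/consume/release cycle around dc1394_dma_done_with_buffer(). As a reference point, here is a minimal sketch of that cycle using the legacy libdc1394 v1 DMA API; process_frame() is a hypothetical consumer, and camera setup (a dc1394_cameracapture configured via dc1394_dma_setup_capture() or similar) is assumed to have happened elsewhere.

/* Minimal sketch of the shared capture/release cycle (libdc1394 v1 API).
 * process_frame() is a placeholder for whatever consumes the image. */
if (dc1394_dma_single_capture(&camera) == DC1394_SUCCESS) {
    /* camera.capture_buffer now points into the kernel's DMA ring */
    process_frame(camera.capture_buffer);
    /* hand the buffer back so the driver can refill it */
    dc1394_dma_done_with_buffer(&camera);
}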
Example #2
static void *capture_loop( void *d )
{
	struct dc1394_input *conf = (struct dc1394_input *)d;
	struct frame *f;
	int cam;

	for(;;)
	{
		dc1394_dma_multi_capture( conf->camera, conf->cam_count );
		for( cam = 0; cam < conf->cam_count; ++cam )
		{
			if( conf->cam[cam].running && ( f = new_frame() ) )
			{
				f->length = HEIGHT * WIDTH * 2;
				f->format = FORMAT_RAW_UYVY;
				f->width = WIDTH;
				f->height = HEIGHT;
				f->key = 1;

				memcpy( f->d, conf->camera[cam].capture_buffer,
						f->length );

				if( soft_queue_add( conf->cam[cam].outq, f ) < 0 )
					unref_frame( f );
			}
			dc1394_dma_done_with_buffer( &conf->camera[cam] );
		}
	}

	return NULL;
}
Example #3
/*****************************************************************************
 * Demux:
 *****************************************************************************/
static block_t *GrabVideo( demux_t *p_demux )
{
    demux_sys_t *p_sys = p_demux->p_sys;
    block_t     *p_block = NULL;
    int result = 0;

    if( p_sys->dma_capture )
    {
        result = dc1394_dma_single_capture( &p_sys->camera );
        if( result != DC1394_SUCCESS )
        {
            msg_Err( p_demux, "unable to capture a frame" );
            return NULL;
        }
    }
    else
    {
        result = dc1394_single_capture( p_sys->camera_info.handle,
                                        &p_sys->camera );
        if( result != DC1394_SUCCESS )
        {
            msg_Err( p_demux, "unable to capture a frame" );
            return NULL;
        }
    }

    p_block = block_New( p_demux, p_sys->camera.frame_width *
                                  p_sys->camera.frame_height * 2 );
    if( !p_block )
    {
        msg_Err( p_demux, "cannot get block" );
        return NULL;
    }

    if( !p_sys->camera.capture_buffer )
    {
        msg_Err( p_demux, "capture buffer empty" );
        block_Release( p_block );
        return NULL;
    }

    memcpy( p_block->p_buffer, (const char *)p_sys->camera.capture_buffer,
            p_sys->camera.frame_width * p_sys->camera.frame_height * 2 );

    p_block->i_pts = p_block->i_dts = mdate();
    if( p_sys->dma_capture )
        dc1394_dma_done_with_buffer( &p_sys->camera );
    return p_block;
}
Example #4
static void *AcquireFrames(void *arg) {
  struct timeval tv;
  int    start_sec;
  int cam = (int)(intptr_t)arg;   /* thread arg carries the camera index; cast via intptr_t to avoid truncating a pointer */
  pthread_cleanup_push(dc1394Done, (void *)(intptr_t)cam);
  printf("in acquire\n");
  gettimeofday(&tv, NULL);
  start_sec = tv.tv_sec;
  printf("start sec is %d\n", start_sec);
  for (Cams[cam].next_frame=0; Cams[cam].next_frame < Cams[cam].max_frames; Cams[cam].next_frame++) {
    if (dc1394_dma_single_capture(&Cams[cam].camera)!=DC1394_SUCCESS) {
      fprintf( stderr, "unable to capture a frame\n");
      dc1394_stop_iso_transmission(Cams[cam].handle,Cams[cam].camera.node);
      Cams[cam].running = 0;
      dc1394_dma_release_camera(Cams[cam].handle,&Cams[cam].camera);
      Cams[cam].dma_active = 0;
      dc1394_destroy_handle(Cams[cam].handle);
      Cams[cam].handle = 0;
      Cams[cam].thread_id =0;
      return NULL;
    }
    gettimeofday(&tv, NULL);
    Cams[cam].times[Cams[cam].next_frame] = tv.tv_sec-start_sec+(double)tv.tv_usec*1E-6;
    printf("The time for frame %d is %g\n", Cams[cam].next_frame, Cams[cam].times[Cams[cam].next_frame]);
    memcpy((char *)Cams[cam].frames + Cams[cam].next_frame*Cams[cam].width*Cams[cam].height, Cams[cam].camera.capture_buffer, Cams[cam].width*Cams[cam].height);
    dc1394_dma_done_with_buffer(&Cams[cam].camera);
    printf("got frame %d\n", Cams[cam].next_frame);
  }
  /* clean up active daq */
  dc1394_stop_iso_transmission(Cams[cam].handle,Cams[cam].camera.node);
  Cams[cam].running = 0;
  dc1394_dma_unlisten(Cams[cam].handle, &Cams[cam].camera);
  dc1394_dma_release_camera(Cams[cam].handle,&Cams[cam].camera);
  Cams[cam].dma_active = 0;
  dc1394_destroy_handle(Cams[cam].handle);
  Cams[cam].handle = 0;

  Cams[cam].thread_id =0;
  pthread_cleanup_pop(0);
  return NULL;
}
Example #5
static IplImage* icvRetrieveFrameCAM_DC1394( CvCaptureCAM_DC1394* capture ){
	if(capture->camera->capture_buffer )
	{
		if(capture->convert){
			/* Convert to RGBA */
			unsigned char * src = (unsigned char *)capture->camera->capture_buffer;
			unsigned char * dst = (unsigned char *)capture->frame.imageData;
			switch (capture->color_mode) {
				case COLOR_FORMAT7_RGB8:
					printf("icvRetrieveFrame convert RGB to BGR\n");
					/* Convert RGB to BGR */
					for (int i=0;i<capture->frame.imageSize;i+=6) {
						dst[i]   = src[i+2];
						dst[i+1] = src[i+1];
						dst[i+2] = src[i];
						dst[i+3] = src[i+5];
						dst[i+4] = src[i+4];
						dst[i+5] = src[i+3];
					}
					break;
				case COLOR_FORMAT7_YUV422:
					//printf("icvRetrieveFrame convert YUV422 to BGR %d\n");
					uyvy2bgr(src,
							dst,
							capture->camera->frame_width * capture->camera->frame_height);
					break;
				case COLOR_FORMAT7_MONO8:
					//printf("icvRetrieveFrame convert MONO8 to BGR %d\n");
					y2bgr(src,
							dst,
							capture->camera->frame_width * capture->camera->frame_height);
					break;
				case COLOR_FORMAT7_YUV411:
					//printf("icvRetrieveFrame convert YUV411 to BGR %d\n");
					uyyvyy2bgr(src,
							dst,
							capture->camera->frame_width * capture->camera->frame_height);
					break;
				case COLOR_FORMAT7_YUV444:
					//printf("icvRetrieveFrame convert YUV444 to BGR %d\n");
					uyv2bgr(src,
							dst,
							capture->camera->frame_width * capture->camera->frame_height);
					break;
				case COLOR_FORMAT7_MONO16:
					//printf("icvRetrieveFrame convert MONO16 to BGR %d\n");
					y162bgr(src,
							dst,
							capture->camera->frame_width * capture->camera->frame_height, MONO16_BPP);
					break;
				case COLOR_FORMAT7_RGB16:
					//printf("icvRetrieveFrame convert RGB16 to BGR %d\n");
					rgb482bgr(src,
							dst,
							capture->camera->frame_width * capture->camera->frame_height, MONO16_BPP);
					break;
				default:
					fprintf(stderr,"%s:%d: Unsupported color mode %d\n",__FILE__,__LINE__,capture->color_mode);
					return 0;
			} /* switch (capture->mode) */
		}
		else{
			// return raw data
			capture->frame.imageData = (char *) capture->camera->capture_buffer;
			capture->frame.imageDataOrigin = (char *) capture->camera->capture_buffer;
		}
		
		// TODO: if convert=0, we are not actually done with the buffer
		// but this seems to work anyway.
		dc1394_dma_done_with_buffer(capture->camera);
		
		return &capture->frame;
	}
	return 0;
}
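The TODO in this example points at a real hazard: with convert == 0 the returned IplImage aliases the DMA buffer that has just been handed back to the driver. A minimal sketch of one safer variant (not OpenCV code; frame_size is an assumed byte count such as frame_width * frame_height * bytes-per-pixel) copies the frame out before releasing the buffer:

/* Sketch only: copy the image out of the DMA ring first, then release,
 * so the returned frame never aliases a recycled buffer.
 * frame_size is assumed to be the image size in bytes. */
memcpy(capture->frame.imageData,
       capture->camera->capture_buffer, frame_size);
dc1394_dma_done_with_buffer(capture->camera);
return &capture->frame;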
Example #6
//---------------------------------------------------------------
void
BufferManager1394::releaseOutputBuffer()
{
  dc1394_dma_done_with_buffer(pCamera_);
}
Example #7
int CameraDcam::moduleLoop(void)
{
    camera_data_msg*   p_data = NULL;
    ssize_t            datalength = 0;
    rack_time_t          starttime;
    int h=0,w=0,p=0;
    int ret;

    starttime = rackTime.get();

    // get datapointer from databuffer
    p_data = (camera_data_msg *)getDataBufferWorkSpace();

    p_data->data.recordingTime   = rackTime.get();
    p_data->data.mode            = mode;
    p_data->data.colorFilterId   = format7image.colorFilterId;

    switch (mode){
        case CAMERA_MODE_MONO8:
        case CAMERA_MODE_RAW8:
            p_data->data.depth = 8;
            break;
        case CAMERA_MODE_RAW12:
        case CAMERA_MODE_YUV422:
            p_data->data.depth = 16;
            break;
        default:
            GDOS_ERROR("Unknown mode %i \n",mode);
            return -1;
    }

    /*-----------------------------------------------------------------------
     *  capture one frame and send to mbx
     *-----------------------------------------------------------------------*/
    GDOS_DBG_INFO("Capturing one image\n");
    if ((ret = dc1394_dma_single_capture( &dc1394Camera )) != DC1394_SUCCESS)
    {
        GDOS_WARNING("Unable to capture a frame. ret = %i \n", ret);
    }
    else
    {
        p_data->data.width  = dc1394Camera.frame_width  / lossRate;
        p_data->data.height = dc1394Camera.frame_height / lossRate;

        // shrink the image if a loss rate is configured
        if (lossRate == 1)
        {
            memcpy(p_data->byteStream, dc1394Camera.capture_buffer, dc1394Camera.dma_frame_size );
        }
        else
        {
            /* lossRate != 1, so we need to throw some pixels away -> iterate over the array.
             * In YUV422 format pixels must be dropped in pairs, since UYVY pixel pairs cannot be split:
             * of every 4 pixels in the original, only one pixel pair is transmitted,
             * e.g. a lossRate of 2 in YUV -> take 4 bytes, skip 4 bytes, take 4 bytes, ... then skip a row.
             *
             * In RAW format pixels must also be dropped in pairs, otherwise only one color may remain.
             */
            int pairfactor = 2; // depends on color mode (YUV = RAW = 2, mono = 1)
            int bytesPerPixelPair = 0;

            if (p_data->data.mode==CAMERA_MODE_MONO8 || p_data->data.mode==CAMERA_MODE_RGB24 || p_data->data.mode==CAMERA_MODE_MONO12)
                pairfactor = 1;

            bytesPerPixelPair = bytesPerPixel * pairfactor; // cached to save multiplications

            // copy needed to change data types
            memcpy(dataBuffer, dc1394Camera.capture_buffer, dc1394Camera.dma_frame_size );

            for (h=0; h < p_data->data.height; h++)
            { //for every line in the smaller image
                for(w=0; w < p_data->data.width / pairfactor; w++)
                { //for every pixelPair in the smaller image
                    for (p=0; p<bytesPerPixelPair; p++)
                    { //for every byte per pixelPair
                        p_data->byteStream[   (h*bytesPerPixel*p_data->data.width) + (w*bytesPerPixelPair)              + p] =
                        dataBuffer[(h*bytesPerPixel*dc1394Camera.frame_width*lossRate) + (w*bytesPerPixelPair*lossRate) + p];
                    }
                }
            }
            GDOS_DBG_DETAIL("Data width %i pixelPairs to be send height %i pixel(!) bppp %i\n", w, h, p);
        } //end of lossRate calculation

        //doing auto shutter / gain / brightness control
        autoBrightness(p_data);

        if (whitebalanceMode > 0)
        {
            autoWhitebalance(p_data);
        }

        GDOS_DBG_DETAIL("Data recordingtime %i width %i height %i depth %i mode %i\n", p_data->data.recordingTime, p_data->data.width, p_data->data.height, p_data->data.depth, p_data->data.mode);

        datalength = p_data->data.width * p_data->data.height * bytesPerPixel + sizeof(camera_data);
        putDataBufferWorkSpace(datalength);

        dc1394_dma_done_with_buffer( &dc1394Camera );//return the buffer handle to library.
    }

    //rt_sleep(timeCount1s/fps); // ## time management now handled centrally

    RackTask::sleep((1000000000llu/fps) - (rackTime.get() - starttime));

    return 0;
}
Example #8
unsigned char* linuxfwCamera::getFrame()  {

	if (handle==NULL) return NULL;
	
	if (use_dma) {
		if (dc1394_dma_single_capture(&camera)!=DC1394_SUCCESS) 
		{
			fprintf( stderr, "unable to capture a frame\n");
			dc1394_dma_release_camera(handle,&camera);
			dc1394_destroy_handle(handle);
			return NULL;
		} dc1394_dma_done_with_buffer(&camera);
	} else {
		if (dc1394_single_capture(handle,&camera)!=DC1394_SUCCESS) 
		{
			fprintf( stderr, "unable to capture a frame\n");
			dc1394_dma_release_camera(handle,&camera);
			dc1394_destroy_handle(handle);
			return NULL;
		}
	}
	
	switch (colour) {
		case false: {
			if (image_mode_==MODE_640x480_YUV411) {
				unsigned char *src = (unsigned char*)camera.capture_buffer;
				unsigned char *dest = buffer;
				for(int y=0;y<height;y++) {
					for(int x=0;x<width/4;++x) {
						src++;
						*dest++ = *src++;
						*dest++ = *src++;
						src++;
						*dest++ = *src++;
						*dest++ = *src++;
					}
				}
			} else {
				if (buffer!=NULL)
					memcpy(buffer,(unsigned char*)camera.capture_buffer,width*height*bytes);
			}
			break;
		}
		case true: {
			if (image_mode_==MODE_640x480_YUV411) {
				int R,G,B;
				int Y[4];
				int U,V;
				
				unsigned char *src = (unsigned char*)camera.capture_buffer;
				unsigned char *dest = buffer;
				for(int y=0;y<height;y++) {
					for(int x=0;x<width/4;++x) {
						U    = *src++;
						Y[0] = *src++;
						Y[1] = *src++;
						V    = *src++;
						Y[2] = *src++;
						Y[3] = *src++;
	
						// U and V are +-0.5
						U -= 128;
						V -= 128;
						
						// conversion
						for (int i=0;i<4;i++) {
							R = (int)(Y[i] + 1.370705f * V); 			//R
							G = (int)(Y[i] - 0.698001f * V - 0.337633f * U);	//G
							B = (int)(Y[i] + 1.732446f * U); 			//B
							
							if (R < 0) R=0;
							if (G < 0) G=0;
							if (B < 0) B=0;
							if (R > 255) R=255;
							if (G > 255) G=255;
							if (B > 255) B=255;
		
							*dest++ = B;
							*dest++ = G;
							*dest++ = R;
						}
					}
				}			
			} else {
				if (buffer!=NULL)
					memcpy(buffer,(unsigned char*)camera.capture_buffer,width*height*bytes);
			}
			break;
		}
		
	}
	
	if (use_dma)
		dc1394_dma_done_with_buffer(&camera); // release the DMA buffer only after the image has been copied out

	return buffer;
}
Example #9
void
capture_frame()
{
  static int                have_warned = 0;

  dc1394_dma_multi_capture(cameras, numCameras);

  if (!freeze && adaptor >= 0) {
    for (int i = 0; i < numCameras; i++) {
      // create OpenCV image
      switch (res) {
      case MODE_640x480_YUV411:
        if (!have_warned) {
          have_warned = 1;
          printf("WARNING: no OpenCV conversion available for this format\n");
        }
        //iyu12yuy2((unsigned char *) cameras[i].capture_buffer,
        //          frame_buffer + (i * frame_length),
        //          (device_width * device_height));
        break;

      case MODE_320x240_YUV422:
      case MODE_640x480_YUV422:
        if (!have_warned) {
          have_warned = 1;
          printf("WARNING: no OpenCV conversion available for this format\n");
        }
        //memcpy(frame_buffer + (i * frame_length),
        //       cameras[i].capture_buffer, device_width * device_height * 2);
        break;

      case MODE_640x480_RGB:
        // don't convert for OpenCV, this is the correct format
        readOnlyImg->imageData = (char *) cameras[i].capture_buffer;
        if (async_processing) {
          if (i == 0) {
            hvAsyncGetImageBuffer(&m_async_image, &m_async_bufID);
            cvCopy(readOnlyImg, m_async_image);
          }
        } else {
          cvCopy(readOnlyImg, iplImages[i]);
        }
        break;
      }

      if (verbose) {
        CvFont font;
        cvInitFont(&font, CV_FONT_VECTOR0, 0.5f /* hscale */,
                   0.5f /* vscale */, 0.1f /* italic_scale */,
                   1 /* thickness */, 8);
        char str[256];
        sprintf(str, "camera id: %d", i);
        CvSize textsize;
        int underline;
        cvGetTextSize(str, &font, &textsize, &underline);
        CvPoint pos = cvPoint(iplImages[i]->width - textsize.width - 5,
                              textsize.height + 10);
        cvPutText(iplImages[i], str, pos, &font, CV_RGB(0, 255, 0));

        //cvRectangle(iplImages[i], cvPoint(10, 10), cvPoint(100, 100),
        //            cvScalar(255, 255, 255, 255), CV_FILLED, 8, 0);
      }
    }
  }

  for (int i = 0; i < numCameras; i++) {
    dc1394_dma_done_with_buffer(&cameras[i]);
  }
}