/* Demuxer read_packet callback: returns one captured DMA frame per call.
 * The returned packet's data pointer aliases the library-owned DMA capture
 * buffer; that buffer is handed back to the driver at the start of the NEXT
 * call, so the caller must consume pkt before reading again. */
static int dc1394_read_packet(AVFormatContext *c, AVPacket *pkt)
{
    struct dc1394_data *dc1394 = c->priv_data;
    int res;

    /* discard stale frame */
    if (dc1394->current_frame++) {
        /* Release the DMA buffer of the previously returned frame back to
         * the driver ring.  Skipped on the very first call, when
         * current_frame was still 0 and no buffer was outstanding. */
        if (dc1394_dma_done_with_buffer(&dc1394->camera) != DC1394_SUCCESS)
            av_log(c, AV_LOG_ERROR, "failed to release %d frame\n", dc1394->current_frame);
    }

    res = dc1394_dma_single_capture(&dc1394->camera);

    if (res == DC1394_SUCCESS) {
        dc1394->packet.data = (uint8_t *)(dc1394->camera.capture_buffer);
        /* Timestamp in microseconds.  current_frame was already
         * post-incremented above, so the first frame gets pts 1000000/fps.
         * NOTE(review): assumes dc1394->fps is in frames per second —
         * confirm against where fps is initialized. */
        dc1394->packet.pts = (dc1394->current_frame * 1000000) / dc1394->fps;
        res = dc1394->packet.size;
    } else {
        av_log(c, AV_LOG_ERROR, "DMA capture failed\n");
        dc1394->packet.data = NULL;
        /* NOTE(review): -1 as an error code; FFmpeg convention would be an
         * AVERROR value — confirm what the surrounding file's era expects. */
        res = -1;
    }

    /* Shallow copy: pkt shares the cached packet's fields, including the
     * data pointer into the DMA buffer. */
    *pkt = dc1394->packet;
    return res;
}
//--------------------------------------------------------------- void BufferManager1394::acquireOutputBuffer(unsigned long _index) { dc1394_dma_single_capture(pCamera_); bufferStatus_[_index].time = ACE_OS::gettimeofday() - camParams_->latency; bufferStatus_[_index].buffer = reinterpret_cast<unsigned char *>(pCamera_->capture_buffer); }
/*****************************************************************************
 * Demux:
 *****************************************************************************/
/* Capture one frame (DMA or classic mode) and return it in a freshly
 * allocated block with pts/dts set to the current date, or NULL on error.
 *
 * Fixes vs. the previous version:
 *  - the DMA capture buffer is now handed back to the driver on EVERY exit
 *    path after a successful dc1394_dma_single_capture(); previously the
 *    "cannot get block" / empty-buffer error paths leaked it;
 *  - the duplicated capture error branches are merged;
 *  - typo in the error message ("caputer") fixed. */
static block_t *GrabVideo( demux_t *p_demux )
{
    demux_sys_t *p_sys = p_demux->p_sys;
    block_t *p_block = NULL;
    int result;
    /* Frames are 2 bytes per pixel (packed YUV 4:2:2). */
    int i_size = p_sys->camera.frame_width * p_sys->camera.frame_height * 2;

    if( p_sys->dma_capture )
        result = dc1394_dma_single_capture( &p_sys->camera );
    else
        result = dc1394_single_capture( p_sys->camera_info.handle,
                                        &p_sys->camera );
    if( result != DC1394_SUCCESS )
    {
        msg_Err( p_demux, "unable to capture a frame" );
        return NULL;
    }

    if( !p_sys->camera.capture_buffer )
    {
        msg_Err( p_demux, "capture buffer empty" );
        goto release;   /* still must return the DMA buffer */
    }

    p_block = block_New( p_demux, i_size );
    if( !p_block )
    {
        msg_Err( p_demux, "cannot get block" );
        goto release;
    }

    memcpy( p_block->p_buffer, (const char *)p_sys->camera.capture_buffer,
            i_size );
    p_block->i_pts = p_block->i_dts = mdate();

release:
    /* Hand the DMA buffer back to the driver ring (no-op in non-DMA mode). */
    if( p_sys->dma_capture )
        dc1394_dma_done_with_buffer( &p_sys->camera );
    return p_block;
}
static void *AcquireFrames(void *arg) { struct timeval tv; int start_sec; int cam = (int)arg; pthread_cleanup_push(dc1394Done, (void *)cam); printf("in acquire\n"); gettimeofday(&tv, NULL); start_sec = tv.tv_sec; printf("start sec is %d\n", start_sec); for (Cams[cam].next_frame=0; Cams[cam].next_frame < Cams[cam].max_frames; Cams[cam].next_frame++) { if (dc1394_dma_single_capture(&Cams[cam].camera)!=DC1394_SUCCESS) { fprintf( stderr, "unable to capture a frame\n"); dc1394_stop_iso_transmission(Cams[cam].handle,Cams[cam].camera.node); Cams[cam].running = 0; dc1394_dma_release_camera(Cams[cam].handle,&Cams[cam].camera); Cams[cam].dma_active = 0; dc1394_destroy_handle(Cams[cam].handle); Cams[cam].handle = 0; Cams[cam].thread_id =0; return; } gettimeofday(&tv, NULL); Cams[cam].times[Cams[cam].next_frame] = tv.tv_sec-start_sec+(double)tv.tv_usec*1E-6; printf("The time for frame %d is %g\n", Cams[cam].next_frame, Cams[cam].times[Cams[cam].next_frame]); memcpy((void *)Cams[cam].frames+Cams[cam].next_frame*Cams[cam].width*Cams[cam].height, Cams[cam].camera.capture_buffer, Cams[cam].width*Cams[cam].height); dc1394_dma_done_with_buffer(&Cams[cam].camera); printf("got frame %d\n", Cams[cam].next_frame); } /* clean up active daq */ dc1394_stop_iso_transmission(Cams[cam].handle,Cams[cam].camera.node); Cams[cam].running = 0; dc1394_dma_unlisten(Cams[cam].handle, &Cams[cam].camera); dc1394_dma_release_camera(Cams[cam].handle,&Cams[cam].camera); Cams[cam].dma_active = 0; dc1394_destroy_handle(Cams[cam].handle); Cams[cam].handle = 0; Cams[cam].thread_id =0; pthread_cleanup_pop(0); return; }
// One iteration of the camera module's periodic loop: acquire a single DMA
// frame, fill a camera_data_msg in the module's data buffer (optionally
// decimated by lossRate), run the automatic brightness / white-balance
// controllers, publish the message, and sleep out the rest of the frame
// period.  Returns 0 normally, -1 on an unknown capture mode.
int CameraDcam::moduleLoop(void)
{
    camera_data_msg* p_data = NULL;
    ssize_t datalength = 0;
    rack_time_t starttime;
    int h=0,w=0,p=0;   // loop counters; also reported by the debug log below
    int ret;

    starttime = rackTime.get();

    // get datapointer from databuffer
    p_data = (camera_data_msg *)getDataBufferWorkSpace();

    p_data->data.recordingTime = rackTime.get();
    p_data->data.mode = mode;
    p_data->data.colorFilterId = format7image.colorFilterId;

    // Bit depth of the published image, derived from the capture mode.
    switch (mode){
        case CAMERA_MODE_MONO8:
        case CAMERA_MODE_RAW8:
            p_data->data.depth = 8;
            break;
        case CAMERA_MODE_RAW12:
        case CAMERA_MODE_YUV422:
            p_data->data.depth = 16;
            break;
        default:
            // NOTE(review): this early return leaves the acquired workspace
            // unsubmitted and skips the frame-rate sleep — confirm that this
            // is the intended failure behavior.
            GDOS_ERROR("Unknown mode %i \n",mode);
            return -1;
    }

    /*-----------------------------------------------------------------------
     * capture one frame and send to mbx
     *-----------------------------------------------------------------------*/
    GDOS_DBG_INFO("Capturing one image\n");

    if ((ret = dc1394_dma_single_capture( &dc1394Camera ))!=DC1394_SUCCESS)
    {
        // Capture failure is non-fatal: no message is published this cycle.
        GDOS_WARNING("Unable to capture a frame. ret = %i \n", ret);
    }
    else
    {
        p_data->data.width = dc1394Camera.frame_width / lossRate;
        p_data->data.height = dc1394Camera.frame_height / lossRate;

        //shrink data if set to.
        if (lossRate == 1)
        {
            // Full resolution: copy the DMA frame straight into the message.
            memcpy(p_data->byteStream, dc1394Camera.capture_buffer, dc1394Camera.dma_frame_size );
        }
        else
        {
            /* lossRate != 1, so pixels must be dropped -> iterate over the
             * array.  Decimation in YUV422 must drop pixel PAIRS: UYVY pairs
             * cannot be split, so of every 4 original pixels only one pixel
             * pair is transmitted (e.g. lossRate 2 in YUV -> take 4 bytes,
             * skip 4 bytes, ... then skip a whole row).
             * Decimation in RAW (Bayer) format must likewise drop pixel
             * pairs, otherwise only a single color channel would survive. */
            int pairfactor = 2; //depending on colorMode (Yuv=Raw=2, mono=1)
            int bytesPerPixelPair = 0;

            if (p_data->data.mode==CAMERA_MODE_MONO8 ||p_data->data.mode==CAMERA_MODE_RGB24 || p_data->data.mode==CAMERA_MODE_MONO12)
                pairfactor = 1;

            bytesPerPixelPair = bytesPerPixel * pairfactor;//to save multiplications

            // Copy out of the DMA buffer first; the decimation below then
            // reads from our own dataBuffer.
            memcpy(dataBuffer, dc1394Camera.capture_buffer, dc1394Camera.dma_frame_size );

            for (h=0; h < p_data->data.height; h++)
            { //for every line in the smaller image
                for(w=0; w < p_data->data.width / pairfactor; w++)
                { //for every pixelPair in the smaller image
                    for (p=0; p<bytesPerPixelPair; p++)
                    { //for every byte per pixelPair
                        p_data->byteStream[ (h*bytesPerPixel*p_data->data.width) + (w*bytesPerPixelPair) + p] =
                            dataBuffer[(h*bytesPerPixel*dc1394Camera.frame_width*lossRate) + (w*bytesPerPixelPair*lossRate) + p];
                    }
                }
            }

            // Reports the final loop-counter values (w, h, p) after decimation.
            GDOS_DBG_DETAIL("Data width %i pixelPairs to be send height %i pixel(!) bppp %i\n", w, h, p);
        } //end of lossRate calculation

        //doing auto shutter / gain / brightness control
        autoBrightness(p_data);

        if (whitebalanceMode > 0)
        {
            autoWhitebalance(p_data);
        }

        GDOS_DBG_DETAIL("Data recordingtime %i width %i height %i depth %i mode %i\n", p_data->data.recordingTime, p_data->data.width, p_data->data.height, p_data->data.depth, p_data->data.mode);

        datalength = p_data->data.width * p_data->data.height * bytesPerPixel + sizeof(camera_data);
        putDataBufferWorkSpace(datalength);

        dc1394_dma_done_with_buffer( &dc1394Camera );//return the buffer handle to library.
    }

    //rt_sleep(timeCount1s/fps);//## time management is handled centrally
    // NOTE(review): 1000000000llu/fps is a period in nanoseconds; if the
    // work above took longer than one period, this unsigned subtraction
    // wraps to a huge value — confirm overrun handling, and confirm that
    // rackTime.get() uses the unit RackTask::sleep() expects.
    RackTask::sleep((1000000000llu/fps) - (rackTime.get() - starttime));
    return 0;
}
unsigned char* linuxfwCamera::getFrame() { if (handle==NULL) return NULL; if (use_dma) { if (dc1394_dma_single_capture(&camera)!=DC1394_SUCCESS) { fprintf( stderr, "unable to capture a frame\n"); dc1394_dma_release_camera(handle,&camera); dc1394_destroy_handle(handle); return NULL; } dc1394_dma_done_with_buffer(&camera); } else { if (dc1394_single_capture(handle,&camera)!=DC1394_SUCCESS) { fprintf( stderr, "unable to capture a frame\n"); dc1394_dma_release_camera(handle,&camera); dc1394_destroy_handle(handle); return NULL; } } switch (colour) { case false: { if (image_mode_==MODE_640x480_YUV411) { unsigned char *src = (unsigned char*)camera.capture_buffer; unsigned char *dest = buffer; for(int y=0;y<height;y++) { for(int x=0;x<width/4;++x) { src++; *dest++ = *src++; *dest++ = *src++; src++; *dest++ = *src++; *dest++ = *src++; } } } else { if (buffer!=NULL) memcpy(buffer,(unsigned char*)camera.capture_buffer,width*height*bytes); } break; } case true: { if (image_mode_==MODE_640x480_YUV411) { int R,G,B; int Y[4]; int U,V; unsigned char *src = (unsigned char*)camera.capture_buffer; unsigned char *dest = buffer; for(int y=0;y<height;y++) { for(int x=0;x<width/4;++x) { U = *src++; Y[0] = *src++; Y[1] = *src++; V = *src++; Y[2] = *src++; Y[3] = *src++; // U and V are +-0.5 U -= 128; V -= 128; // conversion for (int i=0;i<4;i++) { R = (int)(Y[i] + 1.370705f * V); //R G = (int)(Y[i] - 0.698001f * V - 0.337633f * U); //G B = (int)(Y[i] + 1.732446f * U); //B if (R < 0) R=0; if (G < 0) G=0; if (B < 0) B=0; if (R > 255) R=255; if (G > 255) G=255; if (B > 255) B=255; *dest++ = B; *dest++ = G; *dest++ = R; } } } } else { if (buffer!=NULL) memcpy(buffer,(unsigned char*)camera.capture_buffer,width*height*bytes); } break; } } return buffer; }