/*****************************************************************************
 * Close: stop capture, release the camera and free all demuxer state.
 *****************************************************************************/
static void Close( vlc_object_t *p_this )
{
    demux_t *p_demux = (demux_t*)p_this;
    demux_sys_t *p_sys = p_demux->p_sys;

    /* All raw1394 calls need a valid handle.  The original only guarded
     * dc1394_destroy_handle(); guard the stop/release calls consistently so
     * a partially-initialized p_sys cannot crash here. */
    if( p_sys->fd_video )
    {
        /* Stop data transmission */
        if( dc1394_stop_iso_transmission( p_sys->fd_video,
                                          p_sys->camera.node ) != DC1394_SUCCESS )
        {
            msg_Err( p_demux, "couldn't stop the camera" );
        }

        /* Close camera: DMA capture needs the extra unlisten step */
        if( p_sys->dma_capture )
        {
            dc1394_dma_unlisten( p_sys->fd_video, &p_sys->camera );
            dc1394_dma_release_camera( p_sys->fd_video, &p_sys->camera );
        }
        else
        {
            dc1394_release_camera( p_sys->fd_video, &p_sys->camera );
        }

        dc1394_destroy_handle( p_sys->fd_video );
    }

    CloseAudioDev( p_demux );

    /* free(NULL) is a no-op, so these are safe even if Open() failed early */
    free( p_sys->camera_nodes );
    free( p_sys->audio_device );
    free( p_sys );
}
// Shut down an active DC1394 capture: halt isochronous transfer, detach the
// DMA listener, and drop the conversion buffer if one was allocated.
static void icvCloseCAM_DC1394( CvCaptureCAM_DC1394* capture )
{
    dc1394_stop_iso_transmission( capture->handle, capture->camera->node );
    dc1394_dma_unlisten( capture->handle, capture->camera );

    // Only converted frames own a separately allocated pixel buffer.
    if( capture->convert )
        cvFree( &capture->frame.imageData );
}
/* Release the global dc1394 resources acquired at startup. */
void dc1394_close(void)
{
    fprintf( stderr, "closing down..." );

    /* Detach the DMA ring buffer, give the camera back to the bus,
     * then drop the raw1394 handle itself. */
    dc1394_dma_unlisten( handle, &camera );
    dc1394_dma_release_camera( handle, &camera );
    dc1394_destroy_handle( handle );

    fprintf( stderr, "done!\n" );
}
/* Demuxer close callback: stop streaming and release every dc1394 resource
 * held in the context's private data.  Always reports success. */
static int dc1394_close(AVFormatContext * context)
{
    struct dc1394_data *s = context->priv_data;

    dc1394_stop_iso_transmission(s->handle, s->camera.node);
    dc1394_dma_unlisten(s->handle, &s->camera);
    dc1394_dma_release_camera(s->handle, &s->camera);
    dc1394_destroy_handle(s->handle);

    return 0;
}
/*
 * libdc1394 v1 header-read: negotiate format/rate via dc1394_read_common(),
 * then acquire a raw1394 handle, locate the camera on the requested channel,
 * set up DMA capture and start isochronous transmission.
 *
 * Returns 0 on success, -1 on failure; all resources acquired before a
 * failure are released via the goto-cleanup chain.
 *
 * Fix vs. the original: removed the unused local `AVStream *vst` — the
 * stream is presumably created inside dc1394_read_common() (it is never
 * touched here).
 */
static int dc1394_v1_read_header(AVFormatContext *c, AVFormatParameters * ap)
{
    dc1394_data* dc1394 = c->priv_data;
    nodeid_t* camera_nodes;
    int res;
    struct dc1394_frame_format *fmt = NULL;
    struct dc1394_frame_rate *fps = NULL;

    if (dc1394_read_common(c, ap, &fmt, &fps) != 0)
        return -1;

    /* Now let us prep the hardware. */
    dc1394->handle = dc1394_create_handle(0); /* FIXME: gotta have ap->port */
    if (!dc1394->handle) {
        av_log(c, AV_LOG_ERROR, "Can't acquire dc1394 handle on port %d\n",
               0 /* ap->port */);
        goto out;
    }

    camera_nodes = dc1394_get_camera_nodes(dc1394->handle, &res, 1);
    if (!camera_nodes || camera_nodes[ap->channel] == DC1394_NO_CAMERA) {
        av_log(c, AV_LOG_ERROR, "There's no IIDC camera on the channel %d\n",
               ap->channel);
        goto out_handle;
    }

    res = dc1394_dma_setup_capture(dc1394->handle, camera_nodes[ap->channel],
                                   0, FORMAT_VGA_NONCOMPRESSED,
                                   fmt->frame_size_id, SPEED_400,
                                   fps->frame_rate_id, 8, 1, c->filename,
                                   &dc1394->camera);
    dc1394_free_camera_nodes(camera_nodes);
    if (res != DC1394_SUCCESS) {
        av_log(c, AV_LOG_ERROR, "Can't prepare camera for the DMA capture\n");
        goto out_handle;
    }

    res = dc1394_start_iso_transmission(dc1394->handle, dc1394->camera.node);
    if (res != DC1394_SUCCESS) {
        av_log(c, AV_LOG_ERROR, "Can't start isochronous transmission\n");
        goto out_handle_dma;
    }

    return 0;

out_handle_dma:
    dc1394_dma_unlisten(dc1394->handle, &dc1394->camera);
    dc1394_dma_release_camera(dc1394->handle, &dc1394->camera);
out_handle:
    dc1394_destroy_handle(dc1394->handle);
out:
    return -1;
}
// Release the camera and its bus handle.  DMA mode needs the extra
// unlisten/release pair; the plain path just hands the camera back.
bool linuxfwCamera::closeCamera()
{
    if (!use_dma) {
        dc1394_release_camera(handle, &camera);
    } else {
        dc1394_dma_unlisten(handle, &camera);
        dc1394_dma_release_camera(handle, &camera);
    }

    // Power the camera down before dropping the raw1394 handle.
    dc1394_camera_off(handle, cameraID);
    dc1394_destroy_handle(handle);

    return true;
}
//--------------------------------------------------------------- void Device1394::fini() { MIRO_DBG(VIDEO, LL_DEBUG, "Device1394::fini()"); if (is_open_) { MIRO_DBG(VIDEO, LL_DEBUG, "Device1394::fini() close"); dc1394_stop_iso_transmission(handle_, p_camera_->node); dc1394_dma_unlisten(handle_, p_camera_); dc1394_dma_release_camera(handle_, p_camera_); is_open_ = false; } cleanupDevice(); }
void CameraDcam::moduleOff(void) { RackDataModule::moduleOff(); // has to be first command in moduleOff(); if ( dc1394_stop_iso_transmission( porthandle[dc1394CameraPortNo],dc1394Camera.node ) != DC1394_SUCCESS ) { GDOS_WARNING("Couldn't stop the iso transmission!!\n"); } if ( dc1394_dma_unlisten( porthandle[dc1394CameraPortNo], &dc1394Camera ) != DC1394_SUCCESS) { GDOS_WARNING("Couldn't unlisten the dma!!\n"); } if( dc1394_dma_release_camera( porthandle[dc1394CameraPortNo], &dc1394Camera ) != DC1394_SUCCESS) { GDOS_WARNING("Couldn't release the dma!!\n"); } }
static void *AcquireFrames(void *arg) { struct timeval tv; int start_sec; int cam = (int)arg; pthread_cleanup_push(dc1394Done, (void *)cam); printf("in acquire\n"); gettimeofday(&tv, NULL); start_sec = tv.tv_sec; printf("start sec is %d\n", start_sec); for (Cams[cam].next_frame=0; Cams[cam].next_frame < Cams[cam].max_frames; Cams[cam].next_frame++) { if (dc1394_dma_single_capture(&Cams[cam].camera)!=DC1394_SUCCESS) { fprintf( stderr, "unable to capture a frame\n"); dc1394_stop_iso_transmission(Cams[cam].handle,Cams[cam].camera.node); Cams[cam].running = 0; dc1394_dma_release_camera(Cams[cam].handle,&Cams[cam].camera); Cams[cam].dma_active = 0; dc1394_destroy_handle(Cams[cam].handle); Cams[cam].handle = 0; Cams[cam].thread_id =0; return; } gettimeofday(&tv, NULL); Cams[cam].times[Cams[cam].next_frame] = tv.tv_sec-start_sec+(double)tv.tv_usec*1E-6; printf("The time for frame %d is %g\n", Cams[cam].next_frame, Cams[cam].times[Cams[cam].next_frame]); memcpy((void *)Cams[cam].frames+Cams[cam].next_frame*Cams[cam].width*Cams[cam].height, Cams[cam].camera.capture_buffer, Cams[cam].width*Cams[cam].height); dc1394_dma_done_with_buffer(&Cams[cam].camera); printf("got frame %d\n", Cams[cam].next_frame); } /* clean up active daq */ dc1394_stop_iso_transmission(Cams[cam].handle,Cams[cam].camera.node); Cams[cam].running = 0; dc1394_dma_unlisten(Cams[cam].handle, &Cams[cam].camera); dc1394_dma_release_camera(Cams[cam].handle,&Cams[cam].camera); Cams[cam].dma_active = 0; dc1394_destroy_handle(Cams[cam].handle); Cams[cam].handle = 0; Cams[cam].thread_id =0; pthread_cleanup_pop(0); return; }
/*
 * pthread cleanup handler / teardown for one camera.  `arg` carries the
 * camera index smuggled through the void * argument.  Each teardown step is
 * guarded by the matching state flag, so a partially initialized camera is
 * unwound correctly and repeat calls are harmless.
 */
void dc1394Done(void *arg)
{
    /* Round-trip via intptr_t: a straight (int)arg truncates the pointer
     * representation on LP64 platforms and draws compiler warnings. */
    int cam = (int)(intptr_t)arg;

    printf("here I am in done - %d %d\n", cam, Cams[cam].running);

    /* clean up any active daq */
    if (Cams[cam].running) {
        printf("stop the dma\n\n");
        dc1394_dma_unlisten(Cams[cam].handle, &Cams[cam].camera);
        dc1394_stop_iso_transmission(Cams[cam].handle, Cams[cam].camera.node);
        Cams[cam].running = 0;
    }
    if (Cams[cam].dma_active) {
        dc1394_dma_release_camera(Cams[cam].handle, &Cams[cam].camera);
        Cams[cam].dma_active = 0;
    }
    if (Cams[cam].handle != NULL) {
        dc1394_destroy_handle(Cams[cam].handle);
        Cams[cam].handle = 0;
    }
    printf("all done with Don\n");
}
void cleanup(void) { int i; for (i = 0; i < numCameras; i++) { dc1394_dma_unlisten(handles[cameras[i].port], &cameras[i]); dc1394_dma_release_camera(handles[cameras[i].port], &cameras[i]); } for (i = 0; i < numPorts; i++) raw1394_destroy_handle(handles[i]); if ((void *) window != NULL) XUnmapWindow(display, window); if (display != NULL) XFlush(display); if (frame_buffer != NULL) free(frame_buffer); // OpenCV cleanup for (i = 0; i < numCameras; i++) { cvReleaseImageHeader(&iplImages[i]); } }
// Setup camera to use given dc1394 mode static int icvSetModeCAM_DC1394( CvCaptureCAM_DC1394 * capture, int mode ){ quadlet_t modes, formats; //printf("<icvSetModeCAM_DC1394>\n"); // figure out corrent format for this mode int format = (mode - MODE_FORMAT0_MIN) / 32 + FORMAT_MIN; // get supported formats if (dc1394_query_supported_formats(capture->handle, capture->camera->node, &formats)<0) { fprintf(stderr,"%s:%d: Could not query supported formats\n",__FILE__,__LINE__); return 0; } // is format for requested mode supported ? if(icvFormatSupportedCAM_DC1394(format, formats)==0){ return 0; } // get supported modes for requested format if (dc1394_query_supported_modes(capture->handle, capture->camera->node, format, &modes)<0){ fprintf(stderr,"%s:%d: Could not query supported modes for format %d\n",__FILE__,__LINE__, capture->format); return 0; } // is requested mode supported ? if(! icvModeSupportedCAM_DC1394(format, mode, modes) ){ return 0; } int color_mode = icvColorMode( mode ); if(color_mode == -1){ return 0; } int frame_rate = icvGetBestFrameRate(capture, format, mode); dc1394_dma_unlisten(capture->handle, capture->camera); if (dc1394_dma_setup_capture(capture->handle, capture->camera->node, capture->camera->channel /*channel*/, format, mode, SPEED_400, frame_rate, NUM_BUFFERS, #ifdef HAVE_DC1394_095 0 /*do_extra_buffering*/, #endif 1 /*DROP_FRAMES*/, capture->device_name, capture->camera) != DC1394_SUCCESS) { fprintf(stderr,"%s:%d: Failed to setup DMA capture with VIDEO1394\n",__FILE__,__LINE__); return 0; } dc1394_start_iso_transmission(capture->handle, capture->camera->node); capture->frame_rate = frame_rate; capture->format = format; capture->mode = mode; capture->color_mode = color_mode; // now fix image size to match new mode icvResizeFrame( capture ); return 1; }
/*
 * dc1394 header-read: match the caller's pix_fmt/size/rate against the
 * driver's capability tables, create the raw-video stream, then acquire a
 * handle, set up DMA capture and start isochronous transmission.
 *
 * Returns 0 on success, -1 on failure; resources acquired before a failure
 * are released via the goto-cleanup chain.
 *
 * Fix vs. the original: both capability tables are sentinel-terminated
 * (width == 0 / frame_rate == 0), and the original used fmt/fps even when
 * the searches found no match — i.e. when the loops ran off onto the zeroed
 * sentinel — yielding a 0x0 stream and a garbage frame_size_id.  A match is
 * now verified before use.
 */
static int dc1394_read_header(AVFormatContext *c, AVFormatParameters * ap)
{
    dc1394_data* dc1394 = c->priv_data;
    AVStream* vst;
    nodeid_t* camera_nodes;
    int res;
    struct dc1394_frame_format *fmt;
    struct dc1394_frame_rate *fps;

    for (fmt = dc1394_frame_formats; fmt->width; fmt++)
        if (fmt->pix_fmt == ap->pix_fmt &&
            fmt->width == ap->width &&
            fmt->height == ap->height)
            break;

    for (fps = dc1394_frame_rates; fps->frame_rate; fps++)
        if (fps->frame_rate ==
            av_rescale(1000, ap->time_base.den, ap->time_base.num))
            break;

    if (!fmt->width || !fps->frame_rate) {
        av_log(c, AV_LOG_ERROR, "Can't find matching camera format\n");
        return -1;
    }

    /* create a video stream */
    vst = av_new_stream(c, 0);
    if (!vst)
        return -1;
    av_set_pts_info(vst, 64, 1, 1000);
    vst->codec->codec_type = CODEC_TYPE_VIDEO;
    vst->codec->codec_id = CODEC_ID_RAWVIDEO;
    vst->codec->time_base.den = fps->frame_rate;
    vst->codec->time_base.num = 1000;
    vst->codec->width = fmt->width;
    vst->codec->height = fmt->height;
    vst->codec->pix_fmt = fmt->pix_fmt;

    /* packet init */
    av_init_packet(&dc1394->packet);
    dc1394->packet.size = avpicture_get_size(fmt->pix_fmt,
                                             fmt->width, fmt->height);
    dc1394->packet.stream_index = vst->index;
    dc1394->packet.flags |= PKT_FLAG_KEY;

    dc1394->current_frame = 0;
    dc1394->fps = fps->frame_rate;

    vst->codec->bit_rate = av_rescale(dc1394->packet.size * 8,
                                      fps->frame_rate, 1000);

    /* Now lets prep the hardware */
    dc1394->handle = dc1394_create_handle(0); /* FIXME: gotta have ap->port */
    if (!dc1394->handle) {
        av_log(c, AV_LOG_ERROR, "Can't acquire dc1394 handle on port %d\n",
               0 /* ap->port */);
        goto out;
    }

    camera_nodes = dc1394_get_camera_nodes(dc1394->handle, &res, 1);
    if (!camera_nodes || camera_nodes[ap->channel] == DC1394_NO_CAMERA) {
        av_log(c, AV_LOG_ERROR, "There's no IIDC camera on the channel %d\n",
               ap->channel);
        goto out_handle;
    }

    res = dc1394_dma_setup_capture(dc1394->handle, camera_nodes[ap->channel],
                                   0, FORMAT_VGA_NONCOMPRESSED,
                                   fmt->frame_size_id, SPEED_400,
                                   fps->frame_rate_id, 8, 1, c->filename,
                                   &dc1394->camera);
    dc1394_free_camera_nodes(camera_nodes);
    if (res != DC1394_SUCCESS) {
        av_log(c, AV_LOG_ERROR, "Can't prepare camera for the DMA capture\n");
        goto out_handle;
    }

    res = dc1394_start_iso_transmission(dc1394->handle, dc1394->camera.node);
    if (res != DC1394_SUCCESS) {
        av_log(c, AV_LOG_ERROR, "Can't start isochronous transmission\n");
        goto out_handle_dma;
    }

    return 0;

out_handle_dma:
    dc1394_dma_unlisten(dc1394->handle, &dc1394->camera);
    dc1394_dma_release_camera(dc1394->handle, &dc1394->camera);
out_handle:
    dc1394_destroy_handle(dc1394->handle);
out:
    return -1;
}