/*
 * Test driver for the quickcam capture library.
 * Usage: ./testquickcam DEVICE [ -r | -m | -l ] [ -0 .. -5 ]
 * Returns 0 on success, 1 on usage error or init failure.
 */
int main(int argc, char *argv[])
{
    struct video_config *camera;

    if (argc == 1) {
        printf(" *** Usage ***\n");
        printf("./testquickcam DEVICE [ -r | -m | -l ] [ -0 | -1 | -2 | -3 | -4 | -5 ]\n\n");
        printf(" -r reads one frame via read() from the camera\n");
        printf(" -m reads one frame via mmap() from the camera\n");
        printf(" -l read() loop...good for debugging gain etc \n");
        printf(" -0-5 set resolution\n"); /* BUGFIX: typo "resulution" */
        exit(1);
    }

    camera = init_video_config(324, 248);
    if (camera == NULL) {
        /* presumably allocates — guard against OOM; TODO confirm it can return NULL */
        printf("failed to allocate video config\n");
        exit(1);
    }

    if (open_camera(camera, argv[1]) == 1) {
        get_camera_info(camera);
        print_camera_info(camera);
        read_loop(camera);
        close_camera(camera);
    }
    delete_video_config(camera);

    /* BUGFIX: the original exit(1) reported failure even on success */
    exit(0);
}
/*
 * Shut down a capture session: stop streaming, release the buffers,
 * close the device, then free the camera object itself.
 * The caller must not use `cam` afterwards.
 * BUGFIX: the original text was missing the function's closing brace.
 */
void v4l2_close(Camera* cam)
{
    stop_video_capturing(cam);
    uninit_camera(cam);
    close_camera(cam);
    free(cam);
}
/*
 * Tear down the full pipeline in dependency order: stop streaming,
 * release buffers, close the device node, free the camera object,
 * then close the output file and the encoder. The order is load-bearing;
 * `cam` is invalid after this call.
 */
void v4l2_close(struct camera *cam) { stop_capturing(cam); uninit_camera(cam); close_camera(cam); free(cam); close_file(); close_encoder(); }
int main(int argc, char *argv[]) { static struct Camera camera; pthread_t draw_thread, grab_thread, tele_thread; init_camera(&camera); if(argc >= 2 && strcmp(argv[1], "GPS_ON") == 0) camera.gpson = TRUE; //init threads g_thread_init(NULL); gdk_threads_init(); //init gtk gtk_init(&argc, &argv); //Init gdk_rgb gdk_rgb_init(); //Init semaphore sem_init(&sem_draw, 0, 0); sem_init(&sem_grab, 0, 1); open_camera(&camera); set_camera_info(&camera); get_camera_info(&camera); // print_camera_info(&camera); create_window(&camera); if(camera.gpson) { camera_antennaimg_change(&camera, "0"); pthread_create(&tele_thread, NULL, (void*)&camera_gps_tel, &camera); } pthread_create(&draw_thread, NULL, (void*)&preview_camera, &camera); pthread_create(&grab_thread, NULL, (void*)&grab_image_camera, &camera); gdk_threads_enter(); gtk_main(); gdk_threads_leave(); camera.quit = 1; if(camera.gpson) pthread_join(tele_thread, NULL); pthread_join(grab_thread, NULL); pthread_join(draw_thread, NULL); sem_post(&sem_draw); sem_post(&sem_grab); sem_destroy(&sem_draw); sem_destroy(&sem_grab); close_camera(&camera); return 0; }
/*
 * Stop capture on the context, then close the camera for every channel
 * in the [ctx->begin, ctx->end] range. Always reports success.
 */
static int k4w2_v4l2_close(k4w2_t ctx)
{
    k4w2_v4l2 *v4l2 = (k4w2_v4l2 *)ctx;
    CHANNEL ch = ctx->begin;

    k4w2_v4l2_stop(ctx);
    while (ch <= ctx->end) {
        close_camera(&v4l2->cam[ch]);
        ++ch;
    }
    return K4W2_SUCCESS;
}
/*
 * Poll once a second until a camera becomes available, reset it, run the
 * capture loop, then release the device. Always returns 0.
 */
static int run_capture(const char *basename, float delay, bool testonly)
{
    dc1394camera_t *cam = NULL;

    printf("Waiting for camera\n");
    for (;;) {
        cam = open_camera();
        if (cam != NULL)
            break;
        /* progress dot while we wait for the device to appear */
        printf(".");
        fflush(stdout);
        sleep(1);
    }

    dc1394_camera_reset(cam);
    capture_loop(cam, basename, delay, testonly);
    close_camera(cam);
    return 0;
}
/*
 * Python binding: close the camera stored in slot `handle` of the global
 * cameras[] table and clear the slot. Raises FleaError on a handle that is
 * out of range or already closed; returns None on success.
 */
static PyObject *
flea_close(PyObject *self, PyObject *args)
{
    int handle = -1;

    if (!PyArg_ParseTuple(args, "i", &handle)) {
        return NULL;
    }

    /* guard clause: reject anything that is not a live slot */
    if (handle < 0 || handle >= NUM_CAMERA_HANDLES || cameras[handle] == NULL) {
        PyErr_SetString(FleaError, "Invalid handle");
        return NULL;
    }

    close_camera(cameras[handle]);
    cameras[handle] = NULL;
    Py_RETURN_NONE;
}
/*
 * Initializes the global camera for streaming on /dev/video0
 * (640x480 YUYV, 3 mmap buffers).
 * Returns 0 on success; on failure closes the device and returns -1.
 */
int init_camera(void)
{
    /* Fixed configuration for the default V4L2 capture device. */
    camera.memory_mode = V4L2_MEMORY_MMAP;
    camera.num_buffers = 3;
    strcpy(camera.dev_name, "/dev/video0");
    strcpy(camera.name, "Camera");
    camera.buffers = NULL;
    camera.fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    camera.width = 640;
    camera.height = 480;

    /* Bring the device up, then start streaming; both must succeed. */
    if (v4l2_init_device(&camera) >= 0 && v4l2_stream_on(&camera) >= 0) {
        return 0;
    }

    close_camera();
    return -1;
}
/*
 * Python binding: close the camera stored in slot `handle` of the global
 * cameras[] table and clear the slot. Raises ChameleonError on a handle
 * that is out of range or already closed; returns None on success.
 */
static PyObject *
chameleon_close(PyObject *self, PyObject *args)
{
    int handle = -1;

    if (!PyArg_ParseTuple(args, "i", &handle))
        return NULL;

    if (handle < 0 || handle >= NUM_CAMERA_HANDLES || !cameras[handle]) {
        PyErr_SetString(ChameleonError, "Invalid handle");
        return NULL;
    }

    close_camera(cameras[handle]);
    cameras[handle] = NULL;

    /* NOTE(review): removed the original's `sts < 0` / "Failed to close
     * device" branch — it was unreachable, since the invalid-handle path
     * already returned and the success path always set sts = 0. */
    Py_RETURN_NONE;
}
/*
 * quiet close and exit: release the camera and campus resources, then
 * terminate the process with status 0.
 * The `e` argument is ignored — presumably this is registered as a signal
 * handler and receives the signal number; TODO confirm against the caller.
 */
void closeWin(int e) { close_camera(); close_campus(); exit(0); }
int main(int argc, char **argv) { int c = -1; int option_index; int img_height = DEFAULT_HEIGHT; int img_width = DEFAULT_WIDTH; int listen_port = LISTEN_PORT; for (;;) { c = getopt_long(argc, argv, short_options, long_options, &option_index); if (c == -1) { break; } switch(c) { case 'h': { if (optarg != NULL) { img_height = atoi(optarg); } break; }; case 'w': { if (optarg != NULL) { img_width = atoi(optarg); } break; }; case 'p': { if (optarg != NULL) { listen_port = atoi(optarg); } break; }; }; } if (open_socket(LISTEN_ADDR, listen_port) != 0) { printf("ERROR: cannot open socket\n"); return 1; } if (open_camera() != 0) { printf("Camera file error, exiting...\n"); return 1; } if (init_camera(img_height, img_width) != 0) { printf("Camera error, exiting...\n"); return 1; } else { printf("INFO: image size is %dx%d;\n\n", img_width, img_height); } while (1) { while (1) { if (wait_for_connect() == 0) { break; } } while (1) { if (read_current_socket() == 0) { if (check_get_params("exit") == 1) { goto APP_EXIT; } else if (check_get_params("bmp") == 1) { if (check_get_params(GETPARAM_SEND_HTTP_HEADERS)) { send(get_current_socket(), http_headers, strlen(http_headers), 0); send(get_current_socket(), header_ct_bmp, strlen(header_ct_bmp), 0); } read_camera(IMAGE_TYPE_BMP, get_current_socket(), check_get_params(GETPARAM_COLOR_IMAGE)); close_current_socket(); break; } else if (check_get_params("jpg") == 1) { //print_time(0); if (check_get_params(GETPARAM_SEND_HTTP_HEADERS)) { send(get_current_socket(), http_headers, strlen(http_headers), 0); send(get_current_socket(), header_ct_jpeg, strlen(header_ct_jpeg), 0); } read_camera(IMAGE_TYPE_JPG, get_current_socket(), check_get_params(GETPARAM_COLOR_IMAGE)); close_current_socket(); //print_time(1); break; } else if (check_get_params("yuyv") == 1) { // GET:yuyv read_camera(IMAGE_TYPE_YUYV, get_current_socket(), 0); close_current_socket(); break; } } } } APP_EXIT: printf("Exiting...\n"); close_camera(); close_current_socket(); 
close_main_socket(); return 0; }
fleaCamera* open_camera(int brightness, unsigned int height, unsigned int width) { fc2Error error; fleaCamera* camera = calloc(1, sizeof(fleaCamera)); fc2PGRGuid guid; fc2GigEImageSettings image_settings; printf("Creating context\n"); error = fc2CreateGigEContext( &camera->context ); if ( error != FC2_ERROR_OK ) { printf( "Error in fc2CreateContext: %d\n", error ); free(camera); return NULL; } // Get the 0th camera fc2GetCameraFromIndex( camera->context, 0, &guid ); error = fc2Connect( camera->context, &guid ); if ( error != FC2_ERROR_OK ) { printf( "Error in fc2Connect: %d\n", error ); close_camera(camera); return NULL; } //set_property_value(camera, FC2_BRIGHTNESS, brightness); //PrintCameraInfo( camera->context ); //setup_camera( camera ); SetTimeStamping( camera->context, TRUE ); error = fc2GetGigEImageSettings(camera->context, &image_settings); if ( error != FC2_ERROR_OK ) { printf( "Error getting image settings settings: %d\n", error ); return NULL; } image_settings.width = width; image_settings.height = height; image_settings.offsetX = (int)(MAX_WIDTH - width)/2; image_settings.offsetY = (int)(MAX_HEIGHT - height)/2; image_settings.pixelFormat = FC2_PIXEL_FORMAT_RAW8; error = fc2SetGigEImageSettings(camera->context, &image_settings); if ( error != FC2_ERROR_OK ) { printf( "Error setting format7 settings: %d\n", error ); return NULL; } sleep(0.5); error = fc2StartCapture( camera->context ); if ( error != FC2_ERROR_OK ) { printf( "Error in fc2StartCapture: %d\n", error ); } sleep(0.5); return camera; }