void video_free(void)
{
    video_close();
    /* The original had a stray ';' after the if, which made the free unconditional. */
    if (conv_frame)
        av_free(conv_frame);
    conv_frame = NULL;
}
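/*
 * A minimal sketch of the allocation that video_free() above tears down,
 * assuming conv_frame is the module's AVFrame used for format conversion
 * and that it was created with the classic FFmpeg pattern
 * (avcodec_alloc_frame + av_free); the helper name below is illustrative,
 * not taken from the original source.
 */
static int video_alloc_conv_frame(void)
{
    conv_frame = avcodec_alloc_frame();   /* av_frame_alloc() in newer FFmpeg */
    if (!conv_frame)
        return -1;                        /* caller can then call video_free() */
    return 0;
}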
void open_picovic()
{
    struct vic20_config config = {
        .memory_expansion = have_memory_expansion,
        .memory_expansion_3k = have_memory_expansion_3k,
        .use_paddles = FALSE,
        .manual_screen_updates = FALSE,
        .frames_per_second = 50,
        .frame_interceptor = NULL
    };
    struct vic20_config *cfg = malloc(sizeof(struct vic20_config));
    memcpy(cfg, &config, sizeof(struct vic20_config));

    joystick_open();
    video_open();
    video_map();
    vic20_open(cfg);
    init_debugger();
}

void close_picovic()
{
    vic20_close();
    video_close();
    joystick_close();
}
int main(int argc, char *argv[])
{
    struct vic20_config config = {
        .memory_expansion_3k = FALSE,
        .memory_expansion = 0,
        .use_paddles = FALSE,
        .manual_screen_updates = FALSE,
        .frames_per_second = 50,
        .frame_interceptor = NULL
    };

    printf("shadowVIC 6502 CPU emulation test\n");
    joystick_open();
    video_open();
    video_map();
    vic20_open(&config);

    memcpy(&m[0x1000], tests_image, sizeof(tests_image));
    vic20_emulate(0x1000);

    vic20_close();
    video_close();
    joystick_close();
    printf("Tests passed.\n");
    return 0;
}
void main_close()
{
    debug_kill();
#ifdef WIN32
    timeEndPeriod(1);
#endif
    config_save();
    cmos_save(models[curmodel]);

    midi_close();
    mem_close();
    uef_close();
    csw_close();
    tube_6502_close();
    arm_close();
    x86_close();
    z80_close();
    w65816_close();
    n32016_close();
    disc_close(0);
    disc_close(1);
    scsi_close();
    ide_close();
    vdfs_close();
    ddnoise_close();
    tapenoise_close();
    al_close();
    video_close();
    log_close();
}
/* Free everything: */
void video_quit()
{
    video_close();
    gst_object_unref(GST_OBJECT(vthread));
    if (!audio_disabled)
        gst_object_unref(GST_OBJECT(athread));
    gst_object_unref(GST_OBJECT(pipeline));
}
Image *load(char *path, int index, int *nframes)
{
    static video_t *v = NULL;
    Image *im = NULL;

    if (index >= 0) {
        if (!v)
            TRY(v = video_open(path), ErrorOpen);
        if (nframes)
            *nframes = video_frame_count(v);
        TRY(im = video_get(v, index, 1), ErrorRead);
    } else {
        if (v)
            video_close(&v);
    }
    return im;

ErrorRead:
    video_close(&v);
    return NULL;
ErrorOpen:
    return NULL;
}
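/*
 * Usage sketch for load() above (calling convention inferred from the
 * snippet, not from the original caller): a non-negative index decodes
 * that frame and keeps the static video handle open, a negative index
 * releases it.
 */
void load_example(void)
{
    int nframes = 0;
    Image *first = load("clip.avi", 0, &nframes);       /* opens the file, returns frame 0 */
    Image *middle = load("clip.avi", nframes / 2, NULL); /* reuses the open handle */
    load("clip.avi", -1, NULL);                          /* closes the decoder */
    (void)first; (void)middle;
}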
void *horizontal_velocities_thread_main(void *data)
{
    //file_fd = fopen("test.yuv", "wb");
    double prevTime = 0;

    video_GrabImageGrey(img_old);
    for (;;) {
        //double t1 = util_timestamp();
        video_GrabImageGrey(img_new);
        //double t2 = util_timestamp();
        //printf("video grab took %f ms\n", (t2 - t1) * 1000);
        double currentTime = img_new->timestamp;

        int dxi, dyi;
        video_blocksum(img_old, img_new, &dxi, &dyi);
        //double t3 = util_timestamp();
        //printf("blocksum took %f ms\n", (t3 - t2) * 1000);

        if (dxi != 0 || dyi != 0) {
            // swap buffers
            struct img_struct *tmp = img_new;
            img_new = img_old;
            img_old = tmp;
        }

        pthread_mutex_lock(&velocity_access_mutex);
        dt = currentTime - prevTime;
        xv_buffer[buf_ind] = dxi / dt;
        yv_buffer[buf_ind] = dyi / dt;
        buf_ind = (buf_ind + 1) % BUF_SIZE;
        seqNum++;
        prevTime = currentTime;
        pthread_mutex_unlock(&velocity_access_mutex);

        printf("%f\n", util_timestamp());
        //printf("\ndxi=%i dyi=%i cur=%f pre=%f dt=%f\n", dxi, dyi, currentTime, prevTime, dt);
        //if (writeImagesToDisk)
        //    fwrite((const void *)img_new->buf, 320*240, 1, file_fd);
        // bottom camera = 60Hz
        //usleep(10000);
    }
    video_close();
    return 0;
}
int video_open(const char *filename)
{
    video_close();
    g_object_set(G_OBJECT(src), "location", filename, NULL);
    if (!gst_element_set_state(pipeline, GST_STATE_READY))
        return VIDEO_ERROR;
    return VIDEO_OK;
}
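/*
 * A minimal sketch of the globals video_open() above relies on, assuming
 * "pipeline" and "src" are module-level GStreamer objects wired roughly
 * like this; the element choices (decodebin/autovideosink) and the builder
 * name are assumptions, not taken from the original module.
 */
static GstElement *pipeline, *src;

static int video_build_pipeline(void)
{
    pipeline = gst_pipeline_new("player");
    src = gst_element_factory_make("filesrc", "src");
    GstElement *decode = gst_element_factory_make("decodebin", "decode");
    GstElement *sink = gst_element_factory_make("autovideosink", "sink");

    if (!pipeline || !src || !decode || !sink)
        return VIDEO_ERROR;

    gst_bin_add_many(GST_BIN(pipeline), src, decode, sink, NULL);
    gst_element_link(src, decode);
    /* decodebin exposes its source pads dynamically; a full build would
     * connect "pad-added" and link the new pad to the sink there. */
    return VIDEO_OK;
}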
HRESULT qcap_driver_destroy(Capture *capBox)
{
    TRACE("%p\n", capBox);

    if (capBox->fd != -1)
        video_close(capBox->fd);
    capBox->CritSect.DebugInfo->Spare[0] = 0;
    DeleteCriticalSection(&capBox->CritSect);
    CoTaskMemFree(capBox);
    return S_OK;
}
void snap_clear(void)
{
    if (video) {
        video_close();
        video = 0;
    } else {
        if (texture) {
            ogl_free_texture(texture);
        }
    }
    texture = NULL;
    platform_texture = NULL;
}
void application_free(application_t *app)
{
    DEBUG("application_free()");
    assert(app != 0);

    video_close(app->video);
    gps_free(app->gps);
    imu_free(app->imu);
    graphics_drawable_free(app->image);
    graphics_hud_free(app->hud);
    graphics_atlas_free(app->atlas1);
    graphics_atlas_free(app->atlas2);
    graphics_free(app->graphics);
    free(app);
}
void dgreed_close(void)
{
    malka_states_end();
    malka_states_close();
    sprsheet_close();
    malka_close();
    mfx_close();
    particles_close();
    keyval_close();
    sound_close();
    video_close();
}
int dgreed_main(int argc, const char** argv)
{
    params_init(argc, argv);
    rand_init(time(NULL));
    layouts_init();
    layouts_set("dvorak");

    bool fullscreen = true;
    if(params_find("-windowed") != ~0)
        fullscreen = false;

    video_init_ex(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT,
        "KeyMingler", fullscreen);
    font = font_load(FONT_FILE);

    float text_width = font_width(font, LOADING_TEXT);
    float text_height = font_height(font);
    Vector2 pos = vec2((SCREEN_WIDTH - text_width) / 2.0f,
        (SCREEN_HEIGHT - text_height) / 2.0f);
    font_draw(font, LOADING_TEXT, 0, &pos, COLOR_WHITE);
    video_present();
    system_update();

    game_init();
    sounds_init();
    music = sound_load_stream(MUSIC_FILE);
    sound_play(music);

    while(system_update()) {
        game_update();
        game_render();
        video_present();
        sound_update();
        if(key_up(KEY_QUIT))
            break;
    }

    font_free(font);
    sound_free(music);
    sounds_close();
    game_close();
    video_close();
    layouts_close();

    return 0;
}
int main(int argc, char *argv[])
{
    if (argc < 2 || strcmp(argv[1], "-h") == 0 || strcmp(argv[1], "--help") == 0) {
        usage();
        return 0;
    }
    signal(SIGINT, &onExit);
    streaming = 0;

    AVOutputFormat *fmt;
    AVFormatContext *oc;

    avcodec_init();
    av_register_all();

    fmt = av_guess_format(NULL, ".mkv", NULL);
    if (!fmt) {
        fprintf(stderr, "Could not find suitable output format\n");
        exit(1);
    }
    fmt->flags = fmt->flags | AVFMT_NOFILE;

    // allocate the output media context
    oc = avformat_alloc_context();
    if (!oc) {
        fprintf(stderr, "Memory error\n");
        exit(1);
    }
    oc->oformat = fmt;

    // set the output parameters (must be done even if no parameters).
    if (av_set_parameters(oc, NULL) < 0) {
        fprintf(stderr, "Invalid output format parameters\n");
        exit(1);
    }

    // * Initializations *
    video_record_init(fmt, oc);
    video_play_init();
    audio_record_init(fmt, oc);

    // * Start recording and encoding audio and video *
    stopRecording = 0;
    pthread_create(&video_thread_id, NULL, startVideoEncoding, NULL);
    pthread_create(&audio_thread_id, NULL, startAudioEncoding, NULL);
    pthread_create(&keyboard_thread_id, NULL, captureKeyboard, NULL);

    // * Connect to nameserver *
    char *name = "default";
    char *protocol = "0";
    char *control_port = CONTROL_PORT_S;
    if (argc >= 2) name = argv[1];
    if (argc >= 3) protocol = argv[2];
    if (argc >= 4) control_port = argv[3];
    register_nameserver(name, protocol, control_port);

    // * Establish control, audio, and video connections *
    if (protocol[0] == TCP)
        establish_peer_connections(SOCK_STREAM);
    else if (protocol[0] == UDP)
        establish_peer_connections(SOCK_DGRAM);
    else
        exit(1);
    send_init_control_packet(oc->streams[0], oc->streams[1]);

    // * Transmit data through the network *
    pthread_create(&control_network_thread_id, NULL, (void *)listen_control_packets, (void *)NULL);
    pthread_create(&video_network_thread_id, NULL, (void *)stream_video_packets, (void *)NULL);
    pthread_create(&audio_network_thread_id, NULL, (void *)stream_audio_packets, (void *)NULL);
    streaming = 1;

    // * Wait for threads to exit *
    pthread_join(video_thread_id, NULL);
    pthread_join(audio_thread_id, NULL);
    pthread_join(keyboard_thread_id, NULL);
    pthread_join(control_network_thread_id, NULL);
    pthread_join(video_network_thread_id, NULL);
    pthread_join(audio_network_thread_id, NULL);

    // * Exit *
    // pthread_mutex_destroy(&fileMutex);
    sdl_quit();
    video_close();
    audio_close();
    av_free(oc);
    printf("[RECORDER] Quit Successfully\n");
    return 0;
}
int dgreed_main(int argc, const char** argv)
{
    log_init("pview.log", LOG_LEVEL_INFO);
    video_init(800, 600, "PView");
    rand_init(666);

    GuiDesc style = greed_gui_style(false);
    gui_init(&style);
    particles_init("greed_assets/", 5);
    TexHandle empty = tex_load("greed_assets/empty.png");

    if(psystem_descs_count < 1)
        LOG_ERROR("No particle systems described!");

    int active_backg = 0;
    int active_desc = 0;
    const char* active_desc_name = psystem_descs[active_desc].name;

    RectF gui_area = rectf(0.0f, 0.0f, 520.0f, 80.0f);
    RectF gui_area2 = rectf(0.0f, 500.0f, 280.0f, 600.0f);
    Vector2 button_prev_pos = vec2(10.0f, 10.0f);
    Vector2 button_next_pos = vec2(280.0f, 10.0f);
    Vector2 button_backg_pos = vec2(10.0f, 550.0f);
    Vector2 label_name_pos = vec2(20.0f, 60.0f);
    char label_text[256];

    while(system_update()) {
        RectF src = rectf_null();
        RectF dest = {0.0f, 0.0f, EDITOR_WIDTH, EDITOR_HEIGHT};
        Color c = backgrounds[active_backg % ARRAY_SIZE(backgrounds)];
        video_draw_rect(empty, 0, &src, &dest, c);

        if(mouse_down(MBTN_LEFT)) {
            uint x, y;
            mouse_pos(&x, &y);
            Vector2 pos = vec2((float)x, (float)y);
            if(!rectf_contains_point(&gui_area, &pos))
                if(!rectf_contains_point(&gui_area2, &pos))
                    particles_spawn(active_desc_name, &pos, 0.0f);
        }

        particles_update(time_ms() / 1000.0f);

        sprintf(label_text, "Current psystem: %s", active_desc_name);
        gui_label(&label_name_pos, label_text);
        if(gui_button(&button_prev_pos, "Previous"))
            active_desc = MAX(0, active_desc-1);
        if(gui_button(&button_next_pos, "Next"))
            active_desc = MIN(psystem_descs_count-1, active_desc+1);
        if(gui_button(&button_backg_pos, "Background color"))
            active_backg++;
        active_desc_name = psystem_descs[active_desc].name;

        particles_draw();
        draw_grid(0, 12.0f);
        video_present();
    }

    tex_free(empty);
    particles_close();
    gui_close();
    greed_gui_free();
    video_close();
    log_close();
    return 0;
}
int main(int argc, char **argv)
{
    int i, j;
    int flags = fcntl(0, F_GETFL, 0);   /* get current file status flags */
    flags |= O_NONBLOCK;                /* turn off blocking flag */
    fcntl(0, F_SETFL, flags);

    int status;
    struct VISION_TASK visionTask;
    unsigned long nb = 0;

    /* Messaging used for GPP/DSP synchronization */
    struct DSP_MSG msgToDsp;
    struct DSP_MSG msgFromDsp;

    system("sysctl -w kernel.shmmax=134217728");
    system("sysctl -w kernel.shmall=134217728");
    system("./cexec.out -T ddspbase_tiomap3430.dof64P");

    // Image structures initialization
    struct img_struct *img_new = malloc(sizeof(struct img_struct));
    img_new->w = VIDEO_IN_W;
    img_new->h = VIDEO_IN_H;
    struct img_struct *img_dsp = malloc(sizeof(struct img_struct));
    img_dsp->w = VIDEO_OUT_W;
    img_dsp->h = VIDEO_OUT_H;

    if (DSPVisionOpen(&visionTask, img_new, img_dsp) != 0) {
        printf("Fail to open DSP Vision\n");
        return EXIT_FAILURE;
    }

    // Video streaming initialization
    // Video Input
    struct vid_struct vid;
    vid.device = (char *)"/dev/video1";
    vid.w = VIDEO_IN_W;
    vid.h = VIDEO_IN_H;
    vid.n_buffers = 4;
    if (video_init(&vid) < 0) {
        printf("Error initialising video\n");
        return EXIT_FAILURE;
    }

    // Video Resizing
    uint8_t quality_factor = 50;  // From 0 to 99 (99=high)
    uint8_t dri_jpeg_header = 0;
    int millisleep = 10;
    struct img_struct small;
    small.w = vid.w / DOWNSIZE_FACTOR;
    small.h = vid.h / DOWNSIZE_FACTOR;
    small.buf = (uint8_t *)malloc(small.w * small.h * 2);

    // Video Compression
    uint8_t *jpegbuf = (uint8_t *)malloc(vid.h * vid.w * 2);

    // Network Transmit
    struct UdpSocket *vsock;
    vsock = udp_socket("192.168.1.255", 5000, 5001, FMS_BROADCAST);

    struct sigaction action;
    memset(&action, 0, sizeof(action));
    action.sa_handler = &sigint_handler;
    sigaction(SIGINT, &action, &old_action);

    struct timespec start, end;
    unsigned long diffTime;

    printf("Vision started...\n");
    while (visionRun) {
        //usleep(1000 * millisleep);
        diffTime = 0;
        clock_gettime(CLOCK_MONOTONIC, &start);
        while (diffTime < 100000) {
            clock_gettime(CLOCK_MONOTONIC, &end);
            diffTime = ((end.tv_sec - start.tv_sec) * 1000000000
                + (end.tv_nsec - start.tv_nsec)) / 1000;
        }

        // Get picture
        video_grab_image(&vid, img_new);
        printf("d... ");
        clock_gettime(CLOCK_MONOTONIC, &start);

        // Send it to the DSP
        DSPProcessor_FlushMemory(visionTask.hProcessor,
            (PVOID)(visionTask.bufferSend), SIZEBUFF2DSP, 0);
        msgToDsp.dwCmd = VISION_WRITEREADY;
        msgToDsp.dwArg1 = (DWORD)SIZEBUFF2DSP / g_dwDSPWordSize;
        status = DSPNode_PutMessage(visionTask.hNode, &msgToDsp, DSP_FOREVER);
        if (DSP_FAILED(status)) {
            fprintf(stdout, "DSPProcessor_PutMessage failed. Status = 0x%x\n", (UINT)status);
        }

        // Read back
        status = DSPNode_GetMessage(visionTask.hNode, &msgFromDsp, DSP_FOREVER);
        if (DSP_FAILED(status)) {
            fprintf(stdout, "DSPProcessor_GetMessage failed. Status = 0x%x\n", (UINT)status);
        }

        // Go ahead and flush here
        DSPProcessor_InvalidateMemory(visionTask.hProcessor,
            (PVOID)(visionTask.bufferReceive), SIZEBUFF2MPU);

        clock_gettime(CLOCK_MONOTONIC, &end);
        diffTime = ((end.tv_sec - start.tv_sec) * 1000000000
            + (end.tv_nsec - start.tv_nsec)) / 1000;
        printf("%lu us\n", diffTime);

        printf("Send picture %lu\n", nb++);
        send_rtp_frame(
            vsock,                                                       // UDP
            img_dsp->buf, msgFromDsp.dwArg1,                             // JPEG
            img_dsp->w / DOWNSIZE_FACTOR, img_dsp->h / DOWNSIZE_FACTOR,  // Img Size
            0,                                                           // Format 422
            quality_factor,                                              // Jpeg-Quality
            dri_jpeg_header,                                             // DRI Header
            0                                                            // 90kHz time increment
        );
    }

    sigaction(SIGINT, &old_action, NULL);
    video_close(&vid);
    DSPVisionClose(&visionTask);
    return EXIT_SUCCESS;
}
void *computervision_thread_main(void *data)
{
    // Video Input
    struct vid_struct vid;
    vid.device = (char *)"/dev/video1";
    vid.w = 1280;
    vid.h = 720;
    vid.n_buffers = 4;
    if (video_init(&vid) < 0) {
        printf("Error initialising video\n");
        computervision_thread_status = -1;
        return 0;
    }

    // Frame Grabbing
    struct img_struct *img_new = video_create_image(&vid);

    // Frame Resizing
    uint8_t quality_factor = IMAGE_QUALITY_FACTOR;
    uint8_t dri_jpeg_header = 1;
    struct img_struct small;
    small.w = vid.w / IMAGE_DOWNSIZE_FACTOR;
    small.h = vid.h / IMAGE_DOWNSIZE_FACTOR;
    small.buf = (uint8_t *)malloc(small.w * small.h * 2);

    // Compressed image buffer
    uint8_t *jpegbuf = (uint8_t *)malloc(vid.h * vid.w * 2);

    // file index (search from 0)
    int file_index = 0;

    int microsleep = (int)(1000000. / IMAGE_FPS);

    while (computer_vision_thread_command > 0) {
        usleep(microsleep);
        video_grab_image(&vid, img_new);

        // Resize
        resize_uyuv(img_new, &small, IMAGE_DOWNSIZE_FACTOR);

        // JPEG encode the image:
        uint32_t image_format = FOUR_TWO_TWO;  // format (in jpeg.h)
        uint8_t *end = encode_image(small.buf, jpegbuf, quality_factor,
            image_format, small.w, small.h, dri_jpeg_header);
        uint32_t size = end - (jpegbuf);

#if IMAGE_SAVE
        FILE *save;
        char save_name[128];
        if (system("mkdir -p /data/video/images") == 0) {
            // search available index (max is 99)
            for (; file_index < 99; file_index++) {
                printf("search %d\n", file_index);
                sprintf(save_name, "/data/video/images/img_%02d.jpg", file_index);
                // test if file exists or not
                if (access(save_name, F_OK) == -1) {
                    printf("access\n");
                    save = fopen(save_name, "w");
                    if (save != NULL) {
                        fwrite(jpegbuf, sizeof(uint8_t), size, save);
                        fclose(save);
                    } else {
                        printf("Error when opening file %s\n", save_name);
                    }
                    // leave for loop
                    break;
                } else {
                    printf("file exists\n");
                }
            }
        }
#endif

        // Fork process
        int status;
        pid_t pid = fork();
        if (pid == 0) {
            // Open process to send using netcat in child process
            char nc_cmd[64];
            sprintf(nc_cmd, "nc %s %d", IMAGE_SERVER_IP, IMAGE_SERVER_PORT);
            FILE *netcat;
            netcat = popen(nc_cmd, "w");
            if (netcat != NULL) {
                fwrite(jpegbuf, sizeof(uint8_t), size, netcat);
                if (pclose(netcat) == 0) {
                    printf("Sending image successfully\n");
                }
            } else {
                printf("Fail sending image\n");
            }
            exit(0);
        } else if (pid < 0) {
            printf("Fork failed\n");
        } else {
            // Parent is waiting for child to terminate
            wait(&status);
        }
    }

    printf("Thread Closed\n");
    video_close(&vid);
    computervision_thread_status = -100;
    return 0;
}
/*{{{  PROC ..video.close (VAL VIDEO.DEVICE vdev, RESULT BOOL ok)*/
void _video_close (int *w)
{
    /* occam FFI wrapper: w[0] is the device handle, w[1] points at the RESULT */
    *((int *)w[1]) = video_close ((opi_video_device_t *)(w[0]));
}
int main(int argc, char *argv[])
{
    struct device dev;
    int ret;

    /* Options parsing */
    int do_file = 0, do_capture = 0, do_pause = 0;
    int do_set_time_per_frame = 0;
    int do_enum_formats = 0, do_set_format = 0;
    int do_enum_inputs = 0, do_set_input = 0;
    int do_list_controls = 0, do_get_control = 0, do_set_control = 0;
    int do_sleep_forever = 0, do_requeue_last = 0;
    int no_query = 0;
    char *endptr;
    int c;

    /* Controls */
    int ctrl_name = 0;
    int ctrl_value = 0;

    /* Video buffers */
    enum v4l2_memory memtype = V4L2_MEMORY_MMAP;
    unsigned int pixelformat = V4L2_PIX_FMT_YUYV;
    unsigned int width = 640;
    unsigned int height = 480;
    unsigned int nbufs = V4L_BUFFERS_DEFAULT;
    unsigned int input = 0;
    unsigned int skip = 0;
    unsigned int quality = (unsigned int)-1;
    unsigned int userptr_offset = 0;
    struct v4l2_fract time_per_frame = {1, 25};

    /* Capture loop */
    unsigned int delay = 0, nframes = (unsigned int)-1;
    const char *filename = "/dev/shm/capture.output";

    opterr = 0;

    while ((c = getopt_long(argc, argv, "c::d:f:F::hi:ln:pq:r:s:t:uw:", opts, NULL)) != -1) {
        switch (c) {
        case 'c':
            do_capture = 1;
            if (optarg)
                nframes = atoi(optarg);
            break;
        case 'd':
            delay = atoi(optarg);
            break;
        case 'f':
            do_set_format = 1;
            if (strcasecmp(optarg, "MJPEG") == 0)
                pixelformat = V4L2_PIX_FMT_MJPEG;
            else if (strcasecmp(optarg, "YUYV") == 0)
                pixelformat = V4L2_PIX_FMT_YUYV;
            else if (strcasecmp(optarg, "UYVY") == 0)
                pixelformat = V4L2_PIX_FMT_UYVY;
            else if (strcasecmp(optarg, "Y16") == 0)
                pixelformat = V4L2_PIX_FMT_Y16;
            else if (strcasecmp(optarg, "SGRBG10_DPCM8") == 0)
                pixelformat = V4L2_PIX_FMT_SGRBG10DPCM8;
            else if (strcasecmp(optarg, "SGRBG10") == 0)
                pixelformat = V4L2_PIX_FMT_SGRBG10;
#ifdef V4L2_PIX_FMT_SGRBG12
            else if (strcasecmp(optarg, "SGRBG12") == 0)
                pixelformat = V4L2_PIX_FMT_SGRBG12;
#endif
            else if (strcasecmp(optarg, "DV") == 0)
                pixelformat = V4L2_PIX_FMT_DV;
            else {
                printf("Unsupported video format '%s'\n", optarg);
                return 1;
            }
            break;
        case 'F':
            do_file = 1;
            if (optarg)
                filename = optarg;
            break;
        case 'h':
            usage(argv[0]);
            return 0;
        case 'i':
            do_set_input = 1;
            input = atoi(optarg);
            break;
        case 'l':
            do_list_controls = 1;
            break;
        case 'n':
            nbufs = atoi(optarg);
            if (nbufs > V4L_BUFFERS_MAX)
                nbufs = V4L_BUFFERS_MAX;
            break;
        case 'p':
            do_pause = 1;
            break;
        case 'q':
            quality = atoi(optarg);
            break;
        case 'r':
            ctrl_name = strtol(optarg, &endptr, 0);
            if (*endptr != 0) {
                printf("Invalid control name '%s'\n", optarg);
                return 1;
            }
            do_get_control = 1;
            break;
        case 's':
            do_set_format = 1;
            width = strtol(optarg, &endptr, 10);
            if (*endptr != 'x' || endptr == optarg) {
                printf("Invalid size '%s'\n", optarg);
                return 1;
            }
            height = strtol(endptr + 1, &endptr, 10);
            if (*endptr != 0) {
                printf("Invalid size '%s'\n", optarg);
                return 1;
            }
            break;
        case 't':
            do_set_time_per_frame = 1;
            time_per_frame.numerator = strtol(optarg, &endptr, 10);
            if (*endptr != '/' || endptr == optarg) {
                printf("Invalid time per frame '%s'\n", optarg);
                return 1;
            }
            time_per_frame.denominator = strtol(endptr + 1, &endptr, 10);
            if (*endptr != 0) {
                printf("Invalid time per frame '%s'\n", optarg);
                return 1;
            }
            break;
        case 'u':
            memtype = V4L2_MEMORY_USERPTR;
            break;
        case 'w':
            ctrl_name = strtol(optarg, &endptr, 0);
            if (*endptr != ' ' || endptr == optarg) {
                printf("Invalid control name '%s'\n", optarg);
                return 1;
            }
            ctrl_value = strtol(endptr + 1, &endptr, 0);
            if (*endptr != 0) {
                printf("Invalid control value '%s'\n", optarg);
                return 1;
            }
            do_set_control = 1;
            break;
        case OPT_ENUM_FORMATS:
            do_enum_formats = 1;
            break;
        case OPT_ENUM_INPUTS:
            do_enum_inputs = 1;
            break;
        case OPT_NO_QUERY:
            no_query = 1;
            break;
        case OPT_REQUEUE_LAST:
            do_requeue_last = 1;
            break;
        case OPT_SKIP_FRAMES:
            skip = atoi(optarg);
            break;
        case OPT_SLEEP_FOREVER:
            do_sleep_forever = 1;
            break;
        case OPT_USERPTR_OFFSET:
            userptr_offset = atoi(optarg);
            break;
        default:
            printf("Invalid option -%c\n", c);
            printf("Run %s -h for help.\n", argv[0]);
            return 1;
        }
    }

    if (optind >= argc) {
        usage(argv[0]);
        return 1;
    }

    if (!do_file)
        filename = NULL;

    /* Open the video device. */
    ret = video_open(&dev, argv[optind], no_query);
    if (ret < 0)
        return 1;

    dev.memtype = memtype;

    if (do_get_control)
        uvc_get_control(&dev, ctrl_name);
    if (do_set_control)
        uvc_set_control(&dev, ctrl_name, ctrl_value);

    if (do_list_controls)
        video_list_controls(&dev);

    if (do_enum_formats) {
        printf("- Available formats:\n");
        video_enum_formats(&dev, V4L2_BUF_TYPE_VIDEO_CAPTURE);
        video_enum_formats(&dev, V4L2_BUF_TYPE_VIDEO_OUTPUT);
        video_enum_formats(&dev, V4L2_BUF_TYPE_VIDEO_OVERLAY);
    }

    if (do_enum_inputs) {
        printf("- Available inputs:\n");
        video_enum_inputs(&dev);
    }

    if (do_set_input) {
        video_set_input(&dev, input);
        ret = video_get_input(&dev);
        printf("Input %d selected\n", ret);
    }

    /* Set the video format. */
    if (do_set_format) {
        if (video_set_format(&dev, width, height, pixelformat) < 0) {
            video_close(&dev);
            return 1;
        }
    }

    if (!no_query || do_capture)
        video_get_format(&dev);

    /* Set the frame rate. */
    if (do_set_time_per_frame) {
        if (video_set_framerate(&dev, &time_per_frame) < 0) {
            video_close(&dev);
            return 1;
        }
    }

    while (do_sleep_forever)
        sleep(1000);

    if (!do_capture) {
        video_close(&dev);
        return 0;
    }

    /* Set the compression quality. */
    if (video_set_quality(&dev, quality) < 0) {
        video_close(&dev);
        return 1;
    }

    if (video_prepare_capture(&dev, nbufs, userptr_offset, filename)) {
        video_close(&dev);
        return 1;
    }

    if (do_pause) {
        printf("Press enter to start capture\n");
        getchar();
    }

    if (video_do_capture(&dev, nframes, skip, delay, filename, do_requeue_last) < 0) {
        video_close(&dev);
        return 1;
    }

    video_close(&dev);
    return 0;
}
application_t *application_init(struct config *cfg)
{
    DEBUG("application_init()");
    assert(cfg != 0);

    application_t *app = calloc(1, sizeof(struct _application));
    assert(app != 0);

    // Initialize graphics
    if(!(app->graphics = graphics_init(cfg->app_window_id))) {
        ERROR("Cannot initialize graphics");
        goto error;
    }

    // Create atlases
    if(!(app->atlas1 = graphics_atlas_create(cfg->graphics_font_file, cfg->graphics_font_size_1)) ||
       !(app->atlas2 = graphics_atlas_create(cfg->graphics_font_file, cfg->graphics_font_size_2))) {
        ERROR("Cannot create atlas");
        goto error;
    }

    // Create image
    if(!(app->image = graphics_image_create(app->graphics, cfg->video_width, cfg->video_height,
                                            cfg->video_format, ANCHOR_CENTER))) {
        ERROR("Cannot create image");
        goto error;
    }

    // Create HUD
    if(!(app->hud = graphics_hud_create(app->graphics, app->atlas1, cfg->graphics_font_color_1,
                                        cfg->graphics_font_size_1, cfg->video_hfov, cfg->video_vfov))) {
        ERROR("Cannot create HUD");
        goto error;
    }

    // Initialize GPS
    memcpy(&app->gps_config, &cfg->gps_conf, sizeof(struct gps_config));
    app->gps_config.userdata = app;
    app->gps_config.create_label = create_label_handler;
    app->gps_config.delete_label = delete_label_handler;
    if(!(app->gps = gps_init(cfg->gps_device, &app->gps_config))) {
        ERROR("Cannot initialize GPS");
        goto error;
    }

    // Initialize IMU
    memcpy(&app->imu_config, &cfg->imu_conf, sizeof(struct imu_config));
    if(!(app->imu = imu_init(cfg->imu_device, &app->imu_config))) {
        ERROR("Cannot initialize IMU");
        goto error;
    }

    // Open video
    if(!(app->video = video_open(cfg->video_device, cfg->video_width, cfg->video_height,
                                 cfg->video_format, cfg->video_interlace))) {
        ERROR("Cannot open video device");
        goto error;
    }

    // Copy arguments
    app->video_width = cfg->video_width;
    app->video_height = cfg->video_height;
    app->window_width = cfg->window_width;
    app->window_height = cfg->window_height;
    app->video_hfov = cfg->video_hfov;
    app->video_vfov = cfg->video_vfov;
    app->visible_distance = cfg->app_landmark_vis_dist;
    memcpy(app->label_color, cfg->graphics_font_color_2, 4);

    return app;

error:
    if(app->video) video_close(app->video);
    if(app->gps) gps_free(app->gps);
    if(app->imu) imu_free(app->imu);
    if(app->image) graphics_drawable_free(app->image);
    if(app->hud) graphics_hud_free(app->hud);
    if(app->atlas1) graphics_atlas_free(app->atlas1);
    if(app->atlas2) graphics_atlas_free(app->atlas2);
    if(app->graphics) graphics_free(app->graphics);
    free(app);
    return NULL;
}
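/*
 * Pairing sketch: application_init() above together with application_free()
 * shown earlier; how the config struct gets populated is an assumption here
 * (e.g. from a config file), not part of the original code.
 */
void application_example(struct config *cfg)
{
    application_t *app = application_init(cfg);
    if (app) {
        /* ... main loop: grab video frames, update HUD, render ... */
        application_free(app);
    }
}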
void horizontal_velocities_close()
{
    /* file_fd is only opened when raw-frame dumping is enabled, so guard the close */
    if (file_fd)
        fclose(file_fd);
    video_close();
}