/*
 * Allocate the raw tape buffer: gt units of 1024000 bytes.
 * On failure, falls back to a minimal 1000000-byte buffer.
 *
 * FIX: the original set raw_buflen = gt*1024000 unconditionally, even when
 * the big allocation failed and only the 1000000-byte fallback (or nothing
 * at all) was allocated — later code would then write far past the end of
 * the real buffer. raw_buflen now always reflects the buffer actually held.
 *
 * returns: 0 (matches original contract; errors are reported via gui_error)
 */
int set_rawbuflen(unsigned int gt)
{
	raw_tape = (unsigned char *)malloc(gt * 1024000);
	if (raw_tape == NULL) {
		gui_error("Couldn't Allocate enough Memory for the TAPE buffer");

		/* fall back to a minimal buffer */
		raw_tape = (unsigned char *)malloc(1000000);
		if (raw_tape == NULL) {
			gui_error("Couldn't Allocate enough Memory for ANY tape buffer :(");
			raw_buflen = 0; /* no usable buffer at all */
		} else {
			raw_buflen = 1000000;
		}
		return (0);
	}

	raw_buflen = gt * 1024000;
	return (0);
}
/*
 * Load a raw sample file into the global tape buffer.
 *
 * FIX: fread() returns size_t and can never be -1, so the original
 * `raw_samples == -1` error check was dead code; a read error is now
 * detected with ferror() before the stream is closed.
 *
 * returns: number of samples read, or 1 on error (original contract)
 */
int load_raw(char fnam[])
{
	FILE *handle;

	handle = fopen(fnam, "rb");
	if (handle == NULL) {
		gui_error(":( Couldn't Read the Input file");
		return (1);
	}

	raw_samples = fread(raw_tape, 1, raw_buflen, handle);
	if (ferror(handle)) {
		fclose(handle);
		gui_error(":( Couldn't Read the Input file");
		return (1);
	}
	fclose(handle);

	raw_position = 0;

	/* a full buffer means the file was (at least) as big as the buffer */
	if (raw_samples == raw_buflen)
		gui_error("RAW file too big");

	return (raw_samples);
}
static void default_hash_funcs(void) { // Some funcs could already be enabled by cmdline arg if (hash_funcs_count_enabled()) return; bool has_enabled = false; // Try to enable default functions for (int i = 0; i < HASH_FUNCS_N; i++) { if (HASH_FUNC_IS_DEFAULT(i) && hash.funcs[i].supported) { gtk_toggle_button_set_active(gui.hash_widgets[i].button, true); has_enabled = true; } } if (has_enabled) return; // Try to enable any supported function for (int i = 0; i < HASH_FUNCS_N; i++) { if (hash.funcs[i].supported) { gtk_toggle_button_set_active(gui.hash_widgets[i].button, true); return; } } gui_error(_("Failed to enable any supported hash functions.")); exit(EXIT_FAILURE); }
/*
 * "Apply" handler: read preferences from the widgets, save them to disk,
 * then try to upload them to the NXTCam, reporting failures to the user.
 */
G_MODULE_EXPORT void gui_preferences_apply(GtkWidget *widget, gui_gd_t *gd)
{
	/* Pull the current widget state into the preferences structure */
	gui_preferences_gui2data(gd, &gd->preferences.current);

	/* Persist to disk first */
	if (gui_preferences_save("config.ini", &gd->preferences.current) == -1)
		gui_error(gd, "Saving", "Could not save preferences");

	/* Then push to the device; nothing more to do if that worked */
	if (gui_preferences_upload(&gd->preferences.current) != -1)
		return;

	/* Distinguish a real upload failure from simply being unplugged */
	if (nxtcam_com_is_connected())
		gui_error(gd, "Uploading", "Can not upload preferences to NXTCam.");
	else
		gui_warning(gd, "Not connected", "NXTCam is not connected, so preferences are not uploaded.");
}
void fb_shuffle(int shuffle) { char **item, *tmp; int i, j, k, n; if (shuffle) mvpw_set_menu_title(playlist_widget, "Shuffle Play"); else mvpw_set_menu_title(playlist_widget, "Play All"); item = alloca(sizeof(char*) * MAX_PLAYLIST_ENTRIES); // Recurse from the current directory and find all // audio files n = 0; recurse_find_audio(cwd, item, &n); if (n == 0) { gui_error("No audio files exist in this directory or its subdirectories"); return; } if (shuffle && (n > 1)) { for (i=0; i < MAX_PLAYLIST_ENTRIES; i++) { j = rand() % n; k = rand() % n; tmp = item[k]; item[k] = item[j]; item[j] = tmp; } } printf("created playlist of %d songs\n", n); switch_hw_state(MVPMC_STATE_FILEBROWSER); video_functions = &file_functions; playlist_clear(); mvpw_show(playlist_widget); mvpw_focus(playlist_widget); playlist_create(item, n); // Release the list of items for (i = 0; i < n; i++) free(item[i]); if (shuffle) mvpw_set_text_str(fb_name, "Shuffle Play"); else mvpw_set_text_str(fb_name, "Play All"); mvpw_show(fb_progress); mvpw_set_timer(fb_progress, fb_osd_update, 500); playlist_play(NULL); }
/*
 * Set the raw tape sample rate; values outside [BASE_SAMPLE_RATE, 100000]
 * are rejected and the rate falls back to BASE_SAMPLE_RATE.
 */
int set_raw_samprate(unsigned int gt)
{
	raw_sample_rate = gt;

	/* clamp out-of-range rates back to the base rate */
	if (raw_sample_rate < BASE_SAMPLE_RATE || raw_sample_rate > 100000) {
		gui_error("Invalid Sample Rate");
		raw_sample_rate = BASE_SAMPLE_RATE;
	}

	return (0);
}
/*
 * Re-size the raw tape buffer. Refuses while the tape is running
 * (or forced into play/record), then frees the old buffer and lets
 * set_rawbuflen() allocate the new one.
 */
int set_raw_buflen(int gt)
{
	int tape_busy = (tape_operation != TAPE_STOPPED) ||
	                (tape_override == FORCE_PLAY) ||
	                (tape_override == FORCE_RECORD);

	if (tape_busy) {
		gui_error("STOP the tape before changing Buffer Length");
		return (0);
	}

	/* drop the old buffer; set_rawbuflen() installs the replacement */
	free(raw_tape);
	set_rawbuflen(gt);
	return (0);
}
static void va_log_message(enum log_priority priority, const char *fmt, va_list args) { SDL_LogPriority sdl_priority; int size; char *buffer; va_list temp; if (priority >= LOG_PRIORITY_NUM_PRIORITIES) priority = LOG_PRIORITY_INFO; sdl_priority = priority_info[priority].sdl_priority; va_copy(temp, args); size = vsnprintf(NULL, 0, fmt, temp) + 1; va_end(temp); buffer = NULL; if (size) { buffer = malloc(size); if (!buffer) return; } va_copy(temp, args); vsnprintf(buffer, size, fmt, temp); va_end(temp); #if GUI_ENABLED if (gui_enabled && (priority == LOG_PRIORITY_ERROR)) { gui_error(buffer); } #endif SDL_LogMessage(SDL_LOG_CATEGORY_APPLICATION, sdl_priority, buffer, ""); free(buffer); }
/*
 * Write the recorded tape data (raw_tape[0..raw_position)) to a file.
 *
 * FIX: fwrite()/fclose() results were ignored, so a short write or a
 * failed flush silently "succeeded" and still reset raw_position,
 * discarding the recording. Errors are now reported and the tape
 * position is preserved so the user can retry.
 *
 * returns: 0 on success, 1 on error
 */
int save_raw(char fnam[])
{
	FILE *handle2;
	int size_written;

	/* open a file for output */
	handle2 = fopen(fnam, "wb");
	if (handle2 == NULL) {
		gui_error(":( Couldn't Open output file");
		return (1);
	}

	size_written = fwrite(raw_tape, 1, raw_position, handle2);

	/* fclose flushes buffered data, so its result matters on writes */
	if (fclose(handle2) != 0 || size_written != raw_position) {
		gui_error(":( Couldn't Write the output file");
		return (1);
	}

	raw_position = 0;
	return (0);
}
int load_wav(char fnam[]) { FILE * handle,handle2; unsigned int wav_length,f; unsigned int leng,size_written,size_read,ret; char lbl[200]; handle=fopen( fnam, "rb" ); if( handle != NULL ) { size_read = fread(raw_tape,1,raw_buflen,handle); fclose( handle ); if( size_read == -1 ) { gui_error(":( Couldn't Read the Input file"); return(1); } } else { gui_error(":( Couldn't Read the Input file"); return(1); } raw_position=0; //sprintf(lbl,"Size Read was %d",size_read); //gui_error(lbl); if(size_read==raw_buflen) gui_error("WAV file too big"); //raw_tape[5]=0; //sprintf(lbl,"Header %s",raw_tape); //MessageBox(NULL,lbl,"PALE ",MB_YESNOCANCEL | MB_DEFBUTTON1); if(raw_tape[0]!='R') { gui_error(":( NOT A standard RIFF WAV file"); return(1); } //Build Header Info raw_samples=((int)raw_tape[43]<<(3*8))+(int)raw_tape[42]<<(2*8)+((int)raw_tape[41]<<8)+((int)raw_tape[40]); //sprintf(lbl,"T43 = %x 42=%x 41=%x 40=%x Samples was %d",raw_tape[43],raw_tape[42],raw_tape[41],raw_tape[40],raw_samples); //gui_error(lbl); //sprintf(lbl,"T43 = %x 42=%x 41=%x ",raw_tape[43],raw_tape[42],raw_tape[41]); //ret=MessageBox(NULL,lbl,"PALE Load WAV",MB_ICONERROR | MB_OK); if(raw_tape[20]!=0x01) { gui_error("This looks like stereo, I expected MONO"); return(1); } raw_sample_rate=(raw_tape[26]<<(2*8))+(raw_tape[25]<<8)+raw_tape[24]; //sprintf(lbl,"T26 = %x 25=%x 24=%x ",raw_tape[26],raw_tape[25],raw_tape[24]); //ret=MessageBox(NULL,lbl,"PALE Load WAV",MB_ICONERROR | MB_OK); //sprintf(lbl,"IN DLL Sample Rate = %x",raw_sample_rate); //ret=MessageBox(NULL,lbl,"PALE Load WAV",MB_ICONERROR | MB_OK); if(raw_tape[36]!='d') { gui_error(":( Couldn't find the WAV data segment"); return(1); } //now copy the data down into the first bit for(f=0;f<raw_samples;f++) raw_tape[f]=raw_tape[f+44]; return(raw_samples); }
int save_wav(char fnam[]) { FILE *handle; FILE *handle2; unsigned int leng,size_written,size_read,ret; char t_head[WAV_HEADER_LENGTH]; //Build Header Info t_head[0]='R';t_head[1]='I';t_head[2]='F';t_head[3]='F'; leng=WAV_HEADER_LENGTH+raw_position-8; // t_head[4]=leng>>(3*8); // t_head[5]=leng>>(2*8); // t_head[6]=leng>>8; // t_head[7]=leng; t_head[4]=leng; t_head[5]=leng>>8; t_head[6]=leng>>(2*8); t_head[7]=leng>>(3*8); t_head[8]='W';t_head[9]='A';t_head[10]='V';t_head[11]='E'; t_head[12]='f';t_head[13]='m';t_head[14]='t';t_head[15]=' '; t_head[16]=0x10;//leng of format chunk always 0x10 t_head[17]=0; t_head[18]=0; t_head[19]=0; t_head[20]=0x01;//channels t_head[21]=0; t_head[22]=0x01;//audio format always 0x01 t_head[23]=0; t_head[24]=raw_sample_rate; //0x22;//22050 hertz t_head[25]=raw_sample_rate>>8; //0x56; t_head[26]=raw_sample_rate>>(2*8); t_head[27]=0; t_head[28]=raw_sample_rate; // 0x22;//bytes per sec t_head[29]=raw_sample_rate>>8; // 0x56; t_head[30]=raw_sample_rate>>(2*8); t_head[31]=0; t_head[32]=0x01;//bytes per sample t_head[33]=0; t_head[34]=0x08;//bits per sample t_head[35]=0; t_head[36]='d';t_head[37]='a';t_head[38]='t';t_head[39]='a'; t_head[40]=raw_position; t_head[41]=raw_position>>8; t_head[42]=raw_position>>(2*8); t_head[43]=raw_position>>(3*8); /* open a file for output */ handle2 = fopen( fnam,"wb"); if( handle2 != NULL ) { size_written = fwrite(t_head,1,WAV_HEADER_LENGTH,handle2 ); size_written = fwrite(raw_tape,1,raw_position, handle2 ); fclose( handle2 ); } else { gui_error(":( Couldn't Open output file"); return(1); } return(0); }
/* * capture loop (should run in a separate thread) * args: * data - pointer to user data (device data + options data) * * asserts: * device data is not null * * returns: pointer to return code */ void *capture_loop(void *data) { capture_loop_data_t *cl_data = (capture_loop_data_t *) data; v4l2_dev_t *device = (v4l2_dev_t *) cl_data->device; options_t *my_options = (options_t *) cl_data->options; //config_t *my_config = (config_t *) cl_data->config; uint64_t my_last_photo_time = 0; /*timer count*/ int my_photo_npics = 0;/*no npics*/ /*asserts*/ assert(device != NULL); /*reset quit flag*/ quit = 0; if(debug_level > 1) printf("GUVCVIEW: capture thread (tid: %u)\n", (unsigned int) syscall (SYS_gettid)); int ret = 0; int render_flags = 0; if (strcasecmp(my_options->render_flag, "full") == 0) render_flags = 1; else if (strcasecmp(my_options->render_flag, "max") == 0) render_flags = 2; render_set_verbosity(debug_level); if(render_init(render, device->format.fmt.pix.width, device->format.fmt.pix.height, render_flags) < 0) render = RENDER_NONE; else { render_set_event_callback(EV_QUIT, &quit_callback, NULL); render_set_event_callback(EV_KEY_V, &key_V_callback, device); render_set_event_callback(EV_KEY_I, &key_I_callback, NULL); render_set_event_callback(EV_KEY_UP, &key_UP_callback, device); render_set_event_callback(EV_KEY_DOWN, &key_DOWN_callback, device); render_set_event_callback(EV_KEY_LEFT, &key_LEFT_callback, device); render_set_event_callback(EV_KEY_RIGHT, &key_RIGHT_callback, device); } /*add a video capture timer*/ if(my_options->video_timer > 0) { my_video_timer = NSEC_PER_SEC * my_options->video_timer; my_video_begin_time = v4l2core_time_get_timestamp(); /*timer count*/ /*if are not saving video start it*/ if(!get_encoder_status()) start_encoder_thread(device); } /*add a photo capture timer*/ if(my_options->photo_timer > 0) { my_photo_timer = NSEC_PER_SEC * my_options->photo_timer; my_last_photo_time = v4l2core_time_get_timestamp(); /*timer count*/ } 
if(my_options->photo_npics > 0) my_photo_npics = my_options->photo_npics; v4l2core_start_stream(device); while(!quit) { if(restart) { restart = 0; /*reset*/ v4l2core_stop_stream(device); /*close render*/ render_close(); v4l2core_clean_buffers(device); /*try new format (values prepared by the request callback)*/ ret = v4l2core_update_current_format(device); /*try to set the video stream format on the device*/ if(ret != E_OK) { fprintf(stderr, "GUCVIEW: could not set the defined stream format\n"); fprintf(stderr, "GUCVIEW: trying first listed stream format\n"); v4l2core_prepare_valid_format(device); v4l2core_prepare_valid_resolution(device); ret = v4l2core_update_current_format(device); if(ret != E_OK) { fprintf(stderr, "GUCVIEW: also could not set the first listed stream format\n"); gui_error(device, "Guvcview error", "could not start a video stream in the device", 1); return ((void *) -1); } } /*restart the render with new format*/ if(render_init(render, device->format.fmt.pix.width, device->format.fmt.pix.height, render_flags) < 0) render = RENDER_NONE; else { render_set_event_callback(EV_QUIT, &quit_callback, NULL); render_set_event_callback(EV_KEY_V, &key_V_callback, device); render_set_event_callback(EV_KEY_I, &key_I_callback, NULL); render_set_event_callback(EV_KEY_UP, &key_UP_callback, device); render_set_event_callback(EV_KEY_DOWN, &key_DOWN_callback, device); render_set_event_callback(EV_KEY_LEFT, &key_LEFT_callback, device); render_set_event_callback(EV_KEY_RIGHT, &key_RIGHT_callback, device); } if(debug_level > 0) printf("GUVCVIEW: reset to pixelformat=%x width=%i and height=%i\n", device->requested_fmt, device->format.fmt.pix.width, device->format.fmt.pix.height); v4l2core_start_stream(device); } if( v4l2core_get_frame(device) == E_OK) { /*decode the raw frame*/ if(v4l2core_frame_decode(device) != E_OK) { fprintf(stderr, "GUVCIEW: Error - Couldn't decode frame\n"); } /*run software autofocus (must be called after frame_decode)*/ if(do_soft_autofocus || 
do_soft_focus) do_soft_focus = v4l2core_soft_autofocus_run(device); /*render the decoded frame*/ snprintf(render_caption, 29, "Guvcview (%2.2f fps)", v4l2core_get_realfps()); render_set_caption(render_caption); render_frame(device->yuv_frame, my_render_mask); /* Save frame to file */ char filename[50]; sprintf(filename, "/home/cobra/Desktop/frame/%d", device->frame_index); save_image_bmp(device, filename); if(check_photo_timer()) { if((device->timestamp - my_last_photo_time) > my_photo_timer) { save_image = 1; my_last_photo_time = device->timestamp; if(my_options->photo_npics > 0) { if(my_photo_npics > 0) my_photo_npics--; else stop_photo_timer(); /*close timer*/ } } } if(check_video_timer()) { if((device->timestamp - my_video_begin_time) > my_video_timer) stop_video_timer(device); } if(save_image) { char *img_filename = NULL; /*get_photo_[name|path] always return a non NULL value*/ char *name = strdup(get_photo_name()); char *path = strdup(get_photo_path()); if(get_photo_sufix_flag()) { char *new_name = add_file_suffix(path, name); free(name); /*free old name*/ name = new_name; /*replace with suffixed name*/ } int pathsize = strlen(path); if(path[pathsize] != '/') img_filename = smart_cat(path, '/', name); else img_filename = smart_cat(path, 0, name); //if(debug_level > 1) // printf("GUVCVIEW: saving image to %s\n", img_filename); snprintf(status_message, 79, _("saving image to %s"), img_filename); gui_status_message(status_message); v4l2core_save_image(device, img_filename, get_photo_format()); free(path); free(name); free(img_filename); save_image = 0; /*reset*/ } if(video_capture_get_save_video()) { #ifdef USE_PLANAR_YUV int size = (device->format.fmt.pix.width * device->format.fmt.pix.height * 3) / 2; #else int size = device->format.fmt.pix.width * device->format.fmt.pix.height * 2; #endif uint8_t *input_frame = device->yuv_frame; /* * TODO: check codec_id, format and frame flags * (we may want to store a compressed format */ if(get_video_codec_ind() == 0) { 
switch(device->requested_fmt) { case V4L2_PIX_FMT_H264: input_frame = device->h264_frame; size = (int) device->h264_frame_size; break; default: input_frame = device->raw_frame; size = (int) device->raw_frame_size; break; } } encoder_add_video_frame(input_frame, size, device->timestamp, device->isKeyframe); /* * exponencial scheduler * with 50% threshold (nanosec) * and max value of 250 ms (4 fps) */ int time_sched = encoder_buff_scheduler(ENCODER_SCHED_EXP, 0.5, 250); if(time_sched > 0) { switch(device->requested_fmt) { case V4L2_PIX_FMT_H264: { uint32_t framerate = time_sched; /*nanosec*/ v4l2core_set_h264_frame_rate_config(device, framerate); break; } default: { struct timespec req = { .tv_sec = 0, .tv_nsec = time_sched};/*nanosec*/ nanosleep(&req, NULL); break; } } } } /*we are done with the frame buffer release it*/ v4l2core_release_frame(device); } } v4l2core_stop_stream(device); render_close(); return ((void *) 0); } /* * start the encoder thread * args: * data - pointer to user data * * asserts: * none * * returns: error code */ int start_encoder_thread(void *data) { int ret = __THREAD_CREATE(&encoder_thread, encoder_loop, data); if(ret) fprintf(stderr, "GUVCVIEW: encoder thread creation failed (%i)\n", ret); else if(debug_level > 2) printf("GUVCVIEW: created encoder thread with tid: %u\n", (unsigned int) encoder_thread); return ret; }
/* * encoder loop (should run in a separate thread) * args: * data - pointer to user data * * asserts: * none * * returns: pointer to return code */ static void *encoder_loop(void *data) { my_encoder_status = 1; if(debug_level > 1) printf("GUVCVIEW: encoder thread (tid: %u)\n", (unsigned int) syscall (SYS_gettid)); /*get the audio context*/ audio_context_t *audio_ctx = get_audio_context(); __THREAD_TYPE encoder_audio_thread; int channels = 0; int samprate = 0; if(audio_ctx) { channels = audio_ctx->channels; samprate = audio_ctx->samprate; } if(debug_level > 0) printf("GUVCVIEW: audio [channels= %i; samprate= %i] \n", channels, samprate); /*create the encoder context*/ encoder_context_t *encoder_ctx = encoder_get_context( v4l2core_get_requested_frame_format(), get_video_codec_ind(), get_audio_codec_ind(), get_video_muxer(), v4l2core_get_frame_width(), v4l2core_get_frame_height(), v4l2core_get_fps_num(), v4l2core_get_fps_denom(), channels, samprate); /*store external SPS and PPS data if needed*/ if(encoder_ctx->video_codec_ind == 0 && /*raw - direct input*/ v4l2core_get_requested_frame_format() == V4L2_PIX_FMT_H264) { /*request a IDR (key) frame*/ v4l2core_h264_request_idr(); if(debug_level > 0) printf("GUVCVIEW: storing external pps and sps data in encoder context\n"); encoder_ctx->h264_pps_size = v4l2core_get_h264_pps_size(); if(encoder_ctx->h264_pps_size > 0) { encoder_ctx->h264_pps = calloc(encoder_ctx->h264_pps_size, sizeof(uint8_t)); if(encoder_ctx->h264_pps == NULL) { fprintf(stderr,"GUVCVIEW: FATAL memory allocation failure (encoder_loop): %s\n", strerror(errno)); exit(-1); } memcpy(encoder_ctx->h264_pps, v4l2core_get_h264_pps(), encoder_ctx->h264_pps_size); } encoder_ctx->h264_sps_size = v4l2core_get_h264_sps_size(); if(encoder_ctx->h264_sps_size > 0) { encoder_ctx->h264_sps = calloc(encoder_ctx->h264_sps_size, sizeof(uint8_t)); if(encoder_ctx->h264_sps == NULL) { fprintf(stderr,"GUVCVIEW: FATAL memory allocation failure (encoder_loop): %s\n", 
strerror(errno)); exit(-1); } memcpy(encoder_ctx->h264_sps, v4l2core_get_h264_sps(), encoder_ctx->h264_sps_size); } } uint32_t current_framerate = 0; if(v4l2core_get_requested_frame_format() == V4L2_PIX_FMT_H264) { /* store framerate since it may change due to scheduler*/ current_framerate = v4l2core_get_h264_frame_rate_config(); } char *video_filename = NULL; /*get_video_[name|path] always return a non NULL value*/ char *name = strdup(get_video_name()); char *path = strdup(get_video_path()); if(get_video_sufix_flag()) { char *new_name = add_file_suffix(path, name); free(name); /*free old name*/ name = new_name; /*replace with suffixed name*/ } int pathsize = strlen(path); if(path[pathsize] != '/') video_filename = smart_cat(path, '/', name); else video_filename = smart_cat(path, 0, name); snprintf(status_message, 79, _("saving video to %s"), video_filename); gui_status_message(status_message); /*muxer initialization*/ encoder_muxer_init(encoder_ctx, video_filename); /*start video capture*/ video_capture_save_video(1); int treshold = 102400; /*100 Mbytes*/ int64_t last_check_pts = 0; /*last pts when disk supervisor called*/ /*start audio processing thread*/ if(encoder_ctx->enc_audio_ctx != NULL && audio_ctx->channels > 0) { if(debug_level > 1) printf("GUVCVIEW: starting encoder audio thread\n"); int ret = __THREAD_CREATE(&encoder_audio_thread, audio_processing_loop, (void *) encoder_ctx); if(ret) fprintf(stderr, "GUVCVIEW: encoder audio thread creation failed (%i)\n", ret); else if(debug_level > 2) printf("GUVCVIEW: created audio encoder thread with tid: %u\n", (unsigned int) encoder_audio_thread); } while(video_capture_get_save_video()) { /*process the video buffer*/ if(encoder_process_next_video_buffer(encoder_ctx) > 0) { /* * no buffers to process * sleep a couple of milisec */ struct timespec req = { .tv_sec = 0, .tv_nsec = 1000000};/*nanosec*/ nanosleep(&req, NULL); } /*disk supervisor*/ if(encoder_ctx->enc_video_ctx->pts - last_check_pts > 2 * NSEC_PER_SEC) { 
last_check_pts = encoder_ctx->enc_video_ctx->pts; if(!encoder_disk_supervisor(treshold, path)) { /*stop capture*/ gui_set_video_capture_button_status(0); } } } /*flush the video buffer*/ encoder_flush_video_buffer(encoder_ctx); /*make sure the audio processing thread has stopped*/ if(encoder_ctx->enc_audio_ctx != NULL && audio_ctx->channels > 0) { if(debug_level > 1) printf("GUVCVIEW: join encoder audio thread\n"); __THREAD_JOIN(encoder_audio_thread); } /*close the muxer*/ encoder_muxer_close(encoder_ctx); /*close the encoder context (clean up)*/ encoder_close(encoder_ctx); if(v4l2core_get_requested_frame_format() == V4L2_PIX_FMT_H264) { /* restore framerate */ v4l2core_set_h264_frame_rate_config(current_framerate); } /*clean string*/ free(video_filename); free(path); free(name); my_encoder_status = 0; return ((void *) 0); } /* * capture loop (should run in a separate thread) * args: * data - pointer to user data (options data) * * asserts: * none * * returns: pointer to return code */ void *capture_loop(void *data) { capture_loop_data_t *cl_data = (capture_loop_data_t *) data; options_t *my_options = (options_t *) cl_data->options; //config_t *my_config = (config_t *) cl_data->config; uint64_t my_last_photo_time = 0; /*timer count*/ int my_photo_npics = 0;/*no npics*/ /*reset quit flag*/ quit = 0; if(debug_level > 1) printf("GUVCVIEW: capture thread (tid: %u)\n", (unsigned int) syscall (SYS_gettid)); int ret = 0; int render_flags = 0; if (strcasecmp(my_options->render_flag, "full") == 0) render_flags = 1; else if (strcasecmp(my_options->render_flag, "max") == 0) render_flags = 2; render_set_verbosity(debug_level); if(render_init(render, v4l2core_get_frame_width(), v4l2core_get_frame_height(), render_flags) < 0) render = RENDER_NONE; else { render_set_event_callback(EV_QUIT, &quit_callback, NULL); render_set_event_callback(EV_KEY_V, &key_V_callback, NULL); render_set_event_callback(EV_KEY_I, &key_I_callback, NULL); render_set_event_callback(EV_KEY_UP, 
&key_UP_callback, NULL); render_set_event_callback(EV_KEY_DOWN, &key_DOWN_callback, NULL); render_set_event_callback(EV_KEY_LEFT, &key_LEFT_callback, NULL); render_set_event_callback(EV_KEY_RIGHT, &key_RIGHT_callback, NULL); } /*add a video capture timer*/ if(my_options->video_timer > 0) { my_video_timer = NSEC_PER_SEC * my_options->video_timer; my_video_begin_time = v4l2core_time_get_timestamp(); /*timer count*/ /*if are not saving video start it*/ if(!get_encoder_status()) start_encoder_thread(); } /*add a photo capture timer*/ if(my_options->photo_timer > 0) { my_photo_timer = NSEC_PER_SEC * my_options->photo_timer; my_last_photo_time = v4l2core_time_get_timestamp(); /*timer count*/ } if(my_options->photo_npics > 0) my_photo_npics = my_options->photo_npics; v4l2core_start_stream(); v4l2_frame_buff_t *frame = NULL; //pointer to frame buffer while(!quit) { if(restart) { restart = 0; /*reset*/ v4l2core_stop_stream(); /*close render*/ render_close(); v4l2core_clean_buffers(); /*try new format (values prepared by the request callback)*/ ret = v4l2core_update_current_format(); /*try to set the video stream format on the device*/ if(ret != E_OK) { fprintf(stderr, "GUCVIEW: could not set the defined stream format\n"); fprintf(stderr, "GUCVIEW: trying first listed stream format\n"); v4l2core_prepare_valid_format(); v4l2core_prepare_valid_resolution(); ret = v4l2core_update_current_format(); if(ret != E_OK) { fprintf(stderr, "GUCVIEW: also could not set the first listed stream format\n"); gui_error("Guvcview error", "could not start a video stream in the device", 1); return ((void *) -1); } } /*restart the render with new format*/ if(render_init(render, v4l2core_get_frame_width(), v4l2core_get_frame_height(), render_flags) < 0) render = RENDER_NONE; else { render_set_event_callback(EV_QUIT, &quit_callback, NULL); render_set_event_callback(EV_KEY_V, &key_V_callback, NULL); render_set_event_callback(EV_KEY_I, &key_I_callback, NULL); render_set_event_callback(EV_KEY_UP, 
&key_UP_callback, NULL); render_set_event_callback(EV_KEY_DOWN, &key_DOWN_callback, NULL); render_set_event_callback(EV_KEY_LEFT, &key_LEFT_callback, NULL); render_set_event_callback(EV_KEY_RIGHT, &key_RIGHT_callback, NULL); } if(debug_level > 0) printf("GUVCVIEW: reset to pixelformat=%x width=%i and height=%i\n", v4l2core_get_requested_frame_format(), v4l2core_get_frame_width(), v4l2core_get_frame_height()); v4l2core_start_stream(); } frame = v4l2core_get_decoded_frame(); if( frame != NULL) { /*run software autofocus (must be called after frame was grabbed and decoded)*/ if(do_soft_autofocus || do_soft_focus) do_soft_focus = v4l2core_soft_autofocus_run(frame); /*render the decoded frame*/ snprintf(render_caption, 29, "Guvcview (%2.2f fps)", v4l2core_get_realfps()); render_set_caption(render_caption); render_frame(frame->yuv_frame, my_render_mask); if(check_photo_timer()) { if((frame->timestamp - my_last_photo_time) > my_photo_timer) { save_image = 1; my_last_photo_time = frame->timestamp; if(my_options->photo_npics > 0) { if(my_photo_npics > 0) my_photo_npics--; else stop_photo_timer(); /*close timer*/ } } } if(check_video_timer()) { if((frame->timestamp - my_video_begin_time) > my_video_timer) stop_video_timer(); } if(save_image) { char *img_filename = NULL; /*get_photo_[name|path] always return a non NULL value*/ char *name = strdup(get_photo_name()); char *path = strdup(get_photo_path()); if(get_photo_sufix_flag()) { char *new_name = add_file_suffix(path, name); free(name); /*free old name*/ name = new_name; /*replace with suffixed name*/ } int pathsize = strlen(path); if(path[pathsize] != '/') img_filename = smart_cat(path, '/', name); else img_filename = smart_cat(path, 0, name); //if(debug_level > 1) // printf("GUVCVIEW: saving image to %s\n", img_filename); snprintf(status_message, 79, _("saving image to %s"), img_filename); gui_status_message(status_message); v4l2core_save_image(frame, img_filename, get_photo_format()); free(path); free(name); 
free(img_filename); save_image = 0; /*reset*/ } if(video_capture_get_save_video()) { #ifdef USE_PLANAR_YUV int size = (v4l2core_get_frame_width() * v4l2core_get_frame_height() * 3) / 2; #else int size = v4l2core_get_frame_width() * v4l2core_get_frame_height() * 2; #endif uint8_t *input_frame = frame->yuv_frame; /* * TODO: check codec_id, format and frame flags * (we may want to store a compressed format */ if(get_video_codec_ind() == 0) //raw frame { switch(v4l2core_get_requested_frame_format()) { case V4L2_PIX_FMT_H264: input_frame = frame->h264_frame; size = (int) frame->h264_frame_size; break; default: input_frame = frame->raw_frame; size = (int) frame->raw_frame_size; break; } } encoder_add_video_frame(input_frame, size, frame->timestamp, frame->isKeyframe); /* * exponencial scheduler * with 50% threshold (nanosec) * and max value of 250 ms (4 fps) */ int time_sched = encoder_buff_scheduler(ENCODER_SCHED_EXP, 0.5, 250); if(time_sched > 0) { switch(v4l2core_get_requested_frame_format()) { case V4L2_PIX_FMT_H264: { uint32_t framerate = time_sched; /*nanosec*/ v4l2core_set_h264_frame_rate_config(framerate); break; } default: { struct timespec req = { .tv_sec = 0, .tv_nsec = time_sched};/*nanosec*/ nanosleep(&req, NULL); break; } } } } /*we are done with the frame buffer release it*/ v4l2core_release_frame(frame); } } v4l2core_stop_stream(); /*if we are still saving video then stop it*/ if(video_capture_get_save_video()) stop_encoder_thread(); render_close(); return ((void *) 0); } /* * start the encoder thread * args: * data - pointer to user data * * asserts: * none * * returns: error code */ int start_encoder_thread(void *data) { int ret = __THREAD_CREATE(&encoder_thread, encoder_loop, data); if(ret) fprintf(stderr, "GUVCVIEW: encoder thread creation failed (%i)\n", ret); else if(debug_level > 2) printf("GUVCVIEW: created encoder thread with tid: %u\n", (unsigned int) encoder_thread); return ret; }