/*
 * Build the language selector menu item: an image menu item that shows the
 * flag of the current language.  A button press on it opens lang_dialog();
 * cb is forwarded to that dialog as user data.  All flag pixbufs are loaded
 * up front and ref'd so they outlive any dialog churn.
 */
GtkWidget *get_language_menu(GtkWidget *window, cb_t cb) {
    gint idx;

    lang_menu = gtk_image_menu_item_new();

    /* Preload every flag image once and keep a reference on each. */
    for (idx = 0; idx < NUM_LANGS; idx++) {
        flags[idx] = get_picture(flags_files[idx]);
        g_object_ref(flags[idx]);
    }

    gtk_image_menu_item_set_image(GTK_IMAGE_MENU_ITEM(lang_menu),
                                  flags[language]);
    gtk_image_menu_item_set_always_show_image(GTK_IMAGE_MENU_ITEM(lang_menu),
                                              TRUE);

    g_signal_connect(G_OBJECT(lang_menu), "button_press_event",
                     G_CALLBACK(lang_dialog), cb);

    set_gui_language(language);

    /* Buttons must be allowed to show images so the flag buttons render. */
    g_object_set(gtk_settings_get_default(), "gtk-button-images", TRUE, NULL);

    return lang_menu;
}
/*
 * Re-resolve the picture named in rpic->pic_name and refresh the rotation
 * state from it.
 *
 * On lookup failure, logs and records -ENODEV; otherwise the result of
 * picture_sync() decides success.  In both cases the previously held
 * picture reference is released and replaced (possibly with NULL).
 * rpic->ready is set to 1 only when the sync succeeded and the rotation
 * geometry has been rebuilt; 0 otherwise.
 *
 * Returns 0 on success, a negative error code on failure.
 */
int rotate_pic_sync(struct rotate_pic *rpic) {
    struct picture *pic;
    int sync;

    pic = get_picture(rpic->pic_name);
    if (!pic) {
        pr_err("pic_view: can not get pic: %s\n", rpic->pic_name);
        sync = -ENODEV;
    } else {
        sync = picture_sync(pic);
    }

    /* Release the old reference only after the new lookup, then swap in the
     * new picture (which may be NULL on failure). */
    put_picture(rpic->pic);
    rpic->pic = pic;

    if (!sync) {
        /* Rebuild rotation geometry around the freshly synced picture. */
        rotate_set_region(&rpic->rt, picture_region(rpic->pic));
        rotate_set_dst(&rpic->rt, &rpic->center);
        rotate_free_save(&rpic->rt);
        /* Hidden pictures don't need the rotation maps kept around. */
        if (!rpic->show)
            rotate_free_maps(&rpic->rt);
        rpic->ready = 1;
    } else {
        rpic->ready = 0;
    }
    return sync;
}
// Run the uCAM-529 capture sequence end to end.  Each stage is attempted
// only if the previous one succeeded; the image payload is fetched last.
void UCAM529::takePicture() {
    if (!send_initial())
        return;
    if (!set_package_size())
        return;
    if (!do_snapshot())
        return;
    if (get_picture())
        get_data();
}
/*
 * Button-press handler for the language menu item: pops up a dialog with
 * one flag+name button per supported language, laid out two per row.
 *
 * param is the cb_t callback passed to get_language_menu(); it is stashed
 * in a heap-allocated button_data shared by all buttons, and forwarded to
 * lang_dialog_cb() on click.
 *
 * NOTE(review): `data` is g_malloc'd here and handed to the click callback;
 * who frees it (presumably lang_dialog_cb when the dialog closes) is not
 * visible in this block — confirm there is no leak when the dialog is
 * dismissed without a click.
 *
 * Always returns TRUE so the press event is consumed.
 */
static gboolean lang_dialog(GtkWidget *eventbox, GdkEventButton *event, void *param) {
    struct button_data *data = g_malloc(sizeof(*data));
    GtkWidget *dialog;
    /* GTK3 uses GtkGrid; GTK2 uses a (NUM_LANGS+1)/2 x 4 GtkTable. */
#if (GTKVER == 3)
    GtkWidget *table = gtk_grid_new();
#else
    GtkWidget *table = gtk_table_new((NUM_LANGS+1)/2, 4, TRUE);
#endif
    gint i;

    dialog = gtk_dialog_new();
    data->cb = param;
    data->dialog = dialog;

    /* One button per language: even indices in the left column, odd in the
     * right; rows advance every two languages. */
    for (i = 0; i < NUM_LANGS; i++) {
        gint col = (i&1) ? 2 : 0;
        gint row = i/2;
        GtkWidget *button = data->buttons[i] =
            gtk_button_new_with_label(lang_names_in_own_language[i]);
        /* A fresh image widget per button (the shared flags[] array cannot
         * be reused: a widget can only have one parent). */
        gtk_button_set_image(GTK_BUTTON(button), get_picture(flags_files[i])/*flags[i]*/);
        gtk_button_set_image_position(GTK_BUTTON(button), GTK_POS_LEFT);
#if (GTKVER == 3)
        gtk_grid_attach(GTK_GRID(table), button, col, row, 1, 1);
        g_signal_connect(G_OBJECT(button), "clicked",
                         G_CALLBACK(lang_dialog_cb), data);
#else
        gtk_table_attach_defaults(GTK_TABLE(table), button, col, col+1, row, row+1);
        gtk_signal_connect(GTK_OBJECT(button), "clicked",
                          GTK_SIGNAL_FUNC(lang_dialog_cb), data);
#endif
    }
#if (GTKVER == 3)
    gtk_box_pack_start(GTK_BOX(gtk_dialog_get_content_area(GTK_DIALOG(dialog))),
                       table, TRUE, TRUE, 0);
#else
    gtk_container_add(GTK_CONTAINER(GTK_DIALOG(dialog)->vbox), table);
#endif
    gtk_widget_show_all(dialog);
    return TRUE;
}
/*
 * Initialise a rotate_pic widget over the current framebuffer region.
 *
 * The picture named by pic_name is looked up immediately; a failed lookup
 * is logged but deliberately NOT fatal — the name is remembered so that
 * rotate_pic_sync() can retry the lookup later.  The destination defaults
 * to the centre of the background region.
 *
 * Always returns 0.
 */
int init_rotate_pic(struct rotate_pic *rpic, const char *name, const char *pic_name) {
    struct picture *pic;
    struct fb_region *bg = get_current_fb_region();

    assert(bg);
    memset(rpic, 0, sizeof(*rpic));
    rpic->name = name;
    init_rotate(&rpic->rt);

    pic = get_picture(pic_name);
    if (!pic) {
        pr_err("pic_view: can not get picture: %s\n", pic_name);
    }
    /* BUG FIX: was an unbounded strcpy(); a long pic_name could overflow
     * the fixed-size pic_name[] buffer.  snprintf always NUL-terminates. */
    snprintf(rpic->pic_name, sizeof(rpic->pic_name), "%s", pic_name);
    rpic->pic = pic;
    rpic->show = 1;

    rotate_set_bg(&rpic->rt, bg);
    rotate_set_dst_to_center(&rpic->rt);
    rpic->center = *(rotate_dst_center(&rpic->rt));
    return 0;
}
/*
 * Decode and return the video frame for the requested sample number from an
 * L-SMASH (libavsmash) source.
 *
 * Strategy:
 *   - Samples before the first valid frame (or a single-sample track) are
 *     served from the cached vdhp->first_valid_frame.
 *   - Otherwise, decode forward from the last decoded sample when the target
 *     lies within forward_seek_threshold; else seek to a random accessible
 *     point (RAP) first.
 *   - On decode failure, retry from progressively earlier RAPs, escalating
 *     seek_mode up to SEEK_MODE_AGGRESSIVE before giving up.
 *
 * On success fills *picture and returns 0; returns -1 on fatal failure.
 */
static int get_requested_picture
(
    libavsmash_video_decode_handler_t *vdhp,
    AVFrame                           *picture,
    uint32_t                           sample_number
)
{
#define MAX_ERROR_COUNT 3    /* arbitrary */
    codec_configuration_t *config = &vdhp->config;
    uint32_t config_index;
    if( sample_number < vdhp->first_valid_frame_number || vdhp->sample_count == 1 )
    {
        /* Get the index of the decoder configuration. */
        lsmash_sample_t sample;
        uint32_t decoding_sample_number = get_decoding_sample_number( vdhp->order_converter, vdhp->first_valid_frame_number );
        if( lsmash_get_sample_info_from_media_timeline( vdhp->root, vdhp->track_id, decoding_sample_number, &sample ) < 0 )
            goto video_fail;
        config_index = sample.index;
        /* Copy the first valid video frame data. */
        av_frame_unref( picture );
        if( av_frame_ref( picture, vdhp->first_valid_frame ) < 0 )
            goto video_fail;
        /* Force seeking at the next access for valid video frame. */
        vdhp->last_sample_number = vdhp->sample_count + 1;
        goto return_frame;
    }
    uint32_t start_number;  /* number of sample, for normal decoding, where decoding starts excluding decoding delay */
    uint32_t rap_number;    /* number of sample, for seeking, where decoding starts excluding decoding delay */
    int seek_mode = vdhp->seek_mode;
    int roll_recovery = 0;
    if( sample_number > vdhp->last_sample_number
     && sample_number <= vdhp->last_sample_number + vdhp->forward_seek_threshold )
    {
        /* Close enough: keep decoding forward from where we stopped. */
        start_number = vdhp->last_sample_number + 1 + config->delay_count;
        rap_number = vdhp->last_rap_number;
    }
    else
    {
        roll_recovery = find_random_accessible_point( vdhp, sample_number, 0, &rap_number );
        if( rap_number == vdhp->last_rap_number && sample_number > vdhp->last_sample_number )
        {
            /* Same RAP as last time and the target is ahead: no seek needed. */
            roll_recovery = 0;
            start_number = vdhp->last_sample_number + 1 + config->delay_count;
        }
        else
        {
            /* Require starting to decode from random accessible sample. */
            vdhp->last_rap_number = rap_number;
            start_number = seek_video( vdhp, picture, sample_number, rap_number, roll_recovery || seek_mode != SEEK_MODE_NORMAL );
        }
    }
    /* Get the desired picture. */
    int error_count = 0;
    while( start_number == 0            /* Failed to seek. */
        || config->update_pending       /* Need to update the decoder configuration to decode pictures. */
        || get_picture( vdhp, picture, start_number, sample_number + config->delay_count ) < 0 )
    {
        if( config->update_pending )
        {
            roll_recovery = find_random_accessible_point( vdhp, sample_number, 0, &rap_number );
            vdhp->last_rap_number = rap_number;
        }
        else
        {
            /* Failed to get the desired picture. */
            if( config->error || seek_mode == SEEK_MODE_AGGRESSIVE )
                goto video_fail;
            if( ++error_count > MAX_ERROR_COUNT || rap_number <= 1 )
            {
                if( seek_mode == SEEK_MODE_UNSAFE )
                    goto video_fail;
                /* Retry to decode from the same random accessible sample with error ignorance. */
                seek_mode = SEEK_MODE_AGGRESSIVE;
            }
            else
            {
                /* Retry to decode from more past random accessible sample. */
                roll_recovery = find_random_accessible_point( vdhp, sample_number, rap_number - 1, &rap_number );
                /* No earlier RAP exists: decoding cannot make progress. */
                if( vdhp->last_rap_number == rap_number )
                    goto video_fail;
                vdhp->last_rap_number = rap_number;
            }
        }
        start_number = seek_video( vdhp, picture, sample_number, rap_number, roll_recovery || seek_mode != SEEK_MODE_NORMAL );
    }
    vdhp->last_sample_number = sample_number;
    config_index = config->index;
return_frame:;
    /* Don't exceed the maximum presentation size specified for each sequence. */
    extended_summary_t *extended = &config->entries[ config_index - 1 ].extended;
    if( config->ctx->width > extended->width )
        config->ctx->width = extended->width;
    if( config->ctx->height > extended->height )
        config->ctx->height = extended->height;
    return 0;
video_fail:
    /* fatal error of decoding */
    lw_log_show( &config->lh, LW_LOG_WARNING, "Couldn't read video frame." );
    return -1;
#undef MAX_ERROR_COUNT
}
int main(int argc, char *argv[]) { #ifndef EMBEDED_X210 //PC platform const SDL_VideoInfo *info; char driver[128]; SDL_Surface *pscreen; SDL_Overlay *overlay; SDL_Rect drect; SDL_Event sdlevent; SDL_Thread *mythread; SDL_mutex *affmutex; Uint32 currtime; Uint32 lasttime; #endif int status; unsigned char *p = NULL; int hwaccel = 0; const char *videodevice = NULL; const char *mode = NULL; int format = V4L2_PIX_FMT_MJPEG; int i; int grabmethod = 1; int width = 320; int height = 240; int fps = 15; unsigned char frmrate = 0; char *avifilename = NULL; int queryformats = 0; int querycontrols = 0; int readconfigfile = 0; char *separateur; char *sizestring = NULL; char *fpsstring = NULL; int enableRawStreamCapture = 0; int enableRawFrameCapture = 0; char * pRGBData=NULL; printf("luvcview version %s \n", version); for (i = 1; i < argc; i++) { /* skip bad arguments */ if (argv[i] == NULL || *argv[i] == 0 || *argv[i] != '-') { continue; } if (strcmp(argv[i], "-d") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -d, aborting.\n"); exit(1); } videodevice = strdup(argv[i + 1]); } if (strcmp(argv[i], "-g") == 0) { /* Ask for read instead default mmap */ grabmethod = 0; } if (strcmp(argv[i], "-w") == 0) { /* disable hw acceleration */ hwaccel = 1; } if (strcmp(argv[i], "-f") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -f, aborting.\n"); exit(1); } mode = strdup(argv[i + 1]); if (strncmp(mode, "yuv", 3) == 0) { format = V4L2_PIX_FMT_YUYV; } else if (strncmp(mode, "jpg", 3) == 0) { format = V4L2_PIX_FMT_MJPEG; } else { format = V4L2_PIX_FMT_MJPEG; } } if (strcmp(argv[i], "-s") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -s, aborting.\n"); exit(1); } sizestring = strdup(argv[i + 1]); width = strtoul(sizestring, &separateur, 10); if (*separateur != 'x') { printf("Error in size use -s widthxheight \n"); exit(1); } else { ++separateur; height = strtoul(separateur, &separateur, 10); if (*separateur != 0) printf("hmm.. 
dont like that!! trying this height \n"); printf(" size width: %d height: %d \n", width, height); } } if (strcmp(argv[i], "-i") == 0){ if (i + 1 >= argc) { printf("No parameter specified with -i, aborting. \n"); exit(1); } fpsstring = strdup(argv[i + 1]); fps = strtoul(fpsstring, &separateur, 10); printf(" interval: %d fps \n", fps); } if (strcmp(argv[i], "-S") == 0) { /* Enable raw stream capture from the start */ enableRawStreamCapture = 1; } if (strcmp(argv[i], "-c") == 0) { /* Enable raw frame capture for the first frame */ enableRawFrameCapture = 1; } if (strcmp(argv[i], "-C") == 0) { /* Enable raw frame stream capture from the start*/ enableRawFrameCapture = 2; } if (strcmp(argv[i], "-o") == 0) { /* set the avi filename */ if (i + 1 >= argc) { printf("No parameter specified with -o, aborting.\n"); exit(1); } avifilename = strdup(argv[i + 1]); } if (strcmp(argv[i], "-L") == 0) { /* query list of valid video formats */ queryformats = 1; } if (strcmp(argv[i], "-l") == 0) { /* query list of valid video formats */ querycontrols = 1; } if (strcmp(argv[i], "-r") == 0) { /* query list of valid video formats */ readconfigfile = 1; } if (strcmp(argv[i], "-h") == 0) { printf("usage: uvcview [-h -d -g -f -s -i -c -o -C -S -L -l -r] \n"); printf("-h print this message \n"); printf("-d /dev/videoX use videoX device\n"); printf("-g use read method for grab instead mmap \n"); printf("-w disable SDL hardware accel. 
\n"); printf("-f video format default jpg others options are yuv jpg \n"); printf("-i fps use specified frame interval \n"); printf("-s widthxheight use specified input size \n"); printf("-c enable raw frame capturing for the first frame\n"); printf("-C enable raw frame stream capturing from the start\n"); printf("-S enable raw stream capturing from the start\n"); printf("-o avifile create avifile, default video.avi\n"); printf("-L query valid video formats\n"); printf("-l query valid controls and settings\n"); printf("-r read and set control settings from luvcview.cfg\n"); exit(0); } } #ifndef EMBEDED_X210 //PC platform /************* Test SDL capabilities ************/ if (SDL_Init(SDL_INIT_VIDEO) < 0) { fprintf(stderr, "Couldn't initialize SDL: %s\n", SDL_GetError()); exit(1); } /* For this version, we'll be save and disable hardware acceleration */ if(hwaccel) { if ( ! getenv("SDL_VIDEO_YUV_HWACCEL") ) { putenv("SDL_VIDEO_YUV_HWACCEL=0"); } } if (SDL_VideoDriverName(driver, sizeof(driver))) { printf("Video driver: %s\n", driver); } info = SDL_GetVideoInfo(); if (info->wm_available) { printf("A window manager is available\n"); } if (info->hw_available) { printf("Hardware surfaces are available (%dK video memory)\n", info->video_mem); SDL_VIDEO_Flags |= SDL_HWSURFACE; } if (info->blit_hw) { printf("Copy blits between hardware surfaces are accelerated\n"); SDL_VIDEO_Flags |= SDL_ASYNCBLIT; } if (info->blit_hw_CC) { printf ("Colorkey blits between hardware surfaces are accelerated\n"); } if (info->blit_hw_A) { printf("Alpha blits between hardware surfaces are accelerated\n"); } if (info->blit_sw) { printf ("Copy blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_sw_CC) { printf ("Colorkey blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_sw_A) { printf ("Alpha blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_fill) { printf("Color fills on hardware 
surfaces are accelerated\n"); } if (!(SDL_VIDEO_Flags & SDL_HWSURFACE)) SDL_VIDEO_Flags |= SDL_SWSURFACE; #endif if (videodevice == NULL || *videodevice == 0) { videodevice = "/dev/video0"; } if (avifilename == NULL || *avifilename == 0) { avifilename = "video.avi"; } videoIn = (struct vdIn *) calloc(1, sizeof(struct vdIn)); if ( queryformats ) { /* if we're supposed to list the video formats, do that now and go out */ check_videoIn(videoIn,(char *) videodevice); free(videoIn); #ifndef EMBEDED_X210 SDL_Quit(); #endif exit(1); } if (init_videoIn(videoIn, (char *) videodevice, width, height, fps, format, grabmethod, avifilename) < 0) exit(1); /* if we're supposed to list the controls, do that now */ if ( querycontrols ) enum_controls(videoIn->fd); /* if we're supposed to read the control settings from a configfile, do that now */ if ( readconfigfile ) load_controls(videoIn->fd); #ifdef EMBEDED_X210 #ifdef SOFT_COLOR_CONVERT init_framebuffer(); #else x6410_init_Draw(videoIn->width,videoIn->height); #endif #else pscreen = SDL_SetVideoMode(videoIn->width, videoIn->height+30 , 0,SDL_VIDEO_Flags); overlay =SDL_CreateYUVOverlay(videoIn->width, videoIn->height+30 , SDL_YUY2_OVERLAY, pscreen); p = (unsigned char *) overlay->pixels[0]; drect.x = 0; drect.y = 0; drect.w =pscreen->w; drect.h = pscreen->h; #endif if (enableRawStreamCapture) { videoIn->captureFile = fopen("stream.raw", "wb"); if(videoIn->captureFile == NULL) { perror("Unable to open file for raw stream capturing"); } else { printf("Starting raw stream capturing to stream.raw ...\n"); } } if (enableRawFrameCapture) videoIn->rawFrameCapture = enableRawFrameCapture; initLut(); #ifndef EMBEDED_X210 SDL_WM_SetCaption(title_act[A_VIDEO].title, NULL); lasttime = SDL_GetTicks(); creatButt(videoIn->width, 32); SDL_LockYUVOverlay(overlay); memcpy(p + (videoIn->width * (videoIn->height) * 2), YUYVbutt, videoIn->width * 64); SDL_UnlockYUVOverlay(overlay); /* initialize thread data */ ptdata.ptscreen = &pscreen; 
ptdata.ptvideoIn = videoIn; ptdata.ptsdlevent = &sdlevent; ptdata.drect = &drect; affmutex = SDL_CreateMutex(); ptdata.affmutex = affmutex; mythread = SDL_CreateThread(eventThread, (void *) &ptdata); #endif pRGBData = (unsigned char *)malloc(videoIn->width*videoIn->width*4*sizeof(char)); if(pRGBData==NULL) { return ; } /* main big loop */ while (videoIn->signalquit) { #ifndef EMBEDED_X210 currtime = SDL_GetTicks(); if (currtime - lasttime > 0) { frmrate = 1000/(currtime - lasttime); } lasttime = currtime; #endif if (uvcGrab(videoIn) < 0) { printf("Error grabbing \n"); break; } /* if we're grabbing video, show the frame rate */ if (videoIn->toggleAvi) printf("\rframe rate: %d ",frmrate); #ifndef EMBEDED_X210 SDL_LockYUVOverlay(overlay); memcpy(p, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2); SDL_UnlockYUVOverlay(overlay); SDL_DisplayYUVOverlay(overlay, &drect); #endif #ifdef EMBEDED_X210 #ifdef SOFT_COLOR_CONVERT // yuv to rgb565 ,and to frambuffer process_image(videoIn->framebuffer,fbp,videoIn->width,videoIn->height,vinfo,finfo); // convertYUYVtoRGB565(videoIn->framebuffer,pRGBData,videoIn->width,videoIn->height); // Pyuv422torgb24(videoIn->framebuffer, pRGBData, videoIn->width, videoIn->height); // memcpy(fbp,pRGBData,videoIn->width*videoIn->height*2); #else //X6410 post processor convert yuv to rgb,X210 not suport now. 
/* memcpy(pInbuffer, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2); ioctl(dev_fb0, GET_FB_INFO, &fb_info); pp_param.SrcFrmSt = ioctl(dev_pp, S3C_PP_GET_RESERVED_MEM_ADDR_PHY); //must be physical adress pp_param.DstFrmSt = fb_info.map_dma_f1; //must be physical adress ioctl(dev_pp, S3C_PP_SET_PARAMS, &pp_param); ioctl(dev_pp, S3C_PP_SET_DST_BUF_ADDR_PHY, &pp_param); ioctl(dev_pp, S3C_PP_SET_SRC_BUF_ADDR_PHY, &pp_param); ioctl(dev_pp, S3C_PP_START); */ #endif #endif if (videoIn->getPict) { switch(videoIn->formatIn){ case V4L2_PIX_FMT_MJPEG: get_picture(videoIn->tmpbuffer,videoIn->buf.bytesused); break; case V4L2_PIX_FMT_YUYV: get_pictureYV2(videoIn->framebuffer,videoIn->width,videoIn->height); break; default: break; } videoIn->getPict = 0; printf("get picture !\n"); } #ifndef EMBEDED_X210 SDL_LockMutex(affmutex); ptdata.frmrate = frmrate; SDL_WM_SetCaption(videoIn->status, NULL); SDL_UnlockMutex(affmutex); #endif #ifdef EMBEDED_X210 usleep(10); #else SDL_Delay(10); #endif } #ifndef EMBEDED_X210 SDL_WaitThread(mythread, &status); SDL_DestroyMutex(affmutex); #endif /* if avifile is defined, we made a video: compute the exact fps and set it in the video */ if (videoIn->avifile != NULL) { float fps=(videoIn->framecount/(videoIn->recordtime/1000)); fprintf(stderr,"setting fps to %f\n",fps); AVI_set_video(videoIn->avifile, videoIn->width, videoIn->height, fps, "MJPG"); AVI_close(videoIn->avifile); } close_v4l2(videoIn); #ifdef EMBEDED_X210 #ifdef SOFT_COLOR_CONVERT close_frambuffer(); #else x6410_DeInit_Draw(); #endif #endif free(pRGBData); free(videoIn); destroyButt(); freeLut(); printf(" Clean Up done Quit \n"); #ifndef EMBEDED_X210 SDL_Quit(); #endif }
int main(int argc, char *argv[]) { char driver[128]; int status; //Uint32 currtime; //Uint32 lasttime; unsigned char *p = NULL; int hwaccel = 0; const char *videodevice = NULL; const char *mode = NULL; int format = V4L2_PIX_FMT_MJPEG; int i; int grabmethod = 1; int width = 320; int height = 240; int fps = 15; unsigned char frmrate = 0; char *avifilename = NULL; int queryformats = 0; int querycontrols = 0; int readconfigfile = 0; char *separateur; char *sizestring = NULL; char *fpsstring = NULL; int enableRawStreamCapture = 0; int enableRawFrameCapture = 0; printf("luvcview version %s \n", version); for (i = 1; i < argc; i++) { /* skip bad arguments */ if (argv[i] == NULL || *argv[i] == 0 || *argv[i] != '-') { continue; } if (strcmp(argv[i], "-d") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -d, aborting.\n"); exit(1); } videodevice = strdup(argv[i + 1]); } if (strcmp(argv[i], "-g") == 0) { /* Ask for read instead default mmap */ grabmethod = 0; } if (strcmp(argv[i], "-w") == 0) { /* disable hw acceleration */ hwaccel = 1; } if (strcmp(argv[i], "-f") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -f, aborting.\n"); exit(1); } mode = strdup(argv[i + 1]); if (strncmp(mode, "yuv", 3) == 0) { format = V4L2_PIX_FMT_YUYV; } else if (strncmp(mode, "jpg", 3) == 0) { format = V4L2_PIX_FMT_MJPEG; } else { format = V4L2_PIX_FMT_JPEG; } } if (strcmp(argv[i], "-s") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -s, aborting.\n"); exit(1); } sizestring = strdup(argv[i + 1]); width = strtoul(sizestring, &separateur, 10); if (*separateur != 'x') { printf("Error in size use -s widthxheight \n"); exit(1); } else { ++separateur; height = strtoul(separateur, &separateur, 10); if (*separateur != 0) printf("hmm.. dont like that!! trying this height \n"); printf(" size width: %d height: %d \n", width, height); } } if (strcmp(argv[i], "-i") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -i, aborting. 
\n"); exit(1); } fpsstring = strdup(argv[i + 1]); fps = strtoul(fpsstring, &separateur, 10); printf(" interval: %d fps \n", fps); } if (strcmp(argv[i], "-S") == 0) { /* Enable raw stream capture from the start */ enableRawStreamCapture = 1; } if (strcmp(argv[i], "-c") == 0) { /* Enable raw frame capture for the first frame */ enableRawFrameCapture = 1; } if (strcmp(argv[i], "-C") == 0) { /* Enable raw frame stream capture from the start*/ enableRawFrameCapture = 2; } if (strcmp(argv[i], "-o") == 0) { /* set the avi filename */ if (i + 1 >= argc) { printf("No parameter specified with -o, aborting.\n"); exit(1); } avifilename = strdup(argv[i + 1]); } if (strcmp(argv[i], "-L") == 0) { /* query list of valid video formats */ queryformats = 1; } if (strcmp(argv[i], "-l") == 0) { /* query list of valid video formats */ querycontrols = 1; } if (strcmp(argv[i], "-r") == 0) { /* query list of valid video formats */ readconfigfile = 1; } if (strcmp(argv[i], "-O") == 0) { /* get picture */ getpictureflag = 1; } if (strcmp(argv[i], "-h") == 0) { printf( "usage: uvcview [-h -d -g -f -s -i -c -o -C -S -L -l -r] \n"); printf("-h print this message \n"); printf("-d /dev/videoX use videoX device\n"); printf("-g use read method for grab instead mmap \n"); printf("-w disable SDL hardware accel. 
\n"); printf( "-f video format default jpg others options are yuv jpg \n"); printf("-i fps use specified frame interval \n"); printf("-s widthxheight use specified input size \n"); printf("-c enable raw frame capturing for the first frame\n"); printf("-C enable raw frame stream capturing from the start\n"); printf("-S enable raw stream capturing from the start\n"); printf("-o avifile create avifile, default video.avi\n"); printf("-L query valid video formats\n"); printf("-l query valid controls and settings\n"); printf("-r read and set control settings from luvcview.cfg\n"); printf("-O get picture.\n"); exit(0); } } if (videodevice == NULL || *videodevice == 0) { videodevice = "/dev/video0"; } if (avifilename == NULL || *avifilename == 0) { avifilename = "video.avi"; } videoIn = (struct vdIn *) calloc(1, sizeof(struct vdIn)); if (queryformats) { /* if we're supposed to list the video formats, do that now and go out */ check_videoIn(videoIn, (char *) videodevice); free(videoIn); exit(1); } if (init_videoIn(videoIn, (char *) videodevice, width, height, fps, format, grabmethod, avifilename) < 0) exit(1); /* if we're supposed to list the controls, do that now */ if (querycontrols) enum_controls(videoIn->fd); /* if we're supposed to read the control settings from a configfile, do that now */ if (readconfigfile) load_controls(videoIn->fd); printf("Enable Raw Stream Capture\n"); if (enableRawStreamCapture) { videoIn->captureFile = fopen("stream.raw", "wb"); if (videoIn->captureFile == NULL) { perror("Unable to open file for raw stream capturing"); } else { printf("Starting raw stream capturing to stream.raw ...\n"); } } if (enableRawFrameCapture) videoIn->rawFrameCapture = enableRawFrameCapture; initLut(); printf("Begin main big loop\n"); int loopNum = 0; /* main big loop */ while (videoIn->signalquit) { // if (uvcGrab(videoIn) < 0) { printf("Error grabbing \n"); break; } // /* if we're grabbing video, show the frame rate */ if (videoIn->toggleAvi) printf("\rframe rate: 
%d ", frmrate); // if (getpictureflag) { //if (videoIn->getPict) { switch (videoIn->formatIn) { case V4L2_PIX_FMT_MJPEG: get_picture(videoIn->tmpbuffer, videoIn->buf.bytesused); break; case V4L2_PIX_FMT_YUYV: printf("get picture yuv...\n"); get_pictureYV2(videoIn->framebuffer, videoIn->width, videoIn->height); break; default: break; } videoIn->getPict = 0; printf("get picture !\n"); } printf("loop number %d\n",loopNum); loopNum ++; } /* if avifile is defined, we made a video: compute the exact fps and set it in the video */ if (videoIn->avifile != NULL) { float fps = (videoIn->framecount / (videoIn->recordtime / 1000)); fprintf(stderr, "setting fps to %f\n", fps); AVI_set_video(videoIn->avifile, videoIn->width, videoIn->height, fps, "MJPG"); AVI_close(videoIn->avifile); } close_v4l2(videoIn); free(videoIn); freeLut(); printf(" Clean Up done Quit \n"); }
/*
 * Decode and return the video frame for the requested frame number from a
 * lwlibav (raw libavformat/libavcodec) source.
 *
 * Strategy:
 *   - The out-of-range request is clamped to the last frame.
 *   - The cached last frame (or its second field, via last_half_offset) is
 *     returned without decoding.
 *   - Frames before the first valid frame (or a single-frame stream) are
 *     served from the cached first_valid_frame.
 *   - Otherwise, decode forward from the last decoded frame when the target
 *     lies within forward_seek_threshold; else seek to a random accessible
 *     point (RAP) first.
 *   - On decode failure, retry from progressively earlier RAPs, escalating
 *     seek_mode up to SEEK_MODE_AGGRESSIVE before giving up.
 *
 * On success fills *picture and returns 0; returns -1 on fatal failure.
 */
static int get_requested_picture
(
    lwlibav_video_decode_handler_t *vdhp,
    AVFrame                        *picture,
    uint32_t                        frame_number
)
{
#define MAX_ERROR_COUNT 3    /* arbitrary */
    if( frame_number > vdhp->frame_count )
        frame_number = vdhp->frame_count;
    uint32_t extradata_index;
    if( frame_number == vdhp->last_frame_number
     || frame_number == vdhp->last_frame_number + vdhp->last_half_offset )
    {
        /* The last frame is the requested frame. */
        if( copy_last_frame( vdhp, picture ) < 0 )
            goto video_fail;
        extradata_index = vdhp->frame_list[frame_number].extradata_index;
        goto return_frame;
    }
    if( frame_number < vdhp->first_valid_frame_number || vdhp->frame_count == 1 )
    {
        /* Copy the first valid video frame data. */
        av_frame_unref( picture );
        if( av_frame_ref( picture, vdhp->first_valid_frame ) < 0 )
            goto video_fail;
        /* Force seeking at the next access for valid video frame. */
        vdhp->last_frame_number = vdhp->frame_count + 1;
        vdhp->last_frame_buffer = picture;
        /* Return the first valid video frame. */
        extradata_index = vdhp->frame_list[ vdhp->first_valid_frame_number ].extradata_index;
        goto return_frame;
    }
    uint32_t start_number;  /* number of sample, for normal decoding, where decoding starts excluding decoding delay */
    uint32_t rap_number;    /* number of sample, for seeking, where decoding starts excluding decoding delay */
    uint32_t last_frame_number = vdhp->last_frame_number + vdhp->last_half_offset;
    int seek_mode = vdhp->seek_mode;
    int64_t rap_pos = INT64_MIN;
    if( frame_number > last_frame_number
     && frame_number <= last_frame_number + vdhp->forward_seek_threshold )
    {
        /* Close enough: keep decoding forward from where we stopped. */
        start_number = last_frame_number + 1 + vdhp->exh.delay_count;
        rap_number = vdhp->last_rap_number;
    }
    else
    {
        lwlibav_find_random_accessible_point( vdhp, frame_number, 0, &rap_number );
        if( rap_number == vdhp->last_rap_number && frame_number > last_frame_number )
            start_number = last_frame_number + 1 + vdhp->exh.delay_count;
        else
        {
            /* Require starting to decode from random accessible sample. */
            rap_pos = lwlibav_get_random_accessible_point_position( vdhp, rap_number );
            vdhp->last_rap_number = rap_number;
            start_number = seek_video( vdhp, picture, frame_number, rap_number, rap_pos, seek_mode != SEEK_MODE_NORMAL );
        }
    }
    /* Get requested picture. */
    int error_count = 0;
    while( start_number == 0
        || get_picture( vdhp, picture, start_number, frame_number + vdhp->exh.delay_count, rap_number ) < 0 )
    {
        /* Failed to get desired picture. */
        if( vdhp->error || seek_mode == SEEK_MODE_AGGRESSIVE )
            goto video_fail;
        if( ++error_count > MAX_ERROR_COUNT || rap_number <= 1 )
        {
            if( seek_mode == SEEK_MODE_UNSAFE )
                goto video_fail;
            /* Retry to decode from the same random accessible sample with error ignorance. */
            seek_mode = SEEK_MODE_AGGRESSIVE;
        }
        else
        {
            /* Retry to decode from more past random accessible sample. */
            lwlibav_find_random_accessible_point( vdhp, frame_number, rap_number - 1, &rap_number );
            rap_pos = lwlibav_get_random_accessible_point_position( vdhp, rap_number );
            vdhp->last_rap_number = rap_number;
        }
        start_number = seek_video( vdhp, picture, frame_number, rap_number, rap_pos, seek_mode != SEEK_MODE_NORMAL );
    }
    vdhp->last_frame_number = frame_number;
    vdhp->last_frame_buffer = picture;
    if( vdhp->last_half_frame == UINT32_MAX )
    {
        /* The second field was requested in this time.
         * Shift the last frame number to the first field number. */
        vdhp->last_frame_number -= 1;
        vdhp->last_half_frame = 1;
    }
    extradata_index = vdhp->frame_list[frame_number].extradata_index;
return_frame:;
    /* Don't exceed the maximum presentation size specified for each sequence. */
    lwlibav_extradata_t *entry = &vdhp->exh.entries[extradata_index];
    if( vdhp->ctx->width > entry->width )
        vdhp->ctx->width = entry->width;
    if( vdhp->ctx->height > entry->height )
        vdhp->ctx->height = entry->height;
    return 0;
video_fail:
    /* fatal error of decoding */
    if( vdhp->lh.show_log )
        vdhp->lh.show_log( &vdhp->lh, LW_LOG_ERROR, "Couldn't get the requested video frame." );
    return -1;
#undef MAX_ERROR_COUNT
}
/*
 * Thin wrapper: look up a picture by its path, supplying the explicit
 * length that the underlying loader expects.
 */
CPICTURE *CPICTURE_get_picture(const char *path)
{
    size_t path_len = strlen(path);

    return get_picture(path, path_len);
}