/*
 * Open and initialize a UVC capture device for raw YUYV streaming.
 *
 * videodevice: V4L2 device node path (e.g. "/dev/video0")
 * width/height: desired capture resolution
 * fps:          desired frame rate
 *
 * Returns a heap-allocated, initialized vdIn handle. On any failure the
 * process exits with status 1 (this matches init_videoIn's existing
 * error convention in this file).
 */
struct vdIn * openUVC(char * videodevice, int width, int height, int fps)
{
    int format = V4L2_PIX_FMT_YUYV;   /* raw YUYV frames (no MJPEG decode) */
    int grabmethod = 1;               /* 1 = mmap capture, 0 = read() */
    char *avifilename = NULL;         /* no AVI recording */

    struct vdIn *videoIn = (struct vdIn *) calloc(1, sizeof(struct vdIn));
    /* FIX: calloc was unchecked; a NULL handle would be dereferenced
     * inside init_videoIn on out-of-memory. */
    if (videoIn == NULL)
        exit(1);
    if (init_videoIn (videoIn, videodevice, width, height, fps, format,
                      grabmethod, avifilename) < 0)
        exit(1);
    return videoIn;
}
/*
 * Grab a single frame from the UVC camera and return it as a new
 * 8-bit, 3-channel RGB IplImage (caller owns the image).
 *
 * Opens /dev/video0 in YUYV mode at the resolution stored in `camera`,
 * resets then applies the brightness/contrast/saturation/gain settings
 * from `camera`, grabs one frame, converts YUYV -> RGB, and tears the
 * device down again. Exits the process on init/grab failure.
 * Returns NULL when built without ENABLE_CAMERA.
 */
IplImage *image_input::capture_uvc_camera() {
#ifdef ENABLE_CAMERA
    struct vdIn *videoIn;
    char *videodevice = (char*)"/dev/video0";
    int format = V4L2_PIX_FMT_YUYV;   /* raw YUYV so we can convert ourselves */
    int grabmethod = 1;               /* mmap capture */
    IplImage* frame;
    videoIn = (struct vdIn *) calloc (1, sizeof (struct vdIn));
    if (init_videoIn(videoIn, videodevice, camera.width, camera.height, format, grabmethod) < 0) {
        puts("init failed");
        exit (1);
    }
    //Reset all camera controls
    v4l2ResetControl (videoIn, V4L2_CID_BRIGHTNESS);
    v4l2ResetControl (videoIn, V4L2_CID_CONTRAST);
    v4l2ResetControl (videoIn, V4L2_CID_SATURATION);
    v4l2ResetControl (videoIn, V4L2_CID_GAIN);
    //Setup Camera Parameters
    v4l2SetControl (videoIn, V4L2_CID_BRIGHTNESS, camera.brightness);
    v4l2SetControl (videoIn, V4L2_CID_CONTRAST, camera.contrast);
    v4l2SetControl (videoIn, V4L2_CID_SATURATION, camera.saturation);
    v4l2SetControl (videoIn, V4L2_CID_GAIN, camera.gain);
    if (uvcGrab (videoIn) < 0) {
        fprintf(stderr, "Error grabbing\n");
        close_v4l2(videoIn);
        free(videoIn);
        exit(1);
    }
    /* FIX: `frame` was used uninitialized — the allocation below had been
     * commented out, so frame->imageData dereferenced a wild pointer. */
    frame = cvCreateImage(cvSize(videoIn->width, videoIn->height), IPL_DEPTH_8U, 3);
    // convert image format
    convert_yuyv_to_rgb(videoIn->framebuffer, (unsigned char*)frame->imageData, videoIn->width, videoIn->height);
    close_v4l2 (videoIn);
    free (videoIn);
    return frame;
#else // ENABLE_CAMERA
    return NULL;
#endif // ENABLE_CAMERA
}
/*
 * Construct the camera manager: install signal handlers, allocate the
 * video-device state, open /dev/video0 at 640x480 MJPEG, and run the
 * grab loop.
 *
 * NOTE: the constructor blocks until start_grabbing() returns, because
 * the worker thread is joined immediately after creation.
 */
CameraManager2::CameraManager2() {
    s_catch_signals ();
    /* FIX: value-initialize (`new vdIn()` instead of `new vdIn`) so that
     * POD members such as isstreaming are zeroed instead of indeterminate
     * when printed below. */
    vd = new vdIn();
    this->saving_buffer = false;
    this->grabbing = false;
    init_videoIn("/dev/video0", 640, 480, V4L2_PIX_FMT_MJPEG, 1);
    std::cout << vd->isstreaming << std::endl;   // debug: streaming flag after init
    std::thread t1(&CameraManager2::start_grabbing, this);
    t1.join();   // blocks here until grabbing finishes
}
/******************************************************************************
Description.: This function initializes the plugin. It parses the command-line
              parameters and stores the default and parsed values in the
              appropriate variables.
Input Value.: param contains among others the command-line string
              id is the index of this input plugin instance in cams[]
Return Value: 0 if everything is fine
              1 if "--help" was triggered, in this case the calling program
              should stop running and leave.
******************************************************************************/
int input_init(input_parameter *param, int id)
{
    char *dev = "/dev/video0", *s;
    int width = 640, height = 480, fps = 5, format = V4L2_PIX_FMT_MJPEG, i;

    /* initialize the mutex protecting this camera's V4L2 controls */
    if(pthread_mutex_init(&cams[id].controls_mutex, NULL) != 0) {
        IPRINT("could not initialize mutex variable\n");
        exit(EXIT_FAILURE);
    }

    param->argv[0] = INPUT_PLUGIN_NAME;

    /* show all parameters for DBG purposes */
    for(i = 0; i < param->argc; i++) {
        DBG("argv[%d]=%s\n", i, param->argv[i]);
    }

    /* parse the parameters */
    reset_getopt();
    while(1) {
        int option_index = 0, c = 0;
        /* short/long option pairs occupy adjacent indices, so each handler
         * below matches two consecutive option_index values */
        static struct option long_options[] = {
            {"h", no_argument, 0, 0 },
            {"help", no_argument, 0, 0},
            {"d", required_argument, 0, 0},
            {"device", required_argument, 0, 0},
            {"r", required_argument, 0, 0},
            {"resolution", required_argument, 0, 0},
            {"f", required_argument, 0, 0},
            {"fps", required_argument, 0, 0},
            {"y", no_argument, 0, 0},
            {"yuv", no_argument, 0, 0},
            {"q", required_argument, 0, 0},
            {"quality", required_argument, 0, 0},
            {"m", required_argument, 0, 0},
            {"minimum_size", required_argument, 0, 0},
            {"n", no_argument, 0, 0},
            {"no_dynctrl", no_argument, 0, 0},
            {"l", required_argument, 0, 0},
            {"led", required_argument, 0, 0},
            {"s", no_argument, 0, 0},
            {"stop", no_argument, 0, 0},
            {0, 0, 0, 0}
        };

        /* parsing all parameters according to the list above is sufficent */
        c = getopt_long_only(param->argc, param->argv, "", long_options, &option_index);

        /* no more options to parse */
        if(c == -1) break;

        /* unrecognized option */
        if(c == '?') {
            help();
            return 1;
        }

        /* dispatch the given options */
        switch(option_index) {
            /* h, help */
        case 0:
        case 1:
            DBG("case 0,1\n");
            help();
            return 1;
            break;

            /* d, device */
        case 2:
        case 3:
            DBG("case 2,3\n");
            dev = strdup(optarg);
            break;

            /* r, resolution */
        case 4:
        case 5:
            DBG("case 4,5\n");
            width = -1;
            height = -1;
            /* try to find the resolution in lookup table "resolutions" */
            for(i = 0; i < LENGTH_OF(resolutions); i++) {
                if(strcmp(resolutions[i].string, optarg) == 0) {
                    width = resolutions[i].width;
                    height = resolutions[i].height;
                }
            }
            /* done if width and height were set */
            if(width != -1 && height != -1)
                break;
            /* parse value as decimal value, e.g. "640x480" */
            width = strtol(optarg, &s, 10);
            height = strtol(s + 1, NULL, 10);
            break;

            /* f, fps */
        case 6:
        case 7:
            DBG("case 6,7\n");
            fps = atoi(optarg);
            break;

            /* y, yuv */
        case 8:
        case 9:
            DBG("case 8,9\n");
            format = V4L2_PIX_FMT_YUYV;
            break;

            /* q, quality — only meaningful for YUV capture, where frames are
             * JPEG-compressed in software, hence the format switch */
        case 10:
        case 11:
            DBG("case 10,11\n");
            format = V4L2_PIX_FMT_YUYV;
            gquality = MIN(MAX(atoi(optarg), 0), 100);
            break;

            /* m, minimum_size */
        case 12:
        case 13:
            DBG("case 12,13\n");
            minimum_size = MAX(atoi(optarg), 0);
            break;

            /* n, no_dynctrl */
        case 14:
        case 15:
            DBG("case 14,15\n");
            dynctrls = 0;
            break;

            /* l, led — handler disabled; option is accepted but ignored */
        case 16:
        case 17:/* DBG("case 16,17\n");
            if ( strcmp("on", optarg) == 0 ) {
                led = IN_CMD_LED_ON;
            } else if ( strcmp("off", optarg) == 0 ) {
                led = IN_CMD_LED_OFF;
            } else if ( strcmp("auto", optarg) == 0 ) {
                led = IN_CMD_LED_AUTO;
            } else if ( strcmp("blink", optarg) == 0 ) {
                led = IN_CMD_LED_BLINK;
            }*/
            break;

            /* s, stop */
        case 18:
        case 19:
            DBG("case 18,19\n");
            stop_camera = 1;
            break;

        default:
            DBG("default case\n");
            help();
            return 1;
        }
    }

    DBG("input id: %d\n", id);
    cams[id].id = id;
    cams[id].pglobal = param->global;

    /* allocate webcam datastructure */
    cams[id].videoIn = malloc(sizeof(struct vdIn));
    if(cams[id].videoIn == NULL) {
        IPRINT("not enough memory for videoIn\n");
        exit(EXIT_FAILURE);
    }
    memset(cams[id].videoIn, 0, sizeof(struct vdIn));

    /* display the parsed values */
    IPRINT("Using V4L2 device.: %s\n", dev);
    IPRINT("Desired Resolution: %i x %i\n", width, height);
    IPRINT("Frames Per Second.: %i\n", fps);
    IPRINT("Format............: %s\n", (format == V4L2_PIX_FMT_YUYV) ? "YUV" : "MJPEG");
    if(format == V4L2_PIX_FMT_YUYV)
        IPRINT("JPEG Quality......: %d\n", gquality);
    IPRINT("Stop camera feat..: %d\n", stop_camera);

    DBG("vdIn pn: %d\n", id);
    /* open video device and prepare data structure */
    if(init_videoIn(cams[id].videoIn, dev, width, height, fps, format, 1, cams[id].pglobal, id) < 0) {
        IPRINT("init_VideoIn failed\n");
        closelog();
        exit(EXIT_FAILURE);
    }

    /*
     * recent linux-uvc driver (revision > ~#125) requires to use dynctrls
     * for pan/tilt/focus/...
     * dynctrls must get initialized
     */
    if(dynctrls)
        initDynCtrls(cams[id].videoIn->fd);
    enumerateControls(cams[id].videoIn, cams[id].pglobal, id); // enumerate V4L2 controls after UVC extended mapping
    return 0;
}
int main(int argc, char *argv[]) { #ifndef EMBEDED_X210 //PC platform const SDL_VideoInfo *info; char driver[128]; SDL_Surface *pscreen; SDL_Overlay *overlay; SDL_Rect drect; SDL_Event sdlevent; SDL_Thread *mythread; SDL_mutex *affmutex; Uint32 currtime; Uint32 lasttime; #endif int status; unsigned char *p = NULL; int hwaccel = 0; const char *videodevice = NULL; const char *mode = NULL; int format = V4L2_PIX_FMT_MJPEG; int i; int grabmethod = 1; int width = 320; int height = 240; int fps = 15; unsigned char frmrate = 0; char *avifilename = NULL; int queryformats = 0; int querycontrols = 0; int readconfigfile = 0; char *separateur; char *sizestring = NULL; char *fpsstring = NULL; int enableRawStreamCapture = 0; int enableRawFrameCapture = 0; char * pRGBData=NULL; printf("luvcview version %s \n", version); for (i = 1; i < argc; i++) { /* skip bad arguments */ if (argv[i] == NULL || *argv[i] == 0 || *argv[i] != '-') { continue; } if (strcmp(argv[i], "-d") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -d, aborting.\n"); exit(1); } videodevice = strdup(argv[i + 1]); } if (strcmp(argv[i], "-g") == 0) { /* Ask for read instead default mmap */ grabmethod = 0; } if (strcmp(argv[i], "-w") == 0) { /* disable hw acceleration */ hwaccel = 1; } if (strcmp(argv[i], "-f") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -f, aborting.\n"); exit(1); } mode = strdup(argv[i + 1]); if (strncmp(mode, "yuv", 3) == 0) { format = V4L2_PIX_FMT_YUYV; } else if (strncmp(mode, "jpg", 3) == 0) { format = V4L2_PIX_FMT_MJPEG; } else { format = V4L2_PIX_FMT_MJPEG; } } if (strcmp(argv[i], "-s") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -s, aborting.\n"); exit(1); } sizestring = strdup(argv[i + 1]); width = strtoul(sizestring, &separateur, 10); if (*separateur != 'x') { printf("Error in size use -s widthxheight \n"); exit(1); } else { ++separateur; height = strtoul(separateur, &separateur, 10); if (*separateur != 0) printf("hmm.. 
dont like that!! trying this height \n"); printf(" size width: %d height: %d \n", width, height); } } if (strcmp(argv[i], "-i") == 0){ if (i + 1 >= argc) { printf("No parameter specified with -i, aborting. \n"); exit(1); } fpsstring = strdup(argv[i + 1]); fps = strtoul(fpsstring, &separateur, 10); printf(" interval: %d fps \n", fps); } if (strcmp(argv[i], "-S") == 0) { /* Enable raw stream capture from the start */ enableRawStreamCapture = 1; } if (strcmp(argv[i], "-c") == 0) { /* Enable raw frame capture for the first frame */ enableRawFrameCapture = 1; } if (strcmp(argv[i], "-C") == 0) { /* Enable raw frame stream capture from the start*/ enableRawFrameCapture = 2; } if (strcmp(argv[i], "-o") == 0) { /* set the avi filename */ if (i + 1 >= argc) { printf("No parameter specified with -o, aborting.\n"); exit(1); } avifilename = strdup(argv[i + 1]); } if (strcmp(argv[i], "-L") == 0) { /* query list of valid video formats */ queryformats = 1; } if (strcmp(argv[i], "-l") == 0) { /* query list of valid video formats */ querycontrols = 1; } if (strcmp(argv[i], "-r") == 0) { /* query list of valid video formats */ readconfigfile = 1; } if (strcmp(argv[i], "-h") == 0) { printf("usage: uvcview [-h -d -g -f -s -i -c -o -C -S -L -l -r] \n"); printf("-h print this message \n"); printf("-d /dev/videoX use videoX device\n"); printf("-g use read method for grab instead mmap \n"); printf("-w disable SDL hardware accel. 
\n"); printf("-f video format default jpg others options are yuv jpg \n"); printf("-i fps use specified frame interval \n"); printf("-s widthxheight use specified input size \n"); printf("-c enable raw frame capturing for the first frame\n"); printf("-C enable raw frame stream capturing from the start\n"); printf("-S enable raw stream capturing from the start\n"); printf("-o avifile create avifile, default video.avi\n"); printf("-L query valid video formats\n"); printf("-l query valid controls and settings\n"); printf("-r read and set control settings from luvcview.cfg\n"); exit(0); } } #ifndef EMBEDED_X210 //PC platform /************* Test SDL capabilities ************/ if (SDL_Init(SDL_INIT_VIDEO) < 0) { fprintf(stderr, "Couldn't initialize SDL: %s\n", SDL_GetError()); exit(1); } /* For this version, we'll be save and disable hardware acceleration */ if(hwaccel) { if ( ! getenv("SDL_VIDEO_YUV_HWACCEL") ) { putenv("SDL_VIDEO_YUV_HWACCEL=0"); } } if (SDL_VideoDriverName(driver, sizeof(driver))) { printf("Video driver: %s\n", driver); } info = SDL_GetVideoInfo(); if (info->wm_available) { printf("A window manager is available\n"); } if (info->hw_available) { printf("Hardware surfaces are available (%dK video memory)\n", info->video_mem); SDL_VIDEO_Flags |= SDL_HWSURFACE; } if (info->blit_hw) { printf("Copy blits between hardware surfaces are accelerated\n"); SDL_VIDEO_Flags |= SDL_ASYNCBLIT; } if (info->blit_hw_CC) { printf ("Colorkey blits between hardware surfaces are accelerated\n"); } if (info->blit_hw_A) { printf("Alpha blits between hardware surfaces are accelerated\n"); } if (info->blit_sw) { printf ("Copy blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_sw_CC) { printf ("Colorkey blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_sw_A) { printf ("Alpha blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_fill) { printf("Color fills on hardware 
surfaces are accelerated\n"); } if (!(SDL_VIDEO_Flags & SDL_HWSURFACE)) SDL_VIDEO_Flags |= SDL_SWSURFACE; #endif if (videodevice == NULL || *videodevice == 0) { videodevice = "/dev/video0"; } if (avifilename == NULL || *avifilename == 0) { avifilename = "video.avi"; } videoIn = (struct vdIn *) calloc(1, sizeof(struct vdIn)); if ( queryformats ) { /* if we're supposed to list the video formats, do that now and go out */ check_videoIn(videoIn,(char *) videodevice); free(videoIn); #ifndef EMBEDED_X210 SDL_Quit(); #endif exit(1); } if (init_videoIn(videoIn, (char *) videodevice, width, height, fps, format, grabmethod, avifilename) < 0) exit(1); /* if we're supposed to list the controls, do that now */ if ( querycontrols ) enum_controls(videoIn->fd); /* if we're supposed to read the control settings from a configfile, do that now */ if ( readconfigfile ) load_controls(videoIn->fd); #ifdef EMBEDED_X210 #ifdef SOFT_COLOR_CONVERT init_framebuffer(); #else x6410_init_Draw(videoIn->width,videoIn->height); #endif #else pscreen = SDL_SetVideoMode(videoIn->width, videoIn->height+30 , 0,SDL_VIDEO_Flags); overlay =SDL_CreateYUVOverlay(videoIn->width, videoIn->height+30 , SDL_YUY2_OVERLAY, pscreen); p = (unsigned char *) overlay->pixels[0]; drect.x = 0; drect.y = 0; drect.w =pscreen->w; drect.h = pscreen->h; #endif if (enableRawStreamCapture) { videoIn->captureFile = fopen("stream.raw", "wb"); if(videoIn->captureFile == NULL) { perror("Unable to open file for raw stream capturing"); } else { printf("Starting raw stream capturing to stream.raw ...\n"); } } if (enableRawFrameCapture) videoIn->rawFrameCapture = enableRawFrameCapture; initLut(); #ifndef EMBEDED_X210 SDL_WM_SetCaption(title_act[A_VIDEO].title, NULL); lasttime = SDL_GetTicks(); creatButt(videoIn->width, 32); SDL_LockYUVOverlay(overlay); memcpy(p + (videoIn->width * (videoIn->height) * 2), YUYVbutt, videoIn->width * 64); SDL_UnlockYUVOverlay(overlay); /* initialize thread data */ ptdata.ptscreen = &pscreen; 
ptdata.ptvideoIn = videoIn; ptdata.ptsdlevent = &sdlevent; ptdata.drect = &drect; affmutex = SDL_CreateMutex(); ptdata.affmutex = affmutex; mythread = SDL_CreateThread(eventThread, (void *) &ptdata); #endif pRGBData = (unsigned char *)malloc(videoIn->width*videoIn->width*4*sizeof(char)); if(pRGBData==NULL) { return ; } /* main big loop */ while (videoIn->signalquit) { #ifndef EMBEDED_X210 currtime = SDL_GetTicks(); if (currtime - lasttime > 0) { frmrate = 1000/(currtime - lasttime); } lasttime = currtime; #endif if (uvcGrab(videoIn) < 0) { printf("Error grabbing \n"); break; } /* if we're grabbing video, show the frame rate */ if (videoIn->toggleAvi) printf("\rframe rate: %d ",frmrate); #ifndef EMBEDED_X210 SDL_LockYUVOverlay(overlay); memcpy(p, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2); SDL_UnlockYUVOverlay(overlay); SDL_DisplayYUVOverlay(overlay, &drect); #endif #ifdef EMBEDED_X210 #ifdef SOFT_COLOR_CONVERT // yuv to rgb565 ,and to frambuffer process_image(videoIn->framebuffer,fbp,videoIn->width,videoIn->height,vinfo,finfo); // convertYUYVtoRGB565(videoIn->framebuffer,pRGBData,videoIn->width,videoIn->height); // Pyuv422torgb24(videoIn->framebuffer, pRGBData, videoIn->width, videoIn->height); // memcpy(fbp,pRGBData,videoIn->width*videoIn->height*2); #else //X6410 post processor convert yuv to rgb,X210 not suport now. 
/* memcpy(pInbuffer, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2); ioctl(dev_fb0, GET_FB_INFO, &fb_info); pp_param.SrcFrmSt = ioctl(dev_pp, S3C_PP_GET_RESERVED_MEM_ADDR_PHY); //must be physical adress pp_param.DstFrmSt = fb_info.map_dma_f1; //must be physical adress ioctl(dev_pp, S3C_PP_SET_PARAMS, &pp_param); ioctl(dev_pp, S3C_PP_SET_DST_BUF_ADDR_PHY, &pp_param); ioctl(dev_pp, S3C_PP_SET_SRC_BUF_ADDR_PHY, &pp_param); ioctl(dev_pp, S3C_PP_START); */ #endif #endif if (videoIn->getPict) { switch(videoIn->formatIn){ case V4L2_PIX_FMT_MJPEG: get_picture(videoIn->tmpbuffer,videoIn->buf.bytesused); break; case V4L2_PIX_FMT_YUYV: get_pictureYV2(videoIn->framebuffer,videoIn->width,videoIn->height); break; default: break; } videoIn->getPict = 0; printf("get picture !\n"); } #ifndef EMBEDED_X210 SDL_LockMutex(affmutex); ptdata.frmrate = frmrate; SDL_WM_SetCaption(videoIn->status, NULL); SDL_UnlockMutex(affmutex); #endif #ifdef EMBEDED_X210 usleep(10); #else SDL_Delay(10); #endif } #ifndef EMBEDED_X210 SDL_WaitThread(mythread, &status); SDL_DestroyMutex(affmutex); #endif /* if avifile is defined, we made a video: compute the exact fps and set it in the video */ if (videoIn->avifile != NULL) { float fps=(videoIn->framecount/(videoIn->recordtime/1000)); fprintf(stderr,"setting fps to %f\n",fps); AVI_set_video(videoIn->avifile, videoIn->width, videoIn->height, fps, "MJPG"); AVI_close(videoIn->avifile); } close_v4l2(videoIn); #ifdef EMBEDED_X210 #ifdef SOFT_COLOR_CONVERT close_frambuffer(); #else x6410_DeInit_Draw(); #endif #endif free(pRGBData); free(videoIn); destroyButt(); freeLut(); printf(" Clean Up done Quit \n"); #ifndef EMBEDED_X210 SDL_Quit(); #endif }
/*
 * uvccapture-style entry point: parse single-letter options, open the
 * V4L2 device, apply camera controls, then periodically grab a frame
 * and save it to `outputfile` (JPEG), optionally running a post-capture
 * command, until `run` is cleared by a signal or delay == 0 (one shot).
 */
int main (int argc, char *argv[])
{
    char *videodevice = "/dev/video0";
    char *outputfile = "snap.jpg";
    char *post_capture_command[3];   /* argv-style: {cmd, outputfile, NULL} */
    int format = V4L2_PIX_FMT_MJPEG;
    int grabmethod = 1;              /* 1 = mmap, 0 = read() */
    int width = 320;
    int height = 240;
    int brightness = 0, contrast = 0, saturation = 0, gain = 0;
    int verbose = 0;
    int delay = 0;                   /* seconds between snapshots; 0 = single shot */
    int quality = 95;
    int post_capture_command_wait = 0;
    time_t ref_time;
    struct vdIn *videoIn;
    FILE *file;

    (void) signal (SIGINT, sigcatch);
    (void) signal (SIGQUIT, sigcatch);
    /* NOTE(review): SIGKILL cannot be caught per POSIX — this call fails
     * silently and could be removed */
    (void) signal (SIGKILL, sigcatch);
    (void) signal (SIGTERM, sigcatch);
    (void) signal (SIGABRT, sigcatch);
    (void) signal (SIGTRAP, sigcatch);

    // set post_capture_command to default values
    post_capture_command[0] = NULL;
    post_capture_command[1] = NULL;
    post_capture_command[2] = NULL;

    //Options Parsing (FIXME)
    /* option values are glued to the flag, e.g. -x640 -y480 -t5 */
    while ((argc > 1) && (argv[1][0] == '-')) {
        switch (argv[1][1]) {
        case 'v':
            verbose++;
            break;
        case 'o':
            outputfile = &argv[1][2];
            break;
        case 'd':
            videodevice = &argv[1][2];
            break;
        case 'x':
            width = atoi (&argv[1][2]);
            break;
        case 'y':
            height = atoi (&argv[1][2]);
            break;
        case 'r':
            grabmethod = 0;
            break;
        case 'm':
            format = V4L2_PIX_FMT_YUYV;
            break;
        case 't':
            delay = atoi (&argv[1][2]);
            break;
        case 'c':
            post_capture_command[0] = &argv[1][2];
            break;
        case 'w':
            post_capture_command_wait = 1;
            break;
        case 'B':
            brightness = atoi (&argv[1][2]);
            break;
        case 'C':
            contrast = atoi (&argv[1][2]);
            break;
        case 'S':
            saturation = atoi (&argv[1][2]);
            break;
        case 'G':
            gain = atoi (&argv[1][2]);
            break;
        case 'q':
            quality = atoi (&argv[1][2]);
            break;
        case 'h':
            usage ();
            break;
        default:
            fprintf (stderr, "Unknown option %s \n", argv[1]);
            usage ();
        }
        ++argv;
        --argc;
    }

    /* large frames or non-default quality require the software YUV->JPEG
     * path instead of passing through the camera's MJPEG stream */
    if ((width > 960) || (height > 720) || (quality != 95))
        format = V4L2_PIX_FMT_YUYV;

    if (post_capture_command[0])
        post_capture_command[1] = outputfile;

    if (verbose >= 1) {
        fprintf (stderr, "Using videodevice: %s\n", videodevice);
        fprintf (stderr, "Saving images to: %s\n", outputfile);
        fprintf (stderr, "Image size: %dx%d\n", width, height);
        fprintf (stderr, "Taking snapshot every %d seconds\n", delay);
        if (grabmethod == 1)
            fprintf (stderr, "Taking images using mmap\n");
        else
            fprintf (stderr, "Taking images using read\n");
        if (post_capture_command[0])
            fprintf (stderr, "Executing '%s' after each image capture\n", post_capture_command[0]);
    }
    videoIn = (struct vdIn *) calloc (1, sizeof (struct vdIn));
    if (init_videoIn (videoIn, (char *) videodevice, width, height, format, grabmethod) < 0)
        exit (1);

    //Reset all camera controls
    if (verbose >= 1)
        fprintf (stderr, "Resetting camera settings\n");
    v4l2ResetControl (videoIn, V4L2_CID_BRIGHTNESS);
    v4l2ResetControl (videoIn, V4L2_CID_CONTRAST);
    v4l2ResetControl (videoIn, V4L2_CID_SATURATION);
    v4l2ResetControl (videoIn, V4L2_CID_GAIN);

    //Setup Camera Parameters (0 means "leave at the reset default")
    if (brightness != 0) {
        if (verbose >= 1)
            fprintf (stderr, "Setting camera brightness to %d\n", brightness);
        v4l2SetControl (videoIn, V4L2_CID_BRIGHTNESS, brightness);
    } else if (verbose >= 1) {
        fprintf (stderr, "Camera brightness level is %d\n", v4l2GetControl (videoIn, V4L2_CID_BRIGHTNESS));
    }
    if (contrast != 0) {
        if (verbose >= 1)
            fprintf (stderr, "Setting camera contrast to %d\n", contrast);
        v4l2SetControl (videoIn, V4L2_CID_CONTRAST, contrast);
    } else if (verbose >= 1) {
        fprintf (stderr, "Camera contrast level is %d\n", v4l2GetControl (videoIn, V4L2_CID_CONTRAST));
    }
    if (saturation != 0) {
        if (verbose >= 1)
            fprintf (stderr, "Setting camera saturation to %d\n", saturation);
        v4l2SetControl (videoIn, V4L2_CID_SATURATION, saturation);
    } else if (verbose >= 1) {
        fprintf (stderr, "Camera saturation level is %d\n", v4l2GetControl (videoIn, V4L2_CID_SATURATION));
    }
    if (gain != 0) {
        if (verbose >= 1)
            fprintf (stderr, "Setting camera gain to %d\n", gain);
        v4l2SetControl (videoIn, V4L2_CID_GAIN, gain);
    } else if (verbose >= 1) {
        fprintf (stderr, "Camera gain level is %d\n", v4l2GetControl (videoIn, V4L2_CID_GAIN));
    }

    ref_time = time (NULL);

    /* capture loop: grab continuously, save a snapshot every `delay` seconds */
    while (run) {
        if (verbose >= 2)
            fprintf (stderr, "Grabbing frame\n");
        if (uvcGrab (videoIn) < 0) {
            fprintf (stderr, "Error grabbing\n");
            close_v4l2 (videoIn);
            free (videoIn);
            exit (1);
        }
        if ((difftime (time (NULL), ref_time) > delay) || delay == 0) {
            if (verbose >= 1)
                fprintf (stderr, "Saving image to: %s\n", outputfile);
            file = fopen (outputfile, "wb");
            if (file != NULL) {
                switch (videoIn->formatIn) {
                case V4L2_PIX_FMT_YUYV:
                    compress_yuyv_to_jpeg (videoIn, file, quality);
                    break;
                default:
                    /* MJPEG path: camera frame is already JPEG (plus
                     * inserted Huffman tables) — write it out verbatim */
                    fwrite (videoIn->tmpbuffer, videoIn->buf.bytesused + DHT_SIZE, 1, file);
                    break;
                }
                fclose (file);
                videoIn->getPict = 0;
            }
            if (post_capture_command[0]) {
                if (verbose >= 1)
                    fprintf (stderr, "Executing '%s %s'\n", post_capture_command[0], post_capture_command[1]);
                if (spawn (post_capture_command, post_capture_command_wait, verbose)) {
                    fprintf (stderr, "Command exited with error\n");
                    close_v4l2 (videoIn);
                    free (videoIn);
                    exit (1);
                }
            }
            ref_time = time (NULL);
        }
        if (delay == 0)
            break;   /* single-shot mode */
    }
    close_v4l2 (videoIn);
    free (videoIn);
    return 0;
}
/* #########################################################################
Main
######################################################################### */
/*
 * uvc_streamer-style entry point: parse options, open the V4L2 device in
 * MJPEG mode, start the camera/mind/uart worker threads and the optional
 * motor-control server, then accept stream clients forever, spawning a
 * detached client_thread per connection.
 */
int main(int argc, char *argv[])
{
    struct sockaddr_in addr;
    int on = 1, disable_control_port = 0;
    pthread_t client, cam, cntrl, mind, uart_control;
    char *dev = "/dev/video0";
    int fps = 5, daemon = 0;

    cd.width = 640;
    cd.height = 480;
    cdata = &cd;
    /* ports are stored in network byte order from the start */
    cd.control_port = htons(8081);
    cd.stream_port = htons(8080);

    while (1) {
        int option_index = 0, c = 0;
        /* short/long option pairs occupy adjacent indices */
        static struct option long_options[] = \
        {
            {"h", no_argument, 0, 0},
            {"help", no_argument, 0, 0},
            {"d", required_argument, 0, 0},
            {"device", required_argument, 0, 0},
            {"r", required_argument, 0, 0},
            {"resolution", required_argument, 0, 0},
            {"f", required_argument, 0, 0},
            {"fps", required_argument, 0, 0},
            {"p", required_argument, 0, 0},
            {"port", required_argument, 0, 0},
            {"v", no_argument, 0, 0},
            {"version", no_argument, 0, 0},
            {"b", no_argument, 0, 0},
            {"background", no_argument, 0, 0},
            {"c", required_argument, 0, 0},
            {"control_port", required_argument, 0, 0},
            {"disable_control", no_argument, 0, 0},
            {"C", no_argument, 0, 0},
            {"calibrate", no_argument, 0, 0},
            {0, 0, 0, 0}
        };

        c = getopt_long_only(argc, argv, "", long_options, &option_index);

        /* no more options to parse */
        if (c == -1) break;

        /* unrecognized option */
        if (c == '?') {
            help(argv[0]);
            return 0;
        }

        switch (option_index) {
            /* h, help */
        case 0:
        case 1:
            help(argv[0]);
            return 0;
            break;

            /* d, device */
        case 2:
        case 3:
            dev = strdup(optarg);
            break;

            /* r, resolution — only the four fixed sizes are accepted */
        case 4:
        case 5:
            if (strcmp("960x720", optarg) == 0) {
                cd.width = 960;
                cd.height = 720;
            } else if (strcmp("640x480", optarg) == 0) {
                cd.width = 640;
                cd.height = 480;
            } else if (strcmp("320x240", optarg) == 0) {
                cd.width = 320;
                cd.height = 240;
            } else if (strcmp("160x120", optarg) == 0) {
                cd.width = 160;
                cd.height = 120;
            } else {
                fprintf(stderr, "ignoring unsupported resolution\n");
            }
            break;

            /* f, fps */
        case 6:
        case 7:
            fps = atoi(optarg);
            break;

            /* p, port */
        case 8:
        case 9:
            cd.stream_port = htons(atoi(optarg));
            break;

            /* v, version */
        case 10:
        case 11:
            printf("UVC Streamer Version: %s\n" \
                   "Compilation Date....: %s\n" \
                   "Compilation Time....: %s\n", SOURCE_VERSION, __DATE__, __TIME__);
            return 0;
            break;

            /* b, background */
        case 12:
        case 13:
            daemon = 1;
            break;

            /* c, control_port */
        case 14:
        case 15:
            cd.control_port = htons(atoi(optarg));
            break;

            /* disable_control */
        case 16:
            disable_control_port = 1;
            break;

            /* C, calibrate — NOTE(review): this sets disable_control_port
             * like the option above; looks like a copy-paste placeholder,
             * confirm intended calibrate behavior */
        case 17:
        case 18:
            disable_control_port = 1;
            break;

        default:
            help(argv[0]);
            return 0;
        }
    }

    /* ignore SIGPIPE (send if transmitting to closed sockets) */
    signal(SIGPIPE, SIG_IGN);
    if (signal(SIGINT, signal_handler) == SIG_ERR) {
        fprintf(stderr, "could not register signal handler\n");
        exit(1);
    }

    /* fork to the background */
    if (daemon) {
        daemon_mode();
    }

    /* allocate webcam datastructure */
    /* NOTE(review): calloc result is unchecked before use below */
    cd.videoIn = (struct vdIn *) calloc(1, sizeof(struct vdIn));

    fprintf(stderr, "Using V4L2 device.....: %s\n", dev);
    fprintf(stderr, "Resolution............: %i x %i\n", cdata->width, cdata->height);
    fprintf(stderr, "frames per second.....: %i\n", fps);
    fprintf(stderr, "TCP port..............: %i\n", ntohs(cd.stream_port));
    if (disable_control_port == 1) {
        fprintf(stderr, "motor control server..: disabled\n");
    } else {
        fprintf(stderr, "motor control TCP port: %i\n", ntohs(cd.control_port));
    }

    /* open video device and prepare data structure */
    cd.video_dev = init_videoIn(cd.videoIn, dev, cd.width, cd.height, fps, V4L2_PIX_FMT_MJPEG, 1);
    if (cd.video_dev < 0) {
        fprintf(stderr, "init_VideoIn failed\n");
        exit(1);
    }

    /* open socket for server */
    sd = socket(PF_INET, SOCK_STREAM, 0);
    if (sd < 0) {
        fprintf(stderr, "socket failed\n");
        exit(1);
    }

    /* ignore "socket already in use" errors */
    if (setsockopt(sd, SOL_SOCKET, SO_REUSEADDR, &on, sizeof(on)) < 0) {
        perror("setsockopt(SO_REUSEADDR) failed");
        exit(1);
    }

    /* configure server address to listen to all local IPs */
    memset(&addr, 0, sizeof(addr));
    addr.sin_family = AF_INET;
    addr.sin_port = cd.stream_port;   /* already in network byte order */
    addr.sin_addr.s_addr = htonl(INADDR_ANY);
    if (bind(sd, (struct sockaddr*) &addr, sizeof(addr)) != 0) {
        fprintf(stderr, "bind failed\n");
        perror("Bind");
        exit(1);
    }

    /* start listening on socket */
    if (listen(sd, 10) != 0) {
        fprintf(stderr, "listen failed\n");
        exit(1);
    }

    /* start to read the camera, push picture buffers into global buffer */
    cd.videoIn->tmpbuffer = (unsigned char *) calloc(1, (size_t) cd.videoIn->framesizeIn);
    g_buf = (unsigned char *) calloc(1, (size_t) cd.videoIn->framesizeIn);

    vision_control_init(); //must be first to call;
    pthread_create(&cam, 0, cam_thread, NULL);
    pthread_detach(cam);
    pthread_create(&mind, 0, mind_thread, NULL);
    pthread_detach(mind);
    init_vision();
    init_controller();
    pthread_create(&uart_control, 0, &uart_control_read, NULL);
    pthread_detach(uart_control);

    /* start control server */
    if (disable_control_port == 0) {
        pthread_create(&cntrl, NULL, &uvcstream_control, cdata);
        pthread_detach(cntrl);
    }

    /* create a child for every client that connects */
    /* NOTE(review): accept() return value is not checked; on failure a
     * client_thread would be handed fd -1 */
    while (1) {
        int *pfd = (int *) calloc(1, sizeof(int));
        *pfd = accept(sd, 0, 0);
        pthread_create(&client, NULL, &client_thread, pfd);
        pthread_detach(client);
    }

    return 0;
}
void startVideoSrvr() { pthread_t videoSocketThread; /* alloc mameory for the videoIn struct & initialize */ videoIn = (struct vdIn *) calloc (1, sizeof (struct vdIn)); if (init_videoIn (videoIn, (char *) videodevice, width, height, format, grabmethod) < 0) exit (1); /* alloc memory for the control struct & video out array & initialize */ ctrlStruct ctrl, *pc; if ((ctrl.imgArray = malloc(3 * width * height)) < 0) // enough space for rgb exit(-1); ctrl.doCapture = 0; //Reset all camera controls if (verbose >= 1) fprintf (stderr, "Resetting camera settings\n"); v4l2ResetControl (videoIn, V4L2_CID_BRIGHTNESS); v4l2ResetControl (videoIn, V4L2_CID_CONTRAST); v4l2ResetControl (videoIn, V4L2_CID_SATURATION); v4l2ResetControl (videoIn, V4L2_CID_GAIN); //Setup Camera Parameters if (brightness != 0) { if (verbose >= 1) fprintf (stderr, "Setting camera brightness to %d\n", brightness); v4l2SetControl (videoIn, V4L2_CID_BRIGHTNESS, brightness); } else if (verbose >= 1) { fprintf (stderr, "Camera brightness level is %d\n", v4l2GetControl (videoIn, V4L2_CID_BRIGHTNESS)); } if (contrast != 0) { if (verbose >= 1) fprintf (stderr, "Setting camera contrast to %d\n", contrast); v4l2SetControl (videoIn, V4L2_CID_CONTRAST, contrast); } else if (verbose >= 1) { fprintf (stderr, "Camera contrast level is %d\n", v4l2GetControl (videoIn, V4L2_CID_CONTRAST)); } if (saturation != 0) { if (verbose >= 1) fprintf (stderr, "Setting camera saturation to %d\n", saturation); v4l2SetControl (videoIn, V4L2_CID_SATURATION, saturation); } else if (verbose >= 1) { fprintf (stderr, "Camera saturation level is %d\n", v4l2GetControl (videoIn, V4L2_CID_SATURATION)); } if (gain != 0) { if (verbose >= 1) fprintf (stderr, "Setting camera gain to %d\n", gain); v4l2SetControl (videoIn, V4L2_CID_GAIN, gain); } else if (verbose >= 1) { fprintf (stderr, "Camera gain level is %d\n", v4l2GetControl (videoIn, V4L2_CID_GAIN)); } // wait for a video client to connect before proceeding fprintf (stderr, "waiting for video 
client connection on port %d\n", port); if ((ctrl.videoSocket = wait4client(port)) <= 0) { fprintf (stderr, "error connecting to client: %d\n", ctrl.videoSocket); exit(-1); } // start the thread that handles the video client requests pthread_create(&videoSocketThread, NULL, (void *)cmdHandler, (void *)&ctrl); while (run) { if (verbose >= 2) fprintf (stderr, "."); if (uvcGrab (videoIn) < 0) { fprintf (stderr, "Error grabbing\n"); close_v4l2 (videoIn); free (videoIn); exit (1); } if (ctrl.doCapture == 1) { if (verbose >= 1) { fprintf (stderr, "captured %d byte image at 0x%x %dx%d\n", videoIn->framesizeIn, videoIn->framebuffer, videoIn->width, videoIn->height); } else { fprintf (stderr, "."); } if (outputType == 0) yuyv2Y(videoIn, &ctrl); else yuyv2rgb(videoIn, &ctrl); if (verbose >=1) fprintf (stderr, "converted image to luminance in buffer at 0x%x\n", ctrl.imgArray); videoIn->getPict = 0; ctrl.doCapture = 0; } } close_v4l2 (videoIn); free (videoIn); return; }
int main(int argc, char *argv[]) { char driver[128]; int status; //Uint32 currtime; //Uint32 lasttime; unsigned char *p = NULL; int hwaccel = 0; const char *videodevice = NULL; const char *mode = NULL; int format = V4L2_PIX_FMT_MJPEG; int i; int grabmethod = 1; int width = 320; int height = 240; int fps = 15; unsigned char frmrate = 0; char *avifilename = NULL; int queryformats = 0; int querycontrols = 0; int readconfigfile = 0; char *separateur; char *sizestring = NULL; char *fpsstring = NULL; int enableRawStreamCapture = 0; int enableRawFrameCapture = 0; printf("luvcview version %s \n", version); for (i = 1; i < argc; i++) { /* skip bad arguments */ if (argv[i] == NULL || *argv[i] == 0 || *argv[i] != '-') { continue; } if (strcmp(argv[i], "-d") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -d, aborting.\n"); exit(1); } videodevice = strdup(argv[i + 1]); } if (strcmp(argv[i], "-g") == 0) { /* Ask for read instead default mmap */ grabmethod = 0; } if (strcmp(argv[i], "-w") == 0) { /* disable hw acceleration */ hwaccel = 1; } if (strcmp(argv[i], "-f") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -f, aborting.\n"); exit(1); } mode = strdup(argv[i + 1]); if (strncmp(mode, "yuv", 3) == 0) { format = V4L2_PIX_FMT_YUYV; } else if (strncmp(mode, "jpg", 3) == 0) { format = V4L2_PIX_FMT_MJPEG; } else { format = V4L2_PIX_FMT_JPEG; } } if (strcmp(argv[i], "-s") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -s, aborting.\n"); exit(1); } sizestring = strdup(argv[i + 1]); width = strtoul(sizestring, &separateur, 10); if (*separateur != 'x') { printf("Error in size use -s widthxheight \n"); exit(1); } else { ++separateur; height = strtoul(separateur, &separateur, 10); if (*separateur != 0) printf("hmm.. dont like that!! trying this height \n"); printf(" size width: %d height: %d \n", width, height); } } if (strcmp(argv[i], "-i") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -i, aborting. 
\n"); exit(1); } fpsstring = strdup(argv[i + 1]); fps = strtoul(fpsstring, &separateur, 10); printf(" interval: %d fps \n", fps); } if (strcmp(argv[i], "-S") == 0) { /* Enable raw stream capture from the start */ enableRawStreamCapture = 1; } if (strcmp(argv[i], "-c") == 0) { /* Enable raw frame capture for the first frame */ enableRawFrameCapture = 1; } if (strcmp(argv[i], "-C") == 0) { /* Enable raw frame stream capture from the start*/ enableRawFrameCapture = 2; } if (strcmp(argv[i], "-o") == 0) { /* set the avi filename */ if (i + 1 >= argc) { printf("No parameter specified with -o, aborting.\n"); exit(1); } avifilename = strdup(argv[i + 1]); } if (strcmp(argv[i], "-L") == 0) { /* query list of valid video formats */ queryformats = 1; } if (strcmp(argv[i], "-l") == 0) { /* query list of valid video formats */ querycontrols = 1; } if (strcmp(argv[i], "-r") == 0) { /* query list of valid video formats */ readconfigfile = 1; } if (strcmp(argv[i], "-O") == 0) { /* get picture */ getpictureflag = 1; } if (strcmp(argv[i], "-h") == 0) { printf( "usage: uvcview [-h -d -g -f -s -i -c -o -C -S -L -l -r] \n"); printf("-h print this message \n"); printf("-d /dev/videoX use videoX device\n"); printf("-g use read method for grab instead mmap \n"); printf("-w disable SDL hardware accel. 
\n"); printf( "-f video format default jpg others options are yuv jpg \n"); printf("-i fps use specified frame interval \n"); printf("-s widthxheight use specified input size \n"); printf("-c enable raw frame capturing for the first frame\n"); printf("-C enable raw frame stream capturing from the start\n"); printf("-S enable raw stream capturing from the start\n"); printf("-o avifile create avifile, default video.avi\n"); printf("-L query valid video formats\n"); printf("-l query valid controls and settings\n"); printf("-r read and set control settings from luvcview.cfg\n"); printf("-O get picture.\n"); exit(0); } } if (videodevice == NULL || *videodevice == 0) { videodevice = "/dev/video0"; } if (avifilename == NULL || *avifilename == 0) { avifilename = "video.avi"; } videoIn = (struct vdIn *) calloc(1, sizeof(struct vdIn)); if (queryformats) { /* if we're supposed to list the video formats, do that now and go out */ check_videoIn(videoIn, (char *) videodevice); free(videoIn); exit(1); } if (init_videoIn(videoIn, (char *) videodevice, width, height, fps, format, grabmethod, avifilename) < 0) exit(1); /* if we're supposed to list the controls, do that now */ if (querycontrols) enum_controls(videoIn->fd); /* if we're supposed to read the control settings from a configfile, do that now */ if (readconfigfile) load_controls(videoIn->fd); printf("Enable Raw Stream Capture\n"); if (enableRawStreamCapture) { videoIn->captureFile = fopen("stream.raw", "wb"); if (videoIn->captureFile == NULL) { perror("Unable to open file for raw stream capturing"); } else { printf("Starting raw stream capturing to stream.raw ...\n"); } } if (enableRawFrameCapture) videoIn->rawFrameCapture = enableRawFrameCapture; initLut(); printf("Begin main big loop\n"); int loopNum = 0; /* main big loop */ while (videoIn->signalquit) { // if (uvcGrab(videoIn) < 0) { printf("Error grabbing \n"); break; } // /* if we're grabbing video, show the frame rate */ if (videoIn->toggleAvi) printf("\rframe rate: 
%d ", frmrate); // if (getpictureflag) { //if (videoIn->getPict) { switch (videoIn->formatIn) { case V4L2_PIX_FMT_MJPEG: get_picture(videoIn->tmpbuffer, videoIn->buf.bytesused); break; case V4L2_PIX_FMT_YUYV: printf("get picture yuv...\n"); get_pictureYV2(videoIn->framebuffer, videoIn->width, videoIn->height); break; default: break; } videoIn->getPict = 0; printf("get picture !\n"); } printf("loop number %d\n",loopNum); loopNum ++; } /* if avifile is defined, we made a video: compute the exact fps and set it in the video */ if (videoIn->avifile != NULL) { float fps = (videoIn->framecount / (videoIn->recordtime / 1000)); fprintf(stderr, "setting fps to %f\n", fps); AVI_set_video(videoIn->avifile, videoIn->width, videoIn->height, fps, "MJPG"); AVI_close(videoIn->avifile); } close_v4l2(videoIn); free(videoIn); freeLut(); printf(" Clean Up done Quit \n"); }
/******************************************************************************
Description.: This function initializes the plugin. It parses the commandline-
              parameter and stores the default and parsed values in the
              appropriate variables.
Input Value.: param contains among others the command-line string
Return Value: 0 if everything is fine
              1 if "--help" was triggered, in this case the calling programm
              should stop running and leave.
******************************************************************************/
int input_init(input_parameter *param)
{
    char *argv[MAX_ARGUMENTS] = {NULL}, *dev = "/dev/video0", *s;
    int argc = 1, width = 640, height = 480, fps = 5, format = V4L2_PIX_FMT_MJPEG, i;
    in_cmd_type led = IN_CMD_LED_AUTO;

    /* initialize the mutex variable */
    if (pthread_mutex_init(&controls_mutex, NULL) != 0) {
        IPRINT("could not initialize mutex variable\n");
        exit(EXIT_FAILURE);
    }

    /* convert the single parameter-string to an array of strings */
    argv[0] = INPUT_PLUGIN_NAME;
    if (param->parameter_string != NULL && strlen(param->parameter_string) != 0) {
        char *arg = NULL, *saveptr = NULL, *token = NULL;

        arg = (char *) strdup(param->parameter_string);

        /* BUGFIX: the old code only tokenized when the string contained a
         * space, so a single-option parameter string (e.g. "-y") was
         * silently dropped.  strtok_r handles the one-token case fine. */
        token = strtok_r(arg, " ", &saveptr);
        while (token != NULL) {
            if (argc >= MAX_ARGUMENTS) {
                IPRINT("ERROR: too many arguments to input plugin\n");
                free(arg);
                return 1;
            }
            argv[argc] = strdup(token);
            argc++;
            token = strtok_r(NULL, " ", &saveptr);
        }
        free(arg);   /* each token was duplicated above; the scratch copy can go */
    }

    /* show all parameters for DBG purposes */
    for (i = 0; i < argc; i++) {
        DBG("argv[%d]=%s\n", i, argv[i]);
    }

    /* parse the parameters */
    reset_getopt();
    while (1) {
        int option_index = 0, c = 0;
        static struct option long_options[] = {
            {"h", no_argument, 0, 0},
            {"help", no_argument, 0, 0},
            {"d", required_argument, 0, 0},
            {"device", required_argument, 0, 0},
            {"r", required_argument, 0, 0},
            {"resolution", required_argument, 0, 0},
            {"f", required_argument, 0, 0},
            {"fps", required_argument, 0, 0},
            {"y", no_argument, 0, 0},
            {"yuv", no_argument, 0, 0},
            {"q", required_argument, 0, 0},
            {"quality", required_argument, 0, 0},
            {"m", required_argument, 0, 0},
            {"minimum_size", required_argument, 0, 0},
            {"n", no_argument, 0, 0},
            {"no_dynctrl", no_argument, 0, 0},
            {"l", required_argument, 0, 0},
            {"led", required_argument, 0, 0},
            {0, 0, 0, 0}
        };

        /* parsing all parameters according to the list above is sufficent */
        c = getopt_long_only(argc, argv, "", long_options, &option_index);

        /* no more options to parse */
        if (c == -1) break;

        /* unrecognized option */
        if (c == '?') {
            help();
            return 1;
        }

        /* dispatch the given options */
        switch (option_index) {
        /* h, help */
        case 0:
        case 1:
            DBG("case 0,1\n");
            help();
            return 1;
            break;

        /* d, device */
        case 2:
        case 3:
            DBG("case 2,3\n");
            dev = strdup(optarg);
            break;

        /* r, resolution */
        case 4:
        case 5:
            DBG("case 4,5\n");
            width = -1;
            height = -1;
            /* try to find the resolution in lookup table "resolutions" */
            for (i = 0; i < LENGTH_OF(resolutions); i++) {
                if (strcmp(resolutions[i].string, optarg) == 0) {
                    width = resolutions[i].width;
                    height = resolutions[i].height;
                }
            }
            /* done if width and height were set */
            if (width != -1 && height != -1)
                break;
            /* parse value as decimal value, e.g. "640x480" */
            width = strtol(optarg, &s, 10);
            height = strtol(s + 1, NULL, 10);
            break;

        /* f, fps */
        case 6:
        case 7:
            DBG("case 6,7\n");
            fps = atoi(optarg);
            break;

        /* y, yuv */
        case 8:
        case 9:
            DBG("case 8,9\n");
            format = V4L2_PIX_FMT_YUYV;
            break;

        /* q, quality (implies YUV capture, frames get JPEG-compressed here) */
        case 10:
        case 11:
            DBG("case 10,11\n");
            format = V4L2_PIX_FMT_YUYV;
            gquality = MIN(MAX(atoi(optarg), 0), 100);
            break;

        /* m, minimum_size */
        case 12:
        case 13:
            DBG("case 12,13\n");
            minimum_size = MAX(atoi(optarg), 0);
            break;

        /* n, no_dynctrl */
        case 14:
        case 15:
            DBG("case 14,15\n");
            dynctrls = 0;
            break;

        /* l, led */
        case 16:
        case 17:
            DBG("case 16,17\n");
            if (strcmp("on", optarg) == 0) {
                led = IN_CMD_LED_ON;
            } else if (strcmp("off", optarg) == 0) {
                led = IN_CMD_LED_OFF;
            } else if (strcmp("auto", optarg) == 0) {
                led = IN_CMD_LED_AUTO;
            } else if (strcmp("blink", optarg) == 0) {
                led = IN_CMD_LED_BLINK;
            }
            break;

        default:
            DBG("default case\n");
            help();
            return 1;
        }
    }

    /* keep a pointer to the global variables */
    pglobal = param->global;

    /* allocate webcam datastructure */
    videoIn = malloc(sizeof(struct vdIn));
    if (videoIn == NULL) {
        IPRINT("not enough memory for videoIn\n");
        exit(EXIT_FAILURE);
    }
    memset(videoIn, 0, sizeof(struct vdIn));

    /* display the parsed values */
    IPRINT("Using V4L2 device.: %s\n", dev);
    IPRINT("Desired Resolution: %i x %i\n", width, height);
    IPRINT("Frames Per Second.: %i\n", fps);
    IPRINT("Format............: %s\n", (format == V4L2_PIX_FMT_YUYV) ? "YUV" : "MJPEG");
    if (format == V4L2_PIX_FMT_YUYV)
        IPRINT("JPEG Quality......: %d\n", gquality);

    /* open video device and prepare data structure */
    if (init_videoIn(videoIn, dev, width, height, fps, format, 1) < 0) {
        IPRINT("init_VideoIn failed\n");
        closelog();
        exit(EXIT_FAILURE);
    }

    /*
     * recent linux-uvc driver (revision > ~#125) requires to use dynctrls
     * for pan/tilt/focus/...
     * dynctrls must get initialized
     */
    if (dynctrls)
        initDynCtrls(videoIn->fd);

    /*
     * switch the LED according to the command line parameters (if any)
     */
    input_cmd(led, 0);

    return 0;
}
int main(int argc, char **argv) { if (!XInitThreads()) { printf("Xlib not thread safe\n"); exit(1); } //const SDL_VideoInfo *info; char driver[128]; SDL_Surface *pscreen; //SDL_Overlay *overlay; //SDL_Rect drect, dclip; SDL_Event sdlevent; SDL_Thread *mythread; SDL_mutex *affmutex; int status; Uint32 currtime; Uint32 lasttime; unsigned char *p = NULL; int hwaccel = 0; const char *videodevice = NULL; const char *mode = NULL; int format = V4L2_PIX_FMT_MJPEG; //int sdl_format = SDL_YUY2_OVERLAY; int i; int grabmethod = 1; int width = 640; int height = 480; float fps = 30.0; // Requested frame rate float frmrate = 0.0; // Measured frame rate char *avifilename = NULL; int queryformats = 0; int querycontrols = 0; int readconfigfile = 0; char *separateur; char *sizestring = NULL; char *fpsstring = NULL; int enableRawStreamCapture = 0; int enableRawFrameCapture = 0; printf("luvcview %s\n\n", version); for (i = 1; i < argc; i++) { /* skip bad arguments */ if (argv[i] == NULL || *argv[i] == 0 || *argv[i] != '-') { continue; } if (strcmp(argv[i], "-d") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -d, aborting.\n"); exit(1); } videodevice = strdup(argv[i + 1]); } if (strcmp(argv[i], "-g") == 0) { /* Ask for read instead default mmap */ grabmethod = 0; } if (strcmp(argv[i], "-w") == 0) { /* disable hw acceleration */ hwaccel = 1; } if (strcmp(argv[i], "-f") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -f, aborting.\n"); exit(1); } mode = argv[i + 1]; if (strcasecmp(mode, "yuv") == 0 || strcasecmp(mode, "YUYV") == 0) { format = V4L2_PIX_FMT_YUYV; } else if (strcasecmp(mode, "uyvy") == 0 || strcasecmp(mode, "UYVY") == 0) { format = V4L2_PIX_FMT_UYVY; //sdl_format = SDL_UYVY_OVERLAY; } else if (strcasecmp(mode, "jpg") == 0 || strcasecmp(mode, "MJPG") == 0) { format = V4L2_PIX_FMT_MJPEG; } else { printf("Unknown format specified. 
Aborting.\n"); exit(1); } } if (strcmp(argv[i], "-s") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -s, aborting.\n"); exit(1); } sizestring = strdup(argv[i + 1]); width = strtoul(sizestring, &separateur, 10); if (*separateur != 'x') { printf("Error in size use -s widthxheight\n"); exit(1); } else { ++separateur; height = strtoul(separateur, &separateur, 10); if (*separateur != 0) printf("hmm.. dont like that!! trying this height\n"); } } if (strcmp(argv[i], "-i") == 0) { if (i + 1 >= argc) { printf("No parameter specified with -i, aborting.\n"); exit(1); } fpsstring = argv[i + 1]; fps = strtof(fpsstring, &separateur); if(*separateur != '\0') { printf("Invalid frame rate '%s' specified with -i. " "Only numbers are supported. Aborting.\n", fpsstring); exit(1); } } if (strcmp(argv[i], "-S") == 0) { /* Enable raw stream capture from the start */ enableRawStreamCapture = 1; } if (strcmp(argv[i], "-c") == 0) { /* Enable raw frame capture for the first frame */ enableRawFrameCapture = 1; } if (strcmp(argv[i], "-C") == 0) { /* Enable raw frame stream capture from the start*/ enableRawFrameCapture = 2; } if (strcmp(argv[i], "-o") == 0) { /* set the avi filename */ if (i + 1 >= argc) { printf("No parameter specified with -o, aborting.\n"); exit(1); } avifilename = strdup(argv[i + 1]); } if (strcmp(argv[i], "-L") == 0) { /* query list of valid video formats */ queryformats = 1; } if (strcmp(argv[i], "-l") == 0) { /* query list of valid video formats */ querycontrols = 1; } if (strcmp(argv[i], "-r") == 0) { /* query list of valid video formats */ readconfigfile = 1; } if (strcmp(argv[i], "-h") == 0 || strcmp(argv[i], "--help") == 0) { printf("usage: uvcview [-h -d -g -f -s -i -c -o -C -S -L -l -r]\n"); printf("-h print this message\n"); printf("-d /dev/videoX use videoX device\n"); printf("-g use read method for grab instead mmap\n"); printf("-w disable SDL hardware accel.\n"); printf("-f choose video format (YUYV/yuv, UYVY/uyvy and MJPG/jpg are valid, MJPG 
is default)\n"); printf("-i fps use specified frame rate\n"); printf("-s widthxheight use specified input size\n"); printf("-c enable raw frame capturing for the first frame\n"); printf("-C enable raw frame stream capturing from the start\n"); printf("-S enable raw stream capturing from the start\n"); printf("-o avifile create avifile, default video.avi\n"); printf("-L query valid video formats\n"); printf("-l query valid controls and settings\n"); printf("-r read and set control settings from luvcview.cfg (save/restore with F1/F2)\n"); exit(0); } } /************* Test SDL capabilities ************/ if (SDL_Init(SDL_INIT_VIDEO) < 0) { fprintf(stderr, "Couldn't initialize SDL: %s\n", SDL_GetError()); exit(1); } /* For this version, we'll be save and disable hardware acceleration */ if(hwaccel) if ( ! getenv("SDL_VIDEO_YUV_HWACCEL") ) { putenv("SDL_VIDEO_YUV_HWACCEL=0"); } /* printf("SDL information:\n"); if (SDL_VideoDriverName(driver, sizeof(driver))) { printf(" Video driver: %s\n", driver); } /* info = SDL_GetVideoInfo(); if (info->wm_available) { printf(" A window manager is available\n"); } if (info->hw_available) { printf(" Hardware surfaces are available (%dk video memory)\n", info->video_mem); SDL_VIDEO_Flags |= SDL_HWSURFACE; } if (info->blit_hw) { printf(" Copy blits between hardware surfaces are accelerated\n"); SDL_VIDEO_Flags |= SDL_ASYNCBLIT; } if (info->blit_hw_CC) { printf (" Colorkey blits between hardware surfaces are accelerated\n"); } if (info->blit_hw_A) { printf(" Alpha blits between hardware surfaces are accelerated\n"); } if (info->blit_sw) { printf (" Copy blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_sw_CC) { printf (" Colorkey blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_sw_A) { printf (" Alpha blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_fill) { printf(" Color fills on hardware surfaces are accelerated\n"); } if 
(!(SDL_VIDEO_Flags & SDL_HWSURFACE)) SDL_VIDEO_Flags |= SDL_SWSURFACE; */ if (videodevice == NULL || *videodevice == 0) { videodevice = "/dev/video0"; } if (avifilename == NULL || *avifilename == 0) { avifilename = "video.avi"; } char* adev[2] = { "/dev/video0", "/dev/video1" }; //tCam* gM.apCam[2]; gM.aCam[0].Image_W = width; gM.aCam[0].Image_H = height; gM.Cam_N = 2; for (int i = 0; i < gM.Cam_N; ++i) { //gM.apCam[i] = malloc(sizeof *gM.apCam[i]); memset(&gM.aCam[i], 0, sizeof gM.aCam[i]); tCam* pcam = &gM.aCam[i]; pcam->Idx = i; pcam->Image_W = width; pcam->Image_H = height; pcam->UVC = (struct vdIn *) calloc(1, sizeof(struct vdIn)); if ( queryformats ) { /* if we're supposed to list the video formats, do that now and go out */ check_videoIn(pcam->UVC, adev[i]); free(pcam->UVC); SDL_Quit(); exit(1); } if (init_videoIn (pcam->UVC, adev[i], width, height, fps, format, grabmethod, avifilename) < 0) exit(1); /* if we're supposed to list the controls, do that now */ if ( querycontrols ) enum_controls(pcam->UVC->fd); /* if we're supposed to read the control settings from a configfile, do that now */ if ( readconfigfile ) load_controls(pcam->UVC->fd); pcam->SDL_Win = SDL_CreateWindow(title_act[A_VIDEO].title, SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, pcam->UVC->width/2, pcam->UVC->height/2, SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL ); if (!pcam->SDL_Win) abort(); pcam->SDL_Surf = SDL_GetWindowSurface(pcam->SDL_Win); if (!pcam->SDL_Surf) abort(); } pcam = &gM.aCam[0]; //gM.pScreen = pscreen = gM.aCam[0]->SDL_Surf; //gM.pOverlay = overlay = SDL_CreateYUVOverlay(videoIn->width, videoIn->height, sdl_format, pscreen); //p = (unsigned char *) overlay->pixels[0]; /* if (enableRawStreamCapture) { videoIn->captureFile = fopen("stream.raw", "wb"); if(videoIn->captureFile == NULL) { perror("Unable to open file for raw stream capturing"); } else { printf("Starting raw stream capturing to stream.raw ...\n"); } } if (enableRawFrameCapture) videoIn->rawFrameCapture = 
enableRawFrameCapture;*/ initLut(); //SDL_WM_SetCaption(title_act[A_VIDEO].title, NULL); lasttime = SDL_GetTicks(); // creatButt(videoIn->width, 32); // SDL_LockYUVOverlay(overlay); // memcpy(p + (videoIn->width * (videoIn->height) * 2), YUYVbutt, videoIn->width * 64); // SDL_UnlockYUVOverlay(overlay); /* initialize thread data */ //ptdata.ptscreen = &pscreen; //ptdata.ptvideoIn = videoIn; ptdata.ptsdlevent = &sdlevent; //ptdata.drect = &drect; affmutex = SDL_CreateMutex(); ptdata.affmutex = affmutex; // mythread = SDL_CreateThread(eventThread, (void *) &ptdata); muhaha_Init (); gM.pDst = p; muhaha_Loop (); muhaha_DeInit (); SDL_WaitThread(mythread, &status); SDL_DestroyMutex(affmutex); /* if avifile is defined, we made a video: compute the exact fps and set it in the video */ //close_v4l2(videoIn); //free(videoIn); destroyButt(); freeLut(); printf("Cleanup done. Exiting ...\n"); SDL_Quit(); }
/****************************************************************************** Description.: This function ializes the plugin. It parses the commandline- parameter and stores the default and parsed values in the appropriate variables. Input Value.: param contains among others the command-line string Return Value: 0 if everything is fine 1 if "--help" was triggered, in this case the calling programm should stop running and leave. ******************************************************************************/ int input_init(const char * dev, context_t *context) { input_parameter *param = &context->input->param; context->stop = 0; context->cleaned = 0; input_t *input = context->input; char *s; int gquality = 80, minimum_size = 10; int width = 640, height = 480, fps = 5, fps_div = 1, format = V4L2_PIX_FMT_MJPEG, i; /* parse the parameters */ reset_getopt(); while (1) { int option_index = 0, c = 0; static struct option long_options[] = { { "r", required_argument, 0, 0 }, { "resolution", required_argument, 0, 0 }, { "f", required_argument, 0, 0 }, { "fps", required_argument, 0, 0 }, { "y", no_argument, 0, 0 }, { "yuv", no_argument, 0, 0 }, { "q", required_argument, 0, 0 }, { "quality", required_argument, 0, 0 }, { "m", required_argument, 0, 0 }, { "minimum_size", required_argument, 0, 0 }, { "s", required_argument, 0, 0 }, { "fpsdiv", required_argument, 0, 0 }, { 0, 0, 0, 0 } }; /* parsing all parameters according to the list above is sufficent */ c = getopt_long_only(param->argc, param->argv, "", long_options, &option_index); /* no more options to parse */ if (c == -1) break; /* dispatch the given options */ switch (option_index) { /* r, resolution */ case 0: case 1: width = -1; height = -1; /* try to find the resolution in lookup table "resolutions" */ for (i = 0; i < LENGTH_OF(resolutions); i++) { if (strcmp(resolutions[i].string, optarg) == 0) { width = resolutions[i].width; height = resolutions[i].height; } break; } /* done if width and height were set */ if (width 
!= -1 && height != -1) break; /* parse value as decimal value */ width = strtol(optarg, &s, 10); height = strtol(s + 1, NULL, 10); break; /* f, fps */ case 2: case 3: fps = atoi(optarg); break; /* y, yuv */ case 4: case 5: format = V4L2_PIX_FMT_YUYV; break; /* q, quality */ case 6: case 7: format = V4L2_PIX_FMT_YUYV; gquality = MIN(MAX(atoi(optarg), 0), 100); break; /* m, minimum_size */ case 8: case 9: minimum_size = MAX(atoi(optarg), 0); break; case 10: case 11: fps_div = atoi(optarg); if (fps_div < 1) fps_div = 1; break; default: return -2; } } /* allocate webcam datastructure */ context->videoIn = (vdIn*) malloc(sizeof(struct vdIn)); if (context->videoIn == NULL) { PTRACE(0, "not enough memory for videoIn"); return -2; } memset(context->videoIn, 0, sizeof(struct vdIn)); input->frame_interval = 1000 * fps_div / fps; /* display the parsed values */ PTRACE(2, "Using V4L2 device.: "<< dev); PTRACE(2, "Desired Resolution: "<<width<<" x "<<height); PTRACE(2, "Frames Per Second.: "<<fps<<"/"<<fps_div); PTRACE(2, "Format............: "<< ((format == V4L2_PIX_FMT_YUYV) ? "YUV" : "MJPEG")); if (format == V4L2_PIX_FMT_YUYV) PTRACE(2, "JPEG Quality......: "<< gquality); context->minimum_size = minimum_size; /* open video device and prepare data structure */ if (init_videoIn(context->videoIn, dev, width, height, fps, fps_div, format, 1, input) < 0) { PTRACE(0, "init_VideoIn failed"); return -3; } return 0; }
int luvcview() { const SDL_VideoInfo *info; char driver[128]; SDL_Surface *pscreen; SDL_Surface *pparent; SDL_Overlay *overlay; SDL_Rect drect; int status; Uint32 currtime; Uint32 lasttime; unsigned char *p = NULL; int hwaccel = 0; const char *videodevice = NULL; const char *mode = NULL; int format = V4L2_PIX_FMT_MJPEG; int i; int grabmethod = 1; int width = 320; int height = 240; int fps = 15; unsigned char frmrate = 0; char *avifilename = NULL; int queryformats = 0; int querycontrols = 0; int readconfigfile = 0; char *separateur; char *sizestring = NULL; char *fpsstring = NULL; int enableRawStreamCapture = 0; int enableRawFrameCapture = 0; format = V4L2_PIX_FMT_YUYV; videodevice = "/dev/video0"; printf("luvcview version %s \n", version); if (SDL_VideoDriverName(driver, sizeof(driver))) { printf("Video driver: %s\n", driver); } info = SDL_GetVideoInfo(); if (info->wm_available) { printf("A window manager is available\n"); } if (info->hw_available) { printf("Hardware surfaces are available (%dK video memory)\n", info->video_mem); SDL_VIDEO_Flags |= SDL_HWSURFACE; } if (info->blit_hw) { printf("Copy blits between hardware surfaces are accelerated\n"); SDL_VIDEO_Flags |= SDL_ASYNCBLIT; } if (info->blit_hw_CC) { printf ("Colorkey blits between hardware surfaces are accelerated\n"); } if (info->blit_hw_A) { printf("Alpha blits between hardware surfaces are accelerated\n"); } if (info->blit_sw) { printf ("Copy blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_sw_CC) { printf ("Colorkey blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_sw_A) { printf ("Alpha blits from software surfaces to hardware surfaces are accelerated\n"); } if (info->blit_fill) { printf("Color fills on hardware surfaces are accelerated\n"); } if (!(SDL_VIDEO_Flags & SDL_HWSURFACE)) SDL_VIDEO_Flags |= SDL_SWSURFACE; if (avifilename == NULL || *avifilename == 0) { avifilename = "video.avi"; } videoIn = (struct vdIn *) 
calloc(1, sizeof(struct vdIn)); if (init_videoIn (videoIn, (char *) videodevice, width, height, fps, format, grabmethod, avifilename) < 0) exit(1); pscreen = SDL_GetVideoSurface(); overlay = SDL_CreateYUVOverlay(videoIn->width, videoIn->height , SDL_YUY2_OVERLAY, pscreen); p = (unsigned char *) overlay->pixels[0]; drect.x = 0; drect.y = 0; drect.w = pscreen->w; drect.h = pscreen->h; initLut(); lasttime = SDL_GetTicks(); int loop = 0; /* main big loop */ while (videoIn->signalquit) { currtime = SDL_GetTicks(); if (currtime - lasttime > 0) { frmrate = 1000/(currtime - lasttime); } lasttime = currtime; if (uvcGrab(videoIn) < 0) { printf("Error grabbing \n"); break; } SDL_LockYUVOverlay(overlay); memcpy(p, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2); SDL_UnlockYUVOverlay(overlay); SDL_DisplayYUVOverlay(overlay, &drect); if (loop > 35) { printf( "loop: %d\n", loop); break; } ++loop; SDL_Delay(10); } close_v4l2(videoIn); free(videoIn); freeLut(); printf(" Clean Up done Quit \n"); }