/*
 * init_buf() - allocate BUF_COUNT data_buf nodes onto the global qlist
 * and run per-method buffer setup for the configured I/O mode.
 *
 * @buffer_size: byte size handed to init_userp() for user-pointer I/O.
 *
 * Returns 0 on success, -ENOMEM if a node allocation fails, or the
 * result of init_userp() for IO_METHOD_USERPTR.
 *
 * Fixes vs. original: OOM diagnostic goes to stderr instead of stdout;
 * `sizeof *p` instead of `sizeof(struct data_buf)`; explicit default
 * case in the io switch.
 */
int init_buf(unsigned int buffer_size)
{
    int i;
    struct data_buf *p;
    int ret = 0;

    for (i = 0; i < (int)(BUF_COUNT); i++) {
        p = calloc(1, sizeof *p);
        if (!p) {
            /* NOTE(review): nodes already queued on qlist are left in
             * place here — confirm the caller tears qlist down on
             * failure, otherwise they leak. */
            fprintf(stderr, "Out of memory\n");
            return -ENOMEM;
        }
        list_add_tail(&(p->list), &qlist);
    }

    switch (io) {
    case IO_METHOD_MMAP:
        init_mmap();
        break;
    case IO_METHOD_USERPTR:
        ret = init_userp(buffer_size);
        break;
    default:
        /* Other methods (e.g. read i/o) need no extra setup here. */
        break;
    }
    return ret;
}
int main( int argc, char **argv ) { int dispDev, csiDev; int chunkMem; chunk_block_t priBlk0, priBlk1; gp_disp_res_t panelRes; gp_bitmap_t priBitmap; UINT16 *data; fd_set fds; struct timeval tv; int r, bufnum; struct v4l2_buffer buf; struct v4l2_format fmt; struct v4l2_queryctrl qc; struct v4l2_input in; spRect_t dstRect={0,0,0,0}; spRect_t srcRect={0,0,0,0}; spBitmap_t dst; spBitmap_t src; /* Opening the device dispDev */ dispDev = open("/dev/disp0",O_RDWR); printf("dispDev = %d\n", dispDev); ioctl(dispDev, DISPIO_SET_INITIAL, 0); ioctl(dispDev, DISPIO_GET_PANEL_RESOLUTION, &panelRes); dstRect.width = panelRes.width; dstRect.height=panelRes.height; srcRect.width=NTSC_WIDTH; srcRect.height=NTSC_HEIGHT; /* Opening /dev/chunkmem */ chunkMem = open("/dev/chunkmem", O_RDWR); /* Allocate primary frame buffer */ priBlk0.size = (NTSC_WIDTH) * (NTSC_HEIGHT) * getBpp(SP_BITMAP_YCbYCr); priBlk1.size = (panelRes.width) * (panelRes.height) *getBpp(SP_BITMAP_YCbYCr); ioctl(chunkMem, CHUNK_MEM_ALLOC, (unsigned long)&priBlk0); ioctl(chunkMem, CHUNK_MEM_ALLOC, (unsigned long)&priBlk1); dst.width = panelRes.width; dst.height = panelRes.height; dst.bpl = panelRes.width * getBpp(SP_BITMAP_YCbYCr); dst.pData = priBlk1.addr; dst.type = SP_BITMAP_YCbYCr; src.width = NTSC_WIDTH; src.height =NTSC_HEIGHT; src.bpl = NTSC_WIDTH *getBpp(SP_BITMAP_YCbYCr); src.pData = priBlk0.addr; src.type = SP_BITMAP_YCbYCr; //gp2dScale(&dst, dstRect, &src, srcRect); /* Set primary layer bitmap */ priBitmap.width = panelRes.width; priBitmap.height = panelRes.height; priBitmap.bpl = panelRes.width*getBpp(SP_BITMAP_YCbYCr); priBitmap.type = SP_BITMAP_YCbYCr; priBitmap.pData = priBlk1.addr; ioctl(dispDev, DISPIO_SET_PRI_BITMAP, &priBitmap); /* Fill primary bitmap data */ #if 0 data = (UINT16*) priBlk0.addr; init_fb(data, 0x0000, 0x001f, 0x0513, 0xffff); data = (UINT16*) priBlk1.addr; init_fb(data, 0xffff, 0x0513, 0x001f, 0x0000); #endif ioctl(dispDev, DISPIO_SET_PRI_ENABLE, 1); ioctl(dispDev, DISPIO_SET_UPDATE, 
0); csiDev = open("/dev/csi0",O_RDWR); in.index = 0; ioctl(csiDev, VIDIOC_ENUMINPUT, &in); printf("name=%s\n", in.name); CLEAR(fmt); fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; fmt.fmt.pix.width = NTSC_WIDTH; fmt.fmt.pix.height = NTSC_HEIGHT; ioctl(csiDev, VIDIOC_TRY_FMT, &fmt); CLEAR(qc); qc.id = V4L2_CID_BRIGHTNESS; ioctl(csiDev, VIDIOC_QUERYCTRL, &qc); printf("max=%d\n", qc.maximum); init_userp(csiDev, 1); CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_USERPTR; buf.index = 0; buf.m.userptr = (unsigned long)priBlk0.addr; ioctl(csiDev, VIDIOC_QBUF, &buf); // buf.index = 1; // buf.m.userptr = (unsigned long)priBlk1.addr; // ioctl(csiDev, VIDIOC_QBUF, &buf); // buf.index = 2; // buf.m.userptr = (unsigned long)priBlk2.addr; // ioctl(csiDev, VIDIOC_QBUF, &buf); FD_ZERO (&fds); FD_SET (csiDev, &fds); tv.tv_sec = 2; tv.tv_usec = 0; ioctl(csiDev, VIDIOC_STREAMON, NULL); while(1) { r = select(csiDev+1, &fds, NULL, NULL, NULL);//&tv); gp2dScale(&dst, dstRect, &src, srcRect); CLEAR (buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_USERPTR; ioctl(csiDev, VIDIOC_DQBUF, &buf); ioctl(csiDev, VIDIOC_QBUF, &buf); /* ioctl(csiDev, 3, &bufnum); if( bufnum==0 ) priBitmap.pData = priBlk0.addr; else if(bufnum==1) priBitmap.pData = priBlk1.addr; else priBitmap.pData = priBlk2.addr; ioctl(dispDev, DISPIO_SET_PRI_BITMAP, &priBitmap); ioctl(dispDev, DISPIO_SET_PRI_ENABLE, 1); ioctl(dispDev, DISPIO_SET_UPDATE, 0); ioctl(dispDev, DISPIO_WAIT_FRAME_END, 0);*/ /* r = select(csiDev+1, &fds, NULL, NULL, &tv); priBitmap.pData = priBlk1.addr; ioctl(dispDev, DISPIO_SET_PRI_BITMAP, &priBitmap); ioctl(dispDev, DISPIO_SET_PRI_ENABLE, 1); ioctl(dispDev, DISPIO_SET_UPDATE, 0); r = select(csiDev+1, &fds, NULL, NULL, &tv); priBitmap.pData = priBlk2.addr; ioctl(dispDev, DISPIO_SET_PRI_BITMAP, &priBitmap); ioctl(dispDev, DISPIO_SET_PRI_ENABLE, 1); ioctl(dispDev, DISPIO_SET_UPDATE, 0);*/ } // close(dispDev); // ioctl(chunkMem, CHUNK_MEM_FREE, (unsigned 
long)&priBlk); // close(chunkMem); return 0; /* int i, ret; int fdcsi; struct v4l2_queryctrl qc; printf("csi test!\n"); fdcsi = open("/dev/csi0", O_RDWR); // init_userp(fdcsi, 0x2000); close(fdcsi); return 0;*/ }
static void init_device(int image_width, int image_height, int framerate) { struct v4l2_capability cap; struct v4l2_cropcap cropcap; struct v4l2_crop crop; struct v4l2_format fmt; unsigned int min; if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap)) { if (EINVAL == errno) { fprintf(stderr, "%s is no V4L2 device\n", camera_dev); exit(EXIT_FAILURE); } else { errno_exit("VIDIOC_QUERYCAP"); } } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf(stderr, "%s is no video capture device\n", camera_dev); exit(EXIT_FAILURE); } switch (io) { case IO_METHOD_READ: if (!(cap.capabilities & V4L2_CAP_READWRITE)) { fprintf(stderr, "%s does not support read i/o\n", camera_dev); exit(EXIT_FAILURE); } break; case IO_METHOD_MMAP: case IO_METHOD_USERPTR: if (!(cap.capabilities & V4L2_CAP_STREAMING)) { fprintf(stderr, "%s does not support streaming i/o\n", camera_dev); exit(EXIT_FAILURE); } break; } /* Select video input, video standard and tune here. */ CLEAR(cropcap); cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (0 == xioctl(fd, VIDIOC_CROPCAP, &cropcap)) { crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; crop.c = cropcap.defrect; /* reset to default */ if (-1 == xioctl(fd, VIDIOC_S_CROP, &crop)) { switch (errno) { case EINVAL: /* Cropping not supported. */ break; default: /* Errors ignored. */ break; } } } else { /* Errors ignored. */ } CLEAR(fmt); // fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // fmt.fmt.pix.width = 640; // fmt.fmt.pix.height = 480; // fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; // fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = image_width; fmt.fmt.pix.height = image_height; fmt.fmt.pix.pixelformat = pixelformat; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt)) errno_exit("VIDIOC_S_FMT"); /* Note VIDIOC_S_FMT may change width and height. */ /* Buggy driver paranoia. 
*/ min = fmt.fmt.pix.width * 2; if (fmt.fmt.pix.bytesperline < min) fmt.fmt.pix.bytesperline = min; min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; if (fmt.fmt.pix.sizeimage < min) fmt.fmt.pix.sizeimage = min; image_width = fmt.fmt.pix.width; image_height = fmt.fmt.pix.height; struct v4l2_streamparm stream_params; memset(&stream_params, 0, sizeof(stream_params)); stream_params.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (xioctl(fd, VIDIOC_G_PARM, &stream_params) < 0) errno_exit("Couldn't query v4l fps!\n"); //fprintf(stderr, "Capability flag: 0x%x", stream_params.parm.capture.capability); stream_params.parm.capture.timeperframe.numerator = 1; stream_params.parm.capture.timeperframe.denominator = framerate; if (xioctl(fd, VIDIOC_S_PARM, &stream_params) < 0) perror("Couldn't set camera framerate\n"); /*else fprintf(stderr, "Set framerate to be %i", framerate);*/ switch (io) { case IO_METHOD_READ: init_read(fmt.fmt.pix.sizeimage); break; case IO_METHOD_MMAP: init_mmap(); break; case IO_METHOD_USERPTR: init_userp(fmt.fmt.pix.sizeimage); break; } }
/*
 * init_device() - validate the V4L2 capture device behind @handle and
 * program image format, frame rate and capture buffers.
 *
 * Reads handle->fd, handle->dev_name, handle->io and handle->imgparam
 * (width/height/pix_fmt/fps). Exits the process on any fatal error.
 * After buffer setup, all capture buffers are zero-filled so the first
 * frames start black instead of containing stale memory.
 *
 * Fix: the original assigned stream.parm.capture.readbuffers = 4 a
 * second time *after* VIDIOC_S_PARM had already been issued — a dead
 * store, now removed.
 */
static void init_device (V4L2WHandler_t * handle)
{
    struct v4l2_capability cap;
    struct v4l2_cropcap cropcap;
    struct v4l2_crop crop;
    struct v4l2_format fmt;
    struct v4l2_streamparm stream;
    unsigned int min;

    if (-1 == xioctl (handle->fd, VIDIOC_QUERYCAP, &cap)) {
        if (EINVAL == errno) {
            fprintf (stderr, "%s is no V4L2 device\n", handle->dev_name);
            exit (EXIT_FAILURE);
        } else {
            errno_exit ("VIDIOC_QUERYCAP");
        }
    }
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        fprintf (stderr, "%s is no video capture device\n", handle->dev_name);
        exit (EXIT_FAILURE);
    }
    switch (handle->io) {
    case IO_METHOD_READ:
        if (!(cap.capabilities & V4L2_CAP_READWRITE)) {
            fprintf (stderr, "%s does not support read i/o\n", handle->dev_name);
            exit (EXIT_FAILURE);
        }
        break;
    case IO_METHOD_MMAP:
    case IO_METHOD_USERPTR:
        if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
            fprintf (stderr, "%s does not support streaming i/o\n", handle->dev_name);
            exit (EXIT_FAILURE);
        }
        break;
    }

    /* Select video input, video standard and tune here. */
    CLEAR (cropcap);
    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (0 == xioctl (handle->fd, VIDIOC_CROPCAP, &cropcap)) {
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect; /* reset to default */
        if (-1 == xioctl (handle->fd, VIDIOC_S_CROP, &crop)) {
            switch (errno) {
            case EINVAL:
                /* Cropping not supported. */
                break;
            default:
                /* Errors ignored. */
                break;
            }
        }
    } else {
        /* Errors ignored. */
    }

    CLEAR (fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = handle->imgparam.width;
    fmt.fmt.pix.height = handle->imgparam.height;
    fmt.fmt.pix.pixelformat = handle->imgparam.pix_fmt;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    if (-1 == xioctl (handle->fd, VIDIOC_S_FMT, &fmt))
        errno_exit ("VIDIOC_S_FMT");

    /* Frame rate: 1/fps per frame, 4 driver read buffers. */
    CLEAR (stream);
    stream.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    stream.parm.capture.timeperframe.numerator = 1;
    stream.parm.capture.timeperframe.denominator = handle->imgparam.fps;
    stream.parm.capture.readbuffers = 4;
    if (-1 == xioctl (handle->fd, VIDIOC_S_PARM, &stream))
        errno_exit ("VIDIOC_S_PARM");

    /* Note VIDIOC_S_FMT may change width and height. */
    /* Buggy driver paranoia: enforce sane bytesperline/sizeimage. */
    min = fmt.fmt.pix.width * 2;
    if (fmt.fmt.pix.bytesperline < min)
        fmt.fmt.pix.bytesperline = min;
    min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
    if (fmt.fmt.pix.sizeimage < min)
        fmt.fmt.pix.sizeimage = min;

    switch (handle->io) {
    case IO_METHOD_READ:
        init_read (handle, fmt.fmt.pix.sizeimage);
        break;
    case IO_METHOD_MMAP:
        init_mmap (handle);
        break;
    case IO_METHOD_USERPTR:
        init_userp (handle, fmt.fmt.pix.sizeimage);
        break;
    }

    /* Start from clean (zeroed) frames. */
    int ii;
    for (ii = 0; ii < handle->n_buffers; ii++) {
        memset(handle->buffers[ii].start, 0, handle->buffers[ii].length);
    }
}
static void init_device(void) { struct v4l2_capability cap; struct v4l2_crop crop; struct v4l2_format fmt; unsigned int min; if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap)) { if (EINVAL == errno) { fprintf(stderr, "%s is no V4L2 device\n", dev_name); exit(EXIT_FAILURE); } else { errno_exit("VIDIOC_QUERYCAP"); } } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf(stderr, "%s is no video capture device\n", dev_name); exit(EXIT_FAILURE); } switch (io) { case IO_METHOD_READ: if (!(cap.capabilities & V4L2_CAP_READWRITE)) { fprintf(stderr, "%s does not support read i/o\n", dev_name); exit(EXIT_FAILURE); } break; case IO_METHOD_MMAP: case IO_METHOD_USERPTR: if (!(cap.capabilities & V4L2_CAP_STREAMING)) { fprintf(stderr, "%s does not support streaming i/o\n", dev_name); exit(EXIT_FAILURE); } break; } CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = WIDTH; fmt.fmt.pix.height = HEIGHT; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt)) errno_exit("VIDIOC_S_FMT"); /* Note VIDIOC_S_FMT may change width and height. */ image_size = fmt.fmt.pix.width * fmt.fmt.pix.height * 2; /* Buggy driver paranoia. */ min = fmt.fmt.pix.width * 2; if (fmt.fmt.pix.bytesperline < min) fmt.fmt.pix.bytesperline = min; min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; if (fmt.fmt.pix.sizeimage < min) fmt.fmt.pix.sizeimage = min; switch (io) { case IO_METHOD_READ: init_read(fmt.fmt.pix.sizeimage); break; case IO_METHOD_MMAP: init_mmap(); break; case IO_METHOD_USERPTR: printf("capture: fmt.fmt.pix.sizeimage = %d\n", fmt.fmt.pix.sizeimage); init_userp(fmt.fmt.pix.sizeimage); break; } }
void video_driver::init_device (void) { struct v4l2_capability cap; struct v4l2_cropcap cropcap; struct v4l2_crop crop; struct v4l2_format fmt; if (-1 == xioctl (fd, VIDIOC_QUERYCAP, &cap)) { if (EINVAL == errno) { fprintf (stderr, "%s is no V4L2 device\n", dev_name); exit (EXIT_FAILURE); } else { errno_exit ("VIDIOC_QUERYCAP"); } } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf (stderr, "%s is no video capture device\n", dev_name); exit (EXIT_FAILURE); } switch (io) { case IO_METHOD_READ: if (!(cap.capabilities & V4L2_CAP_READWRITE)) { fprintf (stderr, "%s does not support read i/o\n", dev_name); exit (EXIT_FAILURE); } break; case IO_METHOD_MMAP: case IO_METHOD_USERPTR: if (!(cap.capabilities & V4L2_CAP_STREAMING)) { fprintf (stderr, "%s does not support streaming i/o\n", dev_name); exit (EXIT_FAILURE); } break; } cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (-1 == xioctl (fd, VIDIOC_CROPCAP, &cropcap)) { } crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; crop.c = cropcap.defrect; if (-1 == xioctl (fd, VIDIOC_S_CROP, &crop)) { switch (errno) { case EINVAL: break; default: break; } } CLEAR (fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = width; fmt.fmt.pix.height = height; fmt.fmt.pix.pixelformat = pixel_format; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; if (-1 == xioctl (fd, VIDIOC_S_FMT, &fmt)) errno_exit ("VIDIOC_S_FMT"); /*struct v4l2_frmivalenum frameinterval; width = fmt.fmt.pix.width; height = fmt.fmt.pix.height; frameinterval.index =0; frameinterval.pixel_format = pixel_format; frameinterval.width = width; frameinterval.height = height; frameinterval.discrete.numerator=1; frameinterval.discrete.denominator=5; frameinterval.type = V4L2_FRMIVAL_TYPE_DISCRETE; if (-1 == xioctl(fd,VIDIOC_ENUM_FRAMEINTERVALS,&frameinterval)){ printf("Setting frame interval failed\n"); exit(0); } */ struct v4l2_streamparm sp; sp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; sp.parm.capture.timeperframe.denominator = 30; sp.parm.capture.timeperframe.numerator = 1; 
if (-1 == xioctl (fd, VIDIOC_S_PARM, &sp)) { printf ("Setting stream parameters failed\n"); exit (0); } switch (io) { case IO_METHOD_READ: init_read (fmt.fmt.pix.sizeimage); break; case IO_METHOD_MMAP: init_mmap (); break; case IO_METHOD_USERPTR: init_userp (fmt.fmt.pix.sizeimage); break; } }
static void init_device(int w, int h) { struct v4lconvert_data *v4lconvert_data; struct v4l2_format src_fmt; /* raw source format */ struct v4l2_capability cap; if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) { if (EINVAL == errno) { fprintf(stderr, "%s is no V4L2 device\n", dev_name); exit(EXIT_FAILURE); } else { errno_exit("VIDIOC_QUERYCAP"); } } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf(stderr, "%s is no video capture device\n", dev_name); exit(EXIT_FAILURE); } /* libv4l emulates read() on those v4l2 devices that do not support it, so this print is just instructional, it should work regardless */ printf("device capabilities\n\tread:\t%c\n\tstream:\t%c\n", (cap.capabilities & V4L2_CAP_READWRITE) ? 'Y' : 'N', (cap.capabilities & V4L2_CAP_STREAMING) ? 'Y' : 'N'); /* set our requested format to V4L2_PIX_FMT_RGB24 */ CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = w; fmt.fmt.pix.height = h; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; /* libv4l also converts mutiple supported formats to V4l2_PIX_FMT_BGR24 or V4l2_PIX_FMT_YUV420, which means the following call should *always* succeed However, we use the libv4lconvert library to print debugging information to tell us if libv4l will be doing the conversion internally*/ v4lconvert_data = v4lconvert_create(fd); if (v4lconvert_data == NULL) errno_exit("v4lconvert_create"); if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0) errno_exit("v4lconvert_try_format"); printf("\tpixfmt:\t%c%c%c%c (%dx%d)\n", src_fmt.fmt.pix.pixelformat & 0xff, (src_fmt.fmt.pix.pixelformat >> 8) & 0xff, (src_fmt.fmt.pix.pixelformat >> 16) & 0xff, (src_fmt.fmt.pix.pixelformat >> 24) & 0xff, src_fmt.fmt.pix.width, src_fmt.fmt.pix.height); printf("application\n\tconv:\t%c\n", v4lconvert_needs_conversion(v4lconvert_data, &src_fmt, &fmt) ? 
'Y' : 'N'); v4lconvert_destroy(v4lconvert_data); /* Actually set the pixfmt so that libv4l uses its conversion magic */ if (v4l2_ioctl(fd, VIDIOC_S_FMT, &fmt) < 0) errno_exit("VIDIOC_S_FMT"); printf("\tpixfmt:\t%c%c%c%c (%dx%d)\n", fmt.fmt.pix.pixelformat & 0xff, (fmt.fmt.pix.pixelformat >> 8) & 0xff, (fmt.fmt.pix.pixelformat >> 16) & 0xff, (fmt.fmt.pix.pixelformat >> 24) & 0xff, fmt.fmt.pix.width, fmt.fmt.pix.height); switch (io) { case IO_METHOD_READ: printf("\tio:\tio\n"); init_read(fmt.fmt.pix.sizeimage); break; case V4L2_MEMORY_MMAP: printf("\tio:\tmmap\n"); init_mmap(); break; case V4L2_MEMORY_USERPTR: printf("\tio:\tusrptr\n"); init_userp(fmt.fmt.pix.sizeimage); break; } }
static void init_device(void) { struct v4l2_capability cap; struct v4l2_cropcap cropcap; struct v4l2_crop crop; struct v4l2_format fmt; unsigned int min; if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap)) { if (EINVAL == errno) { fprintf(stderr, "videocapture: %s is no V4L2 device\n", dev_name); return; } else { return; } } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf(stderr, "videocapture: %s is no video capture device\n", dev_name); return; } switch (io) { case IO_METHOD_READ: if (!(cap.capabilities & V4L2_CAP_READWRITE)) { fprintf(stderr, "videocapture: %s does not support read i/o\n", dev_name); exit(EXIT_FAILURE); } break; case IO_METHOD_MMAP: case IO_METHOD_USERPTR: if (!(cap.capabilities & V4L2_CAP_STREAMING)) { fprintf(stderr, "videocapture: %s does not support streaming i/o\n", dev_name); exit(EXIT_FAILURE); } break; } CLEAR(cropcap); cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (0 == xioctl(fd, VIDIOC_CROPCAP, &cropcap)) { crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; crop.c = cropcap.defrect; /* reset to default */ if (-1 == xioctl(fd, VIDIOC_S_CROP, &crop)) { switch (errno) { case EINVAL: /* Cropping not supported. */ break; default: /* Errors ignored. */ break; } } } else { /* Errors ignored. */ } CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = WIDTH; fmt.fmt.pix.height = HEIGHT; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt)) { errno_exit("VIDIOC_S_FMT"); } /* Note VIDIOC_S_FMT may change width and height. */ /* Buggy driver paranoia. 
*/ min = fmt.fmt.pix.width * 2; if (fmt.fmt.pix.bytesperline < min) fmt.fmt.pix.bytesperline = min; min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; if (fmt.fmt.pix.sizeimage < min) fmt.fmt.pix.sizeimage = min; if (fmt.fmt.pix.width != WIDTH) WIDTH = fmt.fmt.pix.width; if (fmt.fmt.pix.height != HEIGHT) HEIGHT = fmt.fmt.pix.height; switch (io) { case IO_METHOD_READ: init_read(fmt.fmt.pix.sizeimage); break; case IO_METHOD_MMAP: init_mmap(); break; case IO_METHOD_USERPTR: init_userp(fmt.fmt.pix.sizeimage); break; } }
static void init_device(int w, int h) { struct v4l2_capability cap; int ret; int sizeimage; if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) { if (EINVAL == errno) { fprintf(stderr, "%s is no V4L2 device\n", dev_name); exit(EXIT_FAILURE); } else { errno_exit("VIDIOC_QUERYCAP"); } } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf(stderr, "%s is no video capture device\n", dev_name); exit(EXIT_FAILURE); } switch (io) { case IO_METHOD_READ: if (!(cap.capabilities & V4L2_CAP_READWRITE)) { fprintf(stderr, "%s does not support read i/o\n", dev_name); exit(EXIT_FAILURE); } break; case V4L2_MEMORY_MMAP: case V4L2_MEMORY_USERPTR: if (!(cap.capabilities & V4L2_CAP_STREAMING)) { fprintf(stderr, "%s does not support streaming i/o\n", dev_name); exit(EXIT_FAILURE); } break; } // if (xioctl(fd, VIDIOC_G_FMT, &fmt) < 0) // perror("get fmt"); CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = w; fmt.fmt.pix.height = h; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; #ifdef WITH_V4L2_LIB v4lconvert_data = v4lconvert_create(fd); if (v4lconvert_data == NULL) errno_exit("v4lconvert_create"); if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0) errno_exit("v4lconvert_try_format"); ret = xioctl(fd, VIDIOC_S_FMT, &src_fmt); sizeimage = src_fmt.fmt.pix.sizeimage; dst_buf = malloc(fmt.fmt.pix.sizeimage); printf("raw pixfmt: %c%c%c%c %dx%d\n", src_fmt.fmt.pix.pixelformat & 0xff, (src_fmt.fmt.pix.pixelformat >> 8) & 0xff, (src_fmt.fmt.pix.pixelformat >> 16) & 0xff, (src_fmt.fmt.pix.pixelformat >> 24) & 0xff, src_fmt.fmt.pix.width, src_fmt.fmt.pix.height); #else ret = xioctl(fd, VIDIOC_S_FMT, &fmt); sizeimage = fmt.fmt.pix.sizeimage; #endif if (ret < 0) errno_exit("VIDIOC_S_FMT"); // // /* Note VIDIOC_S_FMT may change width and height. 
*/ // printf("pixfmt: %c%c%c%c %dx%d\n", fmt.fmt.pix.pixelformat & 0xff, (fmt.fmt.pix.pixelformat >> 8) & 0xff, (fmt.fmt.pix.pixelformat >> 16) & 0xff, (fmt.fmt.pix.pixelformat >> 24) & 0xff, fmt.fmt.pix.width, fmt.fmt.pix.height); switch (io) { case IO_METHOD_READ: init_read(sizeimage); break; case V4L2_MEMORY_MMAP: init_mmap(); break; case V4L2_MEMORY_USERPTR: init_userp(sizeimage); break; } }
static bool init_device(int fd, const char* dev_name, int io, buffer** buffers, size_t n_buffers, struct v4l2_format& fmt) { struct v4l2_capability cap; struct v4l2_cropcap cropcap; struct v4l2_crop crop; unsigned int min; if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap)) { if (EINVAL == errno) { fprintf(stderr, "%s is no V4L2 device\n", dev_name); return false; } else { return false; } } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf(stderr, "%s is no video capture device\n", dev_name); return false; } switch (io) { case IO_METHOD_READ: if (!(cap.capabilities & V4L2_CAP_READWRITE)) { fprintf(stderr, "%s does not support read i/o\n", dev_name); return false; } break; case IO_METHOD_MMAP: case IO_METHOD_USERPTR: if (!(cap.capabilities & V4L2_CAP_STREAMING)) { fprintf(stderr, "%s does not support streaming i/o\n", dev_name); return false; } break; } /* Select video input, video standard and tune here. */ CLEAR(cropcap); cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (0 == xioctl(fd, VIDIOC_CROPCAP, &cropcap)) { crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; crop.c = cropcap.defrect; /* reset to default */ if (-1 == xioctl(fd, VIDIOC_S_CROP, &crop)) { switch (errno) { case EINVAL: /* Cropping not supported. */ break; default: /* Errors ignored. */ break; } } } else { /* Errors ignored. */ } fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (0) { CLEAR(fmt); fmt.fmt.pix.width = 640; fmt.fmt.pix.height = 480; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt)) { cerr << "Error initializing device (VIDIOC_S_FMT)" << endl; return false; } /* Note VIDIOC_S_FMT may change width and height. */ } else { /* Preserve original settings as set by v4l2-ctl for example */ if (-1 == xioctl(fd, VIDIOC_G_FMT, &fmt)) { cerr << "Error initializing device (VIDIOC_G_FMT)" << endl; return false; } } /* Buggy driver paranoia. 
*/ min = fmt.fmt.pix.width * 2; if (fmt.fmt.pix.bytesperline < min) fmt.fmt.pix.bytesperline = min; min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; if (fmt.fmt.pix.sizeimage < min) fmt.fmt.pix.sizeimage = min; switch (io) { case IO_METHOD_READ: return init_read(buffers, fmt.fmt.pix.sizeimage); break; case IO_METHOD_MMAP: init_mmap(fd, buffers, n_buffers, fmt.fmt.pix.sizeimage); break; case IO_METHOD_USERPTR: return init_userp(fd, buffers, n_buffers, fmt.fmt.pix.sizeimage); break; } return true; }
static void init_device (void) { struct v4l2_capability cap; struct v4l2_cropcap cropcap; struct v4l2_crop crop; struct v4l2_format fmt; unsigned int min; if (-1 == xioctl (fd, VIDIOC_QUERYCAP, &cap)) { if (EINVAL == errno) { fprintf (stderr, "%s is no V4L2 device\n", dev_name); exit (EXIT_FAILURE); } else { errno_exit ("VIDIOC_QUERYCAP"); } } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf (stderr, "%s is no video capture device\n", dev_name); exit (EXIT_FAILURE); } fprintf(stderr, "Card: %s\n", cap.card); switch (io) { case IO_METHOD_READ: if (!(cap.capabilities & V4L2_CAP_READWRITE)) { fprintf (stderr, "%s does not support read i/o\n", dev_name); exit (EXIT_FAILURE); } break; case IO_METHOD_MMAP: case IO_METHOD_USERPTR: if (!(cap.capabilities & V4L2_CAP_STREAMING)) { fprintf (stderr, "%s does not support streaming i/o\n", dev_name); exit (EXIT_FAILURE); } break; } /* Select video input, video standard and tune here. */ { struct v4l2_input input; __u32 current; if (-1 == ioctl (fd, VIDIOC_G_INPUT, ¤t)) { perror ("VIDIOC_G_INPUT"); } memset (&input, 0, sizeof (input)); input.index = current; if (-1 == ioctl (fd, VIDIOC_ENUMINPUT, &input)) { perror ("VIDIOC_ENUMINPUT"); } else { fprintf (stderr, "Current input: %s\n", input.name); if (input.status & V4L2_IN_ST_NO_H_LOCK) fprintf (stderr, "Warning: no video lock detected\n"); } } CLEAR (cropcap); cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (0 == xioctl (fd, VIDIOC_CROPCAP, &cropcap)) { crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; crop.c = cropcap.defrect; /* reset to default */ if (-1 == xioctl (fd, VIDIOC_S_CROP, &crop)) { switch (errno) { case EINVAL: /* Cropping not supported. */ break; default: /* Errors ignored. */ break; } } } else { /* Errors ignored. */ } CLEAR (fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; switch (G_size) { case 0: fmt.fmt.pix.width = G_pal ? 704 : 640; fmt.fmt.pix.height = G_pal ? 576 : 480; break; case 1: fmt.fmt.pix.width = G_pal ? 352 : 320; fmt.fmt.pix.height = G_pal ? 
288 : 240; break; case 2: fmt.fmt.pix.width = G_width; fmt.fmt.pix.height = G_height; break; } switch (type) { case TYPE_JPEG: case TYPE_MJPEG: fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; break; case TYPE_MPEG1: fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MPEG; break; case TYPE_H264: fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_H264; break; case TYPE_YUYV: fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; break; case TYPE_UYVY: fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; break; case TYPE_Y8: fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_GREY; break; case TYPE_NV12: fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12; break; case TYPE_BGR24: fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_BGR24; break; case TYPE_RGB565: fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB565; break; case TYPE_MP42: //fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MP42; break; case TYPE_MPEGTS: case TYPE_MPEGPS: fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MPEG; break; } fmt.fmt.pix.field = G_field; if (-1 == xioctl (fd, VIDIOC_S_FMT, &fmt)) errno_exit ("VIDIOC_S_FMT"); /* Note VIDIOC_S_FMT may change width and height. */ /* Buggy driver paranoia. 
*/ min = fmt.fmt.pix.width * 2; if (fmt.fmt.pix.bytesperline < min) fmt.fmt.pix.bytesperline = min; min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; if (fmt.fmt.pix.sizeimage < min) fmt.fmt.pix.sizeimage = min; /* optional MPEG parameters */ if(type == TYPE_MPEG4 || type == TYPE_H264 || type == TYPE_MP42 || type == TYPE_MPEGTS || type == TYPE_MPEGPS || type == TYPE_MPEG2) { struct v4l2_ext_control ctrl[12] = {}; struct v4l2_ext_controls ctrls = { .ctrl_class = V4L2_CTRL_CLASS_MPEG, .count = 0, .controls = &ctrl[0], }; if (type == TYPE_MPEGTS) add_ctrl(&ctrls, V4L2_CID_MPEG_STREAM_TYPE, V4L2_MPEG_STREAM_TYPE_MPEG2_TS); else if (type == TYPE_MPEGPS) { add_ctrl(&ctrls, V4L2_CID_MPEG_STREAM_TYPE, V4L2_MPEG_STREAM_TYPE_MPEG2_PS); // add_ctrl(&ctrls, V4L2_CID_MPEG_AUDIO_ENCODING, V4L2_MPEG_AUDIO_ENCODING_PCM); } else if (type != TYPE_MP42) mux_type = type; if (mux_type == TYPE_MPEG1) add_ctrl(&ctrls, V4L2_CID_MPEG_VIDEO_ENCODING, V4L2_MPEG_VIDEO_ENCODING_MPEG_1); else if (mux_type == TYPE_MPEG2) add_ctrl(&ctrls, V4L2_CID_MPEG_VIDEO_ENCODING, V4L2_MPEG_VIDEO_ENCODING_MPEG_2); else if (mux_type == TYPE_H264) add_ctrl(&ctrls, V4L2_CID_MPEG_VIDEO_ENCODING, V4L2_MPEG_VIDEO_ENCODING_MPEG_4_AVC); if (G_br != -1) add_ctrl(&ctrls, V4L2_CID_MPEG_VIDEO_BITRATE, G_br); if (G_abr != -1) add_ctrl(&ctrls, V4L2_CID_MPEG_AUDIO_AAC_BITRATE, G_abr); if (G_ach != -1) add_ctrl(&ctrls, V4L2_CID_MPEG_AUDIO_MODE, G_ach); if (G_gop != -1) add_ctrl(&ctrls, V4L2_CID_MPEG_VIDEO_GOP_SIZE, G_gop); if (G_idr != -1) add_ctrl(&ctrls, V4L2_CID_MPEG_VIDEO_H264_I_PERIOD, G_idr); #if defined(V4L2_CID_MPEG_VIDEO_H264_PROFILE) && defined(V4L2_CID_MPEG_VIDEO_H264_LEVEL) if (G_profile != -1) add_ctrl(&ctrls, V4L2_CID_MPEG_VIDEO_H264_PROFILE, G_profile); if (G_level != -1) add_ctrl(&ctrls, V4L2_CID_MPEG_VIDEO_H264_LEVEL, G_level); #endif #if defined(V4L2_CID_MPEG_STREAM_VBI_FMT) if (G_cc != -1) { add_ctrl(&ctrls, V4L2_CID_MPEG_STREAM_VBI_FMT, G_cc ? 
V4L2_MPEG_STREAM_VBI_FMT_IVTV : V4L2_MPEG_STREAM_VBI_FMT_NONE); } #endif } else if (type == TYPE_MJPEG || type == TYPE_JPEG) { if (-1 == ioctl (fd, VIDIOC_S_JPEGCOMP, &G_jc)) perror ("VIDIOC_S_JPEGCOMP"); } switch (io) { case IO_METHOD_READ: init_read (fmt.fmt.pix.sizeimage); break; case IO_METHOD_MMAP: init_mmap (); break; case IO_METHOD_USERPTR: init_userp (fmt.fmt.pix.sizeimage); break; } }
/*
 * init_device() - validate priv->fd as a V4L2 capture device, reset
 * cropping to the driver default and set up capture buffers for the
 * I/O method in priv->io.
 *
 * If priv->force_format is set, a priv->w x priv->h YUYV interlaced
 * format is requested; otherwise the format previously configured on
 * the device (e.g. via v4l2-ctl) is read back and preserved.
 * Exits the process on fatal errors.
 *
 * Cleanup: a duplicated commented-out pixelformat line was removed.
 */
static void init_device(Priv *priv)
{
    struct v4l2_capability cap;
    struct v4l2_cropcap cropcap;
    struct v4l2_crop crop;
    struct v4l2_format fmt;

    if (-1 == xioctl(priv->fd, VIDIOC_QUERYCAP, &cap)) {
        if (EINVAL == errno) {
            fprintf(stderr, "%s is no V4L2 device\n", priv->dev_name);
            exit(EXIT_FAILURE);
        } else {
            errno_exit("VIDIOC_QUERYCAP");
        }
    }
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        fprintf(stderr, "%s is no video capture device\n", priv->dev_name);
        exit(EXIT_FAILURE);
    }
    switch (priv->io) {
    case IO_METHOD_READ:
        if (!(cap.capabilities & V4L2_CAP_READWRITE)) {
            fprintf(stderr, "%s does not support read i/o\n", priv->dev_name);
            exit(EXIT_FAILURE);
        }
        break;
    case IO_METHOD_MMAP:
    case IO_METHOD_USERPTR:
        if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
            fprintf(stderr, "%s does not support streaming i/o\n", priv->dev_name);
            exit(EXIT_FAILURE);
        }
        break;
    }

    /* Select video input, video standard and tune here. */
    CLEAR(cropcap);
    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (0 == xioctl(priv->fd, VIDIOC_CROPCAP, &cropcap)) {
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect; /* reset to default */
        if (-1 == xioctl(priv->fd, VIDIOC_S_CROP, &crop)) {
            switch (errno) {
            case EINVAL:
                /* Cropping not supported. */
                break;
            default:
                /* Errors ignored. */
                break;
            }
        }
    } else {
        /* Errors ignored. */
    }

    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (priv->force_format) {
        fmt.fmt.pix.width = priv->w;
        fmt.fmt.pix.height = priv->h;
        fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
        fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
        if (-1 == xioctl(priv->fd, VIDIOC_S_FMT, &fmt))
            errno_exit("VIDIOC_S_FMT");
        /* Note VIDIOC_S_FMT may change width and height. */
    } else {
        /* Preserve original settings as set by v4l2-ctl for example */
        if (-1 == xioctl(priv->fd, VIDIOC_G_FMT, &fmt))
            errno_exit("VIDIOC_G_FMT");
    }

    switch (priv->io) {
    case IO_METHOD_READ:
        init_read(priv, fmt.fmt.pix.sizeimage);
        break;
    case IO_METHOD_MMAP:
        init_mmap(priv);
        break;
    case IO_METHOD_USERPTR:
        init_userp(priv, fmt.fmt.pix.sizeimage);
        break;
    }
}
static void init_device(void) { struct v4l2_capability cap; //struct v4l2_cropcap cropcap; struct v4l2_crop crop; struct v4l2_format fmt; unsigned int min; /************** struct v4l2_cropcap cropcap; struct v4l2_format format; reset_cropping_parameters (); // Scale down to 1/4 size of full picture. memset (&format, 0, sizeof (format)); // defaults format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; format.fmt.pix.width = cropcap.defrect.width >> 1; format.fmt.pix.height = cropcap.defrect.height >> 1; format.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; if (-1 == ioctl (fd, VIDIOC_S_FMT, &format)) { perror ("VIDIOC_S_FORMAT"); exit (EXIT_FAILURE); } ****************/ if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap)) { if (EINVAL == errno) { fprintf(stderr, "%s is no V4L2 device\n", dev_name); exit(EXIT_FAILURE); } else { errno_exit("VIDIOC_QUERYCAP"); } } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf(stderr, "%s is no video capture device\n", dev_name); exit(EXIT_FAILURE); } switch (io) { case IO_METHOD_READ: if (!(cap.capabilities & V4L2_CAP_READWRITE)) { fprintf(stderr, "%s does not support read i/o\n", dev_name); exit(EXIT_FAILURE); } break; case IO_METHOD_MMAP: case IO_METHOD_USERPTR: if (!(cap.capabilities & V4L2_CAP_STREAMING)) { fprintf(stderr, "%s does not support streaming i/o\n", dev_name); exit(EXIT_FAILURE); } break; } /* Select video input, video standard and tune here. */ /* CLEAR(cropcap); cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (0 == xioctl(fd, VIDIOC_CROPCAP, &cropcap)) { crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; crop.c = cropcap.defrect; // reset to default if (-1 == xioctl(fd, VIDIOC_S_CROP, &crop)) { switch (errno) { case EINVAL: // Cropping not supported. 
break; default: printf("Error in VIDIOC_S_CROP\n"); break; } } } else printf("Error in VIDIOC_CROPCAP\n"); */ CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = 1280; fmt.fmt.pix.height = 720; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt)) errno_exit("VIDIOC_S_FMT"); /* Note VIDIOC_S_FMT may change width and height. */ image_size = fmt.fmt.pix.width * fmt.fmt.pix.height * 2; /* Buggy driver paranoia. */ min = fmt.fmt.pix.width * 2; if (fmt.fmt.pix.bytesperline < min) fmt.fmt.pix.bytesperline = min; min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; if (fmt.fmt.pix.sizeimage < min) fmt.fmt.pix.sizeimage = min; switch (io) { case IO_METHOD_READ: init_read(fmt.fmt.pix.sizeimage); break; case IO_METHOD_MMAP: init_mmap(); break; case IO_METHOD_USERPTR: printf("capture: fmt.fmt.pix.sizeimage = %d\n", fmt.fmt.pix.sizeimage); init_userp(fmt.fmt.pix.sizeimage); break; } }
/*
 * Initialize the global capture device `fd`.
 *
 * Verifies V4L2 capture capability and the selected `io` method, resets
 * cropping to the driver default (best effort), either forces the
 * user-requested width/height/pixel_format or preserves the current
 * format, attempts to set the frame interval to 1/fps (non-fatal on
 * failure), applies geometry fixups for buggy drivers, and dispatches to
 * the per-I/O-method buffer setup.
 *
 * Exits the process on any unrecoverable ioctl failure.
 */
static void init_device(void)
{
    struct v4l2_capability cap;
    struct v4l2_cropcap cropcap;
    struct v4l2_crop crop;
    struct v4l2_format fmt;
    struct v4l2_streamparm frameint;
    unsigned int min;

    /* EINVAL from QUERYCAP means the node is not a V4L2 device at all. */
    if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap)) {
        if (EINVAL == errno) {
            fprintf(stderr, "%s is no V4L2 device\n", dev_name);
            exit(EXIT_FAILURE);
        } else {
            errno_exit("VIDIOC_QUERYCAP");
        }
    }
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        fprintf(stderr, "%s is no video capture device\n", dev_name);
        exit(EXIT_FAILURE);
    }

    /* The chosen I/O method needs a matching capability flag. */
    switch (io) {
    case IO_METHOD_READ:
        if (!(cap.capabilities & V4L2_CAP_READWRITE)) {
            fprintf(stderr, "%s does not support read i/o\n", dev_name);
            exit(EXIT_FAILURE);
        }
        break;
    case IO_METHOD_MMAP:
    case IO_METHOD_USERPTR:
        if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
            fprintf(stderr, "%s does not support streaming i/o\n", dev_name);
            exit(EXIT_FAILURE);
        }
        break;
    }

    /* Select video input, video standard and tune here. */

    CLEAR(cropcap);
    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (0 == xioctl(fd, VIDIOC_CROPCAP, &cropcap)) {
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect; /* reset to default */
        if (-1 == xioctl(fd, VIDIOC_S_CROP, &crop)) {
            switch (errno) {
            case EINVAL:
                /* Cropping not supported. */
                break;
            default:
                /* Errors ignored. */
                break;
            }
        }
    } else {
        /* Errors ignored — cropping is optional. */
    }

    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (set_format) {
        fmt.fmt.pix.width = width;
        fmt.fmt.pix.height = height;
        fmt.fmt.pix.pixelformat = pixel_format;
        fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
        if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt))
            errno_exit("VIDIOC_S_FMT");
        /* The driver may silently substitute a format it supports. */
        if (fmt.fmt.pix.pixelformat != pixel_format) {
            fprintf(stderr,"Libv4l didn't accept pixel format. Can't proceed.\n");
            exit(EXIT_FAILURE);
        }
        /* Note VIDIOC_S_FMT may change width and height. */
    } else {
        /* Preserve original settings as set by v4l2-ctl for example. */
        if (-1 == xioctl(fd, VIDIOC_G_FMT, &fmt))
            errno_exit("VIDIOC_G_FMT");
    }

    CLEAR(frameint);

    /* Attempt to set the frame interval to 1/fps; not all drivers honor
     * VIDIOC_S_PARM, so failure is only a warning. */
    frameint.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    frameint.parm.capture.timeperframe.numerator = 1;
    frameint.parm.capture.timeperframe.denominator = fps;
    if (-1 == xioctl(fd, VIDIOC_S_PARM, &frameint))
        fprintf(stderr, "Unable to set frame interval.\n");

    /* Buggy driver paranoia: enforce sane minimum geometry. */
    min = fmt.fmt.pix.width * 2;
    if (fmt.fmt.pix.bytesperline < min)
        fmt.fmt.pix.bytesperline = min;
    min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
    if (fmt.fmt.pix.sizeimage < min)
        fmt.fmt.pix.sizeimage = min;

    /* Allocate buffers suited to the negotiated image size. */
    switch (io) {
    case IO_METHOD_READ:
        init_read(fmt.fmt.pix.sizeimage);
        break;
    case IO_METHOD_MMAP:
        init_mmap();
        break;
    case IO_METHOD_USERPTR:
        init_userp(fmt.fmt.pix.sizeimage);
        break;
    }
}
//*********************************************************************************** static void init_device (void) { struct v4l2_capability cap; struct v4l2_cropcap cropcap; struct v4l2_crop crop; struct v4l2_format fmt; unsigned int min; if (-1 == xioctl (fd, VIDIOC_QUERYCAP, &cap)) { if (EINVAL == errno) { fprintf (stderr, "%s is no V4L2 device\n", dev_name); exit (EXIT_FAILURE); } else { errno_exit ("VIDIOC_QUERYCAP"); } } else { printf ("Caps returns: 0x%x\n", cap.capabilities); } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf (stderr, "%s is no video capture device\n", dev_name); exit (EXIT_FAILURE); } switch (io) { case IO_METHOD_READ: if (!(cap.capabilities & V4L2_CAP_READWRITE)) { fprintf (stderr, "%s does not support read i/o\n", dev_name); //exit (EXIT_FAILURE); } break; case IO_METHOD_MMAP: case IO_METHOD_USERPTR: if (!(cap.capabilities & V4L2_CAP_STREAMING)) { fprintf (stderr, "%s does not support streaming i/o\n", dev_name); exit (EXIT_FAILURE); } break; case IO_METHOD_SETEXPOSURE: puts("I am in expo"); init_exposure(); break; case IO_METHOD_SETGAIN : puts("I am in gain"); init_gain(); break; } /* Select video input, video standard and tune here. */ CLEAR (cropcap); cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (0 == xioctl (fd, VIDIOC_CROPCAP, &cropcap)) { crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; crop.c = cropcap.defrect; /* reset to default */ if (-1 == xioctl (fd, VIDIOC_S_CROP, &crop)) { switch (errno) { case EINVAL: /* Cropping not supported. */ fprintf(stderr, " Cropping not supported\n"); break; default: /* Errors ignored. */ break; } } } else { /* Errors ignored. 
*/ } CLEAR (fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = TARGETWIDTH; fmt.fmt.pix.height = TARGETHEIGHT; fmt.fmt.pix.pixelformat = PIXELFMT; // defined at the top of the file printf("capture: size: W - %d H - %d, format: 0x%x\n", fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.pixelformat); fmt.fmt.pix.field = V4L2_FIELD_NONE; #if 1 if (-1 == xioctl (fd, VIDIOC_S_FMT, &fmt)) { printf("xioctl(VIDIOC_S_FMT) failed--->It's doesn't matter. Continue..."); } else { printf("VIDIOC_S_FMT returned success\n"); printf(" returned: pix.width: %d pix.height: %d\n", fmt.fmt.pix.width, fmt.fmt.pix.height); } //;jr;$* exit for now // printf("EXIT APPLICATION for now......\n"); // exit(0); #endif /* Note VIDIOC_S_FMT may change width and height. */ /* Buggy driver paranoia. */ min = fmt.fmt.pix.width * 2; if (fmt.fmt.pix.bytesperline < min) fmt.fmt.pix.bytesperline = min; min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; if (fmt.fmt.pix.sizeimage < min) fmt.fmt.pix.sizeimage = min; switch (io) { case IO_METHOD_READ: init_read (fmt.fmt.pix.sizeimage); break; case IO_METHOD_MMAP: init_mmap (); break; case IO_METHOD_USERPTR: init_userp (fmt.fmt.pix.sizeimage); break; case IO_METHOD_SETEXPOSURE: break; } }
void Camera::Init() { struct v4l2_capability cap; struct v4l2_cropcap cropcap; struct v4l2_crop crop; struct v4l2_format fmt; unsigned int min; if(-1 == xioctl (fd, VIDIOC_QUERYCAP, &cap)) { if (EINVAL == errno) { fprintf(stderr, "%s is no V4L2 device\n",name); exit(1); } else { errno_exit("VIDIOC_QUERYCAP"); } } if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf(stderr, "%s is no video capture device\n", name); exit(1); } switch(io) { case IO_METHOD_READ: if(!(cap.capabilities & V4L2_CAP_READWRITE)) { fprintf(stderr, "%s does not support read i/o\n", name); exit (1); } break; case IO_METHOD_MMAP: case IO_METHOD_USERPTR: if(!(cap.capabilities & V4L2_CAP_STREAMING)) { fprintf (stderr, "%s does not support streaming i/o\n", name); exit(1); } break; } CLEAR (cropcap); cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if(0 == xioctl (fd, VIDIOC_CROPCAP, &cropcap)) { crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; crop.c = cropcap.defrect; /* reset to default */ if(-1 == xioctl (fd, VIDIOC_S_CROP, &crop)) { switch (errno) { case EINVAL: /* Cropping not supported. */ break; default: /* Errors ignored. */ break; } } } else { /* Errors ignored. 
*/ } CLEAR (fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = width; fmt.fmt.pix.height = height; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; if(-1 == xioctl (fd, VIDIOC_S_FMT, &fmt)) errno_exit ("VIDIOC_S_FMT"); /* struct v4l2_standard s; s.name[0]='A'; s.frameperiod.numerator=1; s.frameperiod.denominator=fps; if(-1==xioctl(fd, VIDIOC_S_STD, &s)) errno_exit("VIDIOC_S_STD"); */ struct v4l2_streamparm p; p.type=V4L2_BUF_TYPE_VIDEO_CAPTURE; //p.parm.capture.capability=V4L2_CAP_TIMEPERFRAME; //p.parm.capture.capturemode=V4L2_MODE_HIGHQUALITY; p.parm.capture.timeperframe.numerator=1; p.parm.capture.timeperframe.denominator=fps; p.parm.output.timeperframe.numerator=1; p.parm.output.timeperframe.denominator=fps; //p.parm.output.outputmode=V4L2_MODE_HIGHQUALITY; //p.parm.capture.extendedmode=0; //p.parm.capture.readbuffers=n_buffers; if(-1==xioctl(fd, VIDIOC_S_PARM, &p)) errno_exit("VIDIOC_S_PARM"); //default values, mins and maxes struct v4l2_queryctrl queryctrl; memset(&queryctrl, 0, sizeof(queryctrl)); queryctrl.id = V4L2_CID_BRIGHTNESS; if(-1 == xioctl (fd, VIDIOC_QUERYCTRL, &queryctrl)) { if(errno != EINVAL) { //perror ("VIDIOC_QUERYCTRL"); //exit(EXIT_FAILURE); printf("brightness error\n"); } else { printf("brightness is not supported\n"); } } else if(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) { printf ("brightness is not supported\n"); } mb=queryctrl.minimum; Mb=queryctrl.maximum; db=queryctrl.default_value; memset(&queryctrl, 0, sizeof(queryctrl)); queryctrl.id = V4L2_CID_CONTRAST; if(-1 == xioctl (fd, VIDIOC_QUERYCTRL, &queryctrl)) { if(errno != EINVAL) { //perror ("VIDIOC_QUERYCTRL"); //exit(EXIT_FAILURE); printf("contrast error\n"); } else { printf("contrast is not supported\n"); } } else if(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) { printf ("contrast is not supported\n"); } mc=queryctrl.minimum; Mc=queryctrl.maximum; dc=queryctrl.default_value; memset(&queryctrl, 0, sizeof(queryctrl)); queryctrl.id 
= V4L2_CID_SATURATION; if(-1 == xioctl (fd, VIDIOC_QUERYCTRL, &queryctrl)) { if(errno != EINVAL) { //perror ("VIDIOC_QUERYCTRL"); //exit(EXIT_FAILURE); printf("saturation error\n"); } else { printf("saturation is not supported\n"); } } else if(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) { printf ("saturation is not supported\n"); } ms=queryctrl.minimum; Ms=queryctrl.maximum; ds=queryctrl.default_value; memset(&queryctrl, 0, sizeof(queryctrl)); queryctrl.id = V4L2_CID_HUE; if(-1 == xioctl (fd, VIDIOC_QUERYCTRL, &queryctrl)) { if(errno != EINVAL) { //perror ("VIDIOC_QUERYCTRL"); //exit(EXIT_FAILURE); printf("hue error\n"); } else { printf("hue is not supported\n"); } } else if(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) { printf ("hue is not supported\n"); } mh=queryctrl.minimum; Mh=queryctrl.maximum; dh=queryctrl.default_value; memset(&queryctrl, 0, sizeof(queryctrl)); queryctrl.id = V4L2_CID_HUE_AUTO; if(-1 == xioctl (fd, VIDIOC_QUERYCTRL, &queryctrl)) { if(errno != EINVAL) { //perror ("VIDIOC_QUERYCTRL"); //exit(EXIT_FAILURE); printf("hueauto error\n"); } else { printf("hueauto is not supported\n"); } } else if(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) { printf ("hueauto is not supported\n"); } ha=queryctrl.default_value; memset(&queryctrl, 0, sizeof(queryctrl)); queryctrl.id = V4L2_CID_SHARPNESS; if(-1 == xioctl (fd, VIDIOC_QUERYCTRL, &queryctrl)) { if(errno != EINVAL) { //perror ("VIDIOC_QUERYCTRL"); //exit(EXIT_FAILURE); printf("sharpness error\n"); } else { printf("sharpness is not supported\n"); } } else if(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) { printf ("sharpness is not supported\n"); } msh=queryctrl.minimum; Msh=queryctrl.maximum; dsh=queryctrl.default_value; //TODO: TO ADD SETTINGS //here should go custom calls to xioctl //END TO ADD SETTINGS /* Note VIDIOC_S_FMT may change width and height. */ /* Buggy driver paranoia. 
*/ min = fmt.fmt.pix.width * 2; if(fmt.fmt.pix.bytesperline < min) fmt.fmt.pix.bytesperline = min; min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; if(fmt.fmt.pix.sizeimage < min) fmt.fmt.pix.sizeimage = min; switch(io) { case IO_METHOD_READ: init_read(fmt.fmt.pix.sizeimage); break; case IO_METHOD_MMAP: init_mmap(); break; case IO_METHOD_USERPTR: init_userp(fmt.fmt.pix.sizeimage); break; } }
static void init_device(void) { struct v4l2_capability cap; struct v4l2_cropcap cropcap; struct v4l2_crop crop; struct v4l2_format fmt; unsigned int min; if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap)) { if (EINVAL == errno) { fprintf(stderr, "%s is no V4L2 device\n", dev_name); exit(EXIT_FAILURE); } else { errno_exit("VIDIOC_QUERYCAP"); } } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf(stderr, "%s is no video capture device\n", dev_name); exit(EXIT_FAILURE); } switch (io) { case IO_METHOD_READ: if (!(cap.capabilities & V4L2_CAP_READWRITE)) { fprintf(stderr, "%s does not support read i/o\n", dev_name); exit(EXIT_FAILURE); } break; case IO_METHOD_MMAP: case IO_METHOD_USERPTR: if (!(cap.capabilities & V4L2_CAP_STREAMING)) { fprintf(stderr, "%s does not support streaming i/o\n", dev_name); exit(EXIT_FAILURE); } break; } /* Select video input, video standard and tune here. */ CLEAR(cropcap); cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (0 == xioctl(fd, VIDIOC_CROPCAP, &cropcap)) { crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; crop.c = cropcap.defrect; /* reset to default */ if (-1 == xioctl(fd, VIDIOC_S_CROP, &crop)) { switch (errno) { case EINVAL: /* Cropping not supported. */ break; default: /* Errors ignored. */ break; } } } else { /* Errors ignored. */ } CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (force_format) { fmt.fmt.pix.width = 1280; fmt.fmt.pix.height = 720; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; fmt.fmt.pix.field = V4L2_FIELD_NONE; if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt)) errno_exit("VIDIOC_S_FMT"); /* Note VIDIOC_S_FMT may change width and height. */ } else { /* Preserve original settings as set by v4l2-ctl for example */ if (-1 == xioctl(fd, VIDIOC_G_FMT, &fmt)) errno_exit("VIDIOC_G_FMT"); } /* Buggy driver paranoia. 
*/ min = fmt.fmt.pix.width * 2; if (fmt.fmt.pix.bytesperline < min) fmt.fmt.pix.bytesperline = min; min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; if (fmt.fmt.pix.sizeimage < min) fmt.fmt.pix.sizeimage = min; switch (io) { case IO_METHOD_READ: init_read(fmt.fmt.pix.sizeimage); break; case IO_METHOD_MMAP: init_mmap(); break; case IO_METHOD_USERPTR: init_userp(fmt.fmt.pix.sizeimage); break; } }
/*
 * Initialize the capture device described by *cap.
 *
 * Verifies the fd is a V4L2 capture device and that cap->io is backed by
 * the matching capability bit, resets cropping to the driver default
 * (best effort), forces an NV12 format of cap->width x cap->height, then
 * writes the driver-accepted pixel format and (possibly adjusted)
 * width/height back into *cap before dispatching to the per-I/O-method
 * buffer setup.
 *
 * Exits the process on any unrecoverable ioctl failure.
 */
static void init_device(capture * cap)
{
    struct v4l2_capability capb;
    struct v4l2_cropcap cropcap;
    struct v4l2_crop crop;
    struct v4l2_format fmt;
    unsigned int min;  /* NOTE(review): declared but unused in this variant */

    /* EINVAL from QUERYCAP means the node is not a V4L2 device at all. */
    if (-1 == xioctl(cap->fd, VIDIOC_QUERYCAP, &capb)) {
        if (EINVAL == errno) {
            fprintf(stderr, "%s is no V4L2 device\n", cap->dev_name);
            exit(EXIT_FAILURE);
        } else {
            errno_exit("VIDIOC_QUERYCAP");
        }
    }
    if (!(capb.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        fprintf(stderr, "%s is no video capture device\n", cap->dev_name);
        exit(EXIT_FAILURE);
    }

    /* The chosen I/O method needs a matching capability flag. */
    switch (cap->io) {
    case IO_METHOD_READ:
        if (!(capb.capabilities & V4L2_CAP_READWRITE)) {
            fprintf(stderr, "%s does not support read i/o\n", cap->dev_name);
            exit(EXIT_FAILURE);
        }
        break;
    case IO_METHOD_MMAP:
    case IO_METHOD_USERPTR:
        if (!(capb.capabilities & V4L2_CAP_STREAMING)) {
            fprintf(stderr, "%s does not support streaming i/o\n", cap->dev_name);
            exit(EXIT_FAILURE);
        }
        break;
    }

    /* Select video input, video standard and tune here. */

    CLEAR(cropcap);
    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (0 == xioctl(cap->fd, VIDIOC_CROPCAP, &cropcap)) {
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect; /* reset to default */
        if (-1 == xioctl(cap->fd, VIDIOC_S_CROP, &crop)) {
            switch (errno) {
            case EINVAL:
                /* Cropping not supported. */
                break;
            default:
                /* Errors ignored. */
                break;
            }
        }
    } else {
        /* Errors ignored — cropping is optional. */
    }

    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = cap->width;
    fmt.fmt.pix.height = cap->height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12;
    fmt.fmt.pix.field = V4L2_FIELD_ANY;
    if (-1 == xioctl(cap->fd, VIDIOC_S_FMT, &fmt)) {
        errno_exit("VIDIOC_S_FMT");
    }

    /* Record what the driver actually accepted. */
    cap->pixel_format = fmt.fmt.pix.pixelformat;

    /* Note VIDIOC_S_FMT may change width and height. */
    cap->width = fmt.fmt.pix.width;
    cap->height = fmt.fmt.pix.height;

    /* Allocate buffers suited to the negotiated image size. */
    switch (cap->io) {
    case IO_METHOD_READ:
        init_read(cap, fmt.fmt.pix.sizeimage);
        break;
    case IO_METHOD_MMAP:
        init_mmap(cap);
        break;
    case IO_METHOD_USERPTR:
        init_userp(cap, fmt.fmt.pix.sizeimage);
        break;
    }
}