Example #1
int camera_get_frame(struct picture_t *pic)
{
	struct v4l2_buffer cam_buf = {0};

	cam_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	cam_buf.memory = V4L2_MEMORY_MMAP;

	if(ioctl(fd_cam, VIDIOC_DQBUF, &cam_buf) < 0) {
		perror("VIDIOC_DQBUF");
		return 0;
	}
	applog("buf_index=%02d, seq=%d, timestamp=%d.%06d",
		cam_buf.index, cam_buf.sequence, (int)cam_buf.timestamp.tv_sec, (int)cam_buf.timestamp.tv_usec);

	if(v4lconvert_convert(lib, &src_fmt, &dst_fmt, (void*)buf_pointer[cam_buf.index], 
		cam_buf.length, current_pic.buffer, YUV420_size) <= 0){
		perror("v4lconvert_convert");
		return 0;
	}
	current_pic.timestamp = cam_buf.timestamp;
	cam_buf.flags = cam_buf.reserved = 0;
	if(ioctl(fd_cam, VIDIOC_QBUF, &cam_buf) < 0) {
		perror("VIDIOC_QBUF");
		return 0;
	}
	*pic = current_pic;
	return 1;
}
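Example #1 relies on globals (fd_cam, lib, src_fmt, dst_fmt, buf_pointer[]) that are initialized elsewhere in that program. As orientation only, here is a minimal sketch of the kind of setup it presupposes; the function name camera_init and the 640x480 YUV420 request are assumptions, not part of the original source:

#include <fcntl.h>
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>
#include <libv4lconvert.h>

static int fd_cam;
static struct v4lconvert_data *lib;
static struct v4l2_format src_fmt, dst_fmt;

/* Hypothetical setup for Example #1: open the device, create the conversion
   context and negotiate a capture format that libv4lconvert can turn into
   the requested YUV420 destination format. */
static int camera_init(const char *dev)
{
	fd_cam = open(dev, O_RDWR | O_NONBLOCK);
	if (fd_cam < 0)
		return -1;

	lib = v4lconvert_create(fd_cam);	/* per-device conversion context */
	if (!lib)
		return -1;

	memset(&dst_fmt, 0, sizeof(dst_fmt));
	dst_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	dst_fmt.fmt.pix.width = 640;
	dst_fmt.fmt.pix.height = 480;
	dst_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;

	/* Fills src_fmt with a format the driver offers and that libv4lconvert
	   can convert into dst_fmt. */
	if (v4lconvert_try_format(lib, &dst_fmt, &src_fmt) != 0)
		return -1;

	/* The MMAP buffers behind buf_pointer[] still have to be requested,
	   mapped and queued before camera_get_frame() can dequeue anything. */
	return ioctl(fd_cam, VIDIOC_S_FMT, &src_fmt);
}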
Example #2
static void process_image(unsigned char *p, int len)
{
	if (grab) {
		FILE *f;

		f = fopen("image.dat", "w");
		fwrite(p, 1, len, f);
		fclose(f);
		printf("image dumped to 'image.dat'\n");
		exit(EXIT_SUCCESS);
	}
#ifdef WITH_V4L2_LIB
	if (v4lconvert_convert(v4lconvert_data,
				&src_fmt,
				&fmt,
				p, len,
				dst_buf, fmt.fmt.pix.sizeimage) < 0) {
		if (errno != EAGAIN)
			errno_exit("v4l_convert");
		return;
	}
	p = dst_buf;
	len = fmt.fmt.pix.sizeimage;
#endif
#ifdef WITH_GTK
	gdk_draw_rgb_image(drawing_area->window,
			   drawing_area->style->white_gc,
			   0, 0,		/* xpos, ypos */
			   fmt.fmt.pix.width, fmt.fmt.pix.height,
//			   GDK_RGB_DITHER_MAX,
			   GDK_RGB_DITHER_NORMAL,
			   p,
			   fmt.fmt.pix.width * 3);
#else
	fputc('.', stdout);
#endif
	if (info && io != V4L2_MEMORY_MMAP) {
		if (--info <= 0) {
			time_t sec;
			long int usec;
			int d1, d2;

			sec = cur_time.tv_sec;
			usec = cur_time.tv_usec;
			gettimeofday(&cur_time, 0);
			d1 = cur_time.tv_sec - sec;
			d2 = cur_time.tv_usec - usec;
			while (d2 < 0) {
				d2 += 1000000;
				d1--;
			}
			printf("FPS: %5.2fd\n",
				(float) NFRAMES / (d1 + 0.000001 * d2));
			info = NFRAMES;
		}
	}
}
Example #3
static int tc_v4l2_fetch_data_v4lconv(V4L2Source *vs,
                                      uint8_t *src, int src_len,
                                      uint8_t *dst, int dst_len)
{
    int err = v4lconvert_convert(vs->v4l_convert,
                                 &(vs->v4l_src_fmt),
                                 &(vs->v4l_dst_fmt),
                                 src, src_len, dst, dst_len);

    return (err == -1) ? TC_ERROR : TC_OK; /* FIXME */
}
Example #4
// FIXME: return 0 / -1 and set errno
int video_read(struct video_t *vid, void *buffer) {
    int r;


// FIXME not necessary in case of io_watch
/*    fd_set fds;
    struct timeval tv;
    FD_ZERO(&fds);
    FD_SET(vid->fd, &fds);

    // Timeout.
    tv.tv_sec = 1;
    tv.tv_usec = 0;

    r = select(vid->fd + 1, &fds, NULL, NULL, &tv);
    if(r<0) {
        fprintf(stderr, "select error\n");
        exit(EXIT_FAILURE);
    }
    if(!r) {
        fprintf(stderr, "select timeout\n");
        exit(EXIT_FAILURE);
    }*/
// end of FIXME

    // get raw image data
    r = read(vid->fd, vid->raw_buffer, vid->src_fmt.fmt.pix.sizeimage);
    if(r<0) {
        switch(errno) {
        case EAGAIN:
            return 0;
        case EIO:
            /* Could ignore EIO, see spec. */

            /* fall through */
        default:
            fprintf(stderr, "read error\n");
            exit(EXIT_FAILURE);
        }
    }

    // convert data to desired format
    if (v4lconvert_convert(vid->convert_data, &vid->src_fmt, &vid->vid_fmt,
        vid->raw_buffer, vid->src_fmt.fmt.pix.sizeimage,    // raw data
        buffer, vid->vid_fmt.fmt.pix.sizeimage              // converted data
    ) < 0) {
        if (errno != EAGAIN)
            exit(1);    // FIXME errno_exit("v4l_convert");
        return 1;
    }

    return 0;
}
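The FIXME notes at the top of Example #4 suggest switching video_read() to a plain 0/-1-with-errno convention. A possible shape for that, sketched here purely as an illustration (the name video_read_errno is invented, and struct video_t is the same type the example above uses), would be:

/* Sketch only: return the number of converted bytes, 0 if no frame is ready
   yet (EAGAIN), or -1 with errno preserved for the caller to handle. */
int video_read_errno(struct video_t *vid, void *buffer) {
    int r = read(vid->fd, vid->raw_buffer, vid->src_fmt.fmt.pix.sizeimage);
    if (r < 0)
        return (errno == EAGAIN) ? 0 : -1;

    r = v4lconvert_convert(vid->convert_data, &vid->src_fmt, &vid->vid_fmt,
                           vid->raw_buffer, r,                     // raw data actually read
                           buffer, vid->vid_fmt.fmt.pix.sizeimage  // converted data
    );
    if (r < 0)
        return (errno == EAGAIN) ? 0 : -1;

    return r;   /* bytes written into buffer */
}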
Example #5
//
//	v4l2 version (!)
//
void gv4l2_process_image (CWEBCAM * _object, void *start)
{
	struct v4l2_format dest = THIS->fmt;

	if (THIS->format != GB_IMAGE_BGR)
		gv4l2_debug("Destination format not supported");
	
	dest.fmt.pix.pixelformat = V4L2_PIX_FMT_BGR24;
	dest.fmt.pix.sizeimage = THIS->fmt.fmt.pix.width * THIS->fmt.fmt.pix.height * 3;

	if (v4lconvert_convert(THIS->convert, &THIS->fmt, &dest, start, THIS->fmt.fmt.pix.sizeimage, THIS->frame, dest.fmt.pix.sizeimage) != dest.fmt.pix.sizeimage)
		gv4l2_debug("Unable to convert webcam image to BGR24");
}
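Note that v4lconvert_convert() returns the number of bytes written to the destination buffer, or a negative value on error. That is why Example #5 compares the result against dest.fmt.pix.sizeimage, while most of the other examples only test for a negative return.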
Example #6
void process_image(unsigned char *p, int len, int W, int H)
{
    if (v4lconvert_convert(v4lconvert_data,
                           &src_fmt,
                           &fmt,
                           p, len,
                           dst_buf,
                           fmt.fmt.pix.sizeimage) < 0) {
        if (errno != EAGAIN)
            perror("v4l_convert");
        return;
    }
    /* converted frame is now available in dst_buf */
    p = dst_buf;
    len = fmt.fmt.pix.sizeimage;
}
Example #7
int V4LCamera::exec() {
    char header [50];
    sprintf(header,"P6\n%d %d 255\n",_fmt.fmt.pix.width,_fmt.fmt.pix.height);
    unsigned char *dst_buf;
#ifdef _DEBUG_CAPTURE_THREADS
    qDebug() << "[CAMERA_THREAD::V4L_CAMERA] - exec() - Started";
#endif //_DEBUG_CAPTURE_THREADS
    while(true){
        myTimer.start();
        do {
            FD_ZERO(&_fds);
            FD_SET(_fd, &_fds);
            _r = select(_fd + 1, &_fds, NULL, NULL, &_tv);
        } while ((_r == -1 && (errno == EINTR)));
        if (_r == -1)
            qFatal("[CAMERA_THREAD::V4L_CAMERA] - exec() - select() returns error!");
        CLEAR(_buf);
        _buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        _buf.memory = V4L2_MEMORY_MMAP;
        V4LSettings::qioctl(_fd, VIDIOC_DQBUF, &_buf, "V4LCamera::exec()");

        dst_buf = (unsigned char*)malloc(_fmt.fmt.pix.sizeimage);
        if (v4lconvert_convert(_v4lconvert_data, &_src_fmt, &_fmt,
                                (unsigned char*)_buffers[_buf.index].start, _buf.bytesused,
                                dst_buf, _fmt.fmt.pix.sizeimage) < 0) {
#ifdef _DEBUG_CAPTURE_THREADS
            if (errno != EAGAIN)
                qWarning("[CAMERA_THREAD::V4L_CAMERA] - exec() - v4l_convert() returns error");
#endif //_DEBUG_CAPTURE_THREADS
        }
        unsigned char* asil=(unsigned char*)malloc(_fmt.fmt.pix.sizeimage+qstrlen(header));
        memmove(asil, dst_buf, _fmt.fmt.pix.sizeimage);
        memmove(asil+qstrlen(header), asil, _fmt.fmt.pix.sizeimage);
        memcpy(asil,header,qstrlen(header));

        Mat *qq = new Mat(_fmt.fmt.pix.height, _fmt.fmt.pix.width, CV_8UC2, asil);
        if(!qq->empty()) {
            Mat newFrame = qq->clone();
            switch(_fmt.fmt.pix.height) {
                case 720:
                case 800:
                case 960:
                    cvtColor(newFrame, newFrame, CV_YUV2RGB_YUYV);
                    break;
                case 1024:
                    cvtColor(newFrame, newFrame, CV_YUV2BGR_Y422);
                    break;
                default:
                    cvtColor(newFrame, newFrame, CV_YUV2RGB_Y422);
                    break;
            }
            _cvMatbuffer.enqueue(newFrame);
            emit availableFrame();
        }
        free(asil);
        free(dst_buf);
        delete qq;
        V4LSettings::qioctl(_fd, VIDIOC_QBUF, &_buf, "V4LCamera::exec()");
        _fps = 1000.0/myTimer.elapsed();
    }
    return 0;
}
Example #8
void capture()
{
	unsigned char *yuv420_buf;
	int yuv420_size = fmt.fmt.pix.width*fmt.fmt.pix.height*3/2;
	int src_size, i, j, k, nframe;

	struct v4lconvert_data *lib;

	buffersize = calc_size(fmt.fmt.pix.sizeimage);

	buf_alloc_mmap();

	lib = v4lconvert_create(fd_cam);
	if(!lib) {
		perror("v4lconvert_create");
		exit(1);
	}

	yuv420_buf = malloc(yuv420_size);
	if(!yuv420_buf){
		perror("malloc");
		exit(1);
	}

	if(ioctl(fd_cam, VIDIOC_STREAMON, &reqbuf.type) < 0) {
		perror("VIDIOC_STREAMON");
		exit(1);
	}

	dst_fmt = fmt;
	dst_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;

	if(!v4lconvert_supported_dst_format(dst_fmt.fmt.pix.pixelformat)){
		puts("v4lconvert_supported_dst_format");
		exit(1);
	}

	for(errno = 0, nframe = 0; nframe < NUM_FRAME; nframe++) {
		struct v4l2_buffer cam_buf = {0};

		cam_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		cam_buf.memory = reqbuf.memory;

		if(ioctl(fd_cam, VIDIOC_DQBUF, &cam_buf) < 0) {
			perror("VIDIOC_DQBUF");
			exit(1);
		}

		printf("DQBUF: index=%d, seq=%d, time=%d.%06d\n", cam_buf.index, cam_buf.sequence, cam_buf.timestamp.tv_sec, cam_buf.timestamp.tv_usec);

		src_size = cam_buf.length;

		if(v4lconvert_convert(lib, &fmt, &dst_fmt, (void*)buf_pointer[cam_buf.index], src_size, yuv420_buf, yuv420_size) <= 0){
			perror("v4lconvert_convert");
			exit(1);
		}

		cam_buf.length = buffersize;
		if(ioctl(fd_cam, VIDIOC_QBUF, &cam_buf) < 0) {
			perror("VIDIOC_QBUF");
			exit(1);
		}

		write(fd_out, yuv420_buf, yuv420_size);
	}

	if(ioctl(fd_cam, VIDIOC_STREAMOFF, &reqbuf.type) < 0) {
		perror("VIDIOC_STREAMOFF");
		exit(1);
	}

	free(yuv420_buf);

	v4lconvert_destroy(lib);

	free_buf_mmap();
}
Example #9
// process video data
void CaptureThread::run() {
	while (devam) {
		mutex.lock();
		do {
			FD_ZERO(&fds);
			FD_SET(fd, &fds);

			/* Timeout. */
			tv.tv_sec = 2;
			tv.tv_usec = 0;

			r = select(fd + 1, &fds, NULL, NULL, &tv);
		} while ((r == -1 && (errno == EINTR)));

		if (r == -1) {
			kDebug() << "select";
			quit();
			return;
		}

		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		xioctl(fd, VIDIOC_DQBUF, &buf);

		if (v4lconvert_convert(v4lconvert_data,
				&src_fmt,
				&fmt,
				(unsigned char*)buffers[buf.index].start, buf.bytesused,
				dst_buf, fmt.fmt.pix.sizeimage) < 0) {
			if (errno != EAGAIN)
				kDebug() << "v4l_convert";
		}

		unsigned char* asil=(unsigned char*)malloc(fmt.fmt.pix.sizeimage+qstrlen(header));
		memmove(asil, dst_buf, fmt.fmt.pix.sizeimage);
		memmove(asil+qstrlen(header), asil, fmt.fmt.pix.sizeimage);
		memcpy(asil,header,qstrlen(header));

		QImage *qq=new QImage();

		if(qq->loadFromData(asil,fmt.fmt.pix.sizeimage+qstrlen(header), "PPM")){
			QTransform outTransform;

			if(Settings::mirror()){
				// scaling x * -1 - making the output image mirror.
				outTransform.scale(-1, 1);
			}

			if(Settings::flip()){
				// flipping y * -1
				outTransform.scale(1, -1);
			}

			emit renderedImage(qq->transformed(outTransform));
		}
		free(asil);
		delete qq;
		if (delay>0) {
			this->msleep(delay);
		}
		xioctl(fd, VIDIOC_QBUF, &buf);
		di++;
		mutex.unlock();
	}
}
Example #10
void CaptureThread::run(){
    //do real stuff
    fd = -1;
    dev_name = "/dev/video0";

    fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
    if (fd < 0) {
           qDebug("Cannot open device");
           //exit(EXIT_FAILURE);
           return;
    }


    static struct v4lconvert_data *v4lconvert_data;
    static struct v4l2_format src_fmt;
    static unsigned char *dst_buf;

    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width       = 640;
    fmt.fmt.pix.height      = 480;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
    fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;
    xioctl(fd, VIDIOC_S_FMT, &fmt);
    if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) {
           printf("Libv4l didn't accept RGB24 format. Can't proceed.\n");
           //exit(EXIT_FAILURE);
           return;
    }
    if ((fmt.fmt.pix.width != 640) || (fmt.fmt.pix.height != 480))
           printf("Warning: driver is sending image at %dx%d\n",
                   fmt.fmt.pix.width, fmt.fmt.pix.height);

    v4lconvert_data = v4lconvert_create(fd);
    if (v4lconvert_data == NULL)
        qDebug("v4lconvert_create");
    if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0)
        qDebug("v4lconvert_try_format");
    xioctl(fd, VIDIOC_S_FMT, &src_fmt);
    dst_buf = (unsigned char*)malloc(fmt.fmt.pix.sizeimage);

    CLEAR(req);
    req.count = 2;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    xioctl(fd, VIDIOC_REQBUFS, &req);

    buffers = (buffer*)calloc(req.count, sizeof(*buffers));
    for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
           CLEAR(buf);

           buf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
           buf.memory      = V4L2_MEMORY_MMAP;
           buf.index       = n_buffers;

           xioctl(fd, VIDIOC_QUERYBUF, &buf);

           buffers[n_buffers].length = buf.length;
           buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
                         PROT_READ | PROT_WRITE, MAP_SHARED,
                         fd, buf.m.offset);

           if (MAP_FAILED == buffers[n_buffers].start) {
                   qDebug("mmap");
                   //exit(EXIT_FAILURE);
                   return;
           }
    }

    for (int i = 0; i < n_buffers; ++i) {
           CLEAR(buf);
           buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
           buf.memory = V4L2_MEMORY_MMAP;
           buf.index = i;
           xioctl(fd, VIDIOC_QBUF, &buf);
    }
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    xioctl(fd, VIDIOC_STREAMON, &type);

    int di=0;
    char header[]="P6\n640 480 255\n";
    while(devam){
        /* this loop waits until frame data is available */
        do {
                FD_ZERO(&fds);
                FD_SET(fd, &fds);

                /* Timeout. */
                tv.tv_sec = 2;
                tv.tv_usec = 0;

                r = select(fd + 1, &fds, NULL, NULL, &tv);
        } while ((r == -1 && (errno == EINTR)));
        if (r == -1) {
                qDebug("select");
                //exit(1) ;
                return;
        }

        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        xioctl(fd, VIDIOC_DQBUF, &buf);

        try{
            
        if (v4lconvert_convert(v4lconvert_data,
                                &src_fmt,
                                &fmt,
                                (unsigned char*)buffers[buf.index].start, buf.bytesused,
                                dst_buf, fmt.fmt.pix.sizeimage) < 0) {
                if (errno != EAGAIN)
                        qDebug("v4l_convert");

        }


        unsigned char* asil=(unsigned char*)malloc(fmt.fmt.pix.sizeimage+qstrlen(header));
        memmove(asil, dst_buf, fmt.fmt.pix.sizeimage);
        memmove(asil+qstrlen(header), asil, fmt.fmt.pix.sizeimage);
        memcpy(asil,header,qstrlen(header));

        QImage qq;//=new QImage(dst_buf,640,480,QImage::Format_RGB32);

        if(qq.loadFromData(asil,fmt.fmt.pix.sizeimage+qstrlen(header),"PPM")){
            if(parent->isVisible()){
                QImage q1(qq);
                parent->img=q1;
                parent->update();
              //this->msleep(50);
            }
        //qApp->processEvents();
            if(asil)
                free(asil);
        }
        }catch(...){}
        xioctl(fd, VIDIOC_QBUF, &buf);
        di++;
   }
    try{
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMOFF, &type);
    for (int i = 0; i < n_buffers; ++i)
           v4l2_munmap(buffers[i].start, buffers[i].length);

        v4l2_close(fd);
    }catch(...){}
}
Example #11
static int v4l2_read_and_convert(int index, unsigned char *dest, int dest_size)
{
	const int max_tries = 10;
	int result, buf_size, tries = max_tries;

	buf_size = devices[index].dest_fmt.fmt.pix.sizeimage;

	if (devices[index].readbuf_size < buf_size) {
		unsigned char *new_buf;

		new_buf = realloc(devices[index].readbuf, buf_size);
		if (!new_buf)
			return -1;

		devices[index].readbuf = new_buf;
		devices[index].readbuf_size = buf_size;
	}

	do {
		result = SYS_READ(devices[index].fd, devices[index].readbuf, buf_size);
		if (result <= 0) {
			if (result && errno != EAGAIN) {
				int saved_err = errno;

				V4L2_LOG_ERR("reading: %s\n", strerror(errno));
				errno = saved_err;
			}
			return result;
		}

		result = v4lconvert_convert(devices[index].convert,
				&devices[index].src_fmt, &devices[index].dest_fmt,
				devices[index].readbuf, result, dest, dest_size);

		if (devices[index].first_frame) {
			/* Always treat convert errors as EAGAIN during the first few frames, as
			   some cams produce bad frames at the start of the stream
			   (hsync and vsync still syncing ??). */
			if (result < 0)
				errno = EAGAIN;
			devices[index].first_frame--;
		}

		if (result < 0) {
			int saved_err = errno;

			if (errno == EAGAIN)
				V4L2_LOG("warning error while converting frame data: %s",
						v4lconvert_get_error_message(devices[index].convert));
			else
				V4L2_LOG_ERR("converting / decoding frame data: %s",
						v4lconvert_get_error_message(devices[index].convert));

			errno = saved_err;
		}
		tries--;
	} while (result < 0 && errno == EAGAIN && tries);

	if (result < 0 && errno == EAGAIN) {
		V4L2_LOG_ERR("got %d consecutive frame decode errors, last error: %s",
				max_tries, v4lconvert_get_error_message(devices[index].convert));
	}

	return result;
}
Example #12
static int v4l2_dequeue_and_convert(int index, struct v4l2_buffer *buf,
		unsigned char *dest, int dest_size)
{
	const int max_tries = 10;
	int result, tries = max_tries;

	/* Make sure we have the real v4l2 buffers mapped */
	result = v4l2_map_buffers(index);
	if (result)
		return result;

	do {
		result = SYS_IOCTL(devices[index].fd, VIDIOC_DQBUF, buf);
		if (result) {
			if (errno != EAGAIN) {
				int saved_err = errno;

				V4L2_LOG_ERR("dequeuing buf: %s\n", strerror(errno));
				errno = saved_err;
			}
			return result;
		}

		devices[index].frame_queued &= ~(1 << buf->index);

		result = v4lconvert_convert(devices[index].convert,
				&devices[index].src_fmt, &devices[index].dest_fmt,
				devices[index].frame_pointers[buf->index],
				buf->bytesused, dest ? dest : (devices[index].convert_mmap_buf +
					buf->index * V4L2_FRAME_BUF_SIZE), dest_size);

		if (devices[index].first_frame) {
			/* Always treat convert errors as EAGAIN during the first few frames, as
			   some cams produce bad frames at the start of the stream
			   (hsync and vsync still syncing ??). */
			if (result < 0)
				errno = EAGAIN;
			devices[index].first_frame--;
		}

		if (result < 0) {
			int saved_err = errno;

			if (errno == EAGAIN)
				V4L2_LOG("warning error while converting frame data: %s",
						v4lconvert_get_error_message(devices[index].convert));
			else
				V4L2_LOG_ERR("converting / decoding frame data: %s",
						v4lconvert_get_error_message(devices[index].convert));

			v4l2_queue_read_buffer(index, buf->index);
			errno = saved_err;
		}
		tries--;
	} while (result < 0 && errno == EAGAIN && tries);

	if (result < 0 && errno == EAGAIN) {
		V4L2_LOG_ERR("got %d consecutive frame decode errors, last error: %s",
				max_tries, v4lconvert_get_error_message(devices[index].convert));
		errno = EAGAIN;
	}

	return result;
}
Example #13
static struct video_frame * vidcap_v4l2_grab(void *state, struct audio_frame **audio)
{
        struct vidcap_v4l2_state *s = (struct vidcap_v4l2_state *) state;
        struct video_frame *out;

        *audio = NULL;

        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;

        if(ioctl(s->fd, VIDIOC_DQBUF, &buf) != 0) {
                perror("Unable to dequeue buffer");
                return NULL;
        };

        out = vf_alloc_desc(s->desc);
        out->dispose = vidcap_v4l2_dispose_video_frame;

        if(!s->conversion_needed) {
                struct v4l2_dispose_deq_buffer_data *frame_data =
                        malloc(sizeof(struct v4l2_dispose_deq_buffer_data));
                frame_data->fd = s->fd;
                memcpy(&frame_data->buf, &buf, sizeof(buf));
                out->tiles[0].data = s->buffers[frame_data->buf.index].start;
                out->tiles[0].data_len = frame_data->buf.bytesused;
                out->dispose_udata = frame_data;
        } else {
                out->dispose_udata = NULL;
                out->tiles[0].data = (char *) malloc(out->tiles[0].data_len);
                int ret = v4lconvert_convert(s->convert,
                                &s->src_fmt,  /*  in */
                                &s->dst_fmt, /*  in */
                                s->buffers[buf.index].start,
                                buf.bytesused,
                                (unsigned char *) out->tiles[0].data,
                                out->tiles[0].data_len);

                // we do not need the driver buffer any more
                if (ioctl(s->fd, VIDIOC_QBUF, &buf) != 0) {
                        perror("Unable to enqueue buffer");
                };

                if(ret == -1) {
                        fprintf(stderr, "Error converting video.\n");
                        VIDEO_FRAME_DISPOSE(out);
                        return NULL;
                }

                out->tiles[0].data_len = ret;
        }

        s->frames++;

        struct timeval t;
        gettimeofday(&t, NULL);
        double seconds = tv_diff(t, s->t0);
        if (seconds >= 5) {
                float fps  = s->frames / seconds;
                log_msg(LOG_LEVEL_INFO, "[V4L2 capture] %d frames in %g seconds = %g FPS\n", s->frames, seconds, fps);
                s->t0 = t;
                s->frames = 0;
        }


        return out;
}
Example #14
static inline int video_grabframe (opi_video_device_t *dev, int raw, opi_video_frameinfo_t *finfo, int *buffer, int bufsize) /*{{{*/
{
	if (dev->fd < 0) {
		/* not open */
	} else if (dev->api == 1) {
		if (dev->use_mmap == 1) {
			struct video_mmap vmmap;
			int bnum = 0;
			int r;
			
			vmmap.frame = 0;
			vmmap.width = finfo->width;
			vmmap.height = finfo->height;
			vmmap.format = convert_pal_opi_to_v4l1 (finfo->format);
			
			while (((r = ioctl (dev->fd, VIDIOCMCAPTURE, &vmmap)) == -1) && (errno == EINTR));	/* retry */
			if (r >= 0) {
				while (((r = ioctl (dev->fd, VIDIOCSYNC, &bnum)) == -1) && (errno == EINTR));		/* retry */
				if (r >= 0) {
					if (raw) {
						memcpy (buffer, dev->buffers[0].addr, bufsize);
					} else {
						v4lconvert_convert (
							(struct v4lconvert_data *) dev->convert,
							&(dev->src), &(dev->dst),
							(unsigned char *) dev->buffers[0].addr, dev->buffers[0].size, 
							(unsigned char *) buffer, bufsize);
						rgb24_rgb32 (finfo->width, finfo->height, (unsigned char *) buffer);
					}
					return 1;
				}
			}
		} else {
			/* FIXME! */
		}
	} else if (dev->api == 2) {
		if (dev->use_mmap == 1) {
			struct v4l2_buffer buf;
			int result = 0;
			int r;
			
			memset (&buf, 0, sizeof (buf));
			buf.type	= V4L2_BUF_TYPE_VIDEO_CAPTURE;
			buf.memory	= V4L2_MEMORY_MMAP;
			
			if (dev->oneshot) {
				buf.index = 0;

				while (((r = ioctl (dev->fd, VIDIOC_QBUF, &buf)) == -1) && (errno == EINTR));	/* retry */
				if (r < 0)
					return 0;
				while (((r = ioctl (dev->fd, VIDIOC_STREAMON, &(buf.type))) == -1) && (errno == EINTR));	/* retry */
				if (r < 0)
					return 0;
			}

			while (((r = ioctl (dev->fd, VIDIOC_DQBUF, &buf)) == -1) && (errno == EINTR));	/* retry */
			if (r >= 0) {
				int idx = buf.index;

				if (raw) {
					memcpy (buffer, dev->buffers[idx].addr, bufsize);
				} else {
					v4lconvert_convert (
						(struct v4lconvert_data *) dev->convert,
						&(dev->src), &(dev->dst),
						(unsigned char *) dev->buffers[idx].addr, dev->buffers[idx].size, 
						(unsigned char *) buffer, bufsize);
					rgb24_rgb32 (finfo->width, finfo->height, (unsigned char *) buffer);
				}

				if (!dev->oneshot)
					while (((r = ioctl (dev->fd, VIDIOC_QBUF, &buf)) == -1) && (errno == EINTR));  /* retry */
				
				result = 1;
			}
			
			if (dev->oneshot) {
				while (((r = ioctl (dev->fd, VIDIOC_STREAMOFF, &(buf.type))) == -1) && (errno == EINTR));	/* retry */
			}

			return result;
		} else {
			/* FIXME! */
		}
	}
	return 0;
}
Example #15
void Videostreaming::capFrame()
{
    __u32 buftype = m_buftype;
    v4l2_plane planes[VIDEO_MAX_PLANES];
    v4l2_buffer buf;
    unsigned char *tempSrcBuffer = NULL, *tempDestBuffer = NULL, *copyDestBuffer = NULL;
    unsigned char *tempCu130DestBuffer = NULL, *tempCu130SrcBuffer = NULL;
    unsigned char *tempCu40DestBuffer = NULL, *irBuffer = NULL;
    unsigned char *tempLogtechSrcBuffer = NULL, *tempLogtechDestBuffer = NULL;
    unsigned char *displaybuf = NULL;
    unsigned short int *tempCu40SrcBuffer = NULL;
    //Modified by Nithyesh
    //Previously it was int err = 0, x, y;
    int err = 0;
    __u32 x, y;
    bool again, v4l2convert = false;

    memset(planes, 0, sizeof(planes));
    buf.length = VIDEO_MAX_PLANES;
    buf.m.planes = planes;
    if (!dqbuf_mmap(buf, buftype, again)) {
        closeDevice();
        unsigned char *m_data=NULL;
        QImage tempImage(m_data,320,240,QImage::Format_RGB888);
        qImage = QPixmap::fromImage(tempImage);
        update();
        emit deviceUnplugged("Disconnected","Device Not Found");
        emit logCriticalHandle("Device disconnected");
        return;
    }
    if (again) {
        return;
    }
    if (buf.flags & V4L2_BUF_FLAG_ERROR) {        
        qbuf(buf);
        return;
    }
#if 0
    switch(m_capSrcFormat.fmt.pix.pixelformat) {
        case V4L2_PIX_FMT_YUYV: {
            if((width*height*2) == buf.bytesused){
                validFrame = true;
            }

        }
        break;
        case V4L2_PIX_FMT_SGRBG8:{
            // if bayer - 8 bit camera
            // {
                if ((width*height) == buf.bytesused)
                    validFrame = true;
            // }
            // if bayer - 8 bit + pad camera
            // {
                if ((width*height*2) == buf.bytesused)
                    validFrame = true;
            // }
        }
        break;
        case V4L2_PIX_FMT_MJPEG:{
            validFrame = true;
            break;
        }
        default:
        // To do: for other color spaces
        break;

    }

    if (validFrame != true){
        qbuf(buf);
        qDebug()<<"validFrame != true";
     //   return;
    }
#endif

    if (camDeviceName == "e-con's CX3 RDK with M\nT9P031" || camDeviceName == "See3CAM_12CUNIR" || camDeviceName == "See3CAM_CU51")
    {
        tempSrcBuffer = (unsigned char *)malloc(width * height * 2);
        tempDestBuffer = (unsigned char *)malloc(width * height << 1);
        copyDestBuffer = tempDestBuffer;

        memcpy(tempSrcBuffer, m_buffers[buf.index].start[0], buf.bytesused);

        for(__u32 l=0; l<(width*height*2); l=l+2) /* Y16 to YUYV conversion */
        {
            *tempDestBuffer++ = (((tempSrcBuffer[l] & 0xF0) >> 4) | (tempSrcBuffer[l+1] & 0x0F) << 4);
            *tempDestBuffer++ = 0x80;
        }
        m_capSrcFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
        err = v4lconvert_convert(m_convertData, &m_capSrcFormat, &m_capDestFormat,
                                 (unsigned char *)copyDestBuffer, buf.bytesused,
                                 m_capImage->bits(), m_capDestFormat.fmt.pix.sizeimage);
        v4l2convert = true;

    }else if (camDeviceName == "See3CAM_CU40")    {