/* enumerate frames (formats, sizes and fps)
 *
 * Walks every pixel format the device reports via VIDIOC_ENUM_FMT and,
 * for each one, enumerates the supported frame sizes/rates into m_AllFmts
 * (via EnumFrameSizes). Afterwards selects:
 *   - m_BestPictureFmt: largest size; on a size tie, the LOWEST fps
 *     (lower fps usually means better per-frame quality)
 *   - m_BestPreviewFmt: highest fps; on an fps tie, the largest size
 *
 * returns: true always — a failure to enumerate sizes for an individual
 *          pixel format is only logged, not treated as fatal */
bool V4L2Camera::EnumFrameFormats()
{
    LOGD("V4L2Camera::EnumFrameFormats");
    struct v4l2_fmtdesc fmt;

    // Start with no modes
    m_AllFmts.clear();

    memset(&fmt, 0, sizeof(fmt));
    fmt.index = 0;
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    // VIDIOC_ENUM_FMT fails with EINVAL once fmt.index runs past the last
    // supported format; that terminates the loop.
    while (ioctl(fd, VIDIOC_ENUM_FMT, &fmt) >= 0) {
        fmt.index++;
        LOGD("{ pixelformat = '%c%c%c%c', description = '%s' }",
                fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,
                (fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,
                fmt.description);

        // enumerate frame sizes for this pixel format
        if (!EnumFrameSizes(fmt.pixelformat)) {
            LOGE("  Unable to enumerate frame sizes.");
        }
    }

    // Now, select the best preview format and the best PictureFormat
    m_BestPreviewFmt = SurfaceDesc();
    m_BestPictureFmt = SurfaceDesc();

    unsigned int i;
    for (i = 0; i < m_AllFmts.size(); i++) {
        SurfaceDesc s = m_AllFmts[i];

        // Prioritize size over everything else when taking pictures. use the
        // least fps possible, as that usually means better quality
        if ((s.getSize()  > m_BestPictureFmt.getSize()) ||
            (s.getSize() == m_BestPictureFmt.getSize() && s.getFps() < m_BestPictureFmt.getFps() )
            ) {
            m_BestPictureFmt = s;
        }

        // Prioritize fps, then size when doing preview
        if ((s.getFps()  > m_BestPreviewFmt.getFps()) ||
            (s.getFps() == m_BestPreviewFmt.getFps() && s.getSize() > m_BestPreviewFmt.getSize() )
            ) {
            m_BestPreviewFmt = s;
        }

    }

    return true;
}
// Beispiel #2 (0)  -- scraping artifact from the snippet site, kept as a comment
/* Draws a sprite into destDesc at (x, y), decompressing it if needed.
 *
 * sprBuf layout (as checked below): byte 0 must be 1, byte 1 must be 2;
 * byte 2 selects the encoding: 2 = raw pixels, 1 = compressed stream.
 * The compressed path is an LZ77/LZSS-style decoder: a 4096-byte sliding
 * dictionary (memBuffer), a per-8-item command bitmask (cmdVar), literal
 * bytes when the command bit is set, and (offset, length) back-references
 * otherwise. transp != 0 makes zero bytes transparent (not written).
 *
 * Returns 1 on success, 0 if the header bytes don't match.
 *
 * NOTE(review): this function is TRUNCATED in this source chunk — the
 * final else-branch after the compressed path (and the closing brace)
 * are missing, so the code below is documented but left byte-identical. */
char Video_v2::spriteUncompressor(byte *sprBuf, int16 srcWidth, int16 srcHeight,
	    int16 x, int16 y, int16 transp, SurfaceDesc &destDesc) {
	byte *memBuffer;
	byte *srcPtr, *destPtr, *linePtr;
	byte temp;
	uint32 sourceLeft;
	uint16 cmdVar;
	int16 curWidth, curHeight;
	int16 offset;
	int16 counter2;
	int16 bufPos;
	int16 strLen;
	int16 lenCmd;

	_vm->validateVideoMode(destDesc._vidMode);

	// Header sanity checks: bytes 0 and 1 are fixed magic values.
	if (sprBuf[0] != 1)
		return 0;

	if (sprBuf[1] != 2)
		return 0;

	if (sprBuf[2] == 2) {
		// Uncompressed: pixel data starts right after the 3-byte header.
		SurfaceDesc sourceDesc(0x13, srcWidth, srcHeight, sprBuf + 3);
		Video::drawSprite(sourceDesc, destDesc, 0, 0, srcWidth - 1,
		    srcHeight - 1, x, y, transp);
		return 1;
	} else if (sprBuf[2] == 1) {
		// Compressed: allocate the 4096-byte dictionary (+ slack).
		memBuffer = new byte[4370];
		assert(memBuffer);

		srcPtr = sprBuf + 3;
		// Total number of source pixels left to emit.
		sourceLeft = READ_LE_UINT32(srcPtr);

		destPtr = destDesc.getVidMem() + destDesc.getWidth() * y + x;

		curWidth = 0;
		curHeight = 0;

		linePtr = destPtr;
		srcPtr += 4;

		// An optional 0x1234/0x5678 signature selects an alternate
		// dictionary start position and extended-length command code.
		if ((READ_LE_UINT16(srcPtr) == 0x1234) && (READ_LE_UINT16(srcPtr + 2) == 0x5678)) {
			srcPtr += 4;
			bufPos = 273;
			lenCmd = 18;
		} else {
			lenCmd = 100;
			bufPos = 4078;
		}

		// Dictionary is pre-filled with spaces up to the start position.
		memset(memBuffer, 32, bufPos);

		cmdVar = 0;
		while (1) {
			// Refill the 8-bit command mask when exhausted; the 0xFF00
			// guard bits count how many command bits remain.
			cmdVar >>= 1;
			if ((cmdVar & 0x100) == 0) {
				cmdVar = *srcPtr | 0xFF00;
				srcPtr++;
			}
			if ((cmdVar & 1) != 0) {
				// Literal byte: copy to screen (honoring transparency)
				// and append to the dictionary.
				temp = *srcPtr++;
				if ((temp != 0) || (transp == 0))
					*destPtr = temp;
				destPtr++;
				curWidth++;
				if (curWidth >= srcWidth) {
					// End of sprite row: wrap to the next output line.
					curWidth = 0;
					linePtr += destDesc.getWidth();
					destPtr = linePtr;
					curHeight++;
					if (curHeight >= srcHeight)
						break;
				}
				sourceLeft--;
				memBuffer[bufPos] = temp;
				bufPos++;
				bufPos %= 4096;
				if (sourceLeft == 0)
					break;
			} else {
				// Back-reference: 12-bit dictionary offset + 4-bit length
				// (length lenCmd means an extended length byte follows).
				offset = *srcPtr++;
				offset |= (*srcPtr & 0xF0) << 4;
				strLen = (*srcPtr & 0x0F) + 3;
				*srcPtr++; // NOTE(review): dereference result discarded — likely meant plain srcPtr++
				if (strLen == lenCmd)
					strLen = *srcPtr++ + 18;

				for (counter2 = 0; counter2 < strLen; counter2++) {
					temp = memBuffer[(offset + counter2) % 4096];
					if ((temp != 0) || (transp == 0))
						*destPtr = temp;
					destPtr++;

					curWidth++;
					if (curWidth >= srcWidth) {
						curWidth = 0;
						linePtr += destDesc.getWidth();
						destPtr = linePtr;
						curHeight++;
						if (curHeight >= srcHeight) {
							delete[] memBuffer;
							return 1;
						}
					}
					memBuffer[bufPos] = temp;
					bufPos++;
					bufPos %= 4096;
				}

				if (strLen >= ((int32) sourceLeft)) {
					delete[] memBuffer;
					return 1;
				} else
					sourceLeft--;
			}
		}
	} else
// NOTE(review): source chunk is truncated here — the trailing else-branch
// and the function's closing brace are missing from this view.
/* Initialize the capture device for the requested output size and rate.
 *
 * Steps:
 *   1. Pick, from m_AllFmts, the smallest enumerated frame size that fully
 *      covers (width x height), with the closest fps as tiebreak.
 *   2. Negotiate a pixel format in preference order; when the chosen size
 *      differs from the request (cropping needed), only croppable formats
 *      are considered. VIDIOC_TRY_FMT probes, VIDIOC_S_FMT commits.
 *   3. Set the frame rate (non-fatal on failure) and, for (M)JPEG formats,
 *      try to raise the compression quality to maximum.
 *   4. Request NB_BUFFER mmap buffers, map them and queue them.
 *
 * args:
 *   width, height: desired output frame size
 *   fps:           desired frame rate
 *
 * returns: 0 on success; a negative value (ioctl result, -1 or -ENOMEM)
 *          on failure.
 *
 * NOTE(review): on a mid-loop mmap/QBUF failure, buffers already mapped
 * are not unmapped here — presumably the Uninit/close path cleans them
 * up; verify against the caller. */
int V4L2Camera::Init(int width, int height, int fps)
{
    LOGD("V4L2Camera::Init");

    /* Pixel formats in preference order (best first) */
    static const struct {
        int fmt;            /* PixelFormat */
        int bpp;            /* bytes per pixel */
        int isplanar;       /* If format is planar or not */
        int allowscrop;     /* If we support cropping with this pixel format */
    } pixFmtsOrder[] = {
        {V4L2_PIX_FMT_YUYV,     2,0,1},
        {V4L2_PIX_FMT_YVYU,     2,0,1},
        {V4L2_PIX_FMT_UYVY,     2,0,1},
        {V4L2_PIX_FMT_YYUV,     2,0,1},
        {V4L2_PIX_FMT_SPCA501,  2,0,0},
        {V4L2_PIX_FMT_SPCA505,  2,0,0},
        {V4L2_PIX_FMT_SPCA508,  2,0,0},
        {V4L2_PIX_FMT_YUV420,   0,1,0},
        {V4L2_PIX_FMT_YVU420,   0,1,0},
        {V4L2_PIX_FMT_NV12,     0,1,0},
        {V4L2_PIX_FMT_NV21,     0,1,0},
        {V4L2_PIX_FMT_NV16,     0,1,0},
        {V4L2_PIX_FMT_NV61,     0,1,0},
        {V4L2_PIX_FMT_Y41P,     0,0,0},
        {V4L2_PIX_FMT_SGBRG8,   0,0,0},
        {V4L2_PIX_FMT_SGRBG8,   0,0,0},
        {V4L2_PIX_FMT_SBGGR8,   0,0,0},
        {V4L2_PIX_FMT_SRGGB8,   0,0,0},
        {V4L2_PIX_FMT_BGR24,    3,0,1},
        {V4L2_PIX_FMT_RGB24,    3,0,1},
        {V4L2_PIX_FMT_MJPEG,    0,1,0},
        {V4L2_PIX_FMT_JPEG,     0,1,0},
        {V4L2_PIX_FMT_GREY,     1,0,1},
        {V4L2_PIX_FMT_Y16,      2,0,1},
    };

    int ret;

    // If no formats, break here
    if (m_AllFmts.isEmpty()) {
        LOGE("No video formats available");
        return -1;
    }

    // Try to get the closest match: smallest covering area, then closest fps
    SurfaceDesc closest;
    int closestDArea = -1;
    int closestDFps = -1;
    unsigned int i;
    int area = width * height;
    for (i = 0; i < m_AllFmts.size(); i++) {
        SurfaceDesc sd = m_AllFmts[i];

        // Always choose a bigger or equal surface
        if (sd.getWidth() >= width &&
            sd.getHeight() >= height) {

            int difArea = sd.getArea() - area;
            int difFps = my_abs(sd.getFps() - fps);

            LOGD("Trying format: (%d x %d), Fps: %d [difArea:%d, difFps:%d, cDifArea:%d, cDifFps:%d]",sd.getWidth(),sd.getHeight(),sd.getFps(), difArea, difFps, closestDArea, closestDFps);
            if (closestDArea < 0 ||
                difArea < closestDArea ||
                (difArea == closestDArea && difFps < closestDFps)) {

                // Store approximation
                closestDArea = difArea;
                closestDFps = difFps;

                // And the new surface descriptor
                closest = sd;
            }
        }
    }

    if (closestDArea == -1) {
        LOGE("Size not available: (%d x %d)",width,height);
        return -1;
    }

    LOGD("Selected format: (%d x %d), Fps: %d",closest.getWidth(),closest.getHeight(),closest.getFps());

    // Check if we will have to crop the captured image
    bool crop = width != closest.getWidth() || height != closest.getHeight();

    // Iterate through pixel formats from best to worst; after the loop,
    // `i` indexes the accepted entry (used again below).
    ret = -1;
    for (i=0; i < (sizeof(pixFmtsOrder) / sizeof(pixFmtsOrder[0])); i++) {

        // If we will need to crop, make sure to only select formats we can crop...
        if (!crop || pixFmtsOrder[i].allowscrop) {

            memset(&videoIn->format,0,sizeof(videoIn->format));
            videoIn->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            videoIn->format.fmt.pix.width = closest.getWidth();
            videoIn->format.fmt.pix.height = closest.getHeight();
            videoIn->format.fmt.pix.pixelformat = pixFmtsOrder[i].fmt;

            ret = ioctl(fd, VIDIOC_TRY_FMT, &videoIn->format);
            if (ret >= 0) {
                break;
            }
        }
    }
    if (ret < 0) {
        LOGE("Open: VIDIOC_TRY_FMT Failed: %s", strerror(errno));
        return ret;
    }

    /* Set the format */
    memset(&videoIn->format,0,sizeof(videoIn->format));
    videoIn->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->format.fmt.pix.width = closest.getWidth();
    videoIn->format.fmt.pix.height = closest.getHeight();
    videoIn->format.fmt.pix.pixelformat = pixFmtsOrder[i].fmt;
    ret = ioctl(fd, VIDIOC_S_FMT, &videoIn->format);
    if (ret < 0) {
        LOGE("Open: VIDIOC_S_FMT Failed: %s", strerror(errno));
        return ret;
    }


    /* Query for the effective video format used */
    memset(&videoIn->format,0,sizeof(videoIn->format));
    videoIn->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(fd, VIDIOC_G_FMT, &videoIn->format);
    if (ret < 0) {
        LOGE("Open: VIDIOC_G_FMT Failed: %s", strerror(errno));
        return ret;
    }

    /* Note VIDIOC_S_FMT may change width and height. */

    /* Buggy driver paranoia: enforce sane minimum stride and image size. */
    unsigned int min = videoIn->format.fmt.pix.width * 2;
    if (videoIn->format.fmt.pix.bytesperline < min)
        videoIn->format.fmt.pix.bytesperline = min;
    min = videoIn->format.fmt.pix.bytesperline * videoIn->format.fmt.pix.height;
    if (videoIn->format.fmt.pix.sizeimage < min)
        videoIn->format.fmt.pix.sizeimage = min;

    /* Store the pixel formats we will use */
    videoIn->outWidth           = width;
    videoIn->outHeight          = height;
    videoIn->outFrameSize       = width * height << 1; // Calculate the expected output framesize in YUYV
    videoIn->capBytesPerPixel   = pixFmtsOrder[i].bpp;

    /* Now calculate cropping margins, if needed, rounding to even */
    int startX = ((closest.getWidth() - width) >> 1) & (-2);
    int startY = ((closest.getHeight() - height) >> 1) & (-2);

    /* Avoid crashing if the mode found is smaller than the requested */
    if (startX < 0) {
        videoIn->outWidth += startX;
        startX = 0;
    }
    if (startY < 0) {
        videoIn->outHeight += startY;
        startY = 0;
    }

    /* Calculate the starting offset into each captured frame */
    videoIn->capCropOffset = (startX * videoIn->capBytesPerPixel) +
            (videoIn->format.fmt.pix.bytesperline * startY);

    LOGI("Cropping from origin: %dx%d - size: %dx%d  (offset:%d)",
        startX,startY,
        videoIn->outWidth,videoIn->outHeight,
        videoIn->capCropOffset);

    /* sets video device frame rate */
    memset(&videoIn->params,0,sizeof(videoIn->params));
    videoIn->params.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->params.parm.capture.timeperframe.numerator = 1;
    videoIn->params.parm.capture.timeperframe.denominator = closest.getFps();

    /* Set the framerate. If it fails, it wont be fatal */
    if (ioctl(fd,VIDIOC_S_PARM,&videoIn->params) < 0) {
        LOGE("VIDIOC_S_PARM error: Unable to set %d fps", closest.getFps());
    }

    /* Gets video device defined frame rate (not real - consider it a maximum value) */
    if (ioctl(fd,VIDIOC_G_PARM,&videoIn->params) < 0) {
        LOGE("VIDIOC_G_PARM - Unable to get timeperframe");
    }

    LOGI("Actual format: (%d x %d), Fps: %d, pixfmt: '%c%c%c%c', bytesperline: %d",
        videoIn->format.fmt.pix.width,
        videoIn->format.fmt.pix.height,
        videoIn->params.parm.capture.timeperframe.denominator,
        videoIn->format.fmt.pix.pixelformat & 0xFF, (videoIn->format.fmt.pix.pixelformat >> 8) & 0xFF,
        (videoIn->format.fmt.pix.pixelformat >> 16) & 0xFF, (videoIn->format.fmt.pix.pixelformat >> 24) & 0xFF,
        videoIn->format.fmt.pix.bytesperline);

    /* Configure JPEG quality, if dealing with those formats */
    if (videoIn->format.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG ||
        videoIn->format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {

        /* Get the compression format */
        ioctl(fd,VIDIOC_G_JPEGCOMP, &videoIn->jpegcomp);

        /* Set to maximum */
        videoIn->jpegcomp.quality = 100;

        /* Try to set it. BUGFIX: the error branch must run when the ioctl
         * FAILS (< 0) — the original tested >= 0, logging an error and
         * possibly flagging "not supported" on success. */
        if(ioctl(fd,VIDIOC_S_JPEGCOMP, &videoIn->jpegcomp) < 0)
        {
            LOGE("VIDIOC_S_COMP:");
            if(errno == EINVAL)
            {
                videoIn->jpegcomp.quality = -1; //not supported
                LOGE("   compression control not supported\n");
            }
        }

        /* gets video stream jpeg compression parameters */
        if(ioctl(fd,VIDIOC_G_JPEGCOMP, &videoIn->jpegcomp) >= 0) {
            LOGD("VIDIOC_G_COMP:\n");
            LOGD("    quality:      %i\n", videoIn->jpegcomp.quality);
            LOGD("    APPn:         %i\n", videoIn->jpegcomp.APPn);
            LOGD("    APP_len:      %i\n", videoIn->jpegcomp.APP_len);
            LOGD("    APP_data:     %s\n", videoIn->jpegcomp.APP_data);
            LOGD("    COM_len:      %i\n", videoIn->jpegcomp.COM_len);
            LOGD("    COM_data:     %s\n", videoIn->jpegcomp.COM_data);
            LOGD("    jpeg_markers: 0x%x\n", videoIn->jpegcomp.jpeg_markers);
        } else {
            LOGE("VIDIOC_G_COMP:");
            if(errno == EINVAL) {
                videoIn->jpegcomp.quality = -1; //not supported
                LOGE("   compression control not supported\n");
            }
        }
    }

    /* Check if camera can handle NB_BUFFER buffers */
    memset(&videoIn->rb,0,sizeof(videoIn->rb));
    videoIn->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->rb.memory = V4L2_MEMORY_MMAP;
    videoIn->rb.count = NB_BUFFER;

    ret = ioctl(fd, VIDIOC_REQBUFS, &videoIn->rb);
    if (ret < 0) {
        LOGE("Init: VIDIOC_REQBUFS failed: %s", strerror(errno));
        return ret;
    }

    /* Map and queue every capture buffer */
    for (int i = 0; i < NB_BUFFER; i++) {

        memset (&videoIn->buf, 0, sizeof (struct v4l2_buffer));
        videoIn->buf.index = i;
        videoIn->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        videoIn->buf.memory = V4L2_MEMORY_MMAP;

        ret = ioctl (fd, VIDIOC_QUERYBUF, &videoIn->buf);
        if (ret < 0) {
            LOGE("Init: Unable to query buffer (%s)", strerror(errno));
            return ret;
        }

        videoIn->mem[i] = mmap (0,
                                videoIn->buf.length,
                                PROT_READ | PROT_WRITE,
                                MAP_SHARED,
                                fd,
                                videoIn->buf.m.offset);

        if (videoIn->mem[i] == MAP_FAILED) {
            LOGE("Init: Unable to map buffer (%s)", strerror(errno));
            return -1;
        }

        ret = ioctl(fd, VIDIOC_QBUF, &videoIn->buf);
        if (ret < 0) {
            LOGE("Init: VIDIOC_QBUF Failed");
            return -1;
        }

        nQueued++;
    }

    // Reserve temporary buffers, if they will be needed
    size_t tmpbuf_size=0;
    switch (videoIn->format.fmt.pix.pixelformat)
    {
        case V4L2_PIX_FMT_JPEG:
        case V4L2_PIX_FMT_MJPEG:
        case V4L2_PIX_FMT_UYVY:
        case V4L2_PIX_FMT_YVYU:
        case V4L2_PIX_FMT_YYUV:
        case V4L2_PIX_FMT_YUV420: // only needs 3/2 bytes per pixel but we alloc 2 bytes per pixel
        case V4L2_PIX_FMT_YVU420: // only needs 3/2 bytes per pixel but we alloc 2 bytes per pixel
        case V4L2_PIX_FMT_Y41P:   // only needs 3/2 bytes per pixel but we alloc 2 bytes per pixel
        case V4L2_PIX_FMT_NV12:
        case V4L2_PIX_FMT_NV21:
        case V4L2_PIX_FMT_NV16:
        case V4L2_PIX_FMT_NV61:
        case V4L2_PIX_FMT_SPCA501:
        case V4L2_PIX_FMT_SPCA505:
        case V4L2_PIX_FMT_SPCA508:
        case V4L2_PIX_FMT_GREY:
        case V4L2_PIX_FMT_Y16:

        case V4L2_PIX_FMT_YUYV:
            //  YUYV doesn't need a temp buffer but we will set it if/when
            //  video processing disable control is checked (bayer processing).
            //            (logitech cameras only)
            break;

        case V4L2_PIX_FMT_SGBRG8: //0
        case V4L2_PIX_FMT_SGRBG8: //1
        case V4L2_PIX_FMT_SBGGR8: //2
        case V4L2_PIX_FMT_SRGGB8: //3
            // Raw 8 bit bayer
            // when grabbing use:
            //    bayer_to_rgb24(bayer_data, RGB24_data, width, height, 0..3)
            //    rgb2yuyv(RGB24_data, pFrameBuffer, width, height)

            // alloc a temp buffer for converting to YUYV
            // rgb buffer for decoding bayer data
            tmpbuf_size = videoIn->format.fmt.pix.width * videoIn->format.fmt.pix.height * 3;
            if (videoIn->tmpBuffer)
                free(videoIn->tmpBuffer);
            videoIn->tmpBuffer = (uint8_t*)calloc(1, tmpbuf_size);
            if (!videoIn->tmpBuffer) {
                LOGE("couldn't calloc %lu bytes of memory for frame buffer\n",
                    (unsigned long) tmpbuf_size);
                return -ENOMEM;
            }


            break;

        case V4L2_PIX_FMT_RGB24: //rgb or bgr (8-8-8)
        case V4L2_PIX_FMT_BGR24:
            break;

        default:
            LOGE("Should never arrive (1)- exit fatal !!\n");
            return -1;
    }

    return 0;
}