/* Example #1 (label: 0) */
 /*
  * Convert a greyscale image to RGB.
  *
  * Goes through the shared YUYV intermediate buffer: the grey plane is
  * first expanded to YUYV, then that is converted to RGB. Image size
  * comes from the file-scope globals w/h.
  */
 void grey_to_rgb(unsigned char* rgb, unsigned char* grey)
 {
     /* Make sure the shared intermediate buffer is allocated. */
     init_buffers();

     /* grey -> YUYV intermediate, then YUYV -> RGB output. */
     grey_to_yuyv(yuyv_buffer, grey, w, h);
     yuyv_to_rgb(rgb, yuyv_buffer);
 }
/* Example #2 (label: 0) */
 /*
  * Convert a greyscale image to BGR.
  *
  * Same pipeline as grey_to_rgb, but with BGR channel order on output:
  * grey -> YUYV (shared intermediate buffer) -> BGR. Image size comes
  * from the file-scope globals w/h.
  */
 void grey_to_bgr(unsigned char* bgr, unsigned char* grey)
 {
     /* Make sure the shared intermediate buffer is allocated. */
     init_buffers();

     /* grey -> YUYV intermediate, then YUYV -> BGR output. */
     grey_to_yuyv(yuyv_buffer, grey, w, h);
     yuyv_to_bgr(bgr, yuyv_buffer);
 }
/**
 * Grab one raw frame from the V4L2 capture device and convert it to
 * packed YUYV (4:2:2) in the caller-supplied buffer.
 *
 * Sequence: VIDIOC_DQBUF a filled mmap buffer from the driver, convert
 * its pixel format to YUYV into frameBuffer, then VIDIOC_QBUF the buffer
 * back so the driver can refill it.
 *
 * @param frameBuffer  Destination for the converted YUYV image; must hold
 *                     at least videoIn->outFrameSize bytes.
 * @param maxSize      Size of frameBuffer in bytes. If smaller than
 *                     videoIn->outFrameSize the frame is dropped (logged)
 *                     but the V4L2 buffer is still re-queued.
 *
 * Returns nothing; ioctl failures are logged and cause an early return.
 * NOTE(review): if VIDIOC_QBUF fails after a successful DQBUF, one buffer
 * is left un-queued - presumably recovered by the stream restart path;
 * confirm against the streaming loop.
 */
void V4L2Camera::GrabRawFrame (void *frameBuffer, int maxSize)
{
    LOG_FRAME("V4L2Camera::GrabRawFrame: frameBuffer:%p, len:%d",frameBuffer,maxSize);
    int ret;

    /* DQ: dequeue a filled capture buffer from the driver */
    memset(&videoIn->buf,0,sizeof(videoIn->buf));
    videoIn->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->buf.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(fd, VIDIOC_DQBUF, &videoIn->buf);
    if (ret < 0) {
        /* Log tag fixed: previously said "GrabPreviewFrame" (copy/paste leftover) */
        LOGE("GrabRawFrame: VIDIOC_DQBUF Failed");
        return;
    }

    nDequeued++;

    // Calculate the stride of the output image (YUYV) in bytes: 2 bytes/pixel
    int strideOut = videoIn->outWidth << 1;

    // And the pointer to the start of the image.
    // NOTE(review): capCropOffset is presumably a byte offset into the
    // mmap'ed buffer selecting the crop origin - confirm where it is set.
    uint8_t* src = (uint8_t*)videoIn->mem[videoIn->buf.index] + videoIn->capCropOffset;

    LOG_FRAME("V4L2Camera::GrabRawFrame - Got Raw frame (%dx%d) (buf:%d@0x%p, len:%d)",videoIn->format.fmt.pix.width,videoIn->format.fmt.pix.height,videoIn->buf.index,src,videoIn->buf.bytesused);

    /* Avoid crashing! - Make sure there is enough room in the output buffer! */
    if (maxSize < videoIn->outFrameSize) {

        LOGE("V4L2Camera::GrabRawFrame: Insufficient space in output buffer: Required: %d, Got %d - DROPPING FRAME",videoIn->outFrameSize,maxSize);

    } else {

        /* Convert from the driver's capture format to packed YUYV */
        switch (videoIn->format.fmt.pix.pixelformat)
        {
            case V4L2_PIX_FMT_JPEG:
            case V4L2_PIX_FMT_MJPEG:
                /* NOTE(review): HEADERFRAME1 is presumably the minimum
                   plausible (M)JPEG payload size - confirm its definition. */
                if(videoIn->buf.bytesused <= HEADERFRAME1) {
                    // Prevent crash on empty image
                    LOGE("Ignoring empty buffer ...\n");
                    break;
                }

                if (jpeg_decode((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight) < 0) {
                    LOGE("jpeg decode errors\n");
                    break;
                }
                break;

            /* Packed 4:2:2 variants: reorder bytes into YUYV, honoring the
               source stride (bytesperline) which may include padding. */
            case V4L2_PIX_FMT_UYVY:
                uyvy_to_yuyv((uint8_t*)frameBuffer, strideOut,
                             src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YVYU:
                yvyu_to_yuyv((uint8_t*)frameBuffer, strideOut,
                             src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YYUV:
                yyuv_to_yuyv((uint8_t*)frameBuffer, strideOut,
                             src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            /* Planar / semi-planar formats: helpers take only width/height */
            case V4L2_PIX_FMT_YUV420:
                yuv420_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YVU420:
                yvu420_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV12:
                nv12_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV21:
                nv21_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV16:
                nv16_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV61:
                nv61_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_Y41P:
                y41p_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            /* Luma-only formats: chroma is synthesized by the helpers */
            case V4L2_PIX_FMT_GREY:
                grey_to_yuyv((uint8_t*)frameBuffer, strideOut,
                            src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_Y16:
                y16_to_yuyv((uint8_t*)frameBuffer, strideOut,
                            src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            /* Sunplus webcam proprietary formats */
            case V4L2_PIX_FMT_SPCA501:
                s501_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SPCA505:
                s505_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SPCA508:
                s508_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YUYV:
                /* Already YUYV: copy row by row because the source stride
                   (bytesperline) may be larger than the output stride. */
                {
                    int h;
                    uint8_t* pdst = (uint8_t*)frameBuffer;
                    uint8_t* psrc = src;
                    int ss = videoIn->outWidth << 1;
                    for (h = 0; h < videoIn->outHeight; h++) {
                        memcpy(pdst,psrc,ss);
                        pdst += strideOut;
                        psrc += videoIn->format.fmt.pix.bytesperline;
                    }
                }
                break;

            /* Bayer mosaics: two-stage conversion via tmpBuffer. The last
               bayer_to_rgb24 argument selects the CFA pixel order. */
            case V4L2_PIX_FMT_SGBRG8: //0
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 0);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                            (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SGRBG8: //1
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 1);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                            (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SBGGR8: //2
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 2);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                            (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SRGGB8: //3
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 3);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                            (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
                break;

            /* 24-bit RGB variants */
            case V4L2_PIX_FMT_RGB24:
                rgb_to_yuyv((uint8_t*) frameBuffer, strideOut,
                            src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_BGR24:
                bgr_to_yuyv((uint8_t*) frameBuffer, strideOut,
                            src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            default:
                LOGE("error grabbing: unknown format: %i\n", videoIn->format.fmt.pix.pixelformat);
                break;
        }

        LOG_FRAME("V4L2Camera::GrabRawFrame - Copied frame to destination 0x%p",frameBuffer);
    }

    /* And Queue the buffer again so the driver can refill it */
    ret = ioctl(fd, VIDIOC_QBUF, &videoIn->buf);
    if (ret < 0) {
        /* Log tag fixed: previously said "GrabPreviewFrame" (copy/paste leftover) */
        LOGE("GrabRawFrame: VIDIOC_QBUF Failed");
        return;
    }

    nQueued++;

    LOG_FRAME("V4L2Camera::GrabRawFrame - Queued buffer");

}