Example #1
0
/*
 * decode video stream ( from raw_frame to frame buffer (yuyv format))
 * args:
 *    vd - pointer to device data
 *    frame - pointer to frame buffer
 *
 * asserts:
 *    vd is not null
 *
 * returns: error code ( 0 - E_OK)
 */
int decode_v4l2_frame(v4l2_dev_t *vd, v4l2_frame_buff_t *frame)
{
	/*asserts*/
	assert(vd != NULL);

	/* refuse to decode an empty/unset raw frame */
	if(!frame->raw_frame || frame->raw_frame_size == 0)
	{
		fprintf(stderr, "V4L2_CORE: not decoding empty raw frame (frame of size %i at 0x%p)\n", (int) frame->raw_frame_size, frame->raw_frame);
		return E_DECODE_ERR;
	}

	if(verbosity > 3)
		printf("V4L2_CORE: decoding raw frame of size %i at 0x%p\n",
			(int) frame->raw_frame_size, frame->raw_frame );

	int ret = E_OK;

	int width = vd->format.fmt.pix.width;
	int height = vd->format.fmt.pix.height;

	frame->isKeyframe = 0; /*reset*/

	/*
	 * use the requested format since it may differ
	 * from format.fmt.pix.pixelformat (muxed H264)
	 */
	int format = vd->requested_fmt;

	switch (format)
	{
		case V4L2_PIX_FMT_H264:
			/*
			 * get the h264 frame in the tmp_buffer
			 */
			frame->h264_frame_size = demux_h264(
				frame->h264_frame,
				frame->raw_frame,
				frame->raw_frame_size,
				frame->h264_frame_max_size);

			/*
			 * store SPS and PPS info (usually the first two NALU)
			 * and check/store the last IDR frame
			 */
			store_extra_data(vd, frame);

			/*
			 * check for keyframe and store it
			 */
			frame->isKeyframe = is_h264_keyframe(vd, frame);

			/* decode only after an IDR frame has been seen */
			if(vd->h264_last_IDR_size > 0)
			{
				/*no need to convert output*/
				h264_decode(frame->yuv_frame, frame->h264_frame, frame->h264_frame_size);
			}
			break;

		case V4L2_PIX_FMT_JPEG:
		case V4L2_PIX_FMT_MJPEG:
			if(frame->raw_frame_size <= HEADERFRAME1)
			{
				/* Prevent crash on empty image */
				fprintf(stderr, "V4L2_CORE: (jpeg decoder) Ignoring empty buffer\n");
				ret = E_DECODE_ERR;
				return (ret);
			}

			ret = jpeg_decode(frame->yuv_frame, frame->raw_frame, frame->raw_frame_size);

			if(verbosity > 3)
				fprintf(stderr, "V4L2_CORE: (jpeg decoder) decode frame of size %i\n", ret);
			ret = E_OK;
			break;

		case V4L2_PIX_FMT_UYVY:
			uyvy_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_VYUY:
			vyuy_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YVYU:
			yvyu_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YYUV:
			yyuv_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YUV444:
			y444_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YUV555:
			yuvo_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YUV565:
			yuvp_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YUV32:
			yuv4_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YUV420:
			/* already YU12: clamp size then straight copy */
			if(frame->raw_frame_size > (width * height * 3/2))
				frame->raw_frame_size = width * height * 3/2;
			memcpy(frame->yuv_frame, frame->raw_frame, frame->raw_frame_size);
			break;

		case V4L2_PIX_FMT_YUV422P:
			yuv422p_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YVU420:
			yv12_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_NV12:
			nv12_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_NV21:
			nv21_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_NV16:
			nv16_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_NV61:
			nv61_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_NV24:
			nv24_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_NV42:
			nv42_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_Y41P:
			y41p_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_GREY:
			grey_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_Y10BPACK:
			y10b_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_Y16:
			y16_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;
#ifdef V4L2_PIX_FMT_Y16_BE
		case V4L2_PIX_FMT_Y16_BE:
			y16x_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;
#endif
		case V4L2_PIX_FMT_SPCA501:
			s501_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_SPCA505:
			s505_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_SPCA508:
			s508_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YUYV:
			if(vd->isbayer>0)
			{
				if (!(frame->tmp_buffer))
				{
					/* rgb buffer for decoding bayer data*/
					frame->tmp_buffer_max_size = width * height * 3;
					frame->tmp_buffer = calloc(frame->tmp_buffer_max_size, sizeof(uint8_t));
					if(frame->tmp_buffer == NULL)
					{
						fprintf(stderr, "V4L2_CORE: FATAL memory allocation failure (v4l2core_frame_decode): %s\n", strerror(errno));
						exit(-1);
					}
				}
				/*convert raw bayer to iyuv*/
				bayer_to_rgb24 (frame->raw_frame, frame->tmp_buffer, width, height, vd->bayer_pix_order);
				rgb24_to_yu12(frame->yuv_frame, frame->tmp_buffer, width, height);
			}
			else
				yuyv_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		/* bayer formats: the trailing literal is the pixel order
		 * passed to bayer_to_rgb24() */
		case V4L2_PIX_FMT_SGBRG8: //0
			bayer_to_rgb24 (frame->raw_frame, frame->tmp_buffer, width, height, 0);
			rgb24_to_yu12(frame->yuv_frame, frame->tmp_buffer, width, height);
			break;

		case V4L2_PIX_FMT_SGRBG8: //1
			bayer_to_rgb24 (frame->raw_frame, frame->tmp_buffer, width, height, 1);
			rgb24_to_yu12(frame->yuv_frame, frame->tmp_buffer, width, height);
			break;

		case V4L2_PIX_FMT_SBGGR8: //2
			bayer_to_rgb24 (frame->raw_frame, frame->tmp_buffer, width, height, 2);
			rgb24_to_yu12(frame->yuv_frame, frame->tmp_buffer, width, height);
			break;

		case V4L2_PIX_FMT_SRGGB8: //3
			bayer_to_rgb24 (frame->raw_frame, frame->tmp_buffer, width, height, 3);
			rgb24_to_yu12(frame->yuv_frame, frame->tmp_buffer, width, height);
			break;

		case V4L2_PIX_FMT_RGB24:
			rgb24_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_BGR24:
			bgr24_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB332:
			rgb1_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB565:
			rgbp_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB565X:
			rgbr_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB444:
#ifdef V4L2_PIX_FMT_ARGB444
		case V4L2_PIX_FMT_ARGB444:
		case V4L2_PIX_FMT_XRGB444: //same as above but without alpha channel
#endif
			ar12_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB555:
#ifdef V4L2_PIX_FMT_ARGB555
		case V4L2_PIX_FMT_ARGB555:
		case V4L2_PIX_FMT_XRGB555: //same as above but without alpha channel
#endif
			ar15_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB555X:
#ifdef V4L2_PIX_FMT_ARGB4555X
		case V4L2_PIX_FMT_ARGB555X:
		case V4L2_PIX_FMT_XRGB555X: //same as above but without alpha channel
#endif
			ar15x_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_BGR666:
			bgrh_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_BGR32:
#ifdef V4L2_PIX_FMT_ABGR32
		case V4L2_PIX_FMT_ABGR32:
		case V4L2_PIX_FMT_XBGR32: //same as above but without alpha channel
#endif
			ar24_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB32:
#ifdef V4L2_PIX_FMT_ARGB32
		case V4L2_PIX_FMT_ARGB32:
		case V4L2_PIX_FMT_XRGB32: //same as above but without alpha channel
#endif
			ba24_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		default:
			fprintf(stderr, "V4L2_CORE: error decoding frame: unknown format: %i\n", format);
			ret = E_UNKNOWN_ERR;
			break;
	}

	return ret;
}
/*
 * Grab one frame from the capture device and convert it to YUYV.
 * args:
 *    frameBuffer - destination buffer (YUYV, outWidth x outHeight)
 *    maxSize     - destination buffer capacity in bytes
 *
 * Dequeues a V4L2 buffer, converts it from the negotiated pixel format
 * into YUYV in frameBuffer, then re-queues the buffer. Frames are
 * dropped (with a log message) if the destination is too small.
 */
void V4L2Camera::GrabRawFrame (void *frameBuffer, int maxSize)
{
    LOG_FRAME("V4L2Camera::GrabRawFrame: frameBuffer:%p, len:%d",frameBuffer,maxSize);
    int ret;

    /* DQ */
    memset(&videoIn->buf,0,sizeof(videoIn->buf));
    videoIn->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->buf.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(fd, VIDIOC_DQBUF, &videoIn->buf);
    if (ret < 0) {
        LOGE("GrabPreviewFrame: VIDIOC_DQBUF Failed");
        return;
    }

    nDequeued++;

    // Calculate the stride of the output image (YUYV) in bytes
    int strideOut = videoIn->outWidth << 1;

    // And the pointer to the start of the image
    uint8_t* src = (uint8_t*)videoIn->mem[videoIn->buf.index] + videoIn->capCropOffset;

    LOG_FRAME("V4L2Camera::GrabRawFrame - Got Raw frame (%dx%d) (buf:%d@0x%p, len:%d)",videoIn->format.fmt.pix.width,videoIn->format.fmt.pix.height,videoIn->buf.index,src,videoIn->buf.bytesused);

    /* Avoid crashing! - Make sure there is enough room in the output buffer! */
    if (maxSize < videoIn->outFrameSize) {

        LOGE("V4L2Camera::GrabRawFrame: Insufficient space in output buffer: Required: %d, Got %d - DROPPING FRAME",videoIn->outFrameSize,maxSize);

    } else {

        switch (videoIn->format.fmt.pix.pixelformat)
        {
            case V4L2_PIX_FMT_JPEG:
            case V4L2_PIX_FMT_MJPEG:
                if(videoIn->buf.bytesused <= HEADERFRAME1) {
                    // Prevent crash on empty image
                    LOGE("Ignoring empty buffer ...\n");
                    break;
                }

                if (jpeg_decode((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight) < 0) {
                    LOGE("jpeg decode errors\n");
                    break;
                }
                break;

            case V4L2_PIX_FMT_UYVY:
                uyvy_to_yuyv((uint8_t*)frameBuffer, strideOut,
                             src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YVYU:
                yvyu_to_yuyv((uint8_t*)frameBuffer, strideOut,
                             src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YYUV:
                yyuv_to_yuyv((uint8_t*)frameBuffer, strideOut,
                             src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YUV420:
                yuv420_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YVU420:
                yvu420_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV12:
                nv12_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV21:
                nv21_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV16:
                nv16_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV61:
                nv61_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_Y41P:
                y41p_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_GREY:
                grey_to_yuyv((uint8_t*)frameBuffer, strideOut,
                            src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_Y16:
                y16_to_yuyv((uint8_t*)frameBuffer, strideOut,
                            src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SPCA501:
                s501_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SPCA505:
                s505_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SPCA508:
                s508_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YUYV:
                {
                    /* source is already YUYV: row-by-row copy honouring the
                     * capture stride (bytesperline). One output row is
                     * exactly strideOut (= outWidth * 2) bytes. */
                    uint8_t* pdst = (uint8_t*)frameBuffer;
                    uint8_t* psrc = src;
                    for (int h = 0; h < videoIn->outHeight; h++) {
                        memcpy(pdst,psrc,strideOut);
                        pdst += strideOut;
                        psrc += videoIn->format.fmt.pix.bytesperline;
                    }
                }
                break;

            /* bayer formats: trailing literal selects the pixel order */
            case V4L2_PIX_FMT_SGBRG8: //0
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 0);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                            (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SGRBG8: //1
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 1);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                            (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SBGGR8: //2
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 2);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                            (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SRGGB8: //3
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 3);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                            (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_RGB24:
                rgb_to_yuyv((uint8_t*) frameBuffer, strideOut,
                            src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_BGR24:
                bgr_to_yuyv((uint8_t*) frameBuffer, strideOut,
                            src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            default:
                LOGE("error grabbing: unknown format: %i\n", videoIn->format.fmt.pix.pixelformat);
                break;
        }

        LOG_FRAME("V4L2Camera::GrabRawFrame - Copied frame to destination 0x%p",frameBuffer);
    }

    /* And Queue the buffer again */
    ret = ioctl(fd, VIDIOC_QBUF, &videoIn->buf);
    if (ret < 0) {
        LOGE("GrabPreviewFrame: VIDIOC_QBUF Failed");
        return;
    }

    nQueued++;

    LOG_FRAME("V4L2Camera::GrabRawFrame - Queued buffer");

}