Esempio n. 1
0
/*
 * Alloc image buffers for decoding video stream
 * args:
 *   vd - pointer to video device data
 *
 * asserts:
 *   vd is not null
 *
 * returns: error code  (0- E_OK)
 *
 * note: allocation failure for a frame buffer is fatal (exit), matching the
 *       rest of the core; only decoder-init and unknown-format errors return.
 */
int alloc_v4l2_frames(v4l2_dev_t *vd)
{
	/*assertions*/
	assert(vd != NULL);

	if(verbosity > 2)
		printf("V4L2_CORE: allocating frame buffers\n");

	/*clean any previous frame buffers*/
	clean_v4l2_frames(vd);

	int ret = E_OK;
	int i = 0;

	int width = vd->format.fmt.pix.width;
	int height = vd->format.fmt.pix.height;

	if(width <= 0 || height <= 0)
		return E_ALLOC_ERR;

	/*
	 * YUV 4:2:0 output frame: 3/2 bytes per pixel.
	 * Computed in size_t to avoid int overflow on very large resolutions.
	 */
	size_t framesizeIn = ((size_t) width * height * 3) / 2;
	size_t framebuf_size = 0;

	switch (vd->requested_fmt)
	{
		case V4L2_PIX_FMT_H264:
			/*init h264 context*/
			ret = h264_init_decoder(width, height);
			if(ret)
			{
				fprintf(stderr, "V4L2_CORE: couldn't init h264 decoder\n");
				return ret;
			}

			/*frame queue: one compressed h264 buffer + one decoded yuv buffer per slot*/
			for(i=0; i<vd->frame_queue_size; ++i)
			{
				/*1 byte per pixel is a safe upper bound for a compressed frame*/
				vd->frame_queue[i].h264_frame_max_size = (size_t) width * height;
				vd->frame_queue[i].h264_frame = calloc(vd->frame_queue[i].h264_frame_max_size, sizeof(uint8_t));
				if(vd->frame_queue[i].h264_frame == NULL)
				{
					fprintf(stderr, "V4L2_CORE: FATAL memory allocation failure (alloc_v4l2_frames): %s\n", strerror(errno));
					exit(-1);
				}

				vd->frame_queue[i].yuv_frame = calloc(framesizeIn, sizeof(uint8_t));
				if(vd->frame_queue[i].yuv_frame == NULL)
				{
					fprintf(stderr, "V4L2_CORE: FATAL memory allocation failure (alloc_v4l2_frames): %s\n", strerror(errno));
					exit(-1);
				}
			}

			/*keep a copy of the last IDR frame (used to re-sync the stream)*/
			vd->h264_last_IDR = calloc((size_t) width * height, sizeof(uint8_t));
			if(vd->h264_last_IDR == NULL)
			{
				fprintf(stderr, "V4L2_CORE: FATAL memory allocation failure (alloc_v4l2_frames): %s\n", strerror(errno));
				exit(-1);
			}
			vd->h264_last_IDR_size = 0; /*reset (no frame stored)*/
			break;

		case V4L2_PIX_FMT_JPEG:
		case V4L2_PIX_FMT_MJPEG:
			/*init jpeg decoder*/
			ret = jpeg_init_decoder(width, height);
			if(ret)
			{
				fprintf(stderr, "V4L2_CORE: couldn't init jpeg decoder\n");
				return ret;
			}

			/*frame queue: decoded yuv buffer only (raw jpeg is read in place)*/
			for(i=0; i<vd->frame_queue_size; ++i)
			{
				vd->frame_queue[i].yuv_frame = calloc(framesizeIn, sizeof(uint8_t));
				if(vd->frame_queue[i].yuv_frame == NULL)
				{
					fprintf(stderr, "V4L2_CORE: FATAL memory allocation failure (alloc_v4l2_frames): %s\n", strerror(errno));
					exit(-1);
				}
			}
			break;

		case V4L2_PIX_FMT_RGB24:
		case V4L2_PIX_FMT_BGR24:
		case V4L2_PIX_FMT_BGR32:
		case V4L2_PIX_FMT_RGB32:
		case V4L2_PIX_FMT_RGB332:
		case V4L2_PIX_FMT_RGB565:
		case V4L2_PIX_FMT_RGB565X:
		case V4L2_PIX_FMT_RGB444:
		case V4L2_PIX_FMT_RGB555:
		case V4L2_PIX_FMT_RGB555X:
		case V4L2_PIX_FMT_BGR666:
		case V4L2_PIX_FMT_UYVY:
		case V4L2_PIX_FMT_VYUY:
		case V4L2_PIX_FMT_YVYU:
		case V4L2_PIX_FMT_YYUV:
		case V4L2_PIX_FMT_YUV444:
		case V4L2_PIX_FMT_YUV555:
		case V4L2_PIX_FMT_YUV565:
		case V4L2_PIX_FMT_YUV32:
		case V4L2_PIX_FMT_YUV422P:
		case V4L2_PIX_FMT_YUV420:
		case V4L2_PIX_FMT_YVU420:
		case V4L2_PIX_FMT_Y41P:
		case V4L2_PIX_FMT_NV12:
		case V4L2_PIX_FMT_NV21:
		case V4L2_PIX_FMT_NV16:
		case V4L2_PIX_FMT_NV61:
		case V4L2_PIX_FMT_NV24:
		case V4L2_PIX_FMT_NV42:
		case V4L2_PIX_FMT_SPCA501:
		case V4L2_PIX_FMT_SPCA505:
		case V4L2_PIX_FMT_SPCA508:
		case V4L2_PIX_FMT_GREY:
		case V4L2_PIX_FMT_Y10BPACK:
		case V4L2_PIX_FMT_Y16:
#ifdef V4L2_PIX_FMT_Y16_BE
		case V4L2_PIX_FMT_Y16_BE:
#endif
#ifdef V4L2_PIX_FMT_ABGR32
		case V4L2_PIX_FMT_ABGR32:
		case V4L2_PIX_FMT_XBGR32:
#endif
#ifdef V4L2_PIX_FMT_ARGB32
		case V4L2_PIX_FMT_ARGB32:
		case V4L2_PIX_FMT_XRGB32:
#endif
#ifdef V4L2_PIX_FMT_ARGB444
		case V4L2_PIX_FMT_ARGB444:
		case V4L2_PIX_FMT_XRGB444:
#endif
#ifdef V4L2_PIX_FMT_ARGB555
		case V4L2_PIX_FMT_ARGB555:
		case V4L2_PIX_FMT_XRGB555:
#endif
#ifdef V4L2_PIX_FMT_ARGB555X
		case V4L2_PIX_FMT_ARGB555X:
		case V4L2_PIX_FMT_XRGB555X:
#endif
			/*uncompressed formats: converted into the yuv frame buffer*/
			framebuf_size = framesizeIn;
			/*frame queue*/
			for(i=0; i<vd->frame_queue_size; ++i)
			{
				vd->frame_queue[i].yuv_frame = calloc(framebuf_size, sizeof(uint8_t));
				if(vd->frame_queue[i].yuv_frame == NULL)
				{
					fprintf(stderr, "V4L2_CORE: FATAL memory allocation failure (alloc_v4l2_frames): %s\n", strerror(errno));
					exit(-1);
				}
			}
			break;

		case V4L2_PIX_FMT_YUYV:
			/*
			 * YUYV doesn't need a temp buffer but we will set it if/when
			 *  video processing disable is set (bayer processing).
			 *            (logitech cameras only)
			 */
			framebuf_size = framesizeIn;
			/*frame queue*/
			for(i=0; i<vd->frame_queue_size; ++i)
			{
				vd->frame_queue[i].yuv_frame = calloc(framebuf_size, sizeof(uint8_t));
				if(vd->frame_queue[i].yuv_frame == NULL)
				{
					fprintf(stderr, "V4L2_CORE: FATAL memory allocation failure (alloc_v4l2_frames): %s\n", strerror(errno));
					exit(-1);
				}
			}
			break;

		case V4L2_PIX_FMT_SGBRG8: /*0*/
		case V4L2_PIX_FMT_SGRBG8: /*1*/
		case V4L2_PIX_FMT_SBGGR8: /*2*/
		case V4L2_PIX_FMT_SRGGB8: /*3*/
			/*
			 * Raw 8 bit bayer
			 * when grabbing use:
			 *    bayer_to_rgb24(bayer_data, RGB24_data, width, height, 0..3)
			 *    rgb2yuyv(RGB24_data, vd->framebuffer, width, height)
			 */
			framebuf_size = framesizeIn;
			/*frame queue*/
			for(i=0; i<vd->frame_queue_size; ++i)
			{
				/* alloc a temp buffer for converting to YUYV*/
				/* rgb buffer for decoding bayer data (3 bytes per pixel)*/
				vd->frame_queue[i].tmp_buffer_max_size = (size_t) width * height * 3;
				vd->frame_queue[i].tmp_buffer = calloc(vd->frame_queue[i].tmp_buffer_max_size, sizeof(uint8_t));
				if(vd->frame_queue[i].tmp_buffer == NULL)
				{
					fprintf(stderr, "V4L2_CORE: FATAL memory allocation failure (alloc_v4l2_frames): %s\n", strerror(errno));
					exit(-1);
				}
				vd->frame_queue[i].yuv_frame = calloc(framebuf_size, sizeof(uint8_t));
				if(vd->frame_queue[i].yuv_frame == NULL)
				{
					fprintf(stderr, "V4L2_CORE: FATAL memory allocation failure (alloc_v4l2_frames): %s\n", strerror(errno));
					exit(-1);
				}
			}
			break;

		default:
			/*
			 * we check formats against a supported formats list
			 * so we should never have to alloc for an unknown format:
			 * release anything left over and report the error
			 */
			fprintf(stderr, "V4L2_CORE: (v4l2uvc.c) should never arrive (1)- exit fatal !!\n");
			ret = E_UNKNOWN_ERR;

			free(vd->h264_last_IDR); /*free(NULL) is a no-op*/
			vd->h264_last_IDR = NULL;
			/*frame queue*/
			for(i=0; i<vd->frame_queue_size; ++i)
			{
				vd->frame_queue[i].raw_frame = NULL;
				free(vd->frame_queue[i].yuv_frame);
				vd->frame_queue[i].yuv_frame = NULL;
				free(vd->frame_queue[i].tmp_buffer);
				vd->frame_queue[i].tmp_buffer = NULL;
				free(vd->frame_queue[i].h264_frame);
				vd->frame_queue[i].h264_frame = NULL;
			}
			return (ret);
	}

	/* set every framebuffer to black (y=0x00 u=0x80 v=0x80) by default */
	size_t y_plane_size = (size_t) width * height;
	for(i=0; i<vd->frame_queue_size; ++i)
	{
		uint8_t *pframe = vd->frame_queue[i].yuv_frame;
		memset(pframe, 0x00, y_plane_size);                    /* Y plane */
		memset(pframe + y_plane_size, 0x80, y_plane_size / 2); /* U and V samples */
	}
	return (ret);
}
Esempio n. 2
0
/*
 * H.264 decoder test application entry point.
 *
 * Parses the command line (or uses fixed file paths when built with
 * __CC_ARM, where no command line is available), then decodes the input
 * bitstream frame by frame, writing raw YUV frames to the output file.
 *
 * returns: MMES_NO_ERROR in all cases (errors are reported on stderr and
 *          take the cleanup path; NOTE(review): callers cannot distinguish
 *          failure from success via the exit code — confirm this is intended)
 */
xint 
main(int argc, char *argv[])
{
    FILE    *fp_bit = NULL, *fp_yuvout = NULL;
    xint    EndOFFrame = 1;
    xint    f_init_decoder = 1;
    
#if __CC_ARM
    /* fixed file locations for the ARM build (no command line) */
    if((fp_bit = fopen("../../../../../temp/foreman_26.264", "rb")) == NULL)
    {
        fprintf(stderr, "Cannot open bit_file !\n");
        goto free_all_memory;
    }
    if((fp_yuvout = fopen("../../../../../temp/output.yuv", "wb")) == NULL)
    {
        fprintf(stderr, "Cannot open decode file !\n");
        goto free_all_memory;
    }
#else
    xint    idx;
    /* zero-initialize so a failed/short option parse never reads garbage */
    xint    start_ptn_frame_no = 0, end_ptn_frame_no = 0;
    xint    start_log_frame_no = 0, end_log_frame_no = 0;
    xint    h264_pattern_options = 0, h264_pattern = 0;
    xint    h264_log_options = 0, h264_log_mode = 0;

    for(idx = 1 ; idx < argc ; idx++)
    {
        if(!strcmp(argv[idx], "-h"))
        {
            print_help();
            return 0;
        }
        else if(!strcmp(argv[idx], "-i"))
        {
            /* guard against a missing argument: argv[argc] is NULL and
             * must not be handed to fopen */
            if(idx + 1 >= argc)
            {
                fprintf(stderr, "Missing file name after -i !\n");
                goto free_all_memory;
            }
            if((fp_bit = fopen(argv[idx+1], "rb")) == NULL)
            {
                fprintf(stderr, "Cannot open bit_file !\n");
                goto free_all_memory;
            }
            idx++;
        }
        else if(!strcmp(argv[idx], "-o"))
        {
            if(idx + 1 >= argc)
            {
                fprintf(stderr, "Missing file name after -o !\n");
                goto free_all_memory;
            }
            if((fp_yuvout = fopen(argv[idx+1], "wb")) == NULL)
            {
                fprintf(stderr, "Cannot open decode file !\n");
                goto free_all_memory;
            }
            idx++;
        }
        else if(!strcmp(argv[idx], "-p"))
        {
            /* -p takes three values: options (hex), start frame, end frame */
            if(idx + 3 >= argc)
            {
                fprintf(stderr, "Missing arguments after -p !\n");
                goto free_all_memory;
            }
            sscanf(argv[++idx], "%x", &h264_pattern_options);
            sscanf(argv[++idx], "%d", &start_ptn_frame_no);
            sscanf(argv[++idx], "%d", &end_ptn_frame_no);

            h264_pattern = h264_pattern_options;
        }
        else if(!strcmp(argv[idx], "-log"))
        {
            /* -log takes three values: options (hex), start frame, end frame */
            if(idx + 3 >= argc)
            {
                fprintf(stderr, "Missing arguments after -log !\n");
                goto free_all_memory;
            }
            sscanf(argv[++idx], "%x", &h264_log_options);
            sscanf(argv[++idx], "%d", &start_log_frame_no);
            sscanf(argv[++idx], "%d", &end_log_frame_no);

            h264_log_mode = h264_log_options;
        }
        else if(!strcmp(argv[idx], "-fn"))
        {
            if(idx + 1 >= argc)
            {
                fprintf(stderr, "Missing frame count after -fn !\n");
                goto free_all_memory;
            }
            sscanf(argv[++idx], "%d", &decoded_frame_num);
        }
    }
#endif

    if(!fp_bit || !fp_yuvout)
    {
        if(!fp_bit)    fprintf(stderr, "Error : Input file must be provided.\n");
        if(!fp_yuvout) fprintf(stderr, "Error : Output file must be provided.\n");
        goto free_all_memory;
    }

    init_nalu(&pDec->nal);

    pDec->frame_no = 0;

    while(!feof(fp_bit))
    {
        /* decode h264 header to retrieve decoding information */
        if(h264_decode_header(pDec, fp_bit) != MMES_NO_ERROR)
        {
            fprintf(stderr, "Decode h264 header error!\n");
            goto free_all_memory;
        }

        /* ================================================================== */
        /*                        Initialize codec                            */
        /* ================================================================== */
        /* f_init_decoder is set only for the first pass; later iterations
         * reuse the already-initialized decoder state */
        if (h264_init_decoder(pDec, f_init_decoder) != MMES_NO_ERROR)
        {
            fprintf(stderr, "Decoder initial error!\n");
            goto free_all_memory;
        }

        /* ================================================================== */
        /*                        Parse Slice Data                            */
        /* ================================================================== */
        parse_slice_data(pDec, &EndOFFrame);

        f_init_decoder = 0;
        if(EndOFFrame)
        {
            uint8  frame_type = IS_ISLICE(pDec->curr_slice->type) ? 'I' : 'P';

            /* emit the completed frame (with cropping applied) */
            write_frame(pDec->recf, pDec->width, pDec->height, 
                pDec->crop_left, pDec->crop_right, pDec->crop_top, pDec->crop_bottom,
                pDec->chroma_format_idc, fp_yuvout);
            
            adjust_ref_lists(pDec);
            
            pDec->cur_slice_no = pDec->cur_mb_no = 0;

            printf( "<%c> [%3d]\n", frame_type, pDec->frame_no );

            pDec->frame_no++;
        }
        /* optional frame-count limit set via -fn */
        if(decoded_frame_num > 0 && pDec->frame_no >= decoded_frame_num)
            break;
    }

    fprintf(stdout, "\nDecoding Info\n");
    fprintf(stdout, "---------------------\n");
    fprintf(stdout, "Decoded frame num : %d\n", pDec->frame_no);
    fprintf(stdout, "Frame Size : %dx%d\n", pDec->width, pDec->height);

    destroy_nalu(&pDec->nal);
    h264_free_decoder(pDec);
    
free_all_memory:

    if (fp_bit) fclose(fp_bit), fp_bit = NULL;
    if (fp_yuvout) fclose(fp_yuvout), fp_yuvout = NULL;
    if (pDec->fp_qcoeff) fclose(pDec->fp_qcoeff), pDec->fp_qcoeff = NULL;// @chingho 06 27 2005

    return MMES_NO_ERROR;
}
Esempio n. 3
0
static void h264_decode_frame(int f_width, int f_height, char *framedata, int framesize, int slice_type)
{
    VAStatus va_status;

    DebugLog(("%s: called for frame of %d bytes (%dx%d) slice_type=%d\n", __FUNCTION__, framesize, width, height, slice_type));

    /* Initialize decode pipeline if necessary */
    if ( (f_width > cur_width) || (f_height > cur_height) ) {
        if (va_dpy != NULL)
            h264_cleanup_decoder();
        cur_width = f_width;
        cur_height = f_height;

        h264_init_decoder(f_width, f_height);
        rfbClientLog("%s: decoder initialized\n", __FUNCTION__);
    }

    /* Decode frame */
    static VAPictureH264 va_picture_h264, va_old_picture_h264;

    /* The server should always send an I-frame when a new client connects
     * or when the resolution of the framebuffer changes, but we check
     * just in case.
     */
    if ( (slice_type != SLICE_TYPE_I) && (num_frames == 0) ) {
        rfbClientLog("First frame is not an I frame !!! Skipping!!!\n");
        return;
    }

    DebugLog(("%s: frame_id=%d va_surface_id[%d]=0x%x field_order_count=%d\n", __FUNCTION__, frame_id, sid, va_surface_id[sid], field_order_count));

    va_picture_h264.picture_id = va_surface_id[sid];
    va_picture_h264.frame_idx  = frame_id;
    va_picture_h264.flags = 0;
    va_picture_h264.BottomFieldOrderCnt = field_order_count;
    va_picture_h264.TopFieldOrderCnt = field_order_count;

    /* Set up picture parameter buffer */
    if (va_pic_param_buf_id[sid] == VA_INVALID_ID) {
        va_status = vaCreateBuffer(va_dpy, va_context_id, VAPictureParameterBufferType, sizeof(VAPictureParameterBufferH264), 1, NULL, &va_pic_param_buf_id[sid]);
        CHECK_VASTATUS(va_status, "vaCreateBuffer(PicParam)");
    }
    CHECK_SURF(va_surface_id[sid]);

    VAPictureParameterBufferH264 *pic_param_buf = NULL;
    va_status = vaMapBuffer(va_dpy, va_pic_param_buf_id[sid], (void **)&pic_param_buf);
    CHECK_VASTATUS(va_status, "vaMapBuffer(PicParam)");

    SetVAPictureParameterBufferH264(pic_param_buf, f_width, f_height);
    memcpy(&pic_param_buf->CurrPic, &va_picture_h264, sizeof(VAPictureH264));

    if (slice_type == SLICE_TYPE_P) {
        memcpy(&pic_param_buf->ReferenceFrames[0], &va_old_picture_h264, sizeof(VAPictureH264));
        pic_param_buf->ReferenceFrames[0].flags = 0;
    }
    else if (slice_type != SLICE_TYPE_I) {
        rfbClientLog("Frame type %d not supported!!!\n");
        return;
    }
    pic_param_buf->frame_num = frame_id;

    va_status = vaUnmapBuffer(va_dpy, va_pic_param_buf_id[sid]);
    CHECK_VASTATUS(va_status, "vaUnmapBuffer(PicParam)");

    /* Set up IQ matrix buffer */
    if (va_mat_param_buf_id[sid] == VA_INVALID_ID) {
        va_status = vaCreateBuffer(va_dpy, va_context_id, VAIQMatrixBufferType, sizeof(VAIQMatrixBufferH264), 1, NULL, &va_mat_param_buf_id[sid]);
        CHECK_VASTATUS(va_status, "vaCreateBuffer(IQMatrix)");
    }
    CHECK_SURF(va_surface_id[sid]);

    VAIQMatrixBufferH264 *iq_matrix_buf = NULL;
    va_status = vaMapBuffer(va_dpy, va_mat_param_buf_id[sid], (void **)&iq_matrix_buf);
    CHECK_VASTATUS(va_status, "vaMapBuffer(IQMatrix)");

    static const unsigned char m_MatrixBufferH264[]= {
        /* ScalingList4x4[6][16] */
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,
        /* ScalingList8x8[2][64] */
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
        0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00
    };

    memcpy(iq_matrix_buf, m_MatrixBufferH264, 224);
    va_status = vaUnmapBuffer(va_dpy, va_mat_param_buf_id[sid]);
    CHECK_VASTATUS(va_status, "vaUnmapBuffer(IQMatrix)");

    VABufferID buffer_ids[2];
    buffer_ids[0] = va_pic_param_buf_id[sid];
    buffer_ids[1] = va_mat_param_buf_id[sid];

    CHECK_SURF(va_surface_id[sid]);
    va_status = vaRenderPicture(va_dpy, va_context_id, buffer_ids, 2);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    /* Set up slice parameter buffer */
    if (va_sp_param_buf_id[sid] == VA_INVALID_ID) {
        va_status = vaCreateBuffer(va_dpy, va_context_id, VASliceParameterBufferType, sizeof(VASliceParameterBufferH264), 1, NULL, &va_sp_param_buf_id[sid]);
        CHECK_VASTATUS(va_status, "vaCreateBuffer(SliceParam)");
    }
    CHECK_SURF(va_surface_id[sid]);

    VASliceParameterBufferH264 *slice_param_buf = NULL;
    va_status = vaMapBuffer(va_dpy, va_sp_param_buf_id[sid], (void **)&slice_param_buf);
    CHECK_VASTATUS(va_status, "vaMapBuffer(SliceParam)");

    static int t2_first = 1;
    if (slice_type == SLICE_TYPE_I) {
        SetVASliceParameterBufferH264_Intra(slice_param_buf, t2_first);
        t2_first = 0;
    } else {
        SetVASliceParameterBufferH264(slice_param_buf);
        memcpy(&slice_param_buf->RefPicList0[0], &va_old_picture_h264, sizeof(VAPictureH264));
        slice_param_buf->RefPicList0[0].flags = 0;
    }
    slice_param_buf->slice_data_bit_offset = 0;
    slice_param_buf->slice_data_size = framesize;

    va_status = vaUnmapBuffer(va_dpy, va_sp_param_buf_id[sid]);
    CHECK_VASTATUS(va_status, "vaUnmapBuffer(SliceParam)");
    CHECK_SURF(va_surface_id[sid]);

    /* Set up slice data buffer and copy H.264 encoded data */
    if (va_d_param_buf_id[sid] == VA_INVALID_ID) {
        /* TODO use estimation matching framebuffer dimensions instead of this large value */
        va_status = vaCreateBuffer(va_dpy, va_context_id, VASliceDataBufferType, 4177920, 1, NULL, &va_d_param_buf_id[sid]); /* 1080p size */
        CHECK_VASTATUS(va_status, "vaCreateBuffer(SliceData)");
    }

    char *slice_data_buf;
    va_status = vaMapBuffer(va_dpy, va_d_param_buf_id[sid], (void **)&slice_data_buf);
    CHECK_VASTATUS(va_status, "vaMapBuffer(SliceData)");
    memcpy(slice_data_buf, framedata, framesize);

    CHECK_SURF(va_surface_id[sid]);
    va_status = vaUnmapBuffer(va_dpy, va_d_param_buf_id[sid]);
    CHECK_VASTATUS(va_status, "vaUnmapBuffer(SliceData)");

    buffer_ids[0] = va_sp_param_buf_id[sid];
    buffer_ids[1] = va_d_param_buf_id[sid];

    CHECK_SURF(va_surface_id[sid]);
    va_status = vaRenderPicture(va_dpy, va_context_id, buffer_ids, 2);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaEndPicture(va_dpy, va_context_id);
    CHECK_VASTATUS(va_status, "vaEndPicture");

    /* Prepare next one... */
    int sid_new = (sid + 1) % SURFACE_NUM;
    DebugLog(("%s: new Surface ID = %d\n", __FUNCTION__, sid_new));
    va_status = vaBeginPicture(va_dpy, va_context_id, va_surface_id[sid_new]);
    CHECK_VASTATUS(va_status, "vaBeginPicture");

    /* Get decoded data */
    va_status = vaSyncSurface(va_dpy, va_surface_id[sid]);
    CHECK_VASTATUS(va_status, "vaSyncSurface");
    CHECK_SURF(va_surface_id[sid]);

    curr_surface = va_surface_id[sid];

    sid = sid_new;

    field_order_count += 2;
    ++frame_id;
    if (frame_id > 15) {
        frame_id = 0;
    }

    ++num_frames;

    memcpy(&va_old_picture_h264, &va_picture_h264, sizeof(VAPictureH264));
}