int PrivateDecoderCrystalHD::GetTxFreeSize(bool hwsel)
{
    BC_DTS_STATUS status;
    if (hwsel)
        status.cpbEmptySize = 0xC0000000; // set bit 31 for real HW free size
    else
        status.cpbEmptySize = 0x40000000; // set bit 30 for TX only status
    INIT_ST;
    st = DtsGetDriverStatus(m_device, &status);
    CHECK_ST;
    if (!ok)
        return -1;

    return status.cpbEmptySize;
}
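
// Hedged usage sketch (not MythTV code): the free size returned above is meant
// to be compared against the size of the next packet before feeding the
// hardware, leaving some slack -- the same pattern the FFmpeg decode() example
// further down uses with DtsTxFreeSize(). GetTxFreeSize(), INIT_ST/CHECK_ST,
// m_device and DtsProcInput() come from the surrounding examples; the
// SendPacket() name and the 1024-byte margin are illustrative assumptions.
bool PrivateDecoderCrystalHD::SendPacket(uint8_t *buf, uint32_t size, uint64_t pts)
{
    int free_bytes = GetTxFreeSize(true /* real HW free size */);
    if (free_bytes < 0 || (uint32_t)free_bytes < size + 1024)
        return false; // input buffer full - let the caller retry later

    INIT_ST;
    st = DtsProcInput(m_device, buf, size, pts, false);
    CHECK_ST;
    return ok;
}
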
void PrivateDecoderCrystalHD::FetchFrames(void)
{
    INIT_ST;
    bool valid = false;
    m_fetcher_paused = false;
    while (!m_fetcher_stop)
    {
        usleep(1000);
        if (m_fetcher_pause)
        {
            m_fetcher_paused = true;
            continue;
        }
        m_fetcher_paused = false;

        BC_DTS_STATUS status;
        st = DtsGetDriverStatus(m_device, &status);
        CHECK_ST;

        if (!status.ReadyListCount)
            continue;

        BC_DTS_PROC_OUT out;
        memset(&out, 0, sizeof(BC_DTS_PROC_OUT));
        st = DtsProcOutputNoCopy(m_device, valid ? 2000 : 20, &out);

        if (BC_STS_FMT_CHANGE == st)
        {
            LOG(VB_GENERAL, LOG_INFO, LOC + "Decoder reported format change.");
            CheckProcOutput(&out);
            valid = true;
            continue;
        }
        CHECK_ST;

        if (!ok)
        {
            LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to fetch decoded frame");
            continue;
        }

        if (ok && valid && (out.PoutFlags & BC_POUT_FLAGS_PIB_VALID))
            FillFrame(&out);
        st = DtsReleaseOutputBuffs(m_device, NULL, false);
        CHECK_ST;
    }
}

void PrivateDecoderCrystalHD::CheckStatus(void)
{
    BC_DTS_STATUS status;
    status.cpbEmptySize = 0x00000000; // no free-size hint bits set (cf. GetTxFreeSize above)
    INIT_ST;
    st = DtsGetDriverStatus(m_device, &status);
    CHECK_ST;
    if (!ok)
        return;

    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("ReadyListCount  : %1")
            .arg(status.ReadyListCount));
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("FreeListCount   : %1")
            .arg(status.FreeListCount));
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("PowerStateChange: %1")
            .arg(status.PowerStateChange));
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("FrameDropped    : %1")
            .arg(status.FramesDropped));
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("FramesCaptured  : %1")
            .arg(status.FramesCaptured));
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("FramesRepeated  : %1")
            .arg(status.FramesRepeated));
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("InputCount      : %1")
            .arg(status.InputCount));
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("InputTotalSize  : %1")
            .arg(status.InputTotalSize));
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("InputBusyCount  : %1")
            .arg(status.InputBusyCount));
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("PIBMissCount    : %1")
            .arg(status.PIBMissCount));
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("cpbEmptySize    : %1")
            .arg(status.cpbEmptySize));
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("NextTimeStamp   : %1")
            .arg(status.NextTimeStamp));
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("PicNumFlags     : %1")
            .arg(status.picNumFlags));
}

Example #4
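
/*
 * The CopyRet values used by receive_frame() and decode() below are not shown
 * on this page. A sketch of the enum, reconstructed from how the examples use
 * it (RET_OK must be 0, since decode() tests "rec_ret == 0"); the other
 * numeric values are assumptions:
 */
typedef enum {
	RET_ERROR           = -1,
	RET_OK              = 0,
	RET_COPY_AGAIN      = 1,
	RET_SKIP_NEXT_COPY  = 2,
	RET_COPY_NEXT_FIELD = 3,
} CopyRet;
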
static inline CopyRet receive_frame(AVCodecContext *avctx,
                                    void *data, int *data_size,
                                    uint8_t second_field)
{
	BC_STATUS ret;
	BC_DTS_PROC_OUT output =
	{
		.PicInfo.width  = avctx->width,
		.PicInfo.height = avctx->height,
	};
	CHDContext *priv = avctx->priv_data;
	HANDLE dev       = priv->dev;

	*data_size = 0;

	// Request decoded data from the driver
	ret = DtsProcOutputNoCopy(dev, OUTPUT_PROC_TIMEOUT, &output);
	if (ret == BC_STS_FMT_CHANGE)
	{
		av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Initial format change\n");
		avctx->width  = output.PicInfo.width;
		avctx->height = output.PicInfo.height;
		return RET_COPY_AGAIN;
	}
	else if (ret == BC_STS_SUCCESS)
	{
		int copy_ret = -1;
		if (output.PoutFlags & BC_POUT_FLAGS_PIB_VALID)
		{
			if (priv->last_picture == -1)
			{
				/*
				 * Init to one less, so that the incrementing code doesn't
				 * need to be special-cased.
				 */
				priv->last_picture = output.PicInfo.picture_number - 1;
			}

			if (avctx->codec->id == CODEC_ID_MPEG4 &&
			        output.PicInfo.timeStamp == 0)
			{
				av_log(avctx, AV_LOG_VERBOSE,
				       "CrystalHD: Not returning packed frame twice.\n");
				priv->last_picture++;
				DtsReleaseOutputBuffs(dev, NULL, FALSE);
				return RET_COPY_AGAIN;
			}

			print_frame_info(priv, &output);

			if (priv->last_picture + 1 < output.PicInfo.picture_number)
			{
				av_log(avctx, AV_LOG_WARNING,
				       "CrystalHD: Picture Number discontinuity\n");
				/*
				 * Have we lost frames? If so, we need to shrink the
				 * pipeline length appropriately.
				 *
				 * XXX: I have no idea what the semantics of this situation
				 * are so I don't even know if we've lost frames or which
				 * ones.
				 *
				 * In any case, only warn the first time.
				 */
				priv->last_picture = output.PicInfo.picture_number - 1;
			}

			copy_ret = copy_frame(avctx, &output, data, data_size, second_field);
			if (*data_size > 0)
			{
				avctx->has_b_frames--;
				priv->last_picture++;
				av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Pipeline length: %u\n",
				       avctx->has_b_frames);
			}
		}
		else
		{
			/*
			 * An invalid frame has been consumed.
			 */
			av_log(avctx, AV_LOG_ERROR, "CrystalHD: ProcOutput succeeded with "
			       "invalid PIB\n");
			avctx->has_b_frames--;
			copy_ret = RET_OK;
		}
		DtsReleaseOutputBuffs(dev, NULL, FALSE);

		return copy_ret;
	}
	else if (ret == BC_STS_BUSY)
	{
		return RET_COPY_AGAIN;
	}
	else
	{
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: ProcOutput failed %d\n", ret);
		return RET_ERROR;
	}
}


static int decode(AVCodecContext *avctx, void *data, int *data_size, AVPacket *avpkt)
{
	BC_STATUS ret;
	BC_DTS_STATUS decoder_status;
	CopyRet rec_ret;
	CHDContext *priv   = avctx->priv_data;
	HANDLE dev         = priv->dev;
	int len            = avpkt->size;

	av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: decode_frame\n");

	if (len)
	{
		int32_t tx_free = (int32_t)DtsTxFreeSize(dev);
		if (len < tx_free - 1024)
		{
			/*
			 * Despite being notionally opaque, either libcrystalhd or
			 * the hardware itself will mangle pts values that are too
			 * small or too large. The docs claim it should be in units
			 * of 100ns. Given that we're nominally dealing with a black
			 * box on both sides, any transform we do has no guarantee of
			 * avoiding mangling so we need to build a mapping to values
			 * we know will not be mangled.
			 */
			uint64_t pts = opaque_list_push(priv, avctx->pkt->pts);
			if (!pts)
			{
				return AVERROR(ENOMEM);
			}
			av_log(priv->avctx, AV_LOG_VERBOSE,
			       "input \"pts\": %"PRIu64"\n", pts);
			ret = DtsProcInput(dev, avpkt->data, len, pts, 0);
			if (ret == BC_STS_BUSY)
			{
				av_log(avctx, AV_LOG_WARNING,
				       "CrystalHD: ProcInput returned busy\n");
				usleep(BASE_WAIT);
				return AVERROR(EBUSY);
			}
			else if (ret != BC_STS_SUCCESS)
			{
				av_log(avctx, AV_LOG_ERROR,
				       "CrystalHD: ProcInput failed: %u\n", ret);
				return -1;
			}
			avctx->has_b_frames++;
		}
		else
		{
			av_log(avctx, AV_LOG_WARNING, "CrystalHD: Input buffer full\n");
			len = 0; // We didn't consume any bytes.
		}
	}
	else
	{
		av_log(avctx, AV_LOG_INFO, "CrystalHD: No more input data\n");
	}

	if (priv->skip_next_output)
	{
		av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Skipping next output.\n");
		priv->skip_next_output = 0;
		avctx->has_b_frames--;
		return len;
	}

	ret = DtsGetDriverStatus(dev, &decoder_status);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: GetDriverStatus failed\n");
		return -1;
	}

	/*
	 * No frames ready. Don't try to extract.
	 *
	 * Empirical testing shows that ReadyListCount can be a damn lie,
	 * and ProcOut still fails when count > 0. The same testing showed
	 * that two more iterations were needed before ProcOutput would
	 * succeed.
	 */
	if (priv->output_ready < 2)
	{
		if (decoder_status.ReadyListCount != 0)
			priv->output_ready++;
		usleep(BASE_WAIT);
		av_log(avctx, AV_LOG_INFO, "CrystalHD: Filling pipeline.\n");
		return len;
	}
	else if (decoder_status.ReadyListCount == 0)
	{
		/*
		 * After the pipeline is established, if we encounter a lack of frames
		 * that probably means we're not giving the hardware enough time to
		 * decode them, so start increasing the wait time at the end of a
		 * decode call.
		 */
		usleep(BASE_WAIT);
		priv->decode_wait += WAIT_UNIT;
		av_log(avctx, AV_LOG_INFO, "CrystalHD: No frames ready. Returning\n");
		return len;
	}

	do
	{
		rec_ret = receive_frame(avctx, data, data_size, 0);
		if (rec_ret == 0 && *data_size == 0)
		{
			if (avctx->codec->id == CODEC_ID_H264)
			{
				/*
				 * This case is for when the encoded fields are stored
				 * separately and we get a separate avpkt for each one. To keep
				 * the pipeline stable, we should return nothing and wait for
				 * the next time round to grab the second field.
				 * H.264 PAFF is an example of this.
				 */
				av_log(avctx, AV_LOG_VERBOSE, "Returning after first field.\n");
				avctx->has_b_frames--;
			}
			else
			{
				/*
				 * This case is for when the encoded fields are stored in a
				 * single avpkt but the hardware returns them separately. Unless
				 * we grab the second field before returning, we'll slip another
				 * frame in the pipeline and if that happens a lot, we're sunk.
				 * So we have to get that second field now.
				 * Interlaced mpeg2 and vc1 are examples of this.
				 */
				av_log(avctx, AV_LOG_VERBOSE, "Trying to get second field.\n");
				while (1)
				{
					usleep(priv->decode_wait);
					ret = DtsGetDriverStatus(dev, &decoder_status);
					if (ret == BC_STS_SUCCESS &&
					        decoder_status.ReadyListCount > 0)
					{
						rec_ret = receive_frame(avctx, data, data_size, 1);
						if ((rec_ret == 0 && *data_size > 0) ||
						        rec_ret == RET_ERROR)
							break;
					}
				}
				av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Got second field.\n");
			}
		}
		else if (rec_ret == RET_SKIP_NEXT_COPY)
		{
			/*
			 * Two input packets got turned into a field pair. Gawd.
			 */
			av_log(avctx, AV_LOG_VERBOSE,
			       "Don't output on next decode call.\n");
			priv->skip_next_output = 1;
		}
		/*
		 * If rec_ret == RET_COPY_AGAIN, that means that either we just handled
		 * a FMT_CHANGE event and need to go around again for the actual frame,
		 * we got a busy status and need to try again, or we're dealing with
		 * packed b-frames, where the hardware strangely returns the packed
		 * p-frame twice. We choose to keep the second copy as it carries the
		 * valid pts.
		 */
	}
	while (rec_ret == RET_COPY_AGAIN);
	usleep(priv->decode_wait);
	return len;
}
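
/*
 * opaque_list_push()/opaque_list_pop(), used above to work around the pts
 * mangling described in decode(), are not reproduced on this page. A minimal,
 * self-contained sketch of the idea (not the actual libavcodec helpers): real
 * pts values are queued and replaced by synthetic, monotonically increasing
 * timestamps from a range assumed safe from mangling, then looked up again
 * when PicInfo.timeStamp comes back. All names here are hypothetical.
 */
#include <stdint.h>
#include <stdlib.h>

typedef struct PtsNode {
	uint64_t fake_pts;          /* value handed to DtsProcInput()          */
	int64_t  real_pts;          /* original packet pts we want back later  */
	struct PtsNode *next;
} PtsNode;

typedef struct PtsFifo {
	PtsNode *head, *tail;
	uint64_t next_fake;         /* monotonically increasing, starts at 0   */
} PtsFifo;

/* Queue the real pts; return the fake timestamp to feed the decoder (0 on OOM). */
static uint64_t pts_fifo_push(PtsFifo *f, int64_t real_pts)
{
	PtsNode *n = calloc(1, sizeof(*n));
	if (!n)
		return 0;
	n->real_pts = real_pts;
	n->fake_pts = ++f->next_fake;   /* never 0, so 0 can signal failure */
	if (f->tail)
		f->tail->next = n;
	else
		f->head = n;
	f->tail = n;
	return n->fake_pts;
}

/* Look up and remove the real pts matching a returned PicInfo.timeStamp. */
static int64_t pts_fifo_pop(PtsFifo *f, uint64_t fake_pts, int64_t nopts_value)
{
	PtsNode *prev = NULL, *n = f->head;
	while (n && n->fake_pts != fake_pts) {
		prev = n;
		n = n->next;
	}
	if (!n)                         /* e.g. the second field of a pair */
		return nopts_value;
	if (prev)
		prev->next = n->next;
	else
		f->head = n->next;
	if (f->tail == n)
		f->tail = prev;
	int64_t real = n->real_pts;
	free(n);
	return real;
}
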
Example #5
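
/*
 * id2subtype(), called by init() below, is not shown on this page. A sketch of
 * the mapping it must perform, judging from the switch cases in init(); the
 * exact set of codec IDs handled is an assumption:
 */
static BC_MEDIA_SUBTYPE id2subtype(CHDContext *priv, enum CodecID id)
{
	switch (id)
	{
	case CODEC_ID_MPEG4:
		return BC_MSUBTYPE_DIVX;
	case CODEC_ID_MSMPEG4V3:
		return BC_MSUBTYPE_DIVX311;
	case CODEC_ID_MPEG2VIDEO:
		return BC_MSUBTYPE_MPEG2VIDEO;
	case CODEC_ID_VC1:
		return BC_MSUBTYPE_VC1;
	case CODEC_ID_WMV3:
		return BC_MSUBTYPE_WMV3;
	case CODEC_ID_H264:
		/* avcC vs. Annex B extradata decides AVC1 vs. H264 (see priv->is_nal). */
		return priv->is_nal ? BC_MSUBTYPE_AVC1 : BC_MSUBTYPE_H264;
	default:
		return BC_MSUBTYPE_INVALID; /* assumed sentinel; hits init()'s default case */
	}
}
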
static av_cold int init(AVCodecContext *avctx)
{
	CHDContext* priv;
	BC_STATUS ret;
	BC_INFO_CRYSTAL version;
	BC_INPUT_FORMAT format =
	{
		.FGTEnable   = FALSE,
		.Progressive = TRUE,
		.OptFlags    = 0x80000000 | vdecFrameRate59_94 | 0x40,
		.width       = avctx->width,
		.height      = avctx->height,
	};

	BC_MEDIA_SUBTYPE subtype;

	uint32_t mode = DTS_PLAYBACK_MODE |
	                DTS_LOAD_FILE_PLAY_FW |
	                DTS_SKIP_TX_CHK_CPB |
	                DTS_PLAYBACK_DROP_RPT_MODE |
	                DTS_SINGLE_THREADED_MODE |
	                DTS_DFLT_RESOLUTION(vdecRESOLUTION_1080p23_976);

	av_log(avctx, AV_LOG_VERBOSE, "CrystalHD Init for %s\n",
	       avctx->codec->name);

	avctx->pix_fmt = PIX_FMT_YUYV422;

	/* Initialize the library */
	priv               = avctx->priv_data;
	priv->avctx        = avctx;
	priv->is_nal       = avctx->extradata_size > 0 && *(avctx->extradata) == 1;
	priv->last_picture = -1;
	priv->decode_wait  = BASE_WAIT;

	subtype = id2subtype(priv, avctx->codec->id);
	switch (subtype)
	{
	case BC_MSUBTYPE_AVC1:
	{
		uint8_t *dummy_p;
		int dummy_int;
		AVBitStreamFilterContext *bsfc;

		uint32_t orig_data_size = avctx->extradata_size;
		uint8_t *orig_data = av_malloc(orig_data_size);
		if (!orig_data)
		{
			av_log(avctx, AV_LOG_ERROR,
			       "Failed to allocate copy of extradata\n");
			return AVERROR(ENOMEM);
		}
		memcpy(orig_data, avctx->extradata, orig_data_size);


		bsfc = av_bitstream_filter_init("h264_mp4toannexb");
		if (!bsfc)
		{
			av_log(avctx, AV_LOG_ERROR,
			       "Cannot open the h264_mp4toannexb BSF!\n");
			av_free(orig_data);
			return AVERROR_BSF_NOT_FOUND;
		}
		av_bitstream_filter_filter(bsfc, avctx, NULL, &dummy_p,
		                           &dummy_int, NULL, 0, 0);
		av_bitstream_filter_close(bsfc);

		priv->sps_pps_buf     = avctx->extradata;
		priv->sps_pps_size    = avctx->extradata_size;
		avctx->extradata      = orig_data;
		avctx->extradata_size = orig_data_size;

		format.pMetaData   = priv->sps_pps_buf;
		format.metaDataSz  = priv->sps_pps_size;
		format.startCodeSz = (avctx->extradata[4] & 0x03) + 1;
	}
	break;
	case BC_MSUBTYPE_H264:
		format.startCodeSz = 4;
		// Fall-through
	case BC_MSUBTYPE_VC1:
	case BC_MSUBTYPE_WVC1:
	case BC_MSUBTYPE_WMV3:
	case BC_MSUBTYPE_WMVA:
	case BC_MSUBTYPE_MPEG2VIDEO:
	case BC_MSUBTYPE_DIVX:
	case BC_MSUBTYPE_DIVX311:
		format.pMetaData  = avctx->extradata;
		format.metaDataSz = avctx->extradata_size;
		break;
	default:
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: Unknown codec name\n");
		return AVERROR(EINVAL);
	}
	format.mSubtype = subtype;

	/* Get a decoder instance */
	av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: starting up\n");
	// Initialize the Link and Decoder devices
	ret = DtsDeviceOpen(&priv->dev, mode);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: DtsDeviceOpen failed\n");
		goto fail;
	}

	ret = DtsCrystalHDVersion(priv->dev, &version);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_VERBOSE,
		       "CrystalHD: DtsCrystalHDVersion failed\n");
		goto fail;
	}
	priv->is_70012 = version.device == 0;

	if (priv->is_70012 &&
	        (subtype == BC_MSUBTYPE_DIVX || subtype == BC_MSUBTYPE_DIVX311))
	{
		av_log(avctx, AV_LOG_VERBOSE,
		       "CrystalHD: BCM70012 doesn't support MPEG4-ASP/DivX/Xvid\n");
		goto fail;
	}

	ret = DtsSetInputFormat(priv->dev, &format);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: SetInputFormat failed\n");
		goto fail;
	}

	ret = DtsOpenDecoder(priv->dev, BC_STREAM_TYPE_ES);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsOpenDecoder failed\n");
		goto fail;
	}

	ret = DtsSetColorSpace(priv->dev, OUTPUT_MODE422_YUY2);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsSetColorSpace failed\n");
		goto fail;
	}
	ret = DtsStartDecoder(priv->dev);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartDecoder failed\n");
		goto fail;
	}
	ret = DtsStartCapture(priv->dev);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartCapture failed\n");
		goto fail;
	}

	av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Init complete.\n");

	return 0;

fail:
	uninit(avctx);
	return -1;
}
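
/*
 * The fail: path above calls uninit(), which is not reproduced on this page.
 * A minimal teardown sketch in the same spirit -- the Dts* calls mirror the
 * open/start calls in init(), and the real libavcodec uninit() also releases
 * the bitstream filter and the pts bookkeeping. The uninit_sketch name is
 * deliberately not the real function:
 */
static av_cold int uninit_sketch(AVCodecContext *avctx)
{
	CHDContext *priv = avctx->priv_data;

	if (priv->dev)
	{
		DtsStopDecoder(priv->dev);
		DtsCloseDecoder(priv->dev);
		DtsDeviceClose(priv->dev);
		priv->dev = NULL;
	}

	if (priv->pic.data[0])
		avctx->release_buffer(avctx, &priv->pic);

	return 0;
}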


/*
 * The CrystalHD doesn't report interlaced H.264 content in a way that allows
 * us to distinguish between specific cases that require different handling.
 * So, for now, we have to hard-code the behaviour we want.
 *
 * The default behaviour is to assume MBAFF with input and output fieldpairs.
 *
 * Define ASSUME_PAFF_OVER_MBAFF to treat input as PAFF with separate input
 * and output fields.
 *
 * Define ASSUME_TWO_INPUTS_ONE_OUTPUT to treat input as separate fields but
 * output as a single fieldpair.
 *
 * Define both to mess up your playback.
 */
#define ASSUME_PAFF_OVER_MBAFF 0
#define ASSUME_TWO_INPUTS_ONE_OUTPUT 0
static inline CopyRet copy_frame(AVCodecContext *avctx,
                                 BC_DTS_PROC_OUT *output,
                                 void *data, int *data_size,
                                 uint8_t second_field)
{
	BC_STATUS ret;
	BC_DTS_STATUS decoder_status;
	uint8_t is_paff;
	uint8_t next_frame_same;
	uint8_t interlaced;

	CHDContext *priv = avctx->priv_data;

	uint8_t bottom_field = (output->PicInfo.flags & VDEC_FLAG_BOTTOMFIELD) ==
	                       VDEC_FLAG_BOTTOMFIELD;
	uint8_t bottom_first = !!(output->PicInfo.flags & VDEC_FLAG_BOTTOM_FIRST);

	int width    = output->PicInfo.width;
	int height   = output->PicInfo.height;
	int bwidth;
	uint8_t *src = output->Ybuff;
	int sStride;
	uint8_t *dst;
	int dStride;

	ret = DtsGetDriverStatus(priv->dev, &decoder_status);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR,
		       "CrystalHD: GetDriverStatus failed: %u\n", ret);
		return RET_ERROR;
	}

	is_paff           = ASSUME_PAFF_OVER_MBAFF ||
	                    !(output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC);
	next_frame_same   = output->PicInfo.picture_number ==
	                    (decoder_status.picNumFlags & ~0x40000000);
	interlaced        = ((output->PicInfo.flags &
	                      VDEC_FLAG_INTERLACED_SRC) && is_paff) ||
	                    next_frame_same || bottom_field || second_field;

	av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: next_frame_same: %u | %u | %u\n",
	       next_frame_same, output->PicInfo.picture_number,
	       decoder_status.picNumFlags & ~0x40000000);

	if (priv->pic.data[0] && !priv->need_second_field)
		avctx->release_buffer(avctx, &priv->pic);

	priv->need_second_field = interlaced && !priv->need_second_field;

	priv->pic.buffer_hints = FF_BUFFER_HINTS_VALID | FF_BUFFER_HINTS_PRESERVE |
	                         FF_BUFFER_HINTS_REUSABLE;
	if (!priv->pic.data[0])
	{
		if (avctx->get_buffer(avctx, &priv->pic) < 0)
		{
			av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
			return RET_ERROR;
		}
	}

	bwidth = av_image_get_linesize(avctx->pix_fmt, width, 0);
	if (priv->is_70012)
	{
		int pStride;

		if (width <= 720)
			pStride = 720;
		else if (width <= 1280)
			pStride = 1280;
		else
			pStride = 1920;
		sStride = av_image_get_linesize(avctx->pix_fmt, pStride, 0);
	}
	else
	{
		sStride = bwidth;
	}

	dStride = priv->pic.linesize[0];
	dst     = priv->pic.data[0];

	av_log(priv->avctx, AV_LOG_VERBOSE, "CrystalHD: Copying out frame\n");

	if (interlaced)
	{
		int dY = 0;
		int sY = 0;

		height /= 2;
		if (bottom_field)
		{
			av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: bottom field\n");
			dY = 1;
		}
		else
		{
			av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: top field\n");
			dY = 0;
		}

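		/*
		 * dY advances twice per iteration (here and in the for clause), so the
		 * single field in src is written into every other line of the
		 * destination frame, interleaving it with the other field.
		 */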
		for (sY = 0; sY < height; dY++, sY++)
		{
			memcpy(&(dst[dY * dStride]), &(src[sY * sStride]), bwidth);
			dY++;
		}
	}
	else
	{
		av_image_copy_plane(dst, dStride, src, sStride, bwidth, height);
	}

	priv->pic.interlaced_frame = interlaced;
	if (interlaced)
		priv->pic.top_field_first = !bottom_first;

	if (output->PicInfo.timeStamp != 0)
	{
		priv->pic.pkt_pts = opaque_list_pop(priv, output->PicInfo.timeStamp);
		av_log(avctx, AV_LOG_VERBOSE, "output \"pts\": %"PRIu64"\n",
		       priv->pic.pkt_pts);
	}

	if (!priv->need_second_field)
	{
		*data_size       = sizeof(AVFrame);
		*(AVFrame *)data = priv->pic;
	}

	if (ASSUME_TWO_INPUTS_ONE_OUTPUT &&
	        output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC)
	{
		av_log(priv->avctx, AV_LOG_VERBOSE, "Fieldpair from two packets.\n");
		return RET_SKIP_NEXT_COPY;
	}

	return RET_OK;
}

Example #6
static av_cold int init(AVCodecContext *avctx)
{
    CHDContext* priv;
    BC_STATUS ret;
    BC_INFO_CRYSTAL version;
    BC_INPUT_FORMAT format = {
        .FGTEnable   = FALSE,
        .Progressive = TRUE,
        .OptFlags    = 0x80000000 | vdecFrameRate59_94 | 0x40,
        .width       = avctx->width,
        .height      = avctx->height,
    };

    BC_MEDIA_SUBTYPE subtype;

    uint32_t mode = DTS_PLAYBACK_MODE |
                    DTS_LOAD_FILE_PLAY_FW |
                    DTS_SKIP_TX_CHK_CPB |
                    DTS_PLAYBACK_DROP_RPT_MODE |
                    DTS_SINGLE_THREADED_MODE |
                    DTS_DFLT_RESOLUTION(vdecRESOLUTION_1080p23_976);

    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD Init for %s\n",
           avctx->codec->name);

    avctx->pix_fmt = PIX_FMT_YUYV422;

    /* Initialize the library */
    priv               = avctx->priv_data;
    priv->avctx        = avctx;
    priv->is_nal       = avctx->extradata_size > 0 && *(avctx->extradata) == 1;
    priv->last_picture = -1;
    priv->decode_wait  = BASE_WAIT;

    subtype = id2subtype(priv, avctx->codec->id);
    switch (subtype) {
    case BC_MSUBTYPE_AVC1:
        {
            uint8_t *dummy_p;
            int dummy_int;

            priv->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
            if (!priv->bsfc) {
                av_log(avctx, AV_LOG_ERROR,
                       "Cannot open the h264_mp4toannexb BSF!\n");
                return AVERROR_BSF_NOT_FOUND;
            }
            av_bitstream_filter_filter(priv->bsfc, avctx, NULL, &dummy_p,
                                       &dummy_int, NULL, 0, 0);
        }
        subtype = BC_MSUBTYPE_H264;
        // Fall-through
    case BC_MSUBTYPE_H264:
        format.startCodeSz = 4;
        // Fall-through
    case BC_MSUBTYPE_VC1:
    case BC_MSUBTYPE_WVC1:
    case BC_MSUBTYPE_WMV3:
    case BC_MSUBTYPE_WMVA:
    case BC_MSUBTYPE_MPEG2VIDEO:
    case BC_MSUBTYPE_DIVX:
    case BC_MSUBTYPE_DIVX311:
        format.pMetaData  = avctx->extradata;
        format.metaDataSz = avctx->extradata_size;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: Unknown codec name\n");
        return AVERROR(EINVAL);
    }
    format.mSubtype = subtype;

    if (priv->sWidth) {
        format.bEnableScaling = 1;
        format.ScalingParams.sWidth = priv->sWidth;
    }

    /* Get a decoder instance */
    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: starting up\n");
    // Initialize the Link and Decoder devices
    ret = DtsDeviceOpen(&priv->dev, mode);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: DtsDeviceOpen failed\n");
        goto fail;
    }

    ret = DtsCrystalHDVersion(priv->dev, &version);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_VERBOSE,
               "CrystalHD: DtsCrystalHDVersion failed\n");
        goto fail;
    }
    priv->is_70012 = version.device == 0;

    if (priv->is_70012 &&
        (subtype == BC_MSUBTYPE_DIVX || subtype == BC_MSUBTYPE_DIVX311)) {
        av_log(avctx, AV_LOG_VERBOSE,
               "CrystalHD: BCM70012 doesn't support MPEG4-ASP/DivX/Xvid\n");
        goto fail;
    }

    ret = DtsSetInputFormat(priv->dev, &format);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: SetInputFormat failed\n");
        goto fail;
    }

    ret = DtsOpenDecoder(priv->dev, BC_STREAM_TYPE_ES);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsOpenDecoder failed\n");
        goto fail;
    }

    ret = DtsSetColorSpace(priv->dev, OUTPUT_MODE422_YUY2);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsSetColorSpace failed\n");
        goto fail;
    }
    ret = DtsStartDecoder(priv->dev);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartDecoder failed\n");
        goto fail;
    }
    ret = DtsStartCapture(priv->dev);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartCapture failed\n");
        goto fail;
    }

    if (avctx->codec->id == CODEC_ID_H264) {
        priv->parser = av_parser_init(avctx->codec->id);
        if (priv->parser) {
            priv->parser->flags = PARSER_FLAG_COMPLETE_FRAMES;
        } else {
            av_log(avctx, AV_LOG_WARNING,
                   "Cannot open the h.264 parser! Interlaced h.264 content "
                   "will not be detected reliably.\n");
        }
    }
    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Init complete.\n");

    return 0;

 fail:
    uninit(avctx);
    return -1;
}


static inline CopyRet copy_frame(AVCodecContext *avctx,
                                 BC_DTS_PROC_OUT *output,
                                 void *data, int *data_size)
{
    BC_STATUS ret;
    BC_DTS_STATUS decoder_status;
    uint8_t trust_interlaced;
    uint8_t interlaced;

    CHDContext *priv = avctx->priv_data;
    int64_t pkt_pts  = AV_NOPTS_VALUE;
    uint8_t pic_type = 0;

    uint8_t bottom_field = (output->PicInfo.flags & VDEC_FLAG_BOTTOMFIELD) ==
                           VDEC_FLAG_BOTTOMFIELD;
    uint8_t bottom_first = !!(output->PicInfo.flags & VDEC_FLAG_BOTTOM_FIRST);

    int width    = output->PicInfo.width;
    int height   = output->PicInfo.height;
    int bwidth;
    uint8_t *src = output->Ybuff;
    int sStride;
    uint8_t *dst;
    int dStride;

    if (output->PicInfo.timeStamp != 0) {
        OpaqueList *node = opaque_list_pop(priv, output->PicInfo.timeStamp);
        if (node) {
            pkt_pts = node->reordered_opaque;
            pic_type = node->pic_type;
            av_free(node);
        } else {
            /*
             * We will encounter a situation where a timestamp cannot be
             * popped if a second field is being returned. In this case,
             * each field has the same timestamp and the first one will
             * cause it to be popped. To keep subsequent calculations
             * simple, pic_type should be set to a FIELD value - doesn't
             * matter which, but I chose BOTTOM.
             */
            pic_type = PICT_BOTTOM_FIELD;
        }
        av_log(avctx, AV_LOG_VERBOSE, "output \"pts\": %"PRIu64"\n",
               output->PicInfo.timeStamp);
        av_log(avctx, AV_LOG_VERBOSE, "output picture type %d\n",
               pic_type);
    }

    ret = DtsGetDriverStatus(priv->dev, &decoder_status);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR,
               "CrystalHD: GetDriverStatus failed: %u\n", ret);
        return RET_ERROR;
    }

    /*
     * For most content, we can trust the interlaced flag returned
     * by the hardware, but sometimes we can't. These are the
     * conditions under which we can trust the flag:
     *
     * 1) It's not h.264 content
     * 2) The UNKNOWN_SRC flag is not set
     * 3) We know we're expecting a second field
     * 4) The hardware reports this picture and the next picture
     *    have the same picture number.
     *
     * Note that there can still be interlaced content that will
     * fail this check, if the hardware hasn't decoded the next
     * picture or if there is a corruption in the stream. (In either
     * case a 0 will be returned for the next picture number)
     */
    trust_interlaced = avctx->codec->id != CODEC_ID_H264 ||
                       !(output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC) ||
                       priv->need_second_field ||
                       (decoder_status.picNumFlags & ~0x40000000) ==
                       output->PicInfo.picture_number;

    /*
     * If we got a false negative for trust_interlaced on the first field,
     * we will realise our mistake here when we see that the picture number is that
     * of the previous picture. We cannot recover the frame and should discard the
     * second field to keep the correct number of output frames.
     */
    if (output->PicInfo.picture_number == priv->last_picture && !priv->need_second_field) {
        av_log(avctx, AV_LOG_WARNING,
               "Incorrectly guessed progressive frame. Discarding second field\n");
        /* Returning without providing a picture. */
        return RET_OK;
    }

    interlaced = (output->PicInfo.flags & VDEC_FLAG_INTERLACED_SRC) &&
                 trust_interlaced;

    if (!trust_interlaced && (decoder_status.picNumFlags & ~0x40000000) == 0) {
        av_log(avctx, AV_LOG_VERBOSE,
               "Next picture number unknown. Assuming progressive frame.\n");
    }

    av_log(avctx, AV_LOG_VERBOSE, "Interlaced state: %d | trust_interlaced %d\n",
           interlaced, trust_interlaced);

    if (priv->pic.data[0] && !priv->need_second_field)
        avctx->release_buffer(avctx, &priv->pic);

    priv->need_second_field = interlaced && !priv->need_second_field;

    priv->pic.buffer_hints = FF_BUFFER_HINTS_VALID | FF_BUFFER_HINTS_PRESERVE |
                             FF_BUFFER_HINTS_REUSABLE;
    if (!priv->pic.data[0]) {
        if (avctx->get_buffer(avctx, &priv->pic) < 0) {
            av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
            return RET_ERROR;
        }
    }

    bwidth = av_image_get_linesize(avctx->pix_fmt, width, 0);
    if (priv->is_70012) {
        int pStride;

        if (width <= 720)
            pStride = 720;
        else if (width <= 1280)
            pStride = 1280;
        else
            pStride = 1920;
        sStride = av_image_get_linesize(avctx->pix_fmt, pStride, 0);
    } else {
        sStride = bwidth;
    }

    dStride = priv->pic.linesize[0];
    dst     = priv->pic.data[0];

    av_log(priv->avctx, AV_LOG_VERBOSE, "CrystalHD: Copying out frame\n");

    if (interlaced) {
        int dY = 0;
        int sY = 0;

        height /= 2;
        if (bottom_field) {
            av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: bottom field\n");
            dY = 1;
        } else {
            av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: top field\n");
            dY = 0;
        }

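        /*
         * dY advances twice per iteration (here and in the for clause), so the
         * single field in src lands on every other line of the destination
         * frame, interleaving it with the other field.
         */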
        for (sY = 0; sY < height; dY++, sY++) {
            memcpy(&(dst[dY * dStride]), &(src[sY * sStride]), bwidth);
            dY++;
        }
    } else {
        av_image_copy_plane(dst, dStride, src, sStride, bwidth, height);
    }

    priv->pic.interlaced_frame = interlaced;
    if (interlaced)
        priv->pic.top_field_first = !bottom_first;

    priv->pic.pkt_pts = pkt_pts;

    if (!priv->need_second_field) {
        *data_size       = sizeof(AVFrame);
        *(AVFrame *)data = priv->pic;
    }

    /*
     * Two types of PAFF content have been observed. One form causes the
     * hardware to return a field pair and the other individual fields,
     * even though the input is always individual fields. We must skip
     * copying on the next decode() call to maintain pipeline length in
     * the first case.
     */
    if (!interlaced && (output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC) &&
        (pic_type == PICT_TOP_FIELD || pic_type == PICT_BOTTOM_FIELD)) {
        av_log(priv->avctx, AV_LOG_VERBOSE, "Fieldpair from two packets.\n");
        return RET_SKIP_NEXT_COPY;
    }

    /*
     * Testing has shown that in all cases where we don't want to return the
     * full frame immediately, VDEC_FLAG_UNKNOWN_SRC is set.
     */
    return priv->need_second_field &&
           !(output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC) ?
           RET_COPY_NEXT_FIELD : RET_OK;
}