Example #1
static av_cold int init(AVCodecContext *avctx)
{
    CHDContext* priv;
    BC_STATUS ret;
    BC_INFO_CRYSTAL version;
    BC_INPUT_FORMAT format = {
        .FGTEnable   = FALSE,
        .Progressive = TRUE,
        .OptFlags    = 0x80000000 | vdecFrameRate59_94 | 0x40,
        .width       = avctx->width,
        .height      = avctx->height,
    };

    BC_MEDIA_SUBTYPE subtype;

    uint32_t mode = DTS_PLAYBACK_MODE |
                    DTS_LOAD_FILE_PLAY_FW |
                    DTS_SKIP_TX_CHK_CPB |
                    DTS_PLAYBACK_DROP_RPT_MODE |
                    DTS_SINGLE_THREADED_MODE |
                    DTS_DFLT_RESOLUTION(vdecRESOLUTION_1080p23_976);

    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD Init for %s\n",
           avctx->codec->name);

    avctx->pix_fmt = PIX_FMT_YUYV422;

    /* Initialize the library */
    priv               = avctx->priv_data;
    priv->avctx        = avctx;
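    /* Extradata that starts with 1 is an avcC header (length-prefixed NAL
     * units) rather than Annex B start codes. */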
    priv->is_nal       = avctx->extradata_size > 0 && *(avctx->extradata) == 1;
    priv->last_picture = -1;
    priv->decode_wait  = BASE_WAIT;

    subtype = id2subtype(priv, avctx->codec->id);
    switch (subtype) {
    case BC_MSUBTYPE_AVC1:
        {
            uint8_t *dummy_p;
            int dummy_int;

            priv->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
            if (!priv->bsfc) {
                av_log(avctx, AV_LOG_ERROR,
                       "Cannot open the h264_mp4toannexb BSF!\n");
                return AVERROR_BSF_NOT_FOUND;
            }
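            /* Filtering a zero-length dummy packet makes the BSF rewrite
             * the avcC extradata in avctx->extradata to Annex B SPS/PPS
             * in place, which is the form the hardware is fed below. */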
            av_bitstream_filter_filter(priv->bsfc, avctx, NULL, &dummy_p,
                                       &dummy_int, NULL, 0, 0);
        }
        subtype = BC_MSUBTYPE_H264;
        // Fall-through
    case BC_MSUBTYPE_H264:
        format.startCodeSz = 4;
        // Fall-through
    case BC_MSUBTYPE_VC1:
    case BC_MSUBTYPE_WVC1:
    case BC_MSUBTYPE_WMV3:
    case BC_MSUBTYPE_WMVA:
    case BC_MSUBTYPE_MPEG2VIDEO:
    case BC_MSUBTYPE_DIVX:
    case BC_MSUBTYPE_DIVX311:
        format.pMetaData  = avctx->extradata;
        format.metaDataSz = avctx->extradata_size;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: Unknown codec name\n");
        return AVERROR(EINVAL);
    }
    format.mSubtype = subtype;

    if (priv->sWidth) {
        format.bEnableScaling = 1;
        format.ScalingParams.sWidth = priv->sWidth;
    }

    /* Get a decoder instance */
    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: starting up\n");
    // Initialize the Link and Decoder devices
    ret = DtsDeviceOpen(&priv->dev, mode);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: DtsDeviceOpen failed\n");
        goto fail;
    }

    ret = DtsCrystalHDVersion(priv->dev, &version);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_VERBOSE,
               "CrystalHD: DtsCrystalHDVersion failed\n");
        goto fail;
    }
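    /* A device id of 0 identifies the older BCM70012; it lacks MPEG4-ASP
     * support (checked below) and needs padded strides in copy_frame(). */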
    priv->is_70012 = version.device == 0;

    if (priv->is_70012 &&
        (subtype == BC_MSUBTYPE_DIVX || subtype == BC_MSUBTYPE_DIVX311)) {
        av_log(avctx, AV_LOG_VERBOSE,
               "CrystalHD: BCM70012 doesn't support MPEG4-ASP/DivX/Xvid\n");
        goto fail;
    }

    ret = DtsSetInputFormat(priv->dev, &format);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: SetInputFormat failed\n");
        goto fail;
    }

    ret = DtsOpenDecoder(priv->dev, BC_STREAM_TYPE_ES);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsOpenDecoder failed\n");
        goto fail;
    }

    ret = DtsSetColorSpace(priv->dev, OUTPUT_MODE422_YUY2);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsSetColorSpace failed\n");
        goto fail;
    }
    ret = DtsStartDecoder(priv->dev);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartDecoder failed\n");
        goto fail;
    }
    ret = DtsStartCapture(priv->dev);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartCapture failed\n");
        goto fail;
    }

    if (avctx->codec->id == CODEC_ID_H264) {
        priv->parser = av_parser_init(avctx->codec->id);
        if (priv->parser) {
            priv->parser->flags = PARSER_FLAG_COMPLETE_FRAMES;
        } else {
            av_log(avctx, AV_LOG_WARNING,
                   "Cannot open the h.264 parser! Interlaced h.264 content "
                   "will not be detected reliably.\n");
        }
    }
    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Init complete.\n");

    return 0;

 fail:
    uninit(avctx);
    return -1;
}


static inline CopyRet copy_frame(AVCodecContext *avctx,
                                 BC_DTS_PROC_OUT *output,
                                 void *data, int *data_size)
{
    BC_STATUS ret;
    BC_DTS_STATUS decoder_status;
    uint8_t trust_interlaced;
    uint8_t interlaced;

    CHDContext *priv = avctx->priv_data;
    int64_t pkt_pts  = AV_NOPTS_VALUE;
    uint8_t pic_type = 0;

    uint8_t bottom_field = (output->PicInfo.flags & VDEC_FLAG_BOTTOMFIELD) ==
                           VDEC_FLAG_BOTTOMFIELD;
    uint8_t bottom_first = !!(output->PicInfo.flags & VDEC_FLAG_BOTTOM_FIRST);

    int width    = output->PicInfo.width;
    int height   = output->PicInfo.height;
    int bwidth;
    uint8_t *src = output->Ybuff;
    int sStride;
    uint8_t *dst;
    int dStride;

    if (output->PicInfo.timeStamp != 0) {
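        /* Timestamps handed to the hardware are opaque handles; pop the
         * matching list node to recover the original pts and picture type. */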
        OpaqueList *node = opaque_list_pop(priv, output->PicInfo.timeStamp);
        if (node) {
            pkt_pts = node->reordered_opaque;
            pic_type = node->pic_type;
            av_free(node);
        } else {
            /*
             * We will encounter a situation where a timestamp cannot be
             * popped if a second field is being returned. In this case,
             * each field has the same timestamp and the first one will
             * cause it to be popped. To keep subsequent calculations
             * simple, pic_type should be set to a FIELD value - doesn't
             * matter which, but I chose BOTTOM.
             */
            pic_type = PICT_BOTTOM_FIELD;
        }
        av_log(avctx, AV_LOG_VERBOSE, "output \"pts\": %"PRIu64"\n",
               output->PicInfo.timeStamp);
        av_log(avctx, AV_LOG_VERBOSE, "output picture type %d\n",
               pic_type);
    }

    ret = DtsGetDriverStatus(priv->dev, &decoder_status);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR,
               "CrystalHD: GetDriverStatus failed: %u\n", ret);
        return RET_ERROR;
    }

    /*
     * For most content, we can trust the interlaced flag returned
     * by the hardware, but sometimes we can't. These are the
     * conditions under which we can trust the flag:
     *
     * 1) It's not h.264 content
     * 2) The UNKNOWN_SRC flag is not set
     * 3) We know we're expecting a second field
     * 4) The hardware reports this picture and the next picture
     *    have the same picture number.
     *
     * Note that there can still be interlaced content that will
     * fail this check, if the hardware hasn't decoded the next
     * picture or if there is a corruption in the stream. (In either
     * case a 0 will be returned for the next picture number)
     */
    trust_interlaced = avctx->codec->id != CODEC_ID_H264 ||
                       !(output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC) ||
                       priv->need_second_field ||
                       (decoder_status.picNumFlags & ~0x40000000) ==
                       output->PicInfo.picture_number;

    /*
     * If we got a false negative for trust_interlaced on the first field,
     * we will realise our mistake here when we see that the picture number is that
     * of the previous picture. We cannot recover the frame and should discard the
     * second field to keep the correct number of output frames.
     */
    if (output->PicInfo.picture_number == priv->last_picture && !priv->need_second_field) {
        av_log(avctx, AV_LOG_WARNING,
               "Incorrectly guessed progressive frame. Discarding second field\n");
        /* Returning without providing a picture. */
        return RET_OK;
    }

    interlaced = (output->PicInfo.flags & VDEC_FLAG_INTERLACED_SRC) &&
                 trust_interlaced;

    if (!trust_interlaced && (decoder_status.picNumFlags & ~0x40000000) == 0) {
        av_log(avctx, AV_LOG_VERBOSE,
               "Next picture number unknown. Assuming progressive frame.\n");
    }

    av_log(avctx, AV_LOG_VERBOSE, "Interlaced state: %d | trust_interlaced %d\n",
           interlaced, trust_interlaced);

    if (priv->pic.data[0] && !priv->need_second_field)
        avctx->release_buffer(avctx, &priv->pic);

    priv->need_second_field = interlaced && !priv->need_second_field;

    priv->pic.buffer_hints = FF_BUFFER_HINTS_VALID | FF_BUFFER_HINTS_PRESERVE |
                             FF_BUFFER_HINTS_REUSABLE;
    if (!priv->pic.data[0]) {
        if (avctx->get_buffer(avctx, &priv->pic) < 0) {
            av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
            return RET_ERROR;
        }
    }

    bwidth = av_image_get_linesize(avctx->pix_fmt, width, 0);
    if (priv->is_70012) {
        int pStride;

        if (width <= 720)
            pStride = 720;
        else if (width <= 1280)
            pStride = 1280;
        else
            pStride = 1920;
        sStride = av_image_get_linesize(avctx->pix_fmt, pStride, 0);
    } else {
        sStride = bwidth;
    }

    dStride = priv->pic.linesize[0];
    dst     = priv->pic.data[0];

    av_log(priv->avctx, AV_LOG_VERBOSE, "CrystalHD: Copying out frame\n");

    if (interlaced) {
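        /* Copy a single field: destination lines are interleaved, so dY
         * advances twice per copied source line. */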
        int dY = 0;
        int sY = 0;

        height /= 2;
        if (bottom_field) {
            av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: bottom field\n");
            dY = 1;
        } else {
            av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: top field\n");
            dY = 0;
        }

        for (sY = 0; sY < height; dY++, sY++) {
            memcpy(&(dst[dY * dStride]), &(src[sY * sStride]), bwidth);
            dY++;
        }
    } else {
        av_image_copy_plane(dst, dStride, src, sStride, bwidth, height);
    }

    priv->pic.interlaced_frame = interlaced;
    if (interlaced)
        priv->pic.top_field_first = !bottom_first;

    priv->pic.pkt_pts = pkt_pts;

    if (!priv->need_second_field) {
        *data_size       = sizeof(AVFrame);
        *(AVFrame *)data = priv->pic;
    }

    /*
     * Two types of PAFF content have been observed. One form causes the
     * hardware to return a field pair and the other individual fields,
     * even though the input is always individual fields. We must skip
     * copying on the next decode() call to maintain pipeline length in
     * the first case.
     */
    if (!interlaced && (output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC) &&
        (pic_type == PICT_TOP_FIELD || pic_type == PICT_BOTTOM_FIELD)) {
        av_log(priv->avctx, AV_LOG_VERBOSE, "Fieldpair from two packets.\n");
        return RET_SKIP_NEXT_COPY;
    }

    /*
     * Testing has shown that in all cases where we don't want to return the
     * full frame immediately, VDEC_FLAG_UNKNOWN_SRC is set.
     */
    return priv->need_second_field &&
           !(output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC) ?
           RET_COPY_NEXT_FIELD : RET_OK;
}
Example #2
static av_cold int init(AVCodecContext *avctx)
{
	CHDContext* priv;
	BC_STATUS ret;
	BC_INFO_CRYSTAL version;
	BC_INPUT_FORMAT format =
	{
		.FGTEnable   = FALSE,
		.Progressive = TRUE,
		.OptFlags    = 0x80000000 | vdecFrameRate59_94 | 0x40,
		.width       = avctx->width,
		.height      = avctx->height,
	};

	BC_MEDIA_SUBTYPE subtype;

	uint32_t mode = DTS_PLAYBACK_MODE |
	                DTS_LOAD_FILE_PLAY_FW |
	                DTS_SKIP_TX_CHK_CPB |
	                DTS_PLAYBACK_DROP_RPT_MODE |
	                DTS_SINGLE_THREADED_MODE |
	                DTS_DFLT_RESOLUTION(vdecRESOLUTION_1080p23_976);

	av_log(avctx, AV_LOG_VERBOSE, "CrystalHD Init for %s\n",
	       avctx->codec->name);

	avctx->pix_fmt = PIX_FMT_YUYV422;

	/* Initialize the library */
	priv               = avctx->priv_data;
	priv->avctx        = avctx;
	priv->is_nal       = avctx->extradata_size > 0 && *(avctx->extradata) == 1;
	priv->last_picture = -1;
	priv->decode_wait  = BASE_WAIT;

	subtype = id2subtype(priv, avctx->codec->id);
	switch (subtype)
	{
	case BC_MSUBTYPE_AVC1:
	{
		uint8_t *dummy_p;
		int dummy_int;
		AVBitStreamFilterContext *bsfc;

		uint32_t orig_data_size = avctx->extradata_size;
		uint8_t *orig_data = av_malloc(orig_data_size);
		if (!orig_data)
		{
			av_log(avctx, AV_LOG_ERROR,
			       "Failed to allocate copy of extradata\n");
			return AVERROR(ENOMEM);
		}
		memcpy(orig_data, avctx->extradata, orig_data_size);


		bsfc = av_bitstream_filter_init("h264_mp4toannexb");
		if (!bsfc)
		{
			av_log(avctx, AV_LOG_ERROR,
			       "Cannot open the h264_mp4toannexb BSF!\n");
			av_free(orig_data);
			return AVERROR_BSF_NOT_FOUND;
		}
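		/* Filtering a zero-length dummy packet makes the BSF rewrite
		 * avctx->extradata to Annex B in place; keep that converted copy as
		 * the SPS/PPS block and then restore the untouched avcC data. */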
		av_bitstream_filter_filter(bsfc, avctx, NULL, &dummy_p,
		                           &dummy_int, NULL, 0, 0);
		av_bitstream_filter_close(bsfc);

		priv->sps_pps_buf     = avctx->extradata;
		priv->sps_pps_size    = avctx->extradata_size;
		avctx->extradata      = orig_data;
		avctx->extradata_size = orig_data_size;

		format.pMetaData   = priv->sps_pps_buf;
		format.metaDataSz  = priv->sps_pps_size;
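		/* Byte 4 of the avcC header carries lengthSizeMinusOne in its low
		 * two bits, i.e. the size of each NAL length prefix minus one. */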
		format.startCodeSz = (avctx->extradata[4] & 0x03) + 1;
	}
	break;
	case BC_MSUBTYPE_H264:
		format.startCodeSz = 4;
		// Fall-through
	case BC_MSUBTYPE_VC1:
	case BC_MSUBTYPE_WVC1:
	case BC_MSUBTYPE_WMV3:
	case BC_MSUBTYPE_WMVA:
	case BC_MSUBTYPE_MPEG2VIDEO:
	case BC_MSUBTYPE_DIVX:
	case BC_MSUBTYPE_DIVX311:
		format.pMetaData  = avctx->extradata;
		format.metaDataSz = avctx->extradata_size;
		break;
	default:
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: Unknown codec name\n");
		return AVERROR(EINVAL);
	}
	format.mSubtype = subtype;

	/* Get a decoder instance */
	av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: starting up\n");
	// Initialize the Link and Decoder devices
	ret = DtsDeviceOpen(&priv->dev, mode);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: DtsDeviceOpen failed\n");
		goto fail;
	}

	ret = DtsCrystalHDVersion(priv->dev, &version);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_VERBOSE,
		       "CrystalHD: DtsCrystalHDVersion failed\n");
		goto fail;
	}
	priv->is_70012 = version.device == 0;

	if (priv->is_70012 &&
	        (subtype == BC_MSUBTYPE_DIVX || subtype == BC_MSUBTYPE_DIVX311))
	{
		av_log(avctx, AV_LOG_VERBOSE,
		       "CrystalHD: BCM70012 doesn't support MPEG4-ASP/DivX/Xvid\n");
		goto fail;
	}

	ret = DtsSetInputFormat(priv->dev, &format);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: SetInputFormat failed\n");
		goto fail;
	}

	ret = DtsOpenDecoder(priv->dev, BC_STREAM_TYPE_ES);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsOpenDecoder failed\n");
		goto fail;
	}

	ret = DtsSetColorSpace(priv->dev, OUTPUT_MODE422_YUY2);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsSetColorSpace failed\n");
		goto fail;
	}
	ret = DtsStartDecoder(priv->dev);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartDecoder failed\n");
		goto fail;
	}
	ret = DtsStartCapture(priv->dev);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartCapture failed\n");
		goto fail;
	}

	av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Init complete.\n");

	return 0;

fail:
	uninit(avctx);
	return -1;
}


/*
 * The CrystalHD doesn't report interlaced H.264 content in a way that allows
 * us to distinguish between specific cases that require different handling.
 * So, for now, we have to hard-code the behaviour we want.
 *
 * The default behaviour is to assume MBAFF with input and output fieldpairs.
 *
 * Define ASSUME_PAFF_OVER_MBAFF to treat input as PAFF with separate input
 * and output fields.
 *
 * Define ASSUME_TWO_INPUTS_ONE_OUTPUT to treat input as separate fields but
 * output as a single fieldpair.
 *
 * Define both to mess up your playback.
 */
#define ASSUME_PAFF_OVER_MBAFF 0
#define ASSUME_TWO_INPUTS_ONE_OUTPUT 0
static inline CopyRet copy_frame(AVCodecContext *avctx,
                                 BC_DTS_PROC_OUT *output,
                                 void *data, int *data_size,
                                 uint8_t second_field)
{
	BC_STATUS ret;
	BC_DTS_STATUS decoder_status;
	uint8_t is_paff;
	uint8_t next_frame_same;
	uint8_t interlaced;

	CHDContext *priv = avctx->priv_data;

	uint8_t bottom_field = (output->PicInfo.flags & VDEC_FLAG_BOTTOMFIELD) ==
	                       VDEC_FLAG_BOTTOMFIELD;
	uint8_t bottom_first = !!(output->PicInfo.flags & VDEC_FLAG_BOTTOM_FIRST);

	int width    = output->PicInfo.width;
	int height   = output->PicInfo.height;
	int bwidth;
	uint8_t *src = output->Ybuff;
	int sStride;
	uint8_t *dst;
	int dStride;

	ret = DtsGetDriverStatus(priv->dev, &decoder_status);
	if (ret != BC_STS_SUCCESS)
	{
		av_log(avctx, AV_LOG_ERROR,
		       "CrystalHD: GetDriverStatus failed: %u\n", ret);
		return RET_ERROR;
	}

	is_paff           = ASSUME_PAFF_OVER_MBAFF ||
	                    !(output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC);
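	/* picNumFlags holds the next picture number reported by the hardware;
	 * mask off the high flag bit before comparing. */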
	next_frame_same   = output->PicInfo.picture_number ==
	                    (decoder_status.picNumFlags & ~0x40000000);
	interlaced        = ((output->PicInfo.flags &
	                      VDEC_FLAG_INTERLACED_SRC) && is_paff) ||
	                    next_frame_same || bottom_field || second_field;

	av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: next_frame_same: %u | %u | %u\n",
	       next_frame_same, output->PicInfo.picture_number,
	       decoder_status.picNumFlags & ~0x40000000);

	if (priv->pic.data[0] && !priv->need_second_field)
		avctx->release_buffer(avctx, &priv->pic);

	priv->need_second_field = interlaced && !priv->need_second_field;

	priv->pic.buffer_hints = FF_BUFFER_HINTS_VALID | FF_BUFFER_HINTS_PRESERVE |
	                         FF_BUFFER_HINTS_REUSABLE;
	if (!priv->pic.data[0])
	{
		if (avctx->get_buffer(avctx, &priv->pic) < 0)
		{
			av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
			return RET_ERROR;
		}
	}

	bwidth = av_image_get_linesize(avctx->pix_fmt, width, 0);
	if (priv->is_70012)
	{
		int pStride;

		if (width <= 720)
			pStride = 720;
		else if (width <= 1280)
			pStride = 1280;
		else
			pStride = 1920;
		sStride = av_image_get_linesize(avctx->pix_fmt, pStride, 0);
	}
	else
	{
		sStride = bwidth;
	}

	dStride = priv->pic.linesize[0];
	dst     = priv->pic.data[0];

	av_log(priv->avctx, AV_LOG_VERBOSE, "CrystalHD: Copying out frame\n");

	if (interlaced)
	{
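		/* Copy a single field: destination lines are interleaved, so dY
		 * advances twice per copied source line. */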
		int dY = 0;
		int sY = 0;

		height /= 2;
		if (bottom_field)
		{
			av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: bottom field\n");
			dY = 1;
		}
		else
		{
			av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: top field\n");
			dY = 0;
		}

		for (sY = 0; sY < height; dY++, sY++)
		{
			memcpy(&(dst[dY * dStride]), &(src[sY * sStride]), bwidth);
			dY++;
		}
	}
	else
	{
		av_image_copy_plane(dst, dStride, src, sStride, bwidth, height);
	}

	priv->pic.interlaced_frame = interlaced;
	if (interlaced)
		priv->pic.top_field_first = !bottom_first;

	if (output->PicInfo.timeStamp != 0)
	{
		priv->pic.pkt_pts = opaque_list_pop(priv, output->PicInfo.timeStamp);
		av_log(avctx, AV_LOG_VERBOSE, "output \"pts\": %"PRIu64"\n",
		       priv->pic.pkt_pts);
	}

	if (!priv->need_second_field)
	{
		*data_size       = sizeof(AVFrame);
		*(AVFrame *)data = priv->pic;
	}

	if (ASSUME_TWO_INPUTS_ONE_OUTPUT &&
	        output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC)
	{
		av_log(priv->avctx, AV_LOG_VERBOSE, "Fieldpair from two packets.\n");
		return RET_SKIP_NEXT_COPY;
	}

	return RET_OK;
}
bool PrivateDecoderCrystalHD::Init(const QString &decoder,
                                   PlayerFlags flags,
                                   AVCodecContext *avctx)
{
    if ((decoder != "crystalhd") || !(flags & kDecodeAllowEXT) ||
        !avctx || getenv("NO_CRYSTALHD"))
        return false;

    static bool debugged = false;

    uint32_t well_documented = DTS_PLAYBACK_MODE | DTS_LOAD_FILE_PLAY_FW |
                               DTS_SKIP_TX_CHK_CPB |
                               DTS_PLAYBACK_DROP_RPT_MODE |
                               DTS_DFLT_RESOLUTION(vdecRESOLUTION_CUSTOM);
    INIT_ST;
    st = DtsDeviceOpen(&m_device, well_documented);
    CHECK_ST;
    if (!ok)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to open CrystalHD device");
        return false;
    }

    _BC_INFO_CRYSTAL_ info;
    st = DtsCrystalHDVersion(m_device, &info);
    CHECK_ST;
    if (!ok)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to get device info.");
        return false;
    }

    m_device_type = (BC_DEVICE_TYPE)info.device;

    if (!debugged)
    {
        LOG(VB_GENERAL, LOG_INFO, LOC + QString("Device: %1")
                .arg(device_to_string(m_device_type)));
        LOG(VB_GENERAL, LOG_INFO, LOC + QString("Library : %1.%2.%3")
                .arg(info.dilVersion.dilMajor)
                .arg(info.dilVersion.dilMinor)
                .arg(info.dilVersion.version));
        LOG(VB_GENERAL, LOG_INFO, LOC + QString("Driver  : %1.%2.%3")
                .arg(info.drvVersion.drvMajor)
                .arg(info.drvVersion.drvMinor)
                .arg(info.drvVersion.version));
        LOG(VB_GENERAL, LOG_INFO, LOC + QString("Firmware: %1.%2.%3")
                .arg(info.fwVersion.fwMajor)
                .arg(info.fwVersion.fwMinor)
                .arg(info.fwVersion.version));
    }

    if (BC_70012 == m_device_type)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC +
            "BCM70012 device is currently unsupported.");
        return false;
    }

    BC_HW_CAPS hw_caps;
    uint32_t codecs;
    st = DtsGetCapabilities(m_device, &hw_caps);
    CHECK_ST;
    if (!ok)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to get device capabilities");
        return false;
    }

    BC_OUTPUT_FORMAT m_desired_fmt = (m_device_type == BC_70015) ?
                                     OUTPUT_MODE422_YUY2 : OUTPUT_MODE420;
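    // 4:2:2 YUY2 output is requested on the BCM70015, 4:2:0 otherwise; the
    // capability loop below confirms the hardware actually supports it.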
    m_pix_fmt = OUTPUT_MODE_INVALID;
    for (int i = 0; i < hw_caps.ColorCaps.Count; i++)
    {
        if (m_desired_fmt == hw_caps.ColorCaps.OutFmt[i])
            m_pix_fmt = m_desired_fmt;
        if (!debugged)
        {
            LOG(VB_PLAYBACK, LOG_INFO, LOC +
                QString("Supported output format: %1")
                    .arg(bcmpixfmt_to_string(hw_caps.ColorCaps.OutFmt[i])));
        }
    }
    if (m_pix_fmt != m_desired_fmt)
    {
        LOG(VB_PLAYBACK, LOG_ERR, LOC +
            "Failed to find correct output format.");
        return false;
    }
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Using: %1")
            .arg(bcmpixfmt_to_string(m_pix_fmt)));

    codecs = hw_caps.DecCaps;
    if (!debugged)
    {
        LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("H.264 support: %1")
                .arg((bool)(codecs & BC_DEC_FLAGS_H264)));
        LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("MPEG2 support: %1")
                .arg((bool)(codecs & BC_DEC_FLAGS_MPEG2)));
        LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("VC1   support: %1")
                .arg((bool)(codecs & BC_DEC_FLAGS_VC1)));
        LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("MPEG4 support: %1")
                .arg((bool)(codecs & BC_DEC_FLAGS_M4P2)));
        debugged = true;
    }

    BC_MEDIA_SUBTYPE sub_type = BC_MSUBTYPE_INVALID;

    switch (avctx->codec_id)
    {
        case AV_CODEC_ID_MPEG4:
            if (codecs & BC_DEC_FLAGS_M4P2)
                sub_type = BC_MSUBTYPE_DIVX;
            break;
        case AV_CODEC_ID_MPEG1VIDEO:
            if (codecs & BC_DEC_FLAGS_MPEG2)
                sub_type = BC_MSUBTYPE_MPEG1VIDEO;
            break;
        case AV_CODEC_ID_MPEG2VIDEO:
            if (codecs & BC_DEC_FLAGS_MPEG2)
                sub_type = BC_MSUBTYPE_MPEG2VIDEO;
            break;
        case AV_CODEC_ID_VC1:
            if (codecs & BC_DEC_FLAGS_VC1)
            {
                if (avctx->codec_tag == MKTAG('W','V','C','1'))
                    sub_type = BC_MSUBTYPE_WVC1;
                else
                    sub_type = BC_MSUBTYPE_VC1;
            }
            break;
        case AV_CODEC_ID_WMV3:
            if (codecs & BC_DEC_FLAGS_VC1)
                sub_type = BC_MSUBTYPE_WMV3;
            break;
        case AV_CODEC_ID_H264:
            if (codecs & BC_DEC_FLAGS_H264)
            {
                if (avctx->extradata && avctx->extradata[0] == 0x01)
                {
                    if (!CreateFilter(avctx))
                    {
                        LOG(VB_PLAYBACK, LOG_ERR, LOC +
                            "Failed to create stream filter");
                        return false;
                    }
                    sub_type = BC_MSUBTYPE_AVC1;
                }
                else
                    sub_type = BC_MSUBTYPE_H264;
            }
            break;
    }

    if (sub_type == BC_MSUBTYPE_INVALID)
    {
        LOG(VB_PLAYBACK, LOG_ERR, LOC + QString("Codec %1 not supported")
                .arg(ff_codec_id_string(avctx->codec_id)));
        return false;
    }

    int nalsize = 4;
    if (avctx->codec_id == AV_CODEC_ID_H264)
    {
        LOG(VB_PLAYBACK, LOG_INFO, LOC +
            QString("H.264 Profile: %1 RefFrames: %2 Codec tag: %3")
                .arg(avctx->profile).arg(avctx->refs)
                .arg(fourcc_str(avctx->codec_tag)));
        if (avctx->extradata_size >= 5 && avctx->extradata[0] == 1)
        {
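            // avcC byte 4: the low two bits hold lengthSizeMinusOne, so the
            // NAL length prefix size is that value plus one.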
            nalsize = (avctx->extradata[4] & 0x03) + 1;
            LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("avcC nal size: %1")
                    .arg(nalsize));
        }
    }

    BC_INPUT_FORMAT fmt;
    memset(&fmt, 0, sizeof(BC_INPUT_FORMAT));
    fmt.OptFlags       = 0x80000000 | vdecFrameRateUnknown;
    fmt.width          = avctx->coded_width;
    fmt.height         = avctx->coded_height;
    fmt.Progressive    = 1;
    fmt.FGTEnable      = 0;
    fmt.MetaDataEnable = 0;
    fmt.metaDataSz     = avctx->extradata_size;
    fmt.pMetaData      = avctx->extradata;
    fmt.startCodeSz    = nalsize;
    fmt.mSubtype       = sub_type;

    st = DtsSetInputFormat(m_device, &fmt);
    CHECK_ST;
    if (!ok)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to set decoder input format");
        return false;
    }

    st = DtsOpenDecoder(m_device, BC_STREAM_TYPE_ES);
    CHECK_ST;
    if (!ok)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to open CrystalHD decoder");
        return false;
    }

    st = DtsSetColorSpace(m_device, m_pix_fmt);
    CHECK_ST;
    if (!ok)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to set decoder output format");
        return false;
    }

    st = DtsStartDecoder(m_device);
    CHECK_ST;
    if (!ok)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to start decoder");
        return false;
    }

    st = DtsStartCapture(m_device);
    CHECK_ST;
    if (!ok)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to start capture");
        return false;
    }

    Reset();

    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Created decoder %1 %2x%3")
        .arg(ff_codec_id_string(avctx->codec_id))
        .arg(avctx->coded_width).arg(avctx->coded_height));
    return true;
}