Example No. 1
STDMETHODIMP_(LRESULT) Tffvfw::decQuery(BITMAPINFO *lpbiInput,BITMAPINFO *lpbiOutput)
{
    if (initDec()) {
        if (lpbiInput==NULL) {
            return ICERR_ERROR;
        }
        CodecID codecId;
        autoptr<TvideoCodecDec> dec=initDecoder(lpbiInput,&codecId);
        if (codecId==CODEC_ID_NONE) {
            return ICERR_UNSUPPORTED;
        }
        if (lpbiOutput!=NULL) {
            if (!dec) {
                return ICERR_UNSUPPORTED;
            }
            dec->forceOutputColorspace(&lpbiInput->bmiHeader,&autoforcedilace,autoforcedcolorspaces);
            const BITMAPINFOHEADER *outhdr=&lpbiOutput->bmiHeader;
            char_t pomS[60];
            DPRINTF(_l("Tffvfw::decQuery: %s"),fourcc2str(hdr2fourcc(outhdr,NULL),pomS,60));
            if (lpbiInput->bmiHeader.biWidth!=outhdr->biWidth || abs(lpbiInput->bmiHeader.biHeight)!=abs(outhdr->biHeight) || getBMPcolorspace(outhdr,autoforcedcolorspaces.decGetForcedCsp(decVFW))==FF_CSP_NULL) {
                return ICERR_BADFORMAT;
            }
        }
        return ICERR_OK;
    } else {
        return VFW_E_RUNTIME_ERROR;
    }
}
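For context, decQuery is the handler behind the VFW ICM_DECOMPRESS_QUERY message. Below is a minimal sketch of how a caller reaches it through the standard Video Compression Manager API; it assumes the decoder is registered under the FFDS FOURCC, and canDecode is a hypothetical helper name.

#include <windows.h>
#include <vfw.h>   // link with vfw32.lib

bool canDecode(BITMAPINFO *lpbiIn, BITMAPINFO *lpbiOut)
{
    // Open the decompressor; ICTYPE_VIDEO selects the 'vidc' handler class.
    HIC hic = ICOpen(ICTYPE_VIDEO, mmioFOURCC('F', 'F', 'D', 'S'), ICMODE_DECOMPRESS);
    if (!hic) {
        return false;
    }
    // lpbiOut may be NULL to ask "can this input be decoded at all?",
    // which corresponds to the lpbiOutput==NULL path in decQuery above.
    LRESULT res = ICDecompressQuery(hic, lpbiIn, lpbiOut);
    ICClose(hic);
    return res == ICERR_OK;
}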
Example No. 2
HRESULT TffdshowVideoInputPin::getInCodecString(char_t *buf, size_t buflen)
{
    if (!buf) {
        return E_POINTER;
    }
    if (video) {
        char_t name[60];
        tsnprintf_s(buf, buflen, _TRUNCATE, _l("%s (%s)"), fourcc2str(biIn.bmiHeader.biCompression, name, 60), video->getName());
        buf[buflen - 1] = '\0';
    } else {
        buf[0] = '\0';
    }
    return S_OK;
}
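getInCodecString relies on fourcc2str to turn the biCompression DWORD into readable text. The helper below is a hypothetical stand-in illustrating that conversion: a FOURCC is four ASCII characters packed little-endian into a DWORD, which is why the literal 0x31435657 checked in Example No. 3 corresponds to "WVC1".

#include <cstddef>

// Hypothetical stand-in for fourcc2str: unpack a little-endian FOURCC into text.
void fourccToString(unsigned long fcc, char *buf, size_t buflen)
{
    if (buflen < 5) {
        if (buflen) {
            buf[0] = '\0';
        }
        return;
    }
    for (int i = 0; i < 4; i++) {
        char c = (char)((fcc >> (8 * i)) & 0xff);
        buf[i] = c ? c : ' ';   // keep the output printable for unusual values
    }
    buf[4] = '\0';              // e.g. 0x31435657 -> "WVC1"
}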
Example No. 3
bool TffdshowVideoInputPin::init(const CMediaType &mt)
{
    DPRINTF(_l("TffdshowVideoInputPin::init"));
    bool dont_use_rtStop_from_upper_stream = false;
    isInterlacedRawVideo = false;
    if (mt.formattype == FORMAT_VideoInfo) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)mt.pbFormat;
        init_VIH_and_VIH2_common_part(vih->rcSource, vih->rcTarget, vih->dwBitRate, vih->dwBitErrorRate, vih->AvgTimePerFrame, vih->bmiHeader);
    } else if (mt.formattype == FORMAT_VideoInfo2) {
        VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2*)mt.pbFormat;
        init_VIH_and_VIH2_common_part(vih2->rcSource, vih2->rcTarget, vih2->dwBitRate, vih2->dwBitErrorRate, vih2->AvgTimePerFrame, vih2->bmiHeader);
        isInterlacedRawVideo = vih2->dwInterlaceFlags & AMINTERLACE_IsInterlaced;
        pictIn.setDar(Rational(vih2->dwPictAspectRatioX, vih2->dwPictAspectRatioY));
        DPRINTF(_l("TffdshowVideoInputPin::initVideo: darX:%i, darY:%i"), vih2->dwPictAspectRatioX, vih2->dwPictAspectRatioY);
    } else if (mt.formattype == FORMAT_MPEGVideo) {
        MPEG1VIDEOINFO *mpeg1info = (MPEG1VIDEOINFO*)mt.pbFormat;
        biIn.bmiHeader = mpeg1info->hdr.bmiHeader;
        biIn.bmiHeader.biCompression = FOURCC_MPG1;
        pictIn.setSize(std::max(mpeg1info->hdr.rcSource.right, mpeg1info->hdr.bmiHeader.biWidth), std::max(mpeg1info->hdr.rcSource.bottom, mpeg1info->hdr.bmiHeader.biHeight));
    } else if (mt.formattype == FORMAT_MPEG2Video) {
        MPEG2VIDEOINFO *mpeg2info = (MPEG2VIDEOINFO*)mt.pbFormat;
        biIn.bmiHeader = mpeg2info->hdr.bmiHeader;
        pictIn.setSize(std::max(mpeg2info->hdr.rcSource.right, mpeg2info->hdr.bmiHeader.biWidth), std::max(mpeg2info->hdr.rcSource.bottom, mpeg2info->hdr.bmiHeader.biHeight));
        pictIn.setDar(Rational(mpeg2info->hdr.dwPictAspectRatioX, mpeg2info->hdr.dwPictAspectRatioY));
        if (biIn.bmiHeader.biCompression == 0 || biIn.bmiHeader.biCompression == 0x0038002d) {
            if (mt.subtype == MEDIASUBTYPE_H264_TRANSPORT) {
                biIn.bmiHeader.biCompression = FOURCC_H264;
            } else if (mt.subtype == MEDIASUBTYPE_AVC1 || mt.subtype == MEDIASUBTYPE_avc1 || mt.subtype == MEDIASUBTYPE_H264 || mt.subtype == MEDIASUBTYPE_h264 || mt.subtype == MEDIASUBTYPE_CCV1) {
                biIn.bmiHeader.biCompression = FOURCC_H264;
            } else {
                biIn.bmiHeader.biCompression = FOURCC_MPG2;
            }
        } else {
            biIn.bmiHeader.biCompression = FCCupper(biIn.bmiHeader.biCompression);
            dont_use_rtStop_from_upper_stream = true;
        }
    } else if (mt.formattype == FORMAT_TheoraIll) {
        memset(&biIn, 0, sizeof(biIn));
        sTheoraFormatBlock *oggFormat = (sTheoraFormatBlock*)mt.pbFormat;
        biIn.bmiHeader.biCompression = FOURCC_THEO;
        pictIn.setSize(biIn.bmiHeader.biWidth = oggFormat->width, biIn.bmiHeader.biHeight = oggFormat->height);
        pictIn.setDar(Rational(oggFormat->aspectNumerator, oggFormat->aspectDenominator));
        biIn.bmiHeader.biBitCount = 12;
    } else if (mt.formattype == FORMAT_RLTheora) {
        struct RLTheora {
            VIDEOINFOHEADER hdr;
            DWORD headerSize[3];    // 0: Header, 1: Comment, 2: Codebook
        };
        const RLTheora *rl = (const RLTheora*)mt.pbFormat;
        GetBitContext gb;
        init_get_bits(&gb, (const uint8_t*)(rl + 1), rl->headerSize[0]);
        int ptype = get_bits(&gb, 8);
        if (!(ptype & 0x80)) {
            return false;
        }
        biIn.bmiHeader.biCompression = FOURCC_THEO;
        skip_bits(&gb, 6 * 8); /* "theora" */
        int major = get_bits(&gb, 8); /* version major */
        int minor = get_bits(&gb, 8); /* version minor */
        int micro = get_bits(&gb, 8); /* version micro */
        int theora = (major << 16) | (minor << 8) | micro;

        if (theora < 0x030200) {
            ;//flipped_image = 1;
        }

        biIn.bmiHeader.biWidth = get_bits(&gb, 16) << 4;
        biIn.bmiHeader.biHeight = get_bits(&gb, 16) << 4;
        pictIn.setSize(biIn.bmiHeader.biWidth, biIn.bmiHeader.biHeight);

        skip_bits(&gb, 24); /* picture (visible) width */
        skip_bits(&gb, 24); /* picture (visible) height */

        skip_bits(&gb, 8); /* offset x */
        skip_bits(&gb, 8); /* offset y */

        skip_bits(&gb, 32); /* fps numerator */
        skip_bits(&gb, 32); /* fps denominator */

        Rational sample_aspect_ratio;
        sample_aspect_ratio.num = get_bits(&gb, 24); /* aspect numerator */
        sample_aspect_ratio.den = get_bits(&gb, 24); /* aspect denominator */
        pictIn.setSar(sample_aspect_ratio);
    } else {
        return false;
    }

    REFERENCE_TIME avgTimePerFrame0 = getAvgTimePerFrame(mt);
    avgTimePerFrame = avgTimePerFrame0 ? avgTimePerFrame0 : 400000;

    char_t pomS[60];
    DPRINTF(_l("TffdshowVideoInputPin::initVideo: %s, width:%i, height:%i, aspectX:%i, aspectY:%i"), fourcc2str(hdr2fourcc(&biIn.bmiHeader, &mt.subtype), pomS, 60) , pictIn.rectFull.dx, pictIn.rectFull.dy, pictIn.rectFull.dar().num, pictIn.rectFull.dar().den);
again:
    codecId = (AVCodecID)getVideoCodecId(&biIn.bmiHeader, &mt.subtype, &biIn.bmiHeader.biCompression);

    // FIXME Experimental //
    // A VC1 (in EVO) stream may have an attached media type during playback (say, once every 5 seconds).
    // When I try to use its codec private data, the video stutters heavily.
    // pContext.pDMO->SetInputType (currently ff_wmv.cpp line 769) takes too long.
    // I gave up using it and decided to ignore it during playback of a VC1 stream.
    // It works fine for my sample.
    if (video) {
        if (/*video->codecId == CODEC_ID_WMV9_LIB && */wasVC1 && biIn.bmiHeader.biCompression == 0x31435657 /* "WVC1" */) {
            return true;
        } else if (is_quicksync_codec(video->codecId)) {
            // check if output pin is connected to a supported filter
            IPin *pConnectedPin = NULL;
            if (fv && fv->output) {
                pConnectedPin = fv->output->GetConnected();
                const CLSID &out = GetCLSID(pConnectedPin);
                if (out == CLSID_SampleGrabber || out == CLSID_MediaDetFilter) {
                    delete video;
                    codec = video = NULL;
                    switch (codecId) {
                        case CODEC_ID_H264_QUICK_SYNC:
                            codecId = AV_CODEC_ID_H264;
                            break;
                        case CODEC_ID_MPEG2_QUICK_SYNC:
                            codecId = AV_CODEC_ID_MPEG2VIDEO;
                            break;
                        case CODEC_ID_VC1_QUICK_SYNC:
                            codecId = CODEC_ID_WMV9_LIB;
                            break;
                        default:
                            ASSERT(FALSE); // this shouldn't happen!
                    }
                }
            }

            // no need to reset anything
            if (video) {
                return true;
            }
        } else {
            delete video;
            codec = video = NULL;
        }
    }
    DPRINTF(_l("TffdshowVideoInputPin::initVideo Codec detected : %s"), getCodecName(codecId));
    if (codecId == AV_CODEC_ID_NONE) {
        if (pCompatibleFilter != NULL) {
            rawDecode = true;
            if (video) {
                delete video;
                codec = video = NULL;
            }
            return true;
        }
        return false;
    }

    if (h264_codec(codecId) || codecId == CODEC_ID_H264_DXVA) {
        Textradata extradata(mt, 16);
        if (extradata.size) {
            H264_SPS sps;
            decodeH264SPS(extradata.data, extradata.size, pictIn, &sps);
            // Set frame rate information from SPS::VUI.
            if (!avgTimePerFrame0 // Use information from the upper stream filter if available.
                    && sps.timing_info_present_flag && sps.time_scale && sps.num_units_in_tick) {
                avgTimePerFrame = 2 * REF_SECOND_MULT * sps.num_units_in_tick / sps.time_scale;
            }
        }
    } else if (mpeg4_codec(codecId)) {
        Textradata extradata(mt, 16);
        if (extradata.size) {
            decodeMPEG4pictureHeader(extradata.data, extradata.size, pictIn);
        }
    } else if (mpeg12_codec(codecId)) {
        Textradata extradata(mt, 16);
        if (extradata.size) {
            bool isH264;
            if (decodeMPEGsequenceHeader(biIn.bmiHeader.biCompression == FOURCC_MPG2, extradata.data, extradata.size, pictIn, &isH264) && isH264) {
                biIn.bmiHeader.biCompression = FOURCC_H264;
                goto again;
            }
        }
    }

    if (!fv->sink) {
        rawDecode = true;
        if (video) {
            delete video;
            codec = video = NULL;
        }
    } else {
        fv->initCodecSettings();
        codec = video = TvideoCodecDec::initDec(fv->deci, fv->sink, codecId, biIn.bmiHeader.biCompression, mt);

        if (!video) {
            return false;
        } else {
            static const GUID CLSID_NeroDigitalParser = {0xE206E4DE, 0xA7EE, 0x4A62, 0xB3, 0xE9, 0x4F, 0xBC, 0x8F, 0xE8, 0x4C, 0x73};
            static const GUID CLSID_HaaliMatroskaFile = {0x55DA30FC, 0xF16B, 0x49FC, 0xBA, 0xA5, 0xAE, 0x59, 0xFC, 0x65, 0xF8, 0x2D};
            codecId = video->codecId;
            //dont_use_rtStop_from_upper_stream=biIn.bmiHeader.biCompression==FOURCC_AVC1 && (searchPreviousFilter(this,CLSID_NeroDigitalParser) || searchPreviousFilter(this,CLSID_HaaliMatroskaFile));
            video->connectedSplitter = connectedSplitter;
            video->isInterlacedRawVideo = isInterlacedRawVideo;
            video->containerSar = pictIn.rectFull.sar;
            if (!video->beginDecompress(pictIn,
                                        biIn.bmiHeader.biCompression, mt,
                                        (dont_use_rtStop_from_upper_stream ? TvideoCodecDec::SOURCE_REORDER : 0))) {
                delete video;
                codec = video = NULL;
                return false;
            }
        }
        rawDecode = raw_codec(codecId);
    }
    allocator.NotifyMediaType(mt);
    strippacket = !!(mt.majortype == MEDIATYPE_DVD_ENCRYPTED_PACK);
    wasVC1 = biIn.bmiHeader.biCompression == 0x31435657 /* "WVC1" */;
    return true;
}
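The FORMAT_RLTheora branch above walks the Theora identification header with FFmpeg's GetBitContext. The sketch below re-reads the same fields with a self-contained MSB-first bit reader, purely to illustrate the header layout; BitReader, TheoraIdHeader and parseTheoraIdHeader are hypothetical names, and bounds checking is omitted.

#include <cstdint>
#include <cstddef>

struct BitReader {              // minimal MSB-first reader, like init_get_bits/get_bits
    const uint8_t *p;
    size_t bit;
    explicit BitReader(const uint8_t *data) : p(data), bit(0) {}
    uint32_t read(int n) {
        uint32_t v = 0;
        while (n--) {
            v = (v << 1) | ((p[bit >> 3] >> (7 - (bit & 7))) & 1);
            ++bit;
        }
        return v;
    }
    void skip(int n) {
        bit += n;
    }
};

struct TheoraIdHeader {
    uint32_t width, height;     // coded size in pixels (multiples of 16)
    uint32_t sarNum, sarDen;    // sample aspect ratio
};

bool parseTheoraIdHeader(const uint8_t *buf, TheoraIdHeader *out)
{
    BitReader gb(buf);
    if (!(gb.read(8) & 0x80)) {     // header-packet flag, same check as above
        return false;
    }
    gb.skip(6 * 8);                 // "theora"
    gb.skip(3 * 8);                 // version major/minor/micro
    out->width  = gb.read(16) << 4; // macroblock width  * 16
    out->height = gb.read(16) << 4; // macroblock height * 16
    gb.skip(24 + 24);               // picture (visible) width / height
    gb.skip(8 + 8);                 // picture offset x / y
    gb.skip(32 + 32);               // fps numerator / denominator
    out->sarNum = gb.read(24);      // aspect numerator
    out->sarDen = gb.read(24);      // aspect denominator
    return true;
}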
Example No. 4
HRESULT TffdshowVideoInputPin::CheckMediaType(const CMediaType* mt)
{
    if (mt->majortype != MEDIATYPE_Video && !(mt->majortype == MEDIATYPE_DVD_ENCRYPTED_PACK && supdvddec)) {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }
    if (mt->subtype == MEDIASUBTYPE_DVD_SUBPICTURE) {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }
    BITMAPINFOHEADER *hdr = NULL, hdr0 = { 0 }; // zero-init hdr0: the Theora branches below fill only a few fields

    if (mt->formattype == FORMAT_VideoInfo) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)mt->pbFormat;
        hdr = &vih->bmiHeader;
        fixMPEGinAVI(hdr->biCompression);
    } else if (mt->formattype == FORMAT_VideoInfo2) {
        VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2*)mt->pbFormat;
        hdr = &vih2->bmiHeader;
        fixMPEGinAVI(hdr->biCompression);
    } else if (mt->formattype == FORMAT_MPEGVideo) {
        MPEG1VIDEOINFO *mpeg1info = (MPEG1VIDEOINFO*)mt->pbFormat;
        hdr = &(hdr0 = mpeg1info->hdr.bmiHeader);
        hdr->biCompression = FOURCC_MPG1;
    } else if (mt->formattype == FORMAT_MPEG2Video) {
        MPEG2VIDEOINFO *mpeg2info = (MPEG2VIDEOINFO*)mt->pbFormat;
        hdr = &(hdr0 = mpeg2info->hdr.bmiHeader);
        if (hdr->biCompression == 0 || hdr->biCompression == 0x0038002d) {
            if (mt->subtype == MEDIASUBTYPE_H264_TRANSPORT) {
                hdr->biCompression = FOURCC_H264;
            } else if (mt->subtype == MEDIASUBTYPE_AVC1 || mt->subtype == MEDIASUBTYPE_avc1 || mt->subtype == MEDIASUBTYPE_H264 || mt->subtype == MEDIASUBTYPE_h264 || mt->subtype == MEDIASUBTYPE_CCV1) {
                hdr->biCompression = FOURCC_H264;
            } else {
                hdr->biCompression = FOURCC_MPG2;
            }
        }
    } else if (mt->formattype == FORMAT_TheoraIll) {
        sTheoraFormatBlock *oggFormat = (sTheoraFormatBlock*)mt->pbFormat;
        hdr = &hdr0;
        hdr->biWidth = oggFormat->width;
        hdr->biHeight = oggFormat->height;
        hdr->biCompression = FOURCC_THEO;
    } else if (mt->formattype == FORMAT_RLTheora) {
        hdr = &hdr0;
        hdr->biCompression = FOURCC_THEO;
    } else {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    char_t pomS[60];
    DPRINTF(_l("TffdshowVideoInputPin::CheckMediaType: %s, %i, %i"), fourcc2str(hdr2fourcc(hdr, &mt->subtype), pomS, 60), hdr->biWidth, hdr->biHeight);

    /* Information: WMP 11 and Media Center under Vista no longer probe for uncompressed formats, so there is no way
       to get the ffdshow raw video decoder into the graph for postprocessing of uncompressed video.
       So instead of answering "media type not supported", we report it as supported, but only if another installed
       filter can handle this format; ffdshow is then plugged in after that codec (the connection is made by
       TffdshowDecVideo::ConnectCompatibleFilter). */
    int res = getVideoCodecId(hdr, &mt->subtype, NULL);

    OSVERSIONINFO osvi;
    ZeroMemory(&osvi, sizeof(OSVERSIONINFO));
    osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
    GetVersionEx(&osvi);
    ffstring exeFilename(fv->getExefilename());
    exeFilename.ConvertToLowerCase();

    if (res == 0 && pCompatibleFilter == NULL &&
            fv->deci->getParam2(IDFF_alternateUncompressed) == 1 && // Enable WMP11 postprocessing
            fv->deci->getParam2(IDFF_rawv) != 0 && // Raw video support not disabled
            (exeFilename == _l("wmplayer.exe") ||
             exeFilename == _l("ehshell.exe"))) { // Only WMP and Media Center are concerned
        bool doPostProcessing = false;
        if (osvi.dwMajorVersion > 5) { // OS >= VISTA
            doPostProcessing = true;
        } else if (osvi.dwMajorVersion == 5 // If OS=XP, check version of WMP
                   && exeFilename == _l("ehshell.exe")) { // But only for Media Center
            // Read the WMP version from the registry
            HKEY hKey = NULL;
            LONG regErr;

            // Read WMP version from the following registry key
            regErr = RegOpenKeyEx(HKEY_LOCAL_MACHINE, _l("SOFTWARE\\Microsoft\\MediaPlayer\\Setup\\Installed Versions"), 0, KEY_READ, &hKey);
            if (regErr != ERROR_SUCCESS) {
                return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK;
            }

            DWORD dwType;
            BYTE buf[4096] = { '\0' };   // make it big enough for any kind of values
            DWORD dwSize = sizeof(buf);
            regErr = RegQueryValueEx(hKey, _T("wmplayer.exe"), 0, &dwType, buf, &dwSize);

            if (hKey) {
                RegCloseKey(hKey);
            }

            if (regErr != ERROR_SUCCESS || dwType != REG_BINARY) {
                return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK;
            }

            if (buf[2] >= 0x0b) { // Third byte is the major version number
                doPostProcessing = true;
            }
        }


        if (doPostProcessing) {
            DPRINTF(_l("TffdshowVideoInputPin::CheckMediaType: input format disabled or not supported. Trying to maintain in the graph..."));
            IFilterMapper2 *pMapper = NULL;
            IEnumMoniker *pEnum = NULL;

            HRESULT hr = CoCreateInstance(CLSID_FilterMapper2,
                                          NULL, CLSCTX_INPROC, IID_IFilterMapper2,
                                          (void **) &pMapper);

            if (FAILED(hr)) {
                // The filter mapper is unavailable; fall back to the normal answer.
                return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK;
            }

            GUID arrayInTypes[2];
            arrayInTypes[0] = mt->majortype;//MEDIATYPE_Video;
            arrayInTypes[1] = mt->subtype;//MEDIASUBTYPE_dvsd;

            hr = pMapper->EnumMatchingFilters(
                     &pEnum,
                     0,                  // Reserved.
                     TRUE,               // Use exact match?
                     MERIT_DO_NOT_USE + 1, // Minimum merit.
                     TRUE,               // At least one input pin?
                     1,                  // Number of major type/subtype pairs for input.
                     arrayInTypes,       // Array of major type/subtype pairs for input.
                     NULL,               // Input medium.
                     NULL,               // Input pin category.
                     FALSE,              // Must be a renderer?
                     TRUE,               // At least one output pin?
                     0,                  // Number of major type/subtype pairs for output.
                     NULL,               // Array of major type/subtype pairs for output.
                     NULL,               // Output medium.
                     NULL);              // Output pin category.

            if (FAILED(hr) || pEnum == NULL) {
                pMapper->Release();
                return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK;
            }

            // Enumerate the monikers.
            IMoniker *pMoniker;
            ULONG cFetched;

            while (pEnum->Next(1, &pMoniker, &cFetched) == S_OK) {
                IPropertyBag *pPropBag = NULL;
                hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
                                             (void **)&pPropBag);

                if (SUCCEEDED(hr)) {
                    // To retrieve the friendly name of the filter, do the following:
                    VARIANT varName;
                    VariantInit(&varName);
                    hr = pPropBag->Read(L"FriendlyName", &varName, 0);
                    if (SUCCEEDED(hr)) {
                        if (varName.pbstrVal == NULL || _strnicmp(FFDSHOW_NAME_L, varName.bstrVal, 22) != 0) {
                            // Display the name in your UI somehow.
                            DPRINTF(_l("TffdshowVideoInputPin::CheckMediaType: compatible filter found (%s)"), varName.pbstrVal);
                            hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)&pCompatibleFilter);
                        }
                    }

                    // Now add the filter to the graph. Remember to release pFilter later.
                    IFilterGraph *pGraph = NULL;
                    fv->deci->getGraph(&pGraph);

                    IGraphBuilder *pGraphBuilder = NULL;
                    hr = pGraph->QueryInterface(IID_IGraphBuilder, (void **)&pGraphBuilder);
                    if (hr == S_OK) {
                        pGraphBuilder->AddFilter(pCompatibleFilter, varName.bstrVal);
                    } else if (pCompatibleFilter) {
                        pCompatibleFilter->Release();
                        pCompatibleFilter = NULL;
                    }

                    // Clean up.
                    VariantClear(&varName);
                    if (pGraphBuilder) {
                        pGraphBuilder->Release();
                    }
                    pPropBag->Release();
                }
                pMoniker->Release();
                if (pCompatibleFilter != NULL) {
                    break;
                }
            }

            // Clean up.
            pMapper->Release();
            pEnum->Release();
        }
    }
    if (pCompatibleFilter != NULL) {
        return S_OK;
    }
    return res == AV_CODEC_ID_NONE ? VFW_E_TYPE_NOT_ACCEPTED : S_OK;
}
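The XP/Media Center path above decides whether to enable the post-processing workaround by reading WMP's version from the registry, where the third byte of the REG_BINARY value holds the major version. A condensed illustration of that check follows; the helper name is hypothetical.

#include <windows.h>
#include <tchar.h>

// Returns true when the installed Windows Media Player reports major version 11 or later.
bool isWmp11OrLater()
{
    HKEY hKey = NULL;
    if (RegOpenKeyEx(HKEY_LOCAL_MACHINE,
                     _T("SOFTWARE\\Microsoft\\MediaPlayer\\Setup\\Installed Versions"),
                     0, KEY_READ, &hKey) != ERROR_SUCCESS) {
        return false;
    }
    BYTE buf[4096] = { 0 };
    DWORD dwType = 0, dwSize = sizeof(buf);
    LONG regErr = RegQueryValueEx(hKey, _T("wmplayer.exe"), NULL, &dwType, buf, &dwSize);
    RegCloseKey(hKey);
    // The third byte of the binary value is the major version; 0x0b means WMP 11.
    return regErr == ERROR_SUCCESS && dwType == REG_BINARY && dwSize >= 3 && buf[2] >= 0x0b;
}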
Example No. 5
const char_t* TinfoDecVideo::TinfoValueDecVideo::getVal0(bool &wasChange, bool &splitline)
{
    int percent;
    switch (item->type) {
#ifdef OSDTIMETABALE
        case IDFF_OSDtype_timetable:
            tsprintf(s, _l("%3.2fms"), deciV->getOSDtime() / 10000.0);
            wasChange = true;
            return s;
#endif
        case IDFF_OSDtype_TimeOnffdshow:
            percent = deciV->get_time_on_ffdshow_percent();
            if (percent < 0 || percent > 300) {
                tsprintf(s, _l("%3dms (N/A )"), deciV->get_time_on_ffdshow());
            } else {
                tsprintf(s, _l("%3dms (%3d%%)"), deciV->get_time_on_ffdshow(), percent);
            }
            wasChange = true;
            return s;
        case IDFF_OSDtype_inputSize:
            return getInputSize(s, wasChange);
        case IDFF_OSDtype_inputAspect:
            return getInputAspect(s, wasChange, countof(s));
        case IDFF_OSDtype_inputSizeAspect: {
            bool wasChangeSize = false;
            getInputSize(sizeStr, wasChangeSize);
            bool wasChangeAspect = false;
            getInputAspect(aspectStr, wasChangeAspect, countof(aspectStr));
            if (wasChange = (wasChangeSize || wasChangeAspect)) {
                tsnprintf_s(s, countof(s), _TRUNCATE, _l("%s, %s"), sizeStr, aspectStr);
            }
            return s;
        }
        case IDFF_OSDtype_meanQuant: {
            char_t news[60];
            float q;
            if (deciV->calcMeanQuant(&q) == S_OK && q > 0) {
                tsprintf(news, _l("%-5.2f"), q);
            } else {
                ff_strncpy(news, _l("not available"), countof(news));
            }
            if (strcmp(news, olds) != 0) {
                ff_strncpy(s, news, countof(s));
                wasChange = true;
            }
            return s;
        }
        case IDFF_OSDtype_outputFOURCC:
            deciV->getOutputFourcc(s, 50);
            wasChange = strcmp(s, olds) != 0;
            return s;
        case IDFF_OSDtype_currentFrameTime: {
            int val;
            if (SUCCEEDED(deciV->getCurrentFrameTime((unsigned int*)&val))) {
                tsprintf(s, _l("%02i:%02i:%02i"), val / 3600, (val / 60) % 60, val % 60);
                wasChange = true;
            } else {
                strcpy(s, _l("failed"));
                wasChange = false;
            }
            return s;
        }
        case IDFF_OSDtype_remainingFrameTime: {
            int val;
            if (SUCCEEDED(deciV->getRemainingFrameTime((unsigned int*)&val))) {
                tsprintf(s, _l("%02i:%02i:%02i"), val / 3600, (val / 60) % 60, val % 60);
                wasChange = true;
            } else {
                strcpy(s, _l("failed"));
                wasChange = false;
            }
            return s;
        }
        case IDFF_OSDtype_accurDeblock:
            if (olds[0] == '\0') {
                tsprintf(s, deciV->quantsAvailable() == S_OK ? _l("yes") : _l("no"));
                wasChange = true;
            }
            return s;
        case IDFF_OSDtype_inputFPS: {
            unsigned int fps1000;
            if (deciV->getAVIfps(&fps1000) != S_OK) {
                s[0] = '\0';
            } else {
                tsprintf(s, _l("%-7.3f"), float(fps1000 / 1000.0));
            }
            wasChange = strcmp(s, olds) != 0;
            return s;
        }
        case IDFF_OSDtype_inputFOURCC: {
            fourcc2str(deciV->getMovieFOURCC(), s, countof(s));
            wasChange = strcmp(s, olds) != 0;
            return s;
        }
        case IDFF_OSDtype_QueueCount: {
            int val = deciV->getQueuedCount();
            wasChange = true;
            if (val >= 0) {
                tsprintf(s, _l("%2d"), val);
            } else {
                val = -1 * val;
                switch (val) {
                    case IDD_QUEUEMSG_1:
                    case IDD_QUEUEMSG_2:
                    case IDD_QUEUEMSG_3:
                    case IDD_QUEUEMSG_4:
                    case IDD_QUEUEMSG_5:
                    case IDD_QUEUEMSG_6:
                    case IDD_QUEUEMSG_7:
                        ff_strncpy(s, trans->translate(val), countof(s));
                        break;
                    case IDD_QUEUEMSG_8: {
                        int late = (int)((-1) * deciV->getLate() / 10000);
                        tsnprintf_s(s, countof(s), _TRUNCATE, _l("%s %4dms"), trans->translate(val), late > 0 ? late : 0);
                    }
                }
            }
            return s;
        }
        case IDFF_OSDtype_Late: {
            int late = (int)deciV->getLate() / 10000;
            tsprintf(s, _l("%7dms"), late > 0 ? late : 0);
            wasChange = true;
            return s;
        }
        case IDFF_OSDtype_idct: {
            const char *idct0 = deciV->get_current_idct();
            if (idct0) {
                text<char_t> idct(idct0);
                ff_strncpy(s, (const char_t*)idct, countof(s));
            } else {
                tsprintf(s, _l("unknown"));
            }
            return s;
        }
        case IDFF_OSDtype_AviSynth_Info: {
            const char *info0 = deciV->getAviSynthInfo();
            if (info0) {
                text<char_t> info(info0);
                ff_strncpy(s, (const char_t*)info, countof(s));
            } else {
                tsprintf(s, _l("unavailable"));
            }
            wasChange = true;
            return s;
        }
        default:
            return TinfoValueDec::getVal0(wasChange, splitline);
    }
}
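Several OSD items above rest on two small conversions: DirectShow reference time is counted in 100 ns units, so dividing by 10000 yields milliseconds, and a whole-second count splits into HH:MM:SS with integer arithmetic. A tiny standalone sketch, with made-up sample values:

#include <cstdio>
#include <cstdint>

int main()
{
    int64_t late100ns = 123456789;            // hypothetical getLate() result in 100 ns units
    int lateMs = (int)(late100ns / 10000);    // 100 ns -> ms, as in IDFF_OSDtype_Late
    unsigned int seconds = 5025;              // hypothetical frame time in whole seconds
    std::printf("late: %dms, time: %02u:%02u:%02u\n",
                lateMs, seconds / 3600, (seconds / 60) % 60, seconds % 60);
    return 0;
}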