Example #1
void CHdmvSub::Render(SubPicDesc& spd, REFERENCE_TIME rt, RECT& bbox)
{
    HDMV_PRESENTATION_SEGMENT* pPresentationSegment = FindPresentationSegment(rt);

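    // Start with an inverted (empty) bounding box; the min/max updates below grow it around every rendered object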
    bbox.left   = LONG_MAX;
    bbox.top    = LONG_MAX;
    bbox.right  = 0;
    bbox.bottom = 0;

    if (pPresentationSegment) {
        POSITION pos = pPresentationSegment->objects.GetHeadPosition();

        TRACE_HDMVSUB(_T("CHdmvSub:Render Presentation segment %d --> %s - %s\n"), pPresentationSegment->composition_descriptor.nNumber,
                      ReftimeToString(pPresentationSegment->rtStart), ReftimeToString(pPresentationSegment->rtStop));

        while (pos) {
            CompositionObject* pObject = pPresentationSegment->objects.GetNext(pos);

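            // Render only objects that carry RLE data and fit entirely inside the target surface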
            if (pObject->GetRLEDataSize() && pObject->m_width > 0 && pObject->m_height > 0
                    && spd.w >= (pObject->m_horizontal_position + pObject->m_width) && spd.h >= (pObject->m_vertical_position + pObject->m_height)) {
                pObject->SetPalette(pPresentationSegment->CLUT.size, pPresentationSegment->CLUT.palette, pPresentationSegment->video_descriptor.nVideoWidth > 720);

                bbox.left   = min(pObject->m_horizontal_position, bbox.left);
                bbox.top    = min(pObject->m_vertical_position, bbox.top);
                bbox.right  = max(pObject->m_horizontal_position + pObject->m_width, bbox.right);
                bbox.bottom = max(pObject->m_vertical_position + pObject->m_height, bbox.bottom);

                TRACE_HDMVSUB(_T(" --> Object %d (Res=%dx%d, SPDRes=%dx%d)\n"), pObject->m_object_id_ref, pObject->m_width, pObject->m_height, spd.w, spd.h);
                pObject->RenderHdmv(spd);
            } else {
                TRACE_HDMVSUB(_T(" --> Invalid object %d\n"), pObject->m_object_id_ref);
            }
        }
    }
}
Example #2
HRESULT CPGSSub::Render(SubPicDesc& spd, REFERENCE_TIME rt, RECT& bbox, bool bRemoveOldSegments)
{
    CAutoLock cAutoLock(&m_csCritSec);

    bool bRendered = false;

    rt -= m_rtCurrentSegmentStart; // Make sure the timing is relative to the current segment start

    if (bRemoveOldSegments) {
        RemoveOldSegments(rt);
    }

    POSITION posPresentationSegment = FindPresentationSegment(rt);

    if (posPresentationSegment) {
        const auto& pPresentationSegment = m_pPresentationSegments.GetAt(posPresentationSegment);

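        // Use BT.709 when the source matrix says so; when unspecified, guess it from the video width (>720 means HD)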
        bool BT709 = m_infoSourceTarget.sourceMatrix == BT_709 ? true
                     : m_infoSourceTarget.sourceMatrix == NONE ? (pPresentationSegment->video_descriptor.nVideoWidth > 720)
                     : false;

        TRACE_PGSSUB(_T("CPGSSub:Render Presentation segment %d --> %s - %s\n"), pPresentationSegment->composition_descriptor.nNumber,
                     ReftimeToString(pPresentationSegment->rtStart + m_rtCurrentSegmentStart),
                     (pPresentationSegment->rtStop == INFINITE_TIME) ? _T("?") : ReftimeToString(pPresentationSegment->rtStop + m_rtCurrentSegmentStart));

        bbox.left = bbox.top = LONG_MAX;
        bbox.right = bbox.bottom = 0;

        POSITION pos = pPresentationSegment->objects.GetHeadPosition();
        while (pos) {
            const auto& pObject = pPresentationSegment->objects.GetNext(pos);

            if (pObject->GetRLEDataSize() && pObject->m_width > 0 && pObject->m_height > 0
                    && spd.w >= (pObject->m_horizontal_position + pObject->m_width) && spd.h >= (pObject->m_vertical_position + pObject->m_height)) {
                pObject->SetPalette(pPresentationSegment->CLUT.size, pPresentationSegment->CLUT.palette, BT709,
                                    m_infoSourceTarget.sourceBlackLevel, m_infoSourceTarget.sourceWhiteLevel, m_infoSourceTarget.targetBlackLevel, m_infoSourceTarget.targetWhiteLevel);
                bbox.left = std::min(pObject->m_horizontal_position, bbox.left);
                bbox.top = std::min(pObject->m_vertical_position, bbox.top);
                bbox.right = std::max(pObject->m_horizontal_position + pObject->m_width, bbox.right);
                bbox.bottom = std::max(pObject->m_vertical_position + pObject->m_height, bbox.bottom);

                TRACE_PGSSUB(_T(" --> Object %d (Pos=%dx%d, Res=%dx%d, SPDRes=%dx%d)\n"),
                             pObject->m_object_id_ref, pObject->m_horizontal_position, pObject->m_vertical_position, pObject->m_width, pObject->m_height, spd.w, spd.h);
                pObject->RenderHdmv(spd);

                bRendered = true;
            } else {
                TRACE_PGSSUB(_T(" --> Invalid object %d\n"), pObject->m_object_id_ref);
            }
        }
    }

    if (!bRendered) {
        bbox = { 0, 0, 0, 0 };
    }

    return S_OK;
}
Example #3
void CHdmvSub::Render(SubPicDesc& spd, REFERENCE_TIME rt, RECT& bbox)
{
    HDMV_PRESENTATION_SEGMENT* pPresentationSegment = FindPresentationSegment(rt);

    bbox.left   = LONG_MAX;
    bbox.top    = LONG_MAX;
    bbox.right  = 0;
    bbox.bottom = 0;

    if (pPresentationSegment) {
        POSITION pos = pPresentationSegment->objects.GetHeadPosition();

        TRACE_HDMVSUB( (_T("CHdmvSub:Render Presentation segment %d --> %lS - %lS\n"), pPresentationSegment->composition_descriptor.nNumber,
                      ReftimeToCString(pPresentationSegment->rtStart), ReftimeToCString(pPresentationSegment->rtStop)) );

        while (pos) {
            CompositionObject* pObject = pPresentationSegment->objects.GetNext(pos);

            if (pObject->GetRLEDataSize() && pObject->m_width > 0 && pObject->m_height > 0
                && spd.w >= (pObject->m_horizontal_position + pObject->m_width) 
                && spd.h >= (pObject->m_vertical_position + pObject->m_height)) 
            {
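                // Use the configured color matrix if set; otherwise pick Rec.709 for HD (>720 wide) video and Rec.601 for SD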
                CompositionObject::ColorType color_type = m_colorTypeSetting;
                if (color_type==CompositionObject::NONE)
                {
                    color_type = pPresentationSegment->video_descriptor.nVideoWidth > 720 ? 
                        CompositionObject::YUV_Rec709 : CompositionObject::YUV_Rec601;
                }
                pObject->SetPalette(pPresentationSegment->CLUT.size, pPresentationSegment->CLUT.palette, color_type, 
                    m_yuvRangeSetting==CompositionObject::RANGE_NONE ? CompositionObject::RANGE_TV : m_yuvRangeSetting);

                bbox.left   = min(pObject->m_horizontal_position, bbox.left);
                bbox.top    = min(pObject->m_vertical_position, bbox.top);
                bbox.right  = max(pObject->m_horizontal_position + pObject->m_width, bbox.right);
                bbox.bottom = max(pObject->m_vertical_position + pObject->m_height, bbox.bottom);

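                // Clamp the bounding box to the subpicture surface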
                ASSERT(spd.h>=0);
                bbox.left = bbox.left > 0 ? bbox.left : 0;
                bbox.top = bbox.top > 0 ? bbox.top : 0;
                bbox.right = bbox.right < spd.w ? bbox.right : spd.w;
                bbox.bottom = bbox.bottom < spd.h ? bbox.bottom : spd.h;

                pObject->InitColor(spd);
                TRACE_HDMVSUB( (_T(" --> Object %d (Pos=%dx%d, Res=%dx%d, SPDRes=%dx%d)\n"),
                              pObject->m_object_id_ref, pObject->m_horizontal_position, pObject->m_vertical_position, pObject->m_width, pObject->m_height, spd.w, spd.h) );
                pObject->RenderHdmv(spd);
            } else {
                TRACE_HDMVSUB( (_T(" --> Invalid object %d\n"), pObject->m_object_id_ref) );
            }
        }
    }
}
Example #4
HRESULT CPGSSub::Render(SubPicDesc& spd, REFERENCE_TIME rt, RECT& bbox, bool bRemoveOldSegments)
{
    CAutoLock cAutoLock(&m_csCritSec);

    bool bRendered = false;

    if (bRemoveOldSegments) {
        RemoveOldSegments(rt);
    }

    POSITION posPresentationSegment = FindPresentationSegment(rt);

    if (posPresentationSegment) {
        const auto& pPresentationSegment = m_pPresentationSegments.GetAt(posPresentationSegment);

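        // If no source matrix was specified, guess BT.709 for HD (>720 wide) video and BT.601 otherwise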
        if (m_eSourceMatrix == ColorConvTable::NONE) {
            m_eSourceMatrix = (pPresentationSegment->video_descriptor.nVideoWidth > 720) ? ColorConvTable::BT709 : ColorConvTable::BT601;
        }

        TRACE_PGSSUB(_T("CPGSSub:Render Presentation segment %d --> %s - %s\n"), pPresentationSegment->composition_descriptor.nNumber,
                     ReftimeToString(pPresentationSegment->rtStart),
                     (pPresentationSegment->rtStop == UNKNOWN_TIME) ? _T("?") : ReftimeToString(pPresentationSegment->rtStop));

        bbox.left = bbox.top = LONG_MAX;
        bbox.right = bbox.bottom = 0;

        for (const auto& pObject : pPresentationSegment->objects) {
            if (pObject->GetRLEDataSize() && pObject->m_width > 0 && pObject->m_height > 0
                    && spd.w >= (pObject->m_horizontal_position + pObject->m_width) && spd.h >= (pObject->m_vertical_position + pObject->m_height)) {
                pObject->SetPalette(pPresentationSegment->CLUT.size, pPresentationSegment->CLUT.palette.data(), m_eSourceMatrix);
                bbox.left = std::min(pObject->m_horizontal_position, bbox.left);
                bbox.top = std::min(pObject->m_vertical_position, bbox.top);
                bbox.right = std::max(pObject->m_horizontal_position + pObject->m_width, bbox.right);
                bbox.bottom = std::max(pObject->m_vertical_position + pObject->m_height, bbox.bottom);

                TRACE_PGSSUB(_T(" --> Object %d (Pos=%dx%d, Res=%dx%d, SPDRes=%dx%d)\n"),
                             pObject->m_object_id_ref, pObject->m_horizontal_position, pObject->m_vertical_position, pObject->m_width, pObject->m_height, spd.w, spd.h);
                pObject->RenderHdmv(spd);

                bRendered = true;
            } else {
                TRACE_PGSSUB(_T(" --> Invalid object %d\n"), pObject->m_object_id_ref);
            }
        }
    }

    if (!bRendered) {
        bbox = { 0, 0, 0, 0 };
    }

    return S_OK;
}