Example #1
0
FrameAvailableCode SyncVideoDecoder::getRenderedBmps(vector<BitmapPtr>& pBmps, 
        float timeWanted)
{
    AVG_ASSERT(getState() == DECODING);
    ScopeTimer timer(RenderToBmpProfilingZone);

    // timeWanted == -1 is the sentinel for "just decode the next frame".
    FrameAvailableCode frameAvailable;
    if (timeWanted == -1) {
        readFrame(m_pFrame);
        frameAvailable = FA_NEW_FRAME;
    } else {
        frameAvailable = readFrameForTime(m_pFrame, timeWanted);
    }
    if (frameAvailable == FA_USE_LAST_FRAME || isEOF()) {
        // Nothing new decoded: the caller should keep displaying the last frame.
        return FA_USE_LAST_FRAME;
    }
    allocFrameBmps(pBmps);
    if (pixelFormatIsPlanar(getPixelFormat())) {
        // Planar format: copy each plane into its own bitmap.
        ScopeTimer copyTimer(CopyImageProfilingZone);
        for (unsigned planeIdx = 0; planeIdx < pBmps.size(); ++planeIdx) {
            m_pFrameDecoder->copyPlaneToBmp(pBmps[planeIdx],
                    m_pFrame->data[planeIdx], m_pFrame->linesize[planeIdx]);
        }
    } else {
        // Packed format: convert the whole frame into a single bitmap.
        m_pFrameDecoder->convertFrameToBmp(m_pFrame, pBmps[0]);
    }
    return FA_NEW_FRAME;
}
Example #2
0
bool VideoDecoderThread::work() 
{
    // One worker-thread iteration: decode the next video frame (or handle
    // end-of-file) and push the result to the consumer through m_MsgQ.
    // Always returns true so the thread loop keeps running.
    if (m_pDecoder->isEOF(SS_VIDEO)) {
        if (!m_pDecoder->getVideoInfo().m_bHasAudio) {
            // No audio stream to stay in sync with: loop the video by
            // seeking back to the start.
            m_pDecoder->seek(0);
        } else {
            // Audio is still playing; idle briefly instead of busy-waiting.
            // TODO: Replace this with waitForMessage()
            msleep(10);
        }
    } else {
        ScopeTimer timer(DecoderProfilingZone);
        vdpau_render_state* pRenderState = 0;
        FrameAvailableCode frameAvailable;
        vector<BitmapPtr> pBmps;
        bool usesVDPAU = m_pDecoder->getVideoInfo().m_bUsesVDPAU;
        if (usesVDPAU) {
#ifdef AVG_ENABLE_VDPAU
            // Hardware decode path: the frame stays in a VDPAU render state
            // instead of being copied into bitmaps.
            frameAvailable = m_pDecoder->renderToVDPAU(&pRenderState);
#else
            frameAvailable = FA_NEW_FRAME; // Never executed - silences compiler warning.
#endif
        } else {
            // Software path: pull recycled bitmaps from the free queues and
            // have the decoder render into them.
            IntPoint size = m_pDecoder->getSize();
            IntPoint halfSize(size.x/2, size.y/2);
            PixelFormat pf = m_pDecoder->getPixelFormat();
            if (pixelFormatIsPlanar(pf)) {
                // Full-size luma plane plus two half-size chroma planes.
                pBmps.push_back(getBmp(m_pBmpQ, size, I8));
                pBmps.push_back(getBmp(m_pHalfBmpQ, halfSize, I8));
                pBmps.push_back(getBmp(m_pHalfBmpQ, halfSize, I8));
                if (pf == YCbCrA420p) {
                    // Extra full-size alpha plane.
                    pBmps.push_back(getBmp(m_pBmpQ, size, I8));
                }
            } else {
                pBmps.push_back(getBmp(m_pBmpQ, size, pf));
            }
            // -1: decode the next frame regardless of presentation time.
            frameAvailable = m_pDecoder->renderToBmps(pBmps, -1);
        }
        if (m_pDecoder->isEOF(SS_VIDEO)) {
            // Decoding just hit end-of-file; tell the consumer.
            VideoMsgPtr pMsg(new VideoMsg());
            pMsg->setEOF();
            m_MsgQ.push(pMsg);
        } else {
            ScopeTimer timer(PushMsgProfilingZone);
            AVG_ASSERT(frameAvailable == FA_NEW_FRAME);
            VideoMsgPtr pMsg(new VideoMsg());
            if (usesVDPAU) {
                pMsg->setVDPAUFrame(pRenderState, m_pDecoder->getCurTime(SS_VIDEO));
            } else {
                pMsg->setFrame(pBmps, m_pDecoder->getCurTime(SS_VIDEO));
            }
            m_MsgQ.push(pMsg);
            // Yield so the consumer thread gets a chance to run.
            msleep(0);
        }
        ThreadProfiler::get()->reset();
    }
    return true;
}
Example #3
0
void VideoDecoderThread::returnFrame(VideoMsgPtr pMsg)
{
    // Recycle the bitmaps of a consumed frame back into the free queues.
    bool bIsPlanar = pixelFormatIsPlanar(m_PF);
    m_pBmpQ->push(pMsg->getFrameBitmap(0));
    if (bIsPlanar) {
        // The two half-resolution chroma planes go to their own queue.
        m_pHalfBmpQ->push(pMsg->getFrameBitmap(1));
        m_pHalfBmpQ->push(pMsg->getFrameBitmap(2));
        if (m_PF == YCbCrA420p) {
            // Full-resolution alpha plane is recycled like the luma plane.
            m_pBmpQ->push(pMsg->getFrameBitmap(3));
        }
    }
}
Example #4
0
void OGLSurface::create(PixelFormat pf, GLTexturePtr pTex0, GLTexturePtr pTex1, 
        GLTexturePtr pTex2, GLTexturePtr pTex3)
{
    // (Re)initializes the surface with a pixel format and its plane textures.
    // Size is taken from the first (luma/packed) texture; the surface is
    // marked dirty so the next render uploads fresh state.
    m_pf = pf;
    m_Size = pTex0->getSize();
    m_pTextures[0] = pTex0;
    m_pTextures[1] = pTex1;
    m_pTextures[2] = pTex2;
    m_pTextures[3] = pTex3;
    m_bIsDirty = true;

    // Make sure pixel format and number of textures line up.
    if (pixelFormatIsPlanar(pf)) {
        // Planar formats need at least three planes (Y, Cb, Cr). The original
        // check only asserted texture 2; texture 1 must be present as well.
        AVG_ASSERT(m_pTextures[1]);
        AVG_ASSERT(m_pTextures[2]);
        if (pixelFormatHasAlpha(m_pf)) {
            // YCbCrA formats carry a fourth (alpha) plane.
            AVG_ASSERT(m_pTextures[3]);
        } else {
            AVG_ASSERT(!m_pTextures[3]);
        }
    } else {
        // Packed formats use exactly one texture.
        AVG_ASSERT(!m_pTextures[1]);
    }
}
Example #5
0
void VideoDecoderThread::sendFrame(AVFrame* pFrame)
{
    // Converts a decoded AVFrame into bitmaps and queues it for the consumer.
    vector<BitmapPtr> pBmps;
    if (pixelFormatIsPlanar(m_PF)) {
        ScopeTimer timer(CopyImageProfilingZone);
        // Planar layout: full-size luma plane, two half-size chroma planes
        // and, for YCbCrA420p, an additional full-size alpha plane.
        IntPoint chromaSize(m_Size.x/2, m_Size.y/2);
        pBmps.push_back(getBmp(m_pBmpQ, m_Size, I8));
        pBmps.push_back(getBmp(m_pHalfBmpQ, chromaSize, I8));
        pBmps.push_back(getBmp(m_pHalfBmpQ, chromaSize, I8));
        if (m_PF == YCbCrA420p) {
            pBmps.push_back(getBmp(m_pBmpQ, m_Size, I8));
        }
        unsigned numPlanes = pBmps.size();
        for (unsigned plane = 0; plane < numPlanes; ++plane) {
            m_pFrameDecoder->copyPlaneToBmp(pBmps[plane], pFrame->data[plane],
                    pFrame->linesize[plane]);
        }
    } else {
        // Packed pixel format: a single bitmap holds the whole frame.
        pBmps.push_back(getBmp(m_pBmpQ, m_Size, m_PF));
        m_pFrameDecoder->convertFrameToBmp(pFrame, pBmps[0]);
    }
    VideoMsgPtr pMsg(new VideoMsg());
    pMsg->setFrame(pBmps, m_pFrameDecoder->getCurTime());
    pushMsg(pMsg);
}
Example #6
0
void OGLSurface::activate(const IntPoint& logicalSize, bool bPremultipliedAlpha) const
{
    // Binds this surface's textures and configures the standard shader for
    // rendering: color model, colorspace matrix, gamma, premultiplied alpha
    // and (optionally) the mask texture with its position/scale.
    StandardShaderPtr pShader = StandardShader::get();

    GLContext::checkError("OGLSurface::activate()");
    // Pick the shader color model matching the surface pixel format.
    switch (m_pf) {
        case YCbCr420p:
        case YCbCrJ420p:
            pShader->setColorModel(1);
            break;
        case YCbCrA420p:
            pShader->setColorModel(3);
            break;
        case A8:
            pShader->setColorModel(2);
            break;
        default:
            pShader->setColorModel(0);
    }

    m_pTextures[0]->activate(GL_TEXTURE0);

    if (pixelFormatIsPlanar(m_pf)) {
        // Bind the extra chroma (and, for YCbCrA420p, alpha) planes to
        // their own texture units.
        m_pTextures[1]->activate(GL_TEXTURE1);
        m_pTextures[2]->activate(GL_TEXTURE2);
        if (m_pf == YCbCrA420p) {
            m_pTextures[3]->activate(GL_TEXTURE3);
        }
    }
    // A colorspace matrix is needed either for YCbCr->RGB conversion or when
    // the surface's color correction settings have been changed.
    if (pixelFormatIsPlanar(m_pf) || colorIsModified()) {
        glm::mat4 mat = calcColorspaceMatrix();
        pShader->setColorspaceMatrix(mat);
    } else {
        pShader->disableColorspaceMatrix();
    }
    // Shader applies gamma as pow(color, 1/gamma), so pass reciprocals.
    pShader->setGamma(glm::vec4(1/m_Gamma.x, 1/m_Gamma.y, 1/m_Gamma.z, 
                1./m_AlphaGamma));

    pShader->setPremultipliedAlpha(bPremultipliedAlpha);
    if (m_pMaskTexture) {
        m_pMaskTexture->activate(GL_TEXTURE4);
        // Special case for pot textures: 
        //   The tex coords in the vertex array are scaled to fit the image texture. We 
        //   need to undo this and fit to the mask texture. In the npot case, everything
        //   evaluates to (1,1);
        glm::vec2 texSize = glm::vec2(m_pTextures[0]->getGLSize());
        glm::vec2 imgSize = glm::vec2(m_pTextures[0]->getSize());
        glm::vec2 maskTexSize = glm::vec2(m_pMaskTexture->getGLSize());
        glm::vec2 maskImgSize = glm::vec2(m_pMaskTexture->getSize());
        glm::vec2 maskScale = glm::vec2(maskTexSize.x/maskImgSize.x, 
                maskTexSize.y/maskImgSize.y);
        glm::vec2 imgScale = glm::vec2(texSize.x/imgSize.x, texSize.y/imgSize.y);
        glm::vec2 maskPos = m_MaskPos/maskScale;
        // Special case for words nodes.
        if (logicalSize != IntPoint(0,0)) {
            maskScale *= glm::vec2((float)logicalSize.x/m_Size.x, 
                    (float)logicalSize.y/m_Size.y);
        }
        pShader->setMask(true, maskPos, m_MaskSize*maskScale/imgScale);
    } else {
        pShader->setMask(false);
    }
    pShader->activate();
    GLContext::checkError("OGLSurface::activate");
}