Code Example #1
OSCL_EXPORT_REF void PvmfPortBaseImpl::LogMediaDataInfo(PVMFSharedMediaDataPtr aMediaData, const char* msg, int32 qsize)
//log media data info, description, and associated q-depth.
{
    // to avoid compiler warnings when logger is not available
    OSCL_UNUSED_ARG(aMediaData);
    OSCL_UNUSED_ARG(msg);
    OSCL_UNUSED_ARG(qsize);

    if (!iDatapathLogger)
        return;

    LOGDATAPATH(
        (0, "PORT %s %s MediaData SeqNum %d, SId %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
         , msg
         , aMediaData->getSeqNum()
         , aMediaData->getStreamID()
         , aMediaData->getTimestamp()
         , qsize
        ));

}
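The OSCL_UNUSED_ARG() calls look redundant because the same parameters appear inside LOGDATAPATH(), but LOGDATAPATH is a macro that compiles to nothing when datapath logging is disabled, at which point the parameters really would be unused. A minimal, standalone sketch of that compile-out pattern (the macro body and the ENABLE_DATAPATH_LOGGING switch are placeholders for illustration, not the OpenCORE definitions):

// Hedged illustration only: placeholder macro body, not the OpenCORE definition.
#include <cstdio>

#if defined(ENABLE_DATAPATH_LOGGING)            // hypothetical build switch
#define LOGDATAPATH(args) std::printf args      // double parentheses let a variadic call pass through
#else
#define LOGDATAPATH(args)                       // expands to nothing; without OSCL_UNUSED_ARG-style
                                                // statements the parameters would trigger warnings
#endif

void LogExample(const char* portName, int qsize)
{
    LOGDATAPATH(("PORT %s Q-depth %d\n", portName, qsize));
}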
Code Example #2
OSCL_EXPORT_REF void PvmfPortBaseImpl::LogMediaMsgInfo(PVMFSharedMediaMsgPtr aMediaMsg, const char* msg, int32 qsize)
//log media msg info, description, and associated q-depth.
{
    // to avoid compiler warnings when logger is not available
    OSCL_UNUSED_ARG(msg);
    OSCL_UNUSED_ARG(qsize);

    if (!iDatapathLogger)
        return;

    switch (aMediaMsg->getFormatID())
    {
        case PVMF_MEDIA_CMD_BOS_FORMAT_ID:
        {
            LOGDATAPATH(
                (0, "PORT %s %s MediaCmd FmtId %s, SeqNum %d, SId %d, ClipID %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
                 , msg
                 , "BOS"
                 , aMediaMsg->getSeqNum()
                 , aMediaMsg->getStreamID()
                 , aMediaMsg->getClipID()
                 , aMediaMsg->getTimestamp()
                 , qsize
                ));
        }
        break;
        case PVMF_MEDIA_CMD_EOS_FORMAT_ID:
        {
            LOGDATAPATH(
                (0, "PORT %s %s MediaCmd FmtId %s, SeqNum %d, SId %d, ClipID %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
                 , msg
                 , "EOS"
                 , aMediaMsg->getSeqNum()
                 , aMediaMsg->getStreamID()
                 , aMediaMsg->getClipID()
                 , aMediaMsg->getTimestamp()
                 , qsize
                ));
        }
        break;
        case PVMF_MEDIA_CMD_BOC_FORMAT_ID:
        {
            LOGDATAPATH(
                (0, "PORT %s %s MediaCmd FmtId %s, SeqNum %d, SId %d, ClipID %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
                 , msg
                 , "BOC"
                 , aMediaMsg->getSeqNum()
                 , aMediaMsg->getStreamID()
                 , aMediaMsg->getClipID()
                 , aMediaMsg->getTimestamp()
                 , qsize
                ));
        }
        break;
        case PVMF_MEDIA_CMD_EOC_FORMAT_ID:
        {
            LOGDATAPATH(
                (0, "PORT %s %s MediaCmd FmtId %s, SeqNum %d, SId %d, ClipID %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
                 , msg
                 , "EOC"
                 , aMediaMsg->getSeqNum()
                 , aMediaMsg->getStreamID()
                 , aMediaMsg->getClipID()
                 , aMediaMsg->getTimestamp()
                 , qsize
                ));
        }
        break;
        case PVMF_MEDIA_MSG_DATA_FORMAT_ID:
        {
            PVMFSharedMediaDataPtr mediaData;
            convertToPVMFMediaData(mediaData, aMediaMsg);
            LOGDATAPATH(
                (0, "PORT %s %s MediaData SeqNum %d, SId %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
                 , msg
                 , mediaData->getSeqNum()
                 , mediaData->getStreamID()
                 , mediaData->getTimestamp()
                 , qsize
                ));
        }
        break;
        default:
        {
            LOGDATAPATH(
                (0, "PORT %s %s MediaCmd FmtId %d, SeqNum %d, SId %d, ClipID %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
                 , msg
                 , aMediaMsg->getFormatID()
                 , aMediaMsg->getSeqNum()
                 , aMediaMsg->getStreamID()
                 , aMediaMsg->getClipID()
                 , aMediaMsg->getTimestamp()
                 , qsize
                ));
        }
        break;
    }
}
Code Example #3
void PvmfPortBaseImpl::LogMediaMsgInfo(PVMFSharedMediaMsgPtr aMediaMsg, const char* msg, PvmfPortBaseImplQueue& q)
//log media msg info, description, and associated q-depth.
{
    // to avoid compiler warnings when logger is not available
    OSCL_UNUSED_ARG(msg);
    OSCL_UNUSED_ARG(q);

    switch (aMediaMsg->getFormatID())
    {
        case PVMF_MEDIA_CMD_BOS_FORMAT_ID:
        {
            LOGDATAPATH(
                (0, "PORT %s %s MediaCmd FmtId %s, SeqNum %d, SId %d, ClipID %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
                 , msg
                 , "BOS"
                 , aMediaMsg->getSeqNum()
                 , aMediaMsg->getStreamID()
                 , aMediaMsg->getClipID()
                 , aMediaMsg->getTimestamp()
                 , q.iQ.size()
                 , q.iCapacity
                ));
        }
        break;
        case PVMF_MEDIA_CMD_EOS_FORMAT_ID:
        {
            LOGDATAPATH(
                (0, "PORT %s %s MediaCmd FmtId %s, SeqNum %d, SId %d, ClipID %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
                 , msg
                 , "EOS"
                 , aMediaMsg->getSeqNum()
                 , aMediaMsg->getStreamID()
                 , aMediaMsg->getClipID()
                 , aMediaMsg->getTimestamp()
                 , q.iQ.size()
                 , q.iCapacity
                ));
        }
        break;
        case PVMF_MEDIA_CMD_BOC_FORMAT_ID:
        {
            LOGDATAPATH(
                (0, "PORT %s %s MediaCmd FmtId %s, SeqNum %d, SId %d, ClipID %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
                 , msg
                 , "BOC"
                 , aMediaMsg->getSeqNum()
                 , aMediaMsg->getStreamID()
                 , aMediaMsg->getClipID()
                 , aMediaMsg->getTimestamp()
                 , q.iQ.size()
                 , q.iCapacity
                ));
        }
        break;
        case PVMF_MEDIA_CMD_EOC_FORMAT_ID:
        {
            LOGDATAPATH(
                (0, "PORT %s %s MediaCmd FmtId %s, SeqNum %d, SId %d, ClipID %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
                 , msg
                 , "EOC"
                 , aMediaMsg->getSeqNum()
                 , aMediaMsg->getStreamID()
                 , aMediaMsg->getClipID()
                 , aMediaMsg->getTimestamp()
                 , q.iQ.size()
                 , q.iCapacity
                ));
        }
        break;
        case PVMF_MEDIA_MSG_DATA_FORMAT_ID:
        {
            PVMFSharedMediaDataPtr mediaData;
            convertToPVMFMediaData(mediaData, aMediaMsg);
            LOGDATAPATH(
                (0, "PORT %s %s MediaData SeqNum %d, SId %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
                 , msg
                 , mediaData->getSeqNum()
                 , mediaData->getStreamID()
                 , mediaData->getTimestamp()
                 , q.iQ.size()
                 , q.iCapacity
                ));
        }
        break;
        default:
        {
            LOGDATAPATH(
                (0, "PORT %s %s MediaCmd FmtId %d, SeqNum %d, SId %d, ClipID %d, TS %d, Q-depth %d/%d", iPortName.get_cstr()
                 , msg
                 , aMediaMsg->getFormatID()
                 , aMediaMsg->getSeqNum()
                 , aMediaMsg->getStreamID()
                 , aMediaMsg->getClipID()
                 , aMediaMsg->getTimestamp()
                 , q.iQ.size()
                 , q.iCapacity
                ));
        }
        break;
    }
}
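For reference, the two Q-depth values logged above are the queue's current size and its configured capacity. A standalone illustration of that reporting convention, using plain standard-library types rather than OpenCORE's PvmfPortBaseImplQueue:

// Standalone illustration (hypothetical types, not OpenCORE's PvmfPortBaseImplQueue):
// "Q-depth %d/%d" reports current depth versus configured capacity.
#include <cstdio>
#include <queue>

struct BoundedMsgQueue
{
    std::queue<int> iQ;     // stand-in for the port's message queue
    unsigned iCapacity;     // configured limit, reported as the second value
};

static void LogQueueDepth(const char* portName, const char* msg, const BoundedMsgQueue& q)
{
    std::printf("PORT %s %s Q-depth %u/%u\n", portName, msg, (unsigned)q.iQ.size(), q.iCapacity);
}

int main()
{
    BoundedMsgQueue q;
    q.iCapacity = 10;
    q.iQ.push(1);
    q.iQ.push(2);
    LogQueueDepth("Media in", "RcvMsg", q);   // prints: PORT Media in RcvMsg Q-depth 2/10
    return 0;
}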
/* ======================================================================== */
uint8 PV_LATM_Parser::composeMultipleFrame(PVMFSharedMediaDataPtr& mediaDataIn)
{

    uint32 tmp;
    uint8 * myData;
    uint32 i;


    OsclRefCounterMemFrag memFragIn;
    mediaDataIn->getMediaFragment(0, memFragIn);

    // pool made for output data
    OsclRefCounterMemFrag memFragOut;
    mediaDataOut->getMediaFragment(0, memFragOut);

    int32 pktsize = memFragIn.getMemFrag().len;

    // make sure we have enough memory to hold the data
    if (bytesRead + pktsize > currSize)
    {
        uint8 * tempPtr = (uint8*) oscl_calloc(bytesRead + pktsize, sizeof(uint8));
        if (tempPtr == NULL)
        {
            // memory problem?
            return FRAME_ERROR;
        }
        currSize = bytesRead + pktsize;
        oscl_memcpy(tempPtr, multiFrameBuf, bytesRead);
        oscl_free(multiFrameBuf);
        multiFrameBuf = tempPtr;
    }

    oscl_memcpy(multiFrameBuf + bytesRead, memFragIn.getMemFrag().ptr, pktsize);

    bytesRead += pktsize;
    //newpkt->frame_size = bytesRead;

    // to update number of bytes copied
    memFragOut.getMemFrag().len = bytesRead;
    mediaDataOut->setMediaFragFilledLen(0, bytesRead);
    mediaDataOut->setSeqNum(mediaDataIn->getSeqNum());
    mediaDataOut->setTimestamp(mediaDataIn->getTimestamp());

    if (mediaDataIn->getMarkerInfo())
    {
        // means this is the last packet for this audioMuxElement

        myData = multiFrameBuf;

        uint32 outPtrPos = 0;
        for (i = 0; i <= sMC->numSubFrames; i++)
        {
            framesize = 0;
            do
            {
                tmp = *(myData);
                framesize += tmp;
            }
            while (*(myData++) == 0xff);

            //int32 bUsed = (framesize/255)+1; // 0-254: 1, 255-511: 2 ...
            // do a check on the last one
            if (i == sMC->numSubFrames && !sMC->otherDataPresent)
            {
                if (framesize != bytesRead - (myData - multiFrameBuf))
                {
                    // to update number of bytes copied
                    memFragOut.getMemFrag().len = 0;
                    mediaDataOut->setMediaFragFilledLen(0, 0);

                    return FRAME_INCOMPLETE;
                }
            }
            oscl_memcpy((uint8*)memFragOut.getMemFrag().ptr + outPtrPos, myData, framesize);
            myData += framesize;
            outPtrPos += framesize;
        }



        // to update number of bytes copied
        memFragOut.getMemFrag().len = outPtrPos;
        mediaDataOut->setMediaFragFilledLen(0, outPtrPos);

        bytesRead = 0;
        framesize = 0;
        compositenumframes = 0;

    }
    else
    {
        compositenumframes++;

        if (compositenumframes < MAX_NUM_COMPOSITE_FRAMES)
        {
            return FRAME_INCOMPLETE;
        }
        else
        {
            return FRAME_ERROR;
        }

    }

    return FRAME_COMPLETE;
}
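composeMultipleFrame() above and composeSingleFrame() below both decode the LATM PayloadLengthInfo() field with a do/while loop: each 0xFF byte adds 255 to the frame length and indicates that another length byte follows, and the first non-0xFF byte terminates the field. A standalone sketch of that decoding with a worked example (hypothetical helper, assuming the RFC 3016-style length coding used above):

// Standalone sketch of the length decoding used in the do/while loops above.
#include <cstdint>
#include <cstdio>

static uint32_t DecodePayloadLength(const uint8_t* p, uint32_t& bytesUsed)
{
    uint32_t length = 0;
    bytesUsed = 0;
    uint8_t b;
    do
    {
        b = p[bytesUsed++];   // consume one length byte
        length += b;          // 0xFF contributes 255 and continues the field
    }
    while (b == 0xFF);
    return length;
}

int main()
{
    const uint8_t hdr[] = { 0xFF, 0xFF, 0x10 };        // 255 + 255 + 16
    uint32_t used = 0;
    uint32_t len = DecodePayloadLength(hdr, used);
    std::printf("frame length = %u, header bytes = %u\n", (unsigned)len, (unsigned)used);   // 526, 3
    return 0;
}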
uint8 PV_LATM_Parser::composeSingleFrame(PVMFSharedMediaDataPtr& mediaDataIn)
{
    int32 tmp = 0;

    //changed
    OsclRefCounterMemFrag memFragIn;
    mediaDataIn->getMediaFragment(0, memFragIn);

    // pool made for output data
    OsclRefCounterMemFrag memFragOut;
    mediaDataOut->getMediaFragment(0, memFragOut);

    //uint8 * myData = newpkt->data;
    uint8 * myData = (uint8*)memFragIn.getMemFrag().ptr;

    /*
     *  Total Payload length, in bytes, includes
     *      length of the AudioMuxElement()
     *      AudioMuxElement()
     *      Other data (not supported for RFC 3016)
     */
    int32 pktsize = memFragIn.getMemFrag().len;

    int32 m_bit = mediaDataIn->getMarkerInfo();

    /*
     *  All streams have same time framing (there is only one stream anyway)
     */
    if (firstBlock)
    {
        /*
         *  AudioMuxElement() fits in a single RTP packet, or this is the first
         *  block of an AudioMuxElement() spread across more than one RTP packet
         */


        int32 bUsed = 0;

        /*
         *      PayloadLengthInfo()
         */

        do
        {
            tmp = *(myData++);      /* read the payload length, one byte (8 bits) at a time */
            framesize += tmp;
            bUsed++;
        }
        while (tmp == 0xff);      /* 0xff is the escape value: lengths of 255 or more continue in the next byte */


        /*
         *      PayloadMux()
         */

        bytesRead = (pktsize - bUsed);

        // framesize must equal bytesRead if the M-bit is 1,
        // or be at least bytesRead if the M-bit is 0
        if ((m_bit && framesize != bytesRead && !sMC->otherDataPresent) ||
                (!m_bit && framesize < bytesRead && !sMC->otherDataPresent))
        {
            // to update number of bytes copied
            memFragOut.getMemFrag().len = 0;
            mediaDataOut->setMediaFragFilledLen(0, 0);
            bytesRead = 0;

            return FRAME_ERROR;
        }

        oscl_memcpy((uint8*)memFragOut.getMemFrag().ptr, myData, bytesRead); //ptr +1 changed

        if (sMC->otherDataPresent)
        {
            ;   /* don't care at this point; no MUX other than AAC is supported */
        }

    }
    else
    {
        /*
         *  We have an AudioMuxElement() spread across more than one RTP packet
         */
        if ((m_bit && framesize != pktsize + (bytesRead - 1) && !sMC->otherDataPresent) /* last block */ ||
                (!m_bit && framesize <  pktsize + (bytesRead - 1) && !sMC->otherDataPresent) /* intermediate block */)
        {

            // to update number of bytes copied
            memFragOut.getMemFrag().len = 0;
            mediaDataOut->setMediaFragFilledLen(0, 0);

            return FRAME_ERROR;
        }

        /*
         *  Accumulate  blocks until the full frame is complete
         */
        oscl_memcpy((uint8*)memFragOut.getMemFrag().ptr + bytesRead, myData, pktsize);
        bytesRead += pktsize;
    }


    // to update number of bytes copied
    memFragOut.getMemFrag().len = bytesRead;
    mediaDataOut->setMediaFragFilledLen(0, bytesRead);
    mediaDataOut->setSeqNum(mediaDataIn->getSeqNum());
    mediaDataOut->setTimestamp(mediaDataIn->getTimestamp());


    firstBlock = false;     /* we already processed the first block, so this should be false  */

    if (m_bit)              /* check if it is a complete packet (m bit ==1) */
    {
        firstBlock = true;  /* if the M-bit is "1", the frame fits in a single block or this was the last
                               block of the frame; set for the next call */
        framesize = 0;
        frameNum = 0;
        bytesRead = 0;
        compositenumframes = 0;
    }
    else
    {
        /*
         *  We have an AudioMuxElement() spread across more than one RTP packet
         */
        compositenumframes++;

        if (compositenumframes < MAX_NUM_COMPOSITE_FRAMES)
        {
            // this is not yet a finished packet
            return FRAME_INCOMPLETE;
        }
        else
        {
            return FRAME_ERROR;
        }

    }
    return FRAME_COMPLETE;
}
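The else branch above is the reassembly path: payload blocks are appended to the output fragment until a packet with the RTP marker bit arrives, with MAX_NUM_COMPOSITE_FRAMES as a safety limit. A simplified, standalone sketch of that accumulate-until-marker flow (hypothetical types and return values; the LATM length checks are omitted):

// Simplified standalone sketch of the accumulate-until-marker flow above
// (hypothetical types; status values are illustrative, not the parser's real constants).
#include <cstddef>
#include <cstdint>
#include <vector>

enum ReassemblyStatus { SKETCH_COMPLETE, SKETCH_INCOMPLETE, SKETCH_ERROR };

struct Reassembler
{
    std::vector<uint8_t> buf;            // plays the role of memFragOut / bytesRead
    int fragments;
    static const int kMaxFragments = 10; // stand-in for MAX_NUM_COMPOSITE_FRAMES

    Reassembler() : fragments(0) {}

    ReassemblyStatus Add(const uint8_t* payload, std::size_t len, bool markerBit)
    {
        buf.insert(buf.end(), payload, payload + len);
        if (markerBit)                   // last RTP packet of this AudioMuxElement
        {
            fragments = 0;
            return SKETCH_COMPLETE;      // buf now holds the whole frame
        }
        if (++fragments < kMaxFragments)
            return SKETCH_INCOMPLETE;    // keep accumulating
        buf.clear();                     // too many fragments without a marker
        fragments = 0;
        return SKETCH_ERROR;
    }
};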
OSCL_EXPORT_REF uint8 PV_LATM_Parser::compose(PVMFSharedMediaDataPtr& mediaDataIn)
{
    uint8 retVal = 0;

    OsclRefCounterMemFrag memFragIn;
    mediaDataIn->getMediaFragment(0, memFragIn);

    // Don't need the ref to iMediaData so unbind it
    mediaDataOut.Unbind();

    int errcode = 0;
    OsclSharedPtr<PVMFMediaDataImpl> mediaDataImpl;
    OSCL_TRY_NO_TLS(iOsclErrorTrapImp, errcode, mediaDataImpl = iMediaDataSimpleAlloc.allocate((uint32)memFragIn.getMemFrag().len));
    OSCL_FIRST_CATCH_ANY(errcode, return FRAME_OUTPUTNOTAVAILABLE);

    errcode = 0;
    OSCL_TRY_NO_TLS(iOsclErrorTrapImp, errcode, mediaDataOut = PVMFMediaData::createMediaData(mediaDataImpl, &iMediaDataMemPool));
    OSCL_FIRST_CATCH_ANY(errcode, return FRAME_OUTPUTNOTAVAILABLE);

    OsclRefCounterMemFrag memFragOut;
    mediaDataOut->getMediaFragment(0, memFragOut);

    /*
     *  Latch for very first packet, sequence number is not established yet.
     */
    int32 seqNum = mediaDataIn->getSeqNum();

    if (!firstPacket)
    {
        if ((seqNum - last_sequence_num) > 1)    /* detect any gap in sequence */
        {
            // means we missed an RTP packet.
            dropFrames = true;
        }
    }
    else
    {
        firstPacket = false;
    }

    last_timestamp = mediaDataIn->getTimestamp();
    last_sequence_num = seqNum;
    last_mbit = mediaDataIn->getMarkerInfo();

    if (dropFrames)
    {
        if (mediaDataIn->getMarkerInfo())
        {
            /*
             *  try to recover: sequencing was broken, but the new packet could still be valid.
             *  It is possible that the received packet contains a complete AudioMuxElement(),
             *  so try to retrieve it.
             */

            dropFrames = false;
        }
        else
        {

            /*
             *  we are in the middle of a spread AudioMuxElement(), or the RTP
             *  header is faulty; return an error
             */

            framesize = 0;
            frameNum = 0;
            bytesRead = 0;
            compositenumframes = 0;

            /*
             *  Drop frame as we are not certain if it is a valid frame
             */
            memFragOut.getMemFrag().len = 0;
            mediaDataOut->setMediaFragFilledLen(0, 0);

            firstBlock = true; // set for next call
            return FRAME_ERROR;
        }
    }


    if (sMC->numSubFrames > 0 || (sMC->cpresent == 1 && ((*(uint8*)(memFragIn.getMemFrag().ptr)) &(0x80))))
    {
        // This is the less efficient path. It must be used when an AudioMuxElement has
        // more than one subFrame, and also when the StreamMuxConfig is sent in-band,
        // because the StreamMuxConfig can be large and there is no way to know its size
        // without parsing it (it can straddle an RTP packet boundary).
        // It is less efficient because it assembles the AudioMuxElement in a separate
        // buffer (one oscl_memcpy() per RTP packet) and then parses it (one oscl_memcpy()
        // per audio frame into the output buffer) once a whole AudioMuxElement is available.
        // composeSingleFrame(), used in the else branch, copies directly into the output
        // buffer instead. Note that composeMultipleFrame() also works for the simple case,
        // should there be another reason to use it.

        retVal = composeMultipleFrame(mediaDataIn);
    }
    else
    {
        // this is an efficient version that can be used when you know an AudioMuxElement has
        // only one subFrame
        retVal = composeSingleFrame(mediaDataIn);
    }

    // set this to drop frames in the future -- till we find another marker bit
    if (retVal == FRAME_ERROR)
    {
        dropFrames = true;

        framesize = 0;
        frameNum = 0;
        bytesRead = 0;
        compositenumframes = 0;

        //changed
        memFragOut.getMemFrag().len = 0;
        mediaDataOut->setMediaFragFilledLen(0, 0);

        firstBlock = true; // set for next call

    }
    return retVal;
}
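On a detected sequence-number gap, compose() keeps dropping input until a packet with the marker bit arrives, since only such a packet can end (or entirely contain) an AudioMuxElement and let parsing resume cleanly. A compact standalone sketch of that recovery policy (hypothetical class; like the original, no sequence-number wraparound handling):

// Standalone sketch of the gap-recovery policy used in compose() above
// (hypothetical class, illustration only).
#include <cstdint>

class GapRecovery
{
public:
    GapRecovery() : iFirstPacket(true), iDropFrames(false), iLastSeqNum(0) {}

    // Returns true if the packet should be parsed, false if it must be dropped.
    bool OnPacket(int32_t seqNum, bool markerBit)
    {
        if (!iFirstPacket && (seqNum - iLastSeqNum) > 1)
            iDropFrames = true;          // at least one RTP packet was lost
        iFirstPacket = false;
        iLastSeqNum = seqNum;

        if (iDropFrames)
        {
            if (!markerBit)
                return false;            // mid-AudioMuxElement: keep dropping
            iDropFrames = false;         // marker bit: this packet may hold a complete element, so parse it
        }
        return true;
    }

private:
    bool    iFirstPacket;
    bool    iDropFrames;
    int32_t iLastSeqNum;
};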
////////////////////////////////////////////////////////////////////////////////////
//////	HttpParsingBasicObject implementation
////////////////////////////////////////////////////////////////////////////////////
int32 HttpParsingBasicObject::parseResponse(INPUT_DATA_QUEUE &aDataQueue)
{
    PVMFSharedMediaDataPtr mediaData;
    int32 status = getNextMediaData(aDataQueue, mediaData);
    if (status != PARSE_SUCCESS)
    {
        if (status == PARSE_EOS_INPUT_DATA)
        {
            return validateEOSInput(status);
        }
        return status; // no input data or eos
    }

    OsclRefCounterMemFrag fragIn;
    mediaData->getMediaFragment(0, fragIn);
    HttpParsingBasicObjectAutoCleanup cleanup(this);

    while (status == PARSE_SUCCESS)
    {
        RefCountHTTPEntityUnit entityUnit;
        int32 parsingStatus = iParser->parse(fragIn, entityUnit);
        if (parsingStatus < 0)
        {
            PVMF_PROTOCOL_ENGINE_LOGERRINFODATAPATH((0,
                                                    "HttpParsingBasicObject::parseResponse(), iParser->parse() retval=%d(iHttpHeaderParsed=%d)",
                                                    parsingStatus, (int32)iHttpHeaderParsed));
        }
        else
        {

            // save output data, if any
            iOutputQueue->clear();
            uint32 size = saveOutputData(entityUnit, *iOutputQueue);
            if (size == 0xFFFFFFFF) return PARSE_GENERAL_ERROR;

            if (parsingStatus == HTTPParser::PARSE_HEADER_AVAILABLE)
            {
                iHttpHeaderParsed = true;
                iParser->getContentInfo(iContentInfo);
                extractServerVersionNum();

                // update BandWidthEstimationInfo
                iBWEstInfo.update(mediaData, iHttpHeaderParsed);

                // do sanity check for HTTP header
                int32 sanityCheckStatus = iParser->doSanityCheckForResponseHeader();
                if (sanityCheckStatus == HTTPParser::PARSE_TRANSFER_ENCODING_NOT_SUPPORTED)
                {
                    parsingStatus = sanityCheckStatus;
                }
                else
                {
                    // output data
                    status = iObserver->OutputDataAvailable(iOutputQueue, true);
                    if (status < 0) return status;
                }
            }
            else if (iHttpHeaderParsed && size > 0)
            {
                iTotalDLHttpBodySize += size;
                if (iLatestMediaDataTimestamp < mediaData->getTimestamp()) iLatestMediaDataTimestamp = mediaData->getTimestamp();

                // update BandWidthEstimationInfo
                iBWEstInfo.update(mediaData, iHttpHeaderParsed);
                PVMF_PROTOCOL_ENGINE_LOGERRINFODATAPATH((0, "HttpParsingBasicObject::parseResponse() file size = %d, download size = %d, curr_size = %d, new download size = %d",
                                                        iContentInfo.iContentLength, iTotalDLHttpBodySize, size, iBWEstInfo.iTotalSizePerRequest));
            }
        }

        // check whether parsing of the current input is done;
        // may issue a callback for the end-of-message or end-of-input cases
        if ((status = checkParsingDone(parsingStatus)) != PARSE_SUCCESS)
        {
            if (status != PROCESS_WAIT_FOR_INCOMING_DATA)
            {
                PVMF_PROTOCOL_ENGINE_LOGERRINFODATAPATH((0, "HttpParsingBasicObject::parseResponse() status=checkParsingDone(parsingStatus)); parsingStatus = %d , status = %d",
                                                        parsingStatus, status));
            }
            return status;
        }
    }

    return PARSE_SUCCESS;
}