bool
PVMFOMXEncPort::pvmiSetPortFormatSpecificInfoSync(OsclRefCounterMemFrag& aMemFrag)
{
    if ((iConnectedPort) &&
            (iTag == PVMF_OMX_ENC_NODE_PORT_TYPE_OUTPUT))
    {
        OsclAny* temp = NULL;
        iConnectedPort->QueryInterface(PVMI_CAPABILITY_AND_CONFIG_PVUUID, temp);
        PvmiCapabilityAndConfig *config = (PvmiCapabilityAndConfig*) temp;

        /*
         * Create PvmiKvp for capability settings
         */
        if ((config) && (aMemFrag.getMemFragSize() > 0))
        {
            OsclMemAllocator alloc;
            PvmiKvp kvp;
            kvp.key = NULL;
            kvp.length = oscl_strlen(PVMF_FORMAT_SPECIFIC_INFO_KEY) + 1; // +1 for \0
            kvp.key = (PvmiKeyType)alloc.ALLOCATE(kvp.length);
            if (kvp.key == NULL)
            {
                return false;
            }
            oscl_strncpy(kvp.key, PVMF_FORMAT_SPECIFIC_INFO_KEY, kvp.length);

            kvp.value.key_specific_value = (OsclAny*)(aMemFrag.getMemFragPtr());
            kvp.capacity = aMemFrag.getMemFragSize();
            kvp.length = aMemFrag.getMemFragSize();
            PvmiKvp* retKvp = NULL; // for return value
            int32 err;
            OSCL_TRY(err, config->setParametersSync(NULL, &kvp, 1, retKvp););
            // Assumed completion: any leave from setParametersSync is ignored here;
            // free the allocated key and report success.
            OSCL_UNUSED_ARG(err);
            alloc.deallocate((OsclAny*)kvp.key);
            return true;
        }
    }
    return false;
}
OSCL_EXPORT_REF bool
PVMFOMXDecPort::pvmiSetPortFormatSpecificInfoSync(OsclRefCounterMemFrag& aMemFrag, PvmiKeyType KvpKey)
{
    if ((iConnectedPort) &&
            (iTag == PVMF_OMX_DEC_NODE_PORT_TYPE_OUTPUT))
    {
        OsclAny* temp = NULL;
        iConnectedPort->QueryInterface(PVMI_CAPABILITY_AND_CONFIG_PVUUID, temp);

        PvmiCapabilityAndConfig *config = (PvmiCapabilityAndConfig*)temp;

        /*
         * Create PvmiKvp for capability settings
         */
        if ((config) && (aMemFrag.getMemFragSize() > 0))
        {
            PvmiKvp kvp;

            // Guard against a null key before it is dereferenced below
            if (KvpKey == NULL)
            {
                return false;
            }

            kvp.key = KvpKey;
            kvp.length = oscl_strlen(KvpKey) + 1; // +1 for \0

            kvp.value.key_specific_value = (OsclAny*)(aMemFrag.getMemFragPtr());
            kvp.capacity = aMemFrag.getMemFragSize();
            PvmiKvp* retKvp = NULL; // for return value
            int32 err;
            OSCL_TRY(err, config->setParametersSync(NULL, &kvp, 1, retKvp););
            // Assumed completion: any leave from setParametersSync is ignored here,
            // and the format-specific info is considered delivered.
            OSCL_UNUSED_ARG(err);
            return true;
        }
    }
    return false;
}
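// Illustrative caller sketch (not from the original source): how a node that has
// just received codec configuration data might push it to the peer port using the
// two variants above, assuming the relevant port headers are included. The function
// name PushConfigToPeer and the aPort/aConfigFrag parameters are assumptions made
// for illustration only.
static bool PushConfigToPeer(PVMFOMXDecPort* aPort, OsclRefCounterMemFrag& aConfigFrag)
{
    if (aPort == NULL || aConfigFrag.getMemFragSize() == 0)
    {
        return false;
    }
    // The decoder-port variant takes the KVP key explicitly; the encoder-port
    // variant above always uses PVMF_FORMAT_SPECIFIC_INFO_KEY internally.
    return aPort->pvmiSetPortFormatSpecificInfoSync(aConfigFrag,
            (PvmiKeyType)PVMF_FORMAT_SPECIFIC_INFO_KEY);
}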
bool HttpParsingBasicObject::saveOutputData(RefCountHTTPEntityUnit &entityUnit, OUTPUT_DATA_QUEUE &aOutputData, uint32 &aTotalEntityDataSize)
{
    aTotalEntityDataSize = 0;
    int32 err = 0;
    OSCL_TRY(err,
             for (uint32 i = 0; i < entityUnit.getEntityUnit().getNumFragments(); i++)
             {
                 OsclRefCounterMemFrag memfrag;
                 entityUnit.getEntityUnit().getMemFrag(i, memfrag);
                 aOutputData.push_back(memfrag);
                 aTotalEntityDataSize += memfrag.getMemFragSize();
             }
            );
    // Assumed completion: report failure if queuing any fragment caused a leave.
    return (err == 0);
}
void PVMFBufferDataSource::TimeoutOccurred(int32 timerID, int32 timeoutInfo)
{
    OSCL_UNUSED_ARG(timerID);
    // timeoutInfo carries the number of bytes to send; reject empty or negative requests
    if (timeoutInfo <= 0)
        return;
    uint32 bytesToSend = (uint32)timeoutInfo;

    if (!IsConnected())
        return;

    // Create new media data buffer
    OsclSharedPtr<PVMFMediaDataImpl> mediaDataImpl = iMediaDataAlloc->allocate(bytesToSend);
    PVMFSharedMediaDataPtr mediaData;
    int leavecode = 0;
    OSCL_TRY(leavecode, mediaData = PVMFMediaData::createMediaData(mediaDataImpl));
    OSCL_FIRST_CATCH_ANY(leavecode, return);

    // Send FSI if available
    if (iFsi)
    {
        OsclSharedPtr<PVMFMediaDataImpl> fsiMediaDataImpl = iMediaDataAlloc->allocate(iFsiLen);
        PVMFSharedMediaDataPtr fsiMediaData;
        OSCL_TRY(leavecode, fsiMediaData = PVMFMediaData::createMediaData(fsiMediaDataImpl));
        OSCL_FIRST_CATCH_ANY(leavecode, return);
        OsclRefCounterMemFrag fsi_frag;
        fsiMediaData->getMediaFragment(0, fsi_frag);
        oscl_memcpy((uint8*)fsi_frag.getMemFragPtr(), iFsi, iFsiLen);
        fsi_frag.getMemFrag().len = iFsiLen;
        mediaData->setFormatSpecificInfo(fsi_frag);
        OSCL_DEFAULT_FREE(iFsi);
        iFsi = NULL;
        iFsiLen = 0;
    }

    // Retrieve memory fragment to write to
    OsclRefCounterMemFrag refCtrMemFrag;
    mediaData->getMediaFragment(0, refCtrMemFrag);
    if (refCtrMemFrag.getCapacity() < bytesToSend)
        return;

    oscl_memset((uint8*)refCtrMemFrag.getMemFragPtr(), 7, bytesToSend);
    mediaDataImpl->setMediaFragFilledLen(0, bytesToSend);
    mediaData->setTimestamp(iTimestamp);
    iTimestamp += iSampleInterval;

    // Send frame to downstream node
    PVMFSharedMediaMsgPtr mediaMsg;
    convertToPVMFMediaMsg(mediaMsg, mediaData);
    QueueOutgoingMsg(mediaMsg);
}
/* ======================================================================== */
uint8 PV_LATM_Parser::composeMultipleFrame(PVMFSharedMediaDataPtr& mediaDataIn)
{

    uint32 tmp;
    uint8 * myData;
    uint32 i;


    OsclRefCounterMemFrag memFragIn;
    mediaDataIn->getMediaFragment(0, memFragIn);

    // pool made for output data
    OsclRefCounterMemFrag memFragOut;
    mediaDataOut->getMediaFragment(0, memFragOut);

    int32 pktsize = memFragIn.getMemFrag().len;

    // make sure we have enough memory to hold the data
    if (bytesRead + pktsize > currSize)
    {
        uint8 * tempPtr = (uint8*) oscl_calloc(bytesRead + pktsize, sizeof(uint8));
        if (tempPtr == NULL)
        {
            // memory problem?
            return FRAME_ERROR;
        }
        currSize = bytesRead + pktsize;
        oscl_memcpy(tempPtr, multiFrameBuf, bytesRead);
        oscl_free(multiFrameBuf);
        multiFrameBuf = tempPtr;
    }

    oscl_memcpy(multiFrameBuf + bytesRead, memFragIn.getMemFrag().ptr, pktsize);

    bytesRead += pktsize;
    //newpkt->frame_size = bytesRead;

    // to update number of bytes copied
    memFragOut.getMemFrag().len = bytesRead;
    mediaDataOut->setMediaFragFilledLen(0, bytesRead);
    mediaDataOut->setSeqNum(mediaDataIn->getSeqNum());
    mediaDataOut->setTimestamp(mediaDataIn->getTimestamp());

    if (mediaDataIn->getMarkerInfo())
    {
        // means this is the last packet for this audioMuxElement

        myData = multiFrameBuf;

        uint32 outPtrPos = 0;
        for (i = 0; i <= sMC->numSubFrames; i++)
        {
            framesize = 0;
            do
            {
                tmp = *(myData);
                framesize += tmp;
            }
            while (*(myData++) == 0xff);

            //int32 bUsed = (framesize/255)+1; // 0-254: 1, 255-511: 2 ...
            // do a check on the last one
            if (i == sMC->numSubFrames && !sMC->otherDataPresent)
            {
                if (framesize != bytesRead - (myData - multiFrameBuf))
                {
                    // to update number of bytes copied
                    memFragOut.getMemFrag().len = 0;
                    mediaDataOut->setMediaFragFilledLen(0, 0);

                    return FRAME_INCOMPLETE;
                }
            }
            oscl_memcpy((uint8*)memFragOut.getMemFrag().ptr + outPtrPos, myData, framesize);
            myData += framesize;
            outPtrPos += framesize;
        }

        // to update number of bytes copied
        memFragOut.getMemFrag().len = outPtrPos;
        mediaDataOut->setMediaFragFilledLen(0, outPtrPos);

        bytesRead = 0;
        framesize = 0;
        compositenumframes = 0;

    }
    else
    {
        compositenumframes++;

        if (compositenumframes < MAX_NUM_COMPOSITE_FRAMES)
        {
            return FRAME_INCOMPLETE;
        }
        else
        {
            return FRAME_ERROR;
        }

    }

    return FRAME_COMPLETE;
}
uint8 PV_LATM_Parser::composeSingleFrame(PVMFSharedMediaDataPtr& mediaDataIn)
{
    int32 tmp = 0;

    //changed
    OsclRefCounterMemFrag memFragIn;
    mediaDataIn->getMediaFragment(0, memFragIn);

    // pool made for output data
    OsclRefCounterMemFrag memFragOut;
    mediaDataOut->getMediaFragment(0, memFragOut);

    //uint8 * myData = newpkt->data;
    uint8 * myData = (uint8*)memFragIn.getMemFrag().ptr;

    /*
     *  Total payload length, in bytes, includes:
     *      the length of the AudioMuxElement()
     *      the AudioMuxElement() itself
     *      other data (not supported for RFC 3016)
     */
    int32 pktsize = memFragIn.getMemFrag().len;

    int32 m_bit = mediaDataIn->getMarkerInfo();

    /*
     *  All streams have same time framing (there is only one stream anyway)
     */
    if (firstBlock)
    {
        /*
         *  AudioMuxElement() fits in a single RTP packet, or this is the first
         *  block of an AudioMuxElement() spread across more than one RTP packet
         */


        int32 bUsed = 0;

        /*
         *      PayloadLengthInfo()
         *      (a standalone decoding sketch follows this function)
         */

        do
        {
            tmp = *(myData++);      /* read one payload length byte */
            framesize += tmp;
            bUsed++;
        }
        while (tmp == 0xff);      /* 0xff is the escape value for lengths of 255 or more */


        /*
         *      PayloadMux()
         */

        bytesRead = (pktsize - bUsed);

        // framesize must equal bytesRead if the m-bit is 1,
        // and must be at least bytesRead if the m-bit is 0
        if ((m_bit && framesize != bytesRead && !sMC->otherDataPresent) ||
                (!m_bit && framesize < bytesRead && !sMC->otherDataPresent))
        {
            // to update number of bytes copied
            memFragOut.getMemFrag().len = 0;
            mediaDataOut->setMediaFragFilledLen(0, 0);
            bytesRead = 0;

            return FRAME_ERROR;
        }

        oscl_memcpy((uint8*)memFragOut.getMemFrag().ptr, myData, bytesRead); //ptr +1 changed

        if (sMC->otherDataPresent)
        {
            ;   /* don't care at this point; no MUX other than AAC is supported */
        }

    }
    else
    {
        /*
         *  We have an AudioMuxElement() spread across more than one RTP packet
         */
        if ((m_bit && framesize != pktsize + (bytesRead - 1) && !sMC->otherDataPresent) /* last block */ ||
                (!m_bit && framesize <  pktsize + (bytesRead - 1) && !sMC->otherDataPresent) /* intermediate block */)
        {

            // to update number of bytes copied
            memFragOut.getMemFrag().len = 0;
            mediaDataOut->setMediaFragFilledLen(0, 0);

            return FRAME_ERROR;
        }

        /*
         *  Accumulate  blocks until the full frame is complete
         */
        oscl_memcpy((uint8*)memFragOut.getMemFrag().ptr + bytesRead, myData, pktsize);
        bytesRead += pktsize;
    }


    // to update number of bytes copied
    memFragOut.getMemFrag().len = bytesRead;
    mediaDataOut->setMediaFragFilledLen(0, bytesRead);
    mediaDataOut->setSeqNum(mediaDataIn->getSeqNum());
    mediaDataOut->setTimestamp(mediaDataIn->getTimestamp());


    firstBlock = false;     /* we already processed the first block, so this should be false  */

    if (m_bit)              /* check if it is a complete packet (m bit ==1) */
    {
        firstBlock = true;  /* if the m-bit is "1", the frame fits in a single block or this was
                               the last block of the frame; reset for the next call */
        framesize = 0;
        frameNum = 0;
        bytesRead = 0;
        compositenumframes = 0;
    }
    else
    {
        /*
         *  We have an AudioMuxElement() spread across more than one RTP packet
         */
        compositenumframes++;

        if (compositenumframes < MAX_NUM_COMPOSITE_FRAMES)
        {
            // this is not yet a finished packet
            return FRAME_INCOMPLETE;
        }
        else
        {
            return FRAME_ERROR;
        }

    }
    return FRAME_COMPLETE;
}
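/*
 * Illustrative helper (not part of the original parser): the PayloadLengthInfo()
 * decoding loop used by composeSingleFrame() and composeMultipleFrame() above,
 * factored into a standalone sketch. A length is coded as a run of 0xff escape
 * bytes followed by one byte smaller than 0xff; for example, a 300-byte frame is
 * coded as 0xff 0x2d (255 + 45). The function name and signature are assumptions
 * made for illustration only.
 */
static uint32 DecodePayloadLengthInfo(const uint8*& aBuf)
{
    uint32 frameSize = 0;
    uint8 lenByte;
    do
    {
        lenByte = *aBuf++;      // consume one length byte
        frameSize += lenByte;   // accumulate; 0xff means another byte follows
    }
    while (lenByte == 0xff);
    return frameSize;           // aBuf now points at the first payload byte
}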
OSCL_EXPORT_REF uint8 PV_LATM_Parser::compose(PVMFSharedMediaDataPtr& mediaDataIn)
{
    uint8 retVal = 0;

    OsclRefCounterMemFrag memFragIn;
    mediaDataIn->getMediaFragment(0, memFragIn);

    // Don't need the ref to iMediaData so unbind it
    mediaDataOut.Unbind();

    int errcode = 0;
    OsclSharedPtr<PVMFMediaDataImpl> mediaDataImpl;
    OSCL_TRY_NO_TLS(iOsclErrorTrapImp, errcode, mediaDataImpl = iMediaDataSimpleAlloc.allocate((uint32)memFragIn.getMemFrag().len));
    OSCL_FIRST_CATCH_ANY(errcode, return FRAME_OUTPUTNOTAVAILABLE);

    errcode = 0;
    OSCL_TRY_NO_TLS(iOsclErrorTrapImp, errcode, mediaDataOut = PVMFMediaData::createMediaData(mediaDataImpl, &iMediaDataMemPool));
    OSCL_FIRST_CATCH_ANY(errcode, return FRAME_OUTPUTNOTAVAILABLE);

    OsclRefCounterMemFrag memFragOut;
    mediaDataOut->getMediaFragment(0, memFragOut);

    /*
     *  Latch for very first packet, sequence number is not established yet.
     */
    int32 seqNum = mediaDataIn->getSeqNum();

    if (!firstPacket)
    {
        if ((seqNum - last_sequence_num) > 1)    /* detect any gap in sequence */
        {
            // means we missed an RTP packet.
            dropFrames = true;
        }
    }
    else
    {
        firstPacket = false;
    }

    last_timestamp = mediaDataIn->getTimestamp();
    last_sequence_num = seqNum;
    last_mbit = mediaDataIn->getMarkerInfo();

    if (dropFrames)
    {
        if (mediaDataIn->getMarkerInfo())
        {
            /*
             *  Sequencing was broken, but since the marker bit is set the new
             *  packet could still be valid: it may contain a complete
             *  AudioMuxElement(), so try to recover and retrieve it.
             */

            dropFrames = false;
        }
        else
        {

            /*
             *  We are in the middle of a spread AudioMuxElement(), or the RTP
             *  header is faulty; return an error.
             */

            framesize = 0;
            frameNum = 0;
            bytesRead = 0;
            compositenumframes = 0;

            /*
             *  Drop frame as we are not certain if it is a valid frame
             */
            memFragOut.getMemFrag().len = 0;
            mediaDataOut->setMediaFragFilledLen(0, 0);

            firstBlock = true; // set for next call
            return FRAME_ERROR;
        }
    }


    if (sMC->numSubFrames > 0 || (sMC->cpresent == 1 && ((*(uint8*)(memFragIn.getMemFrag().ptr)) &(0x80))))
    {
        // This is the less efficient path; it must be used when an AudioMuxElement has
        // more than one subFrame, and also when the StreamMuxConfig is sent in-line,
        // because the StreamMuxConfig can be large and its size cannot be known without
        // parsing it (it can even straddle an RTP packet boundary).
        // The path is less efficient because it assembles the AudioMuxElement in a separate
        // buffer (one oscl_memcpy() per RTP packet) and then parses it (one oscl_memcpy() per
        // audio frame into the output buffer) once a whole AudioMuxElement is available,
        // whereas composeSingleFrame() below copies directly into the output buffer.
        // Note that composeMultipleFrame() also handles the simple case, should there be
        // another reason to use it.

        retVal = composeMultipleFrame(mediaDataIn);
    }
    else
    {
        // this is an efficient version that can be used when you know an AudioMuxElement has
        // only one subFrame
        retVal = composeSingleFrame(mediaDataIn);
    }

    // set this to drop frames in the future -- till we find another marker bit
    if (retVal == FRAME_ERROR)
    {
        dropFrames = true;

        framesize = 0;
        frameNum = 0;
        bytesRead = 0;
        compositenumframes = 0;

        //changed
        memFragOut.getMemFrag().len = 0;
        mediaDataOut->setMediaFragFilledLen(0, 0);

        firstBlock = true; // set for next call

    }
    return retVal;
}
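/*
 * Illustrative caller sketch (not from the original source): how a depacketizer
 * might drive PV_LATM_Parser::compose() and react to its return codes. The
 * function name HandleRtpPacket and the aParser/aRtpMediaData parameters are
 * assumptions made for illustration only.
 */
static void HandleRtpPacket(PV_LATM_Parser* aParser, PVMFSharedMediaDataPtr& aRtpMediaData)
{
    uint8 status = aParser->compose(aRtpMediaData);
    switch (status)
    {
        case FRAME_COMPLETE:
            // A whole AudioMuxElement has been assembled into the parser's
            // output media data; hand it to the decoder here.
            break;
        case FRAME_INCOMPLETE:
            // The AudioMuxElement spans more RTP packets; wait for the next one.
            break;
        case FRAME_OUTPUTNOTAVAILABLE:
            // No output buffer could be allocated; retry this packet later.
            break;
        case FRAME_ERROR:
        default:
            // Sequence gap or malformed payload; the parser drops data until
            // the next packet that carries the RTP marker bit.
            break;
    }
}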