Example #1
0
// Encodes one captured frame and, if a buffered video segment became ready,
// sends it out. Timestamps are queued in bufferedTimes because the encoder
// may delay output by one or more frames.
//
// Returns true if the encoder emitted at least one packet for this call.
bool OBS::ProcessFrame(FrameProcessInfo &frameInfo)
{
    List<DataPacket> encodedPackets;
    List<PacketType> encodedPacketTypes;

    //------------------------------------
    // encode

    // Queue this frame's timestamp; it is consumed FIFO as packets appear.
    bufferedTimes << frameInfo.frameTimestamp;

    VideoSegment segment;
    VOID *encoderInput;
    bool bSendFrame = false;

    //profileIn("call to encoder");

    // During encode-thread shutdown a NULL picture is fed to the encoder;
    // otherwise use whichever picture pointer is populated for this frame.
    if (bShutdownEncodeThread)
    {
        encoderInput = NULL;
    }
    else if (frameInfo.pic->picOut)
    {
        encoderInput = (LPVOID)frameInfo.pic->picOut;
    }
    else
    {
        encoderInput = (LPVOID)frameInfo.pic->mfxOut;
    }

    videoEncoder->Encode(encoderInput, encodedPackets, encodedPacketTypes, bufferedTimes[0]);

    const bool bProcessedFrame = (encodedPackets.Num() != 0);

    // Buffer encoded video before sending; dequeue the timestamp that
    // belongs to the packets the encoder just emitted.
    if (!bProcessedFrame)
    {
        nop();
    }
    else
    {
        bSendFrame = BufferVideoData(encodedPackets, encodedPacketTypes, bufferedTimes[0], segment);
        bufferedTimes.Remove(0);
    }

    //profileOut;

    //------------------------------------
    // upload

    profileIn("sending stuff out");

    //send headers before the first frame if not yet sent
    if (bSendFrame)
        SendFrame(segment, frameInfo.firstFrameTime);

    profileOut;

    return bProcessedFrame;
}
Example #2
0
// Encodes one captured frame and streams out any resulting packets.
//
// Flow: queue the frame's timestamp, run the encoder (tracking its CTS
// offset), buffer the encoded output, and — once a segment is ready to
// send — interleave pending audio packets up to the video timestamp,
// then push audio and video to the network (and file stream, if any).
//
// Returns true if the encoder emitted at least one packet for this call
// (the encoder may buffer frames internally and emit nothing).
bool OBS::ProcessFrame(FrameProcessInfo &frameInfo)
{
    List<DataPacket> videoPackets;
    List<PacketType> videoPacketTypes;

    //------------------------------------
    // encode

    // Queue this frame's timestamp; the encoder can delay output, so
    // timestamps are consumed in FIFO order as packets appear.
    bufferedTimes << frameInfo.frameTimestamp;

    VideoSegment curSegment;
    bool bProcessedFrame, bSendFrame = false;
    int curCTSOffset = 0;
    VOID *picIn;

    profileIn("call to encoder");

    // NOTE(review): a NULL picture is fed once we are no longer running —
    // presumably to drain the encoder's delayed frames; confirm against
    // the encoder implementation.
    if (!bRunning)
        picIn = NULL;
    else
        picIn = frameInfo.pic->picOut ? (LPVOID)frameInfo.pic->picOut : (LPVOID)frameInfo.pic->mfxOut;

    // NOTE(review): ctsOffset appears to be a member written by Encode();
    // it is queued below in lockstep with bufferedTimes.
    videoEncoder->Encode(picIn, videoPackets, videoPacketTypes, bufferedTimes[0], ctsOffset);
    if(bUsing444) frameInfo.prevTexture->Unmap(0);  // release the mapped capture texture now that it's consumed

    ctsOffsets << ctsOffset;

    bProcessedFrame = (videoPackets.Num() != 0);

    //buffer video data before sending out
    if(bProcessedFrame)
    {
        bSendFrame = BufferVideoData(videoPackets, videoPacketTypes, bufferedTimes[0], curSegment);
        // Dequeue the oldest timestamp/CTS offset — they belong to the
        // frame the encoder just emitted, not necessarily to picIn.
        bufferedTimes.Remove(0);

        curCTSOffset = ctsOffsets[0];
        ctsOffsets.Remove(0);
    }

    profileOut;

    //------------------------------------
    // upload

    profileIn("sending stuff out");

    //send headers before the first frame if not yet sent
    if(bSendFrame)
    {
        if(!bSentHeaders)
        {
            network->BeginPublishing();
            bSentHeaders = true;
        }

        // pendingAudioFrames is shared with the audio thread — hold the
        // sound-data mutex while draining it.
        OSEnterMutex(hSoundDataMutex);

        if(pendingAudioFrames.Num())
        {
            // Send queued audio in order, up to (and not beyond) the video
            // segment's timestamp; packets that can't be sent are dropped.
            while(pendingAudioFrames.Num())
            {
                if(frameInfo.firstFrameTime < pendingAudioFrames[0].timestamp)
                {
                    // Rebase onto the stream clock (first video frame = 0).
                    UINT audioTimestamp = UINT(pendingAudioFrames[0].timestamp-frameInfo.firstFrameTime);

                    /*if(bFirstAudioPacket)
                    {
                        audioTimestamp = 0;
                        bFirstAudioPacket = false;
                    }
                    else*/
                        audioTimestamp += curCTSOffset;

                    //stop sending audio packets when we reach an audio timestamp greater than the video timestamp
                    if(audioTimestamp > curSegment.timestamp)
                        break;

                    // Only send timestamps that move forward (0 is allowed
                    // as the very first packet); stale packets fall through
                    // and are discarded below.
                    if(audioTimestamp == 0 || audioTimestamp > lastAudioTimestamp)
                    {
                        List<BYTE> &audioData = pendingAudioFrames[0].audioData;
                        if(audioData.Num())
                        {
                            //Log(TEXT("a:%u, %llu, cts: %d"), audioTimestamp, frameInfo.firstFrameTime+audioTimestamp-curCTSOffset, curCTSOffset);

                            network->SendPacket(audioData.Array(), audioData.Num(), audioTimestamp, PacketType_Audio);
                            if(fileStream)
                                fileStream->AddPacket(audioData.Array(), audioData.Num(), audioTimestamp, PacketType_Audio);

                            audioData.Clear();

                            lastAudioTimestamp = audioTimestamp;
                        }
                    }
                }
                else
                    nop();  // audio predates the first video frame — discarded below

                // Remove the packet whether it was sent, stale, or too early.
                pendingAudioFrames[0].audioData.Clear();
                pendingAudioFrames.Remove(0);
            }
        }

        OSLeaveMutex(hSoundDataMutex);

        // Send the buffered video segment's packets.
        for(UINT i=0; i<curSegment.packets.Num(); i++)
        {
            VideoPacketData &packet = curSegment.packets[i];

            // NOTE(review): a highest-priority packet seems to satisfy a
            // pending keyframe request — confirm PacketType_VideoHighest
            // semantics against the encoder.
            if(packet.type == PacketType_VideoHighest)
                bRequestKeyframe = false;

            //Log(TEXT("v:%u, %llu"), curSegment.timestamp, frameInfo.firstFrameTime+curSegment.timestamp);

            network->SendPacket(packet.data.Array(), packet.data.Num(), curSegment.timestamp, packet.type);
            if(fileStream)
                fileStream->AddPacket(packet.data.Array(), packet.data.Num(), curSegment.timestamp, packet.type);
        }
    }

    profileOut;

    return bProcessedFrame;
}