// Destructor: releases all queued output packets, then shuts down the
// x264 encoder handle if one was opened.
~X264Encoder()
	{
		ClearPackets();

		// x264 may be NULL if the encoder never opened successfully;
		// x264_encoder_close must only be called on a valid handle.
		if (x264)
			x264_encoder_close(x264);
	}
Пример #2
0
// Resets all buffering/playback state to its initial values so the engine
// can start filling from scratch: packet/frame maps, sequence- and frame-id
// counters, loss statistics, rate-calculation timers and buffer limits.
void CBufferOutputVideoEngine::ResetStatus()
{
		BufferTrace("CBufferOutputVideoEngine::ResetStatus() \n");
		MutexLocker locker(&m_mutex); // guards m_mapOutputVideoBuf / m_mapFrameToSeq against the buffer playback thread
		m_bFilled = false;
 		//m_lastRecvTime = GetTickCount();
// 		if (m_iMinSequenceId>m_iMaxSequenceId){
// 			m_iCheckSeqId = 0;
// 			m_iMinFrameId = 0;
// 			m_iMaxFrameId = 0;
// 			m_iMinSequenceId = 0;
// 			m_iMaxSequenceId = 0;
// 			m_iPlayFrameId = 0;
// 			m_bStartPlay = true;
// 			m_dwStartPlayTime = 0;
// 		}
		// Unconditionally clear buffered packets and reset playback position
		// (the commented-out block above used to do this conditionally).
		{
			ClearPackets();
			m_mapFrameToSeq.clear();
			m_mapOutputVideoReqInf.clear();
			m_iCheckSeqId = 0;
			m_iMinFrameId = 0;
			m_iMaxFrameId = 0;
			m_iMinSequenceId = 0;
			m_iMaxSequenceId = 0;
			m_iPlayFrameId = 0;
			m_bStartPlay = true;
			m_dwStartPlayTime = 0;
		}
		// Loss/rate statistics and timers.
		m_iCheckFrameId = 0;
		m_iTotalPacketNum = 0;
		m_iLostPacketNum = 0;
		m_fLostRate = 0;
		m_dwLastLostTime = GetTickCount();
		m_dwFirstStartTime = 0;
		m_dwSecondStartTime = 0;
		m_dwLostStartTime = 0;
		m_iFirstStartNum = 0;
		m_dwLastRecvRateTime = 0;
		m_iLastRecvRateInter = BUFFER_OUTPUT_RATECALC_MIN_INTERVAL;
		m_iBufferSize = BUFFER_OUTPUT_ONMIC_VIDEO_MAX_LIMIT;
		m_iFirstStartFrameSeq = 0;
		m_iFirstCurFrameSeq = 0;
		m_iSecondStartFrameSeq = 0;
		m_iSecondCurFrameSeq = 0;
		m_iFirstStartFrameNum = 0;
		m_iSecondStartFrameNum = 0;
		m_iAudioBufferSize = BUFFER_OUTPUT_ONMIC_AUDIO_MAX_LIMIT;
		m_dwLastCalcBufSizeTime = 0;
		m_dwLastRecvAudioTime = 0;
		m_fRecvRate = 0.0f;
		m_fRecvFrameRate = 0.0f;
		m_dwLastPlayRateTime=0;
		// Default pacing interval in ms derived from the nominal frame rate.
		m_dwSendRate = 1000/BUFFER_PERSEC_VIDEO_FRAMES;
		m_dwLastPlayTime = 0; 
		m_iStartPlayCount = 0;
		m_iWillPlayFrameId = 0;
		m_iSecondStartNum = 0;
		m_dwStartCalcAudioParam = 0;
}
Пример #3
0
/* Remove the session's "empty packet" sequence-number range from the NACK
 * candidate list.  A bound of -1 means that end of the range is unknown, in
 * which case nothing is cleared. */
static void ClearEmptyPackets(Session_Info *session_info, int *seq_num_list,
                                            int seq_num_list_len) {
    const int low = session_info->empty_low_seq_num;
    const int high = session_info->empty_high_seq_num;
    if (low == -1 || high == -1)
        return;
    /* -2 is the fill value used for entries cleared as "empty". */
    ClearPackets(seq_num_list, seq_num_list_len, low, high, -2);
}
Пример #4
0
// Tears the engine down: removes this instance from the global session map,
// drops the callback pointers and frees any buffered packets.
void CBufferOutputVideoEngine::UnInitialize()
{
		BufferTrace("CBufferOutputVideoEngine::UnInitialize() \n");
		MutexLocker m_locker(&m_static_mutex); // m_mapVideoOutput is shared by the network thread and the playback thread
		// Unregister this engine from the static room/uin -> engine map.
		map<UInt64,CBufferOutputVideoEngine*>::iterator iter = m_mapVideoOutput.find(MakeSession(m_iRoomId,m_iFromUin));
		if (iter!=m_mapVideoOutput.end()){
			m_mapVideoOutput.erase(iter);
		}
		m_pInPlace = NULL;
		m_pRate = NULL;
		ClearPackets();
}
Пример #5
0
/* Build a "soft" NACK list for one frame session.
 *
 * First clears the empty-packet range from seq_num_list, then decides whether
 * the frame is still worth retransmitting.  If it is not (previous frame lost
 * and this is not a key frame, or the frame would arrive after its playout
 * deadline given 1.5 * rtt_ms), the whole media range is cleared from the
 * list and 0 is returned.  Otherwise the decision is delegated to
 * session_info_buildHardNackList().
 *
 * Fixes vs. previous revision: removed a leftover debug printf() on this hot
 * path and the unused local empty_high_seq_num.
 */
PJ_DEF(int) session_info_buildSoftNackList(Session_Info *session_info, int *seq_num_list,
                                            int seq_num_list_len, pj_uint32_t rtt_ms) {
    if(seq_num_list_len == 0) return 0;
    int empty_low_seq_num = session_info->empty_low_seq_num;
    //clear empty packets (only possible once the low bound is known;
    //ClearEmptyPackets itself re-checks the high bound)
    if(empty_low_seq_num != -1)
        ClearEmptyPackets(session_info, seq_num_list, seq_num_list_len);
    JTPacket *packet = session_info->packetList.next;
    if(packet == &session_info->packetList ) //empty
        return 0;
    int media_seq_num_low, media_seq_num_high;
    pj_bool_t session_nacked = PJ_TRUE;
    //if not key frame and previous frame lost, don't retransmit
    if(session_info->previous_frame_loss && !session_info->isKeyFrame) {
        session_nacked = PJ_FALSE;
    }
    //if (now + rtt) > (ts + FRAME_MAX_DELAY), don't retransmit
    //use ntp time, but ts in rtp is not ntp time in current, so substitute received time for ts
    //if (now + rtt * 1.5) > (received + FRAME_MAX_DELAY), don't retransmit
    if(session_info->frame_type != PJMEDIA_FRAME_TYPE_NONE && session_info->frame_type != 
            PJMEDIA_FRAME_TYPE_EMPTY) {
        //has media packets
        pj_timestamp now, received;
        pj_get_timestamp(&now);
        pj_add_timestamp32(&now, rtt_ms * 1000 * 3 / 2);
        memcpy(&received, &session_info->oldest_media_packet, sizeof(pj_timestamp));
        pj_add_timestamp32(&received, FRAME_MAX_DELAY * 1000);
        if(pj_cmp_timestamp(&now, &received) > 0) {
            session_nacked = PJ_FALSE;
        }     
    }
    if(!session_nacked) {
        //frame abandoned: clear its whole media range from the list.
        //widen the bounds by one when the first/last packet of the frame
        //has not been received (isFirst / marker bit not seen).
        media_seq_num_low = session_info->packetList.next->isFirst? session_info->packetList.next->seq:
                                    session_info->packetList.next->seq - 1;
        media_seq_num_high = session_info->packetList.prev->isMarket? session_info->packetList.prev->seq:
                                    session_info->packetList.prev->seq + 1;
        ClearPackets(seq_num_list, seq_num_list_len, media_seq_num_low, media_seq_num_high, -1);
        return 0;
    }
    
    return session_info_buildHardNackList(session_info, seq_num_list, seq_num_list_len);   
}
Пример #6
0
// Tears down the client's network state once the connection is gone.
// Only acts when not connected; only releases resources once the worker
// thread has finished (Thread == NULL), to avoid racing with it.
void NetClient::Cleanup( void )
{
	if( ! Connected )
	{
		Raptor::Game->ChangeState( Raptor::State::DISCONNECTED );
		
		if( ! Thread )
		{
			// If the connection is open, close it.
			if( Socket )
			{
				SDLNet_TCP_Close( Socket );
				Socket = NULL;
			}
			
			// This empties the incoming packet buffer.
			ClearPackets();
			
			// Drop latency bookkeeping for the dead connection.
			PingTimes.clear();
			SentPings.clear();
		}
	}
}
Пример #7
0
void CBufferOutputAudioEngine::ResetStatus()// reset buffer state
{
		// NOTE(review): unlike CBufferOutputVideoEngine::ResetStatus(), no
		// mutex is taken here — confirm callers serialize access themselves.
		m_bFilled = false;
// 		m_lastRecvTime = GetTickCount();
//		m_dwSendRate = BUFFER_PERSEC_AUDIO_SEND_INTER;
//  		if (m_iMinSequenceId>m_iMaxSequenceId){
// 			m_iCheckSeqId = 0;
// 			m_iMinFrameId = 0;
// 			m_iMaxFrameId = 0;
// 			m_iMinSequenceId = 0;
// 			m_iMaxSequenceId = 0;
// 		}
		// Unconditionally clear buffered packets and id counters (the
		// commented-out block above used to do this conditionally).
		{
			ClearPackets();
			m_mapOutputAudioReqInf.clear();
			m_iCheckSeqId = 0;
			m_iMinFrameId = 0;
			m_iMaxFrameId = 0;
			m_iMinSequenceId = 0;
			m_iMaxSequenceId = 0;
		}
		// Loss/rate statistics and timers.
		m_dwLastLostTime = GetTickCount();
		m_iTotalPacketNum = 0;
		m_iLostPacketNum = 0;
		m_fLostRate = 0.0f;
		m_iLastRecvRateInter = BUFFER_OUTPUT_RATECALC_MIN_INTERVAL;
		m_iBufferSize = BUFFER_OUTPUT_ONMIC_AUDIO_MAX_LIMIT;
		m_dwLastRecvRateTime = 0;
		m_dwFirstStartTime = 0;
		m_dwSecondStartTime = 0;
		m_iFirstStartNum = 0;
		m_iSecondStartNum = 0;
		m_fRecvRate = BUFFER_PERSEC_AUDIO_NUMBER;
		m_dwLastCalcRecvRate = GetTickCount();
		m_fLastPlayTime = GetTickCount();
		m_iStartPlayCount = 0;
}
Пример #8
0
    // Pops the oldest finished QSV encode task, splits its Annex-B bitstream
    // into NAL units and repackages them as FLV/RTMP-style video packets
    // (SEI payload type 5 is diverted into SEIData).  Also maintains the
    // audio/video timing adjustment: frameShift when frame duplication is
    // enabled, otherwise the caller-owned ctsOffset.
    // 'wait' is passed to MFXVideoCORE_SyncOperation; returns without output
    // if no task is queued or the sync did not complete.
    void ProcessEncodedFrame(List<DataPacket> &packets, List<PacketType> &packetTypes, DWORD outputTimestamp, int &ctsOffset, mfxU32 wait=0)
    {
        if(!encoded_tasks.Num())
            return;

        encode_task& task = encode_tasks[encoded_tasks[0]];
        auto& sp = task.sp;
        // Wait (up to 'wait') for the hardware encoder to finish this task.
        if(MFXVideoCORE_SyncOperation(session, sp, wait) != MFX_ERR_NONE)
            return;

        mfxBitstream& bs = task.bs;

        // Scan the bitstream for 00 00 01 start codes and slice it into
        // x264_nal_t records (payload still includes the start code).
        List<x264_nal_t> nalOut;
        mfxU8 *start = bs.Data + bs.DataOffset,
              *end = bs.Data + bs.DataOffset + bs.DataLength;
        static mfxU8 start_seq[] = {0, 0, 1};
        start = std::search(start, end, start_seq, start_seq+3);
        while(start != end)
        {
            decltype(start) next = std::search(start+1, end, start_seq, start_seq+3);
            x264_nal_t nal;
            // NAL header byte: nal_ref_idc in bits 5-6, nal_unit_type in bits 0-4.
            nal.i_ref_idc = start[3]>>5;
            nal.i_type = start[3]&0x1f;
            if(nal.i_type == NAL_SLICE_IDR)
                nal.i_ref_idc = NAL_PRIORITY_HIGHEST;
            nal.p_payload = start;
            nal.i_payload = int(next-start);
            nalOut << nal;
            start = next;
        }
        size_t nalNum = nalOut.Num();

        packets.Clear();
        ClearPackets();

        INT64 dts;

        if(bUsingDecodeTimestamp && bs.DecodeTimeStamp != MFX_TIMESTAMP_UNKNOWN)
        {
            dts = msFromTimestamp(bs.DecodeTimeStamp);
        }
        else
            dts = outputTimestamp;

        INT64 in_pts = msFromTimestamp(task.surf.Data.TimeStamp),
              out_pts = msFromTimestamp(bs.TimeStamp);

        // First encoded frame establishes the stream's DTS origin.
        if(!bFirstFrameProcessed && nalNum)
        {
            delayOffset = -dts;
            bFirstFrameProcessed = true;
        }

        INT64 ts = INT64(outputTimestamp);
        int timeOffset;

        if(bDupeFrames)
        {
            //if frame duplication is being used, the shift will be insignificant, so just don't bother adjusting audio
            timeOffset = int(out_pts-dts);
            timeOffset += frameShift;

            if(nalNum && timeOffset < 0)
            {
                frameShift -= timeOffset;
                timeOffset = 0;
            }
        }
        else
        {
            timeOffset = int(out_pts+delayOffset-ts);
            timeOffset += ctsOffset;

            //dynamically adjust the CTS for the stream if it gets lower than the current value
            //(thanks to cyrus for suggesting to do this instead of a single shift)
            if(nalNum && timeOffset < 0)
            {
                ctsOffset -= timeOffset;
                timeOffset = 0;
            }
        }
        //Log(TEXT("inpts: %005d, dts: %005d, pts: %005d, timestamp: %005d, offset: %005d, newoffset: %005d"), task.surf.Data.TimeStamp/90, dts, bs.TimeStamp/90, outputTimestamp, timeOffset, bs.TimeStamp/90-dts);

        // 24-bit big-endian composition time offset: convert with htonl,
        // then point one byte in so timeOffsetAddr covers the low 3 bytes.
        timeOffset = htonl(timeOffset);

        BYTE *timeOffsetAddr = ((BYTE*)&timeOffset)+1;

        VideoPacket *newPacket = NULL;

        PacketType bestType = PacketType_VideoDisposable;
        bool bFoundFrame = false;

        for(int i=0; i<nalNum; i++)
        {
            x264_nal_t &nal = nalOut[i];

            if(nal.i_type == NAL_SEI)
            {
                // Skip everything up to and including the 0x01 of the start code.
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                int newPayloadSize = (nal.i_payload-skipBytes);

                // SEI payload type 5 (user data unregistered) is stored
                // separately in SEIData; any other SEI rides in the packet.
                if (nal.p_payload[skipBytes+1] == 0x5) {
                    SEIData.Clear();
                    BufferOutputSerializer packetOut(SEIData);

                    packetOut.OutputDword(htonl(newPayloadSize));
                    packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
                } else {
                    if (!newPacket)
                        newPacket = CurrentPackets.CreateNew();

                    BufferOutputSerializer packetOut(newPacket->Packet);

                    packetOut.OutputDword(htonl(newPayloadSize));
                    packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
                }
            }
            /*else if(nal.i_type == NAL_FILLER) //QSV does not produce NAL_FILLER
            {
            BYTE *skip = nal.p_payload;
            while(*(skip++) != 0x1);
            int skipBytes = (int)(skip-nal.p_payload);

            int newPayloadSize = (nal.i_payload-skipBytes);

            if (!newPacket)
            newPacket = CurrentPackets.CreateNew();

            BufferOutputSerializer packetOut(newPacket->Packet);

            packetOut.OutputDword(htonl(newPayloadSize));
            packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
            }*/
            else if(nal.i_type == NAL_SLICE_IDR || nal.i_type == NAL_SLICE)
            {
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                if (!newPacket)
                    newPacket = CurrentPackets.CreateNew();

                // First slice gets the FLV-style video tag header:
                // 0x17 keyframe / 0x27 inter frame, AVC NALU type (1),
                // then the 24-bit composition time offset.
                if (!bFoundFrame)
                {
                    newPacket->Packet.Insert(0, (nal.i_type == NAL_SLICE_IDR) ? 0x17 : 0x27);
                    newPacket->Packet.Insert(1, 1);
                    newPacket->Packet.InsertArray(2, timeOffsetAddr, 3);

                    bFoundFrame = true;
                }

                int newPayloadSize = (nal.i_payload-skipBytes);
                BufferOutputSerializer packetOut(newPacket->Packet);

                packetOut.OutputDword(htonl(newPayloadSize));
                packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);

                // Keep the highest NAL priority seen; it becomes the
                // packet type reported for the whole frame.
                switch(nal.i_ref_idc)
                {
                case NAL_PRIORITY_DISPOSABLE:   bestType = MAX(bestType, PacketType_VideoDisposable);  break;
                case NAL_PRIORITY_LOW:          bestType = MAX(bestType, PacketType_VideoLow);         break;
                case NAL_PRIORITY_HIGH:         bestType = MAX(bestType, PacketType_VideoHigh);        break;
                case NAL_PRIORITY_HIGHEST:      bestType = MAX(bestType, PacketType_VideoHighest);     break;
                }
            }
            /*else if(nal.i_type == NAL_SPS)
            {
            VideoPacket *newPacket = CurrentPackets.CreateNew();
            BufferOutputSerializer headerOut(newPacket->Packet);

            headerOut.OutputByte(0x17);
            headerOut.OutputByte(0);
            headerOut.Serialize(timeOffsetAddr, 3);
            headerOut.OutputByte(1);
            headerOut.Serialize(nal.p_payload+5, 3);
            headerOut.OutputByte(0xff);
            headerOut.OutputByte(0xe1);
            headerOut.OutputWord(htons(nal.i_payload-4));
            headerOut.Serialize(nal.p_payload+4, nal.i_payload-4);

            x264_nal_t &pps = nalOut[i+1]; //the PPS always comes after the SPS

            headerOut.OutputByte(1);
            headerOut.OutputWord(htons(pps.i_payload-4));
            headerOut.Serialize(pps.p_payload+4, pps.i_payload-4);
            }*/
            else
                continue;
        }

        packetTypes << bestType;

        // Expose the serialized packets; the memory stays owned by
        // CurrentPackets until the next call clears it.
        packets.SetSize(CurrentPackets.Num());
        for(UINT i=0; i<packets.Num(); i++)
        {
            packets[i].lpPacket = CurrentPackets[i].Packet.Array();
            packets[i].size     = CurrentPackets[i].Packet.Num();
        }

        // Retire the task: keep it locked until the SDK releases it.
        msdk_locked_tasks << encoded_tasks[0];
        encoded_tasks.Remove(0);
    }
Пример #9
0
 // Destructor: frees any output packets still held by the encoder.
 ~QSVEncoder()
 {
     ClearPackets();
 }
Пример #10
0
    // Waits (up to 'wait') for the helper thread to publish a filled
    // bitstream, then splits the Annex-B stream into NAL units, rewrites
    // slice priorities from the Media SDK frame type, and repackages the
    // result as FLV/RTMP-style video packets.  SEI user-data-unregistered
    // payloads are diverted into SEIData.  Finishes by recycling the task
    // index back to the idle queue.
    void ProcessEncodedFrame(List<DataPacket> &packets, List<PacketType> &packetTypes, DWORD outputTimestamp, mfxU32 wait=0)
    {
        if(!filled_bitstream_waiter.wait_for(2, wait))
            return;

        // Claim the published bitstream index and mark the slot consumed.
        uint32_t index = 0;
        {
            auto lock = lock_mutex(filled_bitstream);
            index = *filled_bitstream;
            *filled_bitstream = -1;
        }
        encode_task& task = encode_tasks[index];

        mfxBitstream& bs = task.bs;

        List<x264_nal_t> nalOut;
        mfxU8 *start, *end;
        // Restore the bitstream fields captured by the encoding thread.
        {
            bitstream_info &info = bs_info[index];
            bs.TimeStamp = info.time_stamp;
            bs.DataLength = info.data_length;
            bs.DataOffset = info.data_offset;
            bs.PicStruct = info.pic_struct;
            bs.FrameType = info.frame_type;
        }
        // Scan for 00 00 01 start codes and slice into NAL records.
        start = bs.Data + bs.DataOffset;
        end = bs.Data + bs.DataOffset + bs.DataLength;
        const static mfxU8 start_seq[] = {0, 0, 1};
        start = std::search(start, end, start_seq, start_seq+3);
        while(start != end)
        {
            decltype(start) next = std::search(start+1, end, start_seq, start_seq+3);
            x264_nal_t nal;
            // NAL header byte: nal_ref_idc in bits 5-6, nal_unit_type in bits 0-4.
            nal.i_ref_idc = start[3]>>5;
            nal.i_type = start[3]&0x1f;
            if(nal.i_type == NAL_SLICE_IDR)
                nal.i_ref_idc = NAL_PRIORITY_HIGHEST;
            else if(nal.i_type == NAL_SLICE)
            {
                // Derive the NAL priority from the Media SDK frame type
                // (reference I/P > reference B > non-reference B).
                switch(bs.FrameType & (MFX_FRAMETYPE_REF | (MFX_FRAMETYPE_S-1)))
                {
                case MFX_FRAMETYPE_REF|MFX_FRAMETYPE_I:
                case MFX_FRAMETYPE_REF|MFX_FRAMETYPE_P:
                    nal.i_ref_idc = NAL_PRIORITY_HIGH;
                    break;
                case MFX_FRAMETYPE_REF|MFX_FRAMETYPE_B:
                    nal.i_ref_idc = NAL_PRIORITY_LOW;
                    break;
                case MFX_FRAMETYPE_B:
                    nal.i_ref_idc = NAL_PRIORITY_DISPOSABLE;
                    break;
                default:
                    Log(TEXT("Unhandled frametype %u"), bs.FrameType);
                }
            }
            // Write the (possibly updated) priority back into the stream.
            start[3] = ((nal.i_ref_idc<<5)&0x60) | nal.i_type;
            nal.p_payload = start;
            nal.i_payload = int(next-start);
            nalOut << nal;
            start = next;
        }
        size_t nalNum = nalOut.Num();

        packets.Clear();
        ClearPackets();

        INT64 dts = outputTimestamp;

        INT64 in_pts = msFromTimestamp(task.surf.Data.TimeStamp),
            out_pts = msFromTimestamp(bs.TimeStamp);

        // First encoded frame establishes the stream's DTS origin.
        if(!bFirstFrameProcessed && nalNum)
        {
            delayOffset = -dts;
            bFirstFrameProcessed = true;
        }

        INT64 ts = INT64(outputTimestamp);
        int timeOffset;

        //if frame duplication is being used, the shift will be insignificant, so just don't bother adjusting audio
        timeOffset = int(out_pts-dts);
        timeOffset += frameShift;

        if(nalNum && timeOffset < 0)
        {
            frameShift -= timeOffset;
            timeOffset = 0;
        }
        //Log(TEXT("inpts: %005d, dts: %005d, pts: %005d, timestamp: %005d, offset: %005d, newoffset: %005d"), task.surf.Data.TimeStamp/90, dts, bs.TimeStamp/90, outputTimestamp, timeOffset, bs.TimeStamp/90-dts);

        // 24-bit big-endian composition time offset: convert with htonl,
        // then point one byte in so timeOffsetAddr covers the low 3 bytes.
        timeOffset = htonl(timeOffset);

        BYTE *timeOffsetAddr = ((BYTE*)&timeOffset)+1;

        VideoPacket *newPacket = NULL;

        PacketType bestType = PacketType_VideoDisposable;
        bool bFoundFrame = false;

        for(unsigned i=0; i<nalNum; i++)
        {
            x264_nal_t &nal = nalOut[i];

            if(nal.i_type == NAL_SEI)
            {
                // Skip everything up to and including the 0x01 of the start code.
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                int newPayloadSize = (nal.i_payload-skipBytes);
                // Walk the individual SEI messages inside this NAL; each is
                // (type, size) with 0xff-extension bytes per the H.264 spec.
                BYTE *sei_start = skip+1;
                while(sei_start < (nal.p_payload+nal.i_payload))
                {
                    BYTE *sei = sei_start;
                    int sei_type = 0;
                    while(*sei == 0xff)
                    {
                        sei_type += 0xff;
                        sei += 1;
                    }
                    sei_type += *sei++;

                    int payload_size = 0;
                    while(*sei == 0xff)
                    {
                        payload_size += 0xff;
                        sei += 1;
                    }
                    payload_size += *sei++;

                    // Account for 00 00 03 emulation-prevention bytes, which
                    // enlarge the payload as stored in the stream.
                    const static BYTE emulation_prevention_pattern[] = {0, 0, 3};
                    BYTE *search = sei;
                    for(BYTE *search = sei;;)
                    {
                        search = std::search(search, sei+payload_size, emulation_prevention_pattern, emulation_prevention_pattern+3);
                        if(search == sei+payload_size)
                            break;
                        payload_size += 1;
                        search += 3;
                    }

                    int sei_size = (int)(sei-sei_start) + payload_size;
                    // Re-stamp the byte before the message with the SEI NAL type.
                    sei_start[-1] = NAL_SEI;

                    // User-data-unregistered SEI is stored separately in
                    // SEIData; any other SEI rides in the packet.
                    if(sei_type == SEI_USER_DATA_UNREGISTERED) {
                        SEIData.Clear();
                        BufferOutputSerializer packetOut(SEIData);

                        packetOut.OutputDword(htonl(sei_size+1));
                        packetOut.Serialize(sei_start-1, sei_size+1);
                    } else {
                        if (!newPacket)
                            newPacket = CurrentPackets.CreateNew();

                        BufferOutputSerializer packetOut(newPacket->Packet);

                        packetOut.OutputDword(htonl(sei_size+1));
                        packetOut.Serialize(sei_start-1, sei_size+1);
                    }
                    sei_start += sei_size;
                }
            }
            else if(nal.i_type == NAL_AUD)
            {
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                int newPayloadSize = (nal.i_payload-skipBytes);

                if (!newPacket)
                    newPacket = CurrentPackets.CreateNew();

                BufferOutputSerializer packetOut(newPacket->Packet);

                packetOut.OutputDword(htonl(newPayloadSize));
                packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
            }
            else if(nal.i_type == NAL_SLICE_IDR || nal.i_type == NAL_SLICE)
            {
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                if (!newPacket)
                    newPacket = CurrentPackets.CreateNew();

                // First slice gets the FLV-style video tag header:
                // 0x17 keyframe / 0x27 inter frame, AVC NALU type (1),
                // then the 24-bit composition time offset.
                if (!bFoundFrame)
                {
                    newPacket->Packet.Insert(0, (nal.i_type == NAL_SLICE_IDR) ? 0x17 : 0x27);
                    newPacket->Packet.Insert(1, 1);
                    newPacket->Packet.InsertArray(2, timeOffsetAddr, 3);

                    bFoundFrame = true;
                }

                int newPayloadSize = (nal.i_payload-skipBytes);
                BufferOutputSerializer packetOut(newPacket->Packet);

                packetOut.OutputDword(htonl(newPayloadSize));
                packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);

                // Keep the highest NAL priority seen; it becomes the
                // packet type reported for the whole frame.
                switch(nal.i_ref_idc)
                {
                case NAL_PRIORITY_DISPOSABLE:   bestType = MAX(bestType, PacketType_VideoDisposable);  break;
                case NAL_PRIORITY_LOW:          bestType = MAX(bestType, PacketType_VideoLow);         break;
                case NAL_PRIORITY_HIGH:         bestType = MAX(bestType, PacketType_VideoHigh);        break;
                case NAL_PRIORITY_HIGHEST:      bestType = MAX(bestType, PacketType_VideoHighest);     break;
                }
            }
            /*else if(nal.i_type == NAL_SPS)
            {
            VideoPacket *newPacket = CurrentPackets.CreateNew();
            BufferOutputSerializer headerOut(newPacket->Packet);

            headerOut.OutputByte(0x17);
            headerOut.OutputByte(0);
            headerOut.Serialize(timeOffsetAddr, 3);
            headerOut.OutputByte(1);
            headerOut.Serialize(nal.p_payload+5, 3);
            headerOut.OutputByte(0xff);
            headerOut.OutputByte(0xe1);
            headerOut.OutputWord(htons(nal.i_payload-4));
            headerOut.Serialize(nal.p_payload+4, nal.i_payload-4);

            x264_nal_t &pps = nalOut[i+1]; //the PPS always comes after the SPS

            headerOut.OutputByte(1);
            headerOut.OutputWord(htons(pps.i_payload-4));
            headerOut.Serialize(pps.p_payload+4, pps.i_payload-4);
            }*/
            else
                continue;
        }

        packetTypes << bestType;

        // Expose the serialized packets; the memory stays owned by
        // CurrentPackets until the next call clears it.
        packets.SetSize(CurrentPackets.Num());
        for(UINT i=0; i<packets.Num(); i++)
        {
            packets[i].lpPacket = CurrentPackets[i].Packet.Array();
            packets[i].size     = CurrentPackets[i].Packet.Num();
        }

        // Recycle the task slot; tasks must complete in queue order.
        idle_tasks << index;
        assert(queued_tasks[0] == index);
        queued_tasks.Remove(0);
    }
Пример #11
0
 // Destructor: signals the worker to stop, then frees any queued packets.
 ~QSVEncoder()
 {
     stop.signal();
     ClearPackets();
 }
Пример #12
0
    // Encodes one raw picture with x264 and repackages the resulting NAL
    // units as FLV/RTMP-style video packets (SEI payload type 5 is diverted
    // into SEIData).  Honors a pending keyframe request by forcing IDR.
    // Returns false only when x264_encoder_encode fails.
    bool Encode(LPVOID picInPtr, List<DataPacket> &packets, List<PacketType> &packetTypes, DWORD outputTimestamp)
    {
        x264_picture_t *picIn = (x264_picture_t*)picInPtr;

        x264_nal_t *nalOut;
        int nalNum;

        packets.Clear();
        ClearPackets();

        // Force an IDR frame if a keyframe was requested (picIn is NULL when
        // flushing delayed frames).
        if(bRequestKeyframe && picIn)
            picIn->i_type = X264_TYPE_IDR;

        if(x264_encoder_encode(x264, &nalOut, &nalNum, picIn, &picOut) < 0)
        {
            AppWarning(TEXT("x264 encode failed"));
            return false;
        }

        // Restore automatic frame-type selection and clear the request.
        if(bRequestKeyframe && picIn)
        {
            picIn->i_type = X264_TYPE_AUTO;
            bRequestKeyframe = false;
        }

        // First encoded frame establishes the stream's DTS origin.
        if(!bFirstFrameProcessed && nalNum)
        {
            delayOffset = -picOut.i_dts;
            bFirstFrameProcessed = true;
        }

        INT64 ts = INT64(outputTimestamp);
        int timeOffset;

        //if frame duplication is being used, the shift will be insignificant, so just don't bother adjusting audio
        timeOffset = int(picOut.i_pts-picOut.i_dts);
        timeOffset += frameShift;

        if(nalNum && timeOffset < 0)
        {
            frameShift -= timeOffset;
            timeOffset = 0;
        }

        //Log(TEXT("inpts: %005d, dts: %005d, pts: %005d, timestamp: %005d, offset: %005d, newoffset: %005d"), picIn->i_pts, picOut.i_dts, picOut.i_pts, outputTimestamp, timeOffset, picOut.i_pts-picOut.i_dts);

        // 24-bit big-endian composition time offset: convert with htonl,
        // then point one byte in so timeOffsetAddr covers the low 3 bytes.
        timeOffset = htonl(timeOffset);

        BYTE *timeOffsetAddr = ((BYTE*)&timeOffset)+1;

        VideoPacket *newPacket = NULL;

        PacketType bestType = PacketType_VideoDisposable;
        bool bFoundFrame = false;

        for(int i=0; i<nalNum; i++)
        {
            x264_nal_t &nal = nalOut[i];

            if(nal.i_type == NAL_SEI)
            {
                // Skip everything up to and including the 0x01 of the start code.
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                int newPayloadSize = (nal.i_payload-skipBytes);

                // SEI payload type 5 (user data unregistered) is stored
                // separately in SEIData; any other SEI rides in the packet.
                if (nal.p_payload[skipBytes+1] == 0x5) {
                    SEIData.Clear();
                    BufferOutputSerializer packetOut(SEIData);

                    packetOut.OutputDword(htonl(newPayloadSize));
                    packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
                } else {
                    if (!newPacket)
                        newPacket = CurrentPackets.CreateNew();

                    BufferOutputSerializer packetOut(newPacket->Packet);

                    packetOut.OutputDword(htonl(newPayloadSize));
                    packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
                }
            }
            else if(nal.i_type == NAL_FILLER)
            {
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                int newPayloadSize = (nal.i_payload-skipBytes);

                if (!newPacket)
                    newPacket = CurrentPackets.CreateNew();

                BufferOutputSerializer packetOut(newPacket->Packet);

                packetOut.OutputDword(htonl(newPayloadSize));
                packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
            }
            else if(nal.i_type == NAL_SLICE_IDR || nal.i_type == NAL_SLICE)
            {
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                if (!newPacket)
                    newPacket = CurrentPackets.CreateNew();

                // First slice gets the FLV-style video tag header:
                // 0x17 keyframe / 0x27 inter frame, AVC NALU type (1),
                // then the 24-bit composition time offset.
                if (!bFoundFrame)
                {
                    newPacket->Packet.Insert(0, (nal.i_type == NAL_SLICE_IDR) ? 0x17 : 0x27);
                    newPacket->Packet.Insert(1, 1);
                    newPacket->Packet.InsertArray(2, timeOffsetAddr, 3);

                    bFoundFrame = true;
                }

                int newPayloadSize = (nal.i_payload-skipBytes);
                BufferOutputSerializer packetOut(newPacket->Packet);

                packetOut.OutputDword(htonl(newPayloadSize));
                packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);

                // Keep the highest NAL priority seen; it becomes the
                // packet type reported for the whole frame.
                switch(nal.i_ref_idc)
                {
                    case NAL_PRIORITY_DISPOSABLE:   bestType = MAX(bestType, PacketType_VideoDisposable);  break;
                    case NAL_PRIORITY_LOW:          bestType = MAX(bestType, PacketType_VideoLow);         break;
                    case NAL_PRIORITY_HIGH:         bestType = MAX(bestType, PacketType_VideoHigh);        break;
                    case NAL_PRIORITY_HIGHEST:      bestType = MAX(bestType, PacketType_VideoHighest);     break;
                }
            }
            /*else if(nal.i_type == NAL_SPS)
            {
                VideoPacket *newPacket = CurrentPackets.CreateNew();
                BufferOutputSerializer headerOut(newPacket->Packet);

                headerOut.OutputByte(0x17);
                headerOut.OutputByte(0);
                headerOut.Serialize(timeOffsetAddr, 3);
                headerOut.OutputByte(1);
                headerOut.Serialize(nal.p_payload+5, 3);
                headerOut.OutputByte(0xff);
                headerOut.OutputByte(0xe1);
                headerOut.OutputWord(htons(nal.i_payload-4));
                headerOut.Serialize(nal.p_payload+4, nal.i_payload-4);

                x264_nal_t &pps = nalOut[i+1]; //the PPS always comes after the SPS

                headerOut.OutputByte(1);
                headerOut.OutputWord(htons(pps.i_payload-4));
                headerOut.Serialize(pps.p_payload+4, pps.i_payload-4);
            }*/
            else
                continue;
        }

        packetTypes << bestType;

        // Expose the serialized packets; the memory stays owned by
        // CurrentPackets until the next call clears it.
        packets.SetSize(CurrentPackets.Num());
        for(UINT i=0; i<packets.Num(); i++)
        {
            packets[i].lpPacket = CurrentPackets[i].Packet.Array();
            packets[i].size     = CurrentPackets[i].Packet.Num();
        }

        return true;
    }
Пример #13
0
    bool Encode(LPVOID picInPtr, List<DataPacket> &packets, List<PacketType> &packetTypes, DWORD outputTimestamp, int &ctsOffset)
    {
        bs.DataLength = 0;
        bs.DataOffset = 0;
        mfxFrameSurface1& pic = *(mfxFrameSurface1*)picInPtr;
        enc_surf.Data.Y = pic.Data.Y;
        enc_surf.Data.UV = pic.Data.UV;
        enc_surf.Data.Pitch = pic.Data.Pitch;
        enc_surf.Data.TimeStamp = pic.Data.TimeStamp*90;
        mfxSyncPoint sp = nullptr;
        // Submit the frame to the QSV encoder, then block until the encoded
        // bitstream is ready.
        // NOTE(review): the EncodeFrameAsync status is immediately overwritten
        // by the SyncOperation status and neither is checked (e.g. for
        // MFX_ERR_MORE_DATA / MFX_WRN_DEVICE_BUSY) — confirm the caller
        // guarantees output is always produced here.
        auto sts = enc->EncodeFrameAsync(bRequestKeyframe ? &ctrl : nullptr, &enc_surf, &bs, &sp);

        sts = MFXVideoCORE_SyncOperation(session, sp, INFINITE);

        // Split the Annex-B bitstream into individual NAL units by scanning
        // for 00 00 01 start codes; x264's nal struct is reused purely as a
        // convenient {type, priority, pointer, size} container.
        List<x264_nal_t> nalOut;
        mfxU8 *start = bs.Data + bs.DataOffset,
              *end = bs.Data + bs.DataOffset + bs.DataLength;
        static mfxU8 start_seq[] = {0, 0, 1};
        start = std::search(start, end, start_seq, start_seq+3);
        while(start != end)
        {
            decltype(start) next = std::search(start+1, end, start_seq, start_seq+3);
            x264_nal_t nal;
            // start[3] is the NAL unit header byte right after the 3-byte
            // start code: bits 5-6 = nal_ref_idc, bits 0-4 = nal_unit_type.
            nal.i_ref_idc = start[3]>>5;
            nal.i_type = start[3]&0x1f;
            // Force IDR slices to the highest drop priority regardless of
            // what the encoder wrote in nal_ref_idc.
            if(nal.i_type == NAL_SLICE_IDR)
                nal.i_ref_idc = NAL_PRIORITY_HIGHEST;
            // p_payload still points at the start code itself; i_payload is
            // the span up to the next start code (or end of buffer).
            nal.p_payload = start;
            nal.i_payload = int(next-start);
            nalOut << nal;
            start = next;
        }
        size_t nalNum = nalOut.Num();

        packets.Clear();
        ClearPackets();

        if(bRequestKeyframe)
            bRequestKeyframe = false;

        if(!bFirstFrameProcessed && nalNum)
        {
            //delayOffset = -picOut.i_dts;
            bFirstFrameProcessed = true;
        }

        INT64 ts = INT64(outputTimestamp);
        int timeOffset = 0;//int((picOut.i_pts+delayOffset)-ts);

        if(bDupeFrames)
        {
            //if frame duplication is being used, the shift will be insignificant, so just don't bother adjusting audio
            timeOffset += frameShift;

            if(nalNum && timeOffset < 0)
            {
                frameShift -= timeOffset;
                timeOffset = 0;
            }
        }
        else
        {
            timeOffset += ctsOffset;

            //dynamically adjust the CTS for the stream if it gets lower than the current value
            //(thanks to cyrus for suggesting to do this instead of a single shift)
            if(nalNum && timeOffset < 0)
            {
                ctsOffset -= timeOffset;
                timeOffset = 0;
            }
        }

        // FLV/RTMP video tags carry the composition time offset as a 24-bit
        // big-endian value: convert the 32-bit offset to network byte order,
        // then point past its first (most significant) byte so the low three
        // bytes can be copied straight into the packet header below.
        timeOffset = htonl(timeOffset);

        BYTE *timeOffsetAddr = ((BYTE*)&timeOffset)+1;

        VideoPacket *newPacket = NULL;

        PacketType bestType = PacketType_VideoDisposable;
        bool bFoundFrame = false;

        // NOTE(review): i is int while nalNum is size_t — signed/unsigned
        // comparison; harmless for realistic NAL counts but worth cleaning up.
        for(int i=0; i<nalNum; i++)
        {
            x264_nal_t &nal = nalOut[i];

            if(nal.i_type == NAL_SEI)
            {
                // Advance past the start code (p_payload still includes it).
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                int newPayloadSize = (nal.i_payload-skipBytes);

                // p_payload[skipBytes+1] is the SEI payload type; 0x5 is
                // presumably user_data_unregistered, which is stashed in
                // SEIData (separate from the frame packet) — other SEI is
                // appended to the current frame packet. TODO confirm against
                // the code that consumes SEIData.
                if (nal.p_payload[skipBytes+1] == 0x5) {
                    SEIData.Clear();
                    BufferOutputSerializer packetOut(SEIData);

                    // NALs are written length-prefixed (4-byte big-endian
                    // size), i.e. AVCC/MP4 style rather than Annex-B.
                    packetOut.OutputDword(htonl(newPayloadSize));
                    packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
                } else {
                    if (!newPacket)
                        newPacket = CurrentPackets.CreateNew();

                    BufferOutputSerializer packetOut(newPacket->Packet);

                    packetOut.OutputDword(htonl(newPayloadSize));
                    packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
                }
            }
            /*else if(nal.i_type == NAL_FILLER) //QSV does not produce NAL_FILLER
            {
            BYTE *skip = nal.p_payload;
            while(*(skip++) != 0x1);
            int skipBytes = (int)(skip-nal.p_payload);

            int newPayloadSize = (nal.i_payload-skipBytes);

            if (!newPacket)
            newPacket = CurrentPackets.CreateNew();

            BufferOutputSerializer packetOut(newPacket->Packet);

            packetOut.OutputDword(htonl(newPayloadSize));
            packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
            }*/
            else if(nal.i_type == NAL_SLICE_IDR || nal.i_type == NAL_SLICE)
            {
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                if (!newPacket)
                    newPacket = CurrentPackets.CreateNew();

                // First slice of the frame: prepend the 5-byte FLV video tag
                // header — frame type + codec id (0x17 = keyframe/AVC,
                // 0x27 = interframe/AVC), AVCPacketType 1 (NALU), and the
                // 24-bit composition time offset prepared above.
                if (!bFoundFrame)
                {
                    newPacket->Packet.Insert(0, (nal.i_type == NAL_SLICE_IDR) ? 0x17 : 0x27);
                    newPacket->Packet.Insert(1, 1);
                    newPacket->Packet.InsertArray(2, timeOffsetAddr, 3);

                    bFoundFrame = true;
                }

                int newPayloadSize = (nal.i_payload-skipBytes);
                BufferOutputSerializer packetOut(newPacket->Packet);

                packetOut.OutputDword(htonl(newPayloadSize));
                packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);

                // Record the highest nal_ref_idc priority seen so the whole
                // frame gets one drop-priority classification.
                switch(nal.i_ref_idc)
                {
                case NAL_PRIORITY_DISPOSABLE:   bestType = MAX(bestType, PacketType_VideoDisposable);  break;
                case NAL_PRIORITY_LOW:          bestType = MAX(bestType, PacketType_VideoLow);         break;
                case NAL_PRIORITY_HIGH:         bestType = MAX(bestType, PacketType_VideoHigh);        break;
                case NAL_PRIORITY_HIGHEST:      bestType = MAX(bestType, PacketType_VideoHighest);     break;
                }
            }
            /*else if(nal.i_type == NAL_SPS)
            {
            VideoPacket *newPacket = CurrentPackets.CreateNew();
            BufferOutputSerializer headerOut(newPacket->Packet);

            headerOut.OutputByte(0x17);
            headerOut.OutputByte(0);
            headerOut.Serialize(timeOffsetAddr, 3);
            headerOut.OutputByte(1);
            headerOut.Serialize(nal.p_payload+5, 3);
            headerOut.OutputByte(0xff);
            headerOut.OutputByte(0xe1);
            headerOut.OutputWord(htons(nal.i_payload-4));
            headerOut.Serialize(nal.p_payload+4, nal.i_payload-4);

            x264_nal_t &pps = nalOut[i+1]; //the PPS always comes after the SPS

            headerOut.OutputByte(1);
            headerOut.OutputWord(htons(pps.i_payload-4));
            headerOut.Serialize(pps.p_payload+4, pps.i_payload-4);
            }*/
            else
                continue;
        }

        packetTypes << bestType;

        // Expose the serialized packets to the caller as raw pointer/size
        // pairs; the underlying storage remains owned by CurrentPackets.
        packets.SetSize(CurrentPackets.Num());
        for(UINT i=0; i<packets.Num(); i++)
        {
            packets[i].lpPacket = CurrentPackets[i].Packet.Array();
            packets[i].size     = CurrentPackets[i].Packet.Num();
        }

        return true;
    }