Example 1
    void vtCallback(void *outputCallbackRefCon,
                    void *sourceFrameRefCon,
                    OSStatus status,
                    VTEncodeInfoFlags infoFlags,
                    CMSampleBufferRef sampleBuffer )
    {
        CMBlockBufferRef block = CMSampleBufferGetDataBuffer(sampleBuffer);
        CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, false);
        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        
        // A sample that does not depend on other samples is a keyframe (IDR).
        bool isKeyframe = false;
        if(attachments != NULL) {
            CFDictionaryRef attachment;
            CFBooleanRef dependsOnOthers;
            attachment = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
            dependsOnOthers = (CFBooleanRef)CFDictionaryGetValue(attachment, kCMSampleAttachmentKey_DependsOnOthers);
            isKeyframe = (dependsOnOthers == kCFBooleanFalse);
        }
        
        if(isKeyframe) {

            // Send the SPS and PPS.
            CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
            size_t spsSize, ppsSize;
            size_t parmCount;
            const uint8_t* sps, *pps;
            
            CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sps, &spsSize, &parmCount, nullptr );
            CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pps, &ppsSize, &parmCount, nullptr );
            
            // Prepend a 4-byte length field to each parameter set. As written, the
            // length includes the prefix itself and is copied in host byte order;
            // compressionSessionOutput() is expected to handle that framing.
            std::unique_ptr<uint8_t[]> sps_buf (new uint8_t[spsSize + 4]);
            std::unique_ptr<uint8_t[]> pps_buf (new uint8_t[ppsSize + 4]);
            
            memcpy(&sps_buf[4], sps, spsSize);
            spsSize += 4;
            memcpy(&sps_buf[0], &spsSize, 4);
            memcpy(&pps_buf[4], pps, ppsSize);
            ppsSize += 4;
            memcpy(&pps_buf[0], &ppsSize, 4);
            
            ((H264Encode*)outputCallbackRefCon)->compressionSessionOutput((uint8_t*)sps_buf.get(),spsSize, pts.value);
            ((H264Encode*)outputCallbackRefCon)->compressionSessionOutput((uint8_t*)pps_buf.get(),ppsSize, pts.value);
        }
       
        // Forward the encoded frame data (length-prefixed NALUs) downstream.
        char* bufferData;
        size_t size;
        CMBlockBufferGetDataPointer(block, 0, NULL, &size, &bufferData);

        ((H264Encode*)outputCallbackRefCon)->compressionSessionOutput((uint8_t*)bufferData, size, pts.value);
        
    }
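For context, this callback only runs after it has been registered with a compression session. Below is a minimal sketch of that registration, assuming it is made from inside an H264Encode method; the width and height values and the error handling are placeholders, not part of the original example.

    // Create the compression session and register vtCallback as its output callback.
    // The second-to-last argument becomes outputCallbackRefCon, which the callback
    // above casts back to H264Encode*.
    VTCompressionSessionRef session = nullptr;
    OSStatus err = VTCompressionSessionCreate(kCFAllocatorDefault,
                                              width, height,           // placeholder frame size
                                              kCMVideoCodecType_H264,
                                              nullptr,                 // encoder specification
                                              nullptr,                 // source image buffer attributes
                                              nullptr,                 // compressed data allocator
                                              vtCallback,
                                              this,                    // becomes outputCallbackRefCon
                                              &session);
    if(err != noErr) {
        // handle the failure (placeholder)
    }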
Example 2
int CVideoEncodeVt::CopySampleBufferToAVPakcet(CMSampleBufferRef sampleBuffer, AVPacket* apPacket)
{
    // Get the size of the NALU length code
    int iLengthCodeSize = 0;
    int ret = GetLengthCodeSize(sampleBuffer, &iLengthCodeSize);
    if(ret < 0)
    {
        CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "GetLengthCodeSize failed!");
        assert(false);
        return -1;
    }
    
    int iHeaderSize = 0;
    bool bIsKeyFrame = IsKeyFrame(sampleBuffer);
    bool bAddHeader = bIsKeyFrame;
    CMVideoFormatDescriptionRef videoFmtDesc = NULL;
    
    // A header (parameter sets) needs to be prepended
    if (bAddHeader)
    {
        videoFmtDesc = CMSampleBufferGetFormatDescription(sampleBuffer);
        if (NULL == videoFmtDesc)
        {
            CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "CMSampleBufferGetFormatDescription failed!");
            assert(false);
            return -1;
        }
      
        // Get the size of the header (parameter sets)
        ret = GetParamSize(videoFmtDesc, &iHeaderSize);
        if(ret < 0)
        {
            CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "GetParamSize failed!");
            assert(false);
            return -1;
        }
    }
    
    // Number of NALUs
    int iNaluCnt = 0;
    ret = GetNaluCnt(sampleBuffer, iLengthCodeSize, &iNaluCnt);
    if(ret < 0)
    {
        CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "GetNaluCnt failed!");
        assert(false);
        return -1;
    }
    int iSampleSize = (int)CMSampleBufferGetTotalSampleSize(sampleBuffer);
    // Each NALU's length prefix is replaced by a start code, so adjust the packet size accordingly.
    int iPacketSize = iHeaderSize + iSampleSize + iNaluCnt * ((int)sizeof(StartCode) - (int)iLengthCodeSize);
    
    
    av_free_packet(apPacket);
    ret = av_new_packet(apPacket, iPacketSize);
    if(ret < 0)
    {
        CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "av_new_packet failed!");
        assert(false);
        return -1;
    }
    
    do
    {
        // Prepend the header
        if(bAddHeader)
        {
            // Copy the parameter sets into the header
            ret = CopyParamSets(videoFmtDesc, apPacket->data, iPacketSize);
            if(ret < 0)
            {
                CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "CopyParamSets failed!");
                assert(false);
                break;
            }
        }
        
        // Copy each NALU
        ret = CopyNalus(sampleBuffer, iLengthCodeSize, apPacket->data + iHeaderSize, apPacket->size - iHeaderSize);
        if(ret < 0)
        {
            CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "CopyNalus failed!");
            assert(false);
            break;
        }
        
        if (bIsKeyFrame)
        {
            apPacket->flags |= AV_PKT_FLAG_KEY;
        }
        
        // Set the timestamps
        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CMTime dts = CMSampleBufferGetDecodeTimeStamp(sampleBuffer);
        
        if (CMTIME_IS_INVALID(dts))
        {
            if (!m_bHasBFrames)
            {
                dts = pts;
            }
            else
            {
                CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "dts is invalid!");
                assert(false);
                break;
            }
        }
        
        int64_t iDtsDelta = m_iDtsDelta >= 0 ? m_iDtsDelta : 0;
        
        apPacket->pts = pts.value / m_iFrameRate;
        apPacket->dts = dts.value / m_iFrameRate - iDtsDelta;
        apPacket->size = iPacketSize;
        
        return 0;
        
    }while(0);
    
    
    av_free_packet(apPacket);
    return -1;
}
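The helpers GetLengthCodeSize, GetNaluCnt and CopyNalus are not shown in this example. As a rough sketch of what a CopyNalus-style routine usually does, the loop below walks the sample's block buffer, reads each big-endian NALU length prefix written by VideoToolbox, and replaces it with an Annex B start code. The StartCode array, the function name and the error handling here are assumptions, not the original implementation.

// Illustrative sketch only: not the original CopyNalus implementation.
static const uint8_t StartCode[4] = {0, 0, 0, 1};

static int CopyNalusSketch(CMSampleBufferRef sampleBuffer, int iLengthCodeSize, uint8_t* pDst, int iDstSize)
{
    CMBlockBufferRef block = CMSampleBufferGetDataBuffer(sampleBuffer);
    size_t totalSize = 0;
    char* pSrc = NULL;
    if(kCMBlockBufferNoErr != CMBlockBufferGetDataPointer(block, 0, NULL, &totalSize, &pSrc))
    {
        return -1;
    }
    
    size_t srcOffset = 0;
    int dstOffset = 0;
    while(srcOffset + iLengthCodeSize <= totalSize)
    {
        // VideoToolbox stores each NALU length prefix in big-endian order
        uint32_t naluLen = 0;
        for(int i = 0; i < iLengthCodeSize; ++i)
        {
            naluLen = (naluLen << 8) | (uint8_t)pSrc[srcOffset + i];
        }
        srcOffset += iLengthCodeSize;
        
        if(srcOffset + naluLen > totalSize ||
           dstOffset + (int)sizeof(StartCode) + (int)naluLen > iDstSize)
        {
            return -1;
        }
        
        // Replace the length prefix with a start code, then copy the NALU payload
        memcpy(pDst + dstOffset, StartCode, sizeof(StartCode));
        dstOffset += (int)sizeof(StartCode);
        memcpy(pDst + dstOffset, pSrc + srcOffset, naluLen);
        dstOffset += (int)naluLen;
        srcOffset += naluLen;
    }
    
    return 0;
}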