void CConferenceInfo::doVideoProc(void) { while (!m_killed) { CMemberData::pointer memberData; if (!m_datasv.front(memberData, true)) { #ifdef WIN32 Sleep(20); #else usleep(20000); #endif continue; } CConferenceMember * pDataConferenceMember = (CConferenceMember*)memberData->getRtpParam(); if (pDataConferenceMember->getClosed() || pDataConferenceMember->getAudioHandler() == 0 || pDataConferenceMember->getVideoHandler() == 0) { continue; } BOOST_ASSERT (pDataConferenceMember->getVideoHandler().get() == memberData->getDoRtpHandler().get()); sendVideoFrame(memberData); } }
/*
 * JNI entry point: copies `num` bytes out of the Java byte[] `data`
 * and hands them to the native sender via sendVideoFrame().
 *
 * Fixes vs. original: the old `if(buf == NULL)` test was dead code
 * (a stack array is never NULL) and ran AFTER the buffer had already
 * been filled and used. Real failure modes are checked instead:
 * a NULL array reference, a non-positive length, and a pending
 * ArrayIndexOutOfBoundsException raised by GetByteArrayRegion.
 */
JNIEXPORT void JNICALL Java_cn_edu_hust_buildingtalkback_jni_NativeInterface_sendVideo
  (JNIEnv *env, jclass clazz, jbyteArray data, jint num)
{
    LOGD("BT_VIDEOS -- Begin NativeSend");

    /* Validate inputs before touching the array. */
    if (data == NULL || num <= 0) {
        LOGE("BT_VIDEOS -- buf == NULL");
        return;
    }

    /* NOTE(review): VLA sized by a caller-supplied jint — assumes frames
     * are small enough for the stack; confirm an upper bound with the
     * Java caller. */
    uint8_t buf[num];
    (*env)->GetByteArrayRegion(env, data, 0, num, (jbyte *)buf);

    /* GetByteArrayRegion raises ArrayIndexOutOfBoundsException on a bad
     * range; do not forward garbage — return and let Java see it. */
    if ((*env)->ExceptionCheck(env)) {
        LOGE("BT_VIDEOS -- buf == NULL");
        return;
    }
    LOGD("BT_VIDEOS -- GEt Byte Success");

    LOGD("BT_VIDEOS -- Begin capture_send_buf");
    sendVideoFrame(buf, num);
    LOGD("BT_VIDEOS -- One capture_sended");
}
/*****************************************************************************
 Function:    handleMediaFrame
 Description: Processes one complete PS frame after reassembly: copies the
              RTP payloads into the frame cache, converts the PS stream to
              ES, and forwards the resulting video/audio ES data.
 Input:       rtpFrameList  list of RTP packets making up one PS frame
 Output:      N/A
 Return:      N/A
*****************************************************************************/
void CPs2EsProcessor::handleMediaFrame(RTP_FRAME_LIST_T &rtpFrameList)
{
    if (NULL == m_pExtendHeader)
    {
        ERROR_LOG("m_pExtendHeader is NULL");
        return;
    }
    if (NULL == m_pRtpFrameCache)
    {
        ERROR_LOG("m_pRtpFrameCache is NULL");
        return;
    }
    if (rtpFrameList.empty())
    {
        ERROR_LOG("Handle PS media frame abnormal , the frame list is empty");
        return;
    }
    if (MAX_RTP_PACKET_COUNT < rtpFrameList.size())
    {
        // NOTE: cast to int to match the %d specifier (size() is unsigned).
        ERROR_LOG("Handle PS media frame abnormal , the frame list exceeds the Threshold[1024], the rtp packet count: %d", (int)rtpFrameList.size());
        return;
    }

    // A new frame is starting, so the cache should be empty; rewind if not.
    if (m_pWritePos != m_pRtpFrameCache)
    {
        m_pWritePos = m_pRtpFrameCache;
        //BP_RUN_LOG_INF("Handle PS media frame abnormal", "Write postion not in cache head, serviceID=%s.", m_strServiceID.c_str());
    }

    // Copy the received PS frame packet-by-packet into the cache.
    ACE_Message_Block* pRtpBlock = NULL;
    bool bFirst = true;
    CRtpPacket rtpPacket;
    unsigned int unCacheSize = RTP_FRAME_CACHE_SIZE;
    int iRet = IVS_SUCCEED;
    for (RTP_FRAME_LIST_T_ITER iter = rtpFrameList.begin(); iter != rtpFrameList.end(); ++iter)
    {
        pRtpBlock = *iter;
        iRet = rtpPacket.ParsePacket(pRtpBlock->rd_ptr(), pRtpBlock->length());
        if (IVS_SUCCEED != iRet)
        {
            // Discard the whole frame on any malformed packet.
            m_pWritePos = m_pRtpFrameCache;
            m_pExtendHeader->reset();
            ERROR_LOG("Parse rtp packet fail ,retcode:%d", iRet);
            return;
        }

        if (bFirst)
        {
            // Only the first packet of a frame carries the RTP extension
            // header (MU variant preferred over the plain one).
            if (1 == rtpPacket.GetExtension())
            {
                if (NULL != rtpPacket.GetMuExtData())
                {
                    m_pExtendHeader->copy((char *)rtpPacket.GetMuExtData(), sizeof(RTP_EXTENSION_DATA_MU_S));
                }
                else if (NULL != rtpPacket.GetExtData())
                {
                    m_pExtendHeader->copy((char *)rtpPacket.GetExtData(), sizeof(RTP_EXTENSION_DATA_S));
                }
                else
                {
                    ERROR_LOG("Error extension label");
                }
            }
            if (m_bAppendExtInfo)
            {
                // Record time + stream rate are prepended to the block base.
                // NOTE(review): the NULL test below can only fail if base()
                // itself is NULL — presumably defensive; verify.
                REAL_RECORD_TIME* realRecordTime = (REAL_RECORD_TIME*)(pRtpBlock->base());
                if (NULL != realRecordTime)
                {
                    m_uiRealRecordSecond = realRecordTime->uiSecond;
                    m_uiRealRecordMSecond = realRecordTime->uiMSecond;
                    uint32_t* pStreamRate = (uint32_t*)(pRtpBlock->base() + sizeof(REAL_RECORD_TIME));
                    m_uiReserved = *pStreamRate;
                }
                else
                {
                    ERROR_LOG("Error real record time info");
                }
            }
            bFirst = false;
        }

        // Strip the RTP header, then append the payload to the cache.
        pRtpBlock->rd_ptr(rtpPacket.GetHeadLen());
        if (unCacheSize >= pRtpBlock->length())
        {
            memcpy(m_pWritePos, pRtpBlock->rd_ptr(), pRtpBlock->length());
            m_pWritePos += pRtpBlock->length();
            unCacheSize -= pRtpBlock->length();
        }
        else
        {
            // Cache overflow: drop the frame.
            // FIX: original format string had "&d" instead of "%d", and fed
            // an unsigned size() to %d — cast both arguments to int.
            ERROR_LOG("Current frame is too big exceed cache size 1.5M , will discard part data, rtp package list size=%d,curr rtp package length %d", (int)rtpFrameList.size(), (int)rtpPacket.GetPacketLen());
            m_pWritePos = m_pRtpFrameCache;
            m_pExtendHeader->reset();
            return;
        }
    }

    // Convert the buffered PS data into ES (video + audio).
    int iVideoLen = 0;
    int iAudioLen = 0;
    int iTotalLen = m_pWritePos - m_pRtpFrameCache;
    unsigned char *pFrame = NULL;
    if (NRU_ZERO > iTotalLen || RTP_FRAME_CACHE_SIZE < iTotalLen)
    {
        m_pWritePos = m_pRtpFrameCache;
        m_pExtendHeader->reset();
        ERROR_LOG("Parse PS packet to ES fail and discard curr frame , ulVideoTimeTick:%d,iTotalLen:%d", m_ulVideoTimeTick, iTotalLen);
        return;
    }
    iRet = HSPspkt2ESFrm((unsigned char*)m_pRtpFrameCache, iTotalLen, pFrame, iVideoLen, iAudioLen);
    if (IVS_SUCCEED != iRet)
    {
        m_pWritePos = m_pRtpFrameCache;
        m_pExtendHeader->reset();
        ERROR_LOG("Parse PS packet to ES fail and discard curr frame, ulVideoTimeTick:%d", m_ulVideoTimeTick);
        return;
    }
    // Sanity-check the ES lengths before touching pFrame.
    if ((NRU_ZERO > iVideoLen || RTP_FRAME_CACHE_SIZE < iVideoLen) ||
        (NRU_ZERO > iAudioLen || MAX_AUDIO_LENGTH < iAudioLen))
    {
        m_pWritePos = m_pRtpFrameCache;
        m_pExtendHeader->reset();
        ERROR_LOG("Parse PS packet to ES fail and discard curr frame, ulVideoTimeTick: %d,iVideoLen:%d,iAudioLen:%d", m_ulVideoTimeTick, iVideoLen, iAudioLen);
        return;
    }

    // Forward ES payloads: video first, then audio (audio follows video
    // inside pFrame).
    if (0 != iVideoLen)
    {
        (void)sendVideoFrame((char *)pFrame, iVideoLen);
    }
    if (0 != iAudioLen)
    {
        (void)sendAudioFrame((char *)(pFrame + iVideoLen), iAudioLen);
    }

    // Reset the cache write position for the next frame.
    m_pWritePos = m_pRtpFrameCache;
    m_pExtendHeader->reset();
    return;
}