void CDahuaChannel::DoRealDataCallBack(LLONG lRealHandle, DWORD dwDataType, BYTE *pBuffer, DWORD dwBufSize, LONG param)
{
	if (dwDataType == 0)	// original stream data: feed the frame analyzer
	{
		ANA_InputData(m_parser, (unsigned char *)pBuffer, dwBufSize);
		if (ANA_GetNextFrame(m_parser, &m_frame) == 0)
		{
			if (m_frame.nType == FRAME_TYPE_VIDEO)
			{
				int nOffset = 4;	// skip the leading 4-byte start code

				J_StreamHeader streamHeader = { 0 };
				TLock(m_vecLocker);
				std::vector<RingBufferInfo>::iterator it = m_vecRingBuffer.begin();
				for (; it != m_vecRingBuffer.end(); it++)
				{
					if (m_frame.nSubType == TYPE_VIDEO_I_FRAME)
					{
						// each I-frame is preceded by three small header units
						// (9, 4 and 5 bytes), each behind a 4-byte start code;
						// push them first so a new consumer can start decoding
						streamHeader.frameType = jo_video_a_frame;
						streamHeader.dataLen = 9;
						it->pRingBuffer->PushBuffer((const char *)m_frame.pFrameBody + nOffset, streamHeader);
						nOffset += (9 + 4);

						streamHeader.frameType = jo_video_a_frame;
						streamHeader.dataLen = 4;
						it->pRingBuffer->PushBuffer((const char *)m_frame.pFrameBody + nOffset, streamHeader);
						nOffset += (4 + 4);

						streamHeader.frameType = jo_video_a_frame;
						streamHeader.dataLen = 5;
						it->pRingBuffer->PushBuffer((const char *)m_frame.pFrameBody + nOffset, streamHeader);
						nOffset += (5 + 4);

						it->isNeedIFrame = false;
					}
					
					// forward frames only once the consumer has received an I-frame
					if (it->isNeedIFrame == false)
					{
						streamHeader.frameType = (m_frame.nSubType == TYPE_VIDEO_I_FRAME) ? jo_video_i_frame : jo_video_p_frame;
						streamHeader.dataLen = m_frame.nBodyLength - nOffset;
						it->pRingBuffer->PushBuffer((const char *)m_frame.pFrameBody + nOffset, streamHeader);
					}
				}
				TUnlock(m_vecLocker);
			}
			else if (m_frame.nType == FRAME_TYPE_AUDIO)
			{
				// audio frames are not forwarded yet
			}
			else
			{
				// other frame types are ignored
			}
		}
	}
	else if (dwDataType == 1)	// video
	{
		tagVideoFrameParam *frameHeader = (tagVideoFrameParam *)param;
		J_StreamHeader streamHeader = { 0 };
		streamHeader.frameType = (frameHeader->frametype == 0) ? jo_video_i_frame : jo_video_p_frame;
		streamHeader.dataLen = dwBufSize;
		TLock(m_vecLocker);
		std::vector<RingBufferInfo>::iterator it = m_vecRingBuffer.begin();
		for (; it != m_vecRingBuffer.end(); it++)
		{
			it->pRingBuffer->PushBuffer((const char *)pBuffer, streamHeader);
		}
		TUnlock(m_vecLocker);
	}
	else if (dwDataType == 3)	// audio
	{
		// PCM audio parameters are received but not forwarded yet
		tagCBPCMDataParam *pcmHeader = (tagCBPCMDataParam *)param;
		(void)pcmHeader;
	}
}
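The fixed offsets in the I-frame branch above (a 4-byte prefix skipped before payloads of 9, 4 and 5 bytes) look like Annex-B start codes in front of the parameter sets that this particular stream places before every I-frame. A more general approach is to scan for the start codes instead of hard-coding the lengths; the sketch below is a minimal version of that, assuming plain Annex-B H.264 with 4-byte start codes. SplitAnnexB and PushUnit are hypothetical names, with PushUnit standing in for the it->pRingBuffer->PushBuffer(...) call.

#include <cstddef>
#include <cstring>

// Split an Annex-B H.264 buffer into NAL units by scanning for the
// 00 00 00 01 start codes instead of relying on fixed offsets.
// Note: 3-byte start codes (00 00 01) are not handled here, matching
// the 4-byte assumption made by the callback above.
static void SplitAnnexB(const unsigned char *pData, size_t nSize,
                        void (*PushUnit)(const unsigned char *, size_t))
{
	const unsigned char startCode[4] = { 0x00, 0x00, 0x00, 0x01 };
	size_t nalStart = 0;
	bool inNal = false;

	for (size_t i = 0; i + 4 <= nSize; )
	{
		if (memcmp(pData + i, startCode, 4) == 0)
		{
			if (inNal && i > nalStart)
				PushUnit(pData + nalStart, i - nalStart);	// emit the previous unit
			nalStart = i + 4;	// payload begins right after the start code
			inNal = true;
			i += 4;
		}
		else
		{
			++i;
		}
	}
	if (inNal && nSize > nalStart)
		PushUnit(pData + nalStart, nSize - nalStart);	// emit the last unit
}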
Example #2
		EnumErrno Analyze(const BYTE *pData, INT iSize )
		{
			INT iRet;
			ANA_FRAME_INFO stInfo;
			CFrameCache *pFrame = NULL;
			EnumGSCodeID eCodeId =  GS_CODEID_NONE;
			EnumGSMediaType eMediaType = GS_MEDIA_TYPE_NONE;

			BOOL bKey = FALSE;


			while( iSize > 0 )
			{
				iRet = ANA_InputData(m_hANA, (uint8 *)pData, iSize);

				if( iRet >= 0 )
				{
					// iRet bytes of pData were consumed by the analyzer
					iSize -= iRet;
					pData += iRet;
				}
				else
				{
					// the analyzer rejected the data; drop the rest of this buffer
					iSize = 0;
					//	ANA_Reset(m_hANA, 0);
				}

				// drain every complete frame the analyzer has buffered
				while( 0 == (iRet = ANA_GetMediaFrame(m_hANA, &stInfo)) )
				{
					eCodeId = GS_CODEID_NONE;				
					eMediaType = GS_MEDIA_TYPE_NONE;
					bKey = FALSE;
					if( stInfo.nType == FRAME_TYPE_VIDEO  )
					{				
						eMediaType = GS_MEDIA_TYPE_VIDEO;

						if( stInfo.nSubType == TYPE_VIDEO_I_FRAME)
						{
							bKey = TRUE;
						}

						switch( stInfo.nEncodeType )
						{
						case ENCODE_VIDEO_MPEG4 :
							{
								eCodeId = GS_CODEID_ST_MP4;							
							}
							break;
						case ENCODE_VIDEO_DH_H264 :
						case ENCODE_VIDEO_HI_H264 :
							{
								eCodeId = GS_CODEID_ST_H264;							
							}
							break;
						case ENCODE_VIDEO_JPEG :
							{
								eCodeId = GS_CODEID_ST_MP4;							
							}
							break;
						default :
							{
								eCodeId = GS_CODEID_NONE;
							}
							break;
						} // end switch

					}
					else if( stInfo.nType == FRAME_TYPE_AUDIO  )
					{
						eMediaType = GS_MEDIA_TYPE_AUDIO;
						bKey = TRUE;
						switch( stInfo.nEncodeType )
						{
						case ENCODE_AUDIO_PCM :
							{
								eCodeId = GS_CODEID_AUDIO_ST_PCM;
							}
							break;
						case ENCODE_AUDIO_G729 :						
							{
								eCodeId = GS_CODEID_AUDIO_ST_G729;							
							}
							break;
						case ENCODE_AUDIO_G721 :
							{
								eCodeId = GS_CODEID_AUDIO_ST_G721;

							}
							break;
						case ENCODE_AUDIO_G711A :
							{
								eCodeId = GS_CODEID_AUDIO_ST_G711A;

							}
							break;
						case ENCODE_AUDIO_G711U :
							{
								eCodeId = GS_CODEID_AUDIO_ST_G711U;							
							}
							break;
						case ENCODE_AUDIO_G723 :
							{
								eCodeId = GS_CODEID_AUDIO_ST_G723;							
							}
							break;
						case ENCODE_VIDEO_H263 :
							{
								eCodeId = GS_CODEID_AUDIO_ST_H263;
							}
							break;
						default :
							{
								eCodeId = GS_CODEID_NONE;
							}
							break;
						} // end switch
					}
					else
					{
						// other data types are not needed
						continue;
					}

					UINT iChn = GetMediaChannel(eMediaType);
					if( iChn >= GSP_MAX_MEDIA_CHANNELS || eCodeId == GS_CODEID_NONE /*|| !stInfo.bValid*/ )
					{
						/*GS_ASSERT(!stInfo.bValid);*/
						continue;
					}
					if( eCodeId == m_vInfoCtx[iChn].eCodeId )
					{
						// codec unchanged: nothing to do
					}
					else if( m_vInfoCtx[iChn].eCodeId == GS_CODEID_NONE )
					{
						if( eMediaType == GS_MEDIA_TYPE_AUDIO )
						{
							UINT iTempCh = GetMediaChannel(GS_MEDIA_TYPE_VIDEO);
							if( iTempCh>=GSP_MAX_MEDIA_CHANNELS 
								|| m_vInfoCtx[iTempCh].eCodeId == GS_CODEID_NONE)
							{
								// wait until the video channel's codec is known before accepting audio
								continue;
							}
						}
						m_vInfoCtx[iChn].eCodeId = eCodeId;					
					}
					else 
					{
						// the encoding changed mid-stream
						GS_ASSERT(0);
						return eERRNO_SYS_ECODEID;
					}

					StruBaseBuf stTemp;
					bzero(&stTemp, sizeof(stTemp));
					if( m_bOutFactorStream )
					{
						// vendor (private) stream: keep the vendor frame header
						stTemp.iSize = stInfo.nLength;
						stTemp.pBuffer =  (BYTE*)stInfo.pHeader; 
					}
					else
					{
						// raw standard stream: frame body only
						stTemp.iSize = stInfo.nBodyLength;
						stTemp.pBuffer =  (BYTE*)stInfo.pFrameBody; 
					}
					pFrame = pFrame->Create(&stTemp, 1);

					GS_ASSERT(pFrame);
					if( pFrame )
					{
						pFrame->m_stFrameInfo.iChnNo = iChn;
						pFrame->m_stFrameInfo.bKey = bKey;
						pFrame->m_stFrameInfo.bSysHeader = (eMediaType == GS_MEDIA_TYPE_SYSHEADER);
						pFrame->m_stFrameInfo.eMediaType = eMediaType;
						if ( stInfo.nYear > 0 )
						{
							// frames that carry an absolute capture time refresh the cached timestamp
							m_iLastTime = MakeTime(stInfo.nYear,stInfo.nMonth,
								stInfo.nDay,stInfo.nHour,
								stInfo.nMinute,stInfo.nSecond);
						}
						pFrame->m_stFrameInfo.iTimestamp  = m_iLastTime;
						// a non-zero return means the frame could not be queued; release it
						if( m_listFrameCache.AddTail(pFrame) )
						{
							GS_ASSERT(0);
							pFrame->UnrefObject();
						}
					}
				} // end while(  0==(iRet
			} // end while(iSize>0)
			return eERRNO_SUCCESS;
		}
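MakeTime is not shown in this example; assuming it converts the broken-down capture time carried in ANA_FRAME_INFO into a Unix timestamp, a minimal sketch on top of std::mktime could look like the following (the name and argument order are taken from the call above, everything else is an assumption).

#include <cstring>
#include <ctime>

// Hypothetical sketch of the MakeTime helper used above: converts the
// year/month/day/hour/minute/second fields from ANA_FRAME_INFO into a
// Unix timestamp (interpreted as local time), returning 0 on failure.
static time_t MakeTime(int nYear, int nMonth, int nDay,
                       int nHour, int nMinute, int nSecond)
{
	struct tm t;
	memset(&t, 0, sizeof(t));
	t.tm_year  = nYear - 1900;	// tm_year counts years since 1900
	t.tm_mon   = nMonth - 1;	// tm_mon is zero-based
	t.tm_mday  = nDay;
	t.tm_hour  = nHour;
	t.tm_min   = nMinute;
	t.tm_sec   = nSecond;
	t.tm_isdst = -1;		// let the C library decide on DST

	time_t result = mktime(&t);
	return (result == (time_t)-1) ? 0 : result;
}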