Example #1
void ConfigEngine::Thread()
{
	DWORD				bEOF;
	DWORD				uTimeOut;
	BYTE				*pData;
	FILETIME			timestamp;
	BOOL				bRun = TRUE;
	DWORD				dwFrameRate = 0;
	__int64				iRefClock = 0;
	__int64				iStreamClock = 0;
	unsigned int		uiSleepTime = 33;
	__int64				iLastStreamTime = 0;
	__int64				iLastRefTime = 0;
	float				fAvgTimeBetweenFrames = 0;

	CoInitialize(NULL);

	m_pVideoSource->Start();

	VERIFY( m_pVideoSource->Control( IVCAVideoSource::CMD_GET_FRAMERATE, (DWORD_PTR)&dwFrameRate, 0 ) );

	//CheckFormatChange();

	ASSERT( dwFrameRate > 0 );
	// Prevent divide by 0 error
	if( !dwFrameRate ) dwFrameRate = 30;

	uTimeOut	= (1000/dwFrameRate);	// Milliseconds per frame; WaitForMultipleObjects takes ms

	HANDLE	hEvents[2];
	hEvents[0] =	m_hEndEvent;
	hEvents[1] =	m_pVideoSource->GetEvent();
	DWORD	dwNumHandles = 1;

	if( hEvents[1] )
	{
		// Source signals frame availability via its event, so wait on it
		// instead of polling on a timeout
		uTimeOut = INFINITE;
		dwNumHandles = 2;
	}

	while( bRun )
	{
		switch( WaitForMultipleObjects( dwNumHandles, hEvents, FALSE, uTimeOut ) )
		{
			case WAIT_OBJECT_0:
			{
				bRun = FALSE;
			}
			break;

			//-----------------------------------------------------------------------------
			// SIMPLE streaming
			case WAIT_TIMEOUT:
			{
				if( m_pVideoSource->GetRawImage(&pData, &timestamp, &bEOF))
				{
					// Got one frame, must be streaming
					m_ulStatus = VCA_ENGINE_STREAMING;

					CheckFormatChange();

					OnNewFrame( pData, &m_bih, (VCA5_TIMESTAMP *)&timestamp );

					m_pVideoSource->ReleaseRawImage();
				}
				else
				{
					TRACE("Engine[%d] Can not GetRawImage \n", m_ulEngineId);

					// Check for EOF
					if( bEOF )
					{
						bRun = FALSE;
					}
				}
			}
			break;

			//-----------------------------------------------------------------------------
			// ADAPTIVE streaming
			case WAIT_OBJECT_0+1:
			{

				while(bRun && m_pVideoSource->GetRawImage(&pData, &timestamp, &bEOF))
				{
					// Got one frame, must be streaming
					m_ulStatus = VCA_ENGINE_STREAMING;

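					// Reference clock: GetTickCount() returns wall-clock milliseconds;
					// multiplying by 10,000 converts to FILETIME's 100ns units so it can
					// be compared with the frame timestamps. (GetTickCount() wraps after
					// ~49.7 days, so long-running captures should prefer GetTickCount64.)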
					if( !iRefClock ) iRefClock = ((__int64)GetTickCount()) * 10000;
					int iTicks = GetTickCount();

					__int64 iRefDiff = (((__int64)iTicks) * 10000) - iRefClock;


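					// Rebuild the frame's 64-bit FILETIME timestamp (100ns units)
					// from its high and low DWORD halves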
					__int64 ts = timestamp.dwHighDateTime;
					ts <<= 32;
					ts |= timestamp.dwLowDateTime;


					if( !iStreamClock ) iStreamClock = ts;

					__int64 iStreamDiff = ts - iStreamClock;

					// Now, the time we have to wait is the difference between stream and reference time (stream time in the future)
					int iDiff = (int)(iStreamDiff/10000) - (int)(iRefDiff/10000);

					__int64 iLastDiff = ts - iLastStreamTime;
					__int64 iLastRefDiff = (__int64)iTicks - iLastRefTime;



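					// Exponentially weighted moving average of the inter-frame gap:
					// 90% history, 10% newest sample, seeded at 33ms (~30fps)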
					if( !fAvgTimeBetweenFrames ) fAvgTimeBetweenFrames = 33.0f;//(float)(iLastDiff/10000);

					fAvgTimeBetweenFrames = (0.9f * fAvgTimeBetweenFrames) + (0.1f * (float)(iLastDiff/10000));

					iLastStreamTime = ts;
					iLastRefTime = iTicks;

					if( abs(iDiff) > 1000 )
					{
						// Way off the mark here, reset
						TRACE( _T("-------RESETTING----------\n"));

						iRefClock = 0;
						iStreamClock = 0;
						iDiff = 10;
					}
					else
					{
						// Everything seems to be in order, so try and catch up a bit, depending on the size of the video buffer
				//		OutputDebugString(_T("----OK\n"));

						unsigned int uiMaxLen, uiCurLen;
						if( m_pVideoSource->GetBufferLen( &uiMaxLen, &uiCurLen ) )
						{
							// Figure out how full the buffer is
							float fFullPct = (float)uiCurLen/(float)uiMaxLen;

							unsigned int uiMsInBuffer = uiCurLen * (unsigned int)fAvgTimeBetweenFrames;

					//		CString s; s.Format( _T("AVG frame time: %dms. MS in buffer: %d\n"), (int)fAvgTimeBetweenFrames, uiMsInBuffer );
					//		OutputDebugString(s);


							// Try to keep the buffer roughly half full
							if( uiCurLen > (uiMaxLen/2) )
							{
								iRefClock -= ( 10000);
					//			OutputDebugString(_T("SPEED UP\n"));
							}
							else
							if( uiCurLen < (uiMaxLen/2))
							{
								iRefClock += 10000;
								// Slow down a bit, getting a bit close to the edge
					//			OutputDebugString(_T("SLOW DOWN\n"));
							}
						}
						else
						{
							// Video source needs to provide this info to use adaptive streaming
							ASSERT( FALSE );
						}
					}

					// Update waiting time
					uiSleepTime = max( 0, min( 1000, iDiff ));

					// Wait for this long
				//	CString s; s.Format(_T("WAIT:%d\n"), uiSleepTime );
				//	OutputDebugString(s);
					switch( WaitForSingleObject( m_hEndEvent, uiSleepTime ) )
					{
						case WAIT_OBJECT_0:
						{
							// Shutting down: release the frame we still hold before exiting
							m_pVideoSource->ReleaseRawImage();
							bRun = FALSE;
						}
						break;

						case WAIT_TIMEOUT:
						{
							// Deliver the frame
							CheckFormatChange();

							OnNewFrame( pData, &m_bih, (VCA5_TIMESTAMP *)&timestamp );

							m_pVideoSource->ReleaseRawImage();
						}
						break;
					}
				}
			}
			break;
			
		}
	}

	m_ulStatus = VCA_ENGINE_READY;

	m_pVideoSource->Stop();

	TRACE("End of Engine [%d] \n", m_ulEngineId);
	CoUninitialize();
}
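The adaptive branch above paces delivery by anchoring two clocks on the first frame: a wall-clock reference (GetTickCount() scaled to FILETIME's 100ns units) and the stream's timestamp clock. Each subsequent frame waits out the difference between the two, and both clocks are re-anchored whenever they drift more than a second apart. Below is a minimal, self-contained sketch of just that pacing logic; the FramePacer/WaitMs names are hypothetical and the code assumes Windows plus C++11, not the original project's headers.

#include <windows.h>
#include <cstdlib>

class FramePacer
{
public:
	// Returns how many milliseconds to wait before delivering a frame whose
	// FILETIME timestamp (100ns units) has been packed into 'ts'.
	DWORD WaitMs(__int64 ts)
	{
		__int64 now = (__int64)GetTickCount64() * 10000;	// ms -> 100ns units

		if( !m_iRefClock )    m_iRefClock = now;	// anchor wall clock on first frame
		if( !m_iStreamClock ) m_iStreamClock = ts;	// anchor stream clock on first frame

		// How far stream time has advanced relative to elapsed wall time, in ms
		int iDiff = (int)((ts  - m_iStreamClock) / 10000)
		          - (int)((now - m_iRefClock)    / 10000);

		if( abs(iDiff) > 1000 )
		{
			// Clocks drifted more than a second apart: re-anchor on the next frame
			m_iRefClock = 0;
			m_iStreamClock = 0;
			iDiff = 10;
		}
		return (DWORD)max( 0, min( 1000, iDiff ));
	}

private:
	__int64 m_iRefClock = 0;
	__int64 m_iStreamClock = 0;
};

A caller would wait on its shutdown event for WaitMs(ts) milliseconds, as the example does with WaitForSingleObject, and deliver the frame when the wait times out.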
Example #2
File: Engine.cpp Project: anyboo/UCADemo
void CEngine::Thread()
{
	DWORD				bEOF;
	DWORD				uTimeOut;
	BYTE				*pData;
	FILETIME			timestamp;
	BOOL				bRun = TRUE;
	DWORD				dwFrameRate = 0;

	CoInitialize(NULL);

	m_pVideoSource->Start();

	VERIFY( m_pVideoSource->Control( IVCAVideoSource::CMD_GET_FRAMERATE, (DWORD_PTR)&dwFrameRate, 0 ) );

//	CheckFormatChange();

	//ASSERT( dwFrameRate > 0 );
	// Prevent divide by 0 error
	if( !dwFrameRate ) dwFrameRate = 30;

	HANDLE	hEvents[2];
	hEvents[0] =	m_hEndEvent;
	hEvents[1] =	m_pVideoSource->GetEvent();
	DWORD dwNumHandles;

	if( hEvents[1] )
	{
		// Scheduling will be done by video source - they tell us when frame is ready by firing event
		uTimeOut = 1000;	// 1-second safety timeout rather than INFINITE, in case an event is missed

		dwNumHandles = 2;
	}
	else
	{
		// We do the scheduling and check back at regular intervals to retrieve the next frame
		uTimeOut = (1000/(dwFrameRate));

		dwNumHandles = 1;
	}

	int iFrames = 0;
	while( bRun ) 
	{
		DWORD dwResult = WaitForMultipleObjects(dwNumHandles, hEvents, FALSE, uTimeOut);
		switch( dwResult )
		{
			case WAIT_OBJECT_0:
			{
				// Die die die
				bRun = FALSE;
				TRACE(_T("CEngine::Thread DIE DIE DIE Engine Id[%d] \n"), m_ulEngineId);
			}
			break;

			case WAIT_OBJECT_0+1:
			case WAIT_TIMEOUT:
			{
				if( m_pVideoSource->GetRawImage(&pData, &timestamp, &bEOF))
				{
					// Got one frame, must be streaming
					m_ulStatus = VCA_ENGINE_STREAMING;

					CheckFormatChange();

					OnNewFrame( pData, &m_bih, (VCA5_TIMESTAMP *)&timestamp );

					m_pVideoSource->ReleaseRawImage();
				}
				else
				{
					TRACE("Engine[%d] Can not GetRawImage \n", m_ulEngineId);

					// Check for EOF
					if( bEOF )
					{
						bRun = FALSE;
					}
				}
			}
			break;

		}
	}

	m_pVideoSource->Stop();
	

	TRACE("End of Engine [%d] \n", m_ulEngineId);
	CoUninitialize();
}
Example #3
template <class Base>
void GazeboRosThermalCamera_<Base>::OnNewImageFrame(const unsigned char *_image,
    unsigned int _width, unsigned int _height, unsigned int _depth,
    const std::string &_format)
{
  OnNewFrame(_image, _width, _height, _depth, _format);
}
Example #4
//--------------------------------------------------------------------------------------
BOOL CVirtualVideoSource::Control(IVCAVideoSource::eCMD cmd, DWORD_PTR param1, DWORD_PTR param2)
{
	switch( cmd )
	{
		case CMD_SET_VIDEOFORMAT:
		case CMD_SET_COLORFORMAT:
		case CMD_SET_IMGSIZE:
			return TRUE;
			
		case CMD_SET_FRAMERATE:
		//	m_FPS = param1;
			return TRUE;


		case CMD_GET_COLORFORMAT:
		{
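			// mmioFOURCC packs four characters into a DWORD code; map the source's
			// FOURCC (or RGB bit depth) onto the engine's VCA5_COLOR_FORMAT_*
			// constants, defaulting to YUY2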
			ULONG ulColorFmt = VCA5_COLOR_FORMAT_YUY2;
			if( m_bih.biCompression == mmioFOURCC('Y','U','Y','2') )
			{
				ulColorFmt = VCA5_COLOR_FORMAT_YUY2;
			}
			else
			if( m_bih.biCompression == mmioFOURCC('Y','V','1','2') )
			{
				ulColorFmt = VCA5_COLOR_FORMAT_YV12;
			}
			else
			if( m_bih.biCompression == mmioFOURCC('U','Y','V','Y') )
			{
				ulColorFmt = VCA5_COLOR_FORMAT_UYVY;
			}
			else
			if( m_bih.biCompression == BI_RGB )
			{
				if( m_bih.biBitCount == 16 )
				{
					ulColorFmt = VCA5_COLOR_FORMAT_RGB16;
				}
				else
				if( m_bih.biBitCount == 15 )
				{
					ulColorFmt = VCA5_COLOR_FORMAT_RGB15;
				}
				else
				if( m_bih.biBitCount == 24 )
				{
					ulColorFmt = VCA5_COLOR_FORMAT_RGB24;
				}
				else
				{
					// Not supported
				//	ASSERT( FALSE );
				}
			}

			(*(DWORD *) param1) = ulColorFmt;
			return TRUE;
		}

		case CMD_GET_IMGSIZE:
		{
			(*(DWORD *) param1) = VCA5_MAKEIMGSIZE(m_bih.biWidth, m_bih.biHeight);
			return TRUE;
		}
		
		case CMD_GET_FRAMERATE:
			(*(DWORD *) param1) = 30; // Make it up - we'll adjust once we know
			return TRUE;
		
		case CMD_GET_VIDEOFORMAT:
			(*(DWORD *) param1) = VCA5_VIDEO_FORMAT_PAL_B;
			return TRUE;

		case CMD_ADD_MEDIASAMPLE:
		{
			// Braces are required: the case body declares and initialises a local
			IVCAMediaSample *pSamp = (IVCAMediaSample *)param1;
			OnNewFrame( pSamp );
			return TRUE;
		}
	}

	return FALSE;
}
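For reference, a hypothetical caller-side sketch of the same Control interface; pSource stands in for a CVirtualVideoSource pointer, and everything apart from the identifiers visible in the example above is assumed:

	DWORD dwColorFmt = 0;
	if( pSource->Control( IVCAVideoSource::CMD_GET_COLORFORMAT,
	                      (DWORD_PTR)&dwColorFmt, 0 ) )
	{
		// dwColorFmt now holds one of the VCA5_COLOR_FORMAT_* constants
	}

Note that the out-parameter travels as a DWORD_PTR, which is why the casts in Examples #1 and #2 should also be (DWORD_PTR) rather than (DWORD) on 64-bit builds.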