Example #1
void video_set_rate(int id, double rate) {
	get_video(videoStruct, id);
	IMediaSeeking* pSeek;
	HRESULT hr = videoStruct->pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
	
	// Set the playback rate; it cannot be 0.
	hr = pSeek->SetRate(rate);

	pSeek->Release();
}
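Example #1 queries IMediaSeeking from the filter graph and calls SetRate() without checking either HRESULT. A minimal hedged variant of the same call (assuming a valid IGraphBuilder* such as videoStruct->pGraph and the <dshow.h> headers) might look like this:

// Sketch only: a guarded version of the SetRate call from Example #1.
HRESULT SetPlaybackRate(IGraphBuilder* pGraph, double rate)
{
	if (rate == 0.0)              // IMediaSeeking::SetRate rejects a rate of 0
		return E_INVALIDARG;

	IMediaSeeking* pSeek = NULL;
	HRESULT hr = pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
	if (FAILED(hr))
		return hr;

	hr = pSeek->SetRate(rate);    // 1.0 is normal speed; 2.0 plays twice as fast
	pSeek->Release();
	return hr;
}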
STDMETHODIMP
CPosPassThru::GetRate(double * pdRate)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }
    hr = pMS->GetRate(pdRate);
    pMS->Release();
    return hr;
}
Example #3
double video_get_rate(int id) {
	get_videor(videoStruct, id, -1);
	IMediaSeeking* pSeek;
	HRESULT hr = videoStruct->pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
	
	double rate = 0;
	hr = pSeek->GetRate(&rate);

	pSeek->Release();
	
	return rate;
}
Example #4
//----------------------------------------------------------------------------
//! @brief	  	Called just before rendering
//!
//! Records the media time on the media sample.
//! The media time is the start frame plus the current stream time.
//! If the filter's IMediaSeeking interface is not available, the sum of the
//! frames drawn and the frames dropped by this renderer filter is used instead.
//! In that case the number of frames dropped by upstream filters is unknown,
//! so the result is slightly less accurate.
//! @param		pMediaSample : the media sample
//----------------------------------------------------------------------------
void TBufferRenderer::OnRenderStart( IMediaSample *pMediaSample )
{
	CBaseVideoRenderer::OnRenderStart(pMediaSample);

	HRESULT		hr;
	bool		bGetTime = false;
	LONGLONG	Current = 0, Stop = 0;
	IMediaSeeking	*mediaSeeking = NULL;
	if( GetMediaPositionInterface( IID_IMediaSeeking, (void**)&mediaSeeking) == S_OK )
	{
		GUID	Format;
		if( SUCCEEDED(hr = mediaSeeking->GetTimeFormat( &Format ) ) )
		{
			if( SUCCEEDED(hr = mediaSeeking->GetCurrentPosition( &Current )) &&
				SUCCEEDED(hr = mediaSeeking->GetStopPosition( &Stop )) )
			{
				if( IsEqualGUID( TIME_FORMAT_MEDIA_TIME, Format ) )
				{
					double	renderTime = Current / 10000000.0;
					double	stopTime = Stop / 10000000.0;
					REFTIME	AvgTimePerFrame;	// REFTIME: a double-precision floating-point value giving a time in seconds.
					if( SUCCEEDED( hr = get_AvgTimePerFrame( &AvgTimePerFrame ) ) )
					{
						Current = (LONGLONG)(renderTime / AvgTimePerFrame + 0.5);
						Stop = (LONGLONG)(stopTime / AvgTimePerFrame + 0.5);
						bGetTime = true;
					}
				}
				else if( IsEqualGUID( TIME_FORMAT_FRAME, Format ) )
				{
					bGetTime = true;
				}
			}
		}
		mediaSeeking->Release();
		mediaSeeking = NULL;
	}
	LONGLONG	TimeStart = m_StartFrame + m_cFramesDrawn + m_cFramesDropped;
	LONGLONG	TimeEnd = m_StartFrame + m_cFramesDrawn + m_cFramesDropped;
	if( bGetTime == true )
	{
		TimeStart = m_StartFrame + Current;
		TimeEnd = m_StartFrame + Current;
		m_StopFrame = m_StartFrame + static_cast<LONG>(Stop);
	}
	else
	{
		TimeStart = m_StartFrame + m_cFramesDrawn + m_cFramesDropped;
		TimeEnd = m_StartFrame + m_cFramesDrawn + m_cFramesDropped;
		m_StopFrame = 0;
	}
	pMediaSample->SetMediaTime( &TimeStart, &TimeEnd );
}
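The comment block above describes converting the stream's current position (a REFERENCE_TIME in 100-nanosecond units) into a frame number by dividing by the average time per frame. A small hedged helper isolating just that conversion, assuming AvgTimePerFrame is the seconds-per-frame REFTIME returned by get_AvgTimePerFrame():

// Sketch: convert a 100-ns REFERENCE_TIME position to a frame index,
// rounding to the nearest frame as TBufferRenderer::OnRenderStart does.
LONGLONG ReferenceTimeToFrame(LONGLONG rtPosition, double AvgTimePerFrame)
{
	double seconds = rtPosition / 10000000.0;            // 100-ns units -> seconds
	return (LONGLONG)(seconds / AvgTimePerFrame + 0.5);  // seconds -> frames, rounded
}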
STDMETHODIMP
CPosPassThru::IsUsingTimeFormat(const GUID * pFormat)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->IsUsingTimeFormat(pFormat);
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::QueryPreferredFormat(GUID *pFormat)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->QueryPreferredFormat(pFormat);
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::GetAvailable( LONGLONG *pEarliest, LONGLONG *pLatest )
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->GetAvailable( pEarliest, pLatest );
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::GetPositions(LONGLONG *pCurrent, LONGLONG * pStop)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->GetPositions(pCurrent,pStop);
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::CheckCapabilities(DWORD * pCaps)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->CheckCapabilities(pCaps);
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::SetPositions( LONGLONG * pCurrent, DWORD CurrentFlags
			  , LONGLONG * pStop, DWORD StopFlags )
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->SetPositions(pCurrent, CurrentFlags, pStop, StopFlags );
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::ConvertTimeFormat(LONGLONG * pTarget, const GUID * pTargetFormat,
				LONGLONG    Source, const GUID * pSourceFormat )
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->ConvertTimeFormat(pTarget, pTargetFormat, Source, pSourceFormat );
    pMS->Release();
    return hr;
}
HRESULT
CPosPassThru::GetSeekingLongLong
( HRESULT (__stdcall IMediaSeeking::*pMethod)( LONGLONG * )
, LONGLONG * pll
)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (SUCCEEDED(hr))
    {
	hr = (pMS->*pMethod)(pll);
	pMS->Release();
    }
    return hr;
}
STDMETHODIMP
CPosPassThru::SetRate(double dRate)
{
    if (0.0 == dRate) {
		return E_INVALIDARG;
    }

    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }
    hr = pMS->SetRate(dRate);
    pMS->Release();
    return hr;
}
Example #14
long video_get_duration(int id) {
	get_videor(videoStruct, id, -1);
	IMediaSeeking* pSeek;
	HRESULT hr = videoStruct->pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
	DWORD dwCap = 0;
	hr = pSeek->GetCapabilities(&dwCap);
	
	long long duration = 0;
	if (AM_SEEKING_CanSeekAbsolute & dwCap) //TODO: This if check might not be needed
	{
		hr = pSeek->GetDuration(&duration);
	}

	pSeek->Release();
	
	return duration;
}
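Example #14 returns the value from IMediaSeeking::GetDuration() directly; in the default TIME_FORMAT_MEDIA_TIME that value is in 100-nanosecond units, so narrowing it to a long truncates long files. A hedged sketch (assuming the caller only needs millisecond precision) that converts before narrowing:

// Sketch: return the graph duration in milliseconds rather than raw 100-ns units.
long GetDurationMs(IGraphBuilder* pGraph)
{
	IMediaSeeking* pSeek = NULL;
	if (FAILED(pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek)))
		return -1;

	LONGLONG duration = 0;                  // REFERENCE_TIME, 100-ns units
	HRESULT hr = pSeek->GetDuration(&duration);
	pSeek->Release();

	return SUCCEEDED(hr) ? (long)(duration / 10000) : -1;   // 10,000 x 100 ns = 1 ms
}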
Example #15
long video_get_seek(int id) {
	get_videor(videoStruct, id, -1);
	IMediaSeeking* pSeek;
	HRESULT hr = videoStruct->pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
	DWORD dwCap = 0;
	hr = pSeek->GetCapabilities(&dwCap);
	
	long long position = 0;
	if (AM_SEEKING_CanSeekAbsolute & dwCap)
	{
		hr = pSeek->GetCurrentPosition(&position);
	}

	pSeek->Release();
	
	return position;
}
Example #16
void video_set_seek(int id, long position) {
	get_video(videoStruct, id);
	IMediaSeeking* pSeek;
	HRESULT hr = videoStruct->pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
	DWORD dwCap = 0;
	hr = pSeek->GetCapabilities(&dwCap);
		
	if (AM_SEEKING_CanSeekAbsolute & dwCap)
	{
		// Graph can seek to absolute positions.
		REFERENCE_TIME rtNow = position;
		hr = pSeek->SetPositions(
			&rtNow, AM_SEEKING_AbsolutePositioning, 
			NULL, AM_SEEKING_NoPositioning
			);
	}

	pSeek->Release();
}
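Example #16 passes the caller's position straight into SetPositions(); with the default TIME_FORMAT_MEDIA_TIME that position must already be in 100-nanosecond units. A hedged sketch of the same absolute seek when the target is given in seconds instead:

// Sketch: seek to an absolute position expressed in seconds, assuming the
// graph is using TIME_FORMAT_MEDIA_TIME (100-ns units) as in Example #16.
HRESULT SeekToSeconds(IMediaSeeking* pSeek, double seconds)
{
	REFERENCE_TIME rtTarget = (REFERENCE_TIME)(seconds * 10000000.0);
	return pSeek->SetPositions(&rtTarget, AM_SEEKING_AbsolutePositioning,
	                           NULL, AM_SEEKING_NoPositioning);
}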
Example #17
bool gui::dx::audio_playerX::play(const char * url, char* clipBegin, char* clipEnd) {

	EnterCriticalSection(&m_csSequence);

	TRACE(L"%s", L"\n####### -- PLAY DX\n");
	amis::util::Log* p_log = amis::util::Log::Instance();
	p_log->writeTrace("Play DX", "audio_playerX::play");

	m_url.assign(url);

	if (m_graph_builder != NULL)
	{ 
		stop(true);
	}

	if (m_graph_builder == NULL)
	{
		HRESULT hr = CoCreateInstance(CLSID_FilterGraph,0,CLSCTX_INPROC_SERVER,
			IID_IGraphBuilder,(void**)&m_graph_builder);
		if(FAILED(hr))
		{
			win_report_error("CoCreateInstance(CLSID_FilterGraph, ...)", hr);	

			LeaveCriticalSection(&m_csSequence);
			return false;
		}
	}

	//WCHAR wsz[MAX_PATH];
	//MultiByteToWideChar(CP_ACP,0, url.c_str(), -1, wsz, MAX_PATH);

	std::string strFileName;
	strFileName.assign(url);

	USES_CONVERSION;
	LPCWSTR str = A2CW(strFileName.c_str());

	HRESULT hr = m_graph_builder->RenderFile(str, 0);
	if(FAILED(hr)){
		
		amis::util::Log* p_log = amis::util::Log::Instance();
		string log_msg = "CANNOT PLAY: ";	
		log_msg.append(strFileName);
		p_log->writeError(log_msg, "audio_playerX::play");

		//m_graph_builder->Release();
		//m_graph_builder = 0;
		/* 
		if (hr == 0x800c000d)  // XXX This value experimentally determined:-)
		errorX("%s: Unsupported URL protocol", url.c_str());
		else if (hr == VFW_E_CANNOT_CONNECT)
		errorX("%s: Unsupported video format", url.c_str());
		else
		errorX("%s: DirectX error 0x%x", url.c_str(), hr);
		*/
		release_player();
		LeaveCriticalSection(&m_csSequence);
		return false;
	}
#ifdef WITH_TPB_AUDIO_SPEEDUP
	initialize_speedup_filter();
#endif

	if (m_media_control == NULL) { 
		HRESULT hr = m_graph_builder->QueryInterface(IID_IMediaControl, (void **) &m_media_control);
		if(FAILED(hr)) {
			win_report_error("QueryInterface(IID_IMediaControl, ...)", hr);	

			LeaveCriticalSection(&m_csSequence);
			return false;
		}
	}

	if (m_media_position == NULL) { 
		hr = m_graph_builder->QueryInterface(IID_IMediaPosition, (void **) &m_media_position);
		if(FAILED(hr)) {
			win_report_error("QueryInterface(IID_IMediaPosition, ...)", hr);	

			LeaveCriticalSection(&m_csSequence);
			return false;
		}
	}

	if (m_media_event == NULL) { 
		hr = m_graph_builder->QueryInterface(IID_IMediaEvent, (void **) &m_media_event);
		if(FAILED(hr)) {
			win_report_error("QueryInterface(IID_IMediaEvent, ...)", hr);	

			LeaveCriticalSection(&m_csSequence);
			return false;
		}
	}

	if (m_basic_audio == NULL) { 
		hr = m_graph_builder->QueryInterface(IID_IBasicAudio, (void **) &m_basic_audio);
		if(FAILED(hr)) {
			win_report_error("QueryInterface(IID_IBasicAudio, ...)", hr);	
		}
	}

	SmilTimeCode startStop(clipBegin, clipBegin, clipEnd);
	unsigned long begin = startStop.getStart();
	unsigned long end = startStop.getEnd();

	LONGLONG llDuration = 0;

	IMediaSeeking *pIMS;
	if (m_graph_builder->QueryInterface(IID_IMediaSeeking, (void**) &pIMS) == S_OK)
	{

		if (pIMS->SetTimeFormat(&TIME_FORMAT_MEDIA_TIME) == S_OK) {
			if (pIMS->GetPositions(NULL, &llDuration) != S_OK) llDuration = -10000;
		}
		else
		{
			llDuration = -10000;
		}

		long duration = (long) ((llDuration / 10000) & 0xFFFFFFFF);	  

		if (! (begin == 0 && end == 0)) {

			if (begin >= 0 && begin < duration) {

				LONGLONG Value = (LONGLONG) (begin);
				Value *= 10000;
				if (pIMS->SetTimeFormat(&TIME_FORMAT_MEDIA_TIME) == S_OK) {
					if (pIMS->SetPositions(&Value, AM_SEEKING_AbsolutePositioning, NULL, AM_SEEKING_NoPositioning) != S_OK){}
				}
			}
			
			if (end >= 0 && end > begin && end < duration) {
				
				LONGLONG Value = (LONGLONG) (end);
				Value *= 10000;
				if (pIMS->SetTimeFormat(&TIME_FORMAT_MEDIA_TIME) == S_OK) {
					if (pIMS->SetPositions(NULL, AM_SEEKING_NoPositioning, &Value, AM_SEEKING_AbsolutePositioning) != S_OK) {}
				}
			}
		}
		pIMS->Release();
	}

	if (hEventHandler == NULL) {
		unsigned long lpdwThreadID;
		//hEventHandler = (HANDLE) _beginthreadex(NULL, 0, eventHandler, (void*) this, 0, &lpdwThreadID);
		hEventHandler = CreateThread(NULL, 0, &eventHandler, this, 0, &lpdwThreadID);
		//GetCurrentThreadId
		TRACE("\nTHREAD ID (DX_AUDIO_PLAYER): %x\n", lpdwThreadID);
		
		amis::util::Log* p_log = amis::util::Log::Instance();
		string log_msg = "Thread ID: ";
		char strID[10];
		sprintf(strID, "%x", lpdwThreadID);			
		log_msg.append(strID);
		p_log->writeTrace(log_msg, "audio_playerX::play");
	}
#ifdef SINGLE_THREAD_HACK
	SetEvent(m_hEventWakeup);
#endif

	set_volume(s_current_volume);

	hr = m_media_control->Run();
	if(FAILED(hr)) {
		win_report_error("IMediaControl::run()", hr);	
	}

	LeaveCriticalSection(&m_csSequence);
	return true;
}
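The audio player above converts clipBegin/clipEnd from milliseconds to REFERENCE_TIME by multiplying by 10,000, then applies the start and stop positions with two separate SetPositions() calls. A hedged sketch (the helper name and the millisecond inputs are assumptions) that sets both ends of the clip in a single call:

// Sketch: set the start and stop positions of a clip in one SetPositions() call,
// converting milliseconds to 100-ns REFERENCE_TIME units as the player above does.
HRESULT SetClipRange(IMediaSeeking* pSeek, unsigned long beginMs, unsigned long endMs)
{
	LONGLONG rtBegin = (LONGLONG)beginMs * 10000;
	LONGLONG rtEnd   = (LONGLONG)endMs * 10000;

	HRESULT hr = pSeek->SetTimeFormat(&TIME_FORMAT_MEDIA_TIME);
	if (FAILED(hr))
		return hr;

	return pSeek->SetPositions(&rtBegin, AM_SEEKING_AbsolutePositioning,
	                           &rtEnd, AM_SEEKING_AbsolutePositioning);
}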
void CDownloads_VideoPreview::Play()
{
    USES_CONVERSION;

    if (m_bUsingVideo2)
        return;

    if (m_wndVideo.m_player.Get_State () != VFPS_CLOSED)
    {
        if (m_wndVideo.m_player.Get_State () != VFPS_RUNNING)
            m_wndVideo.m_player.Run ();
        return;
    }

    if (m_pActiveDownload)
    {
        HRESULT hr = E_FAIL;
        BOOL bWasErrMsg = FALSE;


        if (S_FALSE == CoInitialize (NULL))
            CoUninitialize ();

        if (m_pActiveDownload->pMgr->IsDone ())
        {
            hr = m_wndVideo.m_player.Open2 (m_pActiveDownload->pMgr->get_OutputFilePathName ());

            if (FAILED (hr))
            {
                fsString strFile = m_pActiveDownload->pMgr->get_OutputFilePathName ();
                LPCSTR psz = strrchr (strFile, '.');
                if (psz++)
                {
                    if (lstrcmpi (psz, "flv") == 0)
                    {
                        CRect rc (0,0,0,0);
                        m_wndVideo.GetWindowRect (&rc);
                        ScreenToClient (&rc);
                        m_wndVideo2.Create (NULL, WS_CHILD, rc, this, 0x111a);
                        m_wndVideo2.SetBackgroundColor (0);
                        m_wndVideo2.LoadMovie (0, ((CFdmApp*)AfxGetApp ())->m_strAppPath + "player.swf");

                        m_wndVideo.ShowWindow (SW_HIDE);
                        m_wndVideo2.ShowWindow (SW_SHOW);
                        m_bUsingVideo2 = TRUE;

                        CString str;
                        str = "<invoke name=\"loadAndPlayVideo\" returntype=\"xml\">";
                        str += "<arguments><string>";
                        str += strFile;
                        str += "</string></arguments></invoke>";
                        m_wndVideo2.CallFunction (str);

                        hr = S_OK;
                    }
                }
            }


        }
        else
        {
            UINT64 nAvail = Get_AvailFileSize ();

            if (nAvail)
            {
                if (FALSE == OpenFile ())
                {
                    hr = E_FAIL;
                    bWasErrMsg = TRUE;
                }
                else
                    hr = m_wndVideo.m_player.Open (m_hFile, nAvail);

                if (FAILED (hr) && bWasErrMsg == FALSE)
                {
                    CloseFile ();
                    hr = m_wndVideo.m_player.Open2 (m_pActiveDownload->pMgr->get_OutputFilePathName ());
                }
            }
            else
                bWasErrMsg = TRUE;
        }

        if (FAILED (hr))
        {
            CloseFile ();

            if (bWasErrMsg == FALSE)
            {
                if (m_dwDXVersion == 0)
                    m_dwDXVersion = fsDirectXVersion::GetCurrentVersion ();

                if (m_dwDXVersion < 0x0800)
                {
                    CString strErr = LS (L_FAILEDTOOPENFILE);
                    strErr += "\n\n";
                    strErr += LS (L_NEEDDX8ORBETTER);
                    MessageBox (strErr, LS (L_ERR), MB_ICONERROR);
                }
                else
                {
                    CString strErr = LS (L_FAILEDTOOPENFILE);
                    strErr += "\n\n";
                    strErr += LS (L_UNKMEDIATYPE);
                    MessageBox (strErr, LS (L_ERR), MB_ICONERROR);
                }
            }

            m_vidseek.Set_MediaSeeking (NULL);
            return;
        }

        if (m_bUsingVideo2 == FALSE)
        {
            IMediaSeeking* pMS = Get_Player ()->Get_MediaSeeking ();
            m_vidseek.Set_MediaSeeking (pMS);
            if (pMS)
                pMS->Release ();

            m_wndVideo.m_player.Run ();
            m_vidman.ApplyVolumeSettings ();
        }
    }
}
Example #19
STDMETHODIMP CTsReaderFilter::Pause()
{
  //m_ShowBufferVideo = INIT_SHOWBUFFERVIDEO;
  //m_ShowBufferAudio = INIT_SHOWBUFFERAUDIO;

  LogDebug("CTsReaderFilter::Pause() - IsTimeShifting = %d - state = %d", IsTimeShifting(), m_State);
  CAutoLock cObjectLock(m_pLock);

  if (m_State == State_Running)
  {
    m_lastPause = GetTickCount();
    m_RandomCompensation = 0;
  }

  //pause filter
  HRESULT hr=CSource::Pause();

  if (!m_bPauseOnClockTooFast)
  {
    //are we using rtsp?
    if (m_fileDuration==NULL)
    {
    //yes, are we busy seeking?
    if (!IsSeeking())
    {
      //not seeking, is rtsp streaming at the moment?
      if (!m_rtspClient.IsRunning())
      {
        //not streaming atm
        double startTime=m_seekTime.Millisecs();
        startTime/=1000.0;

        long Old_rtspDuration = m_rtspClient.Duration() ;
        //clear buffers
        LogDebug("  -- Pause()  ->start rtsp from %f", startTime);
        m_buffer.Clear();
        m_demultiplexer.Flush();

        //start streaming
        m_buffer.Run(true);
        m_rtspClient.Play(startTime,0.0);
//        m_tickCount = GetTickCount();
        LogDebug("  -- Pause()  ->rtsp started");

        //update the duration of the stream
        CPcr pcrStart, pcrEnd, pcrMax ;
        double duration = m_rtspClient.Duration() / 1000.0f ;

        if (m_bTimeShifting)
        {
          // EndPcr is continuously increasing ( until ~26 hours for rollover that will fail ! )
          // So, we refer duration to End, and just update start.
          pcrEnd   = m_duration.EndPcr() ;
          double start  = pcrEnd.ToClock() - duration;
	        if (start<0) start=0 ;
          pcrStart.FromClock(start) ;
          m_duration.Set( pcrStart, pcrEnd, pcrMax) ;     // Pause()-RTSP
        }
        else
        {
          // It's a record, eventually end can increase if recording is in progress, let the end virtually updated by ThreadProc()
          //m_bRecording = (Old_rtspDuration != m_rtspClient.Duration()) ;
          m_bRecording = true; // duration may have not increased in such a short time
        }
        LogDebug("Timeshift %d, Recording %d, StartPCR %f, EndPcr %f, Duration %f",m_bTimeShifting,m_bRecording,m_duration.StartPcr().ToClock(),m_duration.EndPcr().ToClock(),(float)m_duration.Duration().Millisecs()/1000.0f) ;
      }
      else
      {
        //we are streaming at the moment.
       
        //query the current position, so it can resume on un-pause at this position
        //can be required in multiseat with rtsp when changing audio streams 
        IMediaSeeking * ptrMediaPos;
        if (SUCCEEDED(GetFilterGraph()->QueryInterface(IID_IMediaSeeking, (void**)&ptrMediaPos)))
        {
          ptrMediaPos->GetCurrentPosition(&m_seekTime.m_time);
          ptrMediaPos->Release();
        }
        //pause the streaming
        LogDebug("  -- Pause()  ->pause rtsp at position: %f", (m_seekTime.Millisecs() / 1000.0f));
        m_rtspClient.Pause();
      }
    }
    else //we are seeking
    {
      IMediaSeeking * ptrMediaPos;

      if (SUCCEEDED(GetFilterGraph()->QueryInterface(IID_IMediaSeeking, (void**)&ptrMediaPos)))
      {
        LONGLONG currentPos;
        ptrMediaPos->GetCurrentPosition(&currentPos);
        ptrMediaPos->Release();
        double clock = currentPos;clock /= 10000000.0;
        float clockEnd = m_duration.EndPcr().ToClock() ;
        if (clock >= clockEnd && clockEnd > 0 )
        {
          LogDebug("End of rtsp stream...");
          m_demultiplexer.SetEndOfFile(true);
        }
      }
    }
    }
    m_demultiplexer.m_LastDataFromRtsp = GetTickCount() ;
  }

  //is the duration update thread running?
  if (!IsThreadRunning())
  {
    //nop? then start it
    //LogDebug("  CTsReaderFilter::Pause()->start duration thread");
    StartThread();
  }

  LogDebug("CTsReaderFilter::Pause() - END - state = %d", m_State);
  return hr;
}
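The timeshifting branch above keeps the end PCR fixed and recomputes the start of the window from the RTSP-reported duration. A hedged sketch of just that arithmetic, using plain seconds instead of the project's CPcr/CTsDuration types (which are assumed here, not shown):

// Sketch: recompute the start of a timeshift window from a fixed end position
// and an RTSP duration reported in milliseconds, clamping the start at zero.
double TimeshiftWindowStart(double endSeconds, long rtspDurationMs)
{
	double start = endSeconds - (rtspDurationMs / 1000.0);
	return (start < 0) ? 0 : start;
}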
Example #20
void 
recChannel_t::posReaderThread(void)
{
    __int64 position = 0;
    __int64 duration = 0;

    Sleep(1000);
    while (!fControl)
    {
        Sleep(1000);
    }

    fControl->slideMoved = true;

    IMediaSeeking * pSeek = NULL;
    while(true)
    {
        int hr = pGraph->QueryInterface(IID_IMediaSeeking,(void **)&pSeek);
        //read file pos and put at slicer 
        if (pSeek && hr == S_OK)
        {
            if (!fControl->play & fControl->event)
            {
                pControl->StopWhenReady();
                fControl->event = false;
                continue;
            }

            if (fControl->play & fControl->event)
            {
                pControl->Run();
                fControl->event = false;
            }
                
            if (fControl->slideMoved)
            {
                position = fControl->m_slide.GetPos();
                position *=1000000;
                fControl->slideMoved = false;
                pSeek->SetPositions(&position,
                                     AM_SEEKING_AbsolutePositioning,
                                     NULL,
                                     AM_SEEKING_NoPositioning);
            }else{

                if (!duration)
                {
                    pSeek->GetDuration(&duration);
                    duration /= 1000000;
                    fControl->m_slide.SetRangeMin(0,true);
                    fControl->m_slide.SetRangeMax(duration,true);
                }
                pSeek->GetCurrentPosition(&position);
                position /= 1000000;
                fControl->m_slide.SetPos(position);
            }
        }
        if (pSeek)
        {
            pSeek->Release();
            pSeek = NULL;
        }
        Sleep(1000);
    }
}
Example #21
int main(int argc, char* argv[])
{
    ICaptureGraphBuilder2   *pCaptureGraphBuilder = NULL;
    IGraphBuilder           *pGraphBuilder = NULL;
    IBaseFilter             *pSource = NULL;
    IBaseFilter             *pMux = NULL;
    IBaseFilter             *pVideoCompressor = NULL;
    IBaseFilter             *pAudioCompressor = NULL;

    IAMStreamConfig         *pAMStreamConfig = NULL;
    IAMVideoCompression     *pAMVideoCompression = NULL;

    IMediaControl           *pControl = NULL;
    IMediaSeeking           *pSeek = NULL;
    IMediaEvent             *pEvent = NULL;

    HRESULT hr;

    DWORD pdwRegister=0;
    CoInitialize(NULL);

    // Create the capture graph builder.
    CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
                     IID_ICaptureGraphBuilder2, (void **)&pCaptureGraphBuilder);

    // Make the rendering section of the graph.
    pCaptureGraphBuilder->SetOutputFileName(
        &MEDIASUBTYPE_Avi,  // File type.
        L"C:\\STDIUE1.avi",  // File name.
        &pMux,              // pointer to the multiplexer.
        NULL);              // pointer to the file writer.

    // Load the source file.
    pCaptureGraphBuilder->GetFiltergraph(&pGraphBuilder);
    pGraphBuilder->AddSourceFilter(L"C:\\Program Files\\Microsoft Money\\Media\\STDIUE1.avi", L"Source Filter", &pSource);

    // Add the compressor filter.
    CoCreateInstance(CLSID_AVICo, NULL, CLSCTX_INPROC,
                     IID_IBaseFilter, (void **)&pVideoCompressor);
    pGraphBuilder->AddFilter(pVideoCompressor, L"Video Compressor");

    // Render the video stream, through the compressor.
    pCaptureGraphBuilder->RenderStream(
        NULL,       // Output pin category
        NULL,       // Media type
        pSource,       // Source filter
        pVideoCompressor,     // Compressor filter
        pMux);      // Sink filter (the AVI Mux)

    /* CoCreateInstance(CLSID_GSM, NULL, CLSCTX_INPROC,
             IID_IBaseFilter, (void **)&pAudioCompressor);
     pGraphBuilder->AddFilter(pAudioCompressor, L"Audio Compressor");*/

    // Render the audio stream.
    pCaptureGraphBuilder->RenderStream(
        NULL,
        NULL,
        pSource,
        pAudioCompressor,
        pMux);

    // Compress at 100k/second data rate.
    AM_MEDIA_TYPE *pmt;
    pCaptureGraphBuilder->FindInterface(NULL, NULL, pVideoCompressor, IID_IAMStreamConfig, (void **)&pAMStreamConfig);

    pAMStreamConfig->GetFormat(&pmt);

    if (pmt->formattype == FORMAT_VideoInfo)
    {

        ((VIDEOINFOHEADER *)(pmt->pbFormat))->dwBitRate = 100000;

        pAMStreamConfig->SetFormat(pmt);
    }


    // Request key frames every four frames.
    pAMStreamConfig->QueryInterface(IID_IAMVideoCompression, (void **)&pAMVideoCompression);
    pAMVideoCompression->put_KeyFrameRate(4);
    pAMVideoCompression->Release();
    pAMStreamConfig->Release();

    // Run the graph.

    pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pControl);
    pGraphBuilder->QueryInterface(IID_IMediaEvent, (void **)&pEvent);

    hr = pMux->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);


    pControl->Run();
    printf("Recompressing... \n");

    long evCode;
    if (SUCCEEDED(hr))
    {
        REFERENCE_TIME rtTotal, rtNow = 0;
        pSeek->GetDuration(&rtTotal);
        while ((pEvent->WaitForCompletion(1000, &evCode)) == E_ABORT)
        {
            pSeek->GetCurrentPosition(&rtNow);
            printf("%d%%\n", (rtNow * 100)/rtTotal);
        }
        pSeek->Release();
    }
    else  // Cannot update the progress.
    {
        pEvent->WaitForCompletion(INFINITE, &evCode);
    }
    pControl->Stop();
    printf("All done\n");

    pSource->Release();
    pMux->Release();
    pVideoCompressor->Release();
    pAudioCompressor->Release ();
    pControl->Release();
    pEvent->Release();
    pCaptureGraphBuilder->Release();
    pGraphBuilder->Release();
    CoUninitialize();

    return 0;
}
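The progress loop near the end of Example #21 keeps polling IMediaSeeking::GetCurrentPosition() while IMediaEvent::WaitForCompletion() returns E_ABORT (still running). A hedged sketch of the same pattern, isolated from the graph-building code and guarded against a zero duration:

// Sketch: poll playback progress until the graph signals completion.
void PollProgress(IMediaSeeking* pSeek, IMediaEvent* pEvent)
{
	REFERENCE_TIME rtTotal = 0, rtNow = 0;
	if (FAILED(pSeek->GetDuration(&rtTotal)) || rtTotal == 0)
		return;

	long evCode = 0;
	while (pEvent->WaitForCompletion(1000, &evCode) == E_ABORT)   // E_ABORT: not done yet
	{
		pSeek->GetCurrentPosition(&rtNow);
		printf("%d%%\n", (int)((rtNow * 100) / rtTotal));
	}
}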