Example #1
void video_set_rate(int id, double rate) {
	get_video(videoStruct, id);
	IMediaSeeking* pSeek;
	HRESULT hr = videoStruct->pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
	
	// Set the playback rate; it cannot be 0.
	hr = pSeek->SetRate(rate);

	pSeek->Release();
}
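The examples above and below call SetRate/GetRate without checking whether QueryInterface succeeded. A minimal defensive variant, assuming the same get_video macro and videoStruct->pGraph member (video_set_rate_checked is a hypothetical name):
// Hypothetical defensive variant of video_set_rate.
void video_set_rate_checked(int id, double rate) {
	get_video(videoStruct, id);
	IMediaSeeking* pSeek = NULL;
	HRESULT hr = videoStruct->pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
	if (FAILED(hr) || pSeek == NULL)
		return;

	// SetRate rejects 0.0; reverse (negative) rates are only supported by some filters.
	hr = pSeek->SetRate(rate);

	pSeek->Release();
}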
Example #2
double video_get_rate(int id) {
	get_videor(videoStruct, id, -1);
	IMediaSeeking* pSeek;
	HRESULT hr = videoStruct->pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
	
	double rate = 0;
	hr = pSeek->GetRate(&rate);

	pSeek->Release();
	
	return rate;
}
Example #3
//----------------------------------------------------------------------------
//! @brief	  	Called before rendering
//!
//! Records the media time on the media sample.
//! The media time is the start frame plus the current stream time.
//! If the filter's IMediaSeeking interface is not available, the sum of the
//! frames this renderer has drawn and the frames it has dropped is used
//! instead. In that case the number of frames dropped by upstream filters is
//! unknown, so the result is slightly less accurate.
//! @param		pMediaSample : the media sample
//----------------------------------------------------------------------------
void TBufferRenderer::OnRenderStart( IMediaSample *pMediaSample )
{
	CBaseVideoRenderer::OnRenderStart(pMediaSample);

	HRESULT		hr;
	bool		bGetTime = false;
	LONGLONG	Current = 0, Stop = 0;
	IMediaSeeking	*mediaSeeking = NULL;
	if( GetMediaPositionInterface( IID_IMediaSeeking, (void**)&mediaSeeking) == S_OK )
	{
		GUID	Format;
		if( SUCCEEDED(hr = mediaSeeking->GetTimeFormat( &Format ) ) )
		{
			if( SUCCEEDED(hr = mediaSeeking->GetCurrentPosition( &Current )) &&
				SUCCEEDED(hr = mediaSeeking->GetStopPosition( &Stop )) )
			{
				if( IsEqualGUID( TIME_FORMAT_MEDIA_TIME, Format ) )
				{
					double	renderTime = Current / 10000000.0;
					double	stopTime = Stop / 10000000.0;
					REFTIME	AvgTimePerFrame;	// REFTIME: a double-precision value expressing time in seconds.
					if( SUCCEEDED( hr = get_AvgTimePerFrame( &AvgTimePerFrame ) ) )
					{
						Current = (LONGLONG)(renderTime / AvgTimePerFrame + 0.5);
						Stop = (LONGLONG)(stopTime / AvgTimePerFrame + 0.5);
						bGetTime = true;
					}
				}
				else if( IsEqualGUID( TIME_FORMAT_FRAME, Format ) )
				{
					bGetTime = true;
				}
			}
		}
		mediaSeeking->Release();
		mediaSeeking = NULL;
	}
	LONGLONG	TimeStart = m_StartFrame + m_cFramesDrawn + m_cFramesDropped;
	LONGLONG	TimeEnd = m_StartFrame + m_cFramesDrawn + m_cFramesDropped;
	if( bGetTime == true )
	{
		TimeStart = m_StartFrame + Current;
		TimeEnd = m_StartFrame + Current;
		m_StopFrame = m_StartFrame + static_cast<LONG>(Stop);
	}
	else
	{
		TimeStart = m_StartFrame + m_cFramesDrawn + m_cFramesDropped;
		TimeEnd = m_StartFrame + m_cFramesDrawn + m_cFramesDropped;
		m_StopFrame = 0;
	}
	pMediaSample->SetMediaTime( &TimeStart, &TimeEnd );
}
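When the upstream filter also supports TIME_FORMAT_FRAME, the media-time-to-frame conversion that OnRenderStart performs by dividing by AvgTimePerFrame can instead be done with IMediaSeeking::ConvertTimeFormat. A minimal sketch of that alternative (MediaTimeToFrame is a hypothetical helper, not part of TBufferRenderer):
// Sketch: convert a TIME_FORMAT_MEDIA_TIME position (100-ns units) into a
// frame number via IMediaSeeking::ConvertTimeFormat, when the upstream
// filter can report positions in frames.
static LONGLONG MediaTimeToFrame( IMediaSeeking *pSeek, LONGLONG rtPosition )
{
	LONGLONG frame = 0;
	HRESULT hr = pSeek->ConvertTimeFormat( &frame, &TIME_FORMAT_FRAME,
	                                       rtPosition, &TIME_FORMAT_MEDIA_TIME );
	return SUCCEEDED(hr) ? frame : -1;
}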
STDMETHODIMP
CPosPassThru::GetRate(double * pdRate)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }
    hr = pMS->GetRate(pdRate);
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::IsUsingTimeFormat(const GUID * pFormat)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->IsUsingTimeFormat(pFormat);
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::QueryPreferredFormat(GUID *pFormat)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->QueryPreferredFormat(pFormat);
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::CheckCapabilities(DWORD * pCaps)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->CheckCapabilities(pCaps);
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::GetPositions(LONGLONG *pCurrent, LONGLONG * pStop)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->GetPositions(pCurrent,pStop);
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::GetAvailable( LONGLONG *pEarliest, LONGLONG *pLatest )
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->GetAvailable( pEarliest, pLatest );
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::SetPositions( LONGLONG * pCurrent, DWORD CurrentFlags
			  , LONGLONG * pStop, DWORD StopFlags )
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->SetPositions(pCurrent, CurrentFlags, pStop, StopFlags );
    pMS->Release();
    return hr;
}
STDMETHODIMP
CPosPassThru::ConvertTimeFormat(LONGLONG * pTarget, const GUID * pTargetFormat,
				LONGLONG    Source, const GUID * pSourceFormat )
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }

    hr = pMS->ConvertTimeFormat(pTarget, pTargetFormat, Source, pSourceFormat );
    pMS->Release();
    return hr;
}
HRESULT
CPosPassThru::GetSeekingLongLong
( HRESULT (__stdcall IMediaSeeking::*pMethod)( LONGLONG * )
, LONGLONG * pll
)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (SUCCEEDED(hr))
    {
	hr = (pMS->*pMethod)(pll);
	pMS->Release();
    }
    return hr;
}
STDMETHODIMP
CPosPassThru::SetRate(double dRate)
{
    if (0.0 == dRate) {
		return E_INVALIDARG;
    }

    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (FAILED(hr)) {
	return hr;
    }
    hr = pMS->SetRate(dRate);
    pMS->Release();
    return hr;
}
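All of the CPosPassThru wrappers above follow the same shape: obtain the peer's IMediaSeeking with GetPeerSeeking, forward a single call, release the interface. GetSeekingLongLong already factors this out for the one-LONGLONG getters with a member-function pointer; the same idea can be generalized over the argument type. A sketch (ForwardSeeking is hypothetical and would have to be declared as a member of CPosPassThru, since GetPeerSeeking is not public):
// Sketch of a generalized pass-through, following GetSeekingLongLong's
// member-function-pointer pattern.
template <typename Arg>
HRESULT CPosPassThru::ForwardSeeking
( HRESULT (__stdcall IMediaSeeking::*pMethod)( Arg )
, Arg arg
)
{
    IMediaSeeking* pMS;
    HRESULT hr = GetPeerSeeking(&pMS);
    if (SUCCEEDED(hr))
    {
	hr = (pMS->*pMethod)(arg);
	pMS->Release();
    }
    return hr;
}

// GetRate, for example, could then forward as:
//     return ForwardSeeking(&IMediaSeeking::GetRate, pdRate);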
Example #14
long video_get_duration(int id) {
	get_videor(videoStruct, id, -1);
	IMediaSeeking* pSeek;
	HRESULT hr = videoStruct->pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
	DWORD dwCap = 0;
	hr = pSeek->GetCapabilities(&dwCap);
	
	long long duration = 0;  // REFERENCE_TIME: 100-nanosecond units
	if (AM_SEEKING_CanSeekAbsolute & dwCap) //TODO: This if check might not be needed
	{
		hr = pSeek->GetDuration(&duration);
	}

	pSeek->Release();
	
	return duration;
}
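IMediaSeeking reports durations and positions in REFERENCE_TIME, i.e. 100-nanosecond units, when the time format is the default TIME_FORMAT_MEDIA_TIME, so the raw value returned by video_get_duration above is in 100-ns ticks. A small conversion sketch (video_get_duration_ms is a hypothetical wrapper; note that the long return type of video_get_duration truncates long media on 32-bit builds):
// Hypothetical wrapper: convert the 100-ns duration to milliseconds.
long long video_get_duration_ms(int id) {
	long long duration = video_get_duration(id);  // 100-ns ticks (truncated to long above)
	return duration / 10000;                      // 10,000 ticks of 100 ns = 1 ms
}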
Example #15
long video_get_seek(int id) {
	get_videor(videoStruct, id, -1);
	IMediaSeeking* pSeek;
	HRESULT hr = videoStruct->pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
	DWORD dwCap = 0;
	hr = pSeek->GetCapabilities(&dwCap);
	
	long long position = 0;  // REFERENCE_TIME: 100-nanosecond units
	if (AM_SEEKING_CanSeekAbsolute & dwCap)
	{
		hr = pSeek->GetCurrentPosition(&position);
	}

	pSeek->Release();
	
	return position;
}
Example #16
void video_set_seek(int id, long position) {
	get_video(videoStruct, id);
	IMediaSeeking* pSeek;
	HRESULT hr = videoStruct->pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);
	DWORD dwCap = 0;
	hr = pSeek->GetCapabilities(&dwCap);
		
	if (AM_SEEKING_CanSeekAbsolute & dwCap)
	{
		// Graph can seek to absolute positions.
		REFERENCE_TIME rtNow = position;
		hr = pSeek->SetPositions(
			&rtNow, AM_SEEKING_AbsolutePositioning, 
			NULL, AM_SEEKING_NoPositioning
			);
	}

	pSeek->Release();
}
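video_set_seek passes its argument straight through as a REFERENCE_TIME, so callers must supply 100-ns units, which overflow a 32-bit long after about 214 seconds. A sketch of a millisecond-based variant against the same graph (video_set_seek_ms is an assumed name following the pattern of the examples above):
// Hypothetical millisecond-based seek, mirroring video_set_seek above.
void video_set_seek_ms(int id, long long position_ms) {
	get_video(videoStruct, id);
	IMediaSeeking* pSeek = NULL;
	if (FAILED(videoStruct->pGraph->QueryInterface(IID_IMediaSeeking, (void**)&pSeek)))
		return;

	DWORD dwCap = 0;
	pSeek->GetCapabilities(&dwCap);
	if (AM_SEEKING_CanSeekAbsolute & dwCap)
	{
		// Convert milliseconds to 100-ns REFERENCE_TIME units.
		REFERENCE_TIME rtNow = position_ms * 10000;
		pSeek->SetPositions(
			&rtNow, AM_SEEKING_AbsolutePositioning,
			NULL, AM_SEEKING_NoPositioning
			);
	}

	pSeek->Release();
}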
Example #17
HRESULT CLAVSplitterTrayIcon::ProcessMenuCommand(HMENU hMenu, int cmd)
{
  CheckPointer(m_pFilter, E_FAIL);
  DbgLog((LOG_TRACE, 10, L"Menu Command %d", cmd));
  if (cmd >= STREAM_CMD_OFFSET && cmd < m_NumStreams + STREAM_CMD_OFFSET) {
    IAMStreamSelect *pStreamSelect = nullptr;
    if (SUCCEEDED(m_pFilter->QueryInterface(&pStreamSelect))) {
      pStreamSelect->Enable(cmd - STREAM_CMD_OFFSET, AMSTREAMSELECTENABLE_ENABLE);
      SafeRelease(&pStreamSelect);
    }
  } else if (cmd > CHAPTER_CMD_OFFSET && cmd <= m_NumChapters + CHAPTER_CMD_OFFSET) {
    IAMExtendedSeeking *pExSeeking = nullptr;
    if (SUCCEEDED(m_pFilter->QueryInterface(IID_IAMExtendedSeeking, (void **)&pExSeeking))) {
      double markerTime;
      if (FAILED(pExSeeking->GetMarkerTime(cmd - CHAPTER_CMD_OFFSET, &markerTime)))
        goto failchapterseek;

      REFERENCE_TIME rtMarkerTime = (REFERENCE_TIME)(markerTime * 10000000.0);

      // Try to get the graph to seek on, it's much safer than directly trying to seek on LAV
      FILTER_INFO info;
      if (FAILED(m_pFilter->QueryFilterInfo(&info)) || !info.pGraph)
        goto failchapterseek;

      IMediaSeeking *pSeeking = nullptr;
      if (SUCCEEDED(info.pGraph->QueryInterface(&pSeeking))) {
        pSeeking->SetPositions(&rtMarkerTime, AM_SEEKING_AbsolutePositioning, nullptr, AM_SEEKING_NoPositioning);
        SafeRelease(&pSeeking);
      }
      SafeRelease(&info.pGraph);

failchapterseek:
      SafeRelease(&pExSeeking);
    }
  } else if (cmd == STREAM_CMD_OFFSET - 1) {
    OpenPropPage();
  } else {
    return E_UNEXPECTED;
  }
  return S_OK;
}
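SafeRelease, used throughout the handler above, is the usual release-and-null helper; a typical definition, shown here as an assumption about this codebase (it matches the common Windows SDK sample helper):
// Typical SafeRelease helper: release the COM interface if non-null and
// clear the caller's pointer.
template <class T>
void SafeRelease(T **ppT)
{
  if (*ppT) {
    (*ppT)->Release();
    *ppT = nullptr;
  }
}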
void CDownloads_VideoPreview::Play()
{
    USES_CONVERSION;

    if (m_bUsingVideo2)
        return;

    if (m_wndVideo.m_player.Get_State () != VFPS_CLOSED)
    {
        if (m_wndVideo.m_player.Get_State () != VFPS_RUNNING)
            m_wndVideo.m_player.Run ();
        return;
    }

    if (m_pActiveDownload)
    {
        HRESULT hr = E_FAIL;
        BOOL bWasErrMsg = FALSE;


        if (S_FALSE == CoInitialize (NULL))
            CoUninitialize ();

        if (m_pActiveDownload->pMgr->IsDone ())
        {
            hr = m_wndVideo.m_player.Open2 (m_pActiveDownload->pMgr->get_OutputFilePathName ());

            if (FAILED (hr))
            {
                fsString strFile = m_pActiveDownload->pMgr->get_OutputFilePathName ();
                LPCSTR psz = strrchr (strFile, '.');
                if (psz++) // a '.' was found; advance psz past it to the extension
                {
                    if (lstrcmpi (psz, "flv") == 0)
                    {
                        CRect rc (0,0,0,0);
                        m_wndVideo.GetWindowRect (&rc);
                        ScreenToClient (&rc);
                        m_wndVideo2.Create (NULL, WS_CHILD, rc, this, 0x111a);
                        m_wndVideo2.SetBackgroundColor (0);
                        m_wndVideo2.LoadMovie (0, ((CFdmApp*)AfxGetApp ())->m_strAppPath + "player.swf");

                        m_wndVideo.ShowWindow (SW_HIDE);
                        m_wndVideo2.ShowWindow (SW_SHOW);
                        m_bUsingVideo2 = TRUE;

                        CString str;
                        str = "<invoke name=\"loadAndPlayVideo\" returntype=\"xml\">";
                        str += "<arguments><string>";
                        str += strFile;
                        str += "</string></arguments></invoke>";
                        m_wndVideo2.CallFunction (str);

                        hr = S_OK;
                    }
                }
            }


        }
        else
        {
            UINT64 nAvail = Get_AvailFileSize ();

            if (nAvail)
            {
                if (FALSE == OpenFile ())
                {
                    hr = E_FAIL;
                    bWasErrMsg = TRUE;
                }
                else
                    hr = m_wndVideo.m_player.Open (m_hFile, nAvail);

                if (FAILED (hr) && bWasErrMsg == FALSE)
                {
                    CloseFile ();
                    hr = m_wndVideo.m_player.Open2 (m_pActiveDownload->pMgr->get_OutputFilePathName ());
                }
            }
            else
                bWasErrMsg = TRUE;
        }

        if (FAILED (hr))
        {
            CloseFile ();

            if (bWasErrMsg == FALSE)
            {
                if (m_dwDXVersion == 0)
                    m_dwDXVersion = fsDirectXVersion::GetCurrentVersion ();

                if (m_dwDXVersion < 0x0800)
                {
                    CString strErr = LS (L_FAILEDTOOPENFILE);
                    strErr += "\n\n";
                    strErr += LS (L_NEEDDX8ORBETTER);
                    MessageBox (strErr, LS (L_ERR), MB_ICONERROR);
                }
                else
                {
                    CString strErr = LS (L_FAILEDTOOPENFILE);
                    strErr += "\n\n";
                    strErr += LS (L_UNKMEDIATYPE);
                    MessageBox (strErr, LS (L_ERR), MB_ICONERROR);
                }
            }

            m_vidseek.Set_MediaSeeking (NULL);
            return;
        }

        if (m_bUsingVideo2 == FALSE)
        {
            IMediaSeeking* pMS = Get_Player ()->Get_MediaSeeking ();
            m_vidseek.Set_MediaSeeking (pMS);
            if (pMS)
                pMS->Release ();

            m_wndVideo.m_player.Run ();
            m_vidman.ApplyVolumeSettings ();
        }
    }
}
Example #19
HRESULT 
recChannel_t::map(void)
{

    __CONTEXT("recChannel_t::map");
       
	int hr = 0;
	IBaseFilter * pFilter = NULL;
	IBaseFilter * pFilter2 = NULL;
	IPin * pVideoInputPin = NULL;
	pControl->StopWhenReady();
	
	mapping = true;
	pOutput = camInfo->output;


	if (remaped){
		
	    //refresh Codec BW before creation
        pSender->sampleGrabber->BWController->refreshBW();
		pSender->rebind();
	
		hr = pGraph->Render(pOutput);
		{
				
				// Enumerate the filters in the graph.
				IEnumFilters *pEnum = NULL;
				int hr = pGraph->EnumFilters(&pEnum);
				if (SUCCEEDED(hr))
				{
					IBaseFilter *pFilter = NULL;
					pEnum->Reset();
					while (S_OK == pEnum->Next(1, &pFilter, NULL))
					{
						CLSID filterId;
						pFilter->GetClassID(&filterId);
						if(filterId == CLSID_AviSplitter)
			   			{

							IEnumPins * pEnumpin = NULL;
								
							hr = pFilter->EnumPins(&pEnumpin);
							if (!hr)
							{
								IPin * pPin = NULL;
								pEnumpin->Reset();
								while (pEnumpin->Next(1, &pPin, 0) == S_OK)
								{
									bool break_loop = false;
									AM_MEDIA_TYPE * mediaType;
									IEnumMediaTypes * enumMedia = NULL;
						
									hr = pPin->EnumMediaTypes(&enumMedia);
									if(!hr)
									{
										enumMedia->Reset();
										while(enumMedia->Next(1,&mediaType , NULL) == S_OK)
										{
											if (mediaType->majortype == MEDIATYPE_Audio)
											{
												pPin->Disconnect();
												pGraph->Render(pPin);
												pPin->Release();
												break_loop = true;
												break;
											}
										}
										enumMedia->Release();
										if (break_loop)
											break;
									}
								}
								pEnumpin->Release();
							}
							
						}
						pFilter->Release();
					}
					pEnum->Release();
				}
		}

		pipeCreated = true;
	
		if (hr)
		{
				errorCheck(hr);
				NOTIFY("[recChannel_t::map]WARNING :: Can't render actual format, restoring default settings...\r\n");
				capInfo.heigth = DEFAULT_CAPTURE_HEIGTH;
				capInfo.width = DEFAULT_CAPTURE_WIDTH;
				ql_t<AM_MEDIA_TYPE *> auxFormats = camInfo->getFormatList();
				pSender->SetActualCodec(DEFAULT_CODEC_STR);
		}
	}

	if (fullScreen){
		set_full_screen(true);
	}else{
		hr = setWindowGeometry(windowInfo);
		errorCheck(hr);
	}

//	IVideoWindow *pWindowInfo = NULL;
//	hr = pGraph->QueryInterface(IID_IVideoWindow, (void **)&pWindowInfo);
//	if (!hr)
//	{
//		wchar_t wtext[100];
//		long windowStyle,windowStyleEx;
//		lText(wtext,title);
//		pWindowInfo->get_WindowStyle(&windowStyle);
//        pWindowInfo->get_WindowStyleEx(&windowStyleEx);
//		windowStyle = windowStyle + DEFAULT_WINDOW_PROPS - DEFAULT_WINDOW_NON_PROPS;
//		windowStyleEx = windowStyleEx - WS_EX_APPWINDOW;
//		pWindowInfo->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS);
//        pWindowInfo->put_WindowStyleEx(WS_EX_TOOLWINDOW);
//		pWindowInfo->put_Caption(wtext);
//
//#ifdef _WINDOWS
//        if (camInfo->getKind() == MEDIA)
//        {
//            fControl->setGeometry(windowInfo);
//
//        }
//#endif  	
////Ares daemon don't show local windows on
////recChannels
//#ifndef __ARES		
//		if (camInfo->getKind() != SHARED)
//		{
//			pWindowInfo->put_Visible(OATRUE);
//			pWindowInfo->put_AutoShow(OATRUE);
//		}
//		else
//		{
//#endif
//			pWindowInfo->put_Visible(OAFALSE);
//			pWindowInfo->put_AutoShow(OAFALSE);
//#ifndef __ARES
//		}
//#endif
//
//		pWindowInfo->Release();
//		setOwner();
//	}
	
	IMediaSeeking * pSeek = NULL;
    pGraph->QueryInterface(IID_IMediaSeeking,(void **)&pSeek);
    if (pSeek) pSeek->SetRate(1);
        
	pControl->Run();

	if (camInfo->getKind() == SHARED)
    {
		camInfo->RunSource();
    }
		
	if (camInfo->getKind() == TEST) 
    {        
        if (pSeek) pSeek->SetRate(0.5);
        looper->Run();
    }

    if (pSeek)
    {
        pSeek->Release();   // balance the QueryInterface above
        pSeek = NULL;
    }
	
    remaped = false;
	return hr;
}
Example #20
void 
recChannel_t::posReaderThread(void)
{
    __int64 position = 0;
    __int64 duration = 0;

    Sleep(1000);
    while (!fControl)
    {
        Sleep(1000);
    }

    fControl->slideMoved = true;

    IMediaSeeking * pSeek = NULL;
    while(true)
    {
        int hr = pGraph->QueryInterface(IID_IMediaSeeking,(void **)&pSeek);
        //read file pos and put at slicer 
        if (pSeek && hr == S_OK)
        {
            if (!fControl->play && fControl->event)
            {
                pControl->StopWhenReady();
                fControl->event = false;
                pSeek->Release();   // don't leak the interface when skipping the rest of the loop
                continue;
            }

            if (fControl->play && fControl->event)
            {
                pControl->Run();
                fControl->event = false;
            }
                
            if (fControl->slideMoved)
            {
                position = fControl->m_slide.GetPos();
                position *=1000000;
                fControl->slideMoved = false;
                pSeek->SetPositions(&position,
                                     AM_SEEKING_AbsolutePositioning,
                                     NULL,
                                     AM_SEEKING_NoPositioning);
            }else{

                if (!duration)
                {
                    pSeek->GetDuration(&duration);
                    duration /= 1000000;
                    fControl->m_slide.SetRangeMin(0,true);
                    fControl->m_slide.SetRangeMax(duration,true);
                }
                pSeek->GetCurrentPosition(&position);
                position /= 1000000;
                fControl->m_slide.SetPos(position);
            }
        }
        if (pSeek)
        {
            pSeek->Release();
            pSeek = NULL;
        }
        Sleep(1000);
    }
}
Example #21
STDMETHODIMP CTsReaderFilter::Pause()
{
  //m_ShowBufferVideo = INIT_SHOWBUFFERVIDEO;
  //m_ShowBufferAudio = INIT_SHOWBUFFERAUDIO;

  LogDebug("CTsReaderFilter::Pause() - IsTimeShifting = %d - state = %d", IsTimeShifting(), m_State);
  CAutoLock cObjectLock(m_pLock);

  if (m_State == State_Running)
  {
    m_lastPause = GetTickCount();
    m_RandomCompensation = 0;
  }

  //pause filter
  HRESULT hr=CSource::Pause();

  if (!m_bPauseOnClockTooFast)
  {
    //are we using rtsp?
    if (m_fileDuration==NULL)
    {
    //yes, are we busy seeking?
    if (!IsSeeking())
    {
      //not seeking, is rtsp streaming at the moment?
      if (!m_rtspClient.IsRunning())
      {
        //not streaming atm
        double startTime=m_seekTime.Millisecs();
        startTime/=1000.0;

        long Old_rtspDuration = m_rtspClient.Duration() ;
        //clear buffers
        LogDebug("  -- Pause()  ->start rtsp from %f", startTime);
        m_buffer.Clear();
        m_demultiplexer.Flush();

        //start streaming
        m_buffer.Run(true);
        m_rtspClient.Play(startTime,0.0);
//        m_tickCount = GetTickCount();
        LogDebug("  -- Pause()  ->rtsp started");

        //update the duration of the stream
        CPcr pcrStart, pcrEnd, pcrMax ;
        double duration = m_rtspClient.Duration() / 1000.0f ;

        if (m_bTimeShifting)
        {
          // EndPcr is continuously increasing ( until ~26 hours for rollover that will fail ! )
          // So, we refer duration to End, and just update start.
          pcrEnd   = m_duration.EndPcr() ;
          double start  = pcrEnd.ToClock() - duration;
	        if (start<0) start=0 ;
          pcrStart.FromClock(start) ;
          m_duration.Set( pcrStart, pcrEnd, pcrMax) ;     // Pause()-RTSP
        }
        else
        {
          // It's a record, eventually end can increase if recording is in progress, let the end virtually updated by ThreadProc()
          //m_bRecording = (Old_rtspDuration != m_rtspClient.Duration()) ;
          m_bRecording = true; // duration may have not increased in such a short time
        }
        LogDebug("Timeshift %d, Recording %d, StartPCR %f, EndPcr %f, Duration %f",m_bTimeShifting,m_bRecording,m_duration.StartPcr().ToClock(),m_duration.EndPcr().ToClock(),(float)m_duration.Duration().Millisecs()/1000.0f) ;
      }
      else
      {
        //we are streaming at the moment.
       
        //query the current position, so it can resume on un-pause at this position
        //can be required in multiseat with rtsp when changing audio streams 
        IMediaSeeking * ptrMediaPos;
        if (SUCCEEDED(GetFilterGraph()->QueryInterface(IID_IMediaSeeking, (void**)&ptrMediaPos)))
        {
          ptrMediaPos->GetCurrentPosition(&m_seekTime.m_time);
          ptrMediaPos->Release();
        }
        //pause the streaming
        LogDebug("  -- Pause()  ->pause rtsp at position: %f", (m_seekTime.Millisecs() / 1000.0f));
        m_rtspClient.Pause();
      }
    }
    else //we are seeking
    {
      IMediaSeeking * ptrMediaPos;

      if (SUCCEEDED(GetFilterGraph()->QueryInterface(IID_IMediaSeeking, (void**)&ptrMediaPos)))
      {
        LONGLONG currentPos;
        ptrMediaPos->GetCurrentPosition(&currentPos);
        ptrMediaPos->Release();
        double clock = currentPos / 10000000.0;
        float clockEnd = m_duration.EndPcr().ToClock() ;
        if (clock >= clockEnd && clockEnd > 0 )
        {
          LogDebug("End of rtsp stream...");
          m_demultiplexer.SetEndOfFile(true);
        }
      }
    }
    }
    m_demultiplexer.m_LastDataFromRtsp = GetTickCount() ;
  }

  //is the duration update thread running?
  if (!IsThreadRunning())
  {
    //nop? then start it
    //LogDebug("  CTsReaderFilter::Pause()->start duration thread");
    StartThread();
  }

  LogDebug("CTsReaderFilter::Pause() - END - state = %d", m_State);
  return hr;
}
Example #22
bool gui::dx::audio_playerX::play(const char * url, char* clipBegin, char* clipEnd) {

	EnterCriticalSection(&m_csSequence);

	TRACE(L"%s", L"\n####### -- PLAY DX\n");
	amis::util::Log* p_log = amis::util::Log::Instance();
	p_log->writeTrace("Play DX", "audio_playerX::play");

	m_url.assign(url);

	if (m_graph_builder != NULL)
	{ 
		stop(true);
	}

	if (m_graph_builder == NULL)
	{
		HRESULT hr = CoCreateInstance(CLSID_FilterGraph,0,CLSCTX_INPROC_SERVER,
			IID_IGraphBuilder,(void**)&m_graph_builder);
		if(FAILED(hr))
		{
			win_report_error("CoCreateInstance(CLSID_FilterGraph, ...)", hr);	

			LeaveCriticalSection(&m_csSequence);
			return false;
		}
	}

	//WCHAR wsz[MAX_PATH];
	//MultiByteToWideChar(CP_ACP,0, url.c_str(), -1, wsz, MAX_PATH);

	std::string strFileName;
	strFileName.assign(url);

	USES_CONVERSION;
	LPCWSTR str = A2CW(strFileName.c_str());

	HRESULT hr = m_graph_builder->RenderFile(str, 0);
	if(FAILED(hr)){
		
		amis::util::Log* p_log = amis::util::Log::Instance();
		string log_msg = "CANNOT PLAY: ";	
		log_msg.append(strFileName);
		p_log->writeError(log_msg, "audio_playerX::play");

		//m_graph_builder->Release();
		//m_graph_builder = 0;
		/* 
		if (hr == 0x800c000d)  // XXX This value experimentally determined:-)
		errorX("%s: Unsupported URL protocol", url.c_str());
		else if (hr == VFW_E_CANNOT_CONNECT)
		errorX("%s: Unsupported video format", url.c_str());
		else
		errorX("%s: DirectX error 0x%x", url.c_str(), hr);
		*/
		release_player();
		LeaveCriticalSection(&m_csSequence);
		return false;
	}
#ifdef WITH_TPB_AUDIO_SPEEDUP
	initialize_speedup_filter();
#endif

	if (m_media_control == NULL) { 
		HRESULT hr = m_graph_builder->QueryInterface(IID_IMediaControl, (void **) &m_media_control);
		if(FAILED(hr)) {
			win_report_error("QueryInterface(IID_IMediaControl, ...)", hr);	

			LeaveCriticalSection(&m_csSequence);
			return false;
		}
	}

	if (m_media_position == NULL) { 
		hr = m_graph_builder->QueryInterface(IID_IMediaPosition, (void **) &m_media_position);
		if(FAILED(hr)) {
			win_report_error("QueryInterface(IID_IMediaPosition, ...)", hr);

			LeaveCriticalSection(&m_csSequence);
			return false;
		}
	}

	if (m_media_event == NULL) { 
		hr = m_graph_builder->QueryInterface(IID_IMediaEvent, (void **) &m_media_event);
		if(FAILED(hr)) {
			win_report_error("QueryInterface(IID_IMediaEvent, ...)", hr);

			LeaveCriticalSection(&m_csSequence);
			return false;
		}
	}

	if (m_basic_audio == NULL) { 
		hr = m_graph_builder->QueryInterface(IID_IBasicAudio, (void **) &m_basic_audio);
		if(FAILED(hr)) {
			win_report_error("QueryInterface(IID_IBasicAudio, ...)", hr);
		}
	}

	SmilTimeCode startStop(clipBegin, clipBegin, clipEnd);
	unsigned long begin = startStop.getStart();
	unsigned long end = startStop.getEnd();

	LONGLONG llDuration = 0;

	IMediaSeeking *pIMS;
	if (m_graph_builder->QueryInterface(IID_IMediaSeeking, (void**) &pIMS) == S_OK)
	{

		if (pIMS->SetTimeFormat(&TIME_FORMAT_MEDIA_TIME) == S_OK) {
			if (pIMS->GetPositions(NULL, &llDuration) != S_OK) llDuration = -10000;
		}
		else
		{
			llDuration = - 10000;
		}

		long duration = (long) ((llDuration / 10000) & 0xFFFFFFFF);	  

		if (! (begin == 0 && end == 0)) {

			if (begin >= 0 && begin < duration) {

				LONGLONG Value = (LONGLONG) (begin);
				Value *= 10000;
				if (pIMS->SetTimeFormat(&TIME_FORMAT_MEDIA_TIME) == S_OK) {
					if (pIMS->SetPositions(&Value, AM_SEEKING_AbsolutePositioning, NULL, AM_SEEKING_NoPositioning) != S_OK){}
				}
			}
			
			if (end >= 0 && end > begin && end < duration) {
				
				LONGLONG Value = (LONGLONG) (end);
				Value *= 10000;
				if (pIMS->SetTimeFormat(&TIME_FORMAT_MEDIA_TIME) == S_OK) {
					if (pIMS->SetPositions(NULL, AM_SEEKING_NoPositioning, &Value, AM_SEEKING_AbsolutePositioning) != S_OK) {}
				}
			}
		}
		pIMS->Release();
	}

	if (hEventHandler == NULL) {
		unsigned long lpdwThreadID;
		//hEventHandler = (HANDLE) _beginthreadex(NULL, 0, eventHandler, (void*) this, 0, &lpdwThreadID);
		hEventHandler = CreateThread(NULL, 0, &eventHandler, this, 0, &lpdwThreadID);
		//GetCurrentThreadId
		TRACE("\nTHREAD ID (DX_AUDIO_PLAYER): %x\n", lpdwThreadID);
		
		amis::util::Log* p_log = amis::util::Log::Instance();
		string log_msg = "Thread ID: ";
		char strID[10];
		sprintf(strID, "%x", lpdwThreadID);			
		log_msg.append(strID);
		p_log->writeTrace(log_msg, "audio_playerX::play");
	}
#ifdef SINGLE_THREAD_HACK
	SetEvent(m_hEventWakeup);
#endif

	set_volume(s_current_volume);

	hr = m_media_control->Run();
	if(FAILED(hr)) {
		win_report_error("IMediaControl::run()", hr);	
	}

	LeaveCriticalSection(&m_csSequence);
	return true;
}
Example #23
int main(int argc, char* argv[])
{
    ICaptureGraphBuilder2   *pCaptureGraphBuilder = NULL;
    IGraphBuilder           *pGraphBuilder = NULL;
    IBaseFilter             *pSource = NULL;
    IBaseFilter             *pMux = NULL;
    IBaseFilter             *pVideoCompressor = NULL;
    IBaseFilter             *pAudioCompressor = NULL;

    IAMStreamConfig         *pAMStreamConfig = NULL;
    IAMVideoCompression     *pAMVideoCompression = NULL;

    IMediaControl           *pControl = NULL;
    IMediaSeeking           *pSeek = NULL;
    IMediaEvent             *pEvent = NULL;

    HRESULT hr;

    DWORD pdwRegister=0;
    CoInitialize(NULL);

    // Create the capture graph builder.
    CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
                     IID_ICaptureGraphBuilder2, (void **)&pCaptureGraphBuilder);

    // Make the rendering section of the graph.
    pCaptureGraphBuilder->SetOutputFileName(
        &MEDIASUBTYPE_Avi,  // File type.
        L"C:\\STDIUE1.avi",  // File name.
        &pMux,              // pointer to the multiplexer.
        NULL);              // pointer to the file writer.

    // Load the source file.
    pCaptureGraphBuilder->GetFiltergraph(&pGraphBuilder);
    pGraphBuilder->AddSourceFilter(L"C:\\Program Files\\Microsoft Money\\Media\\STDIUE1.avi", L"Source Filter", &pSource);

    // Add the compressor filter.
    CoCreateInstance(CLSID_AVICo, NULL, CLSCTX_INPROC,
                     IID_IBaseFilter, (void **)&pVideoCompressor);
    pGraphBuilder->AddFilter(pVideoCompressor, L"Video Compressor");

    // Render the video stream, through the compressor.
    pCaptureGraphBuilder->RenderStream(
        NULL,       // Output pin category
        NULL,       // Media type
        pSource,       // Source filter
        pVideoCompressor,     // Compressor filter
        pMux);      // Sink filter (the AVI Mux)

    /* CoCreateInstance(CLSID_GSM, NULL, CLSCTX_INPROC,
             IID_IBaseFilter, (void **)&pAudioCompressor);
     pGraphBuilder->AddFilter(pAudioCompressor, L"Audio Compressor");*/

    // Render the audio stream.
    pCaptureGraphBuilder->RenderStream(
        NULL,
        NULL,
        pSource,
        pAudioCompressor,
        pMux);

    // Compress at 100k/second data rate.
    AM_MEDIA_TYPE *pmt;
    pCaptureGraphBuilder->FindInterface(NULL, NULL, pVideoCompressor, IID_IAMStreamConfig, (void **)&pAMStreamConfig);

    pAMStreamConfig->GetFormat(&pmt);

    if (pmt->formattype == FORMAT_VideoInfo)
    {

        ((VIDEOINFOHEADER *)(pmt->pbFormat))->dwBitRate = 100000;

        pAMStreamConfig->SetFormat(pmt);
    }


    // Request key frames every four frames.
    pAMStreamConfig->QueryInterface(IID_IAMVideoCompression, (void **)&pAMVideoCompression);
    pAMVideoCompression->put_KeyFrameRate(4);
    pAMVideoCompression->Release();
    pAMStreamConfig->Release();

    // Run the graph.

    pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pControl);
    pGraphBuilder->QueryInterface(IID_IMediaEvent, (void **)&pEvent);

    hr = pMux->QueryInterface(IID_IMediaSeeking, (void**)&pSeek);


    pControl->Run();
    printf("Recompressing... \n");

    long evCode;
    if (SUCCEEDED(hr))
    {
        REFERENCE_TIME rtTotal, rtNow = 0;
        pSeek->GetDuration(&rtTotal);
        while ((pEvent->WaitForCompletion(1000, &evCode)) == E_ABORT)
        {
            pSeek->GetCurrentPosition(&rtNow);
            printf("%d%%\n", (rtNow * 100)/rtTotal);
        }
        pSeek->Release();
    }
    else  // Cannot update the progress.
    {
        pEvent->WaitForCompletion(INFINITE, &evCode);
    }
    pControl->Stop();
    printf("All done\n");

    pSource->Release();
    pMux->Release();
    pVideoCompressor->Release();
    if (pAudioCompressor)
        pAudioCompressor->Release();  // never created above (its CoCreateInstance is commented out)
    pControl->Release();
    pEvent->Release();
    pCaptureGraphBuilder->Release();
    pGraphBuilder->Release();
    CoUninitialize();

    return 0;
}