Exemplo n.º 1
0
// Search a filter's pins for the given interface. Returns TRUE as soon as
// any pin on the filter exposes the requested interface; *ppUnk then holds
// an AddRef'd pointer the caller must Release.
BOOL FindPinInterface(IBaseFilter* pFilter, REFGUID iid, void** ppUnk)
{
	if (pFilter == NULL || ppUnk == NULL)
		return FALSE;

	IEnumPins* pEnum = NULL;
	if (FAILED(pFilter->EnumPins(&pEnum)))
		return FALSE;

	BOOL bFound = FALSE;
	IPin* pPin = NULL;
	while (!bFound && pEnum->Next(1, &pPin, NULL) == S_OK)
	{
		// QueryInterface AddRefs *ppUnk on success; the pin itself is
		// released either way.
		bFound = SUCCEEDED(pPin->QueryInterface(iid, ppUnk));
		pPin->Release();
	}

	pEnum->Release();
	return bFound;
}
// Fill in the uncompressed-surface requirements for a DXVA1 profile.
// Returns E_INVALIDARG for unsupported profiles, E_UNEXPECTED when the
// pin is not connected, otherwise the result of the format negotiation.
// On success the negotiated pixel format and profile GUID are cached in
// m_uncompPixelFormat / m_DXVA1DecoderID.
HRESULT CH264DecoderOutputPin::GetUncompSurfacesInfo(
    const GUID* profileID, AMVAUncompBufferInfo* uncompBufInfo)
{
    if (!m_decoder->IsFormatSupported(*profileID))
        return E_INVALIDARG;

    IPin* connected = GetConnected();
    if (!connected)
        return E_UNEXPECTED;

    intrusive_ptr<IAMVideoAccelerator> accel;
    HRESULT hr = connected->QueryInterface(IID_IAMVideoAccelerator,
                                           reinterpret_cast<void**>(&accel));
    if (FAILED(hr) || !accel)
        return hr;

    // Min and max are deliberately the same: the decoder wants exactly
    // this many surfaces.
    const int surfaces = getDecodeSurfacesCount();
    uncompBufInfo->dwMinNumSurfaces = surfaces;
    uncompBufInfo->dwMaxNumSurfaces = surfaces;

    hr = m_decoder->ConfirmDXVA1UncompFormat(
        accel.get(), profileID, &uncompBufInfo->ddUncompPixelFormat);
    if (SUCCEEDED(hr))
    {
        // Remember the negotiated format and profile for later activation.
        memcpy(&m_uncompPixelFormat, &uncompBufInfo->ddUncompPixelFormat,
               sizeof(m_uncompPixelFormat));
        m_DXVA1DecoderID = *profileID;
    }

    return hr;
}
Exemplo n.º 3
0
	// Find the first pin of `direction` on the filter behind pUnk and return
	// its IAMStreamConfig interface in *ppConfig (AddRef'd; caller releases).
	//
	// Fix: the original could return a stale SUCCEEDED hr (left over from
	// QueryDirection) even when no pin matched, so the caller would treat an
	// untouched *ppConfig as valid. We now report E_NOINTERFACE explicitly
	// when nothing is found, and drop the dead `if (SUCCEEDED(hr))` that
	// re-tested a stale hr after an ignored Reset().
	HRESULT GetAMConfigForSinglePin(IUnknown* pUnk, PIN_DIRECTION direction, IAMStreamConfig** ppConfig)
	{
		if (!pUnk || !ppConfig)
			return E_POINTER;

		IBaseFilter* pBaseFilter = NULL;
		HRESULT hr = pUnk->QueryInterface(IID_IBaseFilter, (void**)&pBaseFilter);
		if (FAILED(hr))
			return hr;

		IEnumPins* pEnumPins = NULL;
		hr = pBaseFilter->EnumPins(&pEnumPins);
		if (SUCCEEDED(hr))
		{
			pEnumPins->Reset();
			IPin* pPin = NULL;
			BOOL bFound = FALSE;
			while (!bFound && pEnumPins->Next(1, &pPin, NULL) == S_OK)
			{
				PIN_DIRECTION fetchedDir;
				if (SUCCEEDED(pPin->QueryDirection(&fetchedDir)) && (fetchedDir == direction))
				{
					// AddRefs *ppConfig on success.
					bFound = SUCCEEDED(pPin->QueryInterface(IID_IAMStreamConfig, (void**)ppConfig));
				}
				pPin->Release();
			}
			pEnumPins->Release();
			// Explicit result: success only when an interface was handed out.
			hr = bFound ? S_OK : E_NOINTERFACE;
		}
		pBaseFilter->Release();
		return hr;
	}
Exemplo n.º 4
0
HRESULT FindPinInterface(
	IBaseFilter *pFilter,  // Filter whose pins are searched.
	REFGUID iid,           // IID of the interface to query for.
	void **ppUnk)          // Receives the AddRef'd interface pointer.
{
	if (!pFilter || !ppUnk) return E_POINTER;

	IEnumPins *pPins = NULL;
	if (FAILED(pFilter->EnumPins(&pPins)))
	{
		return E_FAIL;
	}

	// Walk every pin until one answers the QueryInterface.
	HRESULT hr = E_FAIL;
	IPin *pPin = NULL;
	for (;;)
	{
		if (pPins->Next(1, &pPin, 0) != S_OK)
			break;
		hr = pPin->QueryInterface(iid, ppUnk);
		pPin->Release();
		if (SUCCEEDED(hr))
			break;
	}
	pPins->Release();
	return hr;
}
Exemplo n.º 5
0
	// Find the CAPTURE-category pin of direction `direct` on the filter
	// behind pUnk and return its IAMStreamConfig in *ppConfig (AddRef'd;
	// caller releases).
	//
	// Fix: like the single-pin variant, the original could return a stale
	// SUCCEEDED hr with *ppConfig untouched when no capture pin matched;
	// we now return E_NOINTERFACE explicitly in that case.
	HRESULT GetAMConfigForMultiPin(IUnknown* pUnk, PIN_DIRECTION direct, IAMStreamConfig** ppConfig)
	{
		if (!pUnk || !ppConfig)
			return E_POINTER;

		IBaseFilter* pBaseFilter = NULL;
		HRESULT hr = pUnk->QueryInterface(IID_IBaseFilter, (void**)&pBaseFilter);
		if (FAILED(hr))
			return hr;

		IEnumPins* pEnumPins = NULL;
		hr = pBaseFilter->EnumPins(&pEnumPins);
		if (SUCCEEDED(hr))
		{
			pEnumPins->Reset();
			IPin* pPin = NULL;
			BOOL bFound = FALSE;
			while (!bFound && (pEnumPins->Next(1, &pPin, NULL) == S_OK))
			{
				PIN_DIRECTION fetchedDir;
				if (SUCCEEDED(pPin->QueryDirection(&fetchedDir)) && (fetchedDir == direct))
				{
					IKsPropertySet* pPS = NULL;
					if (SUCCEEDED(pPin->QueryInterface(IID_IKsPropertySet, (void**)&pPS)))
					{
						// Only accept pins whose category is CAPTURE.
						GUID guid = { 0 };
						DWORD dwReturn = 0;
						if (SUCCEEDED(pPS->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, 0, 0, &guid, sizeof(guid), &dwReturn))
							&& ::IsEqualGUID(guid, PIN_CATEGORY_CAPTURE))
						{
							// AddRefs *ppConfig on success.
							bFound = SUCCEEDED(pPin->QueryInterface(IID_IAMStreamConfig, (void**)ppConfig));
						}
						pPS->Release();
					}
				}
				pPin->Release();
			}
			pEnumPins->Release();
			hr = bFound ? S_OK : E_NOINTERFACE;
		}
		pBaseFilter->Release();
		return hr;
	}
Exemplo n.º 6
0
// Force the capture pin's audio format to 8-bit mono 11025 Hz PCM via
// IAMStreamConfig. The buffer-size negotiation this function was named for
// is commented out below.
// NOTE(review): the ownership of the pin returned by GetPin() is not visible
// here -- if GetPin() AddRefs, pCapturePin leaks; confirm against GetPin().
void CCaptureDevice::SetCaptureBufferSize(void)
{
	IPin * pCapturePin = GetPin();
	if (pCapturePin)
	{
		DWORD  dwBytesPerSec = 0;
		AM_MEDIA_TYPE * pmt = {0};
		IAMStreamConfig * pCfg = NULL;
		HRESULT hr = pCapturePin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
		if ( hr==S_OK )
		{
            hr = pCfg->GetFormat(&pmt);
			if ( hr==S_OK )
			{
				// NOTE(review): pmt->formattype is not checked; this assumes
				// the pin's format block is a WAVEFORMATEX -- confirm the pin
				// is an audio capture pin before casting.
				WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
				// Remember the original rate (only used by the disabled
				// buffer-negotiation code below).
				dwBytesPerSec = pWF->nAvgBytesPerSec;
				pWF->nChannels = 1;
				pWF->wBitsPerSample = 8;
				pWF->nSamplesPerSec = 11025;
				pWF->nAvgBytesPerSec = pWF->nSamplesPerSec * pWF->nChannels * pWF->wBitsPerSample / 8;
				pWF->nBlockAlign = 1;
/*
	info.cbSize = sizeof(WAVEFORMATEX);
	info.wFormatTag = 1;
	info.nChannels = 2;
	info.nSamplesPerSec = 44100;
	//info.nSamplesPerSec = 22050;
	11025
	info.wBitsPerSample = 16;
	info.nAvgBytesPerSec = info.nSamplesPerSec * info.nChannels * info.wBitsPerSample / 8;
	info.nBlockAlign = 4;
	*/
				// Apply the modified format; result deliberately ignored.
				pCfg->SetFormat( pmt );
				DeleteMediaType(pmt);
			}
			pCfg->Release();
		}
/*		if (dwBytesPerSec)
		{
			IAMBufferNegotiation * pNeg = NULL;
			hr = pCapturePin->QueryInterface(IID_IAMBufferNegotiation, 
				(void **)&pNeg);
			if (SUCCEEDED(hr))
			{
				ALLOCATOR_PROPERTIES AllocProp;
				AllocProp.cbAlign  = -1;  // -1 means no preference.
				AllocProp.cbBuffer = dwBytesPerSec *  dwLatencyInMilliseconds / 1000;
				AllocProp.cbPrefix = -1;
				AllocProp.cBuffers = -1;
				hr = pNeg->SuggestAllocatorProperties(&AllocProp);
				pNeg->Release();
			}
		}*/
	}
}
Exemplo n.º 7
0
/*
 * Class:     sage_DShowMediaPlayer
 * Method:    setVideoHWND0
 * Signature: (JJ)V
 *
 * Attaches the DirectShow video window to the native HWND passed from Java
 * (as a jlong), then disables DirectShow's own aspect-ratio handling on any
 * overlay mixer pin named "Input0" so the player can manage aspect itself.
 * NOTE(review): dataPtr is cast and dereferenced without a NULL check, and
 * pGraph from GetGraph() is assumed non-NULL -- confirm callers guarantee
 * a valid player handle.
 */
JNIEXPORT void JNICALL Java_sage_DShowMediaPlayer_setVideoHWND0
  (JNIEnv *env, jobject jo, jlong dataPtr, jlong vhwnd)
{
	CPlayerData* playData = (CPlayerData*) dataPtr;
	IGraphBuilder* pGraph = playData->GetGraph();
	IVideoWindow* pVW = NULL;
	HRESULT hr = pGraph->QueryInterface(IID_IVideoWindow, (void**)&pVW);
	if (SUCCEEDED(hr))
	{
		slog((env, "DShowPlayer setVideoHWND(%d)\r\n", (int) vhwnd));
		// Parent the video window into the supplied HWND as a child window
		// and route its messages to that window.
		pVW->put_AutoShow(OAFALSE);
		pVW->put_Owner((OAHWND)vhwnd);
		pVW->put_MessageDrain((OAHWND)vhwnd);
		pVW->put_WindowStyle(WS_CHILD | WS_CLIPSIBLINGS | WS_CLIPCHILDREN);
		pVW->put_Visible(OATRUE);

		// We do all of our own aspect ratio control, so don't let DShow do any for us
		// by setting the aspect ratio mode on the video rendering filter's pin
		IEnumFilters *pEnum = NULL;
		hr = pGraph->EnumFilters(&pEnum);
		if (SUCCEEDED(hr))
		{
			IBaseFilter *currFilt = NULL;
			while (pEnum->Next(1, &currFilt, NULL) == S_OK)
			{
				IPin *overlayPin = NULL;
				hr = currFilt->FindPin(L"Input0", &overlayPin);
				if (SUCCEEDED(hr))
				{
					// Right pin name, let's see if it's overlay
					IMixerPinConfig *pOverlayMix = NULL;
					hr = overlayPin->QueryInterface(IID_IMixerPinConfig, (void**)&pOverlayMix);
					if (SUCCEEDED(hr))
					{
						pOverlayMix->SetAspectRatioMode(AM_ARMODE_STRETCHED);
						SAFE_RELEASE(pOverlayMix);
					}
					SAFE_RELEASE(overlayPin);
				}
				SAFE_RELEASE(currFilt);
			}
			SAFE_RELEASE(pEnum);
			// Filters without an "Input0" pin set hr to a failure above;
			// that is expected, so report overall success.
			hr = S_OK;
		}
		SAFE_RELEASE(pVW);
	}
	HTESTPRINT(hr);
}
Exemplo n.º 8
0
// Return the capture pin's IAMStreamConfig interface, or NULL when the
// filter/pin is unavailable or the pin does not expose it.
//
// Fix: the original called pConfig->Release() and then RETURNED the released
// pointer -- the caller received an interface whose reference had already
// been dropped (potential use-after-free). QueryInterface AddRefs on
// success, so the reference belongs to the caller; the caller must Release
// the returned interface when done.
IAMStreamConfig * CCaptureDevice::GetStreamConfig(void)
{
	IAMStreamConfig * pConfig = NULL;
	if (m_pBaseFilter)
	{
		// Get the capture output pin first
		IPin * pCapture = GetPin();
		if (pCapture)
		{
			pCapture->QueryInterface(IID_IAMStreamConfig, (void **)&pConfig);
		}
	}
	return pConfig;
}
// Build the DXVA1 connection data handed to the video accelerator: query
// compressed-buffer info for the cached decoder GUID, activate DXVA1 on the
// decoder, and allocate a DXVA_ConnectMode blob (CoTaskMemAlloc -- freed by
// the caller/COM) describing the negotiated mode.
//
// Fix: the CoTaskMemAlloc result was dereferenced without a NULL check;
// we now return E_OUTOFMEMORY on allocation failure.
HRESULT CH264DecoderOutputPin::GetCreateVideoAcceleratorData(
    const GUID* profileID, DWORD* miscDataSize, void** miscData)
{
    IPin* connected = GetConnected();
    if (!connected)
        return E_UNEXPECTED;

    intrusive_ptr<IAMVideoAccelerator> accel;
    HRESULT r = connected->QueryInterface(IID_IAMVideoAccelerator,
                                          reinterpret_cast<void**>(&accel));
    if (FAILED(r))
        return r;

    AMVAUncompDataInfo uncompDataInfo;
    memcpy(&uncompDataInfo.ddUncompPixelFormat, &m_uncompPixelFormat,
           sizeof(m_uncompPixelFormat));
    // NOTE(review): dimensions are hard-coded; presumably an SD default --
    // confirm against the negotiated media type.
    uncompDataInfo.dwUncompWidth = 720;
    uncompDataInfo.dwUncompHeight = 480;

    AMVACompBufferInfo compInfo[30];
    DWORD numTypesCompBuffers = arraysize(compInfo);
    r = accel->GetCompBufferInfo(&m_DXVA1DecoderID, &uncompDataInfo,
                                 &numTypesCompBuffers, compInfo);
    if (FAILED(r))
        return r;

    r = m_decoder->ActivateDXVA1(accel.get(), profileID, uncompDataInfo,
                                 m_DXVA1SurfCount);
    if (SUCCEEDED(r))
    {
        m_decoder->SetDXVA1PixelFormat(m_uncompPixelFormat);
        DXVA_ConnectMode* connectMode =
            reinterpret_cast<DXVA_ConnectMode*>(
                CoTaskMemAlloc(sizeof(DXVA_ConnectMode)));
        if (!connectMode)
            return E_OUTOFMEMORY;  // fix: allocation can fail
        connectMode->guidMode = m_DXVA1DecoderID;
        connectMode->wRestrictedMode = DXVA_RESTRICTED_MODE_H264_E;
        *miscDataSize = sizeof(*connectMode);
        *miscData = connectMode;
    }

    return r;
}
// Return the IMediaSeeking interface of the pin our pin is connected to.
// E_NOTIMPL when the pin is unconnected or the peer lacks the interface;
// on success *ppMS holds an AddRef'd pointer the caller releases.
HRESULT
CPosPassThru::GetPeerSeeking(IMediaSeeking ** ppMS)
{
    *ppMS = NULL;

    IPin *pPeer = NULL;
    if (FAILED(m_pPin->ConnectedTo(&pPeer))) {
	return E_NOTIMPL;
    }

    // Query the peer directly into the out parameter, then drop our
    // reference to the pin itself.
    HRESULT hr = pPeer->QueryInterface(IID_IMediaSeeking, (void **) ppMS);
    pPeer->Release();

    return FAILED(hr) ? E_NOTIMPL : S_OK;
}
// Return the IMediaPosition interface of the pin our pin is connected to.
// E_NOTIMPL when the pin is unconnected or the peer lacks the interface;
// on success *ppMP holds an AddRef'd pointer the caller releases.
HRESULT
CPosPassThru::GetPeer(IMediaPosition ** ppMP)
{
    *ppMP = NULL;

    IPin *pPeer = NULL;
    if (FAILED(m_pPin->ConnectedTo(&pPeer))) {
	return E_NOTIMPL;
    }

    // Query the peer directly into the out parameter, then drop our
    // reference to the pin itself.
    HRESULT hr = pPeer->QueryInterface(IID_IMediaPosition, (void **) ppMP);
    pPeer->Release();

    return FAILED(hr) ? E_NOTIMPL : S_OK;
}
Exemplo n.º 12
0
//
// CFilePlayer::GetColorKeyInternal(): Private method to query the color key
// value from the first input pin of the OverlayMixer.
//
// Fixes: EnumPins and QueryInterface failures were only ASSERTed and then
// the (possibly NULL) pointers were dereferenced in release builds; also
// SetColorKey() was called even when GetColorKey() failed, propagating an
// uninitialized value.
//
HRESULT CFilePlayer::GetColorKeyInternal(IBaseFilter *pOvM)
{
    DbgLog((LOG_TRACE, 5, TEXT("CFilePlayer::GetColorKeyInternal() entered"))) ;

    if (NULL == pOvM)
        return E_INVALIDARG ;

    IEnumPins  *pEnumPins = NULL ;
    IPin       *pPin ;
    ULONG       ul ;
    PIN_DIRECTION  pd ;
    DWORD       dwColorKey ;
    IMixerPinConfig  *pMPC ;

    HRESULT  hr = pOvM->EnumPins(&pEnumPins) ;
    ASSERT(SUCCEEDED(hr) && pEnumPins) ;
    if (FAILED(hr) || NULL == pEnumPins)   // fix: don't deref NULL enumerator
        return hr ;

    while (S_OK == pEnumPins->Next(1, &pPin, &ul)  &&  1 == ul)  // try all pins
    {
        pPin->QueryDirection(&pd) ;
        if (PINDIR_INPUT == pd)  // only the 1st in pin
        {
            hr = pPin->QueryInterface(IID_IMixerPinConfig, (LPVOID *) &pMPC) ;
            ASSERT(SUCCEEDED(hr) && pMPC) ;
            if (SUCCEEDED(hr) && pMPC)     // fix: guard against QI failure
            {
                hr = pMPC->GetColorKey(NULL, &dwColorKey) ;  // just get the physical color
                if (SUCCEEDED(hr))         // fix: don't store an uninit value
                    SetColorKey(dwColorKey) ;

                //  Set mode to stretch - that way we don't fight the overlay
                //  mixer about the exact way to fix the aspect ratio
                pMPC->SetAspectRatioMode(AM_ARMODE_STRETCHED);
                pMPC->Release() ;
            }
            pPin->Release() ; // exiting early; release pin
            break ;   // we are done
        }
        pPin->Release() ;
    }
    pEnumPins->Release() ;  // done with pin enum

    return S_OK ;
}
Exemplo n.º 13
0
// Build a minimal capture graph around the FIRST webcam found, render its
// output pin, then hook the renderer's IMemInputPin::Receive (vtable slot 6
// via DsHook) so raw frames can be grabbed.
// NOTE(review): every COM call below is unchecked -- with no camera present,
// CreateClassEnumerator/Next leave the pointers NULL and the subsequent
// calls will crash; confirm callers guarantee a capture device exists.
// NOTE(review): the graph/enumerator/pin interfaces acquired here are never
// Released in this constructor -- presumably intentional for the object's
// lifetime, but verify against the destructor.
Camera::Camera(bool Show,bool Start) : eHandler(this),_realData(false),_UpdateWindow(Show),_LastData(0),_CurData(0) {
	DWORD no;
	IGraphBuilder *graph = 0;
	ctrl = 0;
	ICreateDevEnum *devs = 0;
	IEnumMoniker *cams = 0;
	IMoniker *mon = 0;
	IBaseFilter *cam = 0;
	IEnumPins *pins = 0;
	IPin *pin = 0;
	IEnumFilters *fil = 0;
	IBaseFilter *rnd = 0;
	IMemInputPin *mem = 0;
	curCamera = this;
	_isOn = Start;

	CoCreateInstance( CLSID_FilterGraph, 0, CLSCTX_INPROC,IID_IGraphBuilder, (void **)&graph );
	graph->QueryInterface( IID_IMediaControl, (void **)&ctrl );
	CoCreateInstance (CLSID_SystemDeviceEnum, 0, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &devs);
	devs->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &cams, 0);
	cams->Next (1,&mon,0);												// get first found capture device (webcam)    
	mon->BindToObject(0,0,IID_IBaseFilter, (void**)&cam);
	
	graph->AddFilter(cam, L"Capture Source");							// add web cam to graph as source
	cam->EnumPins(&pins);												// we need output pin to autogenerate rest of the graph
	pins->Next(1,&pin, 0);												// via graph->Render
	graph->Render(pin);													// graph builder now builds whole filter chain including MJPG decompression on some webcams
	graph->EnumFilters(&fil);											// from all newly added filters
	fil->Next(1,&rnd,0);												// we find last one (renderer)
	rnd->EnumPins(&pins);												// because data we are intersted in are pumped to renderers input pin 
	pins->Next(1,&pin, 0);												// via Receive member of IMemInputPin interface
	pin->QueryInterface(IID_IMemInputPin,(void**)&mem);

	DsHook(mem,6,Receive);												// so we redirect it to our own proc to grab image data

	if (Start) this->Start();
}
Exemplo n.º 14
0
// Build the local preview graph: render the video capture stream of the
// first device (p_streams[0]) and, for the audio device (p_streams[1]),
// suggest a 4096-byte allocator buffer on any pin supporting
// IAMBufferNegotiation. Audio itself is deliberately not rendered locally.
// Idempotent: returns S_OK immediately once b_buildPreview is set.
HRESULT CAccessSys::BuildPreview(void)
{
	HRESULT hr;
	IBaseFilter *pSrcFilter = NULL;

	// Already built -- nothing to do.
	if (b_buildPreview){
		return S_OK;
	}

	// Get DirectShow interfaces
	hr = GetInterfaces();
	if (FAILED(hr))
	{
		Msg(TEXT("Failed to get video interfaces!  hr=0x%x"), hr);
		return hr;
	}


	// Video device: let the capture graph builder render its preview pin.
	pSrcFilter = p_streams[0].p_device_filter;
	
	hr = p_capture_graph_builder2->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
		pSrcFilter, NULL, NULL);
	if (FAILED(hr))
	{
		Msg(TEXT("Couldn't render the video capture stream.  hr=0x%x\r\n")
			TEXT("The capture device may already be in use by another application.\r\n\r\n")
			TEXT("The sample will now close."), hr);
		pSrcFilter->Release();
		return hr;
	}

	//{
	//	IEnumPins *ep;
	//	IPin *inputpin = NULL;
	//	IPin *voutputpin = NULL;
	//	IPin *aoutputpin = NULL;
	//	IPin *pin = NULL;
	//	bool bFindI420 = false;
	//	bool bFindPCM = false;

	//	pSrcFilter = p_streams[0].p_device_filter;

	//	pSrcFilter->EnumPins(&ep);
	//	if (SUCCEEDED(hr)){
	//		ep->Reset();
	//		while (SUCCEEDED(hr = ep->Next(1, &pin, 0)) && hr != S_FALSE){
	//			PIN_DIRECTION pinDir;
	//			pin->QueryDirection(&pinDir);
	//			if (pinDir == PINDIR_OUTPUT){
	//				AM_MEDIA_TYPE *pmt;
	//				IEnumMediaTypes *emt;
	//				pin->EnumMediaTypes(&emt);
	//				while (hr = emt->Next(1, &pmt, NULL), hr != S_FALSE){
	//					if (pmt->majortype == MEDIATYPE_Video){
	//						if (pmt->subtype == MEDIASUBTYPE_RGB24){
	//							//Msg(TEXT("MEDIASUBTYPE_RGB24"));
	//						}
	//						else if (pmt->subtype == MEDIASUBTYPE_I420){
	//							//Msg(TEXT("MEDIASUBTYPE_I420"));
	//							bFindI420 = true;
	//						}
	//						else if (pmt->subtype == MEDIASUBTYPE_YUY2){}
	//					}
	//					TCHAR buf[64] = { 0 };
	//					swprintf(buf, TEXT("{%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X}"),
	//						pmt->subtype.Data1, pmt->subtype.Data2, pmt->subtype.Data3,
	//						pmt->subtype.Data4[0], pmt->subtype.Data4[1],
	//						pmt->subtype.Data4[2], pmt->subtype.Data4[3],
	//						pmt->subtype.Data4[4], pmt->subtype.Data4[5],
	//						pmt->subtype.Data4[6], pmt->subtype.Data4[7]);
	//					//Msg(buf);
	//					DeleteMediaType(pmt);
	//				}
	//				emt->Release();
	//			}
	//			pin->Release();
	//			pin = NULL;
	//		}
	//	}
	//	RELEASE(ep);
	//}

	// Audio device: not rendered locally (see disabled block below); we only
	// tune its allocator properties.
	pSrcFilter = p_streams[1].p_device_filter;
	
    // do not render local audio
	//hr = p_capture_graph_builder2->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Audio,
	//	pSrcFilter, NULL, NULL);
	//if (FAILED(hr))
	//{
	//	Msg(TEXT("Couldn't render the video capture stream.  hr=0x%x\r\n")
	//		TEXT("The capture device may already be in use by another application.\r\n\r\n")
	//		TEXT("The sample will now close."), hr);
	//	pSrcFilter->Release();
	//	return hr;
	//}

    {
        // Suggest a 4096-byte buffer on every pin that supports
        // IAMBufferNegotiation (comment says: AAC encode frame length).
        // NOTE(review): the EnumPins result is not checked before ep->Reset();
        // a failing EnumPins would leave ep uninitialized -- confirm.
        IEnumPins *ep;
        IPin *pin = NULL;

        IAMBufferNegotiation *buffer_negotiation = NULL;
        ALLOCATOR_PROPERTIES props = { -1, -1, -1, -1 };

        pSrcFilter->EnumPins(&ep);
        ep->Reset();
        while (SUCCEEDED(hr = ep->Next(1, &pin, 0)) && hr != S_FALSE){
            if (pin->QueryInterface(IID_IAMBufferNegotiation, (void **)&buffer_negotiation) == S_OK){
                buffer_negotiation->GetAllocatorProperties(&props);
                props.cbBuffer = 4096; // set to 4096 byte: acc encode frame length
                buffer_negotiation->SuggestAllocatorProperties(&props);
                RELEASE(buffer_negotiation);
            }
            RELEASE(pin);
        }
        RELEASE(ep);
    }

	//{
	//	IEnumPins *ep;
	//	IPin *inputpin = NULL;
	//	IPin *voutputpin = NULL;
	//	IPin *aoutputpin = NULL;
	//	IPin *pin = NULL;
	//	bool bFindI420 = false;
	//	bool bFindPCM = false;

	//	//pSrcFilter = p_streams[0].p_device_filter;

	//	pSrcFilter->EnumPins(&ep);
	//	if (SUCCEEDED(hr)){
	//		ep->Reset();
	//		while (SUCCEEDED(hr = ep->Next(1, &pin, 0)) && hr != S_FALSE){
	//			PIN_DIRECTION pinDir;
	//			pin->QueryDirection(&pinDir);
	//			if (pinDir == PINDIR_OUTPUT){
	//				AM_MEDIA_TYPE *pmt;
	//				IEnumMediaTypes *emt;
	//				pin->EnumMediaTypes(&emt);
	//				while (hr = emt->Next(1, &pmt, NULL), hr != S_FALSE){
	//					if (pmt->majortype == MEDIATYPE_Audio){
	//						if (pmt->subtype == MEDIASUBTYPE_PCM){
	//							//Msg(TEXT("MEDIASUBTYPE_PCM"));
	//						}
	//						else if (pmt->subtype == MEDIASUBTYPE_I420){
	//							//Msg(TEXT("MEDIASUBTYPE_I420"));
	//							bFindI420 = true;
	//						}
	//						else{
	//							bFindI420 = true;
	//						}
	//					}
	//					TCHAR buf[64] = { 0 };
	//					swprintf(buf, TEXT("{%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X}"),
	//						pmt->subtype.Data1, pmt->subtype.Data2, pmt->subtype.Data3,
	//						pmt->subtype.Data4[0], pmt->subtype.Data4[1],
	//						pmt->subtype.Data4[2], pmt->subtype.Data4[3],
	//						pmt->subtype.Data4[4], pmt->subtype.Data4[5],
	//						pmt->subtype.Data4[6], pmt->subtype.Data4[7]);
	//					//Msg(buf);
	//					DeleteMediaType(pmt);
	//				}
	//				emt->Release();
	//			}
	//			pin->Release();
	//			pin = NULL;
	//		}
	//	}
	//	RELEASE(ep);
	//}

	b_buildPreview = true;
	return hr;
}
// Assemble the audio branch of the FLV conversion graph between pSrc and
// pDest: AudioSkipper -> (optional) ACM Wrapper -> (optional) audio
// compressor + timestamp remover, targeting MP3 (WAVE_FORMAT_MPEGLAYER3,
// mono, 11025 Hz) as configured in m_wfx. pEndPin tracks the current tail
// of the chain; JoinFilterToChain is expected to swap it for the new tail.
// Returns the HRESULT of the final graph Connect (or the first fatal error).
HRESULT CFLVConverter::AssembeAudioBranch(const CParameters & param, IPin* pSrc, IPin* pDest)
{
HRESULT hr=S_OK;
IPin* pEndPin = NULL;
AM_MEDIA_TYPE mtSrc;
AM_MEDIA_TYPE *pmtWrp    = NULL;
WE_WAVEFORMATEX* pwfxSrc = NULL;
WE_WAVEFORMATEX* pwfxWrp = NULL;
ULONG ulFetched = 0;
	if(!pSrc || !pDest)
		return E_POINTER;

	// Describe the target MP3 format in m_wfx (m_wfx is presumably large
	// enough to carry the MPEGLAYER3WAVEFORMAT extension -- note the cast
	// below; verify its declared type).
	m_wfx.wFormatTag      = WAVE_FORMAT_MPEGLAYER3;
	m_wfx.nChannels       = 1;
	m_wfx.nSamplesPerSec  = 11025;
	m_wfx.wBitsPerSample  = 0;
	m_wfx.nBlockAlign     = 1;
	m_wfx.nAvgBytesPerSec = 2500;
	m_wfx.cbSize          = MPEGLAYER3_WFX_EXTRA_BYTES;
	MPEGLAYER3WAVEFORMAT* pMp3Format = (MPEGLAYER3WAVEFORMAT*) &m_wfx;
	pMp3Format->wID             = MPEGLAYER3_ID_MPEG;
	pMp3Format->fdwFlags        = MPEGLAYER3_FLAG_PADDING_ON;//MPEGLAYER3_FLAG_PADDING_OFF;
	pMp3Format->nBlockSize      = 132;
	pMp3Format->nFramesPerBlock = 1;
	pMp3Format->nCodecDelay     = 1393;

	// Measure the source length in samples (best effort; failures ignored).
	CComPtr<IMediaSeeking> cpMediaSeeking;
	hr = pSrc->QueryInterface(IID_IMediaSeeking, (void **)&cpMediaSeeking);
	if(SUCCEEDED(hr))
	{
		hr = cpMediaSeeking->SetTimeFormat(&TIME_FORMAT_SAMPLE);
		if(SUCCEEDED(hr))
		{
			cpMediaSeeking->GetDuration(&m_llAudioSamplesCount);
		}
	}

	// Take our own reference on the chain tail; JoinFilterToChain
	// presumably releases/replaces it as filters are appended.
	pEndPin = pSrc;
	pEndPin->AddRef();

	CComPtr<IBaseFilter> cpAudioSkipFilter;
	hr = CreateAndInsertFilter(pGB, CLSID_CAudioSkipper, &cpAudioSkipFilter, L"AudioSkipper");
	if(SUCCEEDED(hr))
	{

		hr = JoinFilterToChain(pGB, cpAudioSkipFilter, &pEndPin);

		CComPtr<IAudioSkip> cpAudioSkip;
		hr = cpAudioSkipFilter->QueryInterface(IID_IAudioSkip, (void**) &cpAudioSkip);
		if(SUCCEEDED(hr))
		{
			cpAudioSkip->SetSamplesCount(m_llVideoFramesCount, m_llAudioSamplesCount);
			cpAudioSkip->SetIntervals((void*)param.GetAllDeletedInterval(), (void*)param.GetAudioDeletedInterval());
		}
	}
	if(FAILED(hr))
	{
		SAFE_RELEASE(pEndPin);
		return hr;
	}
	// If the source format differs from the target, insert an ACM Wrapper
	// (and, for non-PCM targets, a compressor + timestamp remover).
	hr = pSrc->ConnectionMediaType(&mtSrc);
	if(SUCCEEDED(hr))
	{
		pwfxSrc = (WE_WAVEFORMATEX*) mtSrc.pbFormat;
		if((pwfxSrc->wFormatTag!=m_wfx.wFormatTag) || (pwfxSrc->nSamplesPerSec!=m_wfx.nSamplesPerSec))
		{
			CComPtr<IBaseFilter> cpAcmWrapper = NULL;
			hr = CoCreateInstance(CLSID_ACMWrapper, NULL, CLSCTX_INPROC, IID_IBaseFilter, (LPVOID *)&cpAcmWrapper);
			if(SUCCEEDED(hr))
			{
				hr = JoinFilterToChain(pGB, cpAcmWrapper, L"ACM Wrapper", &pEndPin);
				if(SUCCEEDED(hr))
				{
					CComPtr<IAMStreamConfig> cpAudioStreamConfig;
					hr = pEndPin->QueryInterface(IID_IAMStreamConfig,(void **)&cpAudioStreamConfig);
					if(SUCCEEDED(hr))
					{
						hr = cpAudioStreamConfig->GetFormat(&pmtWrp);
						if(SUCCEEDED(hr))
						{
							pwfxWrp = (WE_WAVEFORMATEX*) pmtWrp->pbFormat;
							if(WAVE_FORMAT_PCM!=m_wfx.wFormatTag)
							{
								// Non-PCM target: ask ACM for a compatible
								// intermediate PCM format.
								WAVEFORMATEX wfxSrs;
								wfxSrs.nChannels  = m_wfx.nChannels;
								wfxSrs.wFormatTag = WAVE_FORMAT_PCM;
								MMRESULT mmr = acmFormatSuggest(NULL, &m_wfx, &wfxSrs, sizeof(wfxSrs),ACM_FORMATSUGGESTF_NCHANNELS| ACM_FORMATSUGGESTF_WFORMATTAG);
								if(MMSYSERR_NOERROR==mmr)
								{
									pwfxWrp->nChannels       = wfxSrs.nChannels;
									pwfxWrp->nSamplesPerSec  = wfxSrs.nSamplesPerSec;
									pwfxWrp->nAvgBytesPerSec = wfxSrs.nAvgBytesPerSec;
									pwfxWrp->nBlockAlign     = wfxSrs.nBlockAlign;
									pwfxWrp->wBitsPerSample  = wfxSrs.wBitsPerSample;
								}
							}
							else
							{
								pwfxWrp->nChannels       = m_wfx.nChannels;
								pwfxWrp->nSamplesPerSec  = m_wfx.nSamplesPerSec;
								pwfxWrp->nAvgBytesPerSec = m_wfx.nAvgBytesPerSec;
								pwfxWrp->nBlockAlign     = m_wfx.nBlockAlign;
								pwfxWrp->wBitsPerSample  = m_wfx.wBitsPerSample;
							}
							hr = cpAudioStreamConfig->SetFormat(pmtWrp);
							DeleteMediaType(pmtWrp);
						}
					}
				}
			}

			if(WAVE_FORMAT_PCM!=m_wfx.wFormatTag)
			{
				CComPtr<IBaseFilter> cpAComp;
				hr = CreateAudioCompressor(&m_wfx, &cpAComp);
				if(S_OK==hr)
				{
					hr = JoinFilterToChain(pGB, cpAComp, L"Audio compressor", &pEndPin); 
					if(SUCCEEDED(hr))
					{
						CComPtr<IBaseFilter> cpTimeRemover;
						hr = CoCreateInstance(CLSID_CTimeRemover, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void **)&cpTimeRemover);
						if(SUCCEEDED(hr))
						{	//this filter needs as workaround for 0x80040228 error code generated by some
							//audio compressors:
							hr = JoinFilterToChain(pGB, cpTimeRemover, L"TimeStamp remover", &pEndPin); 
						}
					}
				}
			}
		}
	}
	// Connect the chain tail to the destination pin.
	hr = pGB->Connect(pDest, pEndPin);
	FreeMediaType(mtSrc);
	SAFE_RELEASE(pEndPin);
	return hr;
}
Exemplo n.º 16
0
bool DeviceSource::LoadFilters()
{
    if(bCapturing || bFiltersLoaded)
        return false;

    bool bSucceeded = false;

    List<MediaOutputInfo> outputList;
    IAMStreamConfig *config = NULL;
    bool bAddedVideoCapture = false, bAddedAudioCapture = false, bAddedDevice = false;
    GUID expectedMediaType;
    IPin *devicePin = NULL, *audioPin = NULL;
    HRESULT err;
    String strShader;

    bUseThreadedConversion = API->UseMultithreadedOptimizations() && (OSGetTotalCores() > 1);

    //------------------------------------------------
    // basic initialization vars

    bool bForceCustomAudio = data->GetInt(TEXT("forceCustomAudioDevice")) != 0;

    bUseCustomResolution = data->GetInt(TEXT("customResolution"));
    strDevice = data->GetString(TEXT("device"));
    strDeviceName = data->GetString(TEXT("deviceName"));
    strDeviceID = data->GetString(TEXT("deviceID"));
    strAudioDevice = data->GetString(TEXT("audioDevice"));
    strAudioName = data->GetString(TEXT("audioDeviceName"));
    strAudioID = data->GetString(TEXT("audioDeviceID"));

    bFlipVertical = data->GetInt(TEXT("flipImage")) != 0;
    bFlipHorizontal = data->GetInt(TEXT("flipImageHorizontal")) != 0;
    bUsePointFiltering = data->GetInt(TEXT("usePointFiltering")) != 0;

    opacity = data->GetInt(TEXT("opacity"), 100);

    float volume = data->GetFloat(TEXT("volume"), 1.0f);

    bUseBuffering = data->GetInt(TEXT("useBuffering")) != 0;
    bufferTime = data->GetInt(TEXT("bufferTime"))*10000;

    //------------------------------------------------
    // chrom key stuff

    bUseChromaKey = data->GetInt(TEXT("useChromaKey")) != 0;
    keyColor = data->GetInt(TEXT("keyColor"), 0xFFFFFFFF);
    keySimilarity = data->GetInt(TEXT("keySimilarity"));
    keyBlend = data->GetInt(TEXT("keyBlend"), 80);
    keySpillReduction = data->GetInt(TEXT("keySpillReduction"), 50);

    if(keyBaseColor.x < keyBaseColor.y && keyBaseColor.x < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.x;
    else if(keyBaseColor.y < keyBaseColor.x && keyBaseColor.y < keyBaseColor.z)
        keyBaseColor -= keyBaseColor.y;
    else if(keyBaseColor.z < keyBaseColor.x && keyBaseColor.z < keyBaseColor.y)
        keyBaseColor -= keyBaseColor.z;

    //------------------------------------------------
    // get the device filter and pins

    if(strDeviceName.IsValid())
        deviceFilter = GetDeviceByValue(CLSID_VideoInputDeviceCategory, L"FriendlyName", strDeviceName, L"DevicePath", strDeviceID);
    else
    {
        if(!strDevice.IsValid())
        {
            AppWarning(TEXT("DShowPlugin: Invalid device specified"));
            goto cleanFinish;
        }

        deviceFilter = GetDeviceByValue(CLSID_VideoInputDeviceCategory, L"FriendlyName", strDevice);
    }
    
    if(!deviceFilter)
    {
        AppWarning(TEXT("DShowPlugin: Could not create device filter"));
        goto cleanFinish;
    }

    devicePin = GetOutputPin(deviceFilter, &MEDIATYPE_Video);
    if(!devicePin)
    {
        AppWarning(TEXT("DShowPlugin: Could not get device video pin"));
        goto cleanFinish;
    }

    soundOutputType = data->GetInt(TEXT("soundOutputType")); //0 is for backward-compatibility
    if (strAudioID.CompareI(TEXT("Disabled")))
        soundOutputType = 0;

    if(soundOutputType != 0)
    {
        if(!bForceCustomAudio)
        {
            err = capture->FindPin(deviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
            bDeviceHasAudio = SUCCEEDED(err);
        }
        else
            bDeviceHasAudio = false;

        if(!bDeviceHasAudio)
        {
            if(strDeviceName.IsValid())
            {
                audioDeviceFilter = GetDeviceByValue(CLSID_AudioInputDeviceCategory, L"FriendlyName", strAudioName, L"DevicePath", strAudioID);
                if(!audioDeviceFilter)
                    AppWarning(TEXT("DShowPlugin: Invalid audio device: name '%s', path '%s'"), strAudioName.Array(), strAudioID.Array());
            }
            else if(strAudioDevice.IsValid())
            {
                audioDeviceFilter = GetDeviceByValue(CLSID_AudioInputDeviceCategory, L"FriendlyName", strAudioDevice);
                if(!audioDeviceFilter)
                    AppWarning(TEXT("DShowPlugin: Could not create audio device filter"));
            }

            if(audioDeviceFilter)
                err = capture->FindPin(audioDeviceFilter, PINDIR_OUTPUT, &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, FALSE, 0, &audioPin);
            else
                err = E_FAIL;
        }

        if(FAILED(err) || !audioPin)
        {
            Log(TEXT("DShowPlugin: No audio pin, result = %lX"), err);
            soundOutputType = 0;
        }
    }
    else
        bDeviceHasAudio = bForceCustomAudio = false;

    int soundTimeOffset = data->GetInt(TEXT("soundTimeOffset"));

    GetOutputList(devicePin, outputList);

    //------------------------------------------------
    // initialize the basic video variables and data

    renderCX = renderCY = 0;
    frameInterval = 0;

    if(bUseCustomResolution)
    {
        renderCX = data->GetInt(TEXT("resolutionWidth"));
        renderCY = data->GetInt(TEXT("resolutionHeight"));
        frameInterval = data->GetInt(TEXT("frameInterval"));
    }
    else
    {
        SIZE size;
        if (!GetClosestResolution(outputList, size, frameInterval))
        {
            AppWarning(TEXT("DShowPlugin: Unable to find appropriate resolution"));
            renderCX = renderCY = 64;
            goto cleanFinish;
        }

        renderCX = size.cx;
        renderCY = size.cy;
    }

    if(!renderCX || !renderCY || !frameInterval)
    {
        AppWarning(TEXT("DShowPlugin: Invalid size/fps specified"));
        goto cleanFinish;
    }

    preferredOutputType = (data->GetInt(TEXT("usePreferredType")) != 0) ? data->GetInt(TEXT("preferredType")) : -1;

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    for(int i=0; i<numThreads; i++)
    {
        convertData[i].width  = renderCX;
        convertData[i].height = renderCY;
        convertData[i].sample = NULL;
        convertData[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertData[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);

        if(i == 0)
            convertData[i].startY = 0;
        else
            convertData[i].startY = convertData[i-1].endY;

        if(i == (numThreads-1))
            convertData[i].endY = renderCY;
        else
            convertData[i].endY = ((renderCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bFirstFrame = true;

    //------------------------------------------------
    // get the closest media output for the settings used

    MediaOutputInfo *bestOutput = GetBestMediaOutput(outputList, renderCX, renderCY, preferredOutputType, frameInterval);
    if(!bestOutput)
    {
        AppWarning(TEXT("DShowPlugin: Could not find appropriate resolution to create device image source"));
        goto cleanFinish;
    }

    //------------------------------------------------
    // log video info

    {
        String strTest = FormattedString(TEXT("    device: %s,\r\n    device id %s,\r\n    chosen type: %s, usingFourCC: %s, res: %ux%u - %ux%u, frameIntervals: %llu-%llu"),
            strDevice.Array(), strDeviceID.Array(),
            EnumToName[(int)bestOutput->videoType],
            bestOutput->bUsingFourCC ? TEXT("true") : TEXT("false"),
            bestOutput->minCX, bestOutput->minCY, bestOutput->maxCX, bestOutput->maxCY,
            bestOutput->minFrameInterval, bestOutput->maxFrameInterval);

        BITMAPINFOHEADER *bmiHeader = GetVideoBMIHeader(bestOutput->mediaType);

        char fourcc[5];
        mcpy(fourcc, &bmiHeader->biCompression, 4);
        fourcc[4] = 0;

        if(bmiHeader->biCompression > 1000)
            strTest << FormattedString(TEXT(", fourCC: '%S'\r\n"), fourcc);
        else
            strTest << FormattedString(TEXT(", fourCC: %08lX\r\n"), bmiHeader->biCompression);

        if(!bDeviceHasAudio) strTest << FormattedString(TEXT("    audio device: %s,\r\n    audio device id %s,\r\n"), strAudioDevice.Array(), strAudioID.Array());

        Log(TEXT("------------------------------------------"));
        Log(strTest.Array());
    }

    //------------------------------------------------
    // set up shaders and video output data

    expectedMediaType = bestOutput->mediaType->subtype;

    colorType = DeviceOutputType_RGB;
    if(bestOutput->videoType == VideoOutputType_I420)
        colorType = DeviceOutputType_I420;
    else if(bestOutput->videoType == VideoOutputType_YV12)
        colorType = DeviceOutputType_YV12;
    else if(bestOutput->videoType == VideoOutputType_YVYU)
        colorType = DeviceOutputType_YVYU;
    else if(bestOutput->videoType == VideoOutputType_YUY2)
        colorType = DeviceOutputType_YUY2;
    else if(bestOutput->videoType == VideoOutputType_UYVY)
        colorType = DeviceOutputType_UYVY;
    else if(bestOutput->videoType == VideoOutputType_HDYC)
        colorType = DeviceOutputType_HDYC;
    else
    {
        colorType = DeviceOutputType_RGB;
        expectedMediaType = MEDIASUBTYPE_RGB32;
    }

    strShader = ChooseShader();
    if(strShader.IsValid())
        colorConvertShader = CreatePixelShaderFromFile(strShader);

    if(colorType != DeviceOutputType_RGB && !colorConvertShader)
    {
        AppWarning(TEXT("DShowPlugin: Could not create color space conversion pixel shader"));
        goto cleanFinish;
    }

    if(colorType == DeviceOutputType_YV12 || colorType == DeviceOutputType_I420)
    {
        for(int i=0; i<numThreads; i++)
            hConvertThreads[i] = OSCreateThread((XTHREAD)PackPlanarThread, convertData+i);
    }

    //------------------------------------------------
    // set chroma details

    keyBaseColor = Color4().MakeFromRGBA(keyColor);
    Matrix4x4TransformVect(keyChroma, (colorType == DeviceOutputType_HDYC) ? (float*)yuv709Mat : (float*)yuvMat, keyBaseColor);
    keyChroma *= 2.0f;

    //------------------------------------------------
    // configure video pin

    if(FAILED(err = devicePin->QueryInterface(IID_IAMStreamConfig, (void**)&config)))
    {
        AppWarning(TEXT("DShowPlugin: Could not get IAMStreamConfig for device pin, result = %08lX"), err);
        goto cleanFinish;
    }

    AM_MEDIA_TYPE outputMediaType;
    CopyMediaType(&outputMediaType, bestOutput->mediaType);

    VIDEOINFOHEADER *vih  = reinterpret_cast<VIDEOINFOHEADER*>(outputMediaType.pbFormat);
    BITMAPINFOHEADER *bmi = GetVideoBMIHeader(&outputMediaType);
    vih->AvgTimePerFrame  = frameInterval;
    bmi->biWidth          = renderCX;
    bmi->biHeight         = renderCY;
    bmi->biSizeImage      = renderCX*renderCY*(bmi->biBitCount>>3);

    if(FAILED(err = config->SetFormat(&outputMediaType)))
    {
        if(err != E_NOTIMPL)
        {
            AppWarning(TEXT("DShowPlugin: SetFormat on device pin failed, result = %08lX"), err);
            goto cleanFinish;
        }
    }

    FreeMediaType(outputMediaType);

    //------------------------------------------------
    // get audio pin configuration, optionally configure audio pin to 44100

    GUID expectedAudioType;

    if(soundOutputType == 1)
    {
        IAMStreamConfig *audioConfig;
        if(SUCCEEDED(audioPin->QueryInterface(IID_IAMStreamConfig, (void**)&audioConfig)))
        {
            AM_MEDIA_TYPE *audioMediaType;
            if(SUCCEEDED(err = audioConfig->GetFormat(&audioMediaType)))
            {
                SetAudioInfo(audioMediaType, expectedAudioType);
            }
            else if(err == E_NOTIMPL) //elgato probably
            {
                IEnumMediaTypes *audioMediaTypes;
                if(SUCCEEDED(err = audioPin->EnumMediaTypes(&audioMediaTypes)))
                {
                    ULONG i = 0;
                    if((err = audioMediaTypes->Next(1, &audioMediaType, &i)) == S_OK)
                        SetAudioInfo(audioMediaType, expectedAudioType);
                    else
                    {
                        AppWarning(TEXT("DShowPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                        soundOutputType = 0;
                    }

                    audioMediaTypes->Release();
                }
                else
                {
                    AppWarning(TEXT("DShowPlugin: audioMediaTypes->Next failed, result = %08lX"), err);
                    soundOutputType = 0;
                }
            }
            else
            {
                AppWarning(TEXT("DShowPlugin: Could not get audio format, result = %08lX"), err);
                soundOutputType = 0;
            }

            audioConfig->Release();
        }
        else {
            soundOutputType = 0;
        }
    }

    //------------------------------------------------
    // add video capture filter if any

    captureFilter = new CaptureFilter(this, MEDIATYPE_Video, expectedMediaType);

    if(FAILED(err = graph->AddFilter(captureFilter, NULL)))
    {
        AppWarning(TEXT("DShowPlugin: Failed to add video capture filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    bAddedVideoCapture = true;

    //------------------------------------------------
    // add audio capture filter if any

    if(soundOutputType == 1)
    {
        audioFilter = new CaptureFilter(this, MEDIATYPE_Audio, expectedAudioType);
        if(!audioFilter)
        {
            AppWarning(TEXT("Failed to create audio capture filter"));
            soundOutputType = 0;
        }
    }
    else if(soundOutputType == 2)
    {
        if(FAILED(err = CoCreateInstance(CLSID_AudioRender, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&audioFilter)))
        {
            AppWarning(TEXT("DShowPlugin: failed to create audio renderer, result = %08lX"), err);
            soundOutputType = 0;
        }

        IBasicAudio *basicAudio;
        if(SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
        {
            long lVol = long((double(volume)*NEAR_SILENTf)-NEAR_SILENTf);
            if(lVol <= -NEAR_SILENT)
                lVol = -10000;
            basicAudio->put_Volume(lVol);
            basicAudio->Release();
        }
    }

    if(soundOutputType != 0)
    {
        if(FAILED(err = graph->AddFilter(audioFilter, NULL)))
            AppWarning(TEXT("DShowPlugin: Failed to add audio capture filter to graph, result = %08lX"), err);

        bAddedAudioCapture = true;
    }

    //------------------------------------------------
    // add primary device filter

    if(FAILED(err = graph->AddFilter(deviceFilter, NULL)))
    {
        AppWarning(TEXT("DShowPlugin: Failed to add device filter to graph, result = %08lX"), err);
        goto cleanFinish;
    }

    if(soundOutputType != 0 && !bDeviceHasAudio)
    {
        if(FAILED(err = graph->AddFilter(audioDeviceFilter, NULL)))
            AppWarning(TEXT("DShowPlugin: Failed to add audio device filter to graph, result = %08lX"), err);
    }

    bAddedDevice = true;

    //------------------------------------------------
    // connect all pins and set up the whole capture thing

    //THANK THE NINE DIVINES I FINALLY GOT IT WORKING
    bool bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, deviceFilter, NULL, captureFilter));
    if(!bConnected)
    {
        if(FAILED(err = graph->Connect(devicePin, captureFilter->GetCapturePin())))
        {
            AppWarning(TEXT("DShowPlugin: Failed to connect the video device pin to the video capture pin, result = %08lX"), err);
            goto cleanFinish;
        }
    }

    if(soundOutputType != 0)
    {
        if(!bDeviceHasAudio)
            bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, audioDeviceFilter, NULL, audioFilter));
        else
            bConnected = SUCCEEDED(err = capture->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Audio, deviceFilter, NULL, audioFilter));

        if(!bConnected)
        {
            AppWarning(TEXT("DShowPlugin: Failed to connect the audio device pin to the audio capture pin, result = %08lX"), err);
            soundOutputType = 0;
        }
    }

    if(FAILED(err = graph->QueryInterface(IID_IMediaControl, (void**)&control)))
    {
        AppWarning(TEXT("DShowPlugin: Failed to get IMediaControl, result = %08lX"), err);
        goto cleanFinish;
    }

    if (bUseBuffering) {
        if (!(hStopSampleEvent = CreateEvent(NULL, FALSE, FALSE, NULL))) {
            AppWarning(TEXT("DShowPlugin: Failed to create stop event"), err);
            goto cleanFinish;
        }

        if (!(hSampleThread = OSCreateThread((XTHREAD)SampleThread, this))) {
            AppWarning(TEXT("DShowPlugin: Failed to create sample thread"), err);
            goto cleanFinish;
        }
    }

    if(soundOutputType == 1)
    {
        audioOut = new DeviceAudioSource;
        audioOut->Initialize(this);
        API->AddAudioSource(audioOut);

        audioOut->SetAudioOffset(soundTimeOffset);
        audioOut->SetVolume(volume);
    }

    bSucceeded = true;

cleanFinish:
    SafeRelease(config);
    SafeRelease(devicePin);
    SafeRelease(audioPin);

    for(UINT i=0; i<outputList.Num(); i++)
        outputList[i].FreeData();

    if(!bSucceeded)
    {
        bCapturing = false;

        if(bAddedVideoCapture)
            graph->RemoveFilter(captureFilter);
        if(bAddedAudioCapture)
            graph->RemoveFilter(audioFilter);

        if(bAddedDevice)
        {
            if(!bDeviceHasAudio && audioDeviceFilter)
                graph->RemoveFilter(audioDeviceFilter);
            graph->RemoveFilter(deviceFilter);
        }

        SafeRelease(audioDeviceFilter);
        SafeRelease(deviceFilter);
        SafeRelease(captureFilter);
        SafeRelease(audioFilter);
        SafeRelease(control);

        if (hSampleThread) {
            SetEvent(hStopSampleEvent);
            WaitForSingleObject(hSampleThread, INFINITE);
            CloseHandle(hSampleThread);
            hSampleThread = NULL;
        }

        if (hStopSampleEvent) {
            CloseHandle(hStopSampleEvent);
            hStopSampleEvent = NULL;
        }

        if(colorConvertShader)
        {
            delete colorConvertShader;
            colorConvertShader = NULL;
        }

        if(audioOut)
        {
            delete audioOut;
            audioOut = NULL;
        }

        if(lpImageBuffer)
        {
            Free(lpImageBuffer);
            lpImageBuffer = NULL;
        }

        bReadyToDraw = true;
    }
    else
        bReadyToDraw = false;

    if(!renderCX) renderCX = 32;
    if(!renderCY) renderCY = 32;

    //-----------------------------------------------------
    // create the texture regardless, will just show up as red to indicate failure
    BYTE *textureData = (BYTE*)Allocate(renderCX*renderCY*4);

    if(colorType == DeviceOutputType_RGB) //you may be confused, but when directshow outputs RGB, it's actually outputting BGR
    {
        msetd(textureData, 0xFFFF0000, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_BGR, textureData, FALSE, FALSE);
    }
    else //if we're working with planar YUV, we can just use regular RGB textures instead
    {
        msetd(textureData, 0xFF0000FF, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_RGB, textureData, FALSE, FALSE);
    }

    if(bSucceeded && bUseThreadedConversion)
    {
        if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
        {
            LPBYTE lpData;
            if(texture->Map(lpData, texturePitch))
                texture->Unmap();
            else
                texturePitch = renderCX*4;

            lpImageBuffer = (LPBYTE)Allocate(texturePitch*renderCY);
        }
    }

    Free(textureData);

    bFiltersLoaded = bSucceeded;
    return bSucceeded;
}
Exemplo n.º 17
0
// Enumerates the WAVEFORMATEX formats supported by the audio compressor
// whose friendly name matches m_sAudComp.
// @param listFormats  receives heap-allocated copies of each supported
//                     format; the caller owns (and must delete) the entries.
// @return S_OK-ish HRESULT of the last operation, E_POINTER when the
//         category enumerator could not be created.
HRESULT CAudioCompressorFormats::GetSupportedFormats(std::vector<WAVEFORMATEX*>& listFormats)
{
	CStringW swDeviceName(m_sAudComp);

	// Enumerate all registered audio compressor filters.
	HRESULT hr = m_pSysDevEnum->CreateClassEnumerator(CLSID_AudioCompressorCategory, &m_pEnumCat, 0);
	if(NULL == m_pEnumCat)
		return E_POINTER;
	if(S_OK == hr)
	{
		ULONG cFetched;
		while(m_pEnumCat->Next(1, &m_pMoniker, &cFetched) == S_OK)
		{
			IPropertyBag *pPropBag;
			hr = m_pMoniker->BindToStorage(0, 0, IID_IPropertyBag, 
				(void **)&pPropBag);
			if (SUCCEEDED(hr))
			{
				VARIANT varName;
				VariantInit(&varName);
				hr = pPropBag->Read(L"FriendlyName", &varName, 0);
				// BUGFIX: read the BSTR via bstrVal (pbstrVal is a BSTR* and
				// yields garbage), and always clear the VARIANT and release
				// the property bag — the original leaked both on a match.
				BOOL bMatch = SUCCEEDED(hr) &&
					wcscmp(varName.bstrVal, swDeviceName.GetBuffer()) == 0;
				VariantClear(&varName);
				pPropBag->Release();
				if(bMatch)
				{
					// Keep an extra reference on the matching moniker.
					m_pMoniker->AddRef();
					break;
				}
			}
			m_pMoniker->Release();
			// BUGFIX: null out so the post-loop check cannot touch a
			// released moniker when no matching compressor is found.
			m_pMoniker = NULL;
		}
	}
	if(m_pMoniker)
	{
		IBaseFilter *pFilter = 0;
		hr = m_pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pFilter);
		if(SUCCEEDED(hr))
		{
			IEnumPins *pEnum = NULL;
			hr = pFilter->EnumPins(&pEnum);
			if (SUCCEEDED(hr))
			{
				IPin *pPin = NULL;
				while(S_OK == pEnum->Next(1, &pPin, NULL))
				{
					IAMStreamConfig *pConf;
					hr = pPin->QueryInterface(IID_IAMStreamConfig, (void**)&pConf);
					if (SUCCEEDED(hr))
					{
						int iCount = 0, iSize = 0;
						hr = pConf->GetNumberOfCapabilities(&iCount, &iSize);
						// Only iterate when the caps structure really is the
						// audio one.  (BUGFIX: the unused pSCC scratch buffer
						// — leaked / freed with scalar delete — is removed;
						// GetStreamCaps writes into the local scc below.)
						if (SUCCEEDED(hr) && iSize == sizeof(AUDIO_STREAM_CONFIG_CAPS))
						{
							for (int iFormat = 0; iFormat < iCount; iFormat++)
							{
								AUDIO_STREAM_CONFIG_CAPS scc;
								AM_MEDIA_TYPE *pmtConfig = NULL;
								hr = pConf->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
								if (SUCCEEDED(hr))
								{
									if(pmtConfig->formattype == FORMAT_WaveFormatEx)
									{
										// Copy the format; the caller owns the entries.
										WAVEFORMATEX *pFormat = new WAVEFORMATEX(*(reinterpret_cast<WAVEFORMATEX*>(pmtConfig->pbFormat)));
										listFormats.push_back(pFormat);
									}
									// BUGFIX: free the media type for every
									// capability, not only WaveFormatEx ones.
									FreeMediaType(*pmtConfig); 
									CoTaskMemFree(pmtConfig);
								}
							}
						}
						pConf->Release();
					}
					pPin->Release();
				}
				pEnum->Release();
			}
			pFilter->Release();
		}
	}
	// BUGFIX: the original fell off the end of a non-void function (UB).
	return hr;
}
Exemplo n.º 18
0
	void TestCamera()
	{
		InitOpenCL();
		//TCHAR szDeviceName[80];
		//TCHAR szDeviceVersion[80];

		//for (int wIndex = 0; wIndex < 10; wIndex++) 
		//{
		//	if (capGetDriverDescription(
		//		wIndex, 
		//		szDeviceName, 
		//		sizeof (szDeviceName), 
		//		szDeviceVersion, 
		//		sizeof (szDeviceVersion)
		//		)) 
		//	{
		//		// Append name to list of installed capture drivers
		//		// and then let the user select a driver to use.
		//	}
		//} 

		//HWND hWndC = capCreateCaptureWindow(TEXT("PunkCapture"),
		//	WS_CHILD | WS_VISIBLE, 0, 0, 160, 120, *System::Window::Instance(), 1);

		//SendMessage (hWndC, WM_CAP_DRIVER_CONNECT, 0, 0L); 
		//// 
		//// Or, use the macro to connect to the MSVIDEO driver: 
		//// fOK = capDriverConnect(hWndC, 0); 
		//// 
		//// Place code to set up and capture video here. 
		//// 
		////capDriverDisconnect (hWndC); 

		//CAPDRIVERCAPS CapDriverCaps = { }; 
		//CAPSTATUS     CapStatus = { };

		//capDriverGetCaps(hWndC, &CapDriverCaps, sizeof(CAPDRIVERCAPS)); 

		//// Video source dialog box. 
		//if (CapDriverCaps.fHasDlgVideoSource)
		//{
		//	capDlgVideoSource(hWndC); 
		//}

		//// Video format dialog box. 
		//if (CapDriverCaps.fHasDlgVideoFormat) 
		//{
		//	capDlgVideoFormat(hWndC); 

		//	// Are there new image dimensions?
		//	capGetStatus(hWndC, &CapStatus, sizeof (CAPSTATUS));

		//	// If so, notify the parent of a size change.
		//} 

		//// Video display dialog box. 
		//if (CapDriverCaps.fHasDlgVideoDisplay)
		//{
		//	capDlgVideoDisplay(hWndC); 
		//}


		HRESULT hr;
		IGraphBuilder*  graph= 0;  hr = CoCreateInstance( CLSID_FilterGraph, 0, CLSCTX_INPROC,IID_IGraphBuilder, (void **)&graph );
		IMediaControl*  ctrl = 0;  hr = graph->QueryInterface( IID_IMediaControl, (void **)&ctrl );

		ICreateDevEnum* devs = 0;  hr = CoCreateInstance (CLSID_SystemDeviceEnum, 0, CLSCTX_INPROC, IID_ICreateDevEnum, (void **) &devs);
		IEnumMoniker*   cams = 0;  hr = devs?devs->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &cams, 0):0;  
		IMoniker*       mon  = 0;  hr = cams->Next (1,&mon,0);  // get first found capture device (webcam?)    
		IBaseFilter*    cam  = 0;  hr = mon->BindToObject(0,0,IID_IBaseFilter, (void**)&cam);
		hr = graph->AddFilter(cam, L"Capture Source"); // add web cam to graph as source
		IEnumPins*      pins = 0;  hr = cam?cam->EnumPins(&pins):0;   // we need output pin to autogenerate rest of the graph
		IPin*           pin  = 0;  hr = pins?pins->Next(1,&pin, 0):0; // via graph->Render
		hr = graph->Render(pin); // graph builder now builds whole filter chain including MJPG decompression on some webcams
		IEnumFilters*   fil  = 0;  hr = graph->EnumFilters(&fil); // from all newly added filters
		IBaseFilter*    rnd  = 0;  hr = fil->Next(1,&rnd,0); // we find last one (renderer)
		hr = rnd->EnumPins(&pins);  // because data we are intersted in are pumped to renderers input pin 
		hr = pins->Next(1,&pin, 0); // via Receive member of IMemInputPin interface
		IMemInputPin*   mem  = 0;  hr = pin->QueryInterface(IID_IMemInputPin,(void**)&mem);

		DsHook(mem,6,Receive); // so we redirect it to our own proc to grab image data

		hr = ctrl->Run();   

	};
Exemplo n.º 19
0
// Lists the video modes (size + max FPS) offered by the capture-category
// output pins of the named capture device.
// @param devName  friendly name of the capture device (resolved by getDevFilter).
// @return every distinct mode found; empty on any enumeration failure.
QVector<VideoMode> DirectShow::getDeviceModes(QString devName)
{
    QVector<VideoMode> modes;

    IBaseFilter* devFilter = getDevFilter(devName);
    if (!devFilter)
        return modes;

    // The outer loop tries to find a valid output pin
    GUID category;
    DWORD r2;
    IEnumPins *pins = nullptr;
    IPin *pin;
    if (devFilter->EnumPins(&pins) != S_OK)
    {
        devFilter->Release(); // BUGFIX: filter was leaked on this path
        return modes;
    }
    while (pins->Next(1, &pin, nullptr) == S_OK)
    {
        IKsPropertySet *p = nullptr;
        PIN_INFO info;

        pin->QueryPinInfo(&info);
        info.pFilter->Release(); // QueryPinInfo AddRef'd it; we only need info.dir
        if (info.dir != PINDIR_OUTPUT)
            goto next;
        if (pin->QueryInterface(IID_IKsPropertySet, (void**)&p) != S_OK)
            goto next;
        if (p->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY,
                nullptr, 0, &category, sizeof(GUID), &r2) != S_OK)
            goto next;
        if (!IsEqualGUID(category, PIN_CATEGORY_CAPTURE))
            goto next;

        // Now we can list the video modes for the current pin
        // Prepare for another wall of spaghetti DIRECT SHOW QUALITY code
        {
            IAMStreamConfig *config = nullptr;
            VIDEO_STREAM_CONFIG_CAPS *vcaps = nullptr;
            int size, n;
            if (pin->QueryInterface(IID_IAMStreamConfig, (void**)&config) != S_OK)
                goto next;
            if (config->GetNumberOfCapabilities(&n, &size) != S_OK)
                goto pinend;
            assert(size == sizeof(VIDEO_STREAM_CONFIG_CAPS));
            vcaps = new VIDEO_STREAM_CONFIG_CAPS;

            for (int i=0; i<n; ++i)
            {
                AM_MEDIA_TYPE* type = nullptr;
                if (config->GetStreamCaps(i, &type, (BYTE*)vcaps) != S_OK)
                    goto nextformat;

                if (!IsEqualGUID(type->formattype, FORMAT_VideoInfo)
                    && !IsEqualGUID(type->formattype, FORMAT_VideoInfo2))
                    goto nextformat;

                VideoMode mode;
                mode.width = vcaps->MaxOutputSize.cx;
                mode.height = vcaps->MaxOutputSize.cy;
                // MinFrameInterval is in 100 ns units, hence 1e7 / interval.
                mode.FPS = 1e7 / vcaps->MinFrameInterval;
                if (!modes.contains(mode))
                    modes.append(std::move(mode));

nextformat:
                // BUGFIX: GetStreamCaps may fail and leave type null; the
                // original dereferenced type->pbFormat unconditionally.
                if (type)
                {
                    if (type->pbFormat)
                        CoTaskMemFree(type->pbFormat);
                    CoTaskMemFree(type);
                }
            }
pinend:
            config->Release();
            delete vcaps;
        }
next:
        if (p)
            p->Release();
        pin->Release();
    }

    pins->Release();      // BUGFIX: enumerator was leaked
    devFilter->Release(); // BUGFIX: filter was leaked
    return modes;
}
Exemplo n.º 20
0
// Re-negotiates the output pin's allocator when the current media type or
// buffer size no longer fits the decoded audio.
// @param BufferSize  minimum required size of one sample buffer, in bytes.
// @param mt          media type the output should carry.
// @return S_OK when reconfigured (or reconfiguration was unnecessary within
//         the changed-type branch), S_FALSE when the existing connection is
//         already adequate, or a failure HRESULT.
HRESULT CAudioDecFilter::ReconnectOutput(long BufferSize, const CMediaType &mt)
{
	HRESULT hr;

	// The output pin must already be connected downstream.
	IPin *pPin = m_pOutput->GetConnected();
	if (pPin == NULL)
		return E_POINTER;

	IMemInputPin *pMemInputPin = NULL;
	hr = pPin->QueryInterface(IID_IMemInputPin, pointer_cast<void**>(&pMemInputPin));
	if (FAILED(hr)) {
		// Log: "Cannot obtain the IMemInputPin interface."
		OutputLog(TEXT("IMemInputPinインターフェースが取得できません。(%08x)\r\n"), hr);
	} else {
		IMemAllocator *pAllocator = NULL;
		hr = pMemInputPin->GetAllocator(&pAllocator);
		if (FAILED(hr)) {
			// Log: "Cannot obtain the IMemAllocator interface."
			OutputLog(TEXT("IMemAllocatorインターフェースが取得できません。(%08x)\r\n"), hr);
		} else {
			ALLOCATOR_PROPERTIES Props;
			hr = pAllocator->GetProperties(&Props);
			if (FAILED(hr)) {
				// Log: "Cannot get the IMemAllocator properties."
				OutputLog(TEXT("IMemAllocatorのプロパティが取得できません。(%08x)\r\n"), hr);
			} else {
				// Reconfigure only when the media type changed or the current
				// allocator has too few / too small buffers.
				if (mt != m_pOutput->CurrentMediaType()
						|| Props.cBuffers < NUM_SAMPLE_BUFFERS
						|| Props.cbBuffer < BufferSize) {
					hr = S_OK;
					if (Props.cBuffers < NUM_SAMPLE_BUFFERS
							|| Props.cbBuffer < BufferSize) {
						ALLOCATOR_PROPERTIES ActualProps;

						Props.cBuffers = NUM_SAMPLE_BUFFERS;
						// Request 1.5x headroom over the needed size.
						Props.cbBuffer = BufferSize * 3 / 2;
						// Log: "Setting the buffer size. (%ld bytes)"
						OutputLog(TEXT("バッファサイズを設定します。(%ld bytes)\r\n"), Props.cbBuffer);
						// Flush downstream, then decommit / resize / recommit
						// the allocator; any failure aborts the chain.
						if (SUCCEEDED(hr = m_pOutput->DeliverBeginFlush())
								&& SUCCEEDED(hr = m_pOutput->DeliverEndFlush())
								&& SUCCEEDED(hr = pAllocator->Decommit())
								&& SUCCEEDED(hr = pAllocator->SetProperties(&Props, &ActualProps))
								&& SUCCEEDED(hr = pAllocator->Commit())) {
							if (ActualProps.cBuffers < Props.cBuffers
									|| ActualProps.cbBuffer < BufferSize) {
								// Log: "The buffer size request was not accepted. (%ld / %ld)"
								OutputLog(TEXT("バッファサイズの要求が受け付けられません。(%ld / %ld)\r\n"),
										  ActualProps.cbBuffer, Props.cbBuffer);
								hr = E_FAIL;
								// Allocator cannot satisfy us: abort playback.
								NotifyEvent(EC_ERRORABORT, hr, 0);
							} else {
								// Log: "Pin reconnection succeeded"
								OutputLog(TEXT("ピンの再接続成功\r\n"));
								hr = S_OK;
							}
						} else {
							// Log: "Cannot reconnect the pin."
							OutputLog(TEXT("ピンの再接続ができません。(%08x)\r\n"), hr);
						}
					}
				} else {
					// Nothing to do — connection is already adequate.
					hr = S_FALSE;
				}
			}

			pAllocator->Release();
		}

		pMemInputPin->Release();
	}

	return hr;
}
Exemplo n.º 21
0
/*
 * Class:     sage_DShowMediaPlayer
 * Method:    getColorKey0
 * Signature: (J)Ljava/awt/Color;
 *
 * Returns the overlay transparency color key of the player's filter graph as
 * a java.awt.Color (alpha forced to 255).  Precedence:
 *   1. registry override (TransparentColorKey) — forced onto mixer pins,
 *   2. default key reported by an IOverlay pin,
 *   3. key read from (or forced onto) connected IMixerPinConfig pins.
 */
JNIEXPORT jobject JNICALL Java_sage_DShowMediaPlayer_getColorKey0
  (JNIEnv *env, jobject jo, jlong dataPtr)
{
	if (!dataPtr) return 0;
	CPlayerData* playData = (CPlayerData*) dataPtr;
	IGraphBuilder* pGraph = playData->GetGraph();
	if (!pGraph) return 0;
	BOOL forceColorKey = FALSE;
	HRESULT hr;
	DWORD holder;
	HKEY myKey;
	DWORD readType;
	DWORD hsize = sizeof(holder);
	// Near-black default that is unlikely to collide with real video.
	DWORD keyedColor = RGB(1,1,1);
	// Registry override: if TransparentColorKey is set, force that key.
	if (RegCreateKeyEx(HKEY_LOCAL_MACHINE, "SOFTWARE\\Frey Technologies\\Common\\DirectShow", 0, 0,
		REG_OPTION_NON_VOLATILE, KEY_ALL_ACCESS, 0, &myKey, 0) == ERROR_SUCCESS)
	{
		if (RegQueryValueEx(myKey, "TransparentColorKey", 0, &readType, (LPBYTE) &holder, &hsize) == ERROR_SUCCESS)
		{
			keyedColor = holder;
			forceColorKey = TRUE;
		}
		RegCloseKey(myKey);
	}
	COLORKEY overlayKey;
	overlayKey.KeyType = CK_RGB;
	overlayKey.PaletteIndex = 0;
	overlayKey.LowColorValue = keyedColor;
	overlayKey.HighColorValue = keyedColor;

	COLORKEY defaultKey;

	BOOL overlayIsUsed = FALSE;
	IEnumFilters *pEnum = NULL;
	if (!forceColorKey)
	{
		// Pass 1: walk every pin of every filter looking for an IOverlay pin
		// that can report the default color key.
		hr = pGraph->EnumFilters(&pEnum);
		if (SUCCEEDED(hr))
		{
			IBaseFilter *currFilt = NULL;
			while (pEnum->Next(1, &currFilt, NULL) == S_OK)
			{
				IEnumPins* pPinEnum = NULL;
				hr = currFilt->EnumPins(&pPinEnum);
				if (hr == NO_ERROR)
				{
					IPin *pPin = NULL;
					hr = E_FAIL;
					while(S_OK == pPinEnum->Next(1, &pPin, NULL))
					{
						IOverlay *pOverlayPin = NULL;
						hr = pPin->QueryInterface(IID_IOverlay, (void**)&pOverlayPin);
						if (SUCCEEDED(hr))
						{
							hr = pOverlayPin->GetDefaultColorKey(&defaultKey);
							if (!forceColorKey && SUCCEEDED(hr))
							{
								keyedColor = defaultKey.LowColorValue;
								slog((env, "Got the default color key 0x%x\r\n", keyedColor));
								//forceColorKey = TRUE;
							}
							SAFE_RELEASE(pOverlayPin);
						}
						SAFE_RELEASE(pPin);
					}
					SAFE_RELEASE(pPinEnum);
				}
				SAFE_RELEASE(currFilt);
			}
			SAFE_RELEASE(pEnum);
		}
	}
	pEnum = NULL;

	// Pass 2: read the key from (or force it onto) every connected mixer pin.
	hr = pGraph->EnumFilters(&pEnum);
	if (SUCCEEDED(hr))
	{
		IBaseFilter *currFilt = NULL;
		while (pEnum->Next(1, &currFilt, NULL) == S_OK)
		{
			IPin *overlayPin = NULL;
			IEnumPins* pPinEnum = NULL;
			hr = currFilt->EnumPins(&pPinEnum);
			// BUGFIX: the original ignored EnumPins' result and dereferenced
			// pPinEnum unconditionally; guard against a NULL enumerator.
			while (pPinEnum && pPinEnum->Next(1, &overlayPin, NULL) == S_OK)
			{
				// let's see if it's overlay
				IMixerPinConfig *pOverlayMix = NULL;
				hr = overlayPin->QueryInterface(IID_IMixerPinConfig, (void**)&pOverlayMix);
				if (SUCCEEDED(hr))
				{
					if (!forceColorKey)
						keyedColor = overlayKey.LowColorValue;
					else
						overlayKey.LowColorValue = overlayKey.HighColorValue = keyedColor;
					// Only act on pins that are actually connected.
					IPin* testPin = NULL;
					overlayPin->ConnectedTo(&testPin);
					BOOL currPinUsed = FALSE;
					if (testPin)
					{
						currPinUsed = TRUE;
						SAFE_RELEASE(testPin);
					}
					if (currPinUsed)
					{
						if (forceColorKey)
						{
							slog((env, "Setting the color key to 0x%x\r\n", keyedColor));
							hr = pOverlayMix->SetColorKey(&overlayKey);
						}
						else
						{
							hr = pOverlayMix->GetColorKey(&defaultKey, &keyedColor);
							slog((env, "Got the default overlay color key of 0x%x\r\n", keyedColor));
						}
//						HTESTPRINT(hr);
						if (!overlayIsUsed)
						{
							// Force the color key on all connected mixer pins to match
							overlayIsUsed = TRUE;
							forceColorKey = TRUE;
						}
					}
					SAFE_RELEASE(pOverlayMix);
				}
				SAFE_RELEASE(overlayPin);
			}
			SAFE_RELEASE(pPinEnum);
			SAFE_RELEASE(currFilt);
		}
		SAFE_RELEASE(pEnum);
	}
	// Cache the java.awt.Color class and its (int) constructor across calls.
	static jclass colorClass = (jclass) env->NewGlobalRef(env->FindClass("java/awt/Color"));
	static jmethodID constMeth = env->GetMethodID(colorClass, "<init>", "(I)V");
	// Set the alpha to be 255 for the color.
	keyedColor = keyedColor | 0xFF000000;
	slog((env, "Returning the color key as 0x%x\r\n", keyedColor));
	return env->NewObject(colorClass, constMeth, keyedColor);
}
Exemplo n.º 22
0
/// Sets the audio capture format (sample rate / channel count / sample size).
/// When the capture filter already exists the format is applied to its first
/// output pin; otherwise the values are stored for use when the filter is
/// created later.
/// @return TRUE when the format was applied (or merely stored), FALSE on error.
BOOL CAudioCapture::SetAudioFormat(ENUM_FREQUENCY_TYPE enFrequency,
	ENUM_CHANNEL_TYPE enChannel, ENUM_SAMPLE_TYPE enSample)
{
	if(NULL != m_pCaptureFilter)
	{
		BOOL bResult = FALSE;
		do
		{
			IPin* pOutPin = GetOutputPin(m_pCaptureFilter, (uint16_t)0);
			if(NULL != pOutPin)
			{
				IAMBufferNegotiation *pNeg = NULL;
				IAMStreamConfig *pCfg = NULL;

				// Get buffer negotiation interface
				HRESULT hr = pOutPin->QueryInterface(IID_IAMBufferNegotiation, (void **)&pNeg);
				if (FAILED(hr))
				{
					pOutPin->Release();
					break;
				}

				// Find number of bytes in one second
				long lBytesPerSecond = (long) (enSample * enFrequency * enChannel);

				// Buffer sized to suit the FAAC encoder (1024 samples/frame).
				long lBufferSize =  1024 * enSample * enChannel;

				// Set the buffer size based on selected settings
				ALLOCATOR_PROPERTIES prop={0};
				prop.cbBuffer = lBufferSize;
				prop.cBuffers = 6;
				prop.cbAlign = enSample * enChannel;
				hr = pNeg->SuggestAllocatorProperties(&prop);
				pNeg->Release();

				// Now set the actual format of the audio data
				hr = pOutPin->QueryInterface(IID_IAMStreamConfig, (void **)&pCfg);
				if (FAILED(hr))
				{
					pOutPin->Release();
					break;
				}            

				// Read current media type/format
				AM_MEDIA_TYPE *pmt = NULL;
				hr = pCfg->GetFormat(&pmt);

				if (SUCCEEDED(hr))
				{
					// BUGFIX: only patch the block when it really is a
					// WAVEFORMATEX; the original cast pbFormat blindly.
					if (FORMAT_WaveFormatEx == pmt->formattype && NULL != pmt->pbFormat)
					{
						// Fill in values for the new format
						WAVEFORMATEX *pWF = (WAVEFORMATEX *) pmt->pbFormat;
						pWF->nChannels = (WORD) enChannel;
						pWF->nSamplesPerSec = enFrequency;
						pWF->nAvgBytesPerSec = lBytesPerSecond;
						pWF->wBitsPerSample = (WORD) (enSample * 8);
						pWF->nBlockAlign = (WORD) (enSample * enChannel);

						// Set the new format on the output pin
						hr = pCfg->SetFormat(pmt);
					}
					UtilDeleteMediaType(pmt);
				}

				// Release interfaces
				pCfg->Release();
				pOutPin->Release();

				// BUGFIX: report whether the format was actually applied; the
				// original returned TRUE even when GetFormat/SetFormat failed.
				bResult = SUCCEEDED(hr);
			}
		}while(FALSE);

		return bResult;
	}
	else
	{
		// No filter yet: remember the settings for later creation.
		m_enFrequency = enFrequency;
		m_enChannel = enChannel;
		m_enSample = enSample;
		return TRUE;
	}
}
Exemplo n.º 23
0
// Tunes the VBox BDA tuner filter to ulFrequency (in Hz) by sending the
// PROPSETID_TUNER / KSPROPERTY_TUNER_FREQUENCY property to the tuner's
// input pin.  A frequency of 0 is treated as "nothing to do".
HRESULT SetVboxFrequency( JNIEnv *env, DShowCaptureInfo* pCapInfo, ULONG ulFrequency )
{
    HRESULT hr;
    DWORD dwSupported=0;  
    IEnumPins* pEnumPin = NULL;
    IPin* pInputPin = NULL;
    ULONG ulFetched;
    PIN_INFO infoPin;

	if ( pCapInfo->pBDATuner == NULL )
		return E_FAIL;

	if( ulFrequency == 0 )
	{
		slog( (env,"Vbox tuner skips frequency 0\r\n") );
		return S_OK;
	}

    IBaseFilter* pTunerDevice = pCapInfo->pBDATuner; 
    // BUGFIX: check EnumPins' result; the enumerator was used unchecked.
    hr = pTunerDevice->EnumPins(&pEnumPin);
    if( FAILED(hr) || pEnumPin == NULL )
    {
		slog( (env,"Vbox tuner pin enumeration failed \r\n") );
		return E_FAIL;
    }

    if( SUCCEEDED( hr = pEnumPin->Reset() ) )
    {
		// Walk the pins until the (first) input pin is found.
		while((hr = pEnumPin->Next( 1, &pInputPin, &ulFetched )) == S_OK)
		{
			pInputPin->QueryPinInfo(&infoPin);
				
			// Release AddRef'd filter, we don't need it
			if( infoPin.pFilter != NULL )
			infoPin.pFilter->Release();

			if(infoPin.dir == PINDIR_INPUT)
			break;

			// BUGFIX: release pins we are not keeping.
			SAFE_RELEASE(pInputPin);
		}

		if(hr != S_OK)
		{
			slog( (env,"Vbox tuner input pin query failed \r\n") );
			SAFE_RELEASE(pEnumPin);	// BUGFIX: enumerator was leaked
			return hr;
		}
    }
    else
    {
		slog( (env,"Vbox tuner reset failed \r\n") );
		SAFE_RELEASE(pEnumPin);		// BUGFIX: enumerator was leaked
		return E_FAIL;
    }
    SAFE_RELEASE(pEnumPin);			// BUGFIX: enumerator was leaked on success too
    
    IKsPropertySet *pKsPropertySet = NULL;
    pInputPin->QueryInterface(&pKsPropertySet);
	
    if (!pKsPropertySet)
    {
		slog( (env,"Vbox tuner input pin's QueryInterface failed \r\n") );
		SAFE_RELEASE(pInputPin);	// BUGFIX: pin was leaked
		return E_FAIL;
    }
        
    KSPROPERTY_TUNER_MODE_CAPS_S ModeCaps;
    KSPROPERTY_TUNER_FREQUENCY_S Frequency;
    memset(&ModeCaps,0,sizeof(KSPROPERTY_TUNER_MODE_CAPS_S));
    memset(&Frequency,0,sizeof(KSPROPERTY_TUNER_FREQUENCY_S));
    ModeCaps.Mode = AMTUNER_MODE_TV; 

    // Check whether the tuner driver supports GET on the mode-caps property.
    hr = pKsPropertySet->QuerySupported(PROPSETID_TUNER, 
          KSPROPERTY_TUNER_MODE_CAPS,&dwSupported);
    if(SUCCEEDED(hr) && dwSupported&KSPROPERTY_SUPPORT_GET)
    {
        DWORD cbBytes=0;
        hr = pKsPropertySet->Get(PROPSETID_TUNER,KSPROPERTY_TUNER_MODE_CAPS,
            INSTANCEDATA_OF_PROPERTY_PTR(&ModeCaps),
            INSTANCEDATA_OF_PROPERTY_SIZE(ModeCaps),
            &ModeCaps,
            sizeof(ModeCaps),
            &cbBytes);  
    }
    else
    {
		SAFE_RELEASE(pKsPropertySet);
		SAFE_RELEASE(pInputPin);	// BUGFIX: pin was leaked
		slog( (env,"Vbox tuner input pin's not support GET query \r\n") );
        return E_FAIL; 
    }

    Frequency.Frequency=ulFrequency; // in Hz
    // Let the driver fine-tune when it owns tuning; otherwise ask for exact.
    if(ModeCaps.Strategy==KS_TUNER_STRATEGY_DRIVER_TUNES)
        Frequency.TuningFlags=KS_TUNER_TUNING_FINE;
    else
        Frequency.TuningFlags=KS_TUNER_TUNING_EXACT;

    // Here the real magic starts
    hr = pKsPropertySet->Set(PROPSETID_TUNER,
        KSPROPERTY_TUNER_FREQUENCY,
        INSTANCEDATA_OF_PROPERTY_PTR(&Frequency),
        INSTANCEDATA_OF_PROPERTY_SIZE(Frequency),
        &Frequency,
        sizeof(Frequency));
    if(FAILED(hr))
    {
		slog( (env,"Vbox tuner input pin's set frequency %d failed hr=0x%x\r\n", Frequency.Frequency, hr ) );
		SAFE_RELEASE(pKsPropertySet);
		SAFE_RELEASE(pInputPin);	// BUGFIX: pin was leaked
        return E_FAIL; 
    }

	SAFE_RELEASE(pKsPropertySet);
	SAFE_RELEASE(pInputPin);		// BUGFIX: pin was leaked
	slog( (env,"Vbox tuner tuning overider frequency %d  successful. \r\n", ulFrequency) );
    return S_OK;
}
bool CFLVConverter::PreSaveToFLV(	const HWND & hMainWnd,
									const CParameters & param,
									const wchar_t * SourceFileName,
									IMediaEventEx ** pMEE,
                                    Timeline *  timeline)
{
IPin * pOutputPin = NULL;
IPin * pInputPin = NULL;
bool showLogo = true;


			m_llAudioSamplesCount = m_llVideoFramesCount = 0;
			

			int iLen = wcslen(SourceFileName)+1;
			DistFileName = new wchar_t[MAX_PATH];
			wcscpy_s(DistFileName, MAX_PATH, SourceFileName);
			DistFileName[iLen-2] = 'v';
			DistFileName[iLen-3] = 'l';
			DistFileName[iLen-4] = 'f';
			if (!OpenFileDialog(hMainWnd, SourceFileName, DistFileName))
			{
				delete[] DistFileName;
				return false;
			}

			//	Ask user about file quality.
			FileQualitySelector dlg(hMainWnd);
			if (IDOK == dlg.Show())
				SelectedFileQuality = dlg.SelectedQuality();
			else
				return false;

			SendMessage(hMainWnd, WM_NCPAINT, 1,0);

			iLen = wcslen(DistFileName)+14;
			tmpAVI = new wchar_t[iLen ];
			wcscpy_s(tmpAVI, iLen, DistFileName);
			wcscat_s(tmpAVI, iLen, L"59temp44.avi");
			// Create a DirectShow GraphBuilder object
			HRESULT hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **)&pGB);
			if (FAILED(hr))
			{
				CError::ErrMsg(hMainWnd, TEXT("Error %x: Filter graph built failed"), hr);
				Stop();
				return false;
			}
		
			// Get DirectShow interfaces
			hr = pGB->QueryInterface(IID_IMediaControl, (void **)&pMC);
			if (FAILED(hr))
			{
				CError::ErrMsg(hMainWnd, TEXT("Error %x: Create filter graph control"), hr);
				Stop();
				return false;
			}
			hr = pGB->QueryInterface(IID_IMediaEventEx, (void **)&pME);
			if (FAILED(hr))
			{
				CError::ErrMsg(hMainWnd, TEXT("Error %x: Create filter graph event handler"), hr);
				Stop();
				return false;
			}

			// Have the graph signal event via window callbacks
			hr = pME->SetNotifyWindow((OAHWND)hMainWnd, WM_FGNOTIFY2, 0);
			if (FAILED(hr))
			{
				CError::ErrMsg(hMainWnd, TEXT("Error %x: Failed graph notify message"), hr);
				Stop();
				return false;
			}

			*pMEE = pME;
			(*pMEE)->AddRef();

			if(SUCCEEDED(hr))
			{
				// Add the source filter to the graph
				hr = pGB->AddSourceFilter(SourceFileName, L"SOURCE", &pSource);
				if (FAILED(hr))
				{
					CError::ErrMsg(hMainWnd, TEXT("Error %x: Failed adding source filter"), hr);
					Stop();
					return false;
				}

				// Add the splitter filter to the graph
				hr = CoCreateInstance(CLSID_AviSplitter, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void **)&pAVISplitter);
				if (FAILED(hr))
				{
					CError::ErrMsg(hMainWnd, TEXT("Error %x: Failed creating splitter filter"), hr);
					Stop();
					return false;
				}
				// Add the mixer filter to the graph
				hr = CoCreateInstance(CLSID_CAdder, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void **)&pAdder);
				if (FAILED(hr))
				{
					CError::ErrMsg(hMainWnd, TEXT("Error %x: Failed creating adder filter"), hr);
					Stop();
					return false;
				}

                if(timeline)
                {
	                CComPtr <ITimelineFilter> cpTimelineFilter;
		            hr = pAdder->QueryInterface(IID_ITimelineFilter, (void **)&cpTimelineFilter);
		            if (FAILED(hr))
		            {
			            CError::ErrMsg(hMainWnd, TEXT("Error %x: Failed creating ITimelineFilter interface"), hr);
			            Stop();
			            return false;
		            }
                    CComPtr <ITimeline> cpTimeline;
                    hr = timeline->QueryInterface(IID_ITimeline, (void**) &cpTimeline);
                    if(SUCCEEDED(hr))
                    {
                        hr = cpTimelineFilter->AssignTimeline(cpTimeline);
                        if(FAILED(hr))
                        {
			                CError::ErrMsg(hMainWnd, TEXT("Error %x: Failed assigning ITimeline instance to filter"), hr);
			                Stop();
			                return false;
                        }
                    }
                    else
                    {
			            CError::ErrMsg(hMainWnd, TEXT("Error %x: Failed creating ITimeline interface"), hr);
			            Stop();
			            return false;
                    }
                }

				hr = pAdder->QueryInterface(IID_IAdder, (void **)&pAdderSettings);
				if (FAILED(hr))
				{
					CError::ErrMsg(hMainWnd, TEXT("Error %x: Failed creating adder settings interface"), hr);
					Stop();
					return false;
				}				
				
				pAdderSettings->SetIntervals((void*)param.GetAllDeletedInterval(),
											 (void*)param.GetWebDeletedInterval(),
											 (void*)param.GetWebPosInterval(),
											 (void*)param.GetArrowInterval(),
											 (void*)param.GetTextInterval());

                //  Generate thumbnail file name.
                TCHAR thumbnail[MAX_PATH];
                if (0 == _tcscpy_s(thumbnail, _countof(thumbnail), DistFileName))
                {
                    TCHAR *end = _tcsrchr(thumbnail, _T('.'));
                    if (!end)
                        end = &thumbnail[_tcslen(thumbnail)];

                    if ((end - &thumbnail[0]) + _tcslen(g_ThumbnailSuffix) < MAX_PATH)
                    {
                        _tcscpy(end, g_ThumbnailSuffix);
                        CT2W wThumbnail(thumbnail);
                        pAdderSettings->SetThumbnailPath(CComBSTR(wThumbnail));
                    }
                }

				// Add the logo filter to the graph
				if(showLogo)
				{
					hr = CoCreateInstance(CLSID_CWebinariaLogoFilter, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void **)&pLogo);
					if (FAILED(hr))
					{
						CError::ErrMsg(hMainWnd, TEXT("Error %x: Failed creating logo filter"), hr);
						Stop();
						return false;
					}
				}
				
				// Add the mux filter to the graph
				hr = CoCreateInstance(CLSID_AviDest, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void **)&pAVIMux);
				if (FAILED(hr))
				{
					CError::ErrMsg(hMainWnd, TEXT("Error %x: Failed creating destination avi filter"), hr);
					Stop();
					return false;
				}

				// Add the writer filter to the graph
				hr = CoCreateInstance(CLSID_FileWriter, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void **)&pWriter);
				if (FAILED(hr))
				{
					CError::ErrMsg(hMainWnd, TEXT("Error %x: Failed creating destination avi filter"), hr);
					Stop();
					return false;
				}

				IFileSinkFilter2 * pSink;
				hr = pWriter->QueryInterface(IID_IFileSinkFilter2, (void**)&pSink);
				pSink->SetFileName(tmpAVI, NULL);
				pSink->SetMode(AM_FILE_OVERWRITE);
				pSink->Release();

				// Get the interface for the first output pin
				hr = GetUnconnectedPin(pSource, PINDIR_OUTPUT, &pOutputPin);

				hr = JoinFilterToChain(pGB, pAVISplitter, L"AVI Splitter", &pOutputPin);
				if(SUCCEEDED(hr))
				{
					//asking Out pin of the avi splitter for IMediaSeeking:
					hr = pOutputPin->QueryInterface(IID_IMediaSeeking, (void **)&pMS);
					if (FAILED(hr))
					{
						CError::ErrMsg(hMainWnd, TEXT("Error %x: Create filter graph frame seeking"), hr);
						Stop();
						return false;
					}

					if(showLogo)
					{
						hr = JoinFilterToChain(pGB, pLogo, L"WebinariaLogoFilter", &pOutputPin);
					}

					hr = pMS->SetTimeFormat( &TIME_FORMAT_FRAME );
					if(SUCCEEDED(hr))
					{
						pMS->GetDuration( &m_llVideoFramesCount );
					}

				}
				if(SUCCEEDED(hr))
				{
					hr = JoinFilterToChain(pGB, pAdder, L"Adder", &pOutputPin);
				}
				// Add compressor
				if(SUCCEEDED(hr))
				{
					hr = CreateVideoCompressor(&pCompressor);
					if(SUCCEEDED(hr))
					{
						hr = JoinFilterToChain(pGB, pCompressor, L"Video Compressor", &pOutputPin);
					}
					else
					{
						CError::ErrMsg(hMainWnd, TEXT("Error %x: Cannot add video compressor filter"), hr);
					}
				}
				if(SUCCEEDED(hr))
				{
					hr = JoinFilterToChain(pGB, pAVIMux, L"AVI MUX", &pOutputPin);
				}
				if(SUCCEEDED(hr))
				{
					hr = JoinFilterToChain(pGB, pWriter, L"File Writer", &pOutputPin);
				}						
				SAFE_RELEASE(pOutputPin);
				
				if(SUCCEEDED(hr))
				{//try to connect optional streams:
					for(int i=0; i<2; i++)
					{
						GetUnconnectedPin(pAVISplitter, PINDIR_OUTPUT, &pOutputPin);
						if(pOutputPin)
						{
							if(ptVideo==GetPinType(pOutputPin ))
							{//second video stream (webcam)
								GetUnconnectedPin(pAdder, PINDIR_INPUT, &pInputPin);
								if(pInputPin)
								{
									hr = pGB->Connect(pOutputPin, pInputPin);
									SAFE_RELEASE(pInputPin);
									if(FAILED(hr))
									{
										CError::ErrMsg(hMainWnd, TEXT("Error %x: Cannot connect webcam stream to muxer"), hr);
									}

								}
							}
							else if(ptAudio==GetPinType(pOutputPin ))
							{// audio stream
								GetUnconnectedPin(pAVIMux, PINDIR_INPUT, &pInputPin);
								if (pOutputPin && pInputPin)
								{
									hr = AssembeAudioBranch(param, pOutputPin, pInputPin);
									SAFE_RELEASE(pInputPin);
									if(FAILED(hr))
									{
										CError::ErrMsg(hMainWnd, TEXT("Error %x: Cannot add render audio branch"), hr);
									}
								}
							}
							SAFE_RELEASE(pOutputPin);
							if(FAILED(hr))
							{
								break;
							}
						}
						else
						{
							break;
						}
					}
				}
				if(FAILED(hr))
				{
					Stop();
					return false;
				}
				AddGraphToRot(pGB, &dwRotReg);
			}

			hr = pMC->Run();

			return true;
		}