Example 1
//-------------------------------------------------------------------
// getVideoFormat:  Gets format information for the video stream.
//
HRESULT VidReader::getVideoFormat()
{
    HRESULT hr = S_OK;
    IMFMediaType *pType = NULL;
	GUID subtype = { 0 };

    // Get the media type from the stream.
    hr = m_pReader->GetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, &pType);
    if (FAILED(hr)) goto done;

    // Make sure it is a video format.
    hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
    if (FAILED(hr)) goto done;

    if (subtype != MFVideoFormat_RGB32)
    {
        hr = E_UNEXPECTED;
        goto done;
    }

    // Get the width and height
    hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &m_imagewidth, &m_imageheight);
    if (FAILED(hr)) goto done;

	// Get the frame rate
	UINT32 frN, frD;
	hr = MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, &frN, &frD);   
	if (FAILED(hr)) goto done;
	m_framerate = (double)frN / (double)frD;

done:
    
	SafeRelease(&pType);
    return hr;
}
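The snippets in this listing lean on a SafeRelease helper to drop COM references. The definition below is the usual one from the Media Foundation samples; if your project already defines its own, use that instead.

// Common SafeRelease helper used by the Media Foundation samples:
// releases a COM pointer and nulls it so a later release is harmless.
template <class T> void SafeRelease(T **ppT)
{
    if (*ppT)
    {
        (*ppT)->Release();
        *ppT = NULL;
    }
}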
Example 2
int CaptureClass::scanMediaTypes(unsigned int aWidth, unsigned int aHeight)
{
	HRESULT hr;
	HRESULT nativeTypeErrorCode = S_OK;
	DWORD count = 0;
	int besterror = 0xfffffff;
	int bestfit = 0;

	while (nativeTypeErrorCode == S_OK && besterror)
	{
		IMFMediaType * nativeType = NULL;
		nativeTypeErrorCode = mReader->GetNativeMediaType(
			(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
			count,
			&nativeType);
		ScopedRelease<IMFMediaType> nativeType_s(nativeType);

		if (nativeTypeErrorCode != S_OK) continue;

		// get the media subtype of this native type
		GUID nativeGuid = { 0 };
		hr = nativeType->GetGUID(MF_MT_SUBTYPE, &nativeGuid);

		if (FAILED(hr)) return bestfit;

		if (isMediaOk(nativeType, count))
		{
			UINT32 width, height;
			hr = MFGetAttributeSize(nativeType, MF_MT_FRAME_SIZE, &width, &height);

			if (FAILED(hr)) return bestfit;

			int error = 0;

			// prefer (hugely) to get too much than too little data..

			if (aWidth < width) error += (width - aWidth);
			if (aHeight < height) error += (height - aHeight);
			if (aWidth > width) error += (aWidth - width) * 2;
			if (aHeight > height) error += (aHeight - height) * 2;

			if (aWidth == width && aHeight == height) // ..but perfect match is a perfect match
				error = 0;

			if (besterror > error)
			{
				besterror = error;
				bestfit = count;
			}
			/*
			char temp[1024];
			sprintf(temp, "%d x %d, %x:%x:%x:%x %d %d\n", width, height, nativeGuid.Data1, nativeGuid.Data2, nativeGuid.Data3, nativeGuid.Data4, bestfit == count, besterror);
			OutputDebugStringA(temp);
			*/
		}

		count++;
	}
	return bestfit;
}
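Example 2 wraps each native type in a ScopedRelease guard. The project's real definition is not shown here; a minimal sketch that matches the usage above would be:

// Minimal RAII guard matching the usage in scanMediaTypes (a sketch, not the
// project's actual definition): releases the wrapped COM pointer on scope exit.
template <typename T>
class ScopedRelease
{
public:
    explicit ScopedRelease(T *p) : mPtr(p) {}
    ~ScopedRelease() { if (mPtr) mPtr->Release(); }
private:
    T *mPtr;
    ScopedRelease(const ScopedRelease &);            // non-copyable
    ScopedRelease &operator=(const ScopedRelease &); // non-assignable
};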
Example 3
STDMETHODIMP CDecWMV9MFT::SelectOutputType()
{
  HRESULT hr = S_OK;
  int idx = 0;

  m_OutPixFmt = LAVPixFmt_None;
  IMFMediaType *pMTOut = nullptr;
  while (SUCCEEDED(hr = m_pMFT->GetOutputAvailableType(0, idx++, &pMTOut)) && m_OutPixFmt == LAVPixFmt_None) {
    GUID outSubtype;
    if (SUCCEEDED(pMTOut->GetGUID(MF_MT_SUBTYPE, &outSubtype))) {
      if (outSubtype == MEDIASUBTYPE_NV12) {
        hr = m_pMFT->SetOutputType(0, pMTOut, 0);
        m_OutPixFmt = LAVPixFmt_NV12;
        break;
      } else if (outSubtype == MEDIASUBTYPE_YV12) {
        hr = m_pMFT->SetOutputType(0, pMTOut, 0);
        m_OutPixFmt = LAVPixFmt_YUV420;
        break;
      }
    }
    SafeRelease(&pMTOut);
  }

  // Release the type selected inside the loop; breaking out skips the
  // SafeRelease at the bottom of the loop body.
  SafeRelease(&pMTOut);

  return hr;
}
HRESULT ConfigureVideoEncoding(IMFCaptureSource *pSource, IMFCaptureRecordSink *pRecord, REFGUID guidEncodingType)
{
    IMFMediaType *pMediaType = NULL;
    IMFMediaType *pMediaType2 = NULL;
    GUID guidSubType = GUID_NULL;

    // Configure the video format for the recording sink.
    HRESULT hr = pSource->GetCurrentDeviceMediaType((DWORD)MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_VIDEO_RECORD , &pMediaType);
    if (FAILED(hr))
    {
        goto done;
    }

    hr = CloneVideoMediaType(pMediaType, guidEncodingType, &pMediaType2);
    if (FAILED(hr))
    {
        goto done;
    }


    hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &guidSubType);
    if(FAILED(hr))
    {
        goto done;
    }

    if(guidSubType == MFVideoFormat_H264_ES || guidSubType == MFVideoFormat_H264)
    {
        // When the webcam supports H264_ES or H264 we simply pass the stream through; the output of the capture engine is then the same as the webcam's native type.
        hr = pMediaType2->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
    }
    else
    {    
        UINT32 uiEncodingBitrate;
        hr = GetEncodingBitrate(pMediaType2, &uiEncodingBitrate);
        if (FAILED(hr))
        {
            goto done;
        }

        hr = pMediaType2->SetUINT32(MF_MT_AVG_BITRATE, uiEncodingBitrate);
    }

    if (FAILED(hr))
    {
        goto done;
    }

    // Connect the video stream to the recording sink.
    DWORD dwSinkStreamIndex;
    hr = pRecord->AddStream((DWORD)MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_VIDEO_RECORD, pMediaType2, NULL, &dwSinkStreamIndex);

done:
    SafeRelease(&pMediaType);
    SafeRelease(&pMediaType2);
    return hr;
}
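ConfigureVideoEncoding above depends on a CloneVideoMediaType helper that is not shown. The sketch below is one plausible shape for it; the exact set of attributes to carry over is an assumption, limited to those the recording path above cares about.

// Sketch of a CloneVideoMediaType helper (assumption: only major type, subtype,
// frame size, frame rate and interlace mode need to be copied).
HRESULT CloneVideoMediaType(IMFMediaType *pSrcMediaType, REFGUID guidSubType, IMFMediaType **ppNewMediaType)
{
    IMFMediaType *pNewMediaType = NULL;
    UINT32 w = 0, h = 0, num = 0, den = 0, interlace = 0;

    HRESULT hr = MFCreateMediaType(&pNewMediaType);
    if (FAILED(hr)) goto done;

    hr = pNewMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    if (FAILED(hr)) goto done;

    hr = pNewMediaType->SetGUID(MF_MT_SUBTYPE, guidSubType);
    if (FAILED(hr)) goto done;

    // Copy frame geometry and timing from the source type; in this sketch a
    // missing attribute is simply not copied.
    if (SUCCEEDED(MFGetAttributeSize(pSrcMediaType, MF_MT_FRAME_SIZE, &w, &h)))
        MFSetAttributeSize(pNewMediaType, MF_MT_FRAME_SIZE, w, h);
    if (SUCCEEDED(MFGetAttributeRatio(pSrcMediaType, MF_MT_FRAME_RATE, &num, &den)))
        MFSetAttributeRatio(pNewMediaType, MF_MT_FRAME_RATE, num, den);
    if (SUCCEEDED(pSrcMediaType->GetUINT32(MF_MT_INTERLACE_MODE, &interlace)))
        pNewMediaType->SetUINT32(MF_MT_INTERLACE_MODE, interlace);

    *ppNewMediaType = pNewMediaType;
    (*ppNewMediaType)->AddRef();

done:
    SafeRelease(&pNewMediaType);
    return hr;
}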
Example 5
void SetSampleMetaData(IMFSourceReader *pReader, DWORD streamIndex, PyObject *out)
{
	//Set meta data in output object
	IMFMediaType *pCurrentType = NULL;
	LONG plStride = 0;
	GUID majorType=GUID_NULL, subType=GUID_NULL;
	UINT32 width = 0;
	UINT32 height = 0;

	HRESULT hr = pReader->GetCurrentMediaType(streamIndex, &pCurrentType);
	if(!SUCCEEDED(hr) || pCurrentType == NULL)
	{
		cout << "Error 3\n";
		return;
	}
	BOOL isComp = FALSE;
	hr = pCurrentType->IsCompressedFormat(&isComp);
	hr = pCurrentType->GetGUID(MF_MT_MAJOR_TYPE, &majorType);
	LPCWSTR typePtr = GetGUIDNameConst(majorType);
	if(!SUCCEEDED(hr)) cout << "Error 4\n";
	hr = pCurrentType->GetGUID(MF_MT_SUBTYPE, &subType);
	if(!SUCCEEDED(hr)) cout << "Error 5\n";
	int isVideo = (majorType==MFMediaType_Video);
	if(isVideo)
	{
		GetDefaultStride(pCurrentType, &plStride);
		hr = MFGetAttributeSize(pCurrentType, MF_MT_FRAME_SIZE, &width, &height);
		if(!SUCCEEDED(hr)) cout << "Error 20\n";
	}

	LPCWSTR subTypePtr = GetGUIDNameConst(subType);
	//if(subTypePtr!=0) wcout << "subtype\t" << subTypePtr << "\n";

	PyDict_SetItemStringAndDeleteVar(out, "isCompressed", PyBool_FromLong(isComp));
	if(typePtr!=NULL) PyDict_SetItemStringAndDeleteVar(out, "type", PyUnicode_FromWideChar(typePtr, wcslen(typePtr)));
	if(subTypePtr!=NULL) PyDict_SetItemStringAndDeleteVar(out, "subtype", PyUnicode_FromWideChar(subTypePtr, wcslen(subTypePtr)));
	if(!isComp) PyDict_SetItemStringAndDeleteVar(out, "stride", PyInt_FromLong(plStride));
	PyDict_SetItemStringAndDeleteVar(out, "width", PyInt_FromLong(width));
	PyDict_SetItemStringAndDeleteVar(out, "height", PyInt_FromLong(height));

}
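SetSampleMetaData calls a GetDefaultStride helper to recover the stride of uncompressed frames. A version equivalent to the helper described in the Media Foundation documentation is sketched below.

// Gets the default stride of a video type, deriving it from the subtype and
// width when the MF_MT_DEFAULT_STRIDE attribute is not set.
HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride)
{
    LONG lStride = 0;

    // First try the attribute stored on the media type.
    HRESULT hr = pType->GetUINT32(MF_MT_DEFAULT_STRIDE, (UINT32*)&lStride);
    if (FAILED(hr))
    {
        // Attribute not set: compute the stride from the subtype and width.
        GUID subtype = GUID_NULL;
        UINT32 width = 0, height = 0;

        hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
        if (SUCCEEDED(hr))
            hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
        if (SUCCEEDED(hr))
            hr = MFGetStrideForBitmapInfoHeader(subtype.Data1, width, &lStride);

        // Cache the value on the type for later callers.
        if (SUCCEEDED(hr))
            (void)pType->SetUINT32(MF_MT_DEFAULT_STRIDE, (UINT32)lStride);
    }

    if (SUCCEEDED(hr))
        *plStride = lStride;

    return hr;
}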
Example 6
/*
Searches the device's native media types for one that matches the requested
subtype and frame size, returning it via foundpType.
*/
void FindVideoMode(IMFSourceReader *pReader, const GUID mediaSubType, int width, int height, /* out */ IMFMediaType *&foundpType)
{
	HRESULT hr = S_OK;
	DWORD dwMediaTypeIndex = 0;

	while (SUCCEEDED(hr))
	{
		IMFMediaType *pType = NULL;
		hr = pReader->GetNativeMediaType(0, dwMediaTypeIndex, &pType);
		if (hr == MF_E_NO_MORE_TYPES)
		{
			hr = S_OK;
			break;
		}
		else if (SUCCEEDED(hr))
		{
			// Examine the media type. (Not shown.)
			/*CMediaTypeTrace *nativeTypeMediaTrace = new CMediaTypeTrace(pType);
			printf("Native media type: %s.\n", nativeTypeMediaTrace->GetString());*/

			GUID videoSubType;
			UINT32 pWidth = 0, pHeight = 0;

			hr = pType->GetGUID(MF_MT_SUBTYPE, &videoSubType);
			MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &pWidth, &pHeight);

			if (SUCCEEDED(hr))
			{
				//printf("Video subtype %s, width=%i, height=%i.\n", STRING_FROM_GUID(videoSubType), pWidth, pHeight);

				if (videoSubType == mediaSubType && pWidth == width && pHeight == height)
				{
					foundpType = pType;
					printf("Media type successfully located.\n");
					break;
				}
			}

			pType->Release();
		}
		++dwMediaTypeIndex;
	}
}
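A hypothetical caller of FindVideoMode: request a specific subtype and frame size and, if a matching native type is found, make it the reader's current type. The 640x480 YUY2 values and the pReader variable are placeholders.

IMFMediaType *pSelectedType = NULL;
FindVideoMode(pReader, MFVideoFormat_YUY2, 640, 480, pSelectedType);

if (pSelectedType != NULL)
{
    // Apply the matching native type to the first video stream.
    HRESULT hr = pReader->SetCurrentMediaType(
        (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
        NULL,
        pSelectedType);
    pSelectedType->Release();
}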
HRESULT CASFManager::SetupStreamDecoder (WORD wStreamNumber, 
                                         GUID* pguidCurrentMediaType)
{
    if (! m_pContentInfo)
    {
        return MF_E_NOT_INITIALIZED;
    }

    if (wStreamNumber == 0)
    {
        return E_INVALIDARG;
    }

    IMFASFProfile* pProfile = NULL;
    IMFMediaType* pMediaType = NULL;
    IMFASFStreamConfig *pStream = NULL;

    GUID    guidMajorType = GUID_NULL;
    GUID    guidSubType = GUID_NULL;
    GUID    guidDecoderCategory = GUID_NULL;

    BOOL fIsCompressed = TRUE;

    CLSID *pDecoderCLSIDs = NULL;   // Pointer to an array of CLSIDs. 
    UINT32 cDecoderCLSIDs = 0;   // Size of the array.
    
    HRESULT hr = S_OK;

    //Get the profile object that stores stream information
    CHECK_HR(hr =  m_pContentInfo->GetProfile(&pProfile));

    //Get stream configuration object from the profile
    CHECK_HR(hr = pProfile->GetStreamByNumber(wStreamNumber, &pStream));

    //Get the media type
    CHECK_HR(hr = pStream->GetMediaType(&pMediaType));

    //Get the major media type
    CHECK_HR(hr = pMediaType->GetMajorType(&guidMajorType));
        
    //Get the sub media type
    CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &guidSubType));
    
    //find out if the media type is compressed
    CHECK_HR(hr = pMediaType->IsCompressedFormat(&fIsCompressed));

    if (fIsCompressed)
    {
        //get decoder category
        if (guidMajorType == MFMediaType_Video)
        {
            guidDecoderCategory = MFT_CATEGORY_VIDEO_DECODER;
        }
        else if (guidMajorType == MFMediaType_Audio)
        {
            guidDecoderCategory = MFT_CATEGORY_AUDIO_DECODER;
        }
        else
        {
            CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
        }

        // Look for a decoder.
        MFT_REGISTER_TYPE_INFO tinfo;
        tinfo.guidMajorType = guidMajorType;
        tinfo.guidSubtype = guidSubType;

        CHECK_HR(hr = MFTEnum(
            guidDecoderCategory,
            0,                  // Reserved
            &tinfo,             // Input type to match. (Encoded type.)
            NULL,               // Output type to match. (Don't care.)
            NULL,               // Attributes to match. (None.)
            &pDecoderCLSIDs,    // Receives a pointer to an array of CLSIDs.
            &cDecoderCLSIDs     // Receives the size of the array.
            ));

        // MFTEnum can return zero matches.
        if (cDecoderCLSIDs == 0)
        {
            hr = MF_E_TOPO_CODEC_NOT_FOUND;
        }
        else
        {
            //if the CDecoder instance does not exist, create one.
            if (!m_pDecoder)
            {
                CHECK_HR(hr = CDecoder::CreateInstance(&m_pDecoder));
            }
            
            //Load the first MFT in the array for the current media type
            CHECK_HR(hr = m_pDecoder->Initialize(pDecoderCLSIDs[0], pMediaType));
        }
        *pguidCurrentMediaType = guidMajorType;
    }
    else
    {
        // Not compressed. Don't need a decoder. 
         CHECK_HR(hr = MF_E_INVALIDREQUEST);
    }


    if (SUCCEEDED(hr))
    {
        TRACE((L"Stream decoder loaded.\n"));
    }

done:

    LOG_MSG_IF_FAILED(L"CASFManager::SetupStreamDecoder failed.\n", hr);
    
    SAFE_RELEASE(pProfile);
    SAFE_RELEASE(pMediaType);
    SAFE_RELEASE(pStream);

    CoTaskMemFree(pDecoderCLSIDs);

    return hr;
}
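SetupStreamDecoder relies on CHECK_HR, SAFE_RELEASE, TRACE and LOG_MSG_IF_FAILED macros defined elsewhere in the sample. The first two follow the usual goto-done cleanup pattern; plausible definitions (assumptions, not the sample's exact text) are:

#ifndef CHECK_HR
#define CHECK_HR(x) if (FAILED(x)) { goto done; }   // jump to cleanup on failure
#endif

#ifndef SAFE_RELEASE
#define SAFE_RELEASE(p) { if (p) { (p)->Release(); (p) = NULL; } }
#endif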
HRESULT ConfigureSourceReader(IMFSourceReader *pReader)
{
    // The list of acceptable types.
    GUID subtypes[] = { 
        MFVideoFormat_NV12, MFVideoFormat_YUY2, MFVideoFormat_UYVY,
        MFVideoFormat_RGB32, MFVideoFormat_RGB24, MFVideoFormat_IYUV
    };

    HRESULT hr = S_OK;
    BOOL    bUseNativeType = FALSE;

    GUID subtype = { 0 };

    IMFMediaType *pType = NULL;

    // If the source's native format matches any of the formats in 
    // the list, prefer the native format.

    // Note: The camera might support multiple output formats, 
    // including a range of frame dimensions. The application could
    // provide a list to the user and have the user select the
    // camera's output format. That is outside the scope of this
    // sample, however.

    hr = pReader->GetNativeMediaType(
        (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
        0,  // Type index
        &pType
        );

    if (FAILED(hr)) { goto done; }

    hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);

    if (FAILED(hr)) { goto done; }

    for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++)
    {
        if (subtype == subtypes[i])
        {
            hr = pReader->SetCurrentMediaType(
                (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 
                NULL, 
                pType
                );

            bUseNativeType = TRUE;
            break;
        }
    }

    if (!bUseNativeType)
    {
        // None of the native types worked. The camera might only offer
        // output in a compressed format such as MJPEG or DV.

        // Try adding a decoder.

        for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++)
        {
            hr = pType->SetGUID(MF_MT_SUBTYPE, subtypes[i]);

            if (FAILED(hr)) { goto done; }

            hr = pReader->SetCurrentMediaType(
                (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 
                NULL, 
                pType
                );

            if (SUCCEEDED(hr))
            {
                break;
            }
        }
    }

done:
    SafeRelease(&pType);
    return hr;
}
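Once ConfigureSourceReader succeeds, frames can be pulled synchronously with ReadSample. A minimal follow-up sketch, assuming pReader is the configured reader (it mirrors the commented-out call in Example 9):

IMFSample *pSample = NULL;
DWORD streamIndex = 0, flags = 0;
LONGLONG llTimeStamp = 0;

// Pull one frame from the first video stream (synchronous mode).
HRESULT hr = pReader->ReadSample(
    (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
    0,               // Flags.
    &streamIndex,    // Receives the actual stream index.
    &flags,          // Receives status flags.
    &llTimeStamp,    // Receives the time stamp.
    &pSample         // Receives the sample, or NULL (e.g. on a stream tick).
    );

SafeRelease(&pSample);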
Example 9
camera_t * camera_open(const char *portname, int highres)
{
	camera_internal_t *camera = (camera_internal_t*)malloc(sizeof(camera_internal_t));
	camera->reader = NULL;
	camera->size.width = 0;
	camera->size.height = 0;

	if (highres)
	{
		console_printf("camera: highres is not supported on windows (yet).\n");
		highres = 0;
	}

	HRESULT hr = S_OK;

	// Initialize Media Foundation
	if (SUCCEEDED(hr))
	{
		hr = MFStartup(MF_VERSION);
	}
	///////////////////////////////////////////
	IMFAttributes *pAttributes = NULL;
	UINT32 m_cDevices = 0;
	IMFActivate **m_ppDevices = NULL;

	// Initialize an attribute store. We will use this to
	// specify the enumeration parameters.

	hr = MFCreateAttributes(&pAttributes, 1);

	// Ask for source type = video capture devices
	if (SUCCEEDED(hr))
	{
		hr = pAttributes->SetGUID(
			MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
			MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
			);
	}

	// Enumerate devices.
	if (SUCCEEDED(hr))
	{
		hr = MFEnumDeviceSources(pAttributes, &m_ppDevices, &m_cDevices);
	}

	SafeRelease(&pAttributes);

	/////////////////////////////////////////////////
	IMFActivate *pActivate = NULL;
	if (m_cDevices)
	{
		console_printf("camera: there are %d camera devices connected (0..%d).\n", m_cDevices, m_cDevices > 0 ? m_cDevices - 1 : 0);
		int device = strtol(portname, 0, 10);
		if (device < 0 || device >= m_cDevices)
			console_printf("camera: device %d does not exist.\n", device);
		else
			pActivate = m_ppDevices[device];
	}
	else
	{
		console_printf("camera: could not find a device\n");
	}
	/////////////////////////////////////////////////

	IMFMediaSource *pSource = NULL;

	//EnterCriticalSection(&m_critsec);

	if (!pActivate)
	{
		// No usable device was selected above; bail out instead of
		// dereferencing a NULL activation object.
		free(camera);
		return 0;
	}

	// Create the media source for the device.
	hr = pActivate->ActivateObject(
		__uuidof(IMFMediaSource),
		(void**)&pSource
		);
	///////////////////////////////////////////

	//IMFAttributes *pAttributes = NULL;

	/*hr = MFCreateAttributes(&pAttributes, 2);

	if (SUCCEEDED(hr))
	{
	hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this);
	}*/

	if (SUCCEEDED(hr))
	{
		hr = MFCreateSourceReaderFromMediaSource(
			pSource,
			NULL,//pAttributes,
			&camera->reader
			);
	}

	//SafeRelease(&pAttributes);

	////////////////////////////////////////////////////
	// The list of acceptable types.
	GUID subtypes[] = {
		MFVideoFormat_NV12, MFVideoFormat_YUY2, MFVideoFormat_UYVY,
		MFVideoFormat_RGB32, MFVideoFormat_RGB24, MFVideoFormat_IYUV
	};

	//HRESULT hr = S_OK;
	BOOL bUseNativeType = FALSE;

	GUID subtype = { 0 };

	IMFMediaType *pType = NULL;

	UINT32 width = 0, height = 0;
	int selectedSubtype = -1;

	// If the source's native format matches any of the formats in
	// the list, prefer the native format.

	// Note: The camera might support multiple output formats,
	// including a range of frame dimensions. The application could
	// provide a list to the user and have the user select the
	// camera's output format. That is outside the scope of this
	// sample, however.

	DWORD selectedStreamIndex = MF_SOURCE_READER_FIRST_VIDEO_STREAM;

	//while (true)
	//{
		hr = camera->reader->GetNativeMediaType(
			selectedStreamIndex,
			0,  // Type index
			&pType
			);

		if (FAILED(hr)) { console_printf("camera: could not get media type\n"); goto done; }

		hr = ::MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);

		if (FAILED(hr)) { console_printf("camera: could not get resolution\n"); goto done; }

		//if (width != 1280 || height != 960)
		//{
			console_printf("camera: found resolution %dx%d\n", width, height);
			//selectedStreamIndex++;
			//continue;
		//}

		camera->size.width = width;
		camera->size.height = height;
		//break;
	//}


	/*UINT32 num = 0, denom = 0;
	hr = ::MFGetAttributeRatio(pType, MF_MT_FRAME_RATE_RANGE_MAX, &num, &denom);

	if (FAILED(hr)) { goto done; }*/

	//hr = ::MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, 1280, 960);

	//if (FAILED(hr)) { goto done; }

	/*hr = ::MFSetAttributeRatio(pType, MF_MT_FRAME_RATE, num, denom);

	if (FAILED(hr)) { goto done; }*/

	hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);

	if (FAILED(hr)) { console_printf("camera: could not get stream type(1)\n"); goto done; }

	for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++)
	{
		if (subtype == subtypes[i])
		{
			hr = camera->reader->SetCurrentMediaType(
				(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
				NULL,
				pType
				);

			bUseNativeType = TRUE;
			selectedSubtype = i;
			break;
		}
	}

	if (!bUseNativeType)
	{
		// None of the native types worked. The camera might only offer
		// output in a compressed format such as MJPEG or DV.

		// Try adding a decoder.

		for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++)
		{
			hr = pType->SetGUID(MF_MT_SUBTYPE, subtypes[i]);

			if (FAILED(hr)) { console_printf("camera: could not get stream type(2)\n"); goto done; }

			hr = camera->reader->SetCurrentMediaType(
				(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
				NULL,
				pType
				);

			if (SUCCEEDED(hr))
			{
				selectedSubtype = i;
				break;
			}
		}
	}

/*	hr = ::MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
	WIDTH = width;
	HEIGHT = height;*/

	if (FAILED(hr)) { console_printf("camera: could not find stream type\n"); goto done; }

done:
	SafeRelease(&pType);

	console_printf("camera: selected type: %d, native: %s, resolution: %dx%d\n",
		selectedSubtype, bUseNativeType ? "yes" : "no", camera->size.width, camera->size.height);

	///////////////////////////////////////
	/*if (SUCCEEDED(hr))
	{
	hr = camera->reader->GetCurrentMediaType(
	(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
	&pType
	);
	}

	if (SUCCEEDED(hr))
	{
	// Register the color converter DSP for this process, in the video
	// processor category. This will enable the sink writer to enumerate
	// the color converter when the sink writer attempts to match the
	// media types.

	hr = MFTRegisterLocalByCLSID(
	__uuidof(CColorConvertDMO),
	MFT_CATEGORY_VIDEO_PROCESSOR,
	L"",
	MFT_ENUM_FLAG_SYNCMFT,
	0,
	NULL,
	0,
	NULL
	);
	}*/

	/////////////////////////////////////////////////

/*	IMFSample *pSample = NULL;
	DWORD streamIndex = 0, flags = 0;
	LONGLONG llTimeStamp = 0;

	hr = camera->reader->ReadSample(
		(DWORD)MF_SOURCE_READER_ANY_STREAM,    // Stream index.
		0,                              // Flags.
		&streamIndex,                   // Receives the actual stream index.
		&flags,                         // Receives status flags.
		&llTimeStamp,                   // Receives the time stamp.
		&pSample                        // Receives the sample or NULL.
		);*/

	if (selectedSubtype != 4) // index 4 in subtypes[] is MFVideoFormat_RGB24
	{
		console_printf("camera: unexpected stream type.\n");
		SafeRelease(&camera->reader);
		free(camera);
		return 0;
	}
	return (camera_t*)camera;
}
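camera_open stores its state in a camera_internal_t that the snippet does not show. Reconstructed from the fields it touches, the struct presumably looks roughly like this (names are assumptions):

// Hypothetical reconstruction of the state used by camera_open above.
typedef struct camera_size_t
{
	int width;
	int height;
} camera_size_t;

typedef struct camera_internal_t
{
	IMFSourceReader *reader;  // source reader for the selected capture device
	camera_size_t    size;    // native frame size reported by the device
} camera_internal_t;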
Example 10
	HRESULT VideoCapture::ConfigureSourceReader()
	{
		HRESULT hr;
		IMFMediaType *pNativeType = NULL;
		GUID majorType;
		GUID subtype;

		hr = this->SourceReader->GetNativeMediaType(
			MF_SOURCE_READER_FIRST_VIDEO_STREAM, 
			0, 
			&pNativeType
		);
		if (hr != S_OK)
		{
			return hr;
		}

		hr = pNativeType->GetGUID(MF_MT_MAJOR_TYPE, &majorType);
		if (hr != S_OK)
		{
			pNativeType->Release();
			return hr;
		}

		hr = pNativeType->GetGUID(MF_MT_SUBTYPE, &subtype);

		// The native type is no longer needed once its GUIDs have been read;
		// release it here so the early returns below do not leak it.
		pNativeType->Release();
		pNativeType = NULL;

		if (hr != S_OK)
		{
			return hr;
		}

		hr = this->CreateInputMediaType(majorType, subtype);
		if (hr != S_OK)
        {
			return hr;
        }

		hr = this->SourceReader->SetCurrentMediaType(
			MF_SOURCE_READER_FIRST_VIDEO_STREAM, 
			NULL, 
			this->InputMediaType
		);
		if (hr != S_OK)
        {
			return hr;
        }

	    GUID subtypes[] = { 
			MFVideoFormat_NV12, 
			MFVideoFormat_UYVY,
			MFVideoFormat_RGB32, 
			MFVideoFormat_RGB24, 
			MFVideoFormat_IYUV,
			MFVideoFormat_YUY2
		};
		bool isSubtypeSetted = false;

		for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++)
        {
			if (subtype == subtypes[i])
			{
				isSubtypeSetted = true;
				break;
			}
        }
		if (isSubtypeSetted)
		{
			return hr;
		}

		for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++)
		{
			hr = this->InputMediaType->SetGUID(MF_MT_SUBTYPE, subtypes[i]);
			if (hr != S_OK)
			{
				return hr;
			}

			hr = this->SourceReader->SetCurrentMediaType(
				MF_SOURCE_READER_FIRST_VIDEO_STREAM, 
				NULL, 
				this->InputMediaType
			);
			if (hr == S_OK)
			{
				break;
			}
		}

		return hr;
	}
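Example 10 calls a CreateInputMediaType member that is not shown. A hedged sketch consistent with how it is used above; the signature, member names and error handling are assumptions:

	// Sketch: build InputMediaType carrying just the major type and subtype
	// read from the camera's native type.
	HRESULT VideoCapture::CreateInputMediaType(REFGUID majorType, REFGUID subtype)
	{
		HRESULT hr = MFCreateMediaType(&this->InputMediaType);
		if (hr != S_OK)
		{
			return hr;
		}

		hr = this->InputMediaType->SetGUID(MF_MT_MAJOR_TYPE, majorType);
		if (hr != S_OK)
		{
			return hr;
		}

		return this->InputMediaType->SetGUID(MF_MT_SUBTYPE, subtype);
	}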