//----------------------------------------------------------------------------
  long MediaFoundationVideoDevice::CheckDevice(IMFAttributes *pAttributes, IMFActivate **pDevice)
  {
    HRESULT hr = S_OK;
    *pDevice = NULL;

    IMFActivate **ppDevices = NULL;
    UINT32 count=0;
    if (SUCCEEDED(hr))
    {
      hr = MFEnumDeviceSources(pAttributes, &ppDevices, &count);
      if (SUCCEEDED(hr))
      {
        if (count <= 0)
        {
          LOG_ERROR("MediaFoundationVideoDevice::CheckDevice failed: No devices available");
          hr = S_FALSE;
        }
        else if (this->DeviceIndex >= count)
        {
          LOG_ERROR("MediaFoundationVideoDevice::CheckDevice failed: Device index " << this->DeviceIndex << " is of current device is out of the range of availalbe devices (0.."<<count-1<<")");
          hr = S_FALSE;
        }
      }
      else
      {
        LOG_ERROR("MediaFoundationVideoDevice::CheckDevice failed: List of DeviceSources cannot be enumerated.");
      }
    }

    if (SUCCEEDED(hr))
    {
      wchar_t *newFriendlyName = NULL;
      hr = ppDevices[this->DeviceIndex]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &newFriendlyName, NULL);
      if (SUCCEEDED(hr))
      {
        if(wcscmp(newFriendlyName, this->FriendlyName) != 0)
        {
          LOG_ERROR("MediaFoundationVideoDevice::CheckDevice failed: Friendly name of device at index " << this->DeviceIndex << " does not match the expected device name");
          hr = S_FALSE;
        }
        CoTaskMemFree(newFriendlyName);
      }
      else
      {
        LOG_ERROR("MediaFoundationVideoDevice::CheckDevice failed: Friendly name of device at index " << this->DeviceIndex << " cannot be retrieved");
      }
    }

    if (SUCCEEDED(hr))
    {
      *pDevice = ppDevices[this->DeviceIndex];
      (*pDevice)->AddRef();
    }

    for(UINT32 i = 0; i < count; i++)
    {
      SafeRelease(&ppDevices[i]);
    }
    CoTaskMemFree(ppDevices);
    return hr;
  }
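//----------------------------------------------------------------------------
// Several of the snippets on this page call a SafeRelease helper that is not
// shown. A minimal sketch of the usual implementation is given here; the
// individual projects may define it slightly differently.
template <class T>
void SafeRelease(T **ppT)
{
  if (*ppT)
  {
    (*ppT)->Release();  // drop the COM reference
    *ppT = NULL;        // and null out the caller's pointer
  }
}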
// Initialize Device List
HRESULT DeviceList::EnumerateDevices()
{
	HRESULT hr = S_OK;
	IMFAttributes *pAttributes = NULL;

	Clear();

	// Initialize an attribute store. We will use this to
	// specify the enumeration parameters.

	hr = MFCreateAttributes(&pAttributes, 1);

	// Ask for source type = video capture devices
	if (SUCCEEDED(hr))
	{
		hr = pAttributes->SetGUID(
			MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
			MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
			);
	}

	// Enumerate devices.
	if (SUCCEEDED(hr))
	{
		hr = MFEnumDeviceSources(pAttributes, &m_ppDevices, &m_cDevices);
	}

	SafeRelease(&pAttributes);

	return hr;
}
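// DeviceList::EnumerateDevices above caches the activation objects in
// m_ppDevices / m_cDevices; the Clear() it calls first is not shown. Assuming
// those member names, it would follow the standard cleanup pattern for
// MFEnumDeviceSources results:
void DeviceList::Clear()
{
	for (UINT32 i = 0; i < m_cDevices; i++)
	{
		SafeRelease(&m_ppDevices[i]);
	}
	CoTaskMemFree(m_ppDevices);
	m_ppDevices = NULL;
	m_cDevices = 0;
}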
Example #3
int CountCaptureDevices()
{
	HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);

	if (FAILED(hr)) return 0;

	hr = MFStartup(MF_VERSION);

	if (FAILED(hr)) return 0;

	// choose device
	IMFAttributes *attributes = NULL;
	hr = MFCreateAttributes(&attributes, 1);
	ScopedRelease<IMFAttributes> attributes_s(attributes);

	if (FAILED(hr)) return 0;

	hr = attributes->SetGUID(
		MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
		MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
		);
	if (FAILED(hr)) return 0;

	ChooseDeviceParam param = { 0 };
	hr = MFEnumDeviceSources(attributes, &param.mDevices, &param.mCount);

	if (FAILED(hr)) return 0;

	return param.mCount;
}
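// CountCaptureDevices above (like GetCaptureDeviceName and CaptureClass::initCapture
// further down) relies on a ScopedRelease RAII wrapper and a ChooseDeviceParam
// struct that are not shown. A minimal sketch, assuming the member names used in
// those snippets:
template <typename T>
struct ScopedRelease
{
	explicit ScopedRelease(T *aObject) : mObject(aObject) {}
	~ScopedRelease() { if (mObject) mObject->Release(); }
	T *mObject;
};

struct ChooseDeviceParam
{
	IMFActivate **mDevices;  // array allocated by MFEnumDeviceSources
	UINT32 mCount;           // number of entries in mDevices

	~ChooseDeviceParam()
	{
		// Release every activation object and free the array, as the
		// MFEnumDeviceSources contract requires.
		for (UINT32 i = 0; i < mCount; i++)
		{
			if (mDevices[i]) mDevices[i]->Release();
		}
		CoTaskMemFree(mDevices);
	}
};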
Example #4
static int mf_enumerate_cameras(int (*callback)(IMFSourceReader* reader)) {
    com_t<IMFAttributes> config;
    if_failed_return_result(MFCreateAttributes(&config, 1));
    if_failed_return_result(config->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID));
    UINT32 num = 0;
    IMFActivate** factory = null;  // [count]
    if_failed_return_result(MFEnumDeviceSources(config, &factory, &num));
    if (num == 0) {
        return 0;
    }
    for (int i = 0; i < (int)num; i++) {
        const char* sl = "";
        const char* fn = "";
        WCHAR* symbolic_link = null;
        WCHAR* friendly_name = null;
        UINT32 size = 0;
        if (OK(factory[i]->GetAllocatedString(MF_SL, &symbolic_link, &size))) { sl = wcs2str(symbolic_link); }
        if (OK(factory[i]->GetAllocatedString(MF_FN, &friendly_name, &size))) { fn = wcs2str(friendly_name); }
        com_t<IMFMediaSource> source;
        if_failed_return_result(factory[i]->ActivateObject(IID_PPV_ARGS(&source)));
        com_t<IMFSourceReader> reader;
        if_failed_return_result(MFCreateSourceReaderFromMediaSource(source, null, &reader));
        print("\nMMF camera %d %s %s\n", i, fn, sl);
        callback(reader);
        com_release(factory[i]);
        CoTaskMemFree(symbolic_link);   // strings from GetAllocatedString are owned by the caller
        CoTaskMemFree(friendly_name);
    }
    CoTaskMemFree(factory);
    return S_OK;
} 
	bool VideoCapture::CreateMediaSource()
	{
		HRESULT hr;

		pin_ptr<IMFAttributes *> pConfig = &(this->Config);
		hr = MFCreateAttributes(pConfig, 1);
		if (hr != S_OK)
		{ 
			MFUtil::ShowErrorNameFromCode(hr);
			return false;
		}

		hr = this->Config->SetGUID(
			MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, 
			MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
		); 
		if (hr != S_OK)
		{ 
			MFUtil::ShowErrorNameFromCode(hr);
			return false;
		}

		pin_ptr<IMFActivate **> pDevices = &(this->Devices);
		pin_ptr<UINT32> pCount = &(this->DeviceCount);
		hr = MFEnumDeviceSources(this->Config, pDevices, pCount);
		if (hr != S_OK)
		{ 
			MFUtil::ShowErrorNameFromCode(hr);
			return false;
		}
		if (this->DeviceCount <= 0) 
		{ 
			MFUtil::ShowMessage(TEXT("Device Not Found."), ML_ERROR);
			return false;
		} 

		// Debug: check the name of the device that will be used (the first one in the list)
		LPWSTR devName = NULL;
		UINT32 devNameLen = 0;
		hr = this->Devices[0]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &devName, &devNameLen);
		if (hr != S_OK)
		{ 
			MFUtil::ShowErrorNameFromCode(hr);
			return false;
		}
		MFUtil::ShowMessage(devName, ML_DEBUG);
		CoTaskMemFree(devName);   // string allocated by GetAllocatedString must be freed by the caller

		pin_ptr<IMFMediaSource *> pSource = &(this->Source);
		hr = this->Devices[0]->ActivateObject(
			__uuidof(IMFMediaSource), 
			reinterpret_cast<void**>(pSource)
		);  
		if (hr != S_OK)
		{ 
			MFUtil::ShowErrorNameFromCode(hr);
			return false;
		}

		return true;
	}
    void OnChooseDevice(HWND hwnd)
    {
        ChooseDeviceParam param;

        IMFAttributes *pAttributes = NULL;

        HRESULT hr = MFCreateAttributes(&pAttributes, 1);
        if (FAILED(hr))
        {
            goto done;
        }

        // Ask for source type = video capture devices
        hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
                MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
        if (FAILED(hr))
        {
            goto done;
        }

        // Enumerate devices.
        hr = MFEnumDeviceSources(pAttributes, &param.ppDevices, &param.count);
        if (FAILED(hr))
        {
            goto done;
        }

        // Ask the user to select one.
        INT_PTR result = DialogBoxParam(GetModuleHandle(NULL),
            MAKEINTRESOURCE(IDD_CHOOSE_DEVICE), hwnd,
            ChooseDeviceDlgProc, (LPARAM)&param);

        if ((result == IDOK) && (param.selection != (UINT32)-1))
        {
            UINT iDevice = param.selection;

            if (iDevice >= param.count)
            {
                hr = E_UNEXPECTED;
                goto done;
            }

            hr = g_pEngine->InitializeCaptureManager(hPreview, param.ppDevices[iDevice]);
            if (FAILED(hr))
            {
                goto done;
            }
            SafeRelease(&pSelectedDevice);
            pSelectedDevice = param.ppDevices[iDevice];
            pSelectedDevice->AddRef();
        }

    done:
        SafeRelease(&pAttributes);
        if (FAILED(hr))
        {
            ShowError(hwnd, IDS_ERR_SET_DEVICE, hr);
        }
        UpdateUI(hwnd);
    }
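    // The ChooseDeviceParam used by OnChooseDevice above (and by the second
    // OnChooseDevice at the end of this page) is the one from the Microsoft
    // capture samples, not the mDevices/mCount variant sketched earlier.
    // Assuming the field names used in these snippets, it is roughly:
    struct ChooseDeviceParam
    {
        IMFActivate **ppDevices;  // array returned by MFEnumDeviceSources
        UINT32      count;        // number of elements in ppDevices
        UINT32      selection;    // index picked in the dialog, or (UINT32)-1 if none
    };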
Example #7
void GetCaptureDeviceName(int aDevice, char * aNamebuffer, int aBufferlength)
{
	int i;
	if (!aNamebuffer || aBufferlength <= 0)
		return;

	aNamebuffer[0] = 0;

	HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);

	if (FAILED(hr)) return;

	hr = MFStartup(MF_VERSION);

	if (FAILED(hr)) return;

	// choose device
	IMFAttributes *attributes = NULL;
	hr = MFCreateAttributes(&attributes, 1);
	ScopedRelease<IMFAttributes> attributes_s(attributes);

	if (FAILED(hr)) return;

	hr = attributes->SetGUID(
		MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
		MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
		);

	if (FAILED(hr)) return;

	ChooseDeviceParam param = { 0 };
	hr = MFEnumDeviceSources(attributes, &param.mDevices, &param.mCount);

	if (FAILED(hr)) return;

	if (aDevice < (signed)param.mCount)
	{
		WCHAR *name = 0;
		UINT32 namelen = 255;
		hr = param.mDevices[aDevice]->GetAllocatedString(
			MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
			&name,
			&namelen
			);
		if (SUCCEEDED(hr) && name)
		{
			i = 0;
			while (i < aBufferlength - 1 && i < (signed)namelen && name[i] != 0)
			{
				aNamebuffer[i] = (char)name[i];
				i++;
			}
			aNamebuffer[i] = 0;

			CoTaskMemFree(name);
		}
	}
}
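// A possible caller for CountCaptureDevices and GetCaptureDeviceName above;
// just a usage sketch, the helpers themselves are the snippets shown earlier.
// (printf comes from <cstdio>.)
void ListCaptureDevices()
{
	int count = CountCaptureDevices();
	printf("%d capture device(s) found\n", count);
	for (int i = 0; i < count; i++)
	{
		char name[256];
		GetCaptureDeviceName(i, name, sizeof(name));
		printf("  %d: %s\n", i, name);
	}
}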
Example #8
IMFActivate* WinCaptureDevice::ChooseFirst(std::string& error)
{
	IMFActivate* result = NULL;
	HRESULT hr = S_OK;
	UINT iDevice = 0;   // Index into the array of devices
	BOOL bCancel = FALSE;

	// Declared up front so they are initialized before any goto that jumps to the cleanup code below.
	IMFActivate **devices = NULL;
	uint numDevices = 0;

	// Initialize an attribute store to specify enumeration parameters.
	IMFAttributes* pAttributes = NULL;
	hr = MFCreateAttributes(&pAttributes, 1);

	if (FAILED(hr)) { goto done; }

	// Ask for source type = video capture devices.

	hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);

	if (FAILED(hr)) { goto done; }

	// Enumerate devices.
	hr = MFEnumDeviceSources(pAttributes, &devices, &numDevices);

	if (FAILED(hr)) { goto done; }

	if (numDevices > 0)
		result = devices[0];

done:

	SafeRelease(&pAttributes);

	for (uint i = 0; i < numDevices; i++)
	{
		if (devices[i] != result)
			SafeRelease(&devices[i]);
	}
	CoTaskMemFree(devices);

	if (FAILED(hr))
	{
		//ShowErrorMessage(L"Cannot create a video capture device", hr);
	}

	return result;
}
Example #9
HRESULT CMFCamCapture::enumVideoDevices()
{
    IMFAttributes *pAttributes = NULL;
    IMFActivate **ppDevices = NULL;
    UINT32 count = 0;   // initialized here so the cleanup loop is safe even when an early goto fires

    // Create an attribute store to specify the enumeration parameters.
    HRESULT hr = MFCreateAttributes(&pAttributes, 1);
    if (FAILED(hr)) {
        goto done;
    }

    // Source type: video capture devices
    hr = pAttributes->SetGUID(
        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
        );
    if (FAILED(hr)) {
        goto done;
    }

    // Enumerate devices.
    hr = MFEnumDeviceSources(pAttributes, &ppDevices, &count);
    if (FAILED(hr)) {
        goto done;
    }

    if (count == 0) {
        hr = E_FAIL;
        goto done;
    }

    // cache the devices.
    m_deviceActivateObjects.clear();
    for (DWORD i = 0; i < count; i++) {
        m_deviceActivateObjects.push_back(CComPtr<IMFActivate>(ppDevices[i]));
    }
    
done:
    SafeRelease(&pAttributes);
    for (DWORD i = 0; i < count; i++) {
        SafeRelease(&ppDevices[i]);
    }
    CoTaskMemFree(ppDevices);
    return hr;
}
Example #10
    void GetCameraDevices(IMFActivate*** pppDevices, UINT32* pnCount)
    {
        HRESULT hr = S_OK;
        IMFAttributes* pAttributes = NULL;

        // Make sure the out-parameters are well defined even if enumeration fails.
        *pppDevices = NULL;
        *pnCount = 0;

        hr = MFCreateAttributes(&pAttributes, 1);
        if (FAILED(hr))
            goto END;

        hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
        if (FAILED(hr))
            goto END;

        hr = MFEnumDeviceSources(pAttributes, pppDevices, pnCount);
        if (FAILED(hr))
            goto END;

END:
        SafeRelease(&pAttributes);

        return;
    }
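    // A hypothetical caller for GetCameraDevices above, showing the cleanup the
    // MFEnumDeviceSources contract requires: release every IMFActivate and free
    // the array with CoTaskMemFree. (wprintf comes from <cstdio>.)
    void PrintCameraCount()
    {
        IMFActivate** ppDevices = NULL;
        UINT32 count = 0;
        GetCameraDevices(&ppDevices, &count);

        wprintf(L"%u camera device(s) found\n", count);

        for (UINT32 i = 0; i < count; i++)
        {
            SafeRelease(&ppDevices[i]);
        }
        CoTaskMemFree(ppDevices);
    }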
Example #11
HRESULT InitMFStreamer()
{
	printf("InitMFStreamer.\n");

	CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);

	CHECK_HR(InitVPXEncoder(&_vpxConfig, &_vpxCodec, WIDTH, HEIGHT), L"Failed to initialise the VPX encoder.\n");

	// Create an attribute store to hold the search criteria.
	CHECK_HR(MFCreateAttributes(&videoConfig, 1), L"Error creating video configuration.");
	//CHECK_HR(MFCreateAttributes(&audioConfig, 1), L"Error creating audio configuration.");

	// Request video capture devices.
	CHECK_HR(videoConfig->SetGUID(
		MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
		MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID), L"Error initialising video configuration object.");

	// Request audio capture devices.
	/*CHECK_HR(audioConfig->SetGUID(
	MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
	MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID), L"Error initialising audio configuration object.");*/

	// Enumerate the devices,
	CHECK_HR(MFEnumDeviceSources(videoConfig, &videoDevices, &videoDeviceCount), L"Error enumerating video devices.");
	//CHECK_HR(MFEnumDeviceSources(audioConfig, &audioDevices, &audioDeviceCount), L"Error enumerating audio devices.");

	//printf("Video device Count: %i, Audio device count: %i.\n", videoDeviceCount, audioDeviceCount);
	printf("Video device Count: %i.\n", videoDeviceCount);

	CHECK_HR(videoDevices[0]->ActivateObject(IID_PPV_ARGS(&videoSource)), L"Error activating video device.");
	//CHECK_HR(audioDevices[0]->ActivateObject(IID_PPV_ARGS(&audioSource)), L"Error activating audio device.");

	// Initialize the Media Foundation platform.
	CHECK_HR(MFStartup(MF_VERSION), L"Error on Media Foundation startup.");

	/*WCHAR *pwszFileName = L"sample.mp4";
	IMFSinkWriter *pWriter;*/

	/*CHECK_HR(MFCreateSinkWriterFromURL(
	pwszFileName,
	NULL,
	NULL,
	&pWriter), L"Error creating mp4 sink writer.");*/

	// Create the source readers.
	CHECK_HR(MFCreateSourceReaderFromMediaSource(
		videoSource,
		videoConfig,
		&videoReader), L"Error creating video source reader.");

	//ListModes(videoReader);

	/*CHECK_HR(MFCreateSourceReaderFromMediaSource(
	audioSource,
	audioConfig,
	&audioReader), L"Error creating audio source reader.");*/

	FindVideoMode(videoReader, MF_INPUT_FORMAT, WIDTH, HEIGHT, desiredInputVideoType);

	if (desiredInputVideoType == NULL) {
		printf("The specified media type could not be found for the MF video reader.\n");
	}
	else {
		CHECK_HR(videoReader->SetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, desiredInputVideoType),
			L"Error setting video reader media type.\n");

		CHECK_HR(videoReader->GetCurrentMediaType(
			(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
			&videoType), L"Error retrieving current media type from first video stream.");
		CMediaTypeTrace *videoTypeMediaTrace = new CMediaTypeTrace(videoType);

		printf("Video input media type: %s.\n", videoTypeMediaTrace->GetString());

		LONG pStride = 0;
		GetDefaultStride(videoType, &pStride);
		printf("Stride %i.\n", pStride);
			
		/*printf("Press any key to continue...");
		getchar();*/

		/*audioReader->GetCurrentMediaType(
		(DWORD)MF_SOURCE_READER_FIRST_AUDIO_STREAM,
		&audioType);*/
		//CMediaTypeTrace *audioTypeMediaTrace = new CMediaTypeTrace(audioType);
		//printf("Audio input media type: %s.\n", audioTypeMediaTrace->GetString());

		//printf("Configuring H.264 sink.\n");

		// Set up the H.264 sink.
		/*CHECK_HR(ConfigureEncoder(videoType, &videoStreamIndex, &audioStreamIndex, pWriter), L"Error configuring encoder.");
		printf("Video stream index %i, audio stream index %i.\n", videoStreamIndex, audioStreamIndex);*/

		// Register the color converter DSP for this process, in the video 
		// processor category. This will enable the sink writer to enumerate
		// the color converter when the sink writer attempts to match the
		// media types.
		CHECK_HR(MFTRegisterLocalByCLSID(
			__uuidof(CColorConvertDMO),
			MFT_CATEGORY_VIDEO_PROCESSOR,
			L"",
			MFT_ENUM_FLAG_SYNCMFT,
			0,
			NULL,
			0,
			NULL
			), L"Error registering colour converter DSP.");

		// Add the input types to the H.264 sink writer.
		/*CHECK_HR(pWriter->SetInputMediaType(videoStreamIndex, videoType, NULL), L"Error setting the sink writer video input type.");
		videoType->Release();
		CHECK_HR(pWriter->SetInputMediaType(audioStreamIndex, audioType, NULL), L"Error setting the sink writer audio input type.");
		audioType->Release();*/

		//CHECK_HR(pWriter->BeginWriting(), L"Failed to begin writing on the H.264 sink.");

		//InitializeCriticalSection(&critsec);
	}

	return S_OK;
}
Example #12
HRESULT CaptureClass::initCapture(int aDevice)
{
	mWhoAmI = aDevice;
	HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);

	DO_OR_DIE;

	hr = MFStartup(MF_VERSION);

	DO_OR_DIE;

	// choose device
	IMFAttributes *attributes = NULL;
	hr = MFCreateAttributes(&attributes, 1);
	ScopedRelease<IMFAttributes> attributes_s(attributes);

	DO_OR_DIE;

	hr = attributes->SetGUID(
		MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
		MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
		);

	DO_OR_DIE;

	ChooseDeviceParam param = { 0 };
	hr = MFEnumDeviceSources(attributes, &param.mDevices, &param.mCount);

	DO_OR_DIE;

	if ((signed)param.mCount > aDevice)
	{
		// use param.mDevices[aDevice]
		IMFAttributes   *attributes = NULL;
		IMFMediaType    *type = NULL;
		EnterCriticalSection(&mCritsec);

		hr = param.mDevices[aDevice]->ActivateObject(
			__uuidof(IMFMediaSource),
			(void**)&mSource
			);

		DO_OR_DIE_CRITSECTION;

		hr = MFCreateAttributes(&attributes, 3);
		ScopedRelease<IMFAttributes> attributes_s(attributes);

		DO_OR_DIE_CRITSECTION;

		hr = attributes->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, TRUE);

		DO_OR_DIE_CRITSECTION;

		hr = attributes->SetUnknown(
			MF_SOURCE_READER_ASYNC_CALLBACK,
			this
			);

		DO_OR_DIE_CRITSECTION;

		hr = MFCreateSourceReaderFromMediaSource(
			mSource,
			attributes,
			&mReader
			);

		DO_OR_DIE_CRITSECTION;

		int preferredmode = scanMediaTypes(gParams[mWhoAmI].mWidth, gParams[mWhoAmI].mHeight);
		mUsedIndex = preferredmode;

		hr = mReader->GetNativeMediaType(
			(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
			preferredmode,
			&type
			);
		ScopedRelease<IMFMediaType> type_s(type);

		DO_OR_DIE_CRITSECTION;

		hr = setVideoType(type);

		DO_OR_DIE_CRITSECTION;

		hr = mReader->SetCurrentMediaType(
			(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
			NULL,
			type
			);

		DO_OR_DIE_CRITSECTION;

		hr = mReader->ReadSample(
			(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
			0,
			NULL,
			NULL,
			NULL,
			NULL
			);

		DO_OR_DIE_CRITSECTION;

		LeaveCriticalSection(&mCritsec);
	}
	else
	{
		return MF_E_INVALIDINDEX;
	}

	/*
	for (i = 0; i < 16; i++)
	{
	char temp[128];
	float v;
	int f;
	int r = GetProperty(i, v, f);
	sprintf(temp, "%d: %3.3f %d (%d)\n", i, v, f, r);
	OutputDebugStringA(temp);
	}
	*/

	return 0;
}
Example #13
camera_t * camera_open(const char *portname, int highres)
{
	camera_internal_t *camera = (camera_internal_t*)malloc(sizeof(camera_internal_t));
	camera->reader = NULL;

	if (highres)
	{
		console_printf("camera: highres is not supported on windows (yet).\n");
		highres = 0;
	}

	HRESULT hr = S_OK;

	// Initialize Media Foundation
	if (SUCCEEDED(hr))
	{
		hr = MFStartup(MF_VERSION);
	}
	///////////////////////////////////////////
	IMFAttributes *pAttributes = NULL;
	UINT32 m_cDevices = 0;
	IMFActivate **m_ppDevices = NULL;

	// Initialize an attribute store. We will use this to
	// specify the enumeration parameters.

	hr = MFCreateAttributes(&pAttributes, 1);

	// Ask for source type = video capture devices
	if (SUCCEEDED(hr))
	{
		hr = pAttributes->SetGUID(
			MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
			MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
			);
	}

	// Enumerate devices.
	if (SUCCEEDED(hr))
	{
		hr = MFEnumDeviceSources(pAttributes, &m_ppDevices, &m_cDevices);
	}

	SafeRelease(&pAttributes);

	/////////////////////////////////////////////////
	IMFActivate *pActivate = NULL;
	if (m_cDevices)
	{
		console_printf("camera: there are %d camera devices connected (0..%d).\n", m_cDevices, m_cDevices > 0 ? m_cDevices - 1 : 0);
		int device = strtol(portname, 0, 10);
		if (device < 0 || device >= m_cDevices)
			console_printf("camera: device %d does not exist.\n", device);
		else
			pActivate = m_ppDevices[device];
	}
	else
	{
		console_printf("camera: could not find a device\n");
	}
	/////////////////////////////////////////////////

	if (!pActivate)
	{
		// No valid capture device was selected; bail out before dereferencing a NULL activator.
		free(camera);
		return 0;
	}

	IMFMediaSource *pSource = NULL;

	//EnterCriticalSection(&m_critsec);

	// Create the media source for the device.
	hr = pActivate->ActivateObject(
		__uuidof(IMFMediaSource),
		(void**)&pSource
		);
	///////////////////////////////////////////

	//IMFAttributes *pAttributes = NULL;

	/*hr = MFCreateAttributes(&pAttributes, 2);

	if (SUCCEEDED(hr))
	{
	hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this);
	}*/

	if (SUCCEEDED(hr))
	{
		hr = MFCreateSourceReaderFromMediaSource(
			pSource,
			NULL,//pAttributes,
			&camera->reader
			);
	}

	//SafeRelease(&pAttributes);

	////////////////////////////////////////////////////
	// The list of acceptable types.
	GUID subtypes[] = {
		MFVideoFormat_NV12, MFVideoFormat_YUY2, MFVideoFormat_UYVY,
		MFVideoFormat_RGB32, MFVideoFormat_RGB24, MFVideoFormat_IYUV
	};

	//HRESULT hr = S_OK;
	BOOL bUseNativeType = FALSE;

	GUID subtype = { 0 };

	IMFMediaType *pType = NULL;

	UINT32 width = 0, height = 0;
	int selectedSubtype = -1;

	// If the source's native format matches any of the formats in
	// the list, prefer the native format.

	// Note: The camera might support multiple output formats,
	// including a range of frame dimensions. The application could
	// provide a list to the user and have the user select the
	// camera's output format. That is outside the scope of this
	// sample, however.

	DWORD selectedStreamIndex = MF_SOURCE_READER_FIRST_VIDEO_STREAM;

	//while (true)
	//{
		hr = camera->reader->GetNativeMediaType(
			selectedStreamIndex,
			0,  // Type index
			&pType
			);

		if (FAILED(hr)) { console_printf("camera: could not get media type\n"); goto done; }

		hr = ::MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);

		if (FAILED(hr)) { console_printf("camera: could not get resolution\n"); goto done; }

		//if (width != 1280 || height != 960)
		//{
			console_printf("camera: found resolution %dx%d\n", width, height);
			//selectedStreamIndex++;
			//continue;
		//}

		camera->size.width = width;
		camera->size.height = height;
		//break;
	//}


	/*UINT32 num = 0, denom = 0;
	hr = ::MFGetAttributeRatio(pType, MF_MT_FRAME_RATE_RANGE_MAX, &num, &denom);

	if (FAILED(hr)) { goto done; }*/

	//hr = ::MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, 1280, 960);

	//if (FAILED(hr)) { goto done; }

	/*hr = ::MFSetAttributeRatio(pType, MF_MT_FRAME_RATE, num, denom);

	if (FAILED(hr)) { goto done; }*/

	hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);

	if (FAILED(hr)) { console_printf("camera: could not get stream type(1)\n"); goto done; }

	for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++)
	{
		if (subtype == subtypes[i])
		{
			hr = camera->reader->SetCurrentMediaType(
				(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
				NULL,
				pType
				);

			bUseNativeType = TRUE;
			selectedSubtype = i;
			break;
		}
	}

	if (!bUseNativeType)
	{
		// None of the native types worked. The camera might offer
		// output a compressed type such as MJPEG or DV.

		// Try adding a decoder.

		for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++)
		{
			hr = pType->SetGUID(MF_MT_SUBTYPE, subtypes[i]);

			if (FAILED(hr)) { console_printf("camera: could not get stream type(2)\n"); goto done; }

			hr = camera->reader->SetCurrentMediaType(
				(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
				NULL,
				pType
				);

			if (SUCCEEDED(hr))
			{
				selectedSubtype = i;
				break;
			}
		}
	}

/*	hr = ::MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
	WIDTH = width;
	HEIGHT = height;*/

	if (FAILED(hr)) { console_printf("camera: could not find stream type\n"); goto done; }

done:
	SafeRelease(&pType);

	console_printf("camera: selected type: %d, native: %s, resolution: %dx%d\n",
		selectedSubtype, bUseNativeType ? "yes" : "no", camera->size.width, camera->size.height);

	///////////////////////////////////////
	/*if (SUCCEEDED(hr))
	{
	hr = camera->reader->GetCurrentMediaType(
	(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
	&pType
	);
	}

	if (SUCCEEDED(hr))
	{
	// Register the color converter DSP for this process, in the video
	// processor category. This will enable the sink writer to enumerate
	// the color converter when the sink writer attempts to match the
	// media types.

	hr = MFTRegisterLocalByCLSID(
	__uuidof(CColorConvertDMO),
	MFT_CATEGORY_VIDEO_PROCESSOR,
	L"",
	MFT_ENUM_FLAG_SYNCMFT,
	0,
	NULL,
	0,
	NULL
	);
	}*/

	/////////////////////////////////////////////////

/*	IMFSample *pSample = NULL;
	DWORD streamIndex = 0, flags = 0;
	LONGLONG llTimeStamp = 0;

	hr = camera->reader->ReadSample(
		(DWORD)MF_SOURCE_READER_ANY_STREAM,    // Stream index.
		0,                              // Flags.
		&streamIndex,                   // Receives the actual stream index.
		&flags,                         // Receives status flags.
		&llTimeStamp,                   // Receives the time stamp.
		&pSample                        // Receives the sample or NULL.
		);*/

	if (selectedSubtype != 4)	// index 4 in subtypes[] is MFVideoFormat_RGB24
	{
		console_printf("camera: unexpected stream type.\n");
		SafeRelease(&camera->reader);
		free(camera);
		return 0;
	}
	return (camera_t*)camera;
}
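// camera_open above only configures the source reader; a hypothetical companion
// that pulls a single frame synchronously could look like the sketch below.
// camera_internal_t's layout and console_printf are assumed from the snippet
// above; the rest is standard Media Foundation.
int camera_grab_one(camera_t *aCamera)
{
	camera_internal_t *camera = (camera_internal_t*)aCamera;
	IMFSample *pSample = NULL;
	DWORD streamIndex = 0, flags = 0;
	LONGLONG llTimeStamp = 0;

	HRESULT hr = camera->reader->ReadSample(
		(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
		0,              // No flags.
		&streamIndex,   // Receives the actual stream index.
		&flags,         // Receives status flags.
		&llTimeStamp,   // Receives the time stamp.
		&pSample        // Receives the sample, or NULL (e.g. on a stream tick).
		);

	if (FAILED(hr) || pSample == NULL)
		return 0;

	// Lock a contiguous copy of the frame and look at the raw bytes.
	IMFMediaBuffer *pBuffer = NULL;
	if (SUCCEEDED(pSample->ConvertToContiguousBuffer(&pBuffer)))
	{
		BYTE *pData = NULL;
		DWORD length = 0;
		if (SUCCEEDED(pBuffer->Lock(&pData, NULL, &length)))
		{
			console_printf("camera: got %u bytes for a %dx%d frame\n",
				length, camera->size.width, camera->size.height);
			pBuffer->Unlock();
		}
		pBuffer->Release();
	}
	pSample->Release();
	return 1;
}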
	bool initialise()
	{
		UINT32 videoDeviceCount = 0;
		IMFAttributes *videoConfig = NULL;
		IMFActivate **videoDevices = NULL;
		WCHAR *webcamFriendlyName;
		
		CHECK_HR(MFTRegisterLocalByCLSID(
			__uuidof(CColorConvertDMO),
			MFT_CATEGORY_VIDEO_PROCESSOR,
			L"",
			MFT_ENUM_FLAG_SYNCMFT,
			0,
			NULL,
			0,
			NULL
			), "Error registering colour converter DSP.\n");

		// Get the first available webcam.
		CHECK_HR(MFCreateAttributes(&videoConfig, 1), "Error creating video configuation.\n");

		// Request video capture devices.
		CHECK_HR(videoConfig->SetGUID(
			MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
			MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID), "Error initialising video configuration object.");

		CHECK_HR(MFEnumDeviceSources(videoConfig, &videoDevices, &videoDeviceCount), "Error enumerating video devices.\n");

		CHECK_HR(videoDevices[WEBCAM_DEVICE_INDEX]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &webcamFriendlyName, NULL), "Error retrieving video device friendly name.\n");

		wprintf(L"First available webcam: %s\n", webcamFriendlyName);
		CoTaskMemFree(webcamFriendlyName);

		CHECK_HR(videoDevices[WEBCAM_DEVICE_INDEX]->ActivateObject(IID_PPV_ARGS(&videoSource)), "Error activating video device.\n");

		// Create a source reader.
		CHECK_HR(MFCreateSourceReaderFromMediaSource(
			videoSource,
			videoConfig,
			&_videoReader), "Error creating video source reader.\n");

		//ListModes(_videoReader);

		CHECK_HR(_videoReader->GetCurrentMediaType(
			(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
			&videoSourceOutputType), "Error retrieving current media type from first video stream.\n");

		Console::WriteLine(GetMediaTypeDescription(videoSourceOutputType));

		// Note the webcam needs to support this media type. The list of media types supported can be obtained using the ListTypes function in MFUtility.h.
		MFCreateMediaType(&pSrcOutMediaType);
		pSrcOutMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
		//pSrcOutMediaType->SetGUID(MF_MT_SUBTYPE, WMMEDIASUBTYPE_I420);
		pSrcOutMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24);
		MFSetAttributeSize(pSrcOutMediaType, MF_MT_FRAME_SIZE, CAMERA_RESOLUTION_WIDTH, CAMERA_RESOLUTION_HEIGHT);
		CHECK_HR(MFSetAttributeRatio(pSrcOutMediaType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1), "Failed to set frame rate on video device out type.\n");

		CHECK_HR(_videoReader->SetCurrentMediaType(0, NULL, pSrcOutMediaType), "Failed to set media type on source reader.\n");
		//CHECK_HR(_videoReader->SetCurrentMediaType(0, NULL, videoSourceOutputType), "Failed to setdefault  media type on source reader.\n");

		// Create H.264 encoder.
		CHECK_HR(CoCreateInstance(CLSID_CMSH264EncoderMFT, NULL, CLSCTX_INPROC_SERVER,
			IID_IUnknown, (void**)&spTransformUnk), "Failed to create H264 encoder MFT.\n");

		CHECK_HR(spTransformUnk->QueryInterface(IID_PPV_ARGS(&_pTransform)), "Failed to get IMFTransform interface from H264 encoder MFT object.\n");

		MFCreateMediaType(&pMFTOutputMediaType);
		pMFTOutputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
		pMFTOutputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
		//pMFTOutputMediaType->SetUINT32(MF_MT_AVG_BITRATE, 240000);
		CHECK_HR(pMFTOutputMediaType->SetUINT32(MF_MT_AVG_BITRATE, TARGET_AVERAGE_BIT_RATE), "Failed to set average bit rate on H264 output media type.\n");
		CHECK_HR(MFSetAttributeSize(pMFTOutputMediaType, MF_MT_FRAME_SIZE, CAMERA_RESOLUTION_WIDTH, CAMERA_RESOLUTION_HEIGHT), "Failed to set frame size on H264 MFT out type.\n");
		CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1), "Failed to set frame rate on H264 MFT out type.\n");
		CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1), "Failed to set aspect ratio on H264 MFT out type.\n");
		pMFTOutputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, 2);	// 2 = Progressive scan, i.e. non-interlaced.
		pMFTOutputMediaType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
		//CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Base), "Failed to set profile on H264 MFT out type.\n");
		//CHECK_HR(pMFTOutputMediaType->SetDouble(MF_MT_MPEG2_LEVEL, 3.1), "Failed to set level on H264 MFT out type.\n");
		//CHECK_HR(pMFTOutputMediaType->SetUINT32(MF_MT_MAX_KEYFRAME_SPACING, 10), "Failed to set key frame interval on H264 MFT out type.\n");
		//CHECK_HR(pMFTOutputMediaType->SetUINT32(CODECAPI_AVEncCommonQuality, 100), "Failed to set H264 codec quality.\n");
		//hr = pAttributes->SetUINT32(CODECAPI_AVEncMPVGOPSize, 1)

		CHECK_HR(_pTransform->SetOutputType(0, pMFTOutputMediaType, 0), "Failed to set output media type on H.264 encoder MFT.\n");

		MFCreateMediaType(&pMFTInputMediaType);
		pMFTInputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
		pMFTInputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_IYUV);
		CHECK_HR(MFSetAttributeSize(pMFTInputMediaType, MF_MT_FRAME_SIZE, CAMERA_RESOLUTION_WIDTH, CAMERA_RESOLUTION_HEIGHT), "Failed to set frame size on H264 MFT out type.\n");
		CHECK_HR(MFSetAttributeRatio(pMFTInputMediaType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1), "Failed to set frame rate on H264 MFT out type.\n");
		CHECK_HR(MFSetAttributeRatio(pMFTInputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1), "Failed to set aspect ratio on H264 MFT out type.\n");
		pMFTInputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, 2);

		CHECK_HR(_pTransform->SetInputType(0, pMFTInputMediaType, 0), "Failed to set input media type on H.264 encoder MFT.\n");

		CHECK_HR(_pTransform->GetInputStatus(0, &mftStatus), "Failed to get input status from H.264 MFT.\n");
		if (MFT_INPUT_STATUS_ACCEPT_DATA != mftStatus) {
			printf("E: ApplyTransform() pTransform->GetInputStatus() not accept data.\n");
			goto done;
		}

		//Console::WriteLine(GetMediaTypeDescription(pMFTOutputMediaType));

		CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL), "Failed to process FLUSH command on H.264 MFT.\n");
		CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL), "Failed to process BEGIN_STREAMING command on H.264 MFT.\n");
		CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL), "Failed to process START_OF_STREAM command on H.264 MFT.\n");

		memset(&_outputDataBuffer, 0, sizeof _outputDataBuffer);

		return true;

	done:

		printf("MediaFoundationH264LiveSource initialisation failed.\n");
		return false;
	}
void OnChooseDevice(HWND hwnd)
{
    HRESULT hr = S_OK;
    ChooseDeviceParam param = { 0 };

    IMFAttributes *pAttributes = NULL;

    // Release the previous instance of the preview object, if any.
    if (g_pPreview)
    {
        g_pPreview->CloseDevice();
        SafeRelease(&g_pPreview);
    }

    // Create a new instance of the preview object.
    hr = CPreview::CreateInstance(hwnd, &g_pPreview);

    // Create an attribute store to specify the enumeration parameters.

    if (SUCCEEDED(hr))
    {
        hr = MFCreateAttributes(&pAttributes, 1);
    }

    // Ask for source type = video capture devices

    if (SUCCEEDED(hr))
    {
        hr = pAttributes->SetGUID(
            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, 
            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
            );
    }
    
    // Enumerate devices.

    if (SUCCEEDED(hr))
    {
        hr = MFEnumDeviceSources(pAttributes, &param.ppDevices, &param.count);
    }

    if (SUCCEEDED(hr))
    {
        // Ask the user to select one.
        INT_PTR result = DialogBoxParam(
            GetModuleHandle(NULL),
            MAKEINTRESOURCE(IDD_CHOOSE_DEVICE),
            hwnd,
            DlgProc,
            (LPARAM)&param
            );

        if ((result == IDOK) && (param.selection != (UINT32)-1))
        {
            UINT iDevice = param.selection;

            if (iDevice >= param.count)
            {
                hr = E_UNEXPECTED;
            }
            else
            {
                // Give this source to the CPreview object for preview.
                hr = g_pPreview->SetDevice(param.ppDevices[iDevice]);
            }
        }
    }

    SafeRelease(&pAttributes);

    for (DWORD i = 0; i < param.count; i++)
    {
        SafeRelease(&param.ppDevices[i]);
    }
    CoTaskMemFree(param.ppDevices);

    if (FAILED(hr))
    {
        ShowErrorMessage(hwnd, L"Cannot create the video capture device", hr);
    }
}