// Initialize Device List HRESULT DeviceList::EnumerateDevices() { HRESULT hr = S_OK; IMFAttributes *pAttributes = NULL; Clear(); // Initialize an attribute store. We will use this to // specify the enumeration parameters. hr = MFCreateAttributes(&pAttributes, 1); // Ask for source type = video capture devices if (SUCCEEDED(hr)) { hr = pAttributes->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID ); } // Enumerate devices. if (SUCCEEDED(hr)) { hr = MFEnumDeviceSources(pAttributes, &m_ppDevices, &m_cDevices); } SafeRelease(&pAttributes); return hr; }
bool MediaFoundationCaptureLibrary::BuildListOfDevices() { HRESULT hr = S_OK; IMFAttributes *pAttributes = NULL; CoInitialize(NULL); hr = MFCreateAttributes(&pAttributes, 1); if (SUCCEEDED(hr)) { hr = pAttributes->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID ); } if (SUCCEEDED(hr)) { hr = MediaFoundationVideoDevices::GetInstance().InitDevices(pAttributes); } else { LOG_ERROR("MEDIA FOUNDATION: The access to the video cameras denied."); } SafeRelease(&pAttributes); return (SUCCEEDED(hr)); }
int CountCaptureDevices() { HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE); if (FAILED(hr)) return 0; hr = MFStartup(MF_VERSION); if (FAILED(hr)) return 0; // choose device IMFAttributes *attributes = NULL; hr = MFCreateAttributes(&attributes, 1); ScopedRelease<IMFAttributes> attributes_s(attributes); if (FAILED(hr)) return 0; hr = attributes->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID ); if (FAILED(hr)) return 0; ChooseDeviceParam param = { 0 }; hr = MFEnumDeviceSources(attributes, ¶m.mDevices, ¶m.mCount); if (FAILED(hr)) return 0; return param.mCount; }
// Let the user pick a capture device, initialize the capture engine with
// it, and remember the selection in pSelectedDevice.
void OnChooseDevice(HWND hwnd)
{
    ChooseDeviceParam param;
    IMFAttributes *pAttributes = NULL;
    // BUGFIX: declared before the first goto. The original initialized
    // 'result' after the gotos, and jumping past the declaration of an
    // initialized variable is ill-formed in C++.
    INT_PTR result = 0;

    HRESULT hr = MFCreateAttributes(&pAttributes, 1);
    if (FAILED(hr))
    {
        goto done;
    }

    // Ask for source type = video capture devices
    hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
    if (FAILED(hr))
    {
        goto done;
    }

    // Enumerate devices.
    // BUGFIX: "&param" had been corrupted to the mojibake "¶m".
    hr = MFEnumDeviceSources(pAttributes, &param.ppDevices, &param.count);
    if (FAILED(hr))
    {
        goto done;
    }

    // Ask the user to select one.
    result = DialogBoxParam(GetModuleHandle(NULL), MAKEINTRESOURCE(IDD_CHOOSE_DEVICE), hwnd, ChooseDeviceDlgProc, (LPARAM)&param);

    if ((result == IDOK) && (param.selection != (UINT32)-1))
    {
        UINT iDevice = param.selection;
        if (iDevice >= param.count)
        {
            hr = E_UNEXPECTED;
            goto done;
        }

        hr = g_pEngine->InitializeCaptureManager(hPreview, param.ppDevices[iDevice]);
        if (FAILED(hr))
        {
            goto done;
        }

        // Keep our own reference to the chosen activation object.
        SafeRelease(&pSelectedDevice);
        pSelectedDevice = param.ppDevices[iDevice];
        pSelectedDevice->AddRef();
    }

done:
    SafeRelease(&pAttributes);
    // NOTE(review): param.ppDevices is not released here — presumably
    // ChooseDeviceParam's destructor releases the array; verify.
    if (FAILED(hr))
    {
        ShowError(hwnd, IDS_ERR_SET_DEVICE, hr);
    }
    UpdateUI(hwnd);
}
void GetCaptureDeviceName(int aDevice, char * aNamebuffer, int aBufferlength) { int i; if (!aNamebuffer || aBufferlength <= 0) return; aNamebuffer[0] = 0; HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE); if (FAILED(hr)) return; hr = MFStartup(MF_VERSION); if (FAILED(hr)) return; // choose device IMFAttributes *attributes = NULL; hr = MFCreateAttributes(&attributes, 1); ScopedRelease<IMFAttributes> attributes_s(attributes); if (FAILED(hr)) return; hr = attributes->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID ); if (FAILED(hr)) return; ChooseDeviceParam param = { 0 }; hr = MFEnumDeviceSources(attributes, ¶m.mDevices, ¶m.mCount); if (FAILED(hr)) return; if (aDevice < (signed)param.mCount) { WCHAR *name = 0; UINT32 namelen = 255; hr = param.mDevices[aDevice]->GetAllocatedString( MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &name, &namelen ); if (SUCCEEDED(hr) && name) { i = 0; while (i < aBufferlength - 1 && i < (signed)namelen && name[i] != 0) { aNamebuffer[i] = (char)name[i]; i++; } aNamebuffer[i] = 0; CoTaskMemFree(name); } } }
// Describes the encoded video stream (WMV3, 30 fps, 320x240, square
// pixels, 300 kbit/s) and attaches it to the transcode profile.
HRESULT CTranscoder::ConfigureVideoOutput()
{
    assert (m_pProfile);

    HRESULT hr = S_OK;
    IMFAttributes* pAttrs = NULL;

    // Attribute store for the video stream settings.
    if (SUCCEEDED(hr))
        hr = MFCreateAttributes( &pAttrs, 5 );

    // Windows Media video encoder, so the appropriate MFTs are added
    // to the topology.
    if (SUCCEEDED(hr))
        hr = pAttrs->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_WMV3);

    // Frame rate: 30/1.
    if (SUCCEEDED(hr))
        hr = MFSetAttributeRatio(pAttrs, MF_MT_FRAME_RATE, 30, 1);

    // Frame size: 320 x 240.
    if (SUCCEEDED(hr))
        hr = MFSetAttributeSize(pAttrs, MF_MT_FRAME_SIZE, 320, 240);

    // Pixel aspect ratio: square.
    if (SUCCEEDED(hr))
        hr = MFSetAttributeRatio(pAttrs, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);

    // Average bit rate: 300 kbit/s.
    if (SUCCEEDED(hr))
        hr = pAttrs->SetUINT32(MF_MT_AVG_BITRATE, 300000);

    // Attach the stream description to the transcode profile.
    if (SUCCEEDED(hr))
        hr = m_pProfile->SetVideoAttributes( pAttrs );

    SafeRelease(&pAttrs);
    return hr;
}
IMFActivate* WinCaptureDevice::ChooseFirst(std::string& error) { IMFActivate* result = NULL; HRESULT hr = S_OK; UINT iDevice = 0; // Index into the array of devices BOOL bCancel = FALSE; // Initialize an attribute store to specify enumeration parameters. IMFAttributes* pAttributes = NULL; hr = MFCreateAttributes(&pAttributes, 1); if (FAILED(hr)) { goto done; } // Ask for source type = video capture devices. hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID); if (FAILED(hr)) { goto done; } // Enumerate devices. IMFActivate **devices = NULL; uint numDevices = 0; hr = MFEnumDeviceSources(pAttributes, &devices, &numDevices); if (FAILED(hr)) { goto done; } if (numDevices > 0) result = devices[0]; done: SafeRelease(&pAttributes); for (uint i = 0; i < numDevices; i++) { if (devices[i] != result) SafeRelease(&devices[i]); } CoTaskMemFree(devices); if (FAILED(hr)) { //ShowErrorMessage(L"Cannot create a video capture device", hr); } return result; }
// Enumerates the attached video capture devices and caches an activation
// object for each one in m_deviceActivateObjects.
HRESULT CMFCamCapture::enumVideoDevices()
{
    IMFAttributes *pAttributes = NULL;
    IMFActivate **ppDevices = NULL;
    // BUGFIX: zero-initialize. 'count' was declared uninitialized after
    // the gotos, so any 'goto done' taken before MFEnumDeviceSources made
    // the cleanup loop below iterate over garbage.
    UINT32 count = 0;

    // Create an attribute store to specify the enumeration parameters.
    HRESULT hr = MFCreateAttributes(&pAttributes, 1);
    if (FAILED(hr))
    {
        goto done;
    }

    // Source type: video capture devices
    hr = pAttributes->SetGUID(
        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
        );
    if (FAILED(hr))
    {
        goto done;
    }

    // Enumerate devices.
    hr = MFEnumDeviceSources(pAttributes, &ppDevices, &count);
    if (FAILED(hr))
    {
        goto done;
    }

    if (count == 0)
    {
        hr = E_FAIL;
        goto done;
    }

    // cache the devices (each CComPtr takes its own reference).
    m_deviceActivateObjects.clear();
    for (DWORD i = 0; i < count; i++)
    {
        m_deviceActivateObjects.push_back(CComPtr<IMFActivate>(ppDevices[i]));
    }

done:
    SafeRelease(&pAttributes);
    // Drop the enumeration's references; the cached CComPtrs hold their own.
    for (DWORD i = 0; i < count; i++)
    {
        SafeRelease(&ppDevices[i]);
    }
    CoTaskMemFree(ppDevices);
    return hr;
}
// Describes the output container (ASF, default profile adjustment) and
// attaches it to the transcode profile.
HRESULT CTranscoder::ConfigureContainer()
{
    assert (m_pProfile);

    IMFAttributes* pAttrs = NULL;

    // Attribute store for the container settings.
    HRESULT hr = MFCreateAttributes( &pAttrs, 2 );

    // ASF container.
    if (SUCCEEDED(hr))
    {
        hr = pAttrs->SetGUID(
            MF_TRANSCODE_CONTAINERTYPE,
            MFTranscodeContainerType_ASF
            );
    }

    // Default profile adjustment: Media Foundation keeps the stream
    // settings made in ConfigureAudioOutput and ConfigureVideoOutput.
    if (SUCCEEDED(hr))
    {
        hr = pAttrs->SetUINT32(
            MF_TRANSCODE_ADJUST_PROFILE,
            MF_TRANSCODE_ADJUST_PROFILE_DEFAULT
            );
    }

    // Attach the container description to the transcode profile.
    if (SUCCEEDED(hr))
    {
        hr = m_pProfile->SetContainerAttributes(pAttrs);
    }

    SafeRelease(&pAttrs);
    return hr;
}
// Enumerates all video capture devices. On success *pppDevices receives a
// CoTaskMemAlloc'd array of activation objects and *pnCount its length;
// the caller must release each element and free the array.
// Cleanup: removed the unused pszFriendlyName/cchName locals, the
// redundant 'goto END' immediately before the label, and the dead
// trailing 'return;'.
void GetCameraDevices(IMFActivate*** pppDevices, UINT32* pnCount)
{
    HRESULT hr = S_OK;
    IMFAttributes* pAttributes = NULL;

    hr = MFCreateAttributes(&pAttributes, 1);
    if (FAILED(hr))
        goto END;

    // Restrict the enumeration to video capture devices.
    hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
    if (FAILED(hr))
        goto END;

    hr = MFEnumDeviceSources(pAttributes, pppDevices, pnCount);

END:
    SafeRelease(&pAttributes);
}
//---------------------------------------------------------------------------- long MediaFoundationVideoDevice::InitDevice() { HRESULT hr = S_OK; IMFAttributes *pAttributes = NULL; IMFActivate * vd_pActivate = NULL; CoInitialize(NULL); if (SUCCEEDED(hr)) { hr = MFCreateAttributes(&pAttributes, 1); } if (SUCCEEDED(hr)) { hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID); if (!SUCCEEDED(hr)) { LOG_ERROR("MediaFoundationVideoDevice::InitDevice failed: device " << this->DeviceIndex << ": The attribute of the capture device cannot be retrieved"); } } if (SUCCEEDED(hr)) { hr = CheckDevice(pAttributes, &vd_pActivate); if (SUCCEEDED(hr) && vd_pActivate) { SafeRelease(&this->Source); hr = vd_pActivate->ActivateObject(__uuidof(IMFMediaSource), (void**)&this->Source); SafeRelease(&vd_pActivate); } else { LOG_ERROR("MediaFoundationVideoDevice::InitDevice failed: device " << this->DeviceIndex << ": Cannot activate device"); } } SafeRelease(&pAttributes); return hr; }
// Opens capture device 'aDevice' and starts the first asynchronous
// ReadSample (results arrive through this object's reader callback).
// Returns 0 on success, an error HRESULT / MF_E_INVALIDINDEX otherwise.
HRESULT CaptureClass::initCapture(int aDevice)
{
    mWhoAmI = aDevice;
    HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
    DO_OR_DIE;

    hr = MFStartup(MF_VERSION);
    DO_OR_DIE;

    // choose device
    IMFAttributes *attributes = NULL;
    hr = MFCreateAttributes(&attributes, 1);
    ScopedRelease<IMFAttributes> attributes_s(attributes);
    DO_OR_DIE;

    hr = attributes->SetGUID(
        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
        );
    DO_OR_DIE;

    ChooseDeviceParam param = { 0 };
    // BUGFIX: "&param" had been corrupted to the mojibake "¶m".
    hr = MFEnumDeviceSources(attributes, &param.mDevices, &param.mCount);
    DO_OR_DIE;

    if ((signed)param.mCount > aDevice)
    {
        // use param.ppDevices[0]
        // Renamed from the original shadowing 'attributes'/'attributes_s'
        // so the reader configuration cannot be confused with the device
        // enumeration attributes above.
        IMFAttributes *readerAttributes = NULL;
        IMFMediaType *type = NULL;

        EnterCriticalSection(&mCritsec);

        hr = param.mDevices[aDevice]->ActivateObject(
            __uuidof(IMFMediaSource),
            (void**)&mSource
            );
        DO_OR_DIE_CRITSECTION;

        hr = MFCreateAttributes(&readerAttributes, 3);
        ScopedRelease<IMFAttributes> readerAttributes_s(readerAttributes);
        DO_OR_DIE_CRITSECTION;

        hr = readerAttributes->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, TRUE);
        DO_OR_DIE_CRITSECTION;

        // Deliver samples asynchronously to this object's callback.
        hr = readerAttributes->SetUnknown(
            MF_SOURCE_READER_ASYNC_CALLBACK,
            this
            );
        DO_OR_DIE_CRITSECTION;

        hr = MFCreateSourceReaderFromMediaSource(
            mSource,
            readerAttributes,
            &mReader
            );
        DO_OR_DIE_CRITSECTION;

        // Pick the native mode closest to the requested dimensions.
        int preferredmode = scanMediaTypes(gParams[mWhoAmI].mWidth, gParams[mWhoAmI].mHeight);
        mUsedIndex = preferredmode;

        hr = mReader->GetNativeMediaType(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            preferredmode,
            &type
            );
        ScopedRelease<IMFMediaType> type_s(type);
        DO_OR_DIE_CRITSECTION;

        hr = setVideoType(type);
        DO_OR_DIE_CRITSECTION;

        hr = mReader->SetCurrentMediaType(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            NULL,
            type
            );
        DO_OR_DIE_CRITSECTION;

        // Kick off the first asynchronous read.
        hr = mReader->ReadSample(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            0,
            NULL,
            NULL,
            NULL,
            NULL
            );
        DO_OR_DIE_CRITSECTION;

        LeaveCriticalSection(&mCritsec);
    }
    else
    {
        return MF_E_INVALIDINDEX;
    }

    /*
    for (i = 0; i < 16; i++)
    {
        char temp[128];
        float v;
        int f;
        int r = GetProperty(i, v, f);
        sprintf(temp, "%d: %3.3f %d (%d)\n", i, v, f, r);
        OutputDebugStringA(temp);
    }
    */

    return 0;
}
// Opens the camera whose index is given (base 10) in 'portname' via
// Media Foundation and returns an opaque handle, or 0 on failure.
// 'highres' is not supported on Windows and is forced to 0.
camera_t * camera_open(const char *portname, int highres)
{
    // NOTE(review): malloc result is not checked for NULL.
    camera_internal_t *camera = (camera_internal_t*)malloc(sizeof(camera_internal_t));
    camera->reader = NULL;

    if (highres)
    {
        console_printf("camera: highres is not supported on windows (yet).\n");
        highres = 0;
    }

    HRESULT hr = S_OK;

    // Initialize Media Foundation
    if (SUCCEEDED(hr))
    {
        hr = MFStartup(MF_VERSION);
    }

    ///////////////////////////////////////////

    IMFAttributes *pAttributes = NULL;
    UINT32 m_cDevices = 0;
    IMFActivate **m_ppDevices = NULL;

    // Initialize an attribute store. We will use this to
    // specify the enumeration parameters.
    hr = MFCreateAttributes(&pAttributes, 1);

    // Ask for source type = video capture devices
    if (SUCCEEDED(hr))
    {
        hr = pAttributes->SetGUID(
            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
            );
    }

    // Enumerate devices.
    if (SUCCEEDED(hr))
    {
        hr = MFEnumDeviceSources(pAttributes, &m_ppDevices, &m_cDevices);
    }

    SafeRelease(&pAttributes);
    // NOTE(review): the m_ppDevices array and the IMFActivate objects in
    // it are never released / CoTaskMemFree'd in this function — apparent
    // leak; confirm whether intentional.

    /////////////////////////////////////////////////

    // Select the activation object for the requested device index.
    IMFActivate *pActivate = NULL;
    if (m_cDevices)
    {
        console_printf("camera: there are %d camera devices connected (0..%d).\n", m_cDevices, m_cDevices > 0 ? m_cDevices - 1 : 0);
        int device = strtol(portname, 0, 10);
        if (device < 0 || device >= m_cDevices)
            console_printf("camera: device %d does not exist.\n", device);
        else
            pActivate = m_ppDevices[device];
    }
    else
    {
        console_printf("camera: could not find a device\n");
    }

    /////////////////////////////////////////////////

    IMFMediaSource *pSource = NULL;

    //EnterCriticalSection(&m_critsec);

    // Create the media source for the device.
    // NOTE(review): pActivate is still NULL when no device was found or
    // the index was out of range above, so this call dereferences a null
    // pointer on that path — verify.
    hr = pActivate->ActivateObject(
        __uuidof(IMFMediaSource),
        (void**)&pSource
        );

    ///////////////////////////////////////////
    //IMFAttributes *pAttributes = NULL;

    /*hr = MFCreateAttributes(&pAttributes, 2);
    if (SUCCEEDED(hr))
    {
        hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this);
    }*/

    // Wrap the media source in a source reader (no reader attributes, so
    // samples are read synchronously by the caller).
    if (SUCCEEDED(hr))
    {
        hr = MFCreateSourceReaderFromMediaSource(
            pSource,
            NULL,//pAttributes,
            &camera->reader
            );
    }

    //SafeRelease(&pAttributes);

    ////////////////////////////////////////////////////

    // The list of acceptable types.
    GUID subtypes[] = {
        MFVideoFormat_NV12, MFVideoFormat_YUY2, MFVideoFormat_UYVY,
        MFVideoFormat_RGB32, MFVideoFormat_RGB24, MFVideoFormat_IYUV
    };

    //HRESULT hr = S_OK;
    BOOL bUseNativeType = FALSE;
    GUID subtype = { 0 };
    IMFMediaType *pType = NULL;
    UINT32 width = 0, height = 0;
    // Index into subtypes[] of the format actually selected, -1 if none.
    int selectedSubtype = -1;

    // If the source's native format matches any of the formats in
    // the list, prefer the native format.

    // Note: The camera might support multiple output formats,
    // including a range of frame dimensions. The application could
    // provide a list to the user and have the user select the
    // camera's output format. That is outside the scope of this
    // sample, however.

    DWORD selectedStreamIndex = MF_SOURCE_READER_FIRST_VIDEO_STREAM;

    //while (true)
    //{
    hr = camera->reader->GetNativeMediaType(
        selectedStreamIndex,
        0,  // Type index
        &pType
        );
    if (FAILED(hr)) { console_printf("camera: could not get media type\n"); goto done; }

    hr = ::MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
    if (FAILED(hr)) { console_printf("camera: could not get resolution\n"); goto done; }

    //if (width != 1280 || height != 960)
    //{
    console_printf("camera: found resolution %dx%d\n", width, height);
    //selectedStreamIndex++;
    //continue;
    //}

    camera->size.width = width;
    camera->size.height = height;
    //break;
    //}

    /*UINT32 num = 0, denom = 0;
    hr = ::MFGetAttributeRatio(pType, MF_MT_FRAME_RATE_RANGE_MAX, &num, &denom);
    if (FAILED(hr)) { goto done; }*/

    //hr = ::MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, 1280, 960);
    //if (FAILED(hr)) { goto done; }

    /*hr = ::MFSetAttributeRatio(pType, MF_MT_FRAME_RATE, num, denom);
    if (FAILED(hr)) { goto done; }*/

    hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
    if (FAILED(hr)) { console_printf("camera: could not get stream type(1)\n"); goto done; }

    // First pass: accept the native format if it is one we can handle.
    for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++)
    {
        if (subtype == subtypes[i])
        {
            hr = camera->reader->SetCurrentMediaType(
                (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                NULL,
                pType
                );
            bUseNativeType = TRUE;
            selectedSubtype = i;
            break;
        }
    }

    if (!bUseNativeType)
    {
        // None of the native types worked. The camera might offer
        // output a compressed type such as MJPEG or DV.

        // Try adding a decoder.
        for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++)
        {
            hr = pType->SetGUID(MF_MT_SUBTYPE, subtypes[i]);
            if (FAILED(hr)) { console_printf("camera: could not get stream type(2)\n"); goto done; }

            hr = camera->reader->SetCurrentMediaType(
                (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                NULL,
                pType
                );
            if (SUCCEEDED(hr))
            {
                selectedSubtype = i;
                break;
            }
        }
    }

    /*
    hr = ::MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
    WIDTH = width;
    HEIGHT = height;*/

    if (FAILED(hr)) { console_printf("camera: could not find stream type\n"); goto done; }

done:
    SafeRelease(&pType);
    // NOTE(review): on early 'goto done' paths camera->size may still be
    // uninitialized malloc memory when printed here — confirm acceptable.
    console_printf("camera: selected type: %d, native: %s, resolution: %dx%d\n",
        selectedSubtype, bUseNativeType ? "yes" : "no", camera->size.width, camera->size.height);

    ///////////////////////////////////////

    /*if (SUCCEEDED(hr))
    {
        hr = camera->reader->GetCurrentMediaType(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            &pType
            );
    }

    if (SUCCEEDED(hr))
    {
        // Register the color converter DSP for this process, in the video
        // processor category. This will enable the sink writer to enumerate
        // the color converter when the sink writer attempts to match the
        // media types.
        hr = MFTRegisterLocalByCLSID(
            __uuidof(CColorConvertDMO),
            MFT_CATEGORY_VIDEO_PROCESSOR,
            L"",
            MFT_ENUM_FLAG_SYNCMFT,
            0,
            NULL,
            0,
            NULL
            );
    }*/

    /////////////////////////////////////////////////

    /*
    IMFSample *pSample = NULL;
    DWORD streamIndex = 0, flags = 0;
    LONGLONG llTimeStamp = 0;
    hr = camera->reader->ReadSample(
        (DWORD)MF_SOURCE_READER_ANY_STREAM,    // Stream index.
        0,                              // Flags.
        &streamIndex,                   // Receives the actual stream index.
        &flags,                         // Receives status flags.
        &llTimeStamp,                   // Receives the time stamp.
        &pSample                        // Receives the sample or NULL.
        );*/

    // subtypes[4] is MFVideoFormat_RGB24 — the only format accepted here;
    // anything else (including failure, which leaves -1) is rejected.
    if (selectedSubtype != 4)
    {
        console_printf("camera: unexpected stream type.\n");
        SafeRelease(&camera->reader);
        free(camera);
        return 0;
    }

    return (camera_t*)camera;
}
// One-time pipeline setup: registers the colour converter DSP, opens the
// webcam at WEBCAM_DEVICE_INDEX, wires a source reader to it (RGB24 at
// CAMERA_RESOLUTION_WIDTH x HEIGHT), and configures an H.264 encoder MFT.
// Returns true on success; any failed CHECK_HR jumps to 'done' and
// returns false.
bool initialise()
{
    UINT32 videoDeviceCount = 0;
    IMFAttributes *videoConfig = NULL;
    IMFActivate **videoDevices = NULL;
    WCHAR *webcamFriendlyName;

    // Register the color converter DSP for this process, in the video
    // processor category, so the pipeline can match RGB/YUV media types.
    CHECK_HR(MFTRegisterLocalByCLSID(
        __uuidof(CColorConvertDMO),
        MFT_CATEGORY_VIDEO_PROCESSOR,
        L"",
        MFT_ENUM_FLAG_SYNCMFT,
        0,
        NULL,
        0,
        NULL
        ), "Error registering colour converter DSP.\n");

    // Get the first available webcam.
    CHECK_HR(MFCreateAttributes(&videoConfig, 1), "Error creating video configuation.\n");

    // Request video capture devices.
    CHECK_HR(videoConfig->SetGUID(
        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID), "Error initialising video configuration object.");

    CHECK_HR(MFEnumDeviceSources(videoConfig, &videoDevices, &videoDeviceCount), "Error enumerating video devices.\n");

    // NOTE(review): videoDeviceCount is not checked against
    // WEBCAM_DEVICE_INDEX before indexing — confirm a device exists.
    CHECK_HR(videoDevices[WEBCAM_DEVICE_INDEX]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &webcamFriendlyName, NULL), "Error retrieving vide device friendly name.\n");

    wprintf(L"First available webcam: %s\n", webcamFriendlyName);

    CHECK_HR(videoDevices[WEBCAM_DEVICE_INDEX]->ActivateObject(IID_PPV_ARGS(&videoSource)), "Error activating video device.\n");

    // Create a source reader.
    CHECK_HR(MFCreateSourceReaderFromMediaSource(
        videoSource,
        videoConfig,
        &_videoReader), "Error creating video source reader.\n");

    //ListModes(_videoReader);

    CHECK_HR(_videoReader->GetCurrentMediaType(
        (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
        &videoSourceOutputType), "Error retrieving current media type from first video stream.\n");

    Console::WriteLine(GetMediaTypeDescription(videoSourceOutputType));

    // Note the webcam needs to support this media type. The list of media
    // types supported can be obtained using the ListTypes function in
    // MFUtility.h.
    // Reader output: RGB24 frames at the configured camera resolution.
    MFCreateMediaType(&pSrcOutMediaType);
    pSrcOutMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    //pSrcOutMediaType->SetGUID(MF_MT_SUBTYPE, WMMEDIASUBTYPE_I420);
    pSrcOutMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24);
    MFSetAttributeSize(pSrcOutMediaType, MF_MT_FRAME_SIZE, CAMERA_RESOLUTION_WIDTH, CAMERA_RESOLUTION_HEIGHT);
    CHECK_HR(MFSetAttributeRatio(pSrcOutMediaType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1), "Failed to set frame rate on video device out type.\n");

    CHECK_HR(_videoReader->SetCurrentMediaType(0, NULL, pSrcOutMediaType), "Failed to set media type on source reader.\n");
    //CHECK_HR(_videoReader->SetCurrentMediaType(0, NULL, videoSourceOutputType), "Failed to setdefault media type on source reader.\n");

    // Create H.264 encoder.
    CHECK_HR(CoCreateInstance(CLSID_CMSH264EncoderMFT, NULL, CLSCTX_INPROC_SERVER, IID_IUnknown, (void**)&spTransformUnk), "Failed to create H264 encoder MFT.\n");

    CHECK_HR(spTransformUnk->QueryInterface(IID_PPV_ARGS(&_pTransform)), "Failed to get IMFTransform interface from H264 encoder MFT object.\n");

    // Encoder output type: H.264 at the camera resolution / target rate.
    MFCreateMediaType(&pMFTOutputMediaType);
    pMFTOutputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    pMFTOutputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
    //pMFTOutputMediaType->SetUINT32(MF_MT_AVG_BITRATE, 240000);
    CHECK_HR(pMFTOutputMediaType->SetUINT32(MF_MT_AVG_BITRATE, TARGET_AVERAGE_BIT_RATE), "Failed to set average bit rate on H264 output media type.\n");
    CHECK_HR(MFSetAttributeSize(pMFTOutputMediaType, MF_MT_FRAME_SIZE, CAMERA_RESOLUTION_WIDTH, CAMERA_RESOLUTION_HEIGHT), "Failed to set frame size on H264 MFT out type.\n");
    CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1), "Failed to set frame rate on H264 MFT out type.\n");
    CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1), "Failed to set aspect ratio on H264 MFT out type.\n");
    pMFTOutputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, 2); // 2 = Progressive scan, i.e. non-interlaced.
    pMFTOutputMediaType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
    //CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Base), "Failed to set profile on H264 MFT out type.\n");
    //CHECK_HR(pMFTOutputMediaType->SetDouble(MF_MT_MPEG2_LEVEL, 3.1), "Failed to set level on H264 MFT out type.\n");
    //CHECK_HR(pMFTOutputMediaType->SetUINT32(MF_MT_MAX_KEYFRAME_SPACING, 10), "Failed to set key frame interval on H264 MFT out type.\n");
    //CHECK_HR(pMFTOutputMediaType->SetUINT32(CODECAPI_AVEncCommonQuality, 100), "Failed to set H264 codec qulaity.\n");
    //hr = pAttributes->SetUINT32(CODECAPI_AVEncMPVGOPSize, 1)

    CHECK_HR(_pTransform->SetOutputType(0, pMFTOutputMediaType, 0), "Failed to set output media type on H.264 encoder MFT.\n");

    // Encoder input type: IYUV frames.
    // NOTE(review): the reader above outputs RGB24 while the encoder input
    // is IYUV — presumably the registered colour converter bridges the
    // two; confirm against the rest of the pipeline.
    MFCreateMediaType(&pMFTInputMediaType);
    pMFTInputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    pMFTInputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_IYUV);
    CHECK_HR(MFSetAttributeSize(pMFTInputMediaType, MF_MT_FRAME_SIZE, CAMERA_RESOLUTION_WIDTH, CAMERA_RESOLUTION_HEIGHT), "Failed to set frame size on H264 MFT out type.\n");
    CHECK_HR(MFSetAttributeRatio(pMFTInputMediaType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1), "Failed to set frame rate on H264 MFT out type.\n");
    CHECK_HR(MFSetAttributeRatio(pMFTInputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1), "Failed to set aspect ratio on H264 MFT out type.\n");
    pMFTInputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, 2);

    CHECK_HR(_pTransform->SetInputType(0, pMFTInputMediaType, 0), "Failed to set input media type on H.264 encoder MFT.\n");

    CHECK_HR(_pTransform->GetInputStatus(0, &mftStatus), "Failed to get input status from H.264 MFT.\n");
    if (MFT_INPUT_STATUS_ACCEPT_DATA != mftStatus)
    {
        printf("E: ApplyTransform() pTransform->GetInputStatus() not accept data.\n");
        goto done;
    }

    //Console::WriteLine(GetMediaTypeDescription(pMFTOutputMediaType));

    // Flush any stale data, then tell the encoder streaming is starting.
    CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL), "Failed to process FLUSH command on H.264 MFT.\n");
    CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL), "Failed to process BEGIN_STREAMING command on H.264 MFT.\n");
    CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL), "Failed to process START_OF_STREAM command on H.264 MFT.\n");

    memset(&_outputDataBuffer, 0, sizeof _outputDataBuffer);

    return true;

done:

    printf("MediaFoundationH264LiveSource initialisation failed.\n");
    return false;
}
// Lets the user pick a capture device and hands the chosen activation
// object to a freshly created CPreview instance.
void OnChooseDevice(HWND hwnd)
{
    HRESULT hr = S_OK;
    ChooseDeviceParam param = { 0 };

    IMFAttributes *pAttributes = NULL;

    // Release the previous instance of the preview object, if any.
    if (g_pPreview)
    {
        g_pPreview->CloseDevice();
        SafeRelease(&g_pPreview);
    }

    // Create a new instance of the preview object.
    hr = CPreview::CreateInstance(hwnd, &g_pPreview);

    // Create an attribute store to specify the enumeration parameters.
    if (SUCCEEDED(hr))
    {
        hr = MFCreateAttributes(&pAttributes, 1);
    }

    // Ask for source type = video capture devices
    if (SUCCEEDED(hr))
    {
        hr = pAttributes->SetGUID(
            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
            );
    }

    // Enumerate devices.
    if (SUCCEEDED(hr))
    {
        // BUGFIX: "&param" had been corrupted to the mojibake "¶m".
        hr = MFEnumDeviceSources(pAttributes, &param.ppDevices, &param.count);
    }

    if (SUCCEEDED(hr))
    {
        // Ask the user to select one.
        INT_PTR result = DialogBoxParam(
            GetModuleHandle(NULL),
            MAKEINTRESOURCE(IDD_CHOOSE_DEVICE),
            hwnd,
            DlgProc,
            (LPARAM)&param
            );

        if ((result == IDOK) && (param.selection != (UINT32)-1))
        {
            UINT iDevice = param.selection;
            if (iDevice >= param.count)
            {
                hr = E_UNEXPECTED;
            }
            else
            {
                // Give this source to the CPreview object for preview.
                hr = g_pPreview->SetDevice(param.ppDevices[iDevice]);
            }
        }
    }

    SafeRelease(&pAttributes);

    // Release the activation objects and the enumeration array itself.
    for (DWORD i = 0; i < param.count; i++)
    {
        SafeRelease(&param.ppDevices[i]);
    }
    CoTaskMemFree(param.ppDevices);

    if (FAILED(hr))
    {
        ShowErrorMessage(hwnd, L"Cannot create the video capture device", hr);
    }
}
// Picks the first available Windows Media audio encoder output format
// and attaches a copy of it to the transcode profile.
HRESULT CTranscoder::ConfigureAudioOutput()
{
    assert (m_pProfile);

    HRESULT hr = S_OK;
    DWORD dwMTCount = 0;

    IMFCollection *pAvailableTypes = NULL;
    IUnknown *pUnkAudioType = NULL;
    IMFMediaType *pAudioType = NULL;
    IMFAttributes *pAudioAttrs = NULL;

    // Get the list of output formats supported by the Windows Media
    // audio encoder.
    hr = MFTranscodeGetAudioOutputAvailableTypes(
        MFAudioFormat_WMAudioV9,
        MFT_ENUM_FLAG_ALL,
        NULL,
        &pAvailableTypes
        );

    // An empty list counts as a failure.
    if (SUCCEEDED(hr))
    {
        hr = pAvailableTypes->GetElementCount( &dwMTCount );
        if (dwMTCount == 0)
        {
            hr = E_UNEXPECTED;
        }
    }

    // In this simple case, use the first media type in the collection.
    if (SUCCEEDED(hr))
    {
        hr = pAvailableTypes->GetElement(0, &pUnkAudioType);
    }
    if (SUCCEEDED(hr))
    {
        hr = pUnkAudioType->QueryInterface(IID_PPV_ARGS(&pAudioType));
    }

    // Create a copy of the attribute store so that we can modify it safely.
    if (SUCCEEDED(hr))
    {
        hr = MFCreateAttributes(&pAudioAttrs, 0);
    }
    if (SUCCEEDED(hr))
    {
        hr = pAudioType->CopyAllItems(pAudioAttrs);
    }

    // Set the encoder to be Windows Media audio encoder, so that the
    // appropriate MFTs are added to the topology.
    if (SUCCEEDED(hr))
    {
        hr = pAudioAttrs->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_WMAudioV9);
    }

    // Attach the stream description to the transcode profile.
    if (SUCCEEDED(hr))
    {
        hr = m_pProfile->SetAudioAttributes( pAudioAttrs );
    }

    SafeRelease(&pAvailableTypes);
    SafeRelease(&pAudioType);
    SafeRelease(&pUnkAudioType);
    SafeRelease(&pAudioAttrs);
    return hr;
}