void MFPlayer::init(HWND handle) { HRESULT ret; // Initialize Media Foundation //ret = CoInitializeEx(NULL, COINIT_MULTITHREADED); //assert(ret == S_OK); ret = MFStartup(MF_VERSION); assert(ret == S_OK); // Create factory IMFMediaEngineClassFactory *pMediaEngineClassFactory = nullptr; ret = CoCreateInstance(CLSID_MFMediaEngineClassFactory, nullptr, CLSCTX_ALL, IID_PPV_ARGS(&pMediaEngineClassFactory)); assert(ret == S_OK); // Create notify m_pPlayerNodify = new MFPlayerNotify(shared_from_this()); // Create attributes IMFAttributes *pAttributes = nullptr; ret = MFCreateAttributes(&pAttributes, 1); assert(ret == S_OK); ret = pAttributes->SetUnknown(MF_MEDIA_ENGINE_CALLBACK, m_pPlayerNodify); assert(ret == S_OK); ret = pAttributes->SetUINT64(MF_MEDIA_ENGINE_PLAYBACK_HWND, reinterpret_cast<UINT64>(handle)); assert(ret == S_OK); // Create player ret = pMediaEngineClassFactory->CreateInstance(0, pAttributes, &m_pMediaEngine); assert(ret == S_OK); // Release temporary objects pAttributes->Release(); pMediaEngineClassFactory->Release(); }
// Initialize Device List HRESULT DeviceList::EnumerateDevices() { HRESULT hr = S_OK; IMFAttributes *pAttributes = NULL; Clear(); // Initialize an attribute store. We will use this to // specify the enumeration parameters. hr = MFCreateAttributes(&pAttributes, 1); // Ask for source type = video capture devices if (SUCCEEDED(hr)) { hr = pAttributes->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID ); } // Enumerate devices. if (SUCCEEDED(hr)) { hr = MFEnumDeviceSources(pAttributes, &m_ppDevices, &m_cDevices); } SafeRelease(&pAttributes); return hr; }
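// Several snippets in this collection call SafeRelease() without defining it.
// For reference, this is the standard helper from the Media Foundation SDK
// samples: release a COM pointer if it is non-NULL and clear it so that a
// later release becomes a no-op.
template <class T> void SafeRelease(T **ppT)
{
    if (*ppT)
    {
        (*ppT)->Release();
        *ppT = NULL;
    }
}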
bool MediaFoundationCaptureLibrary::BuildListOfDevices() { HRESULT hr = S_OK; IMFAttributes *pAttributes = NULL; CoInitialize(NULL); hr = MFCreateAttributes(&pAttributes, 1); if (SUCCEEDED(hr)) { hr = pAttributes->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID ); } if (SUCCEEDED(hr)) { hr = MediaFoundationVideoDevices::GetInstance().InitDevices(pAttributes); } else { LOG_ERROR("MEDIA FOUNDATION: Access to the video cameras was denied."); } SafeRelease(&pAttributes); return (SUCCEEDED(hr)); }
int CountCaptureDevices() { HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE); if (FAILED(hr)) return 0; hr = MFStartup(MF_VERSION); if (FAILED(hr)) return 0; // choose device IMFAttributes *attributes = NULL; hr = MFCreateAttributes(&attributes, 1); ScopedRelease<IMFAttributes> attributes_s(attributes); if (FAILED(hr)) return 0; hr = attributes->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID ); if (FAILED(hr)) return 0; ChooseDeviceParam param = { 0 }; hr = MFEnumDeviceSources(attributes, &param.mDevices, &param.mCount); if (FAILED(hr)) return 0; return param.mCount; }
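// ChooseDeviceParam and ScopedRelease are not shown in this snippet; the
// sketches below are hypothetical shapes inferred from usage here, not the
// original definitions. ChooseDeviceParam simply carries the output of
// MFEnumDeviceSources; note the caller is still responsible for releasing
// each IMFActivate and freeing the array with CoTaskMemFree.
struct ChooseDeviceParam
{
    IMFActivate **mDevices; // array allocated by MFEnumDeviceSources
    UINT32 mCount;          // number of elements in mDevices
};

// ScopedRelease is a small RAII guard that releases the wrapped COM pointer
// when it goes out of scope, which is why the early returns above do not
// leak the attribute store.
template <class T> class ScopedRelease
{
public:
    explicit ScopedRelease(T *p) : mPtr(p) {}
    ~ScopedRelease() { if (mPtr) mPtr->Release(); }
private:
    T *mPtr;
};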
//------------------------------------------------------------------- // Initialise the source reader // HRESULT VidReader::initSourceReader(WCHAR *filename) { HRESULT hr = S_OK; IMFAttributes *pAttributes = NULL; SafeRelease(&m_pReader); // Configure the source reader to perform video processing hr = MFCreateAttributes(&pAttributes, 1); if (FAILED(hr)) goto done; hr = pAttributes->SetUINT32(MF_SOURCE_READER_ENABLE_VIDEO_PROCESSING, TRUE); if (FAILED(hr)) goto done; // Create the source reader from the URL hr = MFCreateSourceReaderFromURL(filename, pAttributes, &m_pReader); if (FAILED(hr)) goto done; // Attempt to find a video stream hr = selectVideoStream(); if (FAILED(hr)) goto done; // Get the stream format hr = getVideoFormat(); if (FAILED(hr)) goto done; // Get the duration hr = getDuration(); done: return hr; }
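// selectVideoStream() is not shown above. A minimal sketch of the usual
// approach (an assumption, not necessarily VidReader's actual code):
// deselect all streams, then enable only the first video stream so that
// ReadSample() never delivers samples from other streams.
HRESULT VidReader::selectVideoStream()
{
    HRESULT hr = m_pReader->SetStreamSelection((DWORD)MF_SOURCE_READER_ALL_STREAMS, FALSE);
    if (SUCCEEDED(hr))
        hr = m_pReader->SetStreamSelection((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, TRUE);
    return hr;
}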
HRESULT CCapture::OpenMediaSource(IMFMediaSource *pSource) { HRESULT hr = S_OK; IMFAttributes *pAttributes = NULL; hr = MFCreateAttributes(&pAttributes, 2); if (SUCCEEDED(hr)) { hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this); } if (SUCCEEDED(hr)) { hr = MFCreateSourceReaderFromMediaSource( pSource, pAttributes, &m_pReader ); } SafeRelease(&pAttributes); return hr; }
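// Passing `this` as MF_SOURCE_READER_ASYNC_CALLBACK only works because
// CCapture implements IMFSourceReaderCallback. A minimal standalone sketch
// of such an implementation (ReaderCallback is a hypothetical name, not the
// original class):
#include <mfreadwrite.h>
#include <shlwapi.h> // QITAB / QISearch

class ReaderCallback : public IMFSourceReaderCallback
{
    long m_cRef = 1;
public:
    // IUnknown
    STDMETHODIMP QueryInterface(REFIID riid, void **ppv)
    {
        static const QITAB qit[] = { QITABENT(ReaderCallback, IMFSourceReaderCallback), { 0 } };
        return QISearch(this, qit, riid, ppv);
    }
    STDMETHODIMP_(ULONG) AddRef() { return InterlockedIncrement(&m_cRef); }
    STDMETHODIMP_(ULONG) Release()
    {
        ULONG cRef = InterlockedDecrement(&m_cRef);
        if (cRef == 0) { delete this; }
        return cRef;
    }
    // The reader invokes OnReadSample once per ReadSample() request; a real
    // implementation processes pSample and calls ReadSample() again from
    // here to keep the stream flowing.
    STDMETHODIMP OnReadSample(HRESULT, DWORD, DWORD, LONGLONG, IMFSample *) { return S_OK; }
    STDMETHODIMP OnEvent(DWORD, IMFMediaEvent *) { return S_OK; }
    STDMETHODIMP OnFlush(DWORD) { return S_OK; }
};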
void OnChooseDevice(HWND hwnd) { ChooseDeviceParam param; IMFAttributes *pAttributes = NULL; HRESULT hr = MFCreateAttributes(&pAttributes, 1); if (FAILED(hr)) { goto done; } // Ask for source type = video capture devices hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID); if (FAILED(hr)) { goto done; } // Enumerate devices. hr = MFEnumDeviceSources(pAttributes, &param.ppDevices, &param.count); if (FAILED(hr)) { goto done; } // Ask the user to select one. INT_PTR result = DialogBoxParam(GetModuleHandle(NULL), MAKEINTRESOURCE(IDD_CHOOSE_DEVICE), hwnd, ChooseDeviceDlgProc, (LPARAM)&param); if ((result == IDOK) && (param.selection != (UINT32)-1)) { UINT iDevice = param.selection; if (iDevice >= param.count) { hr = E_UNEXPECTED; goto done; } hr = g_pEngine->InitializeCaptureManager(hPreview, param.ppDevices[iDevice]); if (FAILED(hr)) { goto done; } SafeRelease(&pSelectedDevice); pSelectedDevice = param.ppDevices[iDevice]; pSelectedDevice->AddRef(); } done: SafeRelease(&pAttributes); if (FAILED(hr)) { ShowError(hwnd, IDS_ERR_SET_DEVICE, hr); } UpdateUI(hwnd); }
bool VideoCapture::CreateSourceReaderAsync() { HRESULT hr; HANDLE hEvent = CreateEvent(NULL, FALSE, FALSE, NULL); if (hEvent == NULL) { MFUtil::ShowMessage(TEXT("CreateEvent Failed."), ML_ERROR); return false; } this->CallBack = new SourceReaderCallBack(hEvent); if (this->CallBack == NULL) { MFUtil::ShowMessage(TEXT("CreateCallBack Failed."), ML_ERROR); return false; } IMFAttributes *pAttributes = NULL; hr = MFCreateAttributes(&pAttributes, 1); if (hr != S_OK) { MFUtil::ShowMessage(TEXT("CreateAttributes Failed."), ML_ERROR); this->ReleaseDevices(); return false; } hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this->CallBack); if (hr != S_OK) { MFUtil::ShowMessage(TEXT("AttributeSetting Failed."), ML_ERROR); this->ReleaseDevices(); return false; } pin_ptr<IMFSourceReader *> pSourceReader = &(this->SourceReader); hr = MFCreateSourceReaderFromMediaSource(this->Source, pAttributes, pSourceReader); if (hr != S_OK) { MFUtil::ShowMessage(TEXT("CreateSourceReader Failed."), ML_ERROR); this->ReleaseDevices(); return false; } hr = this->ConfigureSourceReader(); if (hr != S_OK) { MFUtil::ShowMessage(TEXT("ConfigureDecoder Failed."), ML_ERROR); MFUtil::ShowErrorNameFromCode(hr); return false; } // The second and subsequent ReadSample() calls are issued from inside the // callback, so the SourceReaderCallBack class also needs the SourceReader. this->CallBack->SourceReader = this->SourceReader; return true; }
void GetCaptureDeviceName(int aDevice, char * aNamebuffer, int aBufferlength) { int i; if (!aNamebuffer || aBufferlength <= 0) return; aNamebuffer[0] = 0; HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE); if (FAILED(hr)) return; hr = MFStartup(MF_VERSION); if (FAILED(hr)) return; // choose device IMFAttributes *attributes = NULL; hr = MFCreateAttributes(&attributes, 1); ScopedRelease<IMFAttributes> attributes_s(attributes); if (FAILED(hr)) return; hr = attributes->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID ); if (FAILED(hr)) return; ChooseDeviceParam param = { 0 }; hr = MFEnumDeviceSources(attributes, &param.mDevices, &param.mCount); if (FAILED(hr)) return; if (aDevice < (signed)param.mCount) { WCHAR *name = 0; UINT32 namelen = 255; hr = param.mDevices[aDevice]->GetAllocatedString( MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &name, &namelen ); if (SUCCEEDED(hr) && name) { i = 0; while (i < aBufferlength - 1 && i < (signed)namelen && name[i] != 0) { aNamebuffer[i] = (char)name[i]; i++; } aNamebuffer[i] = 0; CoTaskMemFree(name); } } }
HRESULT CTranscoder::ConfigureVideoOutput() { assert (m_pProfile); HRESULT hr = S_OK; IMFAttributes* pVideoAttrs = NULL; // Configure the video stream // Create a new attribute store. if (SUCCEEDED(hr)) { hr = MFCreateAttributes( &pVideoAttrs, 5 ); } // Set the encoder to be Windows Media video encoder, so that the appropriate MFTs are added to the topology. if (SUCCEEDED(hr)) { hr = pVideoAttrs->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_WMV3); } // Set the frame rate. if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pVideoAttrs, MF_MT_FRAME_RATE, 30, 1); } //Set the frame size. if (SUCCEEDED(hr)) { hr = MFSetAttributeSize(pVideoAttrs, MF_MT_FRAME_SIZE, 320, 240); } //Set the pixel aspect ratio if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pVideoAttrs, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); } // Set the bit rate. if (SUCCEEDED(hr)) { hr = pVideoAttrs->SetUINT32(MF_MT_AVG_BITRATE, 300000); } // Set the attribute store on the transcode profile. if (SUCCEEDED(hr)) { hr = m_pProfile->SetVideoAttributes( pVideoAttrs ); } SafeRelease(&pVideoAttrs); return hr; }
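// MFSetAttributeSize and MFSetAttributeRatio used above are thin inline
// wrappers that pack two UINT32 values into one UINT64 attribute. For
// illustration (SetFrameRate30 is a hypothetical helper), the frame-rate
// call is equivalent to this explicit form:
HRESULT SetFrameRate30(IMFAttributes *pAttrs)
{
    // High 32 bits = numerator, low 32 bits = denominator: 30/1 fps.
    return pAttrs->SetUINT64(MF_MT_FRAME_RATE, Pack2UINT32AsUINT64(30, 1));
}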
void MFPlayer::init(const OTextureRef& pRenderTarget) { m_pRenderTarget = pRenderTarget; auto pRendererD3D11 = std::dynamic_pointer_cast<ORendererD3D11>(oRenderer); HRESULT ret; // Initialize Media Foundation //ret = CoInitializeEx(NULL, COINIT_MULTITHREADED); //assert(ret == S_OK); ret = MFStartup(MF_VERSION); assert(ret == S_OK); // Create factory IMFMediaEngineClassFactory *pMediaEngineClassFactory = nullptr; ret = CoCreateInstance(CLSID_MFMediaEngineClassFactory, nullptr, CLSCTX_ALL, IID_PPV_ARGS(&pMediaEngineClassFactory)); assert(ret == S_OK); // Create notify m_pPlayerNodify = new MFPlayerNotify(shared_from_this()); // Create attributes IMFAttributes *pAttributes = nullptr; ret = MFCreateAttributes(&pAttributes, 1); assert(ret == S_OK); ret = pAttributes->SetUnknown(MF_MEDIA_ENGINE_CALLBACK, m_pPlayerNodify); assert(ret == S_OK); ID3D10Multithread *pMultithread = nullptr; ID3D11Device *pDevice = pRendererD3D11->getDevice(); ret = pDevice->QueryInterface(IID_PPV_ARGS(&pMultithread)); assert(ret == S_OK); pMultithread->SetMultithreadProtected(TRUE); pMultithread->Release(); UINT resetToken = 0; ret = MFCreateDXGIDeviceManager(&resetToken, &m_pDXGIManager); assert(ret == S_OK); ret = m_pDXGIManager->ResetDevice(pRendererD3D11->getDevice(), resetToken); assert(ret == S_OK); ret = pAttributes->SetUnknown(MF_MEDIA_ENGINE_DXGI_MANAGER, m_pDXGIManager); assert(ret == S_OK); ret = pAttributes->SetUINT32(MF_MEDIA_ENGINE_VIDEO_OUTPUT_FORMAT, DXGI_FORMAT_R8G8B8A8_UNORM); assert(ret == S_OK); // Create player ret = pMediaEngineClassFactory->CreateInstance(MF_MEDIA_ENGINE_WAITFORSTABLE_STATE, pAttributes, &m_pMediaEngine); assert(ret == S_OK); // Release temporary objects pAttributes->Release(); pMediaEngineClassFactory->Release(); }
HRESULT ConfigureAudioEncoding(IMFCaptureSource *pSource, IMFCaptureRecordSink *pRecord, REFGUID guidEncodingType) { IMFCollection *pAvailableTypes = NULL; IMFMediaType *pMediaType = NULL; IMFAttributes *pAttributes = NULL; // Configure the audio format for the recording sink. HRESULT hr = MFCreateAttributes(&pAttributes, 1); if(FAILED(hr)) { goto done; } // Enumerate low latency media types hr = pAttributes->SetUINT32(MF_LOW_LATENCY, TRUE); if(FAILED(hr)) { goto done; } // Get a list of encoded output formats that are supported by the encoder. hr = MFTranscodeGetAudioOutputAvailableTypes(guidEncodingType, MFT_ENUM_FLAG_ALL | MFT_ENUM_FLAG_SORTANDFILTER, pAttributes, &pAvailableTypes); if (FAILED(hr)) { goto done; } // Pick the first format from the list. hr = GetCollectionObject(pAvailableTypes, 0, &pMediaType); if (FAILED(hr)) { goto done; } // Connect the audio stream to the recording sink. DWORD dwSinkStreamIndex; hr = pRecord->AddStream((DWORD)MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_AUDIO, pMediaType, NULL, &dwSinkStreamIndex); if(hr == MF_E_INVALIDSTREAMNUMBER) { //If an audio device is not present, allow video only recording hr = S_OK; } done: SafeRelease(&pAvailableTypes); SafeRelease(&pMediaType); SafeRelease(&pAttributes); return hr; }
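// GetCollectionObject() is a small helper from the SDK samples; a minimal
// sketch: fetch element `index` from the collection and query it for the
// requested interface.
template <class IFACE>
HRESULT GetCollectionObject(IMFCollection *pCollection, DWORD index, IFACE **ppObject)
{
    IUnknown *pUnk = NULL;
    HRESULT hr = pCollection->GetElement(index, &pUnk);
    if (SUCCEEDED(hr))
    {
        hr = pUnk->QueryInterface(IID_PPV_ARGS(ppObject));
        pUnk->Release();
    }
    return hr;
}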
IMFActivate* WinCaptureDevice::ChooseFirst(std::string& error) { IMFActivate* result = NULL; HRESULT hr = S_OK; // Declared up front so the gotos below do not jump over an initialization. IMFAttributes* pAttributes = NULL; IMFActivate **devices = NULL; uint numDevices = 0; // Initialize an attribute store to specify enumeration parameters. hr = MFCreateAttributes(&pAttributes, 1); if (FAILED(hr)) { goto done; } // Ask for source type = video capture devices. hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID); if (FAILED(hr)) { goto done; } // Enumerate devices. hr = MFEnumDeviceSources(pAttributes, &devices, &numDevices); if (FAILED(hr)) { goto done; } if (numDevices > 0) result = devices[0]; done: SafeRelease(&pAttributes); for (uint i = 0; i < numDevices; i++) { if (devices[i] != result) SafeRelease(&devices[i]); } CoTaskMemFree(devices); if (FAILED(hr)) { //ShowErrorMessage(L"Cannot create a video capture device", hr); } return result; }
HRESULT CMFCamCapture::enumVideoDevices() { IMFAttributes *pAttributes = NULL; IMFActivate **ppDevices = NULL; UINT32 count = 0; // Initialized up front: the cleanup loop below runs even when enumeration never happened. // Create an attribute store to specify the enumeration parameters. HRESULT hr = MFCreateAttributes(&pAttributes, 1); if (FAILED(hr)) { goto done; } // Source type: video capture devices hr = pAttributes->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID ); if (FAILED(hr)) { goto done; } // Enumerate devices. hr = MFEnumDeviceSources(pAttributes, &ppDevices, &count); if (FAILED(hr)) { goto done; } if (count == 0) { hr = E_FAIL; goto done; } // cache the devices. m_deviceActivateObjects.clear(); for (DWORD i = 0; i < count; i++) { m_deviceActivateObjects.push_back(CComPtr<IMFActivate>(ppDevices[i])); } done: SafeRelease(&pAttributes); for (DWORD i = 0; i < count; i++) { SafeRelease(&ppDevices[i]); } CoTaskMemFree(ppDevices); return hr; }
STDMETHODIMP UnLockAsynMFT(IMFTransform* pTransform) { HRESULT hr = S_OK; IMFAttributes *pAttributes = NULL; UINT32 unValue = 0; DMFTCHECKNULL_GOTO(pTransform, done, E_INVALIDARG); DMFTCHECKHR_GOTO(pTransform->GetAttributes(&pAttributes), done); DMFTCHECKHR_GOTO(pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &unValue), done); if (unValue) { // An async MFT refuses format negotiation until the client sets MF_TRANSFORM_ASYNC_UNLOCK. DMFTCHECKHR_GOTO(pAttributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE), done); } done: if (pAttributes) { pAttributes->Release(); } return hr; }
IMFMediaType* MFDecoderSourceReader::setSource(IMFMediaSource *source, const QAudioFormat &audioFormat) { IMFMediaType *mediaType = NULL; if (m_source == source) return mediaType; if (m_source) { m_source->Release(); m_source = NULL; } if (m_sourceReader) { m_sourceReader->Release(); m_sourceReader = NULL; } if (!source) return mediaType; IMFAttributes *attr = NULL; if (FAILED(MFCreateAttributes(&attr, 1))) return mediaType; if (SUCCEEDED(attr->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this))) { if (SUCCEEDED(MFCreateSourceReaderFromMediaSource(source, attr, &m_sourceReader))) { m_source = source; m_source->AddRef(); m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_ALL_STREAMS), FALSE); m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), TRUE); IMFMediaType *pPartialType = NULL; MFCreateMediaType(&pPartialType); pPartialType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio); if (audioFormat.sampleType() == QAudioFormat::Float) { pPartialType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_Float); } else { pPartialType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM); } m_sourceReader->SetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), NULL, pPartialType); pPartialType->Release(); m_sourceReader->GetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), &mediaType); // Ensure the stream is selected. m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), TRUE); } } attr->Release(); return mediaType; }
HRESULT CTranscoder::ConfigureContainer() { assert (m_pProfile); HRESULT hr = S_OK; IMFAttributes* pContainerAttrs = NULL; //Set container attributes hr = MFCreateAttributes( &pContainerAttrs, 2 ); //Set the output container to be ASF type if (SUCCEEDED(hr)) { hr = pContainerAttrs->SetGUID( MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_ASF ); } // Use the default setting. Media Foundation will use the stream // settings set in ConfigureAudioOutput and ConfigureVideoOutput. if (SUCCEEDED(hr)) { hr = pContainerAttrs->SetUINT32( MF_TRANSCODE_ADJUST_PROFILE, MF_TRANSCODE_ADJUST_PROFILE_DEFAULT ); } //Set the attribute store on the transcode profile. if (SUCCEEDED(hr)) { hr = m_pProfile->SetContainerAttributes(pContainerAttrs); } SafeRelease(&pContainerAttrs); return hr; }
// Sets the zoom rectangle on the mixer. HRESULT EVRCustomPresenter::SetMixerSourceRect(IMFTransform *pMixer, const MFVideoNormalizedRect& nrcSource) { Log("EVRCustomPresenter::SetMixerSourceRect"); CheckPointer(pMixer, E_POINTER); HRESULT hr = S_OK; IMFAttributes *pAttributes = NULL; hr = pMixer->GetAttributes(&pAttributes); CHECK_HR(hr, "EVRCustomPresenter::SetMixerSourceRect could not get mixer attributes"); hr = pAttributes->SetBlob(VIDEO_ZOOM_RECT, (const UINT8*)&nrcSource, sizeof(nrcSource)); if (FAILED(hr)) { Log("EVRCustomPresenter::SetMixerSourceRect could not set zoom rectangle"); SAFE_RELEASE(pAttributes); return hr; } SAFE_RELEASE(pAttributes); return hr; }
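// Example call (pPresenter and pMixer stand in for an EVRCustomPresenter
// instance and its mixer transform). The rectangle is normalized to [0,1]
// over the source frame, so this zooms into the upper-left quadrant;
// {0.0f, 0.0f, 1.0f, 1.0f} would restore the full frame.
MFVideoNormalizedRect nrcZoom = { 0.0f, 0.0f, 0.5f, 0.5f };
HRESULT hrZoom = pPresenter->SetMixerSourceRect(pMixer, nrcZoom);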
void GetCameraDevices(IMFActivate*** pppDevices, UINT32* pnCount) { HRESULT hr = S_OK; IMFAttributes* pAttributes = NULL; WCHAR* pszFriendlyName = NULL; UINT32 cchName = 0; hr = MFCreateAttributes(&pAttributes, 1); if (FAILED(hr)) goto END; hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID); if (FAILED(hr)) goto END; hr = MFEnumDeviceSources(pAttributes, pppDevices, pnCount); if (FAILED(hr)) goto END; END: SafeRelease(&pAttributes); return; }
//---------------------------------------------------------------------------- long MediaFoundationVideoDevice::InitDevice() { HRESULT hr = S_OK; IMFAttributes *pAttributes = NULL; IMFActivate * vd_pActivate = NULL; CoInitialize(NULL); if (SUCCEEDED(hr)) { hr = MFCreateAttributes(&pAttributes, 1); } if (SUCCEEDED(hr)) { hr = pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID); if (!SUCCEEDED(hr)) { LOG_ERROR("MediaFoundationVideoDevice::InitDevice failed: device " << this->DeviceIndex << ": The source-type attribute for device enumeration cannot be set"); } } if (SUCCEEDED(hr)) { hr = CheckDevice(pAttributes, &vd_pActivate); if (SUCCEEDED(hr) && vd_pActivate) { SafeRelease(&this->Source); hr = vd_pActivate->ActivateObject(__uuidof(IMFMediaSource), (void**)&this->Source); SafeRelease(&vd_pActivate); } else { LOG_ERROR("MediaFoundationVideoDevice::InitDevice failed: device " << this->DeviceIndex << ": Cannot activate device"); } } SafeRelease(&pAttributes); return hr; }
camera_t * camera_open(const char *portname, int highres) { camera_internal_t *camera = (camera_internal_t*)malloc(sizeof(camera_internal_t)); camera->reader = NULL; if (highres) { console_printf("camera: highres is not supported on windows (yet).\n"); highres = 0; } HRESULT hr = S_OK; // Initialize Media Foundation if (SUCCEEDED(hr)) { hr = MFStartup(MF_VERSION); } /////////////////////////////////////////// IMFAttributes *pAttributes = NULL; UINT32 m_cDevices = 0; IMFActivate **m_ppDevices = NULL; // Initialize an attribute store. We will use this to // specify the enumeration parameters. hr = MFCreateAttributes(&pAttributes, 1); // Ask for source type = video capture devices if (SUCCEEDED(hr)) { hr = pAttributes->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID ); } // Enumerate devices. if (SUCCEEDED(hr)) { hr = MFEnumDeviceSources(pAttributes, &m_ppDevices, &m_cDevices); } SafeRelease(&pAttributes); ///////////////////////////////////////////////// IMFActivate *pActivate = NULL; if (m_cDevices) { console_printf("camera: there are %d camera devices connected (0..%d).\n", m_cDevices, m_cDevices > 0 ? m_cDevices - 1 : 0); int device = strtol(portname, 0, 10); if (device < 0 || device >= (int)m_cDevices) console_printf("camera: device %d does not exist.\n", device); else pActivate = m_ppDevices[device]; } else { console_printf("camera: could not find a device\n"); } // Bail out before dereferencing pActivate if no device was selected. if (!pActivate) { free(camera); return 0; } ///////////////////////////////////////////////// IMFMediaSource *pSource = NULL; //EnterCriticalSection(&m_critsec); // Create the media source for the device. hr = pActivate->ActivateObject( __uuidof(IMFMediaSource), (void**)&pSource ); /////////////////////////////////////////// //IMFAttributes *pAttributes = NULL; /*hr = MFCreateAttributes(&pAttributes, 2); if (SUCCEEDED(hr)) { hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this); }*/ if (SUCCEEDED(hr)) { hr = MFCreateSourceReaderFromMediaSource( pSource, NULL,//pAttributes, &camera->reader ); } //SafeRelease(&pAttributes); //////////////////////////////////////////////////// // The list of acceptable types. GUID subtypes[] = { MFVideoFormat_NV12, MFVideoFormat_YUY2, MFVideoFormat_UYVY, MFVideoFormat_RGB32, MFVideoFormat_RGB24, MFVideoFormat_IYUV }; //HRESULT hr = S_OK; BOOL bUseNativeType = FALSE; GUID subtype = { 0 }; IMFMediaType *pType = NULL; UINT32 width = 0, height = 0; int selectedSubtype = -1; // If the source's native format matches any of the formats in // the list, prefer the native format. // Note: The camera might support multiple output formats, // including a range of frame dimensions. The application could // provide a list to the user and have the user select the // camera's output format. That is outside the scope of this // sample, however. 
DWORD selectedStreamIndex = MF_SOURCE_READER_FIRST_VIDEO_STREAM; //while (true) //{ hr = camera->reader->GetNativeMediaType( selectedStreamIndex, 0, // Type index &pType ); if (FAILED(hr)) { console_printf("camera: could not get media type\n"); goto done; } hr = ::MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height); if (FAILED(hr)) { console_printf("camera: could not get resolution\n"); goto done; } //if (width != 1280 || height != 960) //{ console_printf("camera: found resolution %dx%d\n", width, height); //selectedStreamIndex++; //continue; //} camera->size.width = width; camera->size.height = height; //break; //} /*UINT32 num = 0, denom = 0; hr = ::MFGetAttributeRatio(pType, MF_MT_FRAME_RATE_RANGE_MAX, &num, &denom); if (FAILED(hr)) { goto done; }*/ //hr = ::MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, 1280, 960); //if (FAILED(hr)) { goto done; } /*hr = ::MFSetAttributeRatio(pType, MF_MT_FRAME_RATE, num, denom); if (FAILED(hr)) { goto done; }*/ hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype); if (FAILED(hr)) { console_printf("camera: could not get stream type(1)\n"); goto done; } for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++) { if (subtype == subtypes[i]) { hr = camera->reader->SetCurrentMediaType( (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, pType ); bUseNativeType = TRUE; selectedSubtype = i; break; } } if (!bUseNativeType) { // None of the native types worked. The camera might offer // output of a compressed type such as MJPEG or DV. // Try adding a decoder. for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++) { hr = pType->SetGUID(MF_MT_SUBTYPE, subtypes[i]); if (FAILED(hr)) { console_printf("camera: could not get stream type(2)\n"); goto done; } hr = camera->reader->SetCurrentMediaType( (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, pType ); if (SUCCEEDED(hr)) { selectedSubtype = i; break; } } } /* hr = ::MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height); WIDTH = width; HEIGHT = height;*/ if (FAILED(hr)) { console_printf("camera: could not find stream type\n"); goto done; } done: SafeRelease(&pType); console_printf("camera: selected type: %d, native: %s, resolution: %dx%d\n", selectedSubtype, bUseNativeType ? "yes" : "no", camera->size.width, camera->size.height); /////////////////////////////////////// /*if (SUCCEEDED(hr)) { hr = camera->reader->GetCurrentMediaType( (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, &pType ); } if (SUCCEEDED(hr)) { // Register the color converter DSP for this process, in the video // processor category. This will enable the sink writer to enumerate // the color converter when the sink writer attempts to match the // media types. hr = MFTRegisterLocalByCLSID( __uuidof(CColorConvertDMO), MFT_CATEGORY_VIDEO_PROCESSOR, L"", MFT_ENUM_FLAG_SYNCMFT, 0, NULL, 0, NULL ); }*/ ///////////////////////////////////////////////// /* IMFSample *pSample = NULL; DWORD streamIndex = 0, flags = 0; LONGLONG llTimeStamp = 0; hr = camera->reader->ReadSample( (DWORD)MF_SOURCE_READER_ANY_STREAM, // Stream index. 0, // Flags. &streamIndex, // Receives the actual stream index. &flags, // Receives status flags. &llTimeStamp, // Receives the time stamp. &pSample // Receives the sample or NULL. );*/ if (selectedSubtype != 4) // subtypes[4] is MFVideoFormat_RGB24, the only format this backend consumes { console_printf("camera: unexpected stream type.\n"); SafeRelease(&camera->reader); free(camera); return 0; } return (camera_t*)camera; }
void OnChooseDevice(HWND hwnd) { HRESULT hr = S_OK; ChooseDeviceParam param = { 0 }; IMFAttributes *pAttributes = NULL; // Release the previous instance of the preview object, if any. if (g_pPreview) { g_pPreview->CloseDevice(); SafeRelease(&g_pPreview); } // Create a new instance of the preview object. hr = CPreview::CreateInstance(hwnd, &g_pPreview); // Create an attribute store to specify the enumeration parameters. if (SUCCEEDED(hr)) { hr = MFCreateAttributes(&pAttributes, 1); } // Ask for source type = video capture devices if (SUCCEEDED(hr)) { hr = pAttributes->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID ); } // Enumerate devices. if (SUCCEEDED(hr)) { hr = MFEnumDeviceSources(pAttributes, &param.ppDevices, &param.count); } if (SUCCEEDED(hr)) { // Ask the user to select one. INT_PTR result = DialogBoxParam( GetModuleHandle(NULL), MAKEINTRESOURCE(IDD_CHOOSE_DEVICE), hwnd, DlgProc, (LPARAM)&param ); if ((result == IDOK) && (param.selection != (UINT32)-1)) { UINT iDevice = param.selection; if (iDevice >= param.count) { hr = E_UNEXPECTED; } else { // Give this source to the CPreview object for preview. hr = g_pPreview->SetDevice(param.ppDevices[iDevice]); } } } SafeRelease(&pAttributes); for (DWORD i = 0; i < param.count; i++) { SafeRelease(&param.ppDevices[i]); } CoTaskMemFree(param.ppDevices); if (FAILED(hr)) { ShowErrorMessage(hwnd, L"Cannot create the video capture device", hr); } }
unsigned char *BBWin8Game::LoadAudioData( String path,int *length,int *channels,int *format,int *hertz ){ String url=PathToFilePath( path ); DXASS( MFStartup( MF_VERSION ) ); IMFAttributes *attrs; DXASS( MFCreateAttributes( &attrs,1 ) ); DXASS( attrs->SetUINT32( MF_LOW_LATENCY,TRUE ) ); IMFSourceReader *reader; DXASS( MFCreateSourceReaderFromURL( url.ToCString<wchar_t>(),attrs,&reader ) ); attrs->Release(); IMFMediaType *mediaType; DXASS( MFCreateMediaType( &mediaType ) ); DXASS( mediaType->SetGUID( MF_MT_MAJOR_TYPE,MFMediaType_Audio ) ); DXASS( mediaType->SetGUID( MF_MT_SUBTYPE,MFAudioFormat_PCM ) ); DXASS( reader->SetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,mediaType ) ); mediaType->Release(); IMFMediaType *outputMediaType; DXASS( reader->GetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,&outputMediaType ) ); WAVEFORMATEX *wformat; uint32 formatByteCount=0; DXASS( MFCreateWaveFormatExFromMFMediaType( outputMediaType,&wformat,&formatByteCount ) ); *channels=wformat->nChannels; *format=wformat->wBitsPerSample/8; *hertz=wformat->nSamplesPerSec; CoTaskMemFree( wformat ); outputMediaType->Release(); /* PROPVARIANT var; DXASS( reader->GetPresentationAttribute( MF_SOURCE_READER_MEDIASOURCE,MF_PD_DURATION,&var ) ); LONGLONG duration=var.uhVal.QuadPart; float64 durationInSeconds=(duration / (float64)(10000 * 1000)); m_maxStreamLengthInBytes=(uint32)( durationInSeconds * m_waveFormat.nAvgBytesPerSec ); */ std::vector<unsigned char*> bufs; std::vector<uint32> lens; uint32 len=0; for( ;; ){ uint32 flags=0; IMFSample *sample; DXASS( reader->ReadSample( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,0,reinterpret_cast<DWORD*>(&flags),0,&sample ) ); if( flags & MF_SOURCE_READERF_ENDOFSTREAM ){ break; } if( sample==0 ){ abort(); } IMFMediaBuffer *mediaBuffer; DXASS( sample->ConvertToContiguousBuffer( &mediaBuffer ) ); uint8 *audioData=0; uint32 sampleBufferLength=0; DXASS( mediaBuffer->Lock( &audioData,0,reinterpret_cast<DWORD*>( &sampleBufferLength ) ) ); unsigned char *buf=(unsigned char*)malloc( sampleBufferLength ); memcpy( buf,audioData,sampleBufferLength ); bufs.push_back( buf ); lens.push_back( sampleBufferLength ); len+=sampleBufferLength; DXASS( mediaBuffer->Unlock() ); mediaBuffer->Release(); sample->Release(); } reader->Release(); *length=len/(*channels * *format); unsigned char *data=(unsigned char*)malloc( len ); unsigned char *p=data; for( int i=0;i<bufs.size();++i ){ memcpy( p,bufs[i],lens[i] ); free( bufs[i] ); p+=lens[i]; } gc_force_sweep=true; return data; }
HRESULT CPreview::SetDevice(IMFActivate *pActivate) { HRESULT hr = S_OK; IMFMediaSource *pSource = NULL; IMFAttributes *pAttributes = NULL; IMFMediaType *pType = NULL; EnterCriticalSection(&m_critsec); // Release the current device, if any. hr = CloseDevice(); // Create the media source for the device. if (SUCCEEDED(hr)) { hr = pActivate->ActivateObject( __uuidof(IMFMediaSource), (void**)&pSource ); } // Get the symbolic link. if (SUCCEEDED(hr)) { hr = pActivate->GetAllocatedString( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &m_pwszSymbolicLink, &m_cchSymbolicLink ); } // // Create the source reader. // // Create an attribute store to hold initialization settings. if (SUCCEEDED(hr)) { hr = MFCreateAttributes(&pAttributes, 2); } if (SUCCEEDED(hr)) { hr = pAttributes->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, TRUE); } // Set the callback pointer. if (SUCCEEDED(hr)) { hr = pAttributes->SetUnknown( MF_SOURCE_READER_ASYNC_CALLBACK, this ); } if (SUCCEEDED(hr)) { hr = MFCreateSourceReaderFromMediaSource( pSource, pAttributes, &m_pReader ); } // Try to find a suitable output type. if (SUCCEEDED(hr)) { for (DWORD i = 0; ; i++) { hr = m_pReader->GetNativeMediaType( (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, i, &pType ); if (FAILED(hr)) { break; } hr = TryMediaType(pType); SafeRelease(&pType); if (SUCCEEDED(hr)) { // Found an output type. break; } } } if (SUCCEEDED(hr)) { // Ask for the first sample. hr = m_pReader->ReadSample( (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL ); } if (FAILED(hr)) { if (pSource) { pSource->Shutdown(); // NOTE: The source reader shuts down the media source // by default, but we might not have gotten that far. } CloseDevice(); } SafeRelease(&pSource); SafeRelease(&pAttributes); SafeRelease(&pType); LeaveCriticalSection(&m_critsec); return hr; }
HRESULT CaptureClass::initCapture(int aDevice) { mWhoAmI = aDevice; HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE); DO_OR_DIE; hr = MFStartup(MF_VERSION); DO_OR_DIE; // choose device IMFAttributes *attributes = NULL; hr = MFCreateAttributes(&attributes, 1); ScopedRelease<IMFAttributes> attributes_s(attributes); DO_OR_DIE; hr = attributes->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID ); DO_OR_DIE; ChooseDeviceParam param = { 0 }; hr = MFEnumDeviceSources(attributes, &param.mDevices, &param.mCount); DO_OR_DIE; if ((signed)param.mCount > aDevice) { // use param.mDevices[aDevice] IMFAttributes *attributes = NULL; IMFMediaType *type = NULL; EnterCriticalSection(&mCritsec); hr = param.mDevices[aDevice]->ActivateObject( __uuidof(IMFMediaSource), (void**)&mSource ); DO_OR_DIE_CRITSECTION; hr = MFCreateAttributes(&attributes, 3); ScopedRelease<IMFAttributes> attributes_s(attributes); DO_OR_DIE_CRITSECTION; hr = attributes->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, TRUE); DO_OR_DIE_CRITSECTION; hr = attributes->SetUnknown( MF_SOURCE_READER_ASYNC_CALLBACK, this ); DO_OR_DIE_CRITSECTION; hr = MFCreateSourceReaderFromMediaSource( mSource, attributes, &mReader ); DO_OR_DIE_CRITSECTION; int preferredmode = scanMediaTypes(gParams[mWhoAmI].mWidth, gParams[mWhoAmI].mHeight); mUsedIndex = preferredmode; hr = mReader->GetNativeMediaType( (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, preferredmode, &type ); ScopedRelease<IMFMediaType> type_s(type); DO_OR_DIE_CRITSECTION; hr = setVideoType(type); DO_OR_DIE_CRITSECTION; hr = mReader->SetCurrentMediaType( (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, type ); DO_OR_DIE_CRITSECTION; hr = mReader->ReadSample( (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL ); DO_OR_DIE_CRITSECTION; LeaveCriticalSection(&mCritsec); } else { return MF_E_INVALIDINDEX; } /* for (i = 0; i < 16; i++) { char temp[128]; float v; int f; int r = GetProperty(i, v, f); sprintf(temp, "%d: %3.3f %d (%d)\n", i, v, f, r); OutputDebugStringA(temp); } */ return 0; }
HRESULT CaptureManager::InitializeCaptureManager(HWND hwndPreview, IUnknown* pUnk) { HRESULT hr = S_OK; IMFAttributes* pAttributes = NULL; IMFCaptureEngineClassFactory* pFactory = NULL; DestroyCaptureEngine(); m_hEvent = CreateEvent(NULL, FALSE, FALSE, NULL); if (NULL == m_hEvent) { hr = HRESULT_FROM_WIN32(GetLastError()); goto Exit; } m_pCallback = new (std::nothrow) CaptureEngineCB(m_hwndEvent); if (m_pCallback == NULL) { hr = E_OUTOFMEMORY; goto Exit; } m_pCallback->m_pManager = this; m_hwndPreview = hwndPreview; //Create a D3D Manager hr = CreateD3DManager(); if (FAILED(hr)) { goto Exit; } hr = MFCreateAttributes(&pAttributes, 1); if (FAILED(hr)) { goto Exit; } hr = pAttributes->SetUnknown(MF_CAPTURE_ENGINE_D3D_MANAGER, g_pDXGIMan); if (FAILED(hr)) { goto Exit; } // Create the factory object for the capture engine. hr = CoCreateInstance(CLSID_MFCaptureEngineClassFactory, NULL, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pFactory)); if (FAILED(hr)) { goto Exit; } // Create and initialize the capture engine. hr = pFactory->CreateInstance(CLSID_MFCaptureEngine, IID_PPV_ARGS(&m_pEngine)); if (FAILED(hr)) { goto Exit; } hr = m_pEngine->Initialize(m_pCallback, pAttributes, NULL, pUnk); if (FAILED(hr)) { goto Exit; } Exit: if (NULL != pAttributes) { pAttributes->Release(); pAttributes = NULL; } if (NULL != pFactory) { pFactory->Release(); pFactory = NULL; } return hr; }
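// CreateD3DManager() is not shown above. A sketch of the usual pattern from
// the capture engine samples, assuming the g_pDXGIMan global referenced in
// InitializeCaptureManager: create a D3D11 device with video support, make
// it multithread-safe, then hand it to a DXGI device manager.
HRESULT CreateD3DManager()
{
    ID3D11Device *pDevice = NULL;
    ID3D10Multithread *pMultithread = NULL;
    UINT resetToken = 0;
    D3D_FEATURE_LEVEL levels[] = { D3D_FEATURE_LEVEL_11_0 };

    HRESULT hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL,
        D3D11_CREATE_DEVICE_VIDEO_SUPPORT | D3D11_CREATE_DEVICE_BGRA_SUPPORT,
        levels, ARRAYSIZE(levels), D3D11_SDK_VERSION, &pDevice, NULL, NULL);

    // Media Foundation accesses the device from its own worker threads.
    if (SUCCEEDED(hr))
        hr = pDevice->QueryInterface(IID_PPV_ARGS(&pMultithread));
    if (SUCCEEDED(hr))
        pMultithread->SetMultithreadProtected(TRUE);

    if (SUCCEEDED(hr))
        hr = MFCreateDXGIDeviceManager(&resetToken, &g_pDXGIMan);
    if (SUCCEEDED(hr))
        hr = g_pDXGIMan->ResetDevice(pDevice, resetToken);

    SafeRelease(&pMultithread);
    SafeRelease(&pDevice);
    return hr;
}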
bool WinCaptureDevice::InitializeFirst(std::string& error) { HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE); if (FAILED(hr)) { error = "CoInitializeEx failed"; return false; } hr = MFStartup(MF_VERSION, MFSTARTUP_FULL); if (FAILED(hr)) { error = "MFStartup failed"; return false; } Close(); memset(&InputType, 0, sizeof(InputType)); IMFActivate* activate = WinCaptureDevice::ChooseFirst(error); if (!activate) return false; IMFMediaSource *pSource = NULL; IMFAttributes *pAttributes = NULL; IMFMediaType *pType = NULL; UINT32 m_cchSymbolicLink = 0; // Create the media source for the device. if (SUCCEEDED(hr)) hr = activate->ActivateObject(__uuidof(IMFMediaSource), (void**) &pSource); // Get the symbolic link. if (SUCCEEDED(hr)) hr = activate->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &SymbolicLink, &m_cchSymbolicLink); // // Create the source reader. // // Create an attribute store to hold initialization settings. if (SUCCEEDED(hr)) hr = MFCreateAttributes(&pAttributes, 2); if (SUCCEEDED(hr)) hr = pAttributes->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, TRUE); // Set the callback pointer. if (SUCCEEDED(hr)) hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this); if (SUCCEEDED(hr)) hr = MFCreateSourceReaderFromMediaSource(pSource, pAttributes, &Reader); // Try to find a suitable input type. if (SUCCEEDED(hr)) { for (uint i = 0; ; i++) { hr = Reader->GetNativeMediaType((DWORD) MF_SOURCE_READER_FIRST_VIDEO_STREAM, i, &pType); if (FAILED(hr)) { error = "Failed to find a supported output format (i.e. RGB24)"; break; } memset(&InputType, 0, sizeof(InputType)); bool isTypeOK = IsMediaTypeSupported(pType, InputType); if (isTypeOK) { // Get the frame size. hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &InputWidth, &InputHeight); // Get the image stride. hr = GetDefaultStride(pType, &InputDefaultStride); // Get the interlace mode. Default: assume progressive. InputInterlaceMode = (MFVideoInterlaceMode) MFGetAttributeUINT32(pType, MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); } SafeRelease(&pType); if (isTypeOK) break; } } if (SUCCEEDED(hr)) { // Ask for the first sample. EnableCapture = 1; hr = Reader->ReadSample((DWORD) MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL); } if (FAILED(hr)) { if (pSource) { pSource->Shutdown(); // NOTE: The source reader shuts down the media source by default, but we might not have gotten that far. } Close(); } SafeRelease(&pSource); SafeRelease(&pAttributes); SafeRelease(&pType); SafeRelease(&activate); if (FAILED(hr) && error.length() == 0) error = ErrorMessage(L"Failed to initialize video capture device", hr); return SUCCEEDED(hr); }
HRESULT CTranscoder::ConfigureAudioOutput() { assert (m_pProfile); HRESULT hr = S_OK; DWORD dwMTCount = 0; IMFCollection *pAvailableTypes = NULL; IUnknown *pUnkAudioType = NULL; IMFMediaType *pAudioType = NULL; IMFAttributes *pAudioAttrs = NULL; // Get the list of output formats supported by the Windows Media // audio encoder. hr = MFTranscodeGetAudioOutputAvailableTypes( MFAudioFormat_WMAudioV9, MFT_ENUM_FLAG_ALL, NULL, &pAvailableTypes ); // Get the number of elements in the list. if (SUCCEEDED(hr)) { hr = pAvailableTypes->GetElementCount( &dwMTCount ); if (dwMTCount == 0) { hr = E_UNEXPECTED; } } // In this simple case, use the first media type in the collection. if (SUCCEEDED(hr)) { hr = pAvailableTypes->GetElement(0, &pUnkAudioType); } if (SUCCEEDED(hr)) { hr = pUnkAudioType->QueryInterface(IID_PPV_ARGS(&pAudioType)); } // Create a copy of the attribute store so that we can modify it safely. if (SUCCEEDED(hr)) { hr = MFCreateAttributes(&pAudioAttrs, 0); } if (SUCCEEDED(hr)) { hr = pAudioType->CopyAllItems(pAudioAttrs); } // Set the encoder to be Windows Media audio encoder, so that the // appropriate MFTs are added to the topology. if (SUCCEEDED(hr)) { hr = pAudioAttrs->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_WMAudioV9); } // Set the attribute store on the transcode profile. if (SUCCEEDED(hr)) { hr = m_pProfile->SetAudioAttributes( pAudioAttrs ); } SafeRelease(&pAvailableTypes); SafeRelease(&pAudioType); SafeRelease(&pUnkAudioType); SafeRelease(&pAudioAttrs); return hr; }
bool initialise() { UINT32 videoDeviceCount = 0; IMFAttributes *videoConfig = NULL; IMFActivate **videoDevices = NULL; WCHAR *webcamFriendlyName; CHECK_HR(MFTRegisterLocalByCLSID( __uuidof(CColorConvertDMO), MFT_CATEGORY_VIDEO_PROCESSOR, L"", MFT_ENUM_FLAG_SYNCMFT, 0, NULL, 0, NULL ), "Error registering colour converter DSP.\n"); // Get the first available webcam. CHECK_HR(MFCreateAttributes(&videoConfig, 1), "Error creating video configuration.\n"); // Request video capture devices. CHECK_HR(videoConfig->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID), "Error initialising video configuration object."); CHECK_HR(MFEnumDeviceSources(videoConfig, &videoDevices, &videoDeviceCount), "Error enumerating video devices.\n"); CHECK_HR(videoDevices[WEBCAM_DEVICE_INDEX]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &webcamFriendlyName, NULL), "Error retrieving video device friendly name.\n"); wprintf(L"First available webcam: %s\n", webcamFriendlyName); CHECK_HR(videoDevices[WEBCAM_DEVICE_INDEX]->ActivateObject(IID_PPV_ARGS(&videoSource)), "Error activating video device.\n"); // Create a source reader. CHECK_HR(MFCreateSourceReaderFromMediaSource( videoSource, videoConfig, &_videoReader), "Error creating video source reader.\n"); //ListModes(_videoReader); CHECK_HR(_videoReader->GetCurrentMediaType( (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, &videoSourceOutputType), "Error retrieving current media type from first video stream.\n"); Console::WriteLine(GetMediaTypeDescription(videoSourceOutputType)); // Note the webcam needs to support this media type. The list of media types supported can be obtained using the ListTypes function in MFUtility.h. MFCreateMediaType(&pSrcOutMediaType); pSrcOutMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); //pSrcOutMediaType->SetGUID(MF_MT_SUBTYPE, WMMEDIASUBTYPE_I420); pSrcOutMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); MFSetAttributeSize(pSrcOutMediaType, MF_MT_FRAME_SIZE, CAMERA_RESOLUTION_WIDTH, CAMERA_RESOLUTION_HEIGHT); CHECK_HR(MFSetAttributeRatio(pSrcOutMediaType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1), "Failed to set frame rate on video device out type.\n"); CHECK_HR(_videoReader->SetCurrentMediaType(0, NULL, pSrcOutMediaType), "Failed to set media type on source reader.\n"); //CHECK_HR(_videoReader->SetCurrentMediaType(0, NULL, videoSourceOutputType), "Failed to set default media type on source reader.\n"); // Create H.264 encoder. 
CHECK_HR(CoCreateInstance(CLSID_CMSH264EncoderMFT, NULL, CLSCTX_INPROC_SERVER, IID_IUnknown, (void**)&spTransformUnk), "Failed to create H264 encoder MFT.\n"); CHECK_HR(spTransformUnk->QueryInterface(IID_PPV_ARGS(&_pTransform)), "Failed to get IMFTransform interface from H264 encoder MFT object.\n"); MFCreateMediaType(&pMFTOutputMediaType); pMFTOutputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); pMFTOutputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); //pMFTOutputMediaType->SetUINT32(MF_MT_AVG_BITRATE, 240000); CHECK_HR(pMFTOutputMediaType->SetUINT32(MF_MT_AVG_BITRATE, TARGET_AVERAGE_BIT_RATE), "Failed to set average bit rate on H264 output media type.\n"); CHECK_HR(MFSetAttributeSize(pMFTOutputMediaType, MF_MT_FRAME_SIZE, CAMERA_RESOLUTION_WIDTH, CAMERA_RESOLUTION_HEIGHT), "Failed to set frame size on H264 MFT out type.\n"); CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1), "Failed to set frame rate on H264 MFT out type.\n"); CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1), "Failed to set aspect ratio on H264 MFT out type.\n"); pMFTOutputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, 2); // 2 = Progressive scan, i.e. non-interlaced. pMFTOutputMediaType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); //CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Base), "Failed to set profile on H264 MFT out type.\n"); //CHECK_HR(pMFTOutputMediaType->SetDouble(MF_MT_MPEG2_LEVEL, 3.1), "Failed to set level on H264 MFT out type.\n"); //CHECK_HR(pMFTOutputMediaType->SetUINT32(MF_MT_MAX_KEYFRAME_SPACING, 10), "Failed to set key frame interval on H264 MFT out type.\n"); //CHECK_HR(pMFTOutputMediaType->SetUINT32(CODECAPI_AVEncCommonQuality, 100), "Failed to set H264 codec quality.\n"); //hr = pAttributes->SetUINT32(CODECAPI_AVEncMPVGOPSize, 1) CHECK_HR(_pTransform->SetOutputType(0, pMFTOutputMediaType, 0), "Failed to set output media type on H.264 encoder MFT.\n"); MFCreateMediaType(&pMFTInputMediaType); pMFTInputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); pMFTInputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_IYUV); CHECK_HR(MFSetAttributeSize(pMFTInputMediaType, MF_MT_FRAME_SIZE, CAMERA_RESOLUTION_WIDTH, CAMERA_RESOLUTION_HEIGHT), "Failed to set frame size on H264 MFT input type.\n"); CHECK_HR(MFSetAttributeRatio(pMFTInputMediaType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1), "Failed to set frame rate on H264 MFT input type.\n"); CHECK_HR(MFSetAttributeRatio(pMFTInputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1), "Failed to set aspect ratio on H264 MFT input type.\n"); pMFTInputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, 2); CHECK_HR(_pTransform->SetInputType(0, pMFTInputMediaType, 0), "Failed to set input media type on H.264 encoder MFT.\n"); CHECK_HR(_pTransform->GetInputStatus(0, &mftStatus), "Failed to get input status from H.264 MFT.\n"); if (MFT_INPUT_STATUS_ACCEPT_DATA != mftStatus) { printf("E: ApplyTransform() pTransform->GetInputStatus() did not accept data.\n"); goto done; } //Console::WriteLine(GetMediaTypeDescription(pMFTOutputMediaType)); CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL), "Failed to process FLUSH command on H.264 MFT.\n"); CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL), "Failed to process BEGIN_STREAMING command on H.264 MFT.\n"); CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL), "Failed to process START_OF_STREAM command on H.264 MFT.\n"); memset(&_outputDataBuffer, 
0, sizeof _outputDataBuffer); return true; done: printf("MediaFoundationH264LiveSource initialisation failed.\n"); return false; }
HRESULT CPlayer::CreateSession() { TRACE((L"CPlayer::CreateSession\n")); HRESULT hr = S_OK; IMFAttributes *pAttributes = NULL; IMFActivate *pEnablerActivate = NULL; // Close the old session, if any. CHECK_HR(hr = CloseSession()); assert(m_state == Closed); // Create a new attribute store. CHECK_HR(hr = MFCreateAttributes(&pAttributes, 1)); // Create the content protection manager. assert(m_pContentProtectionManager == NULL); // Was released in CloseSession CHECK_HR(hr = ContentProtectionManager::CreateInstance( m_hwndEvent, &m_pContentProtectionManager )); // Set the MF_SESSION_CONTENT_PROTECTION_MANAGER attribute with a pointer // to the content protection manager. CHECK_HR(hr = pAttributes->SetUnknown( MF_SESSION_CONTENT_PROTECTION_MANAGER, (IMFContentProtectionManager*)m_pContentProtectionManager )); // Create the PMP media session. CHECK_HR(hr = MFCreatePMPMediaSession( 0, // Can use this flag: MFPMPSESSION_UNPROTECTED_PROCESS pAttributes, &m_pSession, &pEnablerActivate )); // TODO: // If MFCreatePMPMediaSession fails it might return an IMFActivate pointer. // This indicates that a trusted binary failed to load in the protected process. // An application can use the IMFActivate pointer to create an enabler object, which // provides revocation and renewal information for the component that failed to // load. // This sample does not demonstrate that feature. Instead, we simply treat this // case as a playback failure. // Start pulling events from the media session CHECK_HR(hr = m_pSession->BeginGetEvent((IMFAsyncCallback*)this, NULL)); done: SAFE_RELEASE(pAttributes); SAFE_RELEASE(pEnablerActivate); return hr; }