//------------------------------------------------------------------- // Initialise the source reader // HRESULT VidReader::initSourceReader(WCHAR *filename) { HRESULT hr = S_OK; IMFAttributes *pAttributes = NULL; SafeRelease(&m_pReader); // Configure the source reader to perform video processing hr = MFCreateAttributes(&pAttributes, 1); if (FAILED(hr)) goto done; hr = pAttributes->SetUINT32(MF_SOURCE_READER_ENABLE_VIDEO_PROCESSING, TRUE); if (FAILED(hr)) goto done; // Create the source reader from the URL hr = MFCreateSourceReaderFromURL(filename, pAttributes, &m_pReader); if (FAILED(hr)) goto done; // Attempt to find a video stream hr = selectVideoStream(); if (FAILED(hr)) goto done; // Get the stream format hr = getVideoFormat(); if (FAILED(hr)) goto done; // Get the duration hr = getDuration(); done: return hr; }
// Configure the video stream of the transcode profile: WMV3 encoding,
// 30 fps, 320x240, square pixels, 300 kbps average bit rate.
// Returns S_OK on success or the first failing HRESULT.
HRESULT CTranscoder::ConfigureVideoOutput()
{
    assert (m_pProfile);

    IMFAttributes* videoAttrs = NULL;

    // Attribute store holding the video stream settings.
    HRESULT hr = MFCreateAttributes(&videoAttrs, 5);

    // Windows Media video encoder, so the appropriate MFTs are added
    // to the topology.
    if (SUCCEEDED(hr))
        hr = videoAttrs->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_WMV3);

    // Frame rate: 30/1 fps.
    if (SUCCEEDED(hr))
        hr = MFSetAttributeRatio(videoAttrs, MF_MT_FRAME_RATE, 30, 1);

    // Frame size: 320x240.
    if (SUCCEEDED(hr))
        hr = MFSetAttributeSize(videoAttrs, MF_MT_FRAME_SIZE, 320, 240);

    // Pixel aspect ratio: 1:1 (square pixels).
    if (SUCCEEDED(hr))
        hr = MFSetAttributeRatio(videoAttrs, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);

    // Average bit rate: 300 kbps.
    if (SUCCEEDED(hr))
        hr = videoAttrs->SetUINT32(MF_MT_AVG_BITRATE, 300000);

    // Hand the settings to the transcode profile.
    if (SUCCEEDED(hr))
        hr = m_pProfile->SetVideoAttributes(videoAttrs);

    SafeRelease(&videoAttrs);
    return hr;
}
// Initialize the Media Foundation media engine for playback into the
// given render target texture, using the D3D11 device of the current
// renderer for hardware-accelerated video output.
void MFPlayer::init(const OTextureRef& pRenderTarget)
{
    m_pRenderTarget = pRenderTarget;
    auto pRendererD3D11 = std::dynamic_pointer_cast<ORendererD3D11>(oRenderer);
    HRESULT ret;

    // Start up Media Foundation. COM initialization is assumed to have
    // happened elsewhere (the CoInitializeEx call below is disabled).
    //ret = CoInitializeEx(NULL, COINIT_MULTITHREADED);
    //assert(ret == S_OK);
    ret = MFStartup(MF_VERSION);
    assert(ret == S_OK);

    // Create the media engine class factory.
    IMFMediaEngineClassFactory *pMediaEngineClassFactory = nullptr;
    ret = CoCreateInstance(CLSID_MFMediaEngineClassFactory, nullptr, CLSCTX_ALL, IID_PPV_ARGS(&pMediaEngineClassFactory));
    assert(ret == S_OK);

    // Create the callback object that receives media engine events.
    m_pPlayerNodify = new MFPlayerNotify(shared_from_this());

    // Create the attribute store used to configure the media engine.
    IMFAttributes *pAttributes = nullptr;
    ret = MFCreateAttributes(&pAttributes, 1);
    assert(ret == S_OK);
    ret = pAttributes->SetUnknown(MF_MEDIA_ENGINE_CALLBACK, m_pPlayerNodify);
    assert(ret == S_OK);

    // The media engine accesses the D3D device from its own threads, so
    // multithread protection must be enabled on the device.
    ID3D10Multithread *pMultithread = nullptr;
    ID3D11Device *pDevice = pRendererD3D11->getDevice();
    ret = pDevice->QueryInterface(IID_PPV_ARGS(&pMultithread));
    assert(ret == S_OK);
    pMultithread->SetMultithreadProtected(TRUE);
    pMultithread->Release();

    // Share the D3D11 device with the media engine via a DXGI device manager.
    UINT resetToken = 0;
    ret = MFCreateDXGIDeviceManager(&resetToken, &m_pDXGIManager);
    assert(ret == S_OK);
    ret = m_pDXGIManager->ResetDevice(pRendererD3D11->getDevice(), resetToken);
    assert(ret == S_OK);
    ret = pAttributes->SetUnknown(MF_MEDIA_ENGINE_DXGI_MANAGER, m_pDXGIManager);
    assert(ret == S_OK);
    // Decoded frames will be delivered as RGBA8 textures.
    ret = pAttributes->SetUINT32(MF_MEDIA_ENGINE_VIDEO_OUTPUT_FORMAT, DXGI_FORMAT_R8G8B8A8_UNORM);
    assert(ret == S_OK);

    // Create the media engine itself. WAITFORSTABLE_STATE makes property
    // getters block until the engine has settled.
    ret = pMediaEngineClassFactory->CreateInstance(MF_MEDIA_ENGINE_WAITFORSTABLE_STATE, pAttributes, &m_pMediaEngine);
    assert(ret == S_OK);

    // Release the temporary COM objects; the engine holds what it needs.
    pAttributes->Release();
    pMediaEngineClassFactory->Release();
}
// Configure the audio stream of a capture-engine recording sink.
// Picks the first (best, after SORTANDFILTER) low-latency output format
// the encoder offers for guidEncodingType and connects the preferred
// audio source stream to the sink. If no audio device is present
// (AddStream returns MF_E_INVALIDSTREAMNUMBER) the error is swallowed
// deliberately so video-only recording still works.
HRESULT ConfigureAudioEncoding(IMFCaptureSource *pSource, IMFCaptureRecordSink *pRecord, REFGUID guidEncodingType)
{
    IMFCollection *pAvailableTypes = NULL;
    IMFMediaType *pMediaType = NULL;
    IMFAttributes *pAttributes = NULL;

    // Configure the audio format for the recording sink.
    HRESULT hr = MFCreateAttributes(&pAttributes, 1);
    if (FAILED(hr))
    {
        goto done;
    }

    // Restrict enumeration to low-latency media types.
    hr = pAttributes->SetUINT32(MF_LOW_LATENCY, TRUE);
    if (FAILED(hr))
    {
        goto done;
    }

    // Get a list of encoded output formats that are supported by the encoder.
    hr = MFTranscodeGetAudioOutputAvailableTypes(guidEncodingType, MFT_ENUM_FLAG_ALL | MFT_ENUM_FLAG_SORTANDFILTER, pAttributes, &pAvailableTypes);
    if (FAILED(hr))
    {
        goto done;
    }

    // Pick the first format from the list.
    hr = GetCollectionObject(pAvailableTypes, 0, &pMediaType);
    if (FAILED(hr))
    {
        goto done;
    }

    // Connect the audio stream to the recording sink.
    DWORD dwSinkStreamIndex;
    hr = pRecord->AddStream((DWORD)MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_AUDIO, pMediaType, NULL, &dwSinkStreamIndex);
    if (hr == MF_E_INVALIDSTREAMNUMBER)
    {
        // If an audio device is not present, allow video only recording
        hr = S_OK;
    }

done:
    SafeRelease(&pAvailableTypes);
    SafeRelease(&pMediaType);
    SafeRelease(&pAttributes);
    return hr;
}
// Unlock an asynchronous MFT so that ProcessInput/ProcessOutput calls
// are accepted. Per the MF async-MFT contract, a transform that reports
// MF_TRANSFORM_ASYNC == TRUE must be unlocked by the client setting
// MF_TRANSFORM_ASYNC_UNLOCK to TRUE on its attribute store.
STDMETHODIMP UnLockAsynMFT(IMFTransform* pTransform)
{
    HRESULT hr = S_OK;
    // FIX: initialize the pointer; it was previously read uninitialized
    // if an early DMFTCHECK* jump fired before GetAttributes succeeded.
    IMFAttributes *pAttributes = NULL;
    UINT32 unValue = 0;

    DMFTCHECKNULL_GOTO(pTransform, done, E_INVALIDARG);
    DMFTCHECKHR_GOTO(pTransform->GetAttributes(&pAttributes), done);
    DMFTCHECKHR_GOTO(pAttributes->GetUINT32(MF_TRANSFORM_ASYNC, &unValue), done);
    if (unValue)
    {
        // FIX: the unlock is performed by setting MF_TRANSFORM_ASYNC_UNLOCK.
        // The old code re-set MF_TRANSFORM_ASYNC, which does not unlock the
        // MFT (that attribute is reported by the MFT, not set by the client).
        DMFTCHECKHR_GOTO(pAttributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE), done);
    }
done:
    // FIX: release the attribute store; it was previously leaked.
    SafeRelease(&pAttributes);
    return hr;
}
// Configure the output container of the transcode profile as ASF,
// keeping the per-stream settings chosen in ConfigureAudioOutput /
// ConfigureVideoOutput. Returns S_OK or the first failing HRESULT.
HRESULT CTranscoder::ConfigureContainer()
{
    assert (m_pProfile);

    IMFAttributes* containerAttrs = NULL;

    // Attribute store for the container-level settings.
    HRESULT hr = MFCreateAttributes(&containerAttrs, 2);

    // The output container is ASF.
    if (SUCCEEDED(hr))
        hr = containerAttrs->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_ASF);

    // Use the default profile adjustment: Media Foundation keeps the
    // stream settings configured elsewhere on the profile.
    if (SUCCEEDED(hr))
        hr = containerAttrs->SetUINT32(MF_TRANSCODE_ADJUST_PROFILE, MF_TRANSCODE_ADJUST_PROFILE_DEFAULT);

    // Attach the container settings to the transcode profile.
    if (SUCCEEDED(hr))
        hr = m_pProfile->SetContainerAttributes(containerAttrs);

    SafeRelease(&containerAttrs);
    return hr;
}
// Decode an audio file to raw PCM using a Media Foundation source reader.
// Outputs via the pointer parameters: *length = number of sample frames,
// *channels = channel count, *format = bytes per sample, *hertz = sample
// rate. Returns a malloc'd buffer of interleaved PCM that the caller owns.
unsigned char *BBWin8Game::LoadAudioData( String path,int *length,int *channels,int *format,int *hertz ){

    String url=PathToFilePath( path );

    DXASS( MFStartup( MF_VERSION ) );

    // Source reader with low-latency processing enabled.
    IMFAttributes *attrs;
    DXASS( MFCreateAttributes( &attrs,1 ) );
    DXASS( attrs->SetUINT32( MF_LOW_LATENCY,TRUE ) );

    IMFSourceReader *reader;
    DXASS( MFCreateSourceReaderFromURL( url.ToCString<wchar_t>(),attrs,&reader ) );
    attrs->Release();

    // Ask the reader to convert the first audio stream to PCM.
    IMFMediaType *mediaType;
    DXASS( MFCreateMediaType( &mediaType ) );
    DXASS( mediaType->SetGUID( MF_MT_MAJOR_TYPE,MFMediaType_Audio ) );
    DXASS( mediaType->SetGUID( MF_MT_SUBTYPE,MFAudioFormat_PCM ) );
    DXASS( reader->SetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,mediaType ) );
    mediaType->Release();

    // Read back the actual output format to report channels/format/hertz.
    IMFMediaType *outputMediaType;
    DXASS( reader->GetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,&outputMediaType ) );

    WAVEFORMATEX *wformat;
    uint32 formatByteCount=0;
    DXASS( MFCreateWaveFormatExFromMFMediaType( outputMediaType,&wformat,&formatByteCount ) );

    *channels=wformat->nChannels;
    *format=wformat->wBitsPerSample/8;   // bytes per sample
    *hertz=wformat->nSamplesPerSec;

    CoTaskMemFree( wformat );
    outputMediaType->Release();

    /*
    PROPVARIANT var;
    DXASS( reader->GetPresentationAttribute( MF_SOURCE_READER_MEDIASOURCE,MF_PD_DURATION,&var ) );
    LONGLONG duration=var.uhVal.QuadPart;
    float64 durationInSeconds=(duration / (float64)(10000 * 1000));
    m_maxStreamLengthInBytes=(uint32)( durationInSeconds * m_waveFormat.nAvgBytesPerSec );
    */

    // Pull decoded samples one at a time, copying each contiguous buffer
    // into a malloc'd chunk; chunks are concatenated after the loop.
    std::vector<unsigned char*> bufs;
    std::vector<uint32> lens;
    uint32 len=0;

    for( ;; ){
        uint32 flags=0;
        IMFSample *sample;
        DXASS( reader->ReadSample( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,0,reinterpret_cast<DWORD*>(&flags),0,&sample ) );

        if( flags & MF_SOURCE_READERF_ENDOFSTREAM ){
            break;
        }
        // A null sample without EOS is unexpected here; fail hard.
        if( sample==0 ){
            abort();
        }

        IMFMediaBuffer *mediaBuffer;
        DXASS( sample->ConvertToContiguousBuffer( &mediaBuffer ) );

        uint8 *audioData=0;
        uint32 sampleBufferLength=0;
        DXASS( mediaBuffer->Lock( &audioData,0,reinterpret_cast<DWORD*>( &sampleBufferLength ) ) );

        unsigned char *buf=(unsigned char*)malloc( sampleBufferLength );
        memcpy( buf,audioData,sampleBufferLength );
        bufs.push_back( buf );
        lens.push_back( sampleBufferLength );
        len+=sampleBufferLength;

        DXASS( mediaBuffer->Unlock() );
        mediaBuffer->Release();
        sample->Release();
    }

    reader->Release();

    // Convert total byte count to sample frames.
    *length=len/(*channels * *format);

    // Concatenate the per-sample chunks into one contiguous buffer.
    unsigned char *data=(unsigned char*)malloc( len );
    unsigned char *p=data;
    for( int i=0;i<bufs.size();++i ){
        memcpy( p,bufs[i],lens[i] );
        free( bufs[i] );
        p+=lens[i];
    }

    // Request a GC sweep; decoding may have produced heavy temporary garbage.
    gc_force_sweep=true;

    return data;
}
// Initialize capture from video device number aDevice: enumerate the
// video capture sources, activate the chosen one, create an async source
// reader with converters disabled, select the preferred native media
// type, and kick off the first ReadSample. Returns 0 on success or
// MF_E_INVALIDINDEX when fewer than aDevice+1 devices exist (the
// DO_OR_DIE* macros handle the other failure paths).
HRESULT CaptureClass::initCapture(int aDevice)
{
    mWhoAmI = aDevice;

    HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
    DO_OR_DIE;

    hr = MFStartup(MF_VERSION);
    DO_OR_DIE;

    // choose device
    IMFAttributes *attributes = NULL;
    hr = MFCreateAttributes(&attributes, 1);
    ScopedRelease<IMFAttributes> attributes_s(attributes);
    DO_OR_DIE;

    hr = attributes->SetGUID(
        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
        MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
        );
    DO_OR_DIE;

    ChooseDeviceParam param = { 0 };
    // FIX: "&param" had been mangled to "¶m" (mojibake from an HTML
    // "&para;" entity); restored the address-of expressions so this compiles.
    hr = MFEnumDeviceSources(attributes, &param.mDevices, &param.mCount);
    DO_OR_DIE;

    if ((signed)param.mCount > aDevice)
    {
        // use param.ppDevices[0]
        IMFAttributes *attributes = NULL;
        IMFMediaType *type = NULL;

        EnterCriticalSection(&mCritsec);

        hr = param.mDevices[aDevice]->ActivateObject(
            __uuidof(IMFMediaSource),
            (void**)&mSource
            );
        DO_OR_DIE_CRITSECTION;

        hr = MFCreateAttributes(&attributes, 3);
        ScopedRelease<IMFAttributes> attributes_s(attributes);
        DO_OR_DIE_CRITSECTION;

        // No implicit format converters: we pick a native type below.
        hr = attributes->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, TRUE);
        DO_OR_DIE_CRITSECTION;

        // Async reading: samples arrive via this object's callback.
        hr = attributes->SetUnknown(
            MF_SOURCE_READER_ASYNC_CALLBACK,
            this
            );
        DO_OR_DIE_CRITSECTION;

        hr = MFCreateSourceReaderFromMediaSource(
            mSource,
            attributes,
            &mReader
            );
        DO_OR_DIE_CRITSECTION;

        // Pick the native mode closest to the requested resolution.
        int preferredmode = scanMediaTypes(gParams[mWhoAmI].mWidth, gParams[mWhoAmI].mHeight);
        mUsedIndex = preferredmode;

        hr = mReader->GetNativeMediaType(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            preferredmode,
            &type
            );
        ScopedRelease<IMFMediaType> type_s(type);
        DO_OR_DIE_CRITSECTION;

        hr = setVideoType(type);
        DO_OR_DIE_CRITSECTION;

        hr = mReader->SetCurrentMediaType(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            NULL,
            type
            );
        DO_OR_DIE_CRITSECTION;

        // Request the first sample; subsequent ones are requested from
        // the async callback.
        hr = mReader->ReadSample(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            0,
            NULL,
            NULL,
            NULL,
            NULL
            );
        DO_OR_DIE_CRITSECTION;

        LeaveCriticalSection(&mCritsec);
    }
    else
    {
        return MF_E_INVALIDINDEX;
    }

    /*
    for (i = 0; i < 16; i++)
    {
        char temp[128];
        float v;
        int f;
        int r = GetProperty(i, v, f);
        sprintf(temp, "%d: %3.3f %d (%d)\n", i, v, f, r);
        OutputDebugStringA(temp);
    }
    */
    return 0;
}
// Switch the preview to the capture device described by pActivate:
// closes any current device, activates the media source, records its
// symbolic link, creates an async source reader, finds the first native
// media type accepted by TryMediaType, and requests the first sample.
// Serialized against the reader callback via m_critsec.
HRESULT CPreview::SetDevice(IMFActivate *pActivate)
{
    HRESULT hr = S_OK;

    IMFMediaSource *pSource = NULL;
    IMFAttributes *pAttributes = NULL;
    IMFMediaType *pType = NULL;

    EnterCriticalSection(&m_critsec);

    // Release the current device, if any.
    hr = CloseDevice();

    // Create the media source for the device.
    if (SUCCEEDED(hr))
    {
        hr = pActivate->ActivateObject(
            __uuidof(IMFMediaSource),
            (void**)&pSource
            );
    }

    // Get the symbolic link.
    if (SUCCEEDED(hr))
    {
        hr = pActivate->GetAllocatedString(
            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK,
            &m_pwszSymbolicLink,
            &m_cchSymbolicLink
            );
    }

    //
    // Create the source reader.
    //

    // Create an attribute store to hold initialization settings.
    if (SUCCEEDED(hr))
    {
        hr = MFCreateAttributes(&pAttributes, 2);
    }
    // Only native types are enumerated below, so disable converters.
    if (SUCCEEDED(hr))
    {
        hr = pAttributes->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, TRUE);
    }

    // Set the callback pointer: samples arrive asynchronously on this object.
    if (SUCCEEDED(hr))
    {
        hr = pAttributes->SetUnknown(
            MF_SOURCE_READER_ASYNC_CALLBACK,
            this
            );
    }

    if (SUCCEEDED(hr))
    {
        hr = MFCreateSourceReaderFromMediaSource(
            pSource,
            pAttributes,
            &m_pReader
            );
    }

    // Try to find a suitable output type.
    if (SUCCEEDED(hr))
    {
        for (DWORD i = 0; ; i++)
        {
            hr = m_pReader->GetNativeMediaType(
                (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                i,
                &pType
                );
            if (FAILED(hr)) { break; }   // ran out of native types

            hr = TryMediaType(pType);

            SafeRelease(&pType);

            if (SUCCEEDED(hr))
            {
                // Found an output type.
                break;
            }
        }
    }

    if (SUCCEEDED(hr))
    {
        // Ask for the first sample.
        hr = m_pReader->ReadSample(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            0,
            NULL,
            NULL,
            NULL,
            NULL
            );
    }

    if (FAILED(hr))
    {
        if (pSource)
        {
            pSource->Shutdown();

            // NOTE: The source reader shuts down the media source
            // by default, but we might not have gotten that far.
        }
        CloseDevice();
    }

    SafeRelease(&pSource);
    SafeRelease(&pAttributes);
    SafeRelease(&pType);

    LeaveCriticalSection(&m_critsec);
    return hr;
}
bool WinCaptureDevice::InitializeFirst(std::string& error) { HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE); if (!SUCCEEDED(hr)) { return false; error = "CoInitializeEx failed"; } hr = MFStartup(MF_VERSION, MFSTARTUP_FULL); if (!SUCCEEDED(hr)) { error = "MFStartup failed"; return false; } Close(); memset(&InputType, 0, sizeof(InputType)); IMFActivate* activate = WinCaptureDevice::ChooseFirst(error); if (!activate) return false; IMFMediaSource *pSource = NULL; IMFAttributes *pAttributes = NULL; IMFMediaType *pType = NULL; UINT32 m_cchSymbolicLink = 0; // Create the media source for the device. if (SUCCEEDED(hr)) hr = activate->ActivateObject(__uuidof(IMFMediaSource), (void**) &pSource); // Get the symbolic link. if (SUCCEEDED(hr)) hr = activate->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &SymbolicLink, &m_cchSymbolicLink); // // Create the source reader. // // Create an attribute store to hold initialization settings. if (SUCCEEDED(hr)) hr = MFCreateAttributes(&pAttributes, 2); if (SUCCEEDED(hr)) hr = pAttributes->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, TRUE); // Set the callback pointer. if (SUCCEEDED(hr)) hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this); if (SUCCEEDED(hr)) hr = MFCreateSourceReaderFromMediaSource(pSource, pAttributes, &Reader); // Try to find a suitable input type. if (SUCCEEDED(hr)) { for (uint i = 0; ; i++) { hr = Reader->GetNativeMediaType((DWORD) MF_SOURCE_READER_FIRST_VIDEO_STREAM, i, &pType); if (FAILED(hr)) { error = "Failed to find a supported output format (ie RGB24)"; break; } memset(&InputType, 0, sizeof(InputType)); bool isTypeOK = IsMediaTypeSupported(pType, InputType); if (isTypeOK) { // Get the frame size. hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &InputWidth, &InputHeight); // Get the image stride. hr = GetDefaultStride(pType, &InputDefaultStride); // Get the interlace mode. Default: assume progressive. 
InputInterlaceMode = (MFVideoInterlaceMode) MFGetAttributeUINT32(pType, MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); } SafeRelease(&pType); if (isTypeOK) break; } } if (SUCCEEDED(hr)) { // Ask for the first sample. EnableCapture = 1; hr = Reader->ReadSample((DWORD) MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL); } if (FAILED(hr)) { if (pSource) { pSource->Shutdown(); // NOTE: The source reader shuts down the media source by default, but we might not have gotten that far. } Close(); } SafeRelease(&pSource); SafeRelease(&pAttributes); SafeRelease(&pType); SafeRelease(&activate); if (FAILED(hr) && error.length() == 0) error = ErrorMessage(L"Failed to initialize video capture device", hr); return SUCCEEDED(hr); }