// Populates the sink writer's input media type (uncompressed RGB32 video).
// Attributes are applied in sequence; the first failing HRESULT is returned
// and all later calls are skipped.
HRESULT VidWriter::configureInput(IMFMediaType *pMT)
{
    HRESULT hr = pMT->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);

    // Uncompressed RGB32 frames.
    if (SUCCEEDED(hr))
        hr = pMT->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32);

    // Frames are progressive (no interlacing).
    if (SUCCEEDED(hr))
        hr = pMT->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);

    // Frame dimensions.
    if (SUCCEEDED(hr))
        hr = MFSetAttributeSize(pMT, MF_MT_FRAME_SIZE, m_width, m_height);

    // Frame rate, derived from the average frame duration.
    // (Not obviously required on an input type, kept for parity with the
    // original behaviour.)
    if (SUCCEEDED(hr))
    {
        UINT32 rateNum = 0, rateDen = 0;
        hr = MFAverageTimePerFrameToFrameRate(m_frametime, &rateNum, &rateDen);
        if (SUCCEEDED(hr))
            hr = MFSetAttributeRatio(pMT, MF_MT_FRAME_RATE, rateNum, rateDen);
    }

    // Square pixels.
    if (SUCCEEDED(hr))
        hr = MFSetAttributeRatio(pMT, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);

    return hr;
}
// 创建sink writer. 返回流索引. HRESULT VideoEncoder::CreateSinkWriter(IMFSinkWriter** ppSinkWriter, DWORD* pStreamIndex) { HRESULT hr = S_OK; if (this->m_outputFile == L"") { return ERROR_FILE_INVALID; } // 创建sink writer. *ppSinkWriter = nullptr; IMFSinkWriter *pSinkWriter = nullptr; IMFMediaType* pOutputMediaType = nullptr; IMFMediaType *pInMediaType = nullptr; CheckHR(MFCreateSinkWriterFromURL(this->m_outputFile.c_str(), nullptr, nullptr, &pSinkWriter)); // 创建和配置输出媒体类型. CheckHR(MFCreateMediaType(&pOutputMediaType)); CheckHR(pOutputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)); CheckHR(pOutputMediaType->SetGUID(MF_MT_SUBTYPE, this->m_outputVideoFormat)); CheckHR(pOutputMediaType->SetUINT32(MF_MT_AVG_BITRATE, this->m_videoBitRate)); CheckHR(pOutputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive)); CheckHR(MFSetAttributeSize(pOutputMediaType, MF_MT_FRAME_SIZE, this->m_frameWidth, this->m_frameHeight)); CheckHR(MFSetAttributeRatio(pOutputMediaType, MF_MT_FRAME_RATE, (UINT32)this->m_fps, 1)); CheckHR(MFSetAttributeRatio(pOutputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1)); DWORD streamIndex; CheckHR(pSinkWriter->AddStream(pOutputMediaType, &streamIndex)); // 设置输入的媒体类型. CheckHR(MFCreateMediaType(&pInMediaType)); CheckHR(pInMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)); CheckHR(pInMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32)); CheckHR(pInMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive)); // 输入的步幅信息不为所有输出编码解码器需要.但某些编解码器需要它,如 H.264. // 如果步幅是去掉,或设置为负值,H.264 将从下到上处理图像. CheckHR(pInMediaType->SetUINT32(MF_MT_DEFAULT_STRIDE, this->m_frameStride)); CheckHR(MFSetAttributeSize(pInMediaType, MF_MT_FRAME_SIZE, this->m_frameWidth, this->m_frameHeight)); CheckHR(MFSetAttributeRatio(pInMediaType, MF_MT_FRAME_RATE, (UINT32)this->m_fps, 1)); CheckHR(MFSetAttributeRatio(pInMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1)); CheckHR(pSinkWriter->SetInputMediaType(streamIndex, pInMediaType, nullptr)); // 开始编写. 
CheckHR(pSinkWriter->BeginWriting()); *ppSinkWriter = pSinkWriter; (*ppSinkWriter)->AddRef(); *pStreamIndex = streamIndex; cleanup: if (!SUCCEEDED(hr)) { DWORD error = GetLastError(); this->m_logFileStream << "意外错误: " << error << endl; } SafeRelease(&pSinkWriter); SafeRelease(&pOutputMediaType); return hr; }
// Configures the video stream attributes of the transcode profile:
// WMV3 at 320x240, 30 fps, square pixels, 300 kbit/s.
HRESULT CTranscoder::ConfigureVideoOutput()
{
    assert (m_pProfile);

    IMFAttributes* pVideoAttrs = NULL;
    HRESULT hr = S_OK;

    do
    {
        // Attribute store for the video stream settings.
        hr = MFCreateAttributes(&pVideoAttrs, 5);
        if (FAILED(hr)) break;

        // Windows Media video encoder, so the appropriate MFTs are added to
        // the topology.
        hr = pVideoAttrs->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_WMV3);
        if (FAILED(hr)) break;

        // Frame rate.
        hr = MFSetAttributeRatio(pVideoAttrs, MF_MT_FRAME_RATE, 30, 1);
        if (FAILED(hr)) break;

        // Frame size.
        hr = MFSetAttributeSize(pVideoAttrs, MF_MT_FRAME_SIZE, 320, 240);
        if (FAILED(hr)) break;

        // Pixel aspect ratio.
        hr = MFSetAttributeRatio(pVideoAttrs, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
        if (FAILED(hr)) break;

        // Bit rate.
        hr = pVideoAttrs->SetUINT32(MF_MT_AVG_BITRATE, 300000);
        if (FAILED(hr)) break;

        // Hand the attribute store to the transcode profile.
        hr = m_pProfile->SetVideoAttributes( pVideoAttrs );
    } while (false);

    SafeRelease(&pVideoAttrs);
    return hr;
}
// Builds and applies the encoder MFT's output media type (H.264-style
// compressed video: format, bitrate, frame rate, size, profile).
// Returns MF_E_NOT_INITIALIZED if the encoder has not been created.
HRESULT EncodeTransform::SetOutputMediaType()
{
    if (!mpEncoder)
    {
        return MF_E_NOT_INITIALIZED;
    }

    IMFMediaType* pMediaTypeOut = NULL;
    // FIX: MFCreateMediaType was called twice in a row, leaking the first
    // media type instance.
    HRESULT hr = MFCreateMediaType(&pMediaTypeOut);
    if (SUCCEEDED(hr)) { hr = pMediaTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); }
    if (SUCCEEDED(hr)) { hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, cVideoEncodingFormat); }
    if (SUCCEEDED(hr)) { hr = pMediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, VIDEO_BIT_RATE); }
    if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, VIDEO_FPS, 1); }
    if (SUCCEEDED(hr)) { hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, mStreamWidth, mStreamHeight); }
    if (SUCCEEDED(hr)) { hr = pMediaTypeOut->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); }
    if (SUCCEEDED(hr)) { hr = pMediaTypeOut->SetUINT32(MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Base); }
    if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); }
    if (SUCCEEDED(hr)) { hr = mpEncoder->SetOutputType(0, pMediaTypeOut, 0); }

    // FIX: release the local reference — it was leaked on every path.
    // (SetOutputType keeps its own reference if it needs one.)
    if (pMediaTypeOut) { pMediaTypeOut->Release(); }
    return hr;
}
// Fills an IMFMediaType with a complete video description: the given format
// and bitrate, the given geometry, a fixed frame-rate range, and H.264 Main
// profile. All HRESULTs are intentionally ignored (fire-and-forget setup).
void VideoCompressor::InitMediaType(IMFMediaType *M, const GUID &Format, UINT BitRate, UINT Width, UINT Height, UINT FrameRate)
{
    // Identity: video in the requested format.
    M->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    M->SetGUID(MF_MT_SUBTYPE, Format);

    // Geometry: frame size, square pixels.
    MFSetAttributeSize(M, MF_MT_FRAME_SIZE, Width, Height);
    MFSetAttributeRatio(M, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);

    // Timing: nominal rate plus the advertised min/max range.
    MFSetAttributeRatio(M, MF_MT_FRAME_RATE, FrameRate, 1);
    MFSetAttributeRatio(M, MF_MT_FRAME_RATE_RANGE_MAX, FrameRate, 1);
    MFSetAttributeRatio(M, MF_MT_FRAME_RATE_RANGE_MIN, FrameRate / 2, 1);

    // Stream characteristics.
    M->SetUINT32(MF_MT_AVG_BITRATE, BitRate);
    M->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    M->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1);
    M->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, 1);
    M->SetUINT32(MF_MT_SAMPLE_SIZE, Width * Height * 4); // 32 bpp frame in bytes

    // H.264 profile (Main; use eAVEncH264VProfile_Base for baseline).
    M->SetUINT32(MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Main);

    //M->SetUINT32(MF_MT_DEFAULT_STRIDE, -960);
    //M->SetGUID(MF_MT_AM_FORMAT_TYPE, Webcam);
}
void MfVideoEncoder::Init(int width, int height, int fps) { mWidth = width; mHeight = height; mFps = fps; mFrameTime = 10 * 1000 * 1000 / fps; CComPtr<IMFMediaType> pMediaTypeOut; CComPtr<IMFMediaType> pMediaTypeIn; HRESULT hr = MFCreateSinkWriterFromURL(mFilename.c_str(), NULL, NULL, &mSinkWriter); // Set the output media type. if (SUCCEEDED(hr)) { hr = MFCreateMediaType(&pMediaTypeOut); } if (SUCCEEDED(hr)) { hr = pMediaTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); } if (SUCCEEDED(hr)) { hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); } if (SUCCEEDED(hr)) { hr = pMediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, 8000000); } if (SUCCEEDED(hr)) { hr = pMediaTypeOut->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); } if (SUCCEEDED(hr)) { hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, width, height); } if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, fps, 1); } if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); } pMediaTypeOut->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1); if (SUCCEEDED(hr)) { hr = mSinkWriter->AddStream(pMediaTypeOut, (DWORD*)&mStreamIndex); } // Set the input media type. 
if (SUCCEEDED(hr)) { hr = MFCreateMediaType(&pMediaTypeIn); } if (SUCCEEDED(hr)) { hr = pMediaTypeIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); } if (SUCCEEDED(hr)) { hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32); } if (SUCCEEDED(hr)) { hr = pMediaTypeIn->SetUINT32(MF_MT_MPEG2_PROFILE, 77); } if (SUCCEEDED(hr)) { hr = pMediaTypeIn->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); } if (SUCCEEDED(hr)) { hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, width, height); } if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, fps, 1); } if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); } if (SUCCEEDED(hr)) { hr = mSinkWriter->SetInputMediaType(mStreamIndex, pMediaTypeIn, nullptr); } // Tell the sink writer to start accepting data. if (SUCCEEDED(hr)) { hr = mSinkWriter->BeginWriting(); } CComPtr<ICodecAPI> encoder; hr = mSinkWriter->GetServiceForStream(0, GUID_NULL, IID_PPV_ARGS(&encoder)); if (SUCCEEDED(hr)) { CComVariant quality((UINT32)eAVEncCommonRateControlMode_CBR, VT_UI4); hr = encoder->SetValue(&CODECAPI_AVEncCommonRateControlMode, &quality); } if (SUCCEEDED(hr)) { CComVariant quality((UINT32)80000000, VT_UI4); hr = encoder->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &quality); } // Return the pointer to the caller. if (!SUCCEEDED(hr)) { mSinkWriter.Release(); throw TempleException("Unable to begin writing to the video stream"); } }
// Returns the output media type available at index dwTypeIndex for the given
// output stream. On success *ppType receives a new, AddRef'd IMFMediaType.
//
// Errors: MF_E_TRANSFORM_ASYNC_LOCKED while the async MFT is locked,
// E_POINTER for a null out-pointer, MF_E_INVALIDSTREAMNUMBER for a bad
// stream id, MF_E_NO_MORE_TYPES when dwTypeIndex is past the static list.
HRESULT CHWMFT::GetOutputAvailableType(
    DWORD dwOutputStreamID,
    DWORD dwTypeIndex,
    IMFMediaType** ppType)
{
    /*****************************************
    ** Todo: This function will return a media
    ** type at a given index. The SDK
    ** implementation uses a static array of
    ** media types. Your MFT may want to use
    ** a dynamic array and modify the list
    ** order depending on the MFTs state
    ** See http://msdn.microsoft.com/en-us/library/ms703812(v=VS.85).aspx
    *****************************************/

    HRESULT hr = S_OK;
    IMFMediaType* pMT = NULL;

    // do/while(false) gives a single exit point; every failure `break`s to
    // the shared release/return below.
    do
    {
        // Async MFTs reject type negotiation until unlocked.
        if(IsLocked() != FALSE)
        {
            hr = MF_E_TRANSFORM_ASYNC_LOCKED;
            break;
        }

        if(ppType == NULL)
        {
            hr = E_POINTER;
            break;
        }

        /*****************************************
        ** Todo: If your MFT supports more than one
        ** stream, make sure you modify
        ** MFT_MAX_STREAMS and adjust this function
        ** accordingly
        *****************************************/
        if(dwOutputStreamID >= MFT_MAX_STREAMS)
        {
            hr = MF_E_INVALIDSTREAMNUMBER;
            break;
        }

        /*****************************************
        ** Todo: Modify the accepted output list
        ** g_ppguidOutputTypes or use your own
        ** implementation of this function
        *****************************************/
        if(dwTypeIndex >= g_dwNumOutputTypes)
        {
            hr = MF_E_NO_MORE_TYPES;
            break;
        }

        {
            // Hold the lock while building the type so state read here
            // (via the globals/members) stays consistent.
            CAutoLock lock(&m_csLock);

            hr = MFCreateMediaType(&pMT);
            if(FAILED(hr))
            {
                break;
            }

            hr = pMT->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
            if(FAILED(hr))
            {
                break;
            }

            // Subtype comes from the static list indexed by dwTypeIndex.
            hr = pMT->SetGUID(MF_MT_SUBTYPE, *(g_ppguidOutputTypes[dwTypeIndex]));
            if(FAILED(hr))
            {
                break;
            }

            /*****************************************
            ** Todo: The following implementation
            ** forces a standard output resolution
            ** and framerate. Your MFT should set these
            ** values properly and update the Media
            ** Type as necessary after decoding the
            ** stream
            *****************************************/
            hr = MFSetAttributeSize(pMT, MF_MT_FRAME_SIZE, MFT_OUTPUT_WIDTH, MFT_OUTPUT_HEIGHT);
            if(FAILED(hr))
            {
                break;
            }

            hr = MFSetAttributeRatio(pMT, MF_MT_FRAME_RATE, MFT_FRAMERATE_NUMERATOR, MFT_FRAMERATE_DENOMINATOR);
            if(FAILED(hr))
            {
                break;
            }

            // Hand out an extra reference; the SAFERELEASE below drops ours.
            (*ppType) = pMT;
            (*ppType)->AddRef();
        }
    }while(false);

    SAFERELEASE(pMT);

    return hr;
}
// Builds and applies the encoder MFT's input media type (uncompressed video
// in cVideoInputFormat) and, on Win8+, applies best-effort rate-control /
// low-latency codec settings. Returns MF_E_NOT_INITIALIZED when the encoder
// is missing, otherwise the HRESULT of the media-type setup / SetInputType.
HRESULT EncodeTransform::SetInputMediaType()
{
    if (!mpEncoder)
    {
        return MF_E_NOT_INITIALIZED;
    }

    IMFMediaType* pMediaTypeIn = NULL;
    HRESULT hr = MFCreateMediaType(&pMediaTypeIn);
    if (SUCCEEDED(hr)) { hr = pMediaTypeIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); }
    if (SUCCEEDED(hr)) { hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, cVideoInputFormat); }
    if (SUCCEEDED(hr)) { hr = pMediaTypeIn->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); }
    if (SUCCEEDED(hr)) { hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, mStreamWidth, mStreamHeight); }
    if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, VIDEO_FPS, 1); }
    if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); }

    if (SUCCEEDED(hr))
    {
        hr = mpEncoder->SetInputType(0, pMediaTypeIn, 0);
        if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) { std::cout << "MF_E_TRANSFORM_TYPE_NOT_SET -> 0xC00D6D60L: You must set the output type first" << std::endl; }
        if (hr == MF_E_INVALIDMEDIATYPE) { std::cout << "MF_E_INVALIDMEDIATYPE -> 0xc00d36b4: the data specified for the media type is invalid, inconsistent, or not supported by this object" << std::endl; }

#if defined(CODECAPI_AVLowLatencyMode) // Win8 only
        // Best-effort encoder tuning. FIX: the original clobbered `hr` here,
        // so the function could return a tuning failure even though
        // SetInputType succeeded (or mask a SetInputType failure); it also
        // ran even when SetInputType had failed.
        if (SUCCEEDED(hr))
        {
            HRESULT hrCodec = mpEncoder->QueryInterface(IID_PPV_ARGS(&mpCodecAPI));
            if (SUCCEEDED(hrCodec))
            {
                VARIANT var;
                var.vt = VT_UI4;
                var.ulVal = eAVEncCommonRateControlMode_Quality;
                hrCodec = mpCodecAPI->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
                if (FAILED(hrCodec)) { printf("Failed to set rate control mode.\n"); }

                var.vt = VT_BOOL;
                var.boolVal = VARIANT_TRUE;
                hrCodec = mpCodecAPI->SetValue(&CODECAPI_AVLowLatencyMode, &var);
                if (FAILED(hrCodec)) { printf("Failed to enable low latency mode.\n"); }

                // Quality level used when the encoder is not bit-rate
                // constrained (see AVEncCommonRateControlMode).
                VARIANT quality;
                InitVariantFromUInt32(50, &quality);
                hrCodec = mpCodecAPI->SetValue(&CODECAPI_AVEncCommonQuality, &quality);
                if (FAILED(hrCodec)) { printf("Failed to adjust quality mode.\n"); }
            }
        }
#endif
    }

    // FIX: the local media type reference was leaked on every path.
    if (pMediaTypeIn) { pMediaTypeIn->Release(); }
    return hr;
}
bool initialise() { UINT32 videoDeviceCount = 0; IMFAttributes *videoConfig = NULL; IMFActivate **videoDevices = NULL; WCHAR *webcamFriendlyName; CHECK_HR(MFTRegisterLocalByCLSID( __uuidof(CColorConvertDMO), MFT_CATEGORY_VIDEO_PROCESSOR, L"", MFT_ENUM_FLAG_SYNCMFT, 0, NULL, 0, NULL ), "Error registering colour converter DSP.\n"); // Get the first available webcam. CHECK_HR(MFCreateAttributes(&videoConfig, 1), "Error creating video configuation.\n"); // Request video capture devices. CHECK_HR(videoConfig->SetGUID( MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID), "Error initialising video configuration object."); CHECK_HR(MFEnumDeviceSources(videoConfig, &videoDevices, &videoDeviceCount), "Error enumerating video devices.\n"); CHECK_HR(videoDevices[WEBCAM_DEVICE_INDEX]->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &webcamFriendlyName, NULL), "Error retrieving vide device friendly name.\n"); wprintf(L"First available webcam: %s\n", webcamFriendlyName); CHECK_HR(videoDevices[WEBCAM_DEVICE_INDEX]->ActivateObject(IID_PPV_ARGS(&videoSource)), "Error activating video device.\n"); // Create a source reader. CHECK_HR(MFCreateSourceReaderFromMediaSource( videoSource, videoConfig, &_videoReader), "Error creating video source reader.\n"); //ListModes(_videoReader); CHECK_HR(_videoReader->GetCurrentMediaType( (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, &videoSourceOutputType), "Error retrieving current media type from first video stream.\n"); Console::WriteLine(GetMediaTypeDescription(videoSourceOutputType)); // Note the webcam needs to support this media type. The list of media types supported can be obtained using the ListTypes function in MFUtility.h. 
MFCreateMediaType(&pSrcOutMediaType); pSrcOutMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); //pSrcOutMediaType->SetGUID(MF_MT_SUBTYPE, WMMEDIASUBTYPE_I420); pSrcOutMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); MFSetAttributeSize(pSrcOutMediaType, MF_MT_FRAME_SIZE, CAMERA_RESOLUTION_WIDTH, CAMERA_RESOLUTION_HEIGHT); CHECK_HR(MFSetAttributeRatio(pSrcOutMediaType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1), "Failed to set frame rate on video device out type.\n"); CHECK_HR(_videoReader->SetCurrentMediaType(0, NULL, pSrcOutMediaType), "Failed to set media type on source reader.\n"); //CHECK_HR(_videoReader->SetCurrentMediaType(0, NULL, videoSourceOutputType), "Failed to setdefault media type on source reader.\n"); // Create H.264 encoder. CHECK_HR(CoCreateInstance(CLSID_CMSH264EncoderMFT, NULL, CLSCTX_INPROC_SERVER, IID_IUnknown, (void**)&spTransformUnk), "Failed to create H264 encoder MFT.\n"); CHECK_HR(spTransformUnk->QueryInterface(IID_PPV_ARGS(&_pTransform)), "Failed to get IMFTransform interface from H264 encoder MFT object.\n"); MFCreateMediaType(&pMFTOutputMediaType); pMFTOutputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); pMFTOutputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); //pMFTOutputMediaType->SetUINT32(MF_MT_AVG_BITRATE, 240000); CHECK_HR(pMFTOutputMediaType->SetUINT32(MF_MT_AVG_BITRATE, TARGET_AVERAGE_BIT_RATE), "Failed to set average bit rate on H264 output media type.\n"); CHECK_HR(MFSetAttributeSize(pMFTOutputMediaType, MF_MT_FRAME_SIZE, CAMERA_RESOLUTION_WIDTH, CAMERA_RESOLUTION_HEIGHT), "Failed to set frame size on H264 MFT out type.\n"); CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1), "Failed to set frame rate on H264 MFT out type.\n"); CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1), "Failed to set aspect ratio on H264 MFT out type.\n"); pMFTOutputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, 2); // 2 = Progressive scan, i.e. 
non-interlaced. pMFTOutputMediaType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); //CHECK_HR(MFSetAttributeRatio(pMFTOutputMediaType, MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Base), "Failed to set profile on H264 MFT out type.\n"); //CHECK_HR(pMFTOutputMediaType->SetDouble(MF_MT_MPEG2_LEVEL, 3.1), "Failed to set level on H264 MFT out type.\n"); //CHECK_HR(pMFTOutputMediaType->SetUINT32(MF_MT_MAX_KEYFRAME_SPACING, 10), "Failed to set key frame interval on H264 MFT out type.\n"); //CHECK_HR(pMFTOutputMediaType->SetUINT32(CODECAPI_AVEncCommonQuality, 100), "Failed to set H264 codec qulaity.\n"); //hr = pAttributes->SetUINT32(CODECAPI_AVEncMPVGOPSize, 1) CHECK_HR(_pTransform->SetOutputType(0, pMFTOutputMediaType, 0), "Failed to set output media type on H.264 encoder MFT.\n"); MFCreateMediaType(&pMFTInputMediaType); pMFTInputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); pMFTInputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_IYUV); CHECK_HR(MFSetAttributeSize(pMFTInputMediaType, MF_MT_FRAME_SIZE, CAMERA_RESOLUTION_WIDTH, CAMERA_RESOLUTION_HEIGHT), "Failed to set frame size on H264 MFT out type.\n"); CHECK_HR(MFSetAttributeRatio(pMFTInputMediaType, MF_MT_FRAME_RATE, TARGET_FRAME_RATE, 1), "Failed to set frame rate on H264 MFT out type.\n"); CHECK_HR(MFSetAttributeRatio(pMFTInputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1), "Failed to set aspect ratio on H264 MFT out type.\n"); pMFTInputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, 2); CHECK_HR(_pTransform->SetInputType(0, pMFTInputMediaType, 0), "Failed to set input media type on H.264 encoder MFT.\n"); CHECK_HR(_pTransform->GetInputStatus(0, &mftStatus), "Failed to get input status from H.264 MFT.\n"); if (MFT_INPUT_STATUS_ACCEPT_DATA != mftStatus) { printf("E: ApplyTransform() pTransform->GetInputStatus() not accept data.\n"); goto done; } //Console::WriteLine(GetMediaTypeDescription(pMFTOutputMediaType)); CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, NULL), "Failed to process FLUSH command 
on H.264 MFT.\n"); CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL), "Failed to process BEGIN_STREAMING command on H.264 MFT.\n"); CHECK_HR(_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, NULL), "Failed to process START_OF_STREAM command on H.264 MFT.\n"); memset(&_outputDataBuffer, 0, sizeof _outputDataBuffer); return true; done: printf("MediaFoundationH264LiveSource initialisation failed.\n"); return false; }
// Initialises the WMV9 MFT decoder for the given codec/media type:
// extracts bitmap info and extradata from pmt, normalises a VC-1 sequence
// header if needed, builds the compressed input type, and selects an output
// type. Returns S_OK or the first failing HRESULT.
STDMETHODIMP CDecWMV9MFT::InitDecoder(AVCodecID codec, const CMediaType *pmt)
{
    HRESULT hr = S_OK;
    DbgLog((LOG_TRACE, 10, L"CDecWMV9MFT::InitDecoder(): Initializing WMV9 MFT decoder"));

    DestroyDecoder(false);

    BITMAPINFOHEADER *pBMI = nullptr;
    REFERENCE_TIME rtAvg = 0;
    DWORD dwARX = 0, dwARY = 0;
    videoFormatTypeHandler(*pmt, &pBMI, &rtAvg, &dwARX, &dwARY);

    size_t extralen = 0;
    BYTE *extra = nullptr;
    getExtraData(*pmt, nullptr, &extralen);
    if (extralen > 0) {
        extra = (BYTE *)av_mallocz(extralen + FF_INPUT_BUFFER_PADDING_SIZE);
        getExtraData(*pmt, extra, &extralen);
    }

    // VC-1: ensure the sequence header starts with a full 00 00 01 start code.
    // FIX: guard requires at least 4 bytes — with 1..3 bytes of extradata the
    // unsigned expression (extralen - 4) wrapped around and the scan read far
    // past the buffer.
    if (codec == AV_CODEC_ID_VC1 && extralen >= 4) {
        size_t i = 0;
        for (i = 0; i < (extralen - 4); i++) {
            uint32_t code = AV_RB32(extra + i);
            if ((code & ~0xFF) == 0x00000100)
                break;
        }
        if (i == 0) {
            // Start code found at offset 0 but missing its leading zero byte:
            // shift right and prepend 0 (padding guarantees room).
            memmove(extra + 1, extra, extralen);
            *extra = 0;
            extralen++;
        } else if (i > 1) {
            DbgLog((LOG_TRACE, 10, L"-> VC-1 Header at position %u (should be 0 or 1)", i));
        }
    }

    if (extralen > 0) {
        m_vc1Header = new CVC1HeaderParser(extra, extralen, codec);
    }

    /* Create input type */
    m_nCodecId = codec;

    IMFMediaType *pMTIn = nullptr;
    MF.CreateMediaType(&pMTIn);

    pMTIn->SetUINT32(MF_MT_COMPRESSED, TRUE);
    pMTIn->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, FALSE);
    pMTIn->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, FALSE);

    pMTIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    pMTIn->SetGUID(MF_MT_SUBTYPE, VerifySubtype(codec, pmt->subtype));

    MFSetAttributeSize(pMTIn, MF_MT_FRAME_SIZE, pBMI->biWidth, pBMI->biHeight);

    UINT32 rateNum = 0, rateDen = 0;
    MF.AverageTimePerFrameToFrameRate(rtAvg, &rateNum, &rateDen);
    MFSetAttributeRatio(pMTIn, MF_MT_FRAME_RATE, rateNum, rateDen);

    pMTIn->SetBlob(MF_MT_USER_DATA, extra, (UINT32)extralen);
    av_freep(&extra);

    hr = m_pMFT->SetInputType(0, pMTIn, 0);
    if (FAILED(hr)) {
        DbgLog((LOG_TRACE, 10, L"-> Failed to set input type on MFT"));
        // FIX: pMTIn was leaked on this early-return path.
        SafeRelease(&pMTIn);
        return hr;
    }

    /* Create output type */
    hr = SelectOutputType();
    SafeRelease(&pMTIn);

    if (FAILED(hr)) {
        DbgLog((LOG_TRACE, 10, L"-> Failed to set output type on MFT"));
        return hr;
    }

    // Interlacing flag and VC-1 reorder mode from the negotiated output type.
    IMFMediaType *pMTOut = nullptr;
    m_pMFT->GetOutputCurrentType(0, &pMTOut);
    m_bInterlaced = MFGetAttributeUINT32(pMTOut, MF_MT_INTERLACE_MODE, MFVideoInterlace_Unknown) > MFVideoInterlace_Progressive;
    SafeRelease(&pMTOut);

    m_bManualReorder = (codec == AV_CODEC_ID_VC1) && !(m_pCallback->GetDecodeFlags() & LAV_VIDEO_DEC_FLAG_ONLY_DTS);

    return S_OK;
}
// Creates this->InputMediaType and configures it as progressive video of the
// given major type / subtype at a fixed 640x480, 30 fps, square pixels.
// Returns the first failing HRESULT, otherwise S_OK.
HRESULT VideoCapture::CreateInputMediaType(GUID majorType, GUID subtype)
{
    // pin_ptr keeps the managed field fixed while native MF code writes it.
    pin_ptr<IMFMediaType *> pInputMediaType = &(this->InputMediaType);

    // FIX: the original tested `hr != S_OK`, which would misreport positive
    // success codes (e.g. S_FALSE) as failures; SUCCEEDED/FAILED is the
    // COM-correct check.
    HRESULT hr = MFCreateMediaType(pInputMediaType);
    if (SUCCEEDED(hr)) { hr = this->InputMediaType->SetGUID(MF_MT_MAJOR_TYPE, majorType); }
    if (SUCCEEDED(hr)) { hr = this->InputMediaType->SetGUID(MF_MT_SUBTYPE, subtype); }
    if (SUCCEEDED(hr)) { hr = this->InputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); }
    if (SUCCEEDED(hr)) { hr = MFSetAttributeSize(this->InputMediaType, MF_MT_FRAME_SIZE, 640, 480); }
    if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(this->InputMediaType, MF_MT_FRAME_RATE, 30, 1); }
    if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(this->InputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); }
    return hr;
}
// Constructs a video writer that encodes frames of `texture` (H.264 video,
// AAC audio) into an MP4 written to `target_path` via a Media Foundation
// sink writer. Frame dimensions are taken from the texture description.
video_writer::video_writer(
    std::wstring& target_path,
    IMFMediaTypePtr& audio_media_type,
    ID3D11DeviceContext2Ptr& context,
    ID3D11Texture2DPtr& texture /*, unsigned int width, unsigned int height*/)
    : target_path_(target_path), audio_media_type_(audio_media_type), context_(context), texture_(texture)
{
    D3D11_TEXTURE2D_DESC desc = {};
    texture->GetDesc(&desc);
    width_ = desc.Width;
    height_ = desc.Height;

    const unsigned int WIDTH = width_;
    const unsigned int HEIGHT = height_;
    const unsigned int BITRATE = 3000000;                           // video: 3 Mbit/s
    const unsigned int ASPECT_NUM = 1;
    const unsigned int ASPECT_DENOM = 1;
    const unsigned long BPP_IN = 32;                                // input frames are 32 bpp
    const unsigned long cbMaxLength = WIDTH * HEIGHT * BPP_IN / 8;  // bytes per uncompressed frame
    const unsigned int ONE_SECOND = RATE_NUM / RATE_DENOM;          // frames per second
    const unsigned int FRAME_NUM = 10 * ONE_SECOND;                 // NOTE(review): unused in this constructor

    samples_per_second = 44100;
    average_bytes_per_second = 24000;
    channel_count = 2;
    bits_per_sample = 16;

    // Create the sink writer from a file-backed byte stream.
    CHK(MFCreateFile(MF_FILE_ACCESSMODE::MF_ACCESSMODE_WRITE, MF_FILE_OPENMODE::MF_OPENMODE_DELETE_IF_EXIST, MF_FILE_FLAGS::MF_FILEFLAGS_NONE, target_path.c_str(), &byte_stream_));
    CHK(MFCreateAttributes(&attr_, 10));
    CHK(attr_->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, true));
    CHK(attr_->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, false));
    CHK(attr_->SetUINT32(MF_SINK_WRITER_DISABLE_THROTTLING, true));
    IMFSinkWriterPtr sinkWriter;
    // The L".mp4" URL only selects the container; output goes to byte_stream_.
    CHK(MFCreateSinkWriterFromURL(L".mp4", byte_stream_.Get(), attr_.Get(), &sinkWriter));
    CHK(sinkWriter.As(&sink_writer_));
    //CHK(MFCreateSinkWriterFromURL(L".mp4", byte_stream_.Get(), attr_.Get(), &sink_writer_));

    //
    // Output media type setup
    //

    // Video: H.264 Main profile.
    CHK(MFCreateMediaType(&media_type_out_));
    CHK(media_type_out_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
    CHK(media_type_out_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
    CHK(media_type_out_->SetUINT32(MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Main));
    //CHK(media_type_out_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32));
    CHK(media_type_out_->SetUINT32(MF_MT_AVG_BITRATE, BITRATE));
    CHK(media_type_out_->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
    CHK(MFSetAttributeSize(media_type_out_.Get(), MF_MT_FRAME_SIZE, WIDTH, HEIGHT));
    CHK(MFSetAttributeRatio(media_type_out_.Get(), MF_MT_FRAME_RATE, RATE_NUM, RATE_DENOM));
    CHK(MFSetAttributeRatio(media_type_out_.Get(), MF_MT_PIXEL_ASPECT_RATIO, ASPECT_NUM, ASPECT_DENOM));
    CHK(sink_writer_->AddStream(media_type_out_.Get(), &stream_index_));

    IMFTransformPtr mft;
    //IMFRateSupportPtr ptr;
    //CHK(sink_writer_->GetServiceForStream(stream_index_, MF_RATE_CONTROL_SERVICE, __uuidof(IMFRateSupport), &ptr));

    // Audio: AAC.
    CHK(MFCreateMediaType(&media_type_out_audio_));
    CHK(media_type_out_audio_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
    CHK(media_type_out_audio_->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_AAC));
    CHK(media_type_out_audio_->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, samples_per_second));
    CHK(media_type_out_audio_->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bits_per_sample));
    CHK(media_type_out_audio_->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, channel_count));
    CHK(media_type_out_audio_->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, average_bytes_per_second));
    CHK(media_type_out_audio_->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, 1));
    CHK(sink_writer_->AddStream(media_type_out_audio_.Get(), &stream_index_audio_));

    //
    // Input media type setup
    //

    // Video: uncompressed RGB32 frames matching the texture size.
    CHK(MFCreateMediaType(&media_type_in_));
    CHK(media_type_in_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
    CHK(media_type_in_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32));
    CHK(media_type_in_->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
    CHK(MFSetAttributeSize(media_type_in_.Get(), MF_MT_FRAME_SIZE, WIDTH, HEIGHT));
    CHK(MFSetAttributeRatio(media_type_in_.Get(), MF_MT_FRAME_RATE, RATE_NUM, RATE_DENOM));
    CHK(MFSetAttributeRatio(media_type_in_.Get(), MF_MT_PIXEL_ASPECT_RATIO, ASPECT_NUM, ASPECT_DENOM));

    // Encoder setup (disabled experiments kept for reference).
    //prop_variant prop;
    //IPropertyStorePtr pPropertyStore;
    //IMFAttributesPtr pEncoderParameters;
    //CHK(PSCreateMemoryPropertyStore(__uuidof(IPropertyStore), (void**) &pPropertyStore));
    //prop.value().vt = VT_BOOL;
    //prop.value().boolVal = VARIANT_FALSE;
    //CHK(pPropertyStore->SetValue(MFPKEY_VBRENABLED, prop.value()));
    //prop.value().vt = VT_I4;
    //prop.value().lVal = 100;
    //CHK(pPropertyStore->SetValue(MFPKEY_VBRQUALITY, prop.value()));
    //CHK(MFCreateAttributes(&pEncoderParameters, 5));
    //CHK(attr_->SetUnknown(MF_SINK_WRITER_ENCODER_CONFIG, pPropertyStore.Get()));

    CHK(sink_writer_->SetInputMediaType(stream_index_, media_type_in_.Get(), nullptr /*pEncoderParameters.Get()*/));

    // Audio: copy of the supplied input audio media type.
    CHK(MFCreateMediaType(&media_type_in_audio_));
    //CHK(media_type_in_audio_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
    //CHK(media_type_in_audio_->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM));
    //CHK(media_type_in_audio_->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bits_per_sample));
    //CHK(media_type_in_audio_->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, samples_per_second));
    //CHK(media_type_in_audio_->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, channel_count));
    audio_media_type_->CopyAllItems(media_type_in_audio_.Get());
    CHK(sink_writer_->SetInputMediaType(stream_index_audio_, media_type_in_audio_.Get(), NULL));

    // Check whether a hardware encoder ended up being used.
    {
        IMFTransformPtr transform;
        ICodecAPIPtr codec;
        GUID guid;
        CHK(sink_writer_->GetServiceForStream(stream_index_, GUID_NULL, IID_IMFTransform, &transform));
        IMFAttributesPtr attributes;
        CHK(transform->GetAttributes(&attributes));
        UINT32 l = 0;
        std::wstring str;
        bool use_hw = false;
        HRESULT hr = attributes->GetStringLength(MFT_ENUM_HARDWARE_URL_Attribute, &l);
        if (SUCCEEDED(hr))
        {
            // NOTE(review): writing through str.data() after reserve() (not
            // resize()) stores into unsized capacity — undefined behaviour;
            // should be str.resize(l) (or a local buffer). Confirm and fix.
            str.reserve(l + 1);
            hr = attributes->GetString(MFT_ENUM_HARDWARE_URL_Attribute, (LPWSTR) str.data(), l + 1, &l);
            if (SUCCEEDED(hr)){
                use_hw = true;
                DOUT2(L"/////// HARDWARE ENCODE IS USED. ////\n");
            }
        }
    }

    //
    // Start writing.
    //
    CHK(sink_writer_->BeginWriting());

    //
    // Create the reusable media sample.
    //
    CHK(MFCreateSample(&sample_));
    video_sample_time_ = 0;
    CHK(sample_->SetSampleDuration(hnsSampleDuration));

    //
    // Create the media buffer and attach it to the sample.
    //
    CHK(MFCreateAlignedMemoryBuffer(cbMaxLength, MF_16_BYTE_ALIGNMENT, &buffer_)); // 16-byte alignment
    CHK(buffer_->SetCurrentLength(cbMaxLength));
    CHK(sample_->AddBuffer(buffer_.Get()));

    //
    // Map the source texture for reading and prepare the copy routine.
    sf::map<> map(context,texture, 0, D3D11_MAP_READ, 0);
    copy_image_.reset(new video_writer::copy_image(width_, height_, map.row_pitch()));
    copy_func_ = (copy_func_t)copy_image_->getCode();
}