Example no. 1
// Create the sink writer. Returns the stream index.
HRESULT VideoEncoder::CreateSinkWriter(IMFSinkWriter** ppSinkWriter, DWORD* pStreamIndex)
{
	HRESULT hr = S_OK;
	if (this->m_outputFile == L"")
	{
		return HRESULT_FROM_WIN32(ERROR_FILE_INVALID);
	}

	// Create the sink writer.
	*ppSinkWriter = nullptr;	
	IMFSinkWriter *pSinkWriter = nullptr;
	IMFMediaType* pOutputMediaType = nullptr;
	IMFMediaType *pInMediaType = nullptr;   
	CheckHR(MFCreateSinkWriterFromURL(this->m_outputFile.c_str(), nullptr, nullptr, &pSinkWriter));

	// Create and configure the output media type.
	CheckHR(MFCreateMediaType(&pOutputMediaType));
	CheckHR(pOutputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
	CheckHR(pOutputMediaType->SetGUID(MF_MT_SUBTYPE, this->m_outputVideoFormat));
	CheckHR(pOutputMediaType->SetUINT32(MF_MT_AVG_BITRATE, this->m_videoBitRate));
	CheckHR(pOutputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));

	CheckHR(MFSetAttributeSize(pOutputMediaType, MF_MT_FRAME_SIZE, this->m_frameWidth, this->m_frameHeight));
	CheckHR(MFSetAttributeRatio(pOutputMediaType, MF_MT_FRAME_RATE, (UINT32)this->m_fps, 1));
	CheckHR(MFSetAttributeRatio(pOutputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
	DWORD streamIndex;
	CheckHR(pSinkWriter->AddStream(pOutputMediaType, &streamIndex));

	// Set the input media type.
    CheckHR(MFCreateMediaType(&pInMediaType));   
    CheckHR(pInMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));   
    CheckHR(pInMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32));     
    CheckHR(pInMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive)); 

	// The input stride information is not required by every output codec, but some codecs, such as H.264, need it.
	// If the stride is omitted or set to a negative value, H.264 processes the image bottom-up.
	CheckHR(pInMediaType->SetUINT32(MF_MT_DEFAULT_STRIDE, this->m_frameStride));
    CheckHR(MFSetAttributeSize(pInMediaType, MF_MT_FRAME_SIZE, this->m_frameWidth, this->m_frameHeight));
    CheckHR(MFSetAttributeRatio(pInMediaType, MF_MT_FRAME_RATE, (UINT32)this->m_fps, 1));   
    CheckHR(MFSetAttributeRatio(pInMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
    CheckHR(pSinkWriter->SetInputMediaType(streamIndex, pInMediaType, nullptr));   

	// Begin writing.
	CheckHR(pSinkWriter->BeginWriting());

	*ppSinkWriter = pSinkWriter;
	(*ppSinkWriter)->AddRef();
	*pStreamIndex = streamIndex;

cleanup:
	if (FAILED(hr))
	{
		// Log the failing HRESULT.
		this->m_logFileStream << "Unexpected error: 0x" << std::hex << hr << std::dec << endl;
	}
	SafeRelease(&pSinkWriter);
	SafeRelease(&pOutputMediaType);
	SafeRelease(&pInMediaType);
	return hr;
}
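
After CreateSinkWriter succeeds, each frame still has to be delivered with IMFSinkWriter::WriteSample. Below is a minimal sketch of such a WriteFrame helper; it is not part of the original class, and it assumes that CheckHR jumps to the cleanup label on failure, that pFrameData points to a top-down RGB32 image whose stride equals m_frameStride, and that rtStart/rtDuration are in 100-ns units.

HRESULT VideoEncoder::WriteFrame(IMFSinkWriter* pSinkWriter, DWORD streamIndex,
	const BYTE* pFrameData, LONGLONG rtStart, LONGLONG rtDuration)
{
	HRESULT hr = S_OK;
	IMFSample* pSample = nullptr;
	IMFMediaBuffer* pBuffer = nullptr;
	BYTE* pData = nullptr;
	const DWORD cbBuffer = this->m_frameStride * this->m_frameHeight;

	// Copy the frame into a media buffer.
	CheckHR(MFCreateMemoryBuffer(cbBuffer, &pBuffer));
	CheckHR(pBuffer->Lock(&pData, nullptr, nullptr));
	CheckHR(MFCopyImage(pData, this->m_frameStride, pFrameData, this->m_frameStride,
		this->m_frameWidth * 4, this->m_frameHeight));
	CheckHR(pBuffer->Unlock());
	CheckHR(pBuffer->SetCurrentLength(cbBuffer));

	// Wrap the buffer in a sample, time-stamp it, and hand it to the sink writer.
	CheckHR(MFCreateSample(&pSample));
	CheckHR(pSample->AddBuffer(pBuffer));
	CheckHR(pSample->SetSampleTime(rtStart));
	CheckHR(pSample->SetSampleDuration(rtDuration));
	CheckHR(pSinkWriter->WriteSample(streamIndex, pSample));

cleanup:
	SafeRelease(&pSample);
	SafeRelease(&pBuffer);
	return hr;
}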
Example no. 2
//-------------------------------------------------------------------
// Open a file for writing and prepare the sink writer
//
HRESULT VidWriter::initSinkWriter(WCHAR *filename)
{
    HRESULT hr = S_OK;
	IMFMediaType *pMediaTypeOut = NULL;   
    IMFMediaType *pMediaTypeIn = NULL;   
   
	// Create the sink writer
	SafeRelease(&m_pWriter);
	hr = MFCreateSinkWriterFromURL(filename, NULL, NULL, &m_pWriter);
	if (FAILED(hr)) goto done;

	// Create the output media type
	hr = MFCreateMediaType(&pMediaTypeOut);   
	if (FAILED(hr)) goto done;

	// Configure it
	hr = configureOutput(pMediaTypeOut);
	if (FAILED(hr)) goto done;

	// Add it to the sink writer
	hr = m_pWriter->AddStream(pMediaTypeOut, &m_streamIndex);   
	if (FAILED(hr)) goto done;

	// Create the input media type
	hr = MFCreateMediaType(&pMediaTypeIn);   
	if (FAILED(hr)) goto done;
	
	// Configure it
	hr = configureInput(pMediaTypeIn);
	if (FAILED(hr)) goto done;

	// Add it to the sink writer
	hr = m_pWriter->SetInputMediaType(m_streamIndex, pMediaTypeIn, NULL);   
   	if (FAILED(hr)) goto done;  

    // Tell the sink writer to start accepting data
	hr = m_pWriter->BeginWriting();

	// Reset the frame timer
	m_rtStart = 0;

done:    
	SafeRelease(&pMediaTypeOut);
    SafeRelease(&pMediaTypeIn);
    return hr;
}
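
The configureOutput and configureInput helpers are not shown in this example. As a rough sketch of what configureOutput might contain, mirroring the attributes the other examples set on their H.264 output type (the m_width, m_height, m_fps and m_bitrate members are assumed here, not taken from the original class):

HRESULT VidWriter::configureOutput(IMFMediaType *pMediaType)
{
	// H.264 output: codec, bit rate, frame geometry and rate.
	HRESULT hr = pMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
	if (SUCCEEDED(hr)) hr = pMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
	if (SUCCEEDED(hr)) hr = pMediaType->SetUINT32(MF_MT_AVG_BITRATE, m_bitrate);
	if (SUCCEEDED(hr)) hr = pMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
	if (SUCCEEDED(hr)) hr = MFSetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, m_width, m_height);
	if (SUCCEEDED(hr)) hr = MFSetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, m_fps, 1);
	if (SUCCEEDED(hr)) hr = MFSetAttributeRatio(pMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
	return hr;
}

configureInput would look much the same, with MFVideoFormat_RGB32 as the subtype and no average bit rate.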
Example no. 3
void MfVideoEncoder::Init(int width, int height, int fps) {
	mWidth = width;
	mHeight = height;
	mFps = fps;
	mFrameTime = 10 * 1000 * 1000 / fps;

	CComPtr<IMFMediaType> pMediaTypeOut;
	CComPtr<IMFMediaType> pMediaTypeIn;

	HRESULT hr = MFCreateSinkWriterFromURL(mFilename.c_str(), NULL, NULL, &mSinkWriter);

	// Set the output media type.
	if (SUCCEEDED(hr)) {
		hr = MFCreateMediaType(&pMediaTypeOut);
	}
	if (SUCCEEDED(hr)) {
		hr = pMediaTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
	}
	if (SUCCEEDED(hr)) {
		hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
	}
	if (SUCCEEDED(hr)) {
		hr = pMediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, 8000000);
	}
	if (SUCCEEDED(hr)) {
		hr = pMediaTypeOut->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
	}
	if (SUCCEEDED(hr)) {
		hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, width, height);
	}
	if (SUCCEEDED(hr)) {
		hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, fps, 1);
	}
	if (SUCCEEDED(hr)) {
		hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
	}
	if (SUCCEEDED(hr)) {
		hr = pMediaTypeOut->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1);
	}

	if (SUCCEEDED(hr)) {
		hr = mSinkWriter->AddStream(pMediaTypeOut, (DWORD*)&mStreamIndex);
	}

	// Set the input media type.
	if (SUCCEEDED(hr)) {
		hr = MFCreateMediaType(&pMediaTypeIn);
	}
	if (SUCCEEDED(hr)) {
		hr = pMediaTypeIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
	}
	if (SUCCEEDED(hr)) {
		hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32);
	}
	if (SUCCEEDED(hr)) {
		// 77 == eAVEncH264VProfile_Main; the H.264 profile is normally set on the output type.
		hr = pMediaTypeIn->SetUINT32(MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Main);
	}
	if (SUCCEEDED(hr)) {
		hr = pMediaTypeIn->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
	}
	if (SUCCEEDED(hr)) {
		hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, width, height);
	}
	if (SUCCEEDED(hr)) {
		hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, fps, 1);
	}
	if (SUCCEEDED(hr)) {
		hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
	}
	if (SUCCEEDED(hr)) {
		hr = mSinkWriter->SetInputMediaType(mStreamIndex, pMediaTypeIn, nullptr);
	}
	
	// Tell the sink writer to start accepting data.
	if (SUCCEEDED(hr)) {
		hr = mSinkWriter->BeginWriting();
	}
	
	CComPtr<ICodecAPI> encoder;
	if (SUCCEEDED(hr)) {
		hr = mSinkWriter->GetServiceForStream(0, GUID_NULL, IID_PPV_ARGS(&encoder));
	}
	if (SUCCEEDED(hr)) {
		CComVariant quality((UINT32)eAVEncCommonRateControlMode_CBR, VT_UI4);
		hr = encoder->SetValue(&CODECAPI_AVEncCommonRateControlMode, &quality);
	}	
	if (SUCCEEDED(hr)) {
		CComVariant quality((UINT32)80000000, VT_UI4);
		hr = encoder->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &quality);
	}
	
	// Return the pointer to the caller.
	if (FAILED(hr)) {
		mSinkWriter.Release();
		throw TempleException("Unable to begin writing to the video stream");
	}
}
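
Init only prepares the sink writer; frames still have to be delivered and time-stamped. A sketch of a matching WriteFrame method follows; it assumes a top-down RGB32 buffer of mWidth * mHeight * 4 bytes and a running mSampleTime member (not present in the original excerpt) that advances by the mFrameTime computed in Init.

void MfVideoEncoder::WriteFrame(const BYTE* pRgb32Data) {
	CComPtr<IMFMediaBuffer> buffer;
	CComPtr<IMFSample> sample;
	const DWORD cbFrame = mWidth * mHeight * 4;

	HRESULT hr = MFCreateMemoryBuffer(cbFrame, &buffer);
	if (SUCCEEDED(hr)) {
		BYTE* pDst = nullptr;
		hr = buffer->Lock(&pDst, nullptr, nullptr);
		if (SUCCEEDED(hr)) {
			memcpy(pDst, pRgb32Data, cbFrame);
			buffer->Unlock();
		}
	}
	if (SUCCEEDED(hr)) {
		hr = buffer->SetCurrentLength(cbFrame);
	}
	if (SUCCEEDED(hr)) {
		hr = MFCreateSample(&sample);
	}
	if (SUCCEEDED(hr)) {
		hr = sample->AddBuffer(buffer);
	}
	if (SUCCEEDED(hr)) {
		hr = sample->SetSampleTime(mSampleTime);	// 100-ns units
	}
	if (SUCCEEDED(hr)) {
		hr = sample->SetSampleDuration(mFrameTime);
	}
	if (SUCCEEDED(hr)) {
		hr = mSinkWriter->WriteSample(mStreamIndex, sample);
	}
	if (SUCCEEDED(hr)) {
		mSampleTime += mFrameTime;
	}
	if (FAILED(hr)) {
		throw TempleException("Unable to write a video frame");
	}
}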
Example no. 4
HRESULT CCapture::StartCapture(
    IMFActivate *pActivate, 
    const WCHAR *pwszFileName, 
    const EncodingParameters& param
    )
{
    HRESULT hr = S_OK;

    IMFMediaSource *pSource = NULL;

    EnterCriticalSection(&m_critsec);

    // Create the media source for the device.
    hr = pActivate->ActivateObject(
        __uuidof(IMFMediaSource), 
        (void**)&pSource
        );

    // Get the symbolic link. This is needed to handle device-
    // loss notifications. (See CheckDeviceLost.)

    if (SUCCEEDED(hr))
    {
        hr = pActivate->GetAllocatedString(
            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK,
            &m_pwszSymbolicLink,
            NULL
            );
    }

    if (SUCCEEDED(hr))
    {
        hr = OpenMediaSource(pSource);
    }

    // Create the sink writer 
    if (SUCCEEDED(hr))
    {
        hr = MFCreateSinkWriterFromURL(
            pwszFileName,
            NULL,
            NULL,
            &m_pWriter
            );
    }    

    // Set up the encoding parameters.
    if (SUCCEEDED(hr))
    {
        hr = ConfigureCapture(param);
    }

    if (SUCCEEDED(hr))
    {
        m_bFirstSample = TRUE;
        m_llBaseTime = 0;

        // Request the first video frame.

        hr = m_pReader->ReadSample(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            0,
            NULL,
            NULL,
            NULL,
            NULL
            );
    }

    SafeRelease(&pSource);
    LeaveCriticalSection(&m_critsec);
    return hr;
}
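
StartCapture only requests the first frame; subsequent frames arrive through the source reader's asynchronous callback, which forwards them to the sink writer. A condensed sketch of such an OnReadSample implementation (the actual one is not shown in this example) that rebases timestamps on the first sample and assumes the video stream was added at index 0:

HRESULT CCapture::OnReadSample(
    HRESULT hrStatus,
    DWORD /*dwStreamIndex*/,
    DWORD /*dwStreamFlags*/,
    LONGLONG llTimeStamp,
    IMFSample *pSample
    )
{
    EnterCriticalSection(&m_critsec);

    HRESULT hr = hrStatus;

    if (SUCCEEDED(hr) && pSample)
    {
        if (m_bFirstSample)
        {
            // Rebase timestamps so the file starts at zero.
            m_llBaseTime = llTimeStamp;
            m_bFirstSample = FALSE;
        }
        llTimeStamp -= m_llBaseTime;

        hr = pSample->SetSampleTime(llTimeStamp);

        if (SUCCEEDED(hr))
        {
            hr = m_pWriter->WriteSample(0, pSample);
        }
    }

    // Request the next frame.
    if (SUCCEEDED(hr))
    {
        hr = m_pReader->ReadSample(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            0,
            NULL,
            NULL,
            NULL,
            NULL
            );
    }

    LeaveCriticalSection(&m_critsec);
    return hr;
}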
Example no. 5
VideoCompressorResult VideoCompressor::OpenFile(const String &Filename, UINT Width, UINT Height, UINT BitRate, UINT FrameRate, UINT AudioDeviceIndex, Clock *Timer)
{
    VideoCompressorResult Result = VideoCompressorResultSuccess;
    _Width = Width;
    _Height = Height;
    _CapturingAudio = (AudioDeviceIndex != 0xFFFFFFFF);
    _Clock = Timer;
    HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
    //PersistentAssert(SUCCEEDED(hr), "CoInitializeEx failed");

    hr = MFStartup(MF_VERSION);
    PersistentAssert(SUCCEEDED(hr), "MFStartup failed");
    
    hr = MFCreateSinkWriterFromURL(
            UnicodeString(Filename).CString(),
            NULL,
            NULL,
            &_Writer
            );
    PersistentAssert(SUCCEEDED(hr), "MFCreateSinkWriterFromURL failed");

    const UINT RawBufferSize = Width * Height * 4;
    
    IMFMediaType *OutputMediaType;
    MFCreateMediaType(&OutputMediaType);
    InitMediaType(OutputMediaType, MFVideoFormat_H264, BitRate, Width, Height, FrameRate);

    IMFMediaType *InputMediaType;
    MFCreateMediaType(&InputMediaType);
    InitMediaType(InputMediaType, MFVideoFormat_RGB32, RawBufferSize, Width, Height, FrameRate);

    DWORD VideoStreamIndex;
    hr = _Writer->AddStream(OutputMediaType, &VideoStreamIndex);
    PersistentAssert(SUCCEEDED(hr), "AddStream failed");
    OutputMediaType->Release();
    
    /*hr = MFTRegisterLocalByCLSID(
            __uuidof(CColorConvertDMO),
            MFT_CATEGORY_VIDEO_PROCESSOR,
            L"",
            MFT_ENUM_FLAG_SYNCMFT,
            0,
            NULL,
            0,
            NULL
            );
    PersistentAssert(SUCCEEDED(hr), "MFTRegisterLocalByCLSID failed");*/

    hr = _Writer->SetInputMediaType(VideoStreamIndex, InputMediaType, NULL);
    InputMediaType->Release();
    if(FAILED(hr))
    {
        if(Width > 1920 || Height > 1080)
        {
            MessageBox(NULL, "The maximum resolution for H.264 video is 1920x1080.", "Invalid Window Dimensions", MB_OK | MB_ICONERROR);
        }
        else
        {
            MessageBox(NULL, "There was an error when attempting to initialize video capture.  The maximum resolution for H.264 video is 1920x1080.", "Invalid Window Dimensions", MB_OK | MB_ICONERROR);
        }
        _Writer->Release();
        _Writer = NULL;
        _Clock = NULL;
        return VideoCompressorResultFailure;
    }
    
    if(_CapturingAudio)
    {
        //
        // Setup the output media type
        //
        IMFMediaType *OutputAudioType;
        hr = MFCreateMediaType( &OutputAudioType );
        PersistentAssert(SUCCEEDED(hr), "MFCreateMediaType failed");

        const UINT SamplesPerSecond = 44100;
        const UINT AverageBytesPerSecond = 24000;
        const UINT ChannelCount = 2;
        const UINT BitsPerSample = 16;
        
        OutputAudioType->SetGUID( MF_MT_MAJOR_TYPE, MFMediaType_Audio ) ;  
        OutputAudioType->SetGUID( MF_MT_SUBTYPE, MFAudioFormat_AAC ) ;
        OutputAudioType->SetUINT32( MF_MT_AUDIO_SAMPLES_PER_SECOND, SamplesPerSecond ) ;
        OutputAudioType->SetUINT32( MF_MT_AUDIO_BITS_PER_SAMPLE, BitsPerSample ) ;
        OutputAudioType->SetUINT32( MF_MT_AUDIO_NUM_CHANNELS, ChannelCount ) ;
        OutputAudioType->SetUINT32( MF_MT_AUDIO_AVG_BYTES_PER_SECOND, AverageBytesPerSecond ) ;
        OutputAudioType->SetUINT32( MF_MT_AUDIO_BLOCK_ALIGNMENT, 1 ) ;
        //OutputAudioType->SetUINT32( MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION, 0x29 ) ;

        DWORD AudioStreamIndex;
        hr = _Writer->AddStream( OutputAudioType, &AudioStreamIndex );
        PersistentAssert(SUCCEEDED(hr), "AddStream failed");

        //
        // Setup the input media type
        //

        IMFMediaType *InputAudioType;
        MFCreateMediaType( &InputAudioType );
        InputAudioType->SetGUID( MF_MT_MAJOR_TYPE, MFMediaType_Audio );
        InputAudioType->SetGUID( MF_MT_SUBTYPE, MFAudioFormat_PCM );
        InputAudioType->SetUINT32( MF_MT_AUDIO_BITS_PER_SAMPLE, BitsPerSample );
        InputAudioType->SetUINT32( MF_MT_AUDIO_SAMPLES_PER_SECOND, SamplesPerSecond );
        InputAudioType->SetUINT32( MF_MT_AUDIO_NUM_CHANNELS, ChannelCount );

        hr = _Writer->SetInputMediaType( AudioStreamIndex, InputAudioType, NULL );
        PersistentAssert(SUCCEEDED(hr), "SetInputMediaType failed");

        _AudioCapture.StartCapture(this, AudioDeviceIndex);
    }

    hr = _Writer->BeginWriting();
    PersistentAssert(SUCCEEDED(hr), "BeginWriting failed");

    
    hr = MFCreateSample(&_Sample);
    PersistentAssert(SUCCEEDED(hr), "MFCreateSample failed");

    hr = MFCreateMemoryBuffer(RawBufferSize, &_Buffer);
    PersistentAssert(SUCCEEDED(hr), "MFCreateMemoryBuffer failed");
    _Buffer->SetCurrentLength(RawBufferSize);
    _Sample->AddBuffer(_Buffer);

    return Result;
}
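
The InitMediaType helper is not shown in this example. A plausible sketch, inferred from its call sites above and from the attributes the other examples set (note that the input call passes RawBufferSize where the output call passes BitRate):

void InitMediaType(IMFMediaType *Type, const GUID &Format, UINT BitRate, UINT Width, UINT Height, UINT FrameRate)
{
    Type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    Type->SetGUID(MF_MT_SUBTYPE, Format);
    Type->SetUINT32(MF_MT_AVG_BITRATE, BitRate);
    Type->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    MFSetAttributeSize(Type, MF_MT_FRAME_SIZE, Width, Height);
    MFSetAttributeRatio(Type, MF_MT_FRAME_RATE, FrameRate, 1);
    MFSetAttributeRatio(Type, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
}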
Example no. 6
	video_writer::video_writer(
		std::wstring& target_path, IMFMediaTypePtr& audio_media_type, ID3D11DeviceContext2Ptr& context, ID3D11Texture2DPtr& texture
		/*, unsigned int width, unsigned int height*/) : target_path_(target_path), audio_media_type_(audio_media_type), context_(context), texture_(texture)
	{
		D3D11_TEXTURE2D_DESC desc = {};
		texture->GetDesc(&desc);
		width_ = desc.Width;
		height_ = desc.Height;

		const unsigned int WIDTH = width_;
		const unsigned int HEIGHT = height_;
		const unsigned int BITRATE = 3000000;
		const unsigned int ASPECT_NUM = 1;
		const unsigned int ASPECT_DENOM = 1;
		const unsigned long  BPP_IN = 32;
		const unsigned long cbMaxLength = WIDTH * HEIGHT * BPP_IN / 8;
		const unsigned int ONE_SECOND = RATE_NUM / RATE_DENOM;
		const unsigned int FRAME_NUM = 10 * ONE_SECOND;

		samples_per_second = 44100;
		average_bytes_per_second = 24000;
		channel_count = 2;
		bits_per_sample = 16;

		// Create the sink writer from a byte stream for the output file

		CHK(MFCreateFile(MF_FILE_ACCESSMODE::MF_ACCESSMODE_WRITE, MF_FILE_OPENMODE::MF_OPENMODE_DELETE_IF_EXIST, MF_FILE_FLAGS::MF_FILEFLAGS_NONE, target_path.c_str(), &byte_stream_));

		CHK(MFCreateAttributes(&attr_, 10));
		CHK(attr_->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, true));
		CHK(attr_->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, false));
		CHK(attr_->SetUINT32(MF_SINK_WRITER_DISABLE_THROTTLING, true));




		IMFSinkWriterPtr sinkWriter;

		CHK(MFCreateSinkWriterFromURL(L".mp4", byte_stream_.Get(), attr_.Get(), &sinkWriter));
		CHK(sinkWriter.As(&sink_writer_));
		//CHK(MFCreateSinkWriterFromURL(L".mp4", byte_stream_.Get(), attr_.Get(), &sink_writer_));



		//
		// Set up the output media types
		//

		// Video

		CHK(MFCreateMediaType(&media_type_out_));
		CHK(media_type_out_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
		CHK(media_type_out_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
		CHK(media_type_out_->SetUINT32(MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Main));
		//CHK(media_type_out_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32));   
		CHK(media_type_out_->SetUINT32(MF_MT_AVG_BITRATE, BITRATE));
		CHK(media_type_out_->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
		CHK(MFSetAttributeSize(media_type_out_.Get(), MF_MT_FRAME_SIZE, WIDTH, HEIGHT));
		CHK(MFSetAttributeRatio(media_type_out_.Get(), MF_MT_FRAME_RATE, RATE_NUM, RATE_DENOM));
		CHK(MFSetAttributeRatio(media_type_out_.Get(), MF_MT_PIXEL_ASPECT_RATIO, ASPECT_NUM, ASPECT_DENOM));

		CHK(sink_writer_->AddStream(media_type_out_.Get(), &stream_index_));




		IMFTransformPtr mft;
		//IMFRateSupportPtr ptr;

		//CHK(sink_writer_->GetServiceForStream(stream_index_, MF_RATE_CONTROL_SERVICE, __uuidof(IMFRateSupport), &ptr));

		// Audio

		CHK(MFCreateMediaType(&media_type_out_audio_));
		CHK(media_type_out_audio_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
		CHK(media_type_out_audio_->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_AAC));
		CHK(media_type_out_audio_->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, samples_per_second));
		CHK(media_type_out_audio_->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bits_per_sample));
		CHK(media_type_out_audio_->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, channel_count));
		CHK(media_type_out_audio_->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, average_bytes_per_second));
		CHK(media_type_out_audio_->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, 1));
		CHK(sink_writer_->AddStream(media_type_out_audio_.Get(), &stream_index_audio_));

		//
		// Set up the input media types
		//

		// Video

		CHK(MFCreateMediaType(&media_type_in_));
		CHK(media_type_in_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
		CHK(media_type_in_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32));
		CHK(media_type_in_->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
		CHK(MFSetAttributeSize(media_type_in_.Get(), MF_MT_FRAME_SIZE, WIDTH, HEIGHT));
		CHK(MFSetAttributeRatio(media_type_in_.Get(), MF_MT_FRAME_RATE, RATE_NUM, RATE_DENOM));
		CHK(MFSetAttributeRatio(media_type_in_.Get(), MF_MT_PIXEL_ASPECT_RATIO, ASPECT_NUM, ASPECT_DENOM));

		// Set up the encoder
		//prop_variant prop;
		//IPropertyStorePtr pPropertyStore;
		//IMFAttributesPtr pEncoderParameters;

		//CHK(PSCreateMemoryPropertyStore(__uuidof(IPropertyStore), (void**) &pPropertyStore));

		//prop.value().vt = VT_BOOL;
		//prop.value().boolVal = VARIANT_FALSE;
		//CHK(pPropertyStore->SetValue(MFPKEY_VBRENABLED, prop.value()));
		//prop.value().vt = VT_I4;
		//prop.value().lVal = 100;
		//CHK(pPropertyStore->SetValue(MFPKEY_VBRQUALITY, prop.value()));

		//CHK(MFCreateAttributes(&pEncoderParameters, 5));
		//CHK(attr_->SetUnknown(MF_SINK_WRITER_ENCODER_CONFIG, pPropertyStore.Get()));

		CHK(sink_writer_->SetInputMediaType(stream_index_, media_type_in_.Get(), nullptr /*pEncoderParameters.Get()*/));

		// Audio

		CHK(MFCreateMediaType(&media_type_in_audio_));
		//CHK(media_type_in_audio_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
		//CHK(media_type_in_audio_->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM));
		//CHK(media_type_in_audio_->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bits_per_sample));
		//CHK(media_type_in_audio_->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, samples_per_second));
		//CHK(media_type_in_audio_->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, channel_count));
		audio_media_type_->CopyAllItems(media_type_in_audio_.Get());
		CHK(sink_writer_->SetInputMediaType(stream_index_audio_, media_type_in_audio_.Get(), NULL));

		// Check whether a hardware encoder is being used

		{
			IMFTransformPtr transform;
			ICodecAPIPtr codec;
			GUID guid;

			CHK(sink_writer_->GetServiceForStream(stream_index_, GUID_NULL, IID_IMFTransform, &transform));

			IMFAttributesPtr attributes;
			CHK(transform->GetAttributes(&attributes));
			UINT32 l = 0;
			std::wstring str;
			bool use_hw = false;
			HRESULT hr = attributes->GetStringLength(MFT_ENUM_HARDWARE_URL_Attribute, &l);
			if (SUCCEEDED(hr))
			{
				str.resize(l + 1);	// resize, not reserve: GetString writes l characters plus a null terminator
				hr = attributes->GetString(MFT_ENUM_HARDWARE_URL_Attribute, &str[0], l + 1, &l);
				if (SUCCEEDED(hr)){
					str.resize(l);	// drop the trailing null
					use_hw = true;
					DOUT2(L"/////// HARDWARE ENCODE IS USED. ////\n");
				}
			}
		}

		//
		// Start writing
		//

		CHK(sink_writer_->BeginWriting());

		//
		// Create the media sample
		//

		CHK(MFCreateSample(&sample_));
		video_sample_time_ = 0;
		CHK(sample_->SetSampleDuration(hnsSampleDuration));

		//
		// Create the media buffer and add it to the media sample
		//

		CHK(MFCreateAlignedMemoryBuffer(cbMaxLength, MF_16_BYTE_ALIGNMENT, &buffer_)); // 16-byte alignment
		CHK(buffer_->SetCurrentLength(cbMaxLength));
		CHK(sample_->AddBuffer(buffer_.Get()));

		//
		// Map the texture for reading
		sf::map<> map(context,texture, 0, D3D11_MAP_READ, 0);
		copy_image_.reset(new video_writer::copy_image(width_, height_, map.row_pitch()));
		copy_func_ = (copy_func_t)copy_image_->getCode();

	}
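
With the sample_ and buffer_ prepared in the constructor, writing one video frame then comes down to filling the buffer, time-stamping the sample and calling WriteSample. A sketch of such a write_frame method follows; the method itself and its src/src_row_pitch parameters are illustrative (the original class feeds the buffer through its generated copy routine instead), and CHK is assumed to throw on failure.

	void video_writer::write_frame(const BYTE* src, LONG src_row_pitch)
	{
		BYTE* dst = nullptr;
		CHK(buffer_->Lock(&dst, nullptr, nullptr));
		// RGB32 is 4 bytes per pixel; MFCopyImage copes with differing row pitches.
		CHK(MFCopyImage(dst, width_ * 4, src, src_row_pitch, width_ * 4, height_));
		CHK(buffer_->Unlock());

		CHK(sample_->SetSampleTime(video_sample_time_));
		CHK(sample_->SetSampleDuration(hnsSampleDuration));
		CHK(sink_writer_->WriteSample(stream_index_, sample_.Get()));

		video_sample_time_ += hnsSampleDuration;	// advance in 100-ns units
	}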