Пример #1
0
/// <summary>
/// <c>wACMFormatSuggest</c> — traced wrapper around the ACM
/// <c>acmFormatSuggest</c> call: logs the request, forwards it, and on
/// failure logs the error and optionally breaks into the debugger.
/// </summary>
/// <remarks>
/// The destination format description is rebuilt after the call because
/// <c>acmFormatSuggest</c> fills in <c>pwfxDst</c>.
/// </remarks>
/// <param name="had">ACM driver handle</param>
/// <param name="pwfxSrc">source wave format</param>
/// <param name="pwfxDst">destination wave format (filled in by ACM)</param>
/// <param name="cbwfxDst">size of the destination format buffer</param>
/// <param name="fdwSuggest">suggestion flags</param>
/// <returns>HRESULT __stdcall</returns>
HRESULT __stdcall 
wACMFormatSuggest(HACMDRIVER had, const PWAVEFORMATEX pwfxSrc, const PWAVEFORMATEX pwfxDst, 
				  ULONG cbwfxDst, ULONG fdwSuggest)
{
	char		dstText[128];
	char		srcText[128];

	// Describe both formats for the trace log.
	PSTR pszDst = GetWaveFormat(dstText, pwfxDst);
	PSTR pszSrc = GetWaveFormat(srcText, pwfxSrc);
	InternalFunctionSpew(
		"GameOS_DirectSound",
		"acmFormatSuggest(0x%x, %s, %s, 0x%x, 0x%x)",
		had,
		pszSrc,
		pszDst,
		cbwfxDst,
		fdwSuggest);

	MMRESULT mmr = acmFormatSuggest(had, pwfxSrc, pwfxDst, cbwfxDst, fdwSuggest);
	if (!MMFAILED(mmr))
		return mmr;

	// Failure path: refresh the format descriptions and report the error.
	pszDst = GetWaveFormat(dstText, pwfxDst);
	pszSrc = GetWaveFormat(srcText, pwfxSrc);
	PSTR pszError = ErrorNumberToMessage(mmr);
	if ( InternalFunctionPause(
		"FAILED (0x%x - %s) - acmFormatSuggest(0x%x, %s, %s, 0x%x, 0x%x)",
		mmr,
		pszError,
		had,
		pszSrc,
		pszDst,
		cbwfxDst,
		fdwSuggest) )
		ENTER_DEBUGGER;

	return mmr;
}
    void KinectAudioSource::Start()
    {

        DMO_MEDIA_TYPE mt = {0};

        ULONG cbProduced = 0;

        memset( &outputBufferStruct_, 0, sizeof(outputBufferStruct_) );
        outputBufferStruct_.pBuffer = &mediaBuffer_;

        // Set DMO output format
        CHECKHR( MoInitMediaType(&mt, sizeof(WAVEFORMATEX)) );

        mt.majortype = MEDIATYPE_Audio;
        mt.subtype = MEDIASUBTYPE_PCM;
        mt.lSampleSize = 0;
        mt.bFixedSizeSamples = TRUE;
        mt.bTemporalCompression = FALSE;
        mt.formattype = FORMAT_WaveFormatEx;
        memcpy(mt.pbFormat, &GetWaveFormat(), sizeof(WAVEFORMATEX));
    
        CHECKHR( mediaObject_->SetOutputType(0, &mt, 0) );
        MoFreeMediaType(&mt);

        // Allocate streaming resources. This step is optional. If it is not called here, it
        // will be called when first time ProcessInput() is called. However, if you want to 
        // get the actual frame size being used, it should be called explicitly here.
        CHECKHR( mediaObject_->AllocateStreamingResources() );
    
        // Get actually frame size being used in the DMO. (optional, do as you need)
        int iFrameSize;
        PROPVARIANT pvFrameSize;
        PropVariantInit(&pvFrameSize);
        CHECKHR(propertyStore_->GetValue(MFPKEY_WMAAECMA_FEATR_FRAME_SIZE, &pvFrameSize));
        iFrameSize = pvFrameSize.lVal;
        PropVariantClear(&pvFrameSize);

        // allocate output buffer
        mediaBuffer_.SetBufferLength( GetWaveFormat().nSamplesPerSec * GetWaveFormat().nBlockAlign );
    }
Пример #3
0
		/**
			@brief Open the Kinect audio stream and configure the AEC DMO.
			@param	mode	AEC_SYSTEM_MODE; adaptive-array modes and
							MODE_NOT_SET are rejected (method returns without
							opening anything)
		*/
		void AudioStream::Open(AEC_SYSTEM_MODE mode /*= OPTIBEAM_ARRAY_ONLY */)
		{
#ifndef USES_KINECT_AUDIOSTREAM
			// Audio streaming disabled at compile time.
			return;
#else
			// Unsupported system modes: bail out before touching the sensor.
			if((mode == ADAPTIVE_ARRAY_ONLY) || (mode == ADAPTIVE_ARRAY_AND_AEC) || (mode == MODE_NOT_SET)){
				return;
			}
			CoInitialize(NULL);
			// NOTE(review): isOpen is set before any step below has succeeded,
			// so it reports true even after a failed open — confirm whether
			// callers rely on this before moving it.
			isOpen = true;
			HRESULT ret = sensor_->NuiGetAudioSource(&audioBeam_);
			if(FAILED(ret)){
				return;
			}
			ret = audioBeam_->QueryInterface(IID_IMediaObject, (void**)&mediaObject_);
			if(FAILED(ret)){
				return;
			}
			ret = audioBeam_->QueryInterface(IID_IPropertyStore, (void**)&propertyStore_);
			if(FAILED(ret)){
				return;
			}

			// Select the echo-cancellation system mode on the DMO.
			PROPVARIANT pvSysMode;
			PropVariantInit(&pvSysMode);
			pvSysMode.vt = VT_I4;
			pvSysMode.lVal = (LONG)mode;
			ret = propertyStore_->SetValue(MFPKEY_WMAAECMA_SYSTEM_MODE, pvSysMode);
			PropVariantClear(&pvSysMode);	// clear even on failure (previously leaked)
			if(FAILED(ret)){
				return;
			}

			// Set DMO output format: fixed-size PCM described by GetWaveFormat().
			DMO_MEDIA_TYPE mt = {0};
			ret = MoInitMediaType(&mt, sizeof(WAVEFORMATEX));
			if(FAILED(ret)){
				return;
			}
			mt.majortype = MEDIATYPE_Audio;
			mt.subtype = MEDIASUBTYPE_PCM;
			mt.lSampleSize = 0;
			mt.bFixedSizeSamples = TRUE;
			mt.bTemporalCompression = FALSE;
			mt.formattype = FORMAT_WaveFormatEx;	
			memcpy(mt.pbFormat, &GetWaveFormat(), sizeof(WAVEFORMATEX));

			ret = mediaObject_->SetOutputType(0, &mt, 0); 
			MoFreeMediaType(&mt);	// free even on failure (previously leaked)
			if(FAILED(ret)){
				return;
			}

			// Allocate streaming resources. This step is optional. If it is not called here, it
			// will be called when first time ProcessInput() is called. However, if you want to 
			// get the actual frame size being used, it should be called explicitly here.
			ret = mediaObject_->AllocateStreamingResources();
			if(FAILED(ret)){
				return;
			}

			// Get the actual frame size being used in the DMO. (optional, do as you need)
			int iFrameSize;
			PROPVARIANT pvFrameSize;
			PropVariantInit(&pvFrameSize);
			// BUG FIX: GetValue's result was previously discarded and the
			// stale value of ret was tested instead.
			ret = propertyStore_->GetValue(MFPKEY_WMAAECMA_FEATR_FRAME_SIZE, &pvFrameSize);
			if(FAILED(ret)){
				PropVariantClear(&pvFrameSize);
				return;
			}
			iFrameSize = pvFrameSize.lVal;
			PropVariantClear(&pvFrameSize);
			(void)iFrameSize;	// informational only

			// NOTE(review): this overwrites the audioBeam_ obtained from
			// NuiGetAudioSource without releasing it — possible COM reference
			// leak; behavior kept as-is, confirm before changing.
			ret = mediaObject_->QueryInterface(IID_INuiAudioBeam, (void**)&audioBeam_);
			if(FAILED(ret)){
				return;
			}

			// Output buffer sized for one second of audio
			// (nSamplesPerSec * nBlockAlign = bytes per second).
			memset(&outputBufferStruct_, 0, sizeof(outputBufferStruct_));
			outputBufferStruct_.pBuffer = &outputMediaBuffer_;
			outputMediaBuffer_.SetBufferLength(GetWaveFormat().nSamplesPerSec * GetWaveFormat().nBlockAlign);
#endif
		}
Пример #4
0
/// <summary>
/// <c>wACMStreamOpen</c> — traced wrapper around the ACM
/// <c>acmStreamOpen</c> call. Logs the request, then calls the ACM twice:
/// first with the flags hard-coded to 1u, then with the caller's
/// <c>fdwOpen</c>; each failure is logged and may break into the debugger.
/// </summary>
/// <remarks>
/// NOTE(review): the first call's flag value 1u is presumably
/// ACM_STREAMOPENF_QUERY (a query-only pass before the real open) — confirm
/// this is intentional and not a decompilation artifact. Its failure log
/// prints <c>fdwOpen</c> rather than the 1u that was actually passed.
/// </remarks>
/// <param name="phas">receives the opened stream handle</param>
/// <param name="had">ACM driver handle</param>
/// <param name="pwfxSrc">source wave format</param>
/// <param name="pwfxDst">destination wave format</param>
/// <param name="pwfltr">optional wave filter</param>
/// <param name="dwCallback">callback for the ACM stream</param>
/// <param name="dwInstance">user data passed to the callback</param>
/// <param name="fdwOpen">open flags (used only by the second call)</param>
/// <returns>HRESULT __stdcall</returns>
HRESULT __stdcall 
wACMStreamOpen(PHACMSTREAM phas, HACMDRIVER had, const PWAVEFORMATEX pwfxSrc, 
			   const PWAVEFORMATEX pwfxDst, PWAVEFILTER pwfltr, ULONG_PTR dwCallback, 
			   ULONG_PTR dwInstance, ULONG fdwOpen)
{
	PSTR		pszwfDst;
	PSTR		pszwfSrc;
	PSTR		pszErrorMessage;
	MMRESULT	hResult;
	char		BUffer2[128];
	char		Buffer[128];

	// Describe both wave formats for the trace log.
	pszwfDst = GetWaveFormat(Buffer, pwfxDst);
	pszwfSrc = GetWaveFormat(BUffer2, pwfxSrc);
	InternalFunctionSpew(
		"GameOS_DirectSound",
		"acmStreamOpen(0x%x, 0x%x, %s, %s, 0x%x,0x%x, 0x%x, 0x%x)",
		phas,
		had,
		pszwfSrc,
		pszwfDst,
		pwfltr,
		dwCallback,
		dwInstance,
		fdwOpen);

	// First pass: flags forced to 1u (see remarks — presumably a query pass).
	hResult = acmStreamOpen(phas, had, pwfxSrc, pwfxDst, pwfltr, dwCallback, dwInstance, 1u);
	if (MMFAILED(hResult))
	{
		// Regenerate the descriptions and report the failure.
		pszwfDst = GetWaveFormat(Buffer, pwfxDst);
		pszwfSrc = GetWaveFormat(BUffer2, pwfxSrc);
		pszErrorMessage = ErrorNumberToMessage(hResult);
		if ( InternalFunctionPause(
			"FAILED (0x%x - %s) - acmStreamOpen(0x%x, 0x%x, %s, %s, 0x%x,0x%x, 0x%x, 0x%x)",
			hResult,
			pszErrorMessage,
			phas,
			had,
			pszwfSrc,
			pszwfDst,
			pwfltr,
			dwCallback,
			dwInstance,
			fdwOpen) )
			ENTER_DEBUGGER;
	}
	// Second pass: the real open with the caller's flags; its result is what
	// the function returns.
	hResult = acmStreamOpen(phas, had, pwfxSrc, pwfxDst, pwfltr, dwCallback, dwInstance, fdwOpen);
	if (MMFAILED(hResult))
	{
		pszwfDst = GetWaveFormat(Buffer, pwfxDst);
		pszwfSrc = GetWaveFormat(BUffer2, pwfxSrc);
		pszErrorMessage = ErrorNumberToMessage(hResult);
		if ( InternalFunctionPause(
			"FAILED (0x%x - %s) - acmStreamOpen(0x%x, 0x%x, %s, %s, 0x%x,0x%x, 0x%x, 0x%x)",
			hResult,
			pszErrorMessage,
			phas,
			had,
			pszwfSrc,
			pszwfDst,
			pwfltr,
			dwCallback,
			dwInstance,
			fdwOpen) )
			ENTER_DEBUGGER;
	}

	return hResult;
}