Code Example #1
File: aflDirectShow.cpp  Project: mofon001/AflLib
void MediaSampler::initDevice()
{
	DWORD outputCount = 0;
	m_wmReader->GetOutputCount(&outputCount );

	m_videoNumber = -1;
	m_audioNumber = -1;
	INT i;
	for(i=0;i<(INT)outputCount;i++)
	{
		IWMOutputMediaProps* props = NULL;
		m_wmReader->GetOutputProps( i, &props);
		if(props)
		{
			// query the required size first, then fetch the full media type
			ULONG mediaSize = 0;
			props->GetMediaType( NULL, &mediaSize );
			WM_MEDIA_TYPE* mediaType = (WM_MEDIA_TYPE*)new BYTE[mediaSize];
			props->GetMediaType( mediaType, &mediaSize );
			

			if(mediaType->majortype == WMMEDIATYPE_Audio)
			{
				m_audioNumber = i;
				CopyMemory(&m_waveFormat,mediaType->pbFormat,sizeof(WAVEFORMATEX));
			}
			else if(mediaType->majortype == WMMEDIATYPE_Video)
			{
				WMVIDEOINFOHEADER* wmvVideoInfo = (WMVIDEOINFOHEADER*)mediaType->pbFormat;
				m_videoNumber = i;
				m_imageWidth = wmvVideoInfo->bmiHeader.biWidth;
				m_imageHeight = wmvVideoInfo->bmiHeader.biHeight;
				onImageInit();
			}
			delete[] (BYTE*)mediaType;
			props->Release();
		}
	}

	AutoCom<IWMProfile> readerProfile;
	m_wmReader->QueryInterface( IID_IWMProfile, (void **)readerProfile );
}
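
All five examples on this page rely on the same two-call GetMediaType idiom: the first call with a NULL buffer reports the required size, the second call fills a buffer of that size. A minimal sketch of that idiom factored into a standalone helper (QueryMediaType is a hypothetical name, not part of the Windows Media Format SDK):

// Sketch only: two-call sizing idiom for IWMOutputMediaProps::GetMediaType.
// The caller frees the result with delete[] (BYTE*)mediaType.
WM_MEDIA_TYPE* QueryMediaType(IWMOutputMediaProps* props)
{
	ULONG mediaSize = 0;
	if (FAILED(props->GetMediaType(NULL, &mediaSize)) || mediaSize == 0)
		return NULL;

	WM_MEDIA_TYPE* mediaType = (WM_MEDIA_TYPE*)new BYTE[mediaSize];
	if (FAILED(props->GetMediaType(mediaType, &mediaSize)))
	{
		delete[] (BYTE*)mediaType;
		return NULL;
	}
	return mediaType;
}
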
Code Example #2
void CVideoInstance::StartCruncher(CVideoInstance* aInstance)
{
    aInstance->myThreadIsDone = false;
    ::CoInitialize(nullptr);
    HRESULT hr;
    hr = WMCreateSyncReader(NULL, 0, &aInstance->mySyncReader);
    if (hr != S_OK)
    {
        INFO_PRINT("%s", "Error: Could not create WMCreateSyncReader, video can not be played!");
        return;
    }
    //Step 2: Open the file
    //note: When the synchronous reader opens the file, it assigns an output number to each stream

    hr = aInstance->mySyncReader->Open(aInstance->myFileName);
    if (hr != S_OK)
    {
        INFO_PRINT("%s%ls", "Error: Could not open file, video can not be played! ", aInstance->myFileName);
        return;
    }


    aInstance->mySyncReader->GetOutputCount(&aInstance->mytheOutputsCount);

    int videoIndex = 0;
    DWORD theSize;
    IWMOutputMediaProps* videoOutputProps = nullptr;
    for (unsigned int i = 0; i<aInstance->mytheOutputsCount; i++)
    {
        SAFE_ARRAYDELETE(aInstance->mytheMediaType);
        SAFE_RELEASE(videoOutputProps); // release the props returned by the previous iteration
        aInstance->mySyncReader->GetOutputProps(i, &videoOutputProps);
        hr = videoOutputProps->GetMediaType(NULL, &theSize);
        aInstance->mytheMediaType = (WM_MEDIA_TYPE*) new BYTE[theSize];
        hr = videoOutputProps->GetMediaType(aInstance->mytheMediaType, &theSize);

        if (FAILED(hr))
        {
            INFO_PRINT("%s%s", "Error: Could not query for the space needed for media type, video can not be played!", aInstance->myFileName);
            return;
            //_tcscpy( tszErrMsg, _T( "Could not query for the space needed for media type" ) );
            //break;
        }

        if (WMMEDIATYPE_Video == aInstance->mytheMediaType->majortype)
        {
            videoIndex = i;
            aInstance->mySyncReader->GetStreamNumberForOutput(videoIndex, (WORD*)&aInstance->myVideoStreamNumber);

            if (aInstance->mytheMediaType->formattype == WMFORMAT_VideoInfo)
            {
                //setting the bitmapInfoHeader by reading the WmvInfoHeader
                memcpy(&aInstance->myTheVideoInfoHeader, aInstance->mytheMediaType->pbFormat, sizeof(WMVIDEOINFOHEADER));
                aInstance->myBitmapInfoHdr = aInstance->myTheVideoInfoHeader.bmiHeader;

                aInstance->myWidth = aInstance->myBitmapInfoHdr.biWidth;
                aInstance->myHeight = aInstance->myBitmapInfoHdr.biHeight;

                WORD bitCount = aInstance->myBitmapInfoHdr.biBitCount;
                aInstance->myBytes_per_pixel = bitCount / 8;


                unsigned int rowSize = (bitCount * aInstance->myBitmapInfoHdr.biWidth + 7) / 8; // +7 to round up
                if (rowSize % 4)
                    rowSize += (4 - (rowSize % 4)); // Pad to DWORD Boundary

                aInstance->myRow_increment = rowSize;// theInfoHeader.biWidth * bytes_per_pixel_;

            }


        }
        if (WMMEDIATYPE_Audio == aInstance->mytheMediaType->majortype)
        {
            aInstance->myAudioOutputNumber = i;
            aInstance->mySyncReader->GetStreamNumberForOutput(aInstance->myAudioOutputNumber, (WORD*)&aInstance->m_iAudioStreamNumber);
        }

    }
    //release the memory
    SAFE_RELEASE(videoOutputProps);
    videoOutputProps = nullptr;
    SAFE_ARRAYDELETE(aInstance->mytheMediaType);

    // WMT_TYPE_BOOL settings expect a 4-byte BOOL value, not the string "TRUE"
    BOOL fSampleDurations = TRUE;
    hr = aInstance->mySyncReader->SetOutputSetting(videoIndex, g_wszVideoSampleDurations, WMT_TYPE_BOOL, (BYTE*)&fSampleDurations, sizeof(fSampleDurations));
    if (FAILED(hr))
    {
        INFO_PRINT("%s%s", "Error: Could not set Video Ouptut Sample durations, video can not be played!", aInstance->myFileName);
        return;
        //AfxMessageBox("could not set Video Ouptut Sample durations");
    }


    hr = aInstance->mySyncReader->SetReadStreamSamples(aInstance->myVideoStreamNumber, FALSE);
    if (FAILED(hr))
    {
        INFO_PRINT("%s%s", "Error: Could not set Video Stream to give Uncompressed Samples, video can not be played!", aInstance->myFileName);
        return;
        //AfxMessageBox("could not set Video Stream to give Uncompressed Samples");
    }

    IWMMetadataEditor *pEditor;

    hr = WMCreateEditor(&pEditor);
    if (hr == S_OK)
    {
        pEditor->Open(aInstance->myFileName);
        IWMHeaderInfo3* pHdrInfo = NULL;
        hr = pEditor->QueryInterface(IID_IWMHeaderInfo3, (void**)&pHdrInfo);
        if (SUCCEEDED(hr))
        {
            WORD wStream = 0;
            WMT_ATTR_DATATYPE dType;
            QWORD dwDuration;
            WORD wSize = 0;
            // first call with a NULL buffer just returns the attribute size
            hr = pHdrInfo->GetAttributeByName(&wStream, L"Duration", &dType, (BYTE*)NULL, &wSize);

            BYTE* pValue = nullptr;
            if (wSize > 0)
            {
                pValue = new BYTE[wSize];
                hr = pHdrInfo->GetAttributeByName(&wStream, L"Duration", &dType, pValue, &wSize);
                dwDuration = *((QWORD*)pValue);
                // the duration attribute is in 100-nanosecond units
                aInstance->m_qwTotalTimeInSeconds = (dwDuration * 100) / 1000000000;
                SAFE_ARRAYDELETE(pValue);
            }
        }
        SAFE_RELEASE(pHdrInfo);
        SAFE_RELEASE(pEditor);
    }
    aInstance->myOpenSuccesfully = true;
    StartUpdateLoop(aInstance);
}
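
Example #2 sizes a buffer for the Duration attribute and copies it out; the later examples read the same WM/Duration attribute straight into a QWORD, which avoids the allocation. A condensed sketch of that direct variant, assuming pHdrInfo is a valid IWMHeaderInfo (or IWMHeaderInfo3) pointer obtained as above:

// Sketch only: WM/Duration is a QWORD in 100-nanosecond units,
// so it can be read in place without a size query.
WORD wStream = 0;                       // 0 = file-level attribute
WMT_ATTR_DATATYPE dType;
QWORD qwDuration = 0;
WORD cbLength = sizeof(qwDuration);
HRESULT hr = pHdrInfo->GetAttributeByName(&wStream, g_wszWMDuration, &dType,
                                          (BYTE*)&qwDuration, &cbLength);
if (SUCCEEDED(hr))
{
    QWORD seconds = qwDuration / 10000000;  // same result as (qwDuration * 100) / 1000000000
}
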
Code Example #3
File: App.cpp  Project: rkabir/Cinder
bool WMA::PostOpen()
{
    WaitForSingleObject( m_hRespondEvent, INFINITE );
    if( FAILED(m_hrCallbackResult) ) {
        return false;
    }

    DWORD nOutputCount;
    HRESULT hr = m_pReader->GetOutputCount( &nOutputCount );

    if( FAILED(hr) ) {
        return false;
    }

    //ensure this is audio only
    if( nOutputCount != 1 ) {
        return false;
    }

    DWORD nOutputFormatCount;
    hr = m_pReader->GetOutputFormatCount(0, &nOutputFormatCount);
    if(FAILED(hr)) {
        return false;
    }

    DWORD nFormatSize = 0;
    BYTE* pBuf = 0;
    IWMOutputMediaProps* pProps = 0;
    for( uint32_t j = 0; j < nOutputFormatCount; j++ ) {
        hr = m_pReader->GetOutputFormat( 0, j, &pProps  );
        if(FAILED(hr)) {
            continue;
        }

        //get required size of the media type structure
        DWORD nNewSize = 0;
        hr = pProps->GetMediaType( NULL, & nNewSize );
        if( FAILED(hr) ) {
            continue;
        }

        if(nNewSize > nFormatSize)
        {
            if( pBuf ) {
                delete [] pBuf;
                pBuf = 0;
            }

            nFormatSize = nNewSize;
            pBuf = new BYTE[nFormatSize];
        }

        WM_MEDIA_TYPE* pType = (WM_MEDIA_TYPE*) pBuf;
        hr = pProps->GetMediaType(pType, & nFormatSize);
        if(FAILED(hr))
            continue;


        if(pType->formattype == WMFORMAT_WaveFormatEx)
        {
            //copy at most sizeof(WAVEFORMATEX); cbFormat can be larger for extended formats
            memcpy( &m_WaveFormatEx, pType->pbFormat, pType->cbFormat < sizeof(WAVEFORMATEX) ? pType->cbFormat : sizeof(WAVEFORMATEX) );
            if((m_WaveFormatEx.nChannels == 2) && (m_WaveFormatEx.wBitsPerSample == 16) && (m_WaveFormatEx.nSamplesPerSec == 44100)) {
                break;
            }
        }
        if(pProps) {
            pProps->Release();
            pProps = 0;
        }


    }
    if(pBuf) {
        delete [] pBuf;
        pBuf = 0;
    }

    hr = m_pReader->SetOutputProps( 0, pProps );
    if(FAILED(hr)) {
        return false;
    }

    if(pProps) {
        pProps->Release();
        pProps = 0;
    }


    //tells it to read as fast as possible
    //hr = m_pReaderAdvanced->SetUserProvidedClock(true);

    if(FAILED(hr)) {
        return false;
    }

    WORD wStreamNum = 0;
    WMT_ATTR_DATATYPE Type;
    QWORD dwDuration = 0;
    WORD wLength = 8;
    hr = m_pHeaderInfo->GetAttributeByName(&wStreamNum, g_wszWMDuration, &Type, (BYTE*)&dwDuration, &wLength);
    if(FAILED(hr)) {
        return false;
    }

    //divide by 10 million to get seconds
    double fTime = double(dwDuration) / 10000000.0;

    m_nStreamSize = fTime * m_WaveFormatEx.nAvgBytesPerSec * 1.5;

    //create a default 1.5 second scratch buffer for decoded stream data
    m_pBuffer = new uint8_t[size_t( m_WaveFormatEx.nAvgBytesPerSec * 1.5 )];
    m_nBufferSize = m_WaveFormatEx.nAvgBytesPerSec * 1.5;

    m_nTargetPtr = 0;
    m_nWritePtr = 0;

    m_bOpen = true;

    return true;
}
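
The enumeration loop above (and the nearly identical one in example #4) boils down to "find the first output format whose WAVEFORMATEX matches the desired PCM layout, then hand it to SetOutputProps". A condensed sketch of that logic as a hypothetical helper, written against IWMSyncReader as in example #4 (the asynchronous IWMReader used above exposes the same GetOutputFormatCount, GetOutputFormat, and SetOutputProps calls); the name and error handling are illustrative:

// Sketch only: returns the props of the first output format matching the
// requested PCM layout, or NULL. The caller passes the result to
// SetOutputProps( output, props ) and then calls props->Release().
IWMOutputMediaProps* FindPcmFormat( IWMSyncReader* reader, DWORD output,
                                    WORD channels, WORD bitsPerSample, DWORD sampleRate )
{
    DWORD formatCount = 0;
    if( FAILED( reader->GetOutputFormatCount( output, &formatCount ) ) )
        return NULL;

    for( DWORD i = 0; i < formatCount; i++ ) {
        IWMOutputMediaProps* props = NULL;
        if( FAILED( reader->GetOutputFormat( output, i, &props ) ) )
            continue;

        ULONG size = 0;
        props->GetMediaType( NULL, &size );
        WM_MEDIA_TYPE* type = (WM_MEDIA_TYPE*)new BYTE[size];
        bool match = false;
        if( SUCCEEDED( props->GetMediaType( type, &size ) ) &&
            type->formattype == WMFORMAT_WaveFormatEx ) {
            const WAVEFORMATEX* wfx = (const WAVEFORMATEX*)type->pbFormat;
            match = ( wfx->nChannels == channels ) &&
                    ( wfx->wBitsPerSample == bitsPerSample ) &&
                    ( wfx->nSamplesPerSec == sampleRate );
        }
        delete [] (BYTE*)type;
        if( match )
            return props;
        props->Release();
    }
    return NULL;
}
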
Code Example #4
LoaderSourceFileWindowsMedia::LoaderSourceFileWindowsMedia( SourceFileWindowsMedia * source, Target * target ) 
	: Loader(), mSource( source ), mSampleOffset( 0 ), mSrcSampleRate( 0 ), mSrcChannelCount( 0 ), mSrcBitsPerSample( 0 )
{
	::HRESULT hr;

	//setup readers
	IWMSyncReader * IWMReaderP = NULL;
	hr = ::WMCreateSyncReader(0, 0, &IWMReaderP );
	if( FAILED( hr ) ) {
		throw IoExceptionFailedLoad();
	}
	mReader = msw::makeComShared<IWMSyncReader>( IWMReaderP );

	IWMHeaderInfo * IWMHeaderInfoP = NULL;
	hr = mReader->QueryInterface( IID_IWMHeaderInfo, (void**)&IWMHeaderInfoP );
	if( FAILED(hr) ) {
		throw IoExceptionFailedLoad();
	}
	mHeaderInfo = msw::makeComShared<IWMHeaderInfo>( IWMHeaderInfoP );

	//turn data into stream
	::IStream * iStreamP = NULL;
	hr = ::CreateStreamOnHGlobal( mSource->mMemHandle.get(), FALSE, &iStreamP );
	if(FAILED(hr)) {
		throw IoExceptionFailedLoad();
	}
	std::shared_ptr<::IStream> pStream = msw::makeComShared<::IStream>( iStreamP );
	
	hr = mReader->OpenStream( pStream.get() );
	if( FAILED(hr) ) {
		throw IoExceptionFailedLoad();
	}

	DWORD nOutputFormatCount;
	hr = mReader->GetOutputFormatCount(0, &nOutputFormatCount);
	if( FAILED(hr) ) {
		throw IoExceptionFailedLoad();
	}
	
	DWORD nFormatSize = 0; 
	BYTE* pBuf = 0;
	IWMOutputMediaProps* pProps = 0;
	for( uint32_t j = 0; j < nOutputFormatCount; j++ ) {
		hr = mReader->GetOutputFormat( 0, j, &pProps  );
		if(FAILED(hr)) {
			continue;
		}

		//get required size of the media type structure
		DWORD nNewSize = 0;
		hr = pProps->GetMediaType( NULL, & nNewSize );
		if( FAILED(hr) ) {
			continue;
		}

		if(nNewSize > nFormatSize)
		{
			if( pBuf ) {
				delete [] pBuf;
				pBuf = 0;
			}

			nFormatSize = nNewSize;
			pBuf = new BYTE[nFormatSize];
		}

		WM_MEDIA_TYPE* pType = (WM_MEDIA_TYPE*) pBuf;
		hr = pProps->GetMediaType( pType, &nFormatSize );
		if(FAILED(hr))
			continue;
		
		if( IsEqualIID( WMFORMAT_WaveFormatEx, pType->formattype ) )
		{
			//casting format buffer as WAVEFORMATEX
			//when formattype == WMFORMAT_WaveFormatEx, pbFormat will be
			//WAVEFORMATEX or WAVEFORMATEXTENSIBLE, both of which can be cast
			//safely as WAVEFORMATEX
			mOutputFormat = *((WAVEFORMATEX *)pType->pbFormat);

			if( ( mOutputFormat.wFormatTag == ( ( target->isFloat() ) ? WAVE_FORMAT_IEEE_FLOAT : WAVE_FORMAT_PCM ) ) &&
				( mOutputFormat.nChannels == target->getChannelCount() ) &&
				( mOutputFormat.wBitsPerSample == target->getBitsPerSample() ) &&
				( mOutputFormat.nSamplesPerSec == target->getSampleRate() ) &&
				( mOutputFormat.nBlockAlign == target->getBlockAlign() )
			) {
				mSrcSampleRate = mOutputFormat.nSamplesPerSec;
				mSrcChannelCount = mOutputFormat.nChannels;
				mSrcBitsPerSample = mOutputFormat.wBitsPerSample;
				mSrcBlockAlign = mOutputFormat.nBlockAlign;
				break;
			}
		}
		if( pProps ) {
			pProps->Release();
			pProps = 0;
		}


	}
	if( pBuf ) {
		delete [] pBuf;
		pBuf = 0;
	}

	hr = mReader->SetOutputProps( 0, pProps );
	if( FAILED( hr ) ) {
		throw IoExceptionFailedLoad();
	}

	if( pProps ) {
		pProps->Release();
		pProps = 0;
	}

	WORD wStreamNum = 0;
	WMT_ATTR_DATATYPE Type;
	QWORD dwDuration = 0;
	WORD wLength = 8;
	hr = mHeaderInfo->GetAttributeByName( &wStreamNum, g_wszWMDuration, &Type, (BYTE*)&dwDuration, &wLength );
	if( FAILED( hr ) ) {
		throw IoExceptionFailedLoad();
	}

	//divide by 10 million to get seconds
	double fTime = double( dwDuration ) / 10000000.0;

	mStreamSize = (uint32_t)( fTime * mOutputFormat.nAvgBytesPerSec * 1.5 );
	
	DWORD tempMaxBufferSize = 0;
	mMaxBufferSize = 0;
	hr = mReader->GetMaxOutputSampleSize( 0, &tempMaxBufferSize );
	mMaxBufferSize = tempMaxBufferSize;
	if( FAILED( hr ) ) {
		throw IoExceptionFailedLoad();
	}

	//set data to not be compressed
	/*hr = mReader->SetReadStreamSamples( 0, FALSE );
	if( FAILED( hr ) ) {
		throw IoExceptionFailedLoad();
	}*/
}
Code Example #5
File: audio decoder.cpp  Project: hownam/fennec
int decoder_load(unsigned long id, const string sname)
{
	HRESULT               hres;
	IWMOutputMediaProps*  ppProps;
	WM_MEDIA_TYPE*        wmt = 0;
	DWORD                 wmpz = 0;
	WAVEFORMATEX          wfx;
	DWORD                 i, outcount = 0;
	IWMHeaderInfo*        wminfo;
	WORD                  wmistream = 0;
    WMT_ATTR_DATATYPE     Type;
	WORD                  wmilen = 0;

	CoInitialize(0);

	hres = WMCreateSyncReader(0, 0, &pstreams[id].wmreader);
	if(FAILED(hres))return 0;

	hres = pstreams[id].wmreader->Open(sname);
	if(FAILED(hres))return 0;

	pstreams[id].wmreader->GetOutputCount(&outcount);

	for(i=0; i<outcount; i++)
	{
		
		hres = pstreams[id].wmreader->GetOutputProps(i, &ppProps);
		if(FAILED(hres))
		{
			/* no props object was returned, nothing to release */
			continue;
		}

		hres = ppProps->GetMediaType(0, &wmpz);
		if(FAILED(hres))
		{
			ppProps->Release();
			continue;
		}

		wmt = (WM_MEDIA_TYPE*) malloc(wmpz);

		hres = ppProps->GetMediaType(wmt, &wmpz);

		if(WMMEDIATYPE_Audio != wmt->majortype)
		{
			ppProps->Release();
			free(wmt);
			continue;
		}

		/* copy at most sizeof(WAVEFORMATEX); cbFormat may include extra codec data */
		memcpy(&wfx, wmt->pbFormat, wmt->cbFormat < sizeof(WAVEFORMATEX) ? wmt->cbFormat : sizeof(WAVEFORMATEX));

		pstreams[id].channels      = wfx.nChannels;
		pstreams[id].frequency     = wfx.nSamplesPerSec;
		pstreams[id].bitspersample = wfx.wBitsPerSample;

		pstreams[id].wmaudioout = i;

		free(wmt);

		ppProps->Release();
		break;
	}
	pstreams[id].buffer = 0;
	pstreams[id].buffersize = 0;

	/* get information */

	hres = pstreams[id].wmreader->QueryInterface(IID_IWMHeaderInfo, (VOID **)&wminfo);
	if(FAILED(hres))return 0;

	wmistream = 0;

	hres = wminfo->GetAttributeByName(&wmistream, g_wszWMDuration, &Type, 0, &wmilen);

	if(hres == S_OK)
	{
		QWORD dur;
		wminfo->GetAttributeByName(&wmistream, g_wszWMDuration, &Type, (BYTE*)&dur,&wmilen);
		pstreams[id].duration = (DWORD)(dur / 10000); /* 100 ns units -> milliseconds */
	}

	wminfo->Release();
	return 1;
}
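
decoder_load() above leaves pstreams[id].buffer and pstreams[id].buffersize at zero. A hypothetical follow-up (not from the fennec source) could size that buffer inside decoder_load, for example just before the final return, using IWMSyncReader::GetMaxOutputSampleSize, the same call example #4 uses, which reports the largest uncompressed sample the selected output can deliver:

	/* sketch only: allocate a decode buffer big enough for any sample from
	   the selected audio output; cast malloc's result to whatever type the
	   project's buffer field actually has */
	DWORD maxsample = 0;
	hres = pstreams[id].wmreader->GetMaxOutputSampleSize(pstreams[id].wmaudioout, &maxsample);
	if(SUCCEEDED(hres))
	{
		pstreams[id].buffer     = malloc(maxsample);
		pstreams[id].buffersize = maxsample;
	}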