void LoaderSourceFileWindowsMedia::loadData( BufferList *ioData )
{	
	HRESULT hr;
	INSSBuffer * outBuffer;
	QWORD pcnsSampleTime;
	QWORD pcnsDuration;
	DWORD pdwFlags;
	DWORD pdwOutputNum;
	WORD pwStreamNum;

	// Ask the synchronous reader for the next sample (stream number 0 = any selected stream).
	hr = mReader->GetNextSample( 0, &outBuffer, &pcnsSampleTime, &pcnsDuration, &pdwFlags, &pdwOutputNum, &pwStreamNum );
	if( hr == NS_E_NO_MORE_SAMPLES || FAILED( hr ) ) {
		// End of stream or read error: deliver an empty buffer.
		ioData->mBuffers[0].mSampleCount = 0;
		return;
	}

	DWORD bufferLen;
	BYTE * rawBuffer = 0;
	hr = outBuffer->GetBufferAndLength( &rawBuffer, &bufferLen );
	if( hr != S_OK ) {
		// Could not access the sample data; release the sample and deliver an empty buffer.
		outBuffer->Release();
		ioData->mBuffers[0].mSampleCount = 0;
		return;
	}

	uint32_t bufferSize = bufferLen;
	if( bufferSize > ioData->mBuffers[0].mDataByteSize ) {
		bufferSize = ioData->mBuffers[0].mDataByteSize;
	}

	memcpy( ioData->mBuffers[0].mData, rawBuffer, bufferSize );
	// rawBuffer points into the INSSBuffer, so release the sample only after the copy.
	outBuffer->Release();

	ioData->mBuffers[0].mDataByteSize = bufferSize;
	ioData->mBuffers[0].mSampleCount = bufferSize / mSrcBlockAlign;
	mSampleOffset += ioData->mBuffers[0].mSampleCount;
}
Example #2
unsigned long decoder_read(unsigned long id, void* adata, unsigned long dsize)
{
	HRESULT      hres = 0;
	DWORD        i = 0;
	INSSBuffer*  ppSample;
	QWORD        pcnsSampleTime;
	QWORD        pcnsDuration;
	DWORD        pdwFlags;
	DWORD        pdwOutputNum;
	WORD         pwStreamNum;
	DWORD        lenret = 0;
	unsigned char* ptbuff;

	if(pstreams[id].buffersize && (pstreams[id].bufferpt < pstreams[id].buffersize))
	{
		memmove((char*)adata, pstreams[id].buffer + pstreams[id].bufferpt, min(dsize, pstreams[id].buffersize - pstreams[id].bufferpt));
		i += min(dsize, pstreams[id].buffersize - pstreams[id].bufferpt);
		pstreams[id].bufferpt += min(dsize, pstreams[id].buffersize - pstreams[id].bufferpt);

		if(i >= dsize)return dsize;
	}

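	// Keep pulling audio samples from the reader until the caller's request is satisfied.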
	while(1)
	{
		hres = pstreams[id].wmreader->GetNextSample((WORD)pstreams[id].wmaudioout, &ppSample, &pcnsSampleTime, &pcnsDuration, &pdwFlags, &pdwOutputNum, &pwStreamNum);
		if(hres == NS_E_NO_MORE_SAMPLES || FAILED(hres))return i;	// return whatever was already delivered

		hres = ppSample->GetBufferAndLength(&ptbuff, &lenret);

		if(FAILED(hres) || !lenret)
		{
			ppSample->Release();
			break;
		}

		memmove((char*)adata + i, ptbuff, min(dsize - i, lenret));
		i += lenret;

		if(i > dsize)
		{
			if(!pstreams[id].buffer)
			{
				pstreams[id].buffer = (char*)malloc(lenret);
				pstreams[id].bufferallocsize = lenret;
				pstreams[id].buffersize = lenret;
			}else{
				if(lenret > pstreams[id].bufferallocsize)
				{
					pstreams[id].buffer = (char*) realloc(pstreams[id].buffer, lenret);
					pstreams[id].bufferallocsize = lenret;
				}
				pstreams[id].buffersize = lenret;
			}
			memcpy(pstreams[id].buffer, ptbuff, lenret);

			pstreams[id].bufferpt = lenret - (i - dsize);
		}

		ppSample->Release();

		if(i >= dsize)return dsize;
	}
	return i;
}
// THREADED, NO TOUCH
int CVideoInstance::NextFrame()
{
    HRESULT hr = S_OK;
    QWORD cnsSampleTime = 0;
    QWORD cnsSampleDuration = 0;
    DWORD dwFlags = 0;
    if (!mySyncReader)
    {
        return 0;
    }

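    // Read the next sample from the video stream with the synchronous reader.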
    INSSBuffer* iNSSBuffer;
    hr = mySyncReader->GetNextSample(myVideoStreamNumber,
                                     &iNSSBuffer,
                                     &cnsSampleTime,
                                     &cnsSampleDuration,
                                     &dwFlags,
                                     NULL,//&dwOutputNumber,
                                     NULL);
    if (hr == NS_E_NO_MORE_SAMPLES)
    {
        //No more samples to read in the file.
        myIsStreamDone = true;
        return 0;
    }


    if (SUCCEEDED(hr))
    {
        // TODO: Process the sample in whatever way is appropriate
        // to your application. When finished, clean up.
        if (dwFlags & WM_SF_CLEANPOINT) // A clean-point (key) frame; see the Format SDK docs for details.
        {
            //					AfxMessageBox("found new sample");
            //1. Get the Bitmap from the frame
            //BYTE* meir;
            unsigned char *buffer;
            hr = iNSSBuffer->GetBufferAndLength(&buffer, &myDwrdBitmapBufferLength);
            if (SUCCEEDED(hr))
            {
                if (myBitmapBuffer == NULL)
                {
                    myBitmapBuffer = new unsigned char[myDwrdBitmapBufferLength];
                }
                memcpy(myBitmapBuffer, buffer, myDwrdBitmapBufferLength);
                // Convert the sample time from 100-ns units to whole seconds.
                m_qwCurrrentPostionInSeconds = (cnsSampleTime * 100) / 1000000000;
                myFramesRead++;
            }
        }
        iNSSBuffer->Release();
        iNSSBuffer = NULL;
    }

    return 1;
}
Example #4
unsigned EncodedBuffer::AllocateDataUnit(DWORD cbDataUnit, INSSBuffer** ppDataUnit)
{
    if (!ppDataUnit)
    {
        return RET_INVALID_ARG;
    }

    INSSBuffer * pNSSBuf = new INSSBuffer();
    if (!pNSSBuf || pNSSBuf->SetLength(cbDataUnit) != RET_OK)
    {
        // Avoid leaking the buffer if SetLength() fails.
        delete pNSSBuf;
        return RET_LOW_MEMORY;
    }

    *ppDataUnit = pNSSBuf;

    return RET_OK;
}
//------------------------------------------------------------------------------
// Name: FrameNumberToTime()
// Desc: Converts a frame number to a reference time (100-nanosecond units).
//
// pSyncReader:      Pointer to the synchronous reader's IWMSyncReader interface.
// wFrameSeekStream: Specifies the stream number. 
// qwFrame:          Specifies the frame number.
// prtFrameTime:     Receives the reference time.
//------------------------------------------------------------------------------
HRESULT FrameNumberToTime( IWMSyncReader * pSyncReader, WORD wFrameSeekStream, 
                           QWORD qwFrame, REFERENCE_TIME * prtFrameTime )
{
    if( !pSyncReader || !prtFrameTime )
        return E_POINTER;

    // Use the Windows Media Format SDK synchronous reader object to seek to
    // the specified frame. The reader object returns the presentation time
    // for the next sample.
    
    // Seek to the specified frame number.
    HRESULT hr = pSyncReader->SetRangeByFrame( wFrameSeekStream, qwFrame, 0 );
    if( FAILED( hr ) )
    {
        _tprintf(_T("SetRangeByFrameFailed... hr=0x%x\nPlayback aborted.\n\n"), hr);
    }
    else 
    {
        // Get the next sample and return the presentation time to the caller.
        INSSBuffer * pINSSBuffer;
        QWORD qwSampleTime, qwSampleDuration;
        DWORD dwFlags;
        WORD idStream;
        
        hr = pSyncReader->GetNextSample( 
                                       wFrameSeekStream,
                                       &pINSSBuffer,
                                       &qwSampleTime,
                                       &qwSampleDuration,
                                       &dwFlags,
                                       NULL,
                                       &idStream );
        if( SUCCEEDED( hr ) )
        {
            pINSSBuffer->Release();
            *prtFrameTime = qwSampleTime;                
        }
    }

    return hr;  
}
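//------------------------------------------------------------------------------
// Usage sketch: one way FrameNumberToTime() might be called. The function name,
// file, stream number, and frame number here are hypothetical placeholders;
// assumes wmsdk.h is included and the project links against wmvcore.lib.
//------------------------------------------------------------------------------
HRESULT PrintTimeOfFrame( const WCHAR * pwszFile, WORD wVideoStream, QWORD qwFrame )
{
    HRESULT hr = CoInitialize( NULL );
    if( FAILED( hr ) )
        return hr;

    IWMSyncReader * pReader = NULL;
    hr = WMCreateSyncReader( NULL, 0, &pReader );
    if( SUCCEEDED( hr ) )
    {
        hr = pReader->Open( pwszFile );
        if( SUCCEEDED( hr ) )
        {
            REFERENCE_TIME rtFrameTime = 0;
            hr = FrameNumberToTime( pReader, wVideoStream, qwFrame, &rtFrameTime );
            if( SUCCEEDED( hr ) )
                _tprintf( _T( "Frame %I64u starts at %I64d (100-ns units).\n" ), qwFrame, rtFrameTime );
            pReader->Close();
        }
        pReader->Release();
    }

    CoUninitialize();
    return hr;
}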
Example #6
//----------------------------------------------------------------------------
//! @brief		Get the next sample
//! @param		pSample : pointer to a pointer that receives the sample
//! @return		error code
//----------------------------------------------------------------------------
HRESULT CWMOutput::GetNextSample( IMediaSample **pSample )
{
	HRESULT hr;
	if( m_StreamNum == 0 || pSample == NULL )
		return S_FALSE;	// This stream does not exist

	INSSBuffer	*pWMSample = NULL;
	QWORD	cnsSampleTime;
	QWORD	cnsDuration;
	DWORD	dwFlags;

	if( FAILED(hr = WMReader()->GetNextSample( m_StreamNum, &pWMSample, &cnsSampleTime, &cnsDuration, &dwFlags, NULL, NULL )) )
	{
		if( hr == NS_E_NO_MORE_SAMPLES ) return S_FALSE;
		return hr;
	}

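	// The INSSBuffer delivered by the reader is a CWMBuffer that wraps a DirectShow
	// IMediaSample; hand that sample on with media and stream times applied.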
	REFERENCE_TIME	startTime = (REFERENCE_TIME)cnsSampleTime;
	REFERENCE_TIME	endTime = (REFERENCE_TIME)(cnsSampleTime + cnsDuration);
	IMediaSample *pOutSample = reinterpret_cast<CWMBuffer*>(pWMSample)->GetSample();
	pOutSample->AddRef();
	pWMSample->Release();
	pOutSample->SetMediaTime(&startTime, &endTime);
#if 0
	if( startTime < Reader()->m_StartTime )
		pOutSample->SetPreroll(TRUE);
	else
		pOutSample->SetPreroll(FALSE);
#endif
	startTime -= Reader()->m_StartTime;
	endTime -= Reader()->m_StartTime;
	pOutSample->SetTime(&startTime, &endTime);
	pOutSample->SetSyncPoint(dwFlags & WM_SF_CLEANPOINT);
	*pSample = pOutSample;

	return hr;
}
HRESULT CReader::ReadSamples()
{
    HRESULT hr = S_OK;
    INSSBuffer* pSample = NULL;

    WORD	wStream = 1;
    WMT_STREAM_SELECTION	wmtSS = WMT_ON;
    QWORD cnsSampleTime = 0, cnsPrevSampleTime = 0;
    QWORD cnsDuration = 0;
    DWORD dwFlags = 0;
    DWORD dwOutputNum = 0;
    WORD wStreamNum = 0;
    static DWORD dwVideoSamplesCnt = 0;
    static DWORD dwAudioSamplesCnt = 0;


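    // Select or deselect the audio stream, and choose compressed or decoded sample delivery.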
    if( 0 != m_wAudioStreamNum )
    {
        if( m_fAudioStream )
        {
            wmtSS = WMT_ON;
        }
        else
        {
            wmtSS = WMT_OFF;
        }

        hr = m_pReader->SetStreamsSelected( 1, &m_wAudioStreamNum, &wmtSS );
        if ( FAILED( hr ) )
        {
            _tprintf( _T(  "SetStreamsSelected (hr=0x%08x).\n" ), hr );
            return( hr );
        }

        hr = m_pReader->SetReadStreamSamples( m_wAudioStreamNum, m_fCompressed );
        if ( FAILED( hr ) )
        {
            _tprintf( _T(  "SetReadStreamSamples (hr=0x%08x).\n" ), hr );
            return( hr );
        }
    }

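    // Do the same for the video stream; a frame-based playback range may also be set on it.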
    if( 0 != m_wVideoStreamNum )
    {
        if( m_fVideoStream )
        {
            wmtSS = WMT_ON;
        }
        else
        {
            wmtSS = WMT_OFF;
        }

        hr = m_pReader->SetStreamsSelected( 1, &m_wVideoStreamNum, &wmtSS );
        if ( FAILED( hr ) )
        {
            _tprintf( _T(  "SetStreamsSelected (hr=0x%08x).\n" ), hr );
            return( hr );
        }

        hr = m_pReader->SetReadStreamSamples( m_wVideoStreamNum, m_fCompressed );
        if ( FAILED( hr ) )
        {
            _tprintf( _T(  "SetReadStreamSamples (hr=0x%08x).\n" ), hr );
            return( hr );
        }

        if( m_fRangeInFrames )
        {
            QWORD qwDuration = 0;

            if( 0 != m_cnsEnd )
            {
                qwDuration = m_cnsEnd - m_cnsStart;
            }

            hr = m_pReader->SetRangeByFrame( m_wVideoStreamNum, m_cnsStart, qwDuration );
            if ( FAILED( hr ) )
            {
                _tprintf( _T(  "SetRangeByFrame (hr=0x%08x).\n" ), hr );
                return( hr );
            }
        }
    }

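    // When not seeking by frame, set a time-based range; SetRange() expects 100-nanosecond units.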
    if( 0 == m_wVideoStreamNum || ( !m_fRangeInFrames && 0 != m_wVideoStreamNum ) )
    {
        QWORD qwDuration = 0;

        if( 0 != m_cnsEnd )
        {
            qwDuration = ( m_cnsEnd - m_cnsStart ) * 10000L;
        }

        hr = m_pReader->SetRange( m_cnsStart * 10000L, qwDuration );
        if ( FAILED( hr ) )
        {
            _tprintf( _T(  "SetRange (hr=0x%08x).\n" ), hr );
            return( hr );
        }
    }

    _tprintf( _T( "\nGetting samples ...\n" ) );

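    // Read samples from any selected stream (stream number 0) until the reader reports NS_E_NO_MORE_SAMPLES.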
    while( SUCCEEDED( hr ) )
    {


        hr = m_pReader->GetNextSample( 0,
                                       &pSample,
                                       &cnsSampleTime,
                                       &cnsDuration,
                                       &dwFlags,
                                       &dwOutputNum,
                                       &wStreamNum );

        if( FAILED( hr ) )
        {
            if( NS_E_NO_MORE_SAMPLES == hr )
            {
                hr = S_OK;
                _tprintf( _T( "\nLast sample reached.\n" ) );
                _tprintf( _T( "\nLast sample time : %lu ms\n" ), cnsPrevSampleTime/10000 );
                break;
            }
            else
            {
                _tprintf( _T( "GetNextSample() failed : (hr=0x%08x).\n" ), hr );
                return( hr );
            }
        }

        cnsPrevSampleTime = cnsSampleTime;

        if( 0 == dwVideoSamplesCnt && 0 == dwAudioSamplesCnt )
        {
            _tprintf( _T( "\nFirst sample time : %lu ms\n" ), cnsSampleTime/10000 );
        }

        if( m_wVideoStreamNum == wStreamNum )
        {
            dwVideoSamplesCnt++;
            if ( 0 == dwVideoSamplesCnt % 4 )
            {
                _tprintf( _T( "v" ) );
            }
        }
        else if( m_wAudioStreamNum == wStreamNum )
        {
            dwAudioSamplesCnt++;

            if ( 0 == dwAudioSamplesCnt % 4 )
            {
                _tprintf( _T( "a" ) );
            }
        }


        pSample->Release();
    }
    return( hr );
}