HRESULT GetSampleFromMFStreamer(/* out */ const vpx_codec_cx_pkt_t *& vpkt)
{
    //printf("Get Sample...\n");

    IMFSample *videoSample = NULL;

    // Initial read results in a null pSample??
    CHECK_HR(videoReader->ReadSample(
        MF_SOURCE_READER_ANY_STREAM,    // Stream index.
        0,                              // Flags.
        &streamIndex,                   // Receives the actual stream index.
        &flags,                         // Receives status flags.
        &llVideoTimeStamp,              // Receives the time stamp.
        &videoSample                    // Receives the sample or NULL.
        ), L"Error reading video sample.");

    if (!videoSample)
    {
        printf("Failed to get video sample from MF.\n");
        return S_OK;    // no sample this time; nothing to encode (the original fell off the end here)
    }

    DWORD nCurrBufferCount = 0;
    CHECK_HR(videoSample->GetBufferCount(&nCurrBufferCount), L"Failed to get the buffer count from the video sample.\n");

    IMFMediaBuffer *pMediaBuffer;
    CHECK_HR(videoSample->ConvertToContiguousBuffer(&pMediaBuffer), L"Failed to extract the video sample into a raw buffer.\n");

    DWORD nCurrLen = 0;
    CHECK_HR(pMediaBuffer->GetCurrentLength(&nCurrLen), L"Failed to get the length of the raw buffer holding the video sample.\n");

    byte *imgBuff;
    DWORD buffCurrLen = 0;
    DWORD buffMaxLen = 0;
    CHECK_HR(pMediaBuffer->Lock(&imgBuff, &buffMaxLen, &buffCurrLen), L"Failed to lock the raw buffer holding the video sample.\n");

    /*BYTE *i420 = new BYTE[4608000];
    YUY2ToI420(WIDTH, HEIGHT, STRIDE, imgBuff, i420);
    vpx_image_t* img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT, _vpxConfig.g_w, _vpxConfig.g_h, 1, i420);*/

    // Wrap the raw sample buffer in a vpx_image_t (no copy is made).
    vpx_image_t* const img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT, _vpxConfig.g_w, _vpxConfig.g_h, 1, imgBuff);

    const vpx_codec_cx_pkt_t *pkt;
    vpx_enc_frame_flags_t encFlags = 0; // renamed from "flags" to avoid shadowing the reader status flags above

    if (vpx_codec_encode(&_vpxCodec, &_rawImage, _sampleCount, 1, encFlags, VPX_DL_REALTIME))
    {
        printf("VPX codec failed to encode the frame.\n");
        pMediaBuffer->Unlock();
        pMediaBuffer->Release();
        videoSample->Release();
        return E_FAIL;  // was "return -1", which is not a well-formed HRESULT
    }

    vpx_codec_iter_t iter = NULL;
    while ((pkt = vpx_codec_get_cx_data(&_vpxCodec, &iter)))
    {
        switch (pkt->kind)
        {
        case VPX_CODEC_CX_FRAME_PKT:
            vpkt = pkt; // hand the compressed-frame packet back to the caller
            break;
        default:
            break;
        }

        // "K" marks key frames; data.frame.sz is a size_t, so use %zu (was %i).
        printf("%s %zu\n", pkt->kind == VPX_CODEC_CX_FRAME_PKT && (pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? "K" : ".", pkt->data.frame.sz);
    }

    _sampleCount++;

    vpx_img_free(img);
    pMediaBuffer->Unlock();
    pMediaBuffer->Release();
    //delete i420;
    videoSample->Release();

    return S_OK;
}
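// Hypothetical caller sketch (not from the original source): pump the function
// above and hand each compressed VP8 frame to some consumer. ConsumeVp8Frame is
// an assumed placeholder; note that vpkt points at encoder-owned memory that is
// only valid until the next vpx_codec_encode() call, so it must be consumed
// before the next iteration.
void ConsumeVp8Frame(const uint8_t *buf, size_t len); // assumed consumer

void EncodeLoopSketch()
{
    const vpx_codec_cx_pkt_t *vpkt = NULL;
    while (SUCCEEDED(GetSampleFromMFStreamer(vpkt)))
    {
        if (vpkt != NULL)
        {
            ConsumeVp8Frame((const uint8_t *)vpkt->data.frame.buf, vpkt->data.frame.sz);
            vpkt = NULL; // reset: a read that yields no sample leaves vpkt untouched
        }
    }
}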
//-------------------------------------------------------------------
// Read a frame and provide access to the data
//
HRESULT VidReader::getReadBuffer(BYTE **ppData)
{
    HRESULT hr = S_OK;
    DWORD dwFlags = 0;
    DWORD cbBitmapData = 0;     // Size of data, in bytes
    IMFSample *pSample = NULL;  // initialise so SafeRelease() is safe on the early error path

    if (!m_pReader) return E_ABORT; // no source reader; bail out

    while (1)
    {
        hr = m_pReader->ReadSample(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            0,
            NULL,
            &dwFlags,
            &m_timestamp,
            &pSample
            );

        if (FAILED(hr)) goto done;

        if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM)
        {
            break;
        }

        if (dwFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)
        {
            // Type change. Get the new format.
            hr = getVideoFormat();
            if (FAILED(hr)) goto done;
        }

        if (pSample == NULL)
        {
            continue;
        }

        // We got a sample.
        break;
    }

    if (pSample)
    {
        UINT32 pitch = 4 * m_imagewidth; // RGB32: 4 bytes per pixel

        hr = pSample->ConvertToContiguousBuffer(&m_pBuffer);
        if (FAILED(hr)) goto done;

        // The buffer stays locked: the caller reads the frame through *ppData.
        hr = m_pBuffer->Lock(ppData, NULL, &cbBitmapData);
        if (FAILED(hr)) goto done;

        assert(cbBitmapData == (pitch * m_imageheight));
    }
    else
    {
        hr = MF_E_END_OF_STREAM;
    }

done:
    SafeRelease(&pSample);
    return hr;
}
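// getReadBuffer() intentionally returns with m_pBuffer still locked so the
// caller can read the frame through *ppData. A matching release helper would
// look something like this (a sketch, not part of the original class):
HRESULT VidReader::releaseReadBuffer()
{
    HRESULT hr = S_OK;
    if (m_pBuffer)
    {
        hr = m_pBuffer->Unlock();   // balance the Lock() in getReadBuffer()
        SafeRelease(&m_pBuffer);    // drop the contiguous buffer
    }
    return hr;
}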
unsigned char *BBWin8Game::LoadAudioData( String path,int *length,int *channels,int *format,int *hertz ){

	String url=PathToFilePath( path );

	DXASS( MFStartup( MF_VERSION ) );

	IMFAttributes *attrs;
	DXASS( MFCreateAttributes( &attrs,1 ) );
	DXASS( attrs->SetUINT32( MF_LOW_LATENCY,TRUE ) );

	IMFSourceReader *reader;
	DXASS( MFCreateSourceReaderFromURL( url.ToCString<wchar_t>(),attrs,&reader ) );
	attrs->Release();

	// Ask the source reader to decode the stream to uncompressed PCM.
	IMFMediaType *mediaType;
	DXASS( MFCreateMediaType( &mediaType ) );
	DXASS( mediaType->SetGUID( MF_MT_MAJOR_TYPE,MFMediaType_Audio ) );
	DXASS( mediaType->SetGUID( MF_MT_SUBTYPE,MFAudioFormat_PCM ) );
	DXASS( reader->SetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,mediaType ) );
	mediaType->Release();

	IMFMediaType *outputMediaType;
	DXASS( reader->GetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,&outputMediaType ) );

	WAVEFORMATEX *wformat;
	uint32 formatByteCount=0;
	DXASS( MFCreateWaveFormatExFromMFMediaType( outputMediaType,&wformat,&formatByteCount ) );

	*channels=wformat->nChannels;
	*format=wformat->wBitsPerSample/8;	// bytes per sample
	*hertz=wformat->nSamplesPerSec;

	CoTaskMemFree( wformat );
	outputMediaType->Release();

/*
	PROPVARIANT var;
	DXASS( reader->GetPresentationAttribute( MF_SOURCE_READER_MEDIASOURCE,MF_PD_DURATION,&var ) );
	LONGLONG duration=var.uhVal.QuadPart;
	float64 durationInSeconds=(duration / (float64)(10000 * 1000));
	m_maxStreamLengthInBytes=(uint32)( durationInSeconds * m_waveFormat.nAvgBytesPerSec );
*/

	// Pull decoded samples until end-of-stream, collecting each buffer.
	std::vector<unsigned char*> bufs;
	std::vector<uint32> lens;
	uint32 len=0;

	for( ;; ){
		uint32 flags=0;
		IMFSample *sample;
		DXASS( reader->ReadSample( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,0,reinterpret_cast<DWORD*>(&flags),0,&sample ) );

		if( flags & MF_SOURCE_READERF_ENDOFSTREAM ) break;
		if( sample==0 ){ abort(); }

		IMFMediaBuffer *mediaBuffer;
		DXASS( sample->ConvertToContiguousBuffer( &mediaBuffer ) );

		uint8 *audioData=0;
		uint32 sampleBufferLength=0;
		DXASS( mediaBuffer->Lock( &audioData,0,reinterpret_cast<DWORD*>( &sampleBufferLength ) ) );

		unsigned char *buf=(unsigned char*)malloc( sampleBufferLength );
		memcpy( buf,audioData,sampleBufferLength );

		bufs.push_back( buf );
		lens.push_back( sampleBufferLength );
		len+=sampleBufferLength;

		DXASS( mediaBuffer->Unlock() );
		mediaBuffer->Release();
		sample->Release();
	}
	reader->Release();

	// length is returned in sample frames, not bytes.
	*length=len/(*channels * *format);

	// Coalesce the per-sample buffers into one contiguous block.
	unsigned char *data=(unsigned char*)malloc( len );
	unsigned char *p=data;
	for( size_t i=0;i<bufs.size();++i ){	// size_t avoids a signed/unsigned comparison
		memcpy( p,bufs[i],lens[i] );
		free( bufs[i] );
		p+=lens[i];
	}

	gc_force_sweep=true;

	return data;
}
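// Hypothetical usage sketch (the 'game' instance and asset name are
// assumptions, not part of the original code): the out-parameters describe the
// PCM block, so its total byte size is frames * channels * bytes-per-sample.
void LoadJumpSoundSketch( BBWin8Game *game ){
	int length=0,channels=0,format=0,hertz=0;
	unsigned char *data=game->LoadAudioData( "jump.wav",&length,&channels,&format,&hertz );
	int totalBytes=length*channels*format;	// e.g. stereo 16-bit: length*2*2 bytes at 'hertz' samples/sec
	(void)totalBytes;	// hand 'data' and the format info to the audio device here
}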
HRESULT CHWMFT::ProcessOutput(
    DWORD dwFlags,
    DWORD dwOutputBufferCount,
    MFT_OUTPUT_DATA_BUFFER* pOutputSamples,
    DWORD* pdwStatus)
{
    /*****************************************
    ** See http://msdn.microsoft.com/en-us/library/ms704014(v=VS.85).aspx
    *****************************************/

    HRESULT hr = S_OK;
    IMFSample* pSample = NULL;

    TraceString(CHMFTTracing::TRACE_INFORMATION, L"%S(): Enter", __FUNCTION__);

    do
    {
        if(IsLocked() != FALSE)
        {
            hr = MF_E_TRANSFORM_ASYNC_LOCKED;
            break;
        }

        {
            CAutoLock lock(&m_csLock);

            TraceString(CHMFTTracing::TRACE_INFORMATION, L"%S(): HaveOutputCount: %u", __FUNCTION__, m_dwHaveOutputCount);

            if(m_dwHaveOutputCount == 0)
            {
                // This call does not correspond to a have output call
                hr = E_UNEXPECTED;
                break;
            }
            else
            {
                m_dwHaveOutputCount--;
            }
        }

        /*****************************************
        ** Todo: If your MFT supports more than one
        ** stream, make sure you modify
        ** MFT_MAX_STREAMS and adjust this function
        ** accordingly
        *****************************************/
        if(dwOutputBufferCount < MFT_MAX_STREAMS)
        {
            hr = E_INVALIDARG;
            break;
        }

        if(IsMFTReady() == FALSE)
        {
            hr = MF_E_TRANSFORM_TYPE_NOT_SET;
            break;
        }

        /***************************************
        ** Since this is an internal function
        ** we know m_pOutputSampleQueue can never be
        ** NULL due to InitializeTransform()
        ***************************************/
        hr = m_pOutputSampleQueue->GetNextSample(&pSample);
        if(FAILED(hr))
        {
            break;
        }

        if(pSample == NULL)
        {
            hr = MF_E_TRANSFORM_NEED_MORE_INPUT;
            break;
        }

        /*******************************
        ** Todo: This MFT only has one
        ** input stream, so the output
        ** samples array and stream ID
        ** will only use the first
        ** member
        *******************************/
        pOutputSamples[0].dwStreamID = 0;

        if((pOutputSamples[0].pSample) == NULL)
        {
            // The MFT is providing its own samples
            (pOutputSamples[0].pSample) = pSample;
            (pOutputSamples[0].pSample)->AddRef();
        }
        else
        {
            // The pipeline has allocated the samples
            IMFMediaBuffer* pBuffer = NULL;

            do
            {
                hr = pSample->ConvertToContiguousBuffer(&pBuffer);
                if(FAILED(hr))
                {
                    break;
                }

                hr = (pOutputSamples[0].pSample)->AddBuffer(pBuffer);
                if(FAILED(hr))
                {
                    break;
                }
            }while(false);

            SAFERELEASE(pBuffer);

            if(FAILED(hr))
            {
                break;
            }
        }

        /***************************************
        ** Since this is an internal function
        ** we know m_pOutputSampleQueue can never be
        ** NULL due to InitializeTransform()
        ***************************************/
        if(m_pOutputSampleQueue->IsQueueEmpty() != FALSE)
        {
            // We're out of samples in the output queue
            CAutoLock lock(&m_csLock);

            if((m_dwStatus & MYMFT_STATUS_DRAINING) != 0)
            {
                // We're done draining, time to send the event
                IMFMediaEvent* pDrainCompleteEvent = NULL;

                do
                {
                    hr = MFCreateMediaEvent(METransformDrainComplete, GUID_NULL, S_OK, NULL, &pDrainCompleteEvent);
                    if(FAILED(hr))
                    {
                        break;
                    }

                    /*******************************
                    ** Todo: This MFT only has one
                    ** input stream, so the drain
                    ** is always on stream zero.
                    ** Update this if your MFT
                    ** has more than one stream
                    *******************************/
                    hr = pDrainCompleteEvent->SetUINT32(MF_EVENT_MFT_INPUT_STREAM_ID, 0);
                    if(FAILED(hr))
                    {
                        break;
                    }

                    /***************************************
                    ** Since this is an internal function
                    ** we know m_pEventQueue can never be
                    ** NULL due to InitializeTransform()
                    ***************************************/
                    hr = m_pEventQueue->QueueEvent(pDrainCompleteEvent);
                    if(FAILED(hr))
                    {
                        break;
                    }
                }while(false);

                SAFERELEASE(pDrainCompleteEvent);

                if(FAILED(hr))
                {
                    break;
                }

                m_dwStatus &= (~MYMFT_STATUS_DRAINING);
            }
        }
    }while(false);

    SAFERELEASE(pSample);

    TraceString(CHMFTTracing::TRACE_INFORMATION, L"%S(): Exit (hr=0x%x)", __FUNCTION__, hr);

    return hr;
}
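// Sketch of how an async-MFT client typically drives ProcessOutput() after it
// receives METransformHaveOutput (ConsumeSample is an assumed placeholder, not
// part of the original source):
void ConsumeSample(IMFSample *pSample); // assumed consumer

HRESULT OnHaveOutputSketch(IMFTransform *pMFT)
{
    MFT_OUTPUT_DATA_BUFFER outputBuffer = {0};
    DWORD dwStatus = 0;

    outputBuffer.dwStreamID = 0;
    outputBuffer.pSample = NULL;                // let the MFT supply its own sample

    HRESULT hr = pMFT->ProcessOutput(0, 1, &outputBuffer, &dwStatus);

    if(SUCCEEDED(hr) && (outputBuffer.pSample != NULL))
    {
        ConsumeSample(outputBuffer.pSample);    // hypothetical consumer
        SAFERELEASE(outputBuffer.pSample);
    }

    SAFERELEASE(outputBuffer.pEvents);          // the caller releases any returned events

    return hr;
}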
int camera_capture(camera_t *cam, unsigned char *outdata)
{
    camera_internal_t *camera = (camera_internal_t*)cam;
    if (!camera->reader) return 0; // error: no reader (non-zero return means success)

    HRESULT hr = S_OK;
    IMFSample *pSample = NULL;
    DWORD streamIndex = 0, flags = 0;
    LONGLONG llTimeStamp = 0;

    // Read and discard one sample: the first read after starting the source
    // reader tends to return a stale or empty frame.
    hr = camera->reader->ReadSample(
        (DWORD)MF_SOURCE_READER_ANY_STREAM, // Stream index.
        0,                                  // Flags.
        &streamIndex,                       // Receives the actual stream index.
        &flags,                             // Receives status flags.
        &llTimeStamp,                       // Receives the time stamp.
        &pSample                            // Receives the sample or NULL.
        );
    SafeRelease(&pSample);

    hr = camera->reader->ReadSample(
        (DWORD)MF_SOURCE_READER_ANY_STREAM, // Stream index.
        0,                                  // Flags.
        &streamIndex,                       // Receives the actual stream index.
        &flags,                             // Receives status flags.
        &llTimeStamp,                       // Receives the time stamp.
        &pSample                            // Receives the sample or NULL.
        );
    if (FAILED(hr) || !pSample) return 0;   // guard against a NULL sample before dereferencing it

    IMFMediaBuffer *mediaBuffer = NULL;
    BYTE *pData = NULL;
    DWORD pLength = 0;
    pSample->ConvertToContiguousBuffer(&mediaBuffer);
    hr = mediaBuffer->Lock(&pData, NULL, &pLength);
    //console_printf("camera: length: %d, WxH: %d, 2WxH: %d, 3WxH: %d\n", pLength, WIDTH * HEIGHT, 2 * WIDTH * HEIGHT, 3 * WIDTH * HEIGHT);

    /*unsigned char *rgbData = new unsigned char[WIDTH * HEIGHT * (24 / 8)];
    unsigned char *yuy2Ptr = pData, *rgbPtr = rgbData;
    for (UINT32 j = 0; j < HEIGHT; j++)
    {
        for (UINT32 i = 0; i < WIDTH / 2; ++i)
        {
            int y0 = yuy2Ptr[0]; int u0 = yuy2Ptr[1]; int y1 = yuy2Ptr[2]; int v0 = yuy2Ptr[3];
            yuy2Ptr += 4;

            int c = y0 - 16; int d = u0 - 128; int e = v0 - 128;
            rgbPtr[0] = clip((298 * c + 516 * d + 128) >> 8);           // blue
            rgbPtr[1] = clip((298 * c - 100 * d - 208 * e + 128) >> 8); // green
            rgbPtr[2] = clip((298 * c + 409 * e + 128) >> 8);           // red

            c = y1 - 16;
            rgbPtr[3] = clip((298 * c + 516 * d + 128) >> 8);           // blue
            rgbPtr[4] = clip((298 * c - 100 * d - 208 * e + 128) >> 8); // green
            rgbPtr[5] = clip((298 * c + 409 * e + 128) >> 8);           // red
            rgbPtr += 6;
        }
    }*/

    // The RGB24 frame is delivered bottom-up in BGR order: walk the source
    // from its last row upwards, swapping B and R into the caller's buffer.
    unsigned char *in = pData + camera->size.width * (camera->size.height - 1) * 3, *out = outdata;
    for (UINT32 j = 0; j < camera->size.height; j++)
    {
        for (UINT32 i = 0; i < camera->size.width; ++i)
        {
            out[2] = in[0];
            out[1] = in[1];
            out[0] = in[2];
            in += 3; out += 3;
        }
        in -= 2 * camera->size.width * 3;
    }

    mediaBuffer->Unlock();
    SafeRelease(&pSample);
    SafeRelease(&mediaBuffer);

    return 1;
}
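// Hypothetical usage sketch: the caller owns a width*height*3 RGB buffer.
// camera_open() and the 640x480 size are assumptions about the rest of this
// API, not shown in the original source.
void CaptureOneFrameSketch(void)
{
    camera_t *cam = camera_open(0);                             // assumed constructor
    unsigned char *rgb = (unsigned char*)malloc(640 * 480 * 3); // assumed frame size
    if (camera_capture(cam, rgb))
    {
        // rgb now holds one top-down, tightly packed RGB24 frame
    }
    free(rgb);
}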