HRESULT GetSampleFromMFStreamer(/* out */ const vpx_codec_cx_pkt_t *& vpkt)
{
    //printf("Get Sample...\n");

    IMFSample *videoSample = NULL;

    // Initial read results in a null pSample??
    CHECK_HR(videoReader->ReadSample(
        MF_SOURCE_READER_ANY_STREAM,    // Stream index.
        0,                              // Flags.
        &streamIndex,                   // Receives the actual stream index.
        &flags,                         // Receives status flags.
        &llVideoTimeStamp,              // Receives the time stamp.
        &videoSample                    // Receives the sample or NULL.
        ), L"Error reading video sample.");

    if (!videoSample)
    {
        printf("Failed to get video sample from MF.\n");
        return S_FALSE; // no sample this time; don't fall off the end without a return value
    }

    DWORD nCurrBufferCount = 0;
    CHECK_HR(videoSample->GetBufferCount(&nCurrBufferCount), L"Failed to get the buffer count from the video sample.\n");

    IMFMediaBuffer *pMediaBuffer;
    CHECK_HR(videoSample->ConvertToContiguousBuffer(&pMediaBuffer), L"Failed to extract the video sample into a raw buffer.\n");

    DWORD nCurrLen = 0;
    CHECK_HR(pMediaBuffer->GetCurrentLength(&nCurrLen), L"Failed to get the length of the raw buffer holding the video sample.\n");

    byte *imgBuff;
    DWORD buffCurrLen = 0;
    DWORD buffMaxLen = 0;
    pMediaBuffer->Lock(&imgBuff, &buffMaxLen, &buffCurrLen);

    /*BYTE *i420 = new BYTE[4608000];
    YUY2ToI420(WIDTH, HEIGHT, STRIDE, imgBuff, i420);
    vpx_image_t* img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT, _vpxConfig.g_w, _vpxConfig.g_h, 1, i420);*/

    vpx_image_t* const img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT, _vpxConfig.g_w, _vpxConfig.g_h, 1, imgBuff);

    const vpx_codec_cx_pkt_t *pkt;
    vpx_enc_frame_flags_t encFlags = 0; // renamed so it no longer shadows the ReadSample status flags

    if (vpx_codec_encode(&_vpxCodec, &_rawImage, _sampleCount, 1, encFlags, VPX_DL_REALTIME))
    {
        printf("VPX codec failed to encode the frame.\n");
        pMediaBuffer->Unlock();
        pMediaBuffer->Release();
        videoSample->Release();
        return E_FAIL;
    }
    else
    {
        vpx_codec_iter_t iter = NULL;

        while ((pkt = vpx_codec_get_cx_data(&_vpxCodec, &iter)))
        {
            switch (pkt->kind)
            {
            case VPX_CODEC_CX_FRAME_PKT:
                vpkt = pkt; // const_cast<vpx_codec_cx_pkt_t **>(&pkt);
                break;
            default:
                break;
            }

            printf("%s %zu\n", pkt->kind == VPX_CODEC_CX_FRAME_PKT && (pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? "K" : ".", pkt->data.frame.sz);
        }
    }

    _sampleCount++;

    vpx_img_free(img);
    pMediaBuffer->Unlock();
    pMediaBuffer->Release();
    //delete i420;
    videoSample->Release();

    return S_OK;
}
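// CHECK_HR is not defined in any of these snippets and clearly varies between them:
// the two-argument form used above must return on failure (there is no "done" label
// in that function), while the variant in doGetNextFrame() further down jumps to a
// local "done" label. One plausible definition for the returning form, offered as an
// assumption rather than the original project's macro:
#define CHECK_HR(expr, msg)                                   \
    do {                                                      \
        HRESULT hr_ = (expr);                                 \
        if (FAILED(hr_)) {                                    \
            wprintf(L"%s hr=0x%08X\n", msg, (unsigned)hr_);   \
            return hr_;                                       \
        }                                                     \
    } while (0)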
HRESULT CASFManager::ReadDataIntoBuffer(
    IMFByteStream *pStream,     // Pointer to the byte stream.
    DWORD cbOffset,             // Offset at which to start reading
    DWORD cbToRead,             // Number of bytes to read
    IMFMediaBuffer **ppBuffer   // Receives a pointer to the buffer.
    )
{
    HRESULT hr = S_OK;
    BYTE *pData = NULL;
    DWORD cbRead = 0;   // Actual amount of data read
    IMFMediaBuffer *pBuffer = NULL;

    // Create the media buffer. This function allocates the memory.
    CHECK_HR(hr = MFCreateMemoryBuffer(cbToRead, &pBuffer));

    // Access the buffer.
    CHECK_HR(hr = pBuffer->Lock(&pData, NULL, NULL));

    // Set the offset.
    CHECK_HR(hr = pStream->SetCurrentPosition(cbOffset));

    // Read the data from the byte stream.
    CHECK_HR(hr = pStream->Read(pData, cbToRead, &cbRead));

    CHECK_HR(hr = pBuffer->Unlock());
    pData = NULL;

    // Update the size of the valid data.
    CHECK_HR(hr = pBuffer->SetCurrentLength(cbRead));

    // Return the pointer to the caller.
    *ppBuffer = pBuffer;
    (*ppBuffer)->AddRef();

    TRACE((L"Read data from the ASF file into a media buffer.\n"));

done:
    LOG_MSG_IF_FAILED(L"CASFManager::ReadDataIntoBuffer failed.\n", hr);

    if (pData)
    {
        pBuffer->Unlock();
    }
    SAFE_RELEASE(pBuffer);
    return hr;
}
void CDecWMV9MFT::wmv9_buffer_destruct(LAVFrame *pFrame)
{
    CDecWMV9MFT *pDec = (CDecWMV9MFT *)pFrame->priv_data;
    IMFMediaBuffer *pMFBuffer = (IMFMediaBuffer *)pFrame->data[3];
    // Balances the Lock() taken in CDecWMV9MFT::ProcessOutput(), which keeps the
    // buffer mapped for the lifetime of the frame when the data is used in place.
    pMFBuffer->Unlock();
    pDec->ReleaseBuffer(pMFBuffer);
}
IMFMediaBuffer *CDecWMV9MFT::CreateMediaBuffer(const BYTE *pData, DWORD dwDataLen)
{
    HRESULT hr;
    IMFMediaBuffer *pBuffer = nullptr;
    BYTE *pOutBuffer = nullptr; // declared up front so the gotos below don't jump over its initialization

    hr = MF.CreateAlignedMemoryBuffer(dwDataLen, MF_16_BYTE_ALIGNMENT, &pBuffer);
    if (FAILED(hr)) {
        DbgLog((LOG_ERROR, 10, L"Unable to allocate MF Media Buffer, hr: 0x%x", hr));
        goto done;
    }

    hr = pBuffer->Lock(&pOutBuffer, NULL, NULL);
    if (FAILED(hr)) {
        SafeRelease(&pBuffer);
        DbgLog((LOG_ERROR, 10, L"Unable to lock MF Media Buffer, hr: 0x%x", hr));
        goto done;
    }

    memcpy(pOutBuffer, pData, dwDataLen);

    pBuffer->Unlock();
    pBuffer->SetCurrentLength(dwDataLen);

done:
    return pBuffer;
}
int copy_sample_to_buffer(IMFSample *sample, void **output, DWORD *len)
{
    IMFMediaBuffer *buffer;
    HRESULT hr = S_OK;
    BYTE *data;

    hr = sample->GetTotalLength(len);
    if (FAILED(hr)) {
        ReportError(L"Failed to get the length of sample buffer", hr);
        return -1;
    }

    hr = sample->ConvertToContiguousBuffer(&buffer); // this HRESULT must be captured, or the check below tests a stale value
    if (FAILED(hr)) {
        ReportError(L"Failed to get sample buffer", hr);
        return -1;
    }

    hr = buffer->Lock(&data, NULL, NULL);
    if (FAILED(hr)) {
        ReportError(L"Failed to lock the buffer", hr);
        SafeRelease(&buffer);
        return -1;
    }

    *output = malloc(*len);
    if (*output == NULL) {
        buffer->Unlock();
        SafeRelease(&buffer);
        return -1;
    }
    memcpy(*output, data, *len);

    // if buffer unlock fails, then... whatever, we have already got the data
    buffer->Unlock();
    SafeRelease(&buffer);
    return 0;
}
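// SafeRelease() is used throughout these snippets but never defined here. The usual
// Media Foundation helper, in the form that appears in Microsoft's MF documentation:
template <class T>
void SafeRelease(T **ppT)
{
    if (*ppT)
    {
        (*ppT)->Release();
        *ppT = NULL;
    }
}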
//-------------------------------------------------------------------
// Write the sample
//
HRESULT VidWriter::writeFrame(BYTE *pData)
{
    HRESULT hr;
    IMFSample *pSample = NULL;
    const DWORD cbBuffer = 4 * m_width * m_height;

    // Without a buffer there is nothing to write; bail out before
    // dereferencing m_pBuffer below.
    if (!m_pBuffer)
        return E_POINTER;

    // Unlock the buffer
    m_pBuffer->Unlock();

    // Set the data length of the buffer
    hr = m_pBuffer->SetCurrentLength(cbBuffer);
    if (FAILED(hr)) goto done;

    // Create a media sample and add the buffer to it
    hr = MFCreateSample(&pSample);
    if (FAILED(hr)) goto done;
    hr = pSample->AddBuffer(m_pBuffer);
    if (FAILED(hr)) goto done;

    // Set the time stamp and the duration
    hr = pSample->SetSampleTime(m_rtStart);
    if (FAILED(hr)) goto done;
    hr = pSample->SetSampleDuration(m_frametime);
    if (FAILED(hr)) goto done;

    // Increment the time stamp
    m_rtStart += m_frametime;

    // Send the sample to the Sink Writer
    hr = m_pWriter->WriteSample(m_streamIndex, pSample);

done:
    SafeRelease(&pSample);
    return hr;
}
DWORD SampleToStaticObj(IMFSample *pSample, char **buff)
{
    if (*buff != NULL)
        throw runtime_error("Buff ptr should be initially null");

    IMFMediaBuffer *ppBuffer = NULL;
    HRESULT hr = pSample->ConvertToContiguousBuffer(&ppBuffer);
    //cout << "ConvertToContiguousBuffer=" << SUCCEEDED(hr) << "\tstride=" << plStride << "\n";

    // Note: a QueryInterface for IMF2DBuffer used to sit here, but the result was
    // never used or released, leaking a reference on every call; it has been dropped.

    DWORD pcbCurrentLength = 0;
    BYTE *ppbBuffer = NULL;
    DWORD pcbMaxLength = 0;

    if (SUCCEEDED(hr))
    {
        hr = ppBuffer->Lock(&ppbBuffer, &pcbMaxLength, &pcbCurrentLength);
        //cout << "pcbMaxLength=" << pcbMaxLength << "\tpcbCurrentLength=" << pcbCurrentLength << "\n";

        if (SUCCEEDED(hr))
        {
            // Return buffer as python format data
            *buff = new char[pcbCurrentLength];
            memcpy(*buff, ppbBuffer, pcbCurrentLength);
            ppBuffer->Unlock();
        }
    }

    if (ppBuffer)
        ppBuffer->Release();
    return pcbCurrentLength;
}
//-------------------------------------------------------------------
// Release the buffer
//
HRESULT VidReader::releaseBuffer()
{
    HRESULT hr = S_OK;
    if (m_pBuffer)
        hr = m_pBuffer->Unlock();
    SafeRelease(&m_pBuffer);
    return hr;
}
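// Several of the snippets in this collection lose the Lock()/Unlock() pairing on
// error paths. None of them define a guard for it, but a small RAII wrapper (a
// sketch, not code from any of the projects above) keeps the pair balanced
// automatically:
class MediaBufferLock
{
public:
    explicit MediaBufferLock(IMFMediaBuffer *pBuffer)
        : m_pBuffer(pBuffer), m_pData(NULL), m_cbMax(0), m_cbCurrent(0)
    {
        m_hr = m_pBuffer->Lock(&m_pData, &m_cbMax, &m_cbCurrent);
    }
    ~MediaBufferLock()
    {
        if (SUCCEEDED(m_hr))
            m_pBuffer->Unlock(); // always balances a successful Lock()
    }
    HRESULT Result() const { return m_hr; }
    BYTE *Data() const { return m_pData; }
    DWORD Length() const { return m_cbCurrent; }
private:
    IMFMediaBuffer *m_pBuffer;
    BYTE *m_pData;
    DWORD m_cbMax;
    DWORD m_cbCurrent;
    HRESULT m_hr;
    MediaBufferLock(const MediaBufferLock&);            // non-copyable
    MediaBufferLock& operator=(const MediaBufferLock&);
};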
HRESULT VideoEncoder::WriteTransitionSample(UINT64 sampleDuration, TransitionBase *pTransition, DWORD streamIndex, LONGLONG *startTime)
{
    HRESULT hr = S_OK;

    IMFMediaBuffer *pMediaBuffer = nullptr;
    BYTE *pFrameBuffer = nullptr;
    IMFSample *pSample = nullptr;
    BYTE *pOutputFrame = nullptr;

    for (DWORD i = 0; i < sampleDuration; i++)
    {
        CheckHR(MFCreateMemoryBuffer(this->m_frameBufferSize, &pMediaBuffer));
        CheckHR(pMediaBuffer->Lock(&pFrameBuffer, nullptr, nullptr));

        float time = (float)i / (float)sampleDuration;
        pOutputFrame = pTransition->GetOutputFrame(time);

        CheckHR(MFCopyImage(pFrameBuffer, this->m_frameStride, pOutputFrame, this->m_frameStride, this->m_frameStride, this->m_frameHeight));
        CheckHR(pMediaBuffer->Unlock());
        CheckHR(pMediaBuffer->SetCurrentLength(this->m_frameBufferSize));

        CheckHR(MFCreateSample(&pSample));
        CheckHR(pSample->AddBuffer(pMediaBuffer));
        CheckHR(pSample->SetSampleTime(*startTime));
        CheckHR(pSample->SetSampleDuration(this->GetFrameDuration()));
        CheckHR(this->m_pSinkWriter->WriteSample(streamIndex, pSample));

        (*startTime) += this->GetFrameDuration();

        // Release the per-frame resources.
        SafeRelease(&pMediaBuffer);
        SafeRelease(&pSample);
        if (pOutputFrame != nullptr)
        {
            delete pOutputFrame;
            pOutputFrame = nullptr;
        }
    }

cleanup:
    if (!SUCCEEDED(hr))
    {
        DWORD error = GetLastError();
        this->m_logFileStream << "Unexpected error: " << error << endl;
    }
    SafeRelease(&pMediaBuffer);
    SafeRelease(&pSample);
    if (pOutputFrame != nullptr)
    {
        delete pOutputFrame;
        pOutputFrame = nullptr;
    }
    return hr;
}
IMFSample *create_sample(void *data, DWORD len, DWORD alignment, LONGLONG duration)
{
    HRESULT hr = S_OK;
    IMFMediaBuffer *buf = NULL;
    IMFSample *sample = NULL;

    hr = MFCreateSample(&sample);
    if (FAILED(hr)) {
        ReportError(L"Unable to allocate a sample", hr);
        return NULL;
    }

    // Yes, the argument for alignment is the actual alignment - 1
    hr = MFCreateAlignedMemoryBuffer(len, alignment - 1, &buf);
    if (FAILED(hr)) {
        ReportError(L"Unable to allocate a memory buffer for sample", hr);
        SafeRelease(&sample); // don't leak the sample on this path
        return NULL;
    }

    if (data) {
        BYTE *buffer;
        // lock the MediaBuffer
        // this is actually not a thread-safe lock
        hr = buf->Lock(&buffer, NULL, NULL);
        if (FAILED(hr)) {
            SafeRelease(&sample);
            SafeRelease(&buf);
            return NULL;
        }
        memcpy(buffer, data, len);
        buf->SetCurrentLength(len);
        buf->Unlock();
    }

    sample->AddBuffer(buf);
    hr = sample->SetSampleDuration(duration);
    SafeRelease(&buf);
    return sample;
}
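// A hypothetical caller for create_sample() above, feeding a raw packet into a
// decoder MFT. The decoder variable and packet arguments are placeholders for
// illustration, not part of the original snippet:
void submit_packet(IMFTransform *decoder, const BYTE *pkt, DWORD pkt_len, LONGLONG dur)
{
    // 16-byte alignment; create_sample() subtracts 1 for MFCreateAlignedMemoryBuffer
    IMFSample *sample = create_sample((void *)pkt, pkt_len, 16, dur);
    if (sample) {
        decoder->ProcessInput(0, sample, 0); // HRESULT handling omitted for brevity
        sample->Release();
    }
}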
// Process the incoming NAL from the queue: wraps it up into a
// IMFMediaSample, sends it to the decoder.
//
// Thread context: decoder thread
bool DecoderMF::DoProcessInputNAL(IBMDStreamingH264NALPacket *nalPacket)
{
    bool ret = false;
    HRESULT hr;
    IMFMediaBuffer *newBuffer = NULL;
    BYTE *newBufferPtr;
    void *nalPacketPtr;
    IMFSample *newSample = NULL; // must be declared and NULL-initialized for the bail path below
    ULONGLONG nalPresentationTime;
    const BYTE nalPrefix[] = {0, 0, 0, 1};

    // Get a pointer to the NAL data
    hr = nalPacket->GetBytes(&nalPacketPtr);
    if (FAILED(hr))
        goto bail;

    // Create the MF media buffer (+ 4 bytes for the NAL prefix (0x00 0x00 0x00 0x01)
    // which MF requires).
    hr = MFCreateMemoryBuffer(nalPacket->GetPayloadSize() + 4, &newBuffer);
    if (FAILED(hr))
        goto bail;

    // Lock the MF media buffer
    hr = newBuffer->Lock(&newBufferPtr, NULL, NULL);
    if (FAILED(hr))
        goto bail;

    // Copy the prefix and the data
    memcpy(newBufferPtr, nalPrefix, 4);
    memcpy(newBufferPtr + 4, nalPacketPtr, nalPacket->GetPayloadSize());

    // Unlock the MF media buffer
    hr = newBuffer->Unlock();
    if (FAILED(hr))
        goto bail;

    // Update the current length of the MF media buffer
    hr = newBuffer->SetCurrentLength(nalPacket->GetPayloadSize() + 4);
    if (FAILED(hr))
        goto bail;

    // We now have an IMFMediaBuffer with the contents of the NAL
    // packet, so construct an IMFSample with the buffer
    hr = MFCreateSample(&newSample);
    if (FAILED(hr))
        goto bail;

    hr = newSample->AddBuffer(newBuffer);
    if (FAILED(hr))
        goto bail;

    // Get the presentation (display) time in 100-nanosecond units
    // TODO: this is pretty meaningless without setting the start time.
    hr = nalPacket->GetDisplayTime(1000 * 1000 * 10, &nalPresentationTime);
    if (FAILED(hr))
        goto bail;

    // Set presentation time on the sample
    hr = newSample->SetSampleTime(nalPresentationTime);
    if (FAILED(hr))
        goto bail;

    // Now pass it to the decoder
    for (;;)
    {
        hr = m_h264Decoder->ProcessInput(0, newSample, 0);
        if (hr == S_OK)
            break;

        if (hr != MF_E_NOTACCEPTING || DoProcessOutput() == false)
            goto bail;
    }

    ret = true;

bail:
    if (newBuffer != NULL)
        newBuffer->Release();

    if (newSample != NULL)
        newSample->Release();

    return ret;
}
SINT SoundSourceMediaFoundation::readSampleFrames(
        SINT numberOfFrames, CSAMPLE* sampleBuffer) {
    SINT numberOfFramesRemaining = numberOfFrames;
    CSAMPLE* pSampleBuffer = sampleBuffer;
    while (numberOfFramesRemaining > 0) {
        SampleBuffer::ReadableChunk readableChunk(
                m_sampleBuffer.readFromHead(
                        frames2samples(numberOfFramesRemaining)));
        DEBUG_ASSERT(readableChunk.size()
                <= frames2samples(numberOfFramesRemaining));
        if (readableChunk.size() > 0) {
            DEBUG_ASSERT(m_currentFrameIndex < getMaxFrameIndex());
            if (sampleBuffer != nullptr) {
                SampleUtil::copy(
                        pSampleBuffer,
                        readableChunk.data(),
                        readableChunk.size());
                pSampleBuffer += readableChunk.size();
            }
            m_currentFrameIndex += samples2frames(readableChunk.size());
            numberOfFramesRemaining -= samples2frames(readableChunk.size());
        }
        if (numberOfFramesRemaining == 0) {
            break; // finished reading
        }
        // No more decoded sample frames available
        DEBUG_ASSERT(m_sampleBuffer.isEmpty());
        if (m_pSourceReader == nullptr) {
            break; // abort if reader is dead
        }
        DWORD dwFlags = 0;
        LONGLONG streamPos = 0;
        IMFSample* pSample = nullptr;
        HRESULT hrReadSample = m_pSourceReader->ReadSample(
                kStreamIndex, // [in] DWORD dwStreamIndex
                0,            // [in] DWORD dwControlFlags
                nullptr,      // [out] DWORD *pdwActualStreamIndex
                &dwFlags,     // [out] DWORD *pdwStreamFlags
                &streamPos,   // [out] LONGLONG *pllTimestamp
                &pSample);    // [out] IMFSample **ppSample
        if (FAILED(hrReadSample)) {
            qWarning() << kLogPreamble
                    << "IMFSourceReader::ReadSample() failed"
                    << hrReadSample
                    << "-> abort decoding";
            DEBUG_ASSERT(pSample == nullptr);
            break; // abort
        }
        if (dwFlags & MF_SOURCE_READERF_ERROR) {
            qWarning() << kLogPreamble
                    << "IMFSourceReader::ReadSample()"
                    << "detected stream errors"
                    << "(MF_SOURCE_READERF_ERROR)"
                    << "-> abort and stop decoding";
            DEBUG_ASSERT(pSample == nullptr);
            safeRelease(&m_pSourceReader); // kill the reader
            break; // abort
        } else if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM) {
            DEBUG_ASSERT(pSample == nullptr);
            break; // finished reading
        } else if (dwFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED) {
            qWarning() << kLogPreamble
                    << "IMFSourceReader::ReadSample()"
                    << "detected that the media type has changed"
                    << "(MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)"
                    << "-> abort decoding";
            DEBUG_ASSERT(pSample == nullptr);
            break; // abort
        }
        DEBUG_ASSERT(pSample != nullptr);
        SINT readerFrameIndex = m_streamUnitConverter.toFrameIndex(streamPos);
        DEBUG_ASSERT(
                (m_currentFrameIndex == getMaxFrameIndex()) || // unknown position after seeking
                (m_currentFrameIndex == readerFrameIndex));
        m_currentFrameIndex = readerFrameIndex;
        DWORD dwSampleBufferCount = 0;
        HRESULT hrGetBufferCount = pSample->GetBufferCount(&dwSampleBufferCount);
        if (FAILED(hrGetBufferCount)) {
            qWarning() << kLogPreamble
                    << "IMFSample::GetBufferCount() failed"
                    << hrGetBufferCount
                    << "-> abort decoding";
            safeRelease(&pSample);
            break; // abort
        }
        DWORD dwSampleTotalLengthInBytes = 0;
        HRESULT hrGetTotalLength = pSample->GetTotalLength(&dwSampleTotalLengthInBytes);
        if (FAILED(hrGetTotalLength)) {
            qWarning() << kLogPreamble
                    << "IMFSample::GetTotalLength() failed"
                    << hrGetTotalLength
                    << "-> abort decoding";
            safeRelease(&pSample);
            break; // abort
        }
        // Enlarge temporary buffer (if necessary)
        DEBUG_ASSERT((dwSampleTotalLengthInBytes % kBytesPerSample) == 0);
        SINT numberOfSamplesToBuffer =
                dwSampleTotalLengthInBytes / kBytesPerSample;
        SINT sampleBufferCapacity = m_sampleBuffer.getCapacity();
        DEBUG_ASSERT(sampleBufferCapacity > 0);
        while (sampleBufferCapacity < numberOfSamplesToBuffer) {
            sampleBufferCapacity *= 2;
        }
        if (m_sampleBuffer.getCapacity() < sampleBufferCapacity) {
            qDebug() << kLogPreamble
                    << "Enlarging sample buffer capacity"
                    << m_sampleBuffer.getCapacity()
                    << "->" << sampleBufferCapacity;
            m_sampleBuffer.resetCapacity(sampleBufferCapacity);
        }
        DWORD dwSampleBufferIndex = 0;
        while (dwSampleBufferIndex < dwSampleBufferCount) {
            IMFMediaBuffer* pMediaBuffer = nullptr;
            HRESULT hrGetBufferByIndex = pSample->GetBufferByIndex(
                    dwSampleBufferIndex, &pMediaBuffer);
            if (FAILED(hrGetBufferByIndex)) {
                qWarning() << kLogPreamble
                        << "IMFSample::GetBufferByIndex() failed"
                        << hrGetBufferByIndex
                        << "-> abort decoding";
                DEBUG_ASSERT(pMediaBuffer == nullptr);
                break; // prematurely exit buffer loop
            }
            CSAMPLE* pLockedSampleBuffer = nullptr;
            DWORD lockedSampleBufferLengthInBytes = 0;
            HRESULT hrLock = pMediaBuffer->Lock(
                    reinterpret_cast<quint8**>(&pLockedSampleBuffer),
                    nullptr,
                    &lockedSampleBufferLengthInBytes);
            if (FAILED(hrLock)) {
                qWarning() << kLogPreamble
                        << "IMFMediaBuffer::Lock() failed"
                        << hrLock
                        << "-> abort decoding";
                safeRelease(&pMediaBuffer);
                break; // prematurely exit buffer loop
            }
            DEBUG_ASSERT((lockedSampleBufferLengthInBytes
                    % sizeof(pLockedSampleBuffer[0])) == 0);
            SINT lockedSampleBufferCount =
                    lockedSampleBufferLengthInBytes / sizeof(pLockedSampleBuffer[0]);
            SINT copySamplesCount = std::min(
                    frames2samples(numberOfFramesRemaining),
                    lockedSampleBufferCount);
            if (copySamplesCount > 0) {
                // Copy samples directly into output buffer if possible
                if (pSampleBuffer != nullptr) {
                    SampleUtil::copy(
                            pSampleBuffer,
                            pLockedSampleBuffer,
                            copySamplesCount);
                    pSampleBuffer += copySamplesCount;
                }
                pLockedSampleBuffer += copySamplesCount;
                lockedSampleBufferCount -= copySamplesCount;
                m_currentFrameIndex += samples2frames(copySamplesCount);
                numberOfFramesRemaining -= samples2frames(copySamplesCount);
            }
            // Buffer the remaining samples
            SampleBuffer::WritableChunk writableChunk(
                    m_sampleBuffer.writeToTail(lockedSampleBufferCount));
            // The required capacity has been calculated in advance (see above)
            DEBUG_ASSERT(writableChunk.size() == lockedSampleBufferCount);
            SampleUtil::copy(
                    writableChunk.data(),
                    pLockedSampleBuffer,
                    writableChunk.size());
            HRESULT hrUnlock = pMediaBuffer->Unlock();
            VERIFY_OR_DEBUG_ASSERT(SUCCEEDED(hrUnlock)) {
                qWarning() << kLogPreamble
                        << "IMFMediaBuffer::Unlock() failed"
                        << hrUnlock;
                // ignore and continue
            }
            safeRelease(&pMediaBuffer);
            ++dwSampleBufferIndex;
        }
        safeRelease(&pSample);
        if (dwSampleBufferIndex < dwSampleBufferCount) {
            // Failed to read data from all buffers -> kill the reader
            qWarning() << kLogPreamble
                    << "Failed to read all buffered samples"
                    << "-> abort and stop decoding";
            safeRelease(&m_pSourceReader);
            break; // abort
        }
    }
    return numberOfFrames - numberOfFramesRemaining;
}
STDMETHODIMP CDecWMV9MFT::ProcessOutput()
{
    HRESULT hr = S_OK;
    DWORD dwStatus = 0;

    MFT_OUTPUT_STREAM_INFO outputInfo = {0};
    m_pMFT->GetOutputStreamInfo(0, &outputInfo);

    IMFMediaBuffer *pMFBuffer = nullptr;
    ASSERT(!(outputInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES));

    MFT_OUTPUT_DATA_BUFFER OutputBuffer = {0};
    if (!(outputInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES)) {
        pMFBuffer = GetBuffer(outputInfo.cbSize);
        if (!pMFBuffer) {
            DbgLog((LOG_TRACE, 10, L"Unable to allocate media buffer"));
            return E_FAIL;
        }

        IMFSample *pSampleOut = nullptr;
        hr = MF.CreateSample(&pSampleOut);
        if (FAILED(hr)) {
            DbgLog((LOG_TRACE, 10, L"Unable to allocate MF sample, hr: 0x%x", hr));
            ReleaseBuffer(pMFBuffer);
            return E_FAIL;
        }

        pSampleOut->AddBuffer(pMFBuffer);
        OutputBuffer.pSample = pSampleOut;
    }
    hr = m_pMFT->ProcessOutput(0, 1, &OutputBuffer, &dwStatus);

    // We don't process events, just release them
    SafeRelease(&OutputBuffer.pEvents);

    // handle stream format changes
    if (hr == MF_E_TRANSFORM_STREAM_CHANGE || OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE) {
        SafeRelease(&OutputBuffer.pSample);
        ReleaseBuffer(pMFBuffer);
        hr = SelectOutputType();
        if (FAILED(hr)) {
            DbgLog((LOG_TRACE, 10, L"-> Failed to handle stream change, hr: %x", hr));
            return E_FAIL;
        }
        // try again with the new type, it should work now!
        return ProcessOutput();
    }

    // the MFT generated no output, discard the sample and return
    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT || OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_NO_SAMPLE) {
        SafeRelease(&OutputBuffer.pSample);
        ReleaseBuffer(pMFBuffer);
        return S_FALSE;
    }

    // unknown error condition
    if (FAILED(hr)) {
        DbgLog((LOG_TRACE, 10, L"-> ProcessOutput failed with hr: %x", hr));
        SafeRelease(&OutputBuffer.pSample);
        ReleaseBuffer(pMFBuffer);
        return E_FAIL;
    }

    LAVFrame *pFrame = nullptr;
    AllocateFrame(&pFrame);

    IMFMediaType *pMTOut = nullptr;
    m_pMFT->GetOutputCurrentType(0, &pMTOut);
    MFGetAttributeSize(pMTOut, MF_MT_FRAME_SIZE, (UINT32 *)&pFrame->width, (UINT32 *)&pFrame->height);
    pFrame->format = m_OutPixFmt;

    AVRational pixel_aspect_ratio = {1, 1};
    MFGetAttributeRatio(pMTOut, MF_MT_PIXEL_ASPECT_RATIO, (UINT32 *)&pixel_aspect_ratio.num, (UINT32 *)&pixel_aspect_ratio.den);

    AVRational display_aspect_ratio = {0, 0};
    av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den, (int64_t)pixel_aspect_ratio.num * pFrame->width, (int64_t)pixel_aspect_ratio.den * pFrame->height, INT_MAX);
    pFrame->aspect_ratio = display_aspect_ratio;

    pFrame->interlaced = MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_Interlaced, FALSE);
    pFrame->repeat = MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_RepeatFirstField, FALSE);

    LAVDeintFieldOrder fo = m_pSettings->GetDeintFieldOrder();
    pFrame->tff = (fo == DeintFieldOrder_Auto) ? !MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_BottomFieldFirst, FALSE) : (fo == DeintFieldOrder_TopFieldFirst);

    if (pFrame->interlaced && !m_bInterlaced)
        m_bInterlaced = TRUE;

    pFrame->interlaced = (pFrame->interlaced || (m_bInterlaced && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive) || m_pSettings->GetDeinterlacingMode() == DeintMode_Force) && !(m_pSettings->GetDeinterlacingMode() == DeintMode_Disable);

    pFrame->ext_format.VideoPrimaries = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_PRIMARIES, MFVideoPrimaries_Unknown);
    pFrame->ext_format.VideoTransferFunction = MFGetAttributeUINT32(pMTOut, MF_MT_TRANSFER_FUNCTION, MFVideoTransFunc_Unknown);
    pFrame->ext_format.VideoTransferMatrix = MFGetAttributeUINT32(pMTOut, MF_MT_YUV_MATRIX, MFVideoTransferMatrix_Unknown);
    pFrame->ext_format.VideoChromaSubsampling = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_CHROMA_SITING, MFVideoChromaSubsampling_Unknown);
    pFrame->ext_format.NominalRange = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_NOMINAL_RANGE, MFNominalRange_Unknown);

    // HACK: don't flag range=limited if it's the only value set, since it's also the implied default; this helps to avoid a reconnect.
    // The MFT always sets this value, even if the bitstream says nothing about it, causing a reconnect on every vc1/wmv3 file.
    if (pFrame->ext_format.value == 0x2000)
        pFrame->ext_format.value = 0;

    // Timestamps
    if (m_bManualReorder) {
        if (!m_timestampQueue.empty()) {
            pFrame->rtStart = m_timestampQueue.front();
            m_timestampQueue.pop();

            LONGLONG llDuration = 0;
            hr = OutputBuffer.pSample->GetSampleDuration(&llDuration);
            if (SUCCEEDED(hr) && llDuration > 0) {
                pFrame->rtStop = pFrame->rtStart + llDuration;
            }
        }
    } else {
        LONGLONG llTimestamp = 0;
        hr = OutputBuffer.pSample->GetSampleTime(&llTimestamp);
        if (SUCCEEDED(hr)) {
            pFrame->rtStart = llTimestamp;

            LONGLONG llDuration = 0;
            hr = OutputBuffer.pSample->GetSampleDuration(&llDuration);
            if (SUCCEEDED(hr) && llDuration > 0) {
                pFrame->rtStop = pFrame->rtStart + llDuration;
            }
        }
    }

    SafeRelease(&pMTOut);

    // Lock memory in the buffer
    BYTE *pBuffer = nullptr;
    pMFBuffer->Lock(&pBuffer, NULL, NULL);

    // Check alignment
    // If not properly aligned, we need to make the data aligned.
    int alignment = (m_OutPixFmt == LAVPixFmt_NV12) ? 16 : 32;
    if ((pFrame->width % alignment) != 0) {
        hr = AllocLAVFrameBuffers(pFrame);
        if (FAILED(hr)) {
            pMFBuffer->Unlock();
            ReleaseBuffer(pMFBuffer);
            SafeRelease(&OutputBuffer.pSample);
            return hr;
        }
        size_t ySize = pFrame->width * pFrame->height;

        memcpy_plane(pFrame->data[0], pBuffer, pFrame->width, pFrame->stride[0], pFrame->height);
        if (m_OutPixFmt == LAVPixFmt_NV12) {
            memcpy_plane(pFrame->data[1], pBuffer + ySize, pFrame->width, pFrame->stride[1], pFrame->height / 2);
        } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
            size_t uvSize = ySize / 4;
            memcpy_plane(pFrame->data[2], pBuffer + ySize, pFrame->width / 2, pFrame->stride[2], pFrame->height / 2);
            memcpy_plane(pFrame->data[1], pBuffer + ySize + uvSize, pFrame->width / 2, pFrame->stride[1], pFrame->height / 2);
        }
        pMFBuffer->Unlock();
        ReleaseBuffer(pMFBuffer);
    } else {
        if (m_OutPixFmt == LAVPixFmt_NV12) {
            pFrame->data[0] = pBuffer;
            pFrame->data[1] = pBuffer + pFrame->width * pFrame->height;
            pFrame->stride[0] = pFrame->stride[1] = pFrame->width;
        } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
            pFrame->data[0] = pBuffer;
            pFrame->data[2] = pBuffer + pFrame->width * pFrame->height;
            pFrame->data[1] = pFrame->data[2] + (pFrame->width / 2) * (pFrame->height / 2);
            pFrame->stride[0] = pFrame->width;
            pFrame->stride[1] = pFrame->stride[2] = pFrame->width / 2;
        }
        // Hand the still-locked buffer to the frame; wmv9_buffer_destruct() above
        // unlocks and releases it when the frame is destroyed.
        pFrame->data[3] = (BYTE *)pMFBuffer;
        pFrame->destruct = wmv9_buffer_destruct;
        pFrame->priv_data = this;
    }
    pFrame->flags |= LAV_FRAME_FLAG_BUFFER_MODIFY;
    Deliver(pFrame);

    SafeRelease(&OutputBuffer.pSample);

    if (OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_INCOMPLETE)
        return ProcessOutput();

    return hr;
}
HRESULT WavStream::CreateAudioSample(IMFSample **ppSample)
{
    HRESULT hr = S_OK;

    IMFMediaBuffer *pBuffer = NULL;
    IMFSample *pSample = NULL;

    DWORD cbBuffer = 0;
    BYTE *pData = NULL;
    LONGLONG duration = 0;

    // Start with one second of data, rounded up to the nearest block.
    cbBuffer = AlignUp<DWORD>(m_pRiff->Format()->nAvgBytesPerSec, m_pRiff->Format()->nBlockAlign);

    // Don't request any more than what's left.
    cbBuffer = min(cbBuffer, m_pRiff->BytesRemainingInChunk());

    // Create the buffer.
    hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer);

    // Get a pointer to the buffer memory.
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->Lock(&pData, NULL, NULL);
    }

    // Fill the buffer.
    if (SUCCEEDED(hr))
    {
        hr = m_pRiff->ReadDataFromChunk(pData, cbBuffer);
    }

    // Unlock the buffer.
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->Unlock();
        pData = NULL;
    }

    // Set the size of the valid data in the buffer.
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->SetCurrentLength(cbBuffer);
    }

    // Create a new sample and add the buffer to it.
    if (SUCCEEDED(hr))
    {
        hr = MFCreateSample(&pSample);
    }

    if (SUCCEEDED(hr))
    {
        hr = pSample->AddBuffer(pBuffer);
    }

    // Set the time stamps, duration, and sample flags.
    if (SUCCEEDED(hr))
    {
        hr = pSample->SetSampleTime(m_rtCurrentPosition);
    }

    if (SUCCEEDED(hr))
    {
        duration = AudioDurationFromBufferSize(m_pRiff->Format(), cbBuffer);
        hr = pSample->SetSampleDuration(duration);
    }

    // Set the discontinuity flag.
    if (SUCCEEDED(hr))
    {
        if (m_discontinuity)
        {
            hr = pSample->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
        }
    }

    if (SUCCEEDED(hr))
    {
        // Update our current position.
        m_rtCurrentPosition += duration;

        // Give the pointer to the caller.
        *ppSample = pSample;
        (*ppSample)->AddRef();
    }

    if (pData && pBuffer)
    {
        hr = pBuffer->Unlock();
    }

    SafeRelease(&pBuffer);
    SafeRelease(&pSample);
    return hr;
}
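// AlignUp<DWORD>() above is not defined in these snippets. A sketch that matches its
// use here (round num up to the nearest multiple of mult, in this case the audio
// block alignment); the original project's helper may differ in details:
template <class T>
T AlignUp(T num, T mult)
{
    // Works for non-negative num and positive mult.
    T tmp = num + mult - 1;
    return tmp - (tmp % mult);
}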
virtual void doGetNextFrame()
{
    if (!_isInitialised)
    {
        _isInitialised = true;
        if (!initialise())
        {
            printf("Video device initialisation failed, stopping.\n");
            return;
        }
    }

    if (!isCurrentlyAwaitingData())
        return;

    DWORD processOutputStatus = 0;
    IMFSample *videoSample = NULL;
    DWORD streamIndex, flags;
    LONGLONG llVideoTimeStamp, llSampleDuration;
    HRESULT mftProcessInput = S_OK;
    HRESULT mftProcessOutput = S_OK;
    MFT_OUTPUT_STREAM_INFO StreamInfo;
    IMFMediaBuffer *pBuffer = NULL;
    IMFSample *mftOutSample = NULL;
    DWORD mftOutFlags;
    bool frameSent = false;

    CHECK_HR(_videoReader->ReadSample(
        MF_SOURCE_READER_FIRST_VIDEO_STREAM,
        0,                  // Flags.
        &streamIndex,       // Receives the actual stream index.
        &flags,             // Receives status flags.
        &llVideoTimeStamp,  // Receives the time stamp.
        &videoSample        // Receives the sample or NULL.
        ), "Error reading video sample.");

    if (videoSample)
    {
        _frameCount++;

        CHECK_HR(videoSample->SetSampleTime(llVideoTimeStamp), "Error setting the video sample time.\n");
        CHECK_HR(videoSample->GetSampleDuration(&llSampleDuration), "Error getting video sample duration.\n");

        // Pass the video sample to the H.264 transform.
        CHECK_HR(_pTransform->ProcessInput(0, videoSample, 0), "The H264 MFT ProcessInput call failed.\n");
        CHECK_HR(_pTransform->GetOutputStatus(&mftOutFlags), "H264 MFT GetOutputStatus failed.\n");

        // MFT_OUTPUT_STATUS_SAMPLE_READY is a bit flag, so test with & rather than ==.
        if (mftOutFlags & MFT_OUTPUT_STATUS_SAMPLE_READY)
        {
            printf("Sample ready.\n");

            CHECK_HR(_pTransform->GetOutputStreamInfo(0, &StreamInfo), "Failed to get output stream info from H264 MFT.\n");
            CHECK_HR(MFCreateSample(&mftOutSample), "Failed to create MF sample.\n");
            CHECK_HR(MFCreateMemoryBuffer(StreamInfo.cbSize, &pBuffer), "Failed to create memory buffer.\n");
            CHECK_HR(mftOutSample->AddBuffer(pBuffer), "Failed to add buffer to sample.\n");

            while (true)
            {
                _outputDataBuffer.dwStreamID = 0;
                _outputDataBuffer.dwStatus = 0;
                _outputDataBuffer.pEvents = NULL;
                _outputDataBuffer.pSample = mftOutSample;

                mftProcessOutput = _pTransform->ProcessOutput(0, 1, &_outputDataBuffer, &processOutputStatus);

                if (mftProcessOutput != MF_E_TRANSFORM_NEED_MORE_INPUT)
                {
                    CHECK_HR(_outputDataBuffer.pSample->SetSampleTime(llVideoTimeStamp), "Error setting MFT sample time.\n");
                    CHECK_HR(_outputDataBuffer.pSample->SetSampleDuration(llSampleDuration), "Error setting MFT sample duration.\n");

                    IMFMediaBuffer *buf = NULL;
                    DWORD bufLength;
                    CHECK_HR(_outputDataBuffer.pSample->ConvertToContiguousBuffer(&buf), "ConvertToContiguousBuffer failed.\n");
                    CHECK_HR(buf->GetCurrentLength(&bufLength), "Get buffer length failed.\n");

                    BYTE *rawBuffer = NULL;

                    auto now = GetTickCount();
                    printf("Writing sample %i, spacing %I64dms, sample time %I64d, sample duration %I64d, sample size %i.\n", _frameCount, now - _lastSendAt, llVideoTimeStamp, llSampleDuration, bufLength);

                    fFrameSize = bufLength;
                    fDurationInMicroseconds = 0;
                    gettimeofday(&fPresentationTime, NULL);

                    buf->Lock(&rawBuffer, NULL, NULL);
                    memmove(fTo, rawBuffer, fFrameSize);
                    FramedSource::afterGetting(this);
                    buf->Unlock();
                    SafeRelease(&buf);

                    frameSent = true;
                    _lastSendAt = GetTickCount();
                }

                SafeRelease(&pBuffer);
                SafeRelease(&mftOutSample);

                break;
            }
        }
        else
        {
            printf("No sample.\n");
        }

        SafeRelease(&videoSample);
    }

    if (!frameSent)
    {
        envir().taskScheduler().triggerEvent(eventTriggerId, this);
    }

    return;

done:
    printf("MediaFoundationH264LiveSource doGetNextFrame failed.\n");
}
unsigned char *BBWin8Game::LoadAudioData( String path,int *length,int *channels,int *format,int *hertz ){

	String url=PathToFilePath( path );

	DXASS( MFStartup( MF_VERSION ) );

	IMFAttributes *attrs;
	DXASS( MFCreateAttributes( &attrs,1 ) );
	DXASS( attrs->SetUINT32( MF_LOW_LATENCY,TRUE ) );

	IMFSourceReader *reader;
	DXASS( MFCreateSourceReaderFromURL( url.ToCString<wchar_t>(),attrs,&reader ) );
	attrs->Release();

	IMFMediaType *mediaType;
	DXASS( MFCreateMediaType( &mediaType ) );
	DXASS( mediaType->SetGUID( MF_MT_MAJOR_TYPE,MFMediaType_Audio ) );
	DXASS( mediaType->SetGUID( MF_MT_SUBTYPE,MFAudioFormat_PCM ) );

	DXASS( reader->SetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,mediaType ) );
	mediaType->Release();

	IMFMediaType *outputMediaType;
	DXASS( reader->GetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,&outputMediaType ) );

	WAVEFORMATEX *wformat;
	uint32 formatByteCount=0;
	DXASS( MFCreateWaveFormatExFromMFMediaType( outputMediaType,&wformat,&formatByteCount ) );

	*channels=wformat->nChannels;
	*format=wformat->wBitsPerSample/8;
	*hertz=wformat->nSamplesPerSec;

	CoTaskMemFree( wformat );
	outputMediaType->Release();

	/*
	PROPVARIANT var;
	DXASS( reader->GetPresentationAttribute( MF_SOURCE_READER_MEDIASOURCE,MF_PD_DURATION,&var ) );
	LONGLONG duration=var.uhVal.QuadPart;
	float64 durationInSeconds=(duration / (float64)(10000 * 1000));
	m_maxStreamLengthInBytes=(uint32)( durationInSeconds * m_waveFormat.nAvgBytesPerSec );
	*/

	std::vector<unsigned char*> bufs;
	std::vector<uint32> lens;
	uint32 len=0;

	for( ;; ){
		uint32 flags=0;
		IMFSample *sample;
		DXASS( reader->ReadSample( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,0,reinterpret_cast<DWORD*>(&flags),0,&sample ) );

		if( flags & MF_SOURCE_READERF_ENDOFSTREAM ) break;
		if( sample==0 ) abort();

		IMFMediaBuffer *mediaBuffer;
		DXASS( sample->ConvertToContiguousBuffer( &mediaBuffer ) );

		uint8 *audioData=0;
		uint32 sampleBufferLength=0;
		DXASS( mediaBuffer->Lock( &audioData,0,reinterpret_cast<DWORD*>( &sampleBufferLength ) ) );

		unsigned char *buf=(unsigned char*)malloc( sampleBufferLength );
		memcpy( buf,audioData,sampleBufferLength );

		bufs.push_back( buf );
		lens.push_back( sampleBufferLength );
		len+=sampleBufferLength;

		DXASS( mediaBuffer->Unlock() );
		mediaBuffer->Release();
		sample->Release();
	}

	reader->Release();

	*length=len/(*channels * *format);

	unsigned char *data=(unsigned char*)malloc( len );
	unsigned char *p=data;
	for( size_t i=0;i<bufs.size();++i ){
		memcpy( p,bufs[i],lens[i] );
		free( bufs[i] );
		p+=lens[i];
	}

	gc_force_sweep=true;

	return data;
}
HRESULT CMediaController::PlayAudio()
{
    if (!m_hWaveOut || !m_pAudioTestSample)
    {
        return E_FAIL;
    }

    // Check if the device is busy
    if (m_fAudioDeviceBusy)
    {
        return E_ACCESSDENIED;
    }

    HRESULT hr = S_OK;
    MMRESULT mmt;

    //WAVEHDR pWaveHeader;
    IMFMediaBuffer *pAudioBuffer = NULL;

    BYTE *pData = NULL;
    DWORD cbData = 0;

    // Get all the buffers in the test sample in one buffer
    CHECK_HR(hr = m_pAudioTestSample->ConvertToContiguousBuffer(&pAudioBuffer));

    // Get a pointer to the buffer.
    CHECK_HR(hr = pAudioBuffer->Lock(&pData, NULL, &cbData));

    // Prepare the header for playing.
    m_WaveHeader.lpData = (LPSTR)pData;
    m_WaveHeader.dwBufferLength = cbData;
    m_WaveHeader.dwBytesRecorded = cbData;
    m_WaveHeader.dwUser = (DWORD_PTR)pAudioBuffer; // Store the buffer pointer as user data. This will be released in the MM_WOM_DONE handler.
    m_WaveHeader.dwLoops = 0;
    m_WaveHeader.dwFlags = 0;

    mmt = waveOutPrepareHeader(m_hWaveOut, &m_WaveHeader, sizeof(WAVEHDR));
    if (mmt == MMSYSERR_NOERROR)
    {
        // Send the sample to the waveOut device.
        mmt = waveOutWrite(m_hWaveOut, &m_WaveHeader, sizeof(WAVEHDR));
    }

    if (mmt != MMSYSERR_NOERROR)
    {
        SAFE_RELEASE(pAudioBuffer);
        CHECK_HR(hr = E_FAIL);
    }
    else
    {
        hr = S_OK;

        // Set the device to busy
        m_fAudioDeviceBusy = TRUE;
    }

    TRACE((L"Playing test audio.\n"));

done:
    // Guard against a null buffer: ConvertToContiguousBuffer may have failed
    // before pAudioBuffer was assigned.
    if (FAILED(hr) && pAudioBuffer)
    {
        pAudioBuffer->Unlock();
        SAFE_RELEASE(pAudioBuffer);
    }
    return hr;
}
int camera_capture(camera_t *cam, unsigned char *outdata)
{
    camera_internal_t *camera = (camera_internal_t *)cam;
    if (!camera->reader)
        return 1; // error should be zero...

    HRESULT hr = S_OK;
    IMFSample *pSample = NULL;
    DWORD streamIndex = 0, flags = 0;
    LONGLONG llTimeStamp = 0;

    // skip one
    hr = camera->reader->ReadSample(
        (DWORD)MF_SOURCE_READER_ANY_STREAM, // Stream index.
        0,                                  // Flags.
        &streamIndex,                       // Receives the actual stream index.
        &flags,                             // Receives status flags.
        &llTimeStamp,                       // Receives the time stamp.
        &pSample                            // Receives the sample or NULL.
    );
    SafeRelease(&pSample);

    hr = camera->reader->ReadSample(
        (DWORD)MF_SOURCE_READER_ANY_STREAM, // Stream index.
        0,                                  // Flags.
        &streamIndex,                       // Receives the actual stream index.
        &flags,                             // Receives status flags.
        &llTimeStamp,                       // Receives the time stamp.
        &pSample                            // Receives the sample or NULL.
    );

    // ReadSample can legitimately return no sample (e.g. on a stream tick);
    // bail out instead of dereferencing a null pointer.
    if (FAILED(hr) || !pSample)
        return 1;

    IMFMediaBuffer *mediaBuffer = NULL;
    BYTE *pData = NULL;
    DWORD pLength = 0;
    pSample->ConvertToContiguousBuffer(&mediaBuffer);
    hr = mediaBuffer->Lock(&pData, NULL, &pLength);
    //console_printf("camera: length: %d, WxH: %d, 2WxH: %d, 3WxH: %d\n", pLength, WIDTH * HEIGHT, 2 * WIDTH * HEIGHT, 3 * WIDTH * HEIGHT);

    /*unsigned char *rgbData = new unsigned char[WIDTH * HEIGHT * (24 / 8)];
    unsigned char *yuy2Ptr = pData, *rgbPtr = rgbData;
    for (UINT32 j = 0; j < HEIGHT; j++)
    {
        for (UINT32 i = 0; i < WIDTH / 2; ++i)
        {
            int y0 = yuy2Ptr[0];
            int u0 = yuy2Ptr[1];
            int y1 = yuy2Ptr[2];
            int v0 = yuy2Ptr[3];
            yuy2Ptr += 4;

            int c = y0 - 16;
            int d = u0 - 128;
            int e = v0 - 128;
            rgbPtr[0] = clip((298 * c + 516 * d + 128) >> 8);           // blue
            rgbPtr[1] = clip((298 * c - 100 * d - 208 * e + 128) >> 8); // green
            rgbPtr[2] = clip((298 * c + 409 * e + 128) >> 8);           // red

            c = y1 - 16;
            rgbPtr[3] = clip((298 * c + 516 * d + 128) >> 8);           // blue
            rgbPtr[4] = clip((298 * c - 100 * d - 208 * e + 128) >> 8); // green
            rgbPtr[5] = clip((298 * c + 409 * e + 128) >> 8);           // red
            rgbPtr += 6;
        }
    }*/

    // Flip the bottom-up RGB24 frame vertically and swap BGR to RGB.
    unsigned char *in = pData + camera->size.width * (camera->size.height - 1) * 3, *out = outdata;
    for (UINT32 j = 0; j < camera->size.height; j++)
    {
        for (UINT32 i = 0; i < camera->size.width; ++i)
        {
            out[2] = in[0];
            out[1] = in[1];
            out[0] = in[2];
            in += 3;
            out += 3;
        }
        in -= 2 * camera->size.width * 3;
    }

    mediaBuffer->Unlock();
    SafeRelease(&pSample);
    SafeRelease(&mediaBuffer);
    return 1;
}
void VideoCompressor::AudioSample32Bit2Channel(float *Samples, UINT FrameCount, UINT64 CaptureStartTime)
{
    //double TimeInSeconds = _Clock->Elapsed();
    const UINT SamplesPerSecond = 44100;
    const UINT ChannelCount = 2;
    const UINT SampleCount = FrameCount * ChannelCount;
    const UINT BitsPerSample = 16;
    const UINT BufferLength = BitsPerSample / 8 * ChannelCount * FrameCount;
    const LONGLONG SampleDuration = LONGLONG(FrameCount) * LONGLONG(10000000) / SamplesPerSecond; // in hns

    //
    // Write some data
    //
    IMFSample *spSample;
    IMFMediaBuffer *spBuffer;
    BYTE *pbBuffer = NULL;

    //
    // Create a media sample
    //
    HRESULT hr = MFCreateSample( &spSample );
    hr = spSample->SetSampleDuration( SampleDuration );
    //hr = spSample->SetSampleTime( LONGLONG( TimeInSeconds * 10000000.0 ) );

    //CaptureStartTime = 10,000,000 * t / f;
    //t = CaptureStartTime * f / 10,000,000
    LONGLONG FileStartCounter = _Clock->StartTime();
    LONGLONG CaptureStartCounter = CaptureStartTime * _Clock->TicksPerSecond() / LONGLONG(10000000);
    hr = spSample->SetSampleTime( ( CaptureStartCounter - FileStartCounter ) * LONGLONG(10000000) / _Clock->TicksPerSecond() );

    //
    // Add a media buffer and fill it with the converted 16-bit samples
    //
    hr = MFCreateMemoryBuffer( BufferLength, &spBuffer );
    hr = spBuffer->SetCurrentLength( BufferLength );
    hr = spSample->AddBuffer( spBuffer );
    hr = spBuffer->Lock( &pbBuffer, NULL, NULL );

    __int16 *OutputAudioBuffer = (__int16 *)pbBuffer;
    for (UINT SampleIndex = 0; SampleIndex < SampleCount; SampleIndex++)
    {
        //
        // Floats are in the range -1 to 1
        //
        int v = int(Samples[SampleIndex] * 32768.0f);
        if (v > 32767) v = 32767; // an input of exactly +1.0f would otherwise wrap to -32768
        OutputAudioBuffer[SampleIndex] = (__int16)v;
    }

    hr = spBuffer->Unlock();

    //
    // Write the media sample
    //
    hr = _Writer->WriteSample( 1, spSample );
    PersistentAssert(SUCCEEDED(hr), "WriteSample failed");

    spSample->Release();
    spBuffer->Release();
}
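// WriteSample(1, ...) above assumes an audio stream was added to the sink writer as
// stream index 1; that setup is not shown in the snippet. A sketch of what it could
// look like for the 44.1 kHz / 16-bit / stereo PCM produced above. Note AddStream()
// takes the target (output) type; for a compressed target such as AAC you would pass
// the encoded type to AddStream() and this PCM type to SetInputMediaType() instead:
HRESULT AddPcmAudioStream(IMFSinkWriter *pWriter, DWORD *pStreamIndex)
{
    IMFMediaType *pType = NULL;
    HRESULT hr = MFCreateMediaType(&pType);
    if (FAILED(hr)) return hr;

    pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
    pType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
    pType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, 44100);
    pType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, 2);
    pType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, 16);
    pType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, 4);            // channels * bytes per sample
    pType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, 176400);  // 44100 * block alignment

    hr = pWriter->AddStream(pType, pStreamIndex);
    pType->Release();
    return hr;
}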