// Dispatch one AMQP frame into the listener state machine and translate it
// into a processor Result.  Unknown frame types are logged and yield a
// default-constructed Result.
AmqpProcessor::Result AmqpProcessor::process_frame(const amqp_frame_t& frame)
{
    if (is_deliver(frame)) {
        // A new delivery starts with this frame.
        listener_->process_event(Deliver());
        return delivery_decoded(frame);
    }

    if (is_header(frame)) {
        // Content header frame: carries the total body size.
        listener_->process_event(Header(body_size(frame)));
        return properties(frame);
    }

    if (is_body(frame)) {
        // Body fragment: feed its length to the state machine, then ask the
        // listener state whether this fragment completes the delivery.
        const amqp_bytes_t chunk = get_body_fragment(frame);
        listener_->process_event(Body(chunk.len));
        AmqpListener::Delivery& state = listener_->get_state<AmqpListener::Delivery&>();
        const bool is_last = state.is_flag_active<Delivered>();
        return std::make_pair(chunk, is_last);
    }

    std::cout << "unprocessed frame: " << frame.frame_type << std::endl;
    return Result();
}
// Copy a decoded hardware (D3D11) frame back to system memory via
// av_hwframe_transfer_data() and deliver the resulting software frame.
HRESULT CDecD3D11::DeliverD3D11Readback(LAVFrame *pFrame)
{
  AVFrame *src = (AVFrame *)pFrame->priv_data;
  AVFrame *dst = av_frame_alloc();
  // fix: av_frame_alloc() can return nullptr on OOM; the old code passed it
  // straight into av_hwframe_transfer_data() and would have crashed.
  if (!dst)
  {
    ReleaseFrame(&pFrame);
    return E_OUTOFMEMORY;
  }

  int ret = av_hwframe_transfer_data(dst, src, 0);
  if (ret < 0)
  {
    ReleaseFrame(&pFrame);
    av_frame_free(&dst);
    return E_FAIL;
  }

  // free the source (hardware) frame
  av_frame_free(&src);

  // and store the dst frame in the LAVFrame
  pFrame->priv_data = dst;
  GetPixelFormat(&pFrame->format, &pFrame->bpp);

  // Sanity: the transferred format must match what GetPixelFormat negotiated.
  ASSERT((dst->format == AV_PIX_FMT_NV12 && pFrame->format == LAVPixFmt_NV12) ||
         (dst->format == AV_PIX_FMT_P010 && pFrame->format == LAVPixFmt_P016));

  for (int i = 0; i < 4; i++)
  {
    pFrame->data[i]   = dst->data[i];
    pFrame->stride[i] = dst->linesize[i];
  }

  return Deliver(pFrame);
}
// Deliver a decoded MVC view pair (base + dependent view) downstream as one
// 3D-flagged LAVFrame.  Both surfaces are synced with the MSDK session first.
HRESULT CDecMSDKMVC::DeliverOutput(MVCBuffer * pBaseView, MVCBuffer * pExtraView)
{
  mfxStatus sts = MFX_ERR_NONE;

  ASSERT(pBaseView->surface.Info.FrameId.ViewId == 0 && pExtraView->surface.Info.FrameId.ViewId > 0);
  ASSERT(pBaseView->surface.Data.FrameOrder == pExtraView->surface.Data.FrameOrder);

  // Sync base view
  // NOTE(review): a terminal error status from SyncOperation is not handled
  // here (only MFX_WRN_IN_EXECUTION retries) — preserved from original.
  do {
    sts = MFXVideoCORE_SyncOperation(m_mfxSession, pBaseView->sync, 1000);
  } while (sts == MFX_WRN_IN_EXECUTION);
  pBaseView->sync = nullptr;

  // Sync extra view
  do {
    sts = MFXVideoCORE_SyncOperation(m_mfxSession, pExtraView->sync, 1000);
  } while (sts == MFX_WRN_IN_EXECUTION);
  pExtraView->sync = nullptr;

  LAVFrame *pFrame = nullptr;
  AllocateFrame(&pFrame);
  // fix: AllocateFrame() result was unchecked; on allocation failure the old
  // code fell straight into a nullptr dereference.
  if (!pFrame)
    return E_OUTOFMEMORY;

  pFrame->width  = pBaseView->surface.Info.CropW;
  pFrame->height = pBaseView->surface.Info.CropH;

  // NV12 planes of the base view; the dependent view goes into stereo[].
  pFrame->data[0]   = pBaseView->surface.Data.Y;
  pFrame->data[1]   = pBaseView->surface.Data.UV;
  pFrame->stereo[0] = pExtraView->surface.Data.Y;
  pFrame->stereo[1] = pExtraView->surface.Data.UV;
  // Smuggle the MVC buffer pointers along so the destructor can return them
  // (presumably handled by msdk_buffer_destruct below — confirm).
  pFrame->data[2]   = (uint8_t *)pBaseView;
  pFrame->data[3]   = (uint8_t *)pExtraView;
  pFrame->stride[0] = pBaseView->surface.Data.PitchLow;
  pFrame->stride[1] = pBaseView->surface.Data.PitchLow;

  pFrame->format = LAVPixFmt_NV12;
  pFrame->bpp    = 8;
  pFrame->flags |= LAV_FRAME_FLAG_MVC;

  // Discard timestamps the decoder invented itself.
  if (!(pBaseView->surface.Data.DataFlag & MFX_FRAMEDATA_ORIGINAL_TIMESTAMP))
    pBaseView->surface.Data.TimeStamp = MFX_TIMESTAMP_UNKNOWN;

  if (pBaseView->surface.Data.TimeStamp != MFX_TIMESTAMP_UNKNOWN) {
    pFrame->rtStart = pBaseView->surface.Data.TimeStamp;
    pFrame->rtStart -= TIMESTAMP_OFFSET;
  }
  else {
    pFrame->rtStart = AV_NOPTS_VALUE;
  }

  // Reduce pixel AR * frame size into a display aspect ratio.
  int64_t num = (int64_t)pBaseView->surface.Info.AspectRatioW * pFrame->width;
  int64_t den = (int64_t)pBaseView->surface.Info.AspectRatioH * pFrame->height;
  av_reduce(&pFrame->aspect_ratio.num, &pFrame->aspect_ratio.den, num, den, INT_MAX);

  pFrame->destruct  = msdk_buffer_destruct;
  pFrame->priv_data = this;

  GetOffsetSideData(pFrame, pBaseView->surface.Data.TimeStamp);

  return Deliver(pFrame);
}
// Copy an externally supplied buffer into a fresh media sample and push it
// downstream.  Timestamps arrive in ns (-1 = none) and are converted to the
// DirectShow 100ns unit.  Always reports S_OK: delivery is best-effort here.
STDMETHODIMP FakeOutputPin::PushBuffer(byte *buffer, __int64 start, __int64 stop, unsigned int size, bool discont)
{
  IMediaSample *pSample = NULL;

  if (start != -1) {
    start /= 100;
    stop /= 100;
  }

  HRESULT hres = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
  if (hres == S_OK && pSample) {
    BYTE *sample_buffer;
    pSample->GetPointer(&sample_buffer);
    if (sample_buffer) {
      // fix: never copy more than the media sample can hold — the old code
      // trusted `size` blindly and could overflow the allocator's buffer.
      unsigned int copy_size = size;
      long sample_size = pSample->GetSize();
      if (sample_size >= 0 && copy_size > (unsigned int)sample_size)
        copy_size = (unsigned int)sample_size;
      memcpy(sample_buffer, buffer, copy_size);
      pSample->SetActualDataLength(copy_size);
    }
    pSample->SetDiscontinuity(discont);
    pSample->SetSyncPoint(TRUE);
    pSample->SetPreroll(FALSE);
    if (start != -1)
      pSample->SetTime(&start, &stop);
    hres = Deliver(pSample);
    pSample->Release();
  }
  // Deliberately swallow delivery failures (preserved original behavior).
  return S_OK;
}
// Copy up to dwBytes of TS data into a delivery buffer and send it downstream.
// Reports how many bytes were actually consumed through *dwUsedBytes.
HRESULT CTSParserOutputPin::SendOut( DWORD nGroupFlag, const BYTE* pData, DWORD dwBytes, DWORD* dwUsedBytes )
{
    *dwUsedBytes = 0;

    IMediaSample* pSample = NULL;
    HRESULT hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
    if (FAILED(hr))
        return E_FAIL;

    BYTE* pSampleData;
    pSample->GetPointer(&pSampleData);

    // Clamp to the sample's capacity; hitting this clamp is unexpected.
    DWORD cbData = pSample->GetSize();
    if (cbData < dwBytes)
    {
        dwBytes = cbData;
        ASSERT( 0 );
    }

    memcpy(pSampleData, pData, dwBytes);
    pSample->SetActualDataLength(dwBytes);
    *dwUsedBytes = dwBytes;

    hr = Deliver(pSample);
    pSample->Release();

    return S_OK;
}
// Receive one inbound packet for the ICMP protocol instance and hand it to
// the upper-layer delivery path.  The for(;;) is not a loop: every path
// `break`s out on the first pass so that error exits share the packet.Free()
// cleanup at the bottom.
void CProtocolICMP::Process(RMBufChain &aPacket, CProtocolBase * /*aSourceProtocol*/)
{
    RMBufRecvPacket packet;
    packet.Assign(aPacket);
    for (;;)    // ONLY FOR NEAT BREAK EXITS -- NOT A LOOP
    {
        RMBufRecvInfo *const info = packet.Unpack();
        if (info == NULL)
            break;  // malformed packet, nothing to deliver
        LOG(Log::Printf(_L("\t%S Process(%d bytes)"), &ProtocolName(), info->iLength));
        if (info->iIcmp)
        {
            // This packet is actually an ICMP error report to an ICMP sent by
            // this host. The iOffset points to "inner" ICMP, but tradionally
            // ICMP applications expect the outer RAW ICMP's, thus, change
            // the iOffset and other fields accordingly
            info->iOffset = info->iOffsetIp - 8 /* 8 = ICMP header size*/;
            info->iProtocol = info->iIcmp;
            // *NOTE*
            // The following assumes that outer IP tunnels have been removed
            // from the packet. Then, it can be assumed that the IP header
            // related to ICMP error is at the beginning of the packet (offset=0).
            info->iOffsetIp = 0;
            const MInterface *const mi = NetworkService()->Interfacer()->Interface(info->iInterfaceIndex);
            if (mi == NULL)
                break;  // interface vanished; drop the packet
            TIpHeader *const ip = ((RMBufPacketPeek &)packet).GetIpHeader();
            if (ip == NULL)
                break;  // cannot read the embedded IP header; drop
            // Rewrite src/dst in the info block from the embedded IP header,
            // handling both IPv4 (as V4-mapped) and IPv6 forms.
            TInetAddr &src = TInetAddr::Cast(info->iSrcAddr);
            TInetAddr &dst = TInetAddr::Cast(info->iDstAddr);
            if (ip->ip4.Version() == 4)
            {
                src.SetV4MappedAddress(ip->ip4.SrcAddr());
                dst.SetV4MappedAddress(ip->ip4.DstAddr());
            }
            else
            {
                src.SetAddress(ip->ip6.SrcAddr());
                dst.SetAddress(ip->ip6.DstAddr());
            }
            // Attach interface-relative scope ids to both addresses.
            const TIp6Addr &src_ip = src.Ip6Address();
            const TIp6Addr &dst_ip = dst.Ip6Address();
            src.SetScope(mi->Scope((TScopeType)(src_ip.Scope()-1)));
            dst.SetScope(mi->Scope((TScopeType)(dst_ip.Scope()-1)));
        }
        // Check "payload length", and don't even try delivering truncated ICMP packets.
        if (info->iLength - info->iOffset < TInet6HeaderICMP::MinHeaderLength())
            break;
        Deliver(packet);
        // *ALWAYS BREAK OUT FROM LOOP*
        break;
    }
    packet.Free();
}
// Forward a message to the group member identified by `uuid`.
// The message is silently dropped when no such user is found.
void UsersGroup::Deliver(NetworkDefs::TUiid uuid, std::shared_ptr<NetworkUtils::NetworkMessage>& message)
{
    if (auto recipient = FindUser(uuid))
        recipient->Deliver(message);
}
// Route this message to its strongly-typed handler when the receiver
// implements TestClientMessageHandler; otherwise use the generic path.
virtual NPT_Result Dispatch(NPT_MessageHandler* handler) {
    if (TestClientMessageHandler* typed =
            dynamic_cast<TestClientMessageHandler*>(handler)) {
        return Deliver(typed);
    }
    return DefaultDeliver(handler);
}
void MessageManager::Update() { while (!m_systemMessages.empty() && (*m_systemMessages.begin())->GetDeliveryTime() <= BaseApp::GetBaseApp()->GetTick()) { Message *m = *m_systemMessages.begin(); m_systemMessages.pop_front(); Deliver(m); delete m; } while (!m_gameMessages.empty() && (*m_gameMessages.begin())->GetDeliveryTime() <= BaseApp::GetBaseApp()->GetGameTick()) { Message *m = *m_gameMessages.begin(); m_gameMessages.pop_front(); Deliver(m); delete m; } }
// Streaming-thread loop: waits on buffLockEvent for queued audio data, fills
// media samples via FillBuffer() and delivers them downstream until CMD_STOP.
// Returns S_FALSE on a normal stop, an error HRESULT on abort.
HRESULT CPushAudioPin::DoBufferProcessingLoop(void)
{
    Command com;
    OnThreadStartPlay();
    do
    {
        while (!CheckRequest(&com))
        {
            HRESULT hr;
            CAutoLock lock(&m_cSharedState);
            // Wait (max 500ms) for the producer to signal data availability.
            if( WaitForSingleObject( this->buffLockEvent, 500 ) == WAIT_OBJECT_0 ){
                if( this->buffData.size() != 0 ){
                    // Re-signal immediately so the producer isn't blocked
                    // while we fetch and fill the sample.
                    if( this->buffLockEvent != NULL ){
                        SetEvent(this->buffLockEvent);
                    }
                    IMediaSample *pSample;
                    hr = GetDeliveryBuffer(&pSample,NULL,NULL,0);
                    if (FAILED(hr)) {
                        // Allocator may be decommitted; retry shortly.
                        Sleep(1);
                        continue;
                    }
                    hr = FillBuffer(pSample);
                    if (hr == S_OK) {
                        hr = Deliver(pSample);
                        pSample->Release();
                        // Downstream returns non-S_OK to ask us to stop.
                        if(hr != S_OK)
                        {
                            return S_OK;
                        }
                    } else if (hr == S_FALSE) {
                        // FillBuffer signals end of stream.
                        pSample->Release();
                        DeliverEndOfStream();
                        return S_OK;
                    } else {
                        // FillBuffer error: abort the graph.
                        pSample->Release();
                        DeliverEndOfStream();
                        m_pFilter->NotifyEvent(EC_ERRORABORT, hr, 0);
                        return hr;
                    }
                }else{
                    // Event fired but the queue is empty: hand the event back
                    // and idle briefly.
                    if( this->buffLockEvent != NULL ){
                        SetEvent(this->buffLockEvent);
                    }
                    Sleep(10);
                }
            }
        }
        // For all commands sent to us there must be a Reply call!
        if (com == CMD_RUN || com == CMD_PAUSE) {
            Reply(NOERROR);
        } else if (com != CMD_STOP) {
            Reply((DWORD) E_UNEXPECTED);
        }
    } while (com != CMD_STOP);
    return S_FALSE;
}
// Hand a decoded DXVA2 frame downstream.  In native mode the surface is
// delivered directly; otherwise the frame is copied to system memory first.
HRESULT CDecDXVA2::DeliverDXVA2Frame(LAVFrame *pFrame)
{
  if (!m_bNative) {
    // Copy-back mode: only deliver when the copy succeeded.
    if (CopyFrame(pFrame))
      Deliver(pFrame);
    else
      ReleaseFrame(&pFrame);
    return S_OK;
  }

  // Native mode needs both the sample (data[0]) and the surface (data[3]).
  if (!pFrame->data[0] || !pFrame->data[3]) {
    DbgLog((LOG_ERROR, 10, L"No sample or surface for DXVA2 frame?!?!"));
    ReleaseFrame(&pFrame);
    return S_FALSE;
  }

  pFrame->format = LAVPixFmt_DXVA2;
  Deliver(pFrame);
  return S_OK;
}
// Copy a raw video frame into a delivery buffer and push it downstream,
// compensating for a stride difference between the source rectangle and the
// connected media type's width.
HRESULT OutputPin::Push(void *buf, long size)
{
    HRESULT hr;
    IMediaSample *pSample;
    VIDEOINFOHEADER *vi;
    AM_MEDIA_TYPE *pmt;
    BYTE *dst_buf;

    /**
     * Hold the critical section here as the pin might get disconnected
     * during the Deliver() method call.
     */
    m_pLock->Lock();

    hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
    if (FAILED(hr))
        goto on_error;

    // Pick up a dynamic format change if the allocator attached one.
    pSample->GetMediaType(&pmt);
    if (pmt) {
        mediaType.Set(*pmt);
        bufSize = pmt->lSampleSize;
        // fix: GetMediaType() hands out an allocated AM_MEDIA_TYPE owned by
        // the caller; the old code leaked it on every format change.
        DeleteMediaType(pmt);
    }

    pSample->GetPointer(&dst_buf);

    vi = (VIDEOINFOHEADER *)mediaType.pbFormat;
    if (vi->rcSource.right == vi->bmiHeader.biWidth) {
        // Source and destination strides match: single flat copy.
        assert(pSample->GetSize() >= size);
        memcpy(dst_buf, buf, size);
    } else {
        // Stride mismatch: copy row by row, advancing the destination by the
        // media type's stride and the source by the source-rect stride.
        unsigned i, bpp;
        unsigned dststride, srcstride;
        BYTE *src_buf = (BYTE *)buf;

        bpp = size / abs(vi->bmiHeader.biHeight) / vi->rcSource.right;
        dststride = vi->bmiHeader.biWidth * bpp;
        srcstride = vi->rcSource.right * bpp;
        for (i = abs(vi->bmiHeader.biHeight); i > 0; i--) {
            memcpy(dst_buf, src_buf, srcstride);
            dst_buf += dststride;
            src_buf += srcstride;
        }
    }
    pSample->SetActualDataLength(size);

    hr = Deliver(pSample);
    pSample->Release();

on_error:
    m_pLock->Unlock();
    return hr;
}
// Wrap a GStreamer buffer in a DirectShow media sample and push it downstream.
// GStreamer timestamps (ns) are converted to DirectShow 100ns units.
HRESULT COutputPin::DeliverSample(GstBuffer *pBuffer)
{
    HRESULT hr = m_pAlloc->SetGstBuffer(pBuffer);
    if (FAILED(hr))
        return hr;

    IMediaSample *pSample = NULL;
    hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
    if (FAILED(hr))
        return hr;

    // These samples carry no media (stream) time.
    pSample->SetMediaTime(NULL, NULL);

    // Presentation time, when the buffer has one.
    if (GST_BUFFER_TIMESTAMP_IS_VALID(pBuffer))
    {
        REFERENCE_TIME start = GST_BUFFER_TIMESTAMP(pBuffer) / 100;
        REFERENCE_TIME stop;
        if (GST_BUFFER_DURATION_IS_VALID(pBuffer))
            stop = (GST_BUFFER_TIMESTAMP(pBuffer) + GST_BUFFER_DURATION(pBuffer)) / 100;
        else
            stop = start + 1;

        if (stop <= start) // guard against zero or negative spans
            stop = start + 1;

        pSample->SetTime(&start, &stop);
    }
    else
    {
        pSample->SetTime(NULL, NULL);
    }

    if (GST_BUFFER_IS_DISCONT(pBuffer))
        pSample->SetDiscontinuity(TRUE);

    hr = Deliver(pSample);
    pSample->Release();

    return FAILED(hr) ? hr : S_OK;
}
BLT_DecoderServer_PropertyListenerWrapper_OnPropertyChanged
ATX_END_INTERFACE_MAP

/*----------------------------------------------------------------------
|   BLT_DecoderServer_Message::Dispatch
+---------------------------------------------------------------------*/
// Deliver this message through the typed handler interface when the receiver
// implements BLT_DecoderServer_MessageHandler; otherwise fall back to the
// base-class default delivery.
NPT_Result
BLT_DecoderServer_Message::Dispatch(NPT_MessageHandler* handler)
{
    BLT_DecoderServer_MessageHandler* specific =
        NPT_DYNAMIC_CAST(BLT_DecoderServer_MessageHandler, handler);
    if (specific) {
        // Typed delivery path.
        return Deliver(specific);
    } else {
        // Generic handler: let the framework dispatch it.
        return DefaultDeliver(handler);
    }
}
// "Direct" readback: instead of transferring the whole frame to system memory
// up front, copy the decoded texture slice into a CPU-readable staging
// texture and hand the frame out with lock/unlock callbacks that map it on
// demand.  The staging texture is created lazily on first use and shared
// (ref-counted) with the outgoing frame.
HRESULT CDecD3D11::DeliverD3D11ReadbackDirect(LAVFrame *pFrame)
{
  AVD3D11VADeviceContext *pDeviceContext = (AVD3D11VADeviceContext *)((AVHWDeviceContext *)m_pDevCtx->data)->hwctx;
  AVFrame *src = (AVFrame *)pFrame->priv_data;

  // Lazily create the staging copy target, cloned from the source texture
  // description but as a single-slice CPU-readable staging resource.
  if (m_pD3D11StagingTexture == nullptr)
  {
    D3D11_TEXTURE2D_DESC texDesc = { 0 };
    ((ID3D11Texture2D *)src->data[0])->GetDesc(&texDesc);
    texDesc.ArraySize = 1;
    texDesc.Usage = D3D11_USAGE_STAGING;
    texDesc.BindFlags = 0;
    texDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;

    HRESULT hr = pDeviceContext->device->CreateTexture2D(&texDesc, nullptr, &m_pD3D11StagingTexture);
    if (FAILED(hr))
    {
      ReleaseFrame(&pFrame);
      return E_FAIL;
    }
  }

  // Serialize device-context access while copying the decoded array slice
  // (index in src->data[1]) into the staging texture.
  pDeviceContext->lock(pDeviceContext->lock_ctx);
  pDeviceContext->device_context->CopySubresourceRegion(m_pD3D11StagingTexture, 0, 0, 0, 0, (ID3D11Texture2D *)src->data[0], (UINT)(intptr_t)src->data[1], nullptr);
  pDeviceContext->unlock(pDeviceContext->lock_ctx);

  // The GPU-side frame is no longer needed once the copy is queued.
  av_frame_free(&src);

  // Private payload keeps the device context and staging texture alive until
  // the frame is destructed (d3d11_direct_free).
  D3D11DirectPrivate *c = new D3D11DirectPrivate;
  c->pDeviceContex = av_buffer_ref(m_pDevCtx);
  c->pStagingTexture = m_pD3D11StagingTexture;
  m_pD3D11StagingTexture->AddRef();

  pFrame->priv_data = c;
  pFrame->destruct = d3d11_direct_free;

  GetPixelFormat(&pFrame->format, &pFrame->bpp);

  // Mark the frame as "direct": consumers map/unmap via the callbacks below.
  pFrame->direct = true;
  pFrame->direct_lock = d3d11_direct_lock;
  pFrame->direct_unlock = d3d11_direct_unlock;

  return Deliver(pFrame);
}
// Deliver one AMR data packet downstream as a DirectShow media sample,
// propagating sync point, discontinuity and (optional) timestamps.
HRESULT CAMROutputPin::DeliverDataPacketAMR(DataPacketAMR &packet)
{
    IMediaSample *sample;
    HRESULT hr = GetDeliveryBuffer(&sample, NULL, NULL, 0);
    if (FAILED(hr)) {
        return E_FAIL;
    }

    // we should have enough space in there
    long lsize = sample->GetSize();
    ASSERT(lsize >= packet.size);
    // fix: the old code only ASSERTed and then memcpy'd anyway, overflowing
    // the allocator's buffer in release builds; fail the delivery instead.
    if (lsize < packet.size) {
        sample->Release();
        return E_FAIL;
    }

    BYTE *buf;
    sample->GetPointer(&buf);
    memcpy(buf, packet.buf, packet.size);
    sample->SetActualDataLength(packet.size);

    // sync point, discontinuity ?
    sample->SetSyncPoint(packet.sync_point ? TRUE : FALSE);
    sample->SetDiscontinuity(packet.discontinuity ? TRUE : FALSE);

    // do we have a time stamp ?
    if (packet.has_time) {
        sample->SetTime(&packet.rtStart, &packet.rtStop);
    }

    // deliver it (original comment: "dorucime", Czech for "we deliver")
    hr = Deliver(sample);
    sample->Release();
    return hr;
}
//---------------------------------------------------------------------------- //! @brief サンプルをダウンストリームへ送り続ける処理 //! @return エラーコード //---------------------------------------------------------------------------- HRESULT CDemuxOutputPin::DoBufferProcessingLoop(void) { Command com; HRESULT hr; OnThreadStartPlay(); do { while( !CheckRequest(&com) ) { // Virtual function user will override. IMediaSample *pSample; hr = RetrieveBuffer(&pSample); if( hr == S_OK ) { hr = Deliver(pSample); pSample->Release(); if(hr != S_OK) { DbgLog((LOG_TRACE, 2, TEXT("Deliver() returned %08x; stopping"), hr)); return S_OK; } } else if (hr == S_FALSE) { // derived class wants us to stop pushing data DeliverEndOfStream(); return S_OK; } else { // derived class encountered an error DbgLog((LOG_ERROR, 1, TEXT("Error %08lX from FillBuffer!!!"), hr)); DeliverEndOfStream(); m_pFilter->NotifyEvent(EC_ERRORABORT, hr, 0); return hr; } } if( com == CMD_RUN || com == CMD_PAUSE ) { Reply(NOERROR); } else if( com != CMD_STOP ) { Reply((DWORD) E_UNEXPECTED); DbgLog((LOG_ERROR, 1, TEXT("Unexpected command!!!"))); } } while( com != CMD_STOP ); return S_FALSE; }
// Route a decoded D3D11 frame to the appropriate delivery path: CPU readback
// when software fallback is active, otherwise native surface delivery.
HRESULT CDecD3D11::DeliverD3D11Frame(LAVFrame *pFrame)
{
  if (!m_bReadBackFallback)
  {
    // Native path: expose the texture pointer (data[3] on the AVFrame).
    AVFrame *pAVFrame = (AVFrame *)pFrame->priv_data;
    pFrame->data[0] = pAVFrame->data[3];
    pFrame->data[1] = pFrame->data[2] = pFrame->data[3] = nullptr;

    GetPixelFormat(&pFrame->format, &pFrame->bpp);

    Deliver(pFrame);
    return S_OK;
  }

  // Readback path: either the on-demand "direct" variant or a full copy.
  if (m_bDirect)
    DeliverD3D11ReadbackDirect(pFrame);
  else
    DeliverD3D11Readback(pFrame);

  return S_OK;
}
// Entry point for decoded DXVA2 frames.  Flush markers bypass the queue; in
// non-native mode frames pass through a display-delay ring buffer so the GPU
// has time to finish before the surface is consumed.
HRESULT CDecDXVA2::HandleDXVA2Frame(LAVFrame *pFrame)
{
  if (pFrame->flags & LAV_FRAME_FLAG_FLUSH)
  {
    if (!m_bNative)
      FlushDisplayQueue(TRUE); // drain queued frames ahead of the marker
    Deliver(pFrame);
    return S_OK;
  }

  if (m_bNative)
  {
    DeliverDXVA2Frame(pFrame);
    return S_OK;
  }

  // Swap the new frame into the current ring slot and deliver whatever frame
  // previously occupied it (null while the queue is still warming up).
  LAVFrame *pReadyFrame = m_FrameQueue[m_FrameQueuePosition];
  m_FrameQueue[m_FrameQueuePosition] = pFrame;
  m_FrameQueuePosition = (m_FrameQueuePosition + 1) % m_DisplayDelay;

  if (pReadyFrame)
    DeliverDXVA2Frame(pReadyFrame);

  return S_OK;
}
// // DoBufferProcessingLoop // // Grabs a buffer and calls the users processing function. // Overridable, so that different delivery styles can be catered for. HRESULT CDynamicSourceStream::DoBufferProcessingLoop(void) { Command com; bool fOutputFormatChanged = false; OnThreadStartPlay(); do { while(!CheckRequest(&com)) { // CAutoUsingOutputPin::CAutoUsingOutputPin() only changes the value of hr // if an error occurs. HRESULT hr = S_OK; CAutoUsingOutputPin auopUsingOutputPin(this, &hr); if(FAILED(hr)) { FatalError(hr); return hr; } if(m_fReconnectOutputPin) { hr = DynamicReconnect(NULL); m_fReconnectOutputPin = false; if(FAILED(hr)) { FatalError(hr); return hr; } fOutputFormatChanged = true; } IMediaSample *pSample; hr = GetDeliveryBuffer(&pSample,NULL,NULL,0); if(FAILED(hr)) { Sleep(1); continue; // go round again. Perhaps the error will go away // or the allocator is decommited & we will be asked to // exit soon. } if(fOutputFormatChanged) { pSample->SetDiscontinuity(TRUE); fOutputFormatChanged = false; } // Virtual function user will override. hr = FillBuffer(pSample); if(hr == S_OK) { hr = Deliver(pSample); pSample->Release(); // downstream filter returns S_FALSE if it wants us to // stop or an error if it's reporting an error. if(hr != S_OK) { DbgLog((LOG_TRACE, 2, TEXT("Deliver() returned %08x; stopping"), hr)); return S_OK; } } else if(hr == S_FALSE) { // derived class wants us to stop pushing data pSample->Release(); DeliverEndOfStream(); return S_OK; } else { // derived class encountered an error pSample->Release(); DbgLog((LOG_ERROR, 1, TEXT("Error %08lX from FillBuffer!!!"), hr)); FatalError(hr); return hr; } // all paths release the sample } // For all commands sent to us there must be a Reply call! if(com == CMD_RUN || com == CMD_PAUSE) { Reply(NOERROR); } else if(com != CMD_STOP) { Reply((DWORD) E_UNEXPECTED); DbgLog((LOG_ERROR, 1, TEXT("Unexpected command!!!"))); } } while(com != CMD_STOP); return S_FALSE; }
//------------------------------------------------------------------------------ // Deliver // Add the supplied media sample to delivery queue so that the streaming thread can // deliver the sample downstream.If the streaming thread is blocked, waiting for a sample to // become available on the delivery queue, signal the thread when a new sample is added. STDMETHODIMP CCustomAllocator::Deliver(IUnknown* pSample) { return Deliver(reinterpret_cast<IMediaSample*>(pSample)); }
// Deliver one demuxed packet downstream as a media sample.  Maintains the
// pin's bitrate statistics, applies the filter's rate to the timestamps,
// renegotiates the allocator when a packet exceeds the current buffer size,
// and copies packet payload + flags onto the sample before Deliver().
HRESULT CBaseSplitterOutputPin::DeliverPacket(CAutoPtr<Packet> p)
{
    HRESULT hr;

    long nBytes = (long)p->GetCount();
    if (nBytes == 0) {
        // Empty packets are silently accepted.
        return S_OK;
    }

    m_brs.nBytesSinceLastDeliverTime += nBytes;

    if (p->rtStart != Packet::INVALID_TIME) {
        if (m_brs.rtLastDeliverTime == Packet::INVALID_TIME) {
            m_brs.rtLastDeliverTime = p->rtStart;
            m_brs.nBytesSinceLastDeliverTime = 0;
        }

        // Recompute current/average bitrate once per second of stream time.
        if (m_brs.rtLastDeliverTime + 10000000 < p->rtStart) {
            REFERENCE_TIME rtDiff = p->rtStart - m_brs.rtLastDeliverTime;

            double secs, bits;

            secs = (double)rtDiff / 10000000;
            bits = 8.0 * m_brs.nBytesSinceLastDeliverTime;
            m_brs.nCurrentBitRate = (DWORD)(bits / secs);

            m_brs.rtTotalTimeDelivered += rtDiff;
            m_brs.nTotalBytesDelivered += m_brs.nBytesSinceLastDeliverTime;

            secs = (double)m_brs.rtTotalTimeDelivered / 10000000;
            bits = 8.0 * m_brs.nTotalBytesDelivered;
            m_brs.nAverageBitRate = (DWORD)(bits / secs);

            m_brs.rtLastDeliverTime = p->rtStart;
            m_brs.nBytesSinceLastDeliverTime = 0;
            /*
            TRACE(_T("[%d] c: %d kbps, a: %d kbps\n"), p->TrackNumber,
                (m_brs.nCurrentBitRate+500)/1000, (m_brs.nAverageBitRate+500)/1000);
            */
        }

        // Scale timestamps by the current playback rate.
        double dRate = 1.0;
        if (SUCCEEDED((static_cast<CBaseSplitterFilter*>(m_pFilter))->GetRate(&dRate))) {
            p->rtStart = (REFERENCE_TIME)((double)p->rtStart / dRate);
            p->rtStop = (REFERENCE_TIME)((double)p->rtStop / dRate);
        }
    }

    do {
        CComPtr<IMediaSample> pSample;
        if (S_OK != (hr = GetDeliveryBuffer(&pSample, nullptr, nullptr, 0))) {
            break;
        }

        // Packet larger than the allocator's buffers: grow the allocator
        // (flush first if multiple buffers are outstanding) and retry.
        if (nBytes > pSample->GetSize()) {
            pSample.Release();

            ALLOCATOR_PROPERTIES props, actual;
            if (S_OK != (hr = m_pAllocator->GetProperties(&props))) {
                break;
            }
            props.cbBuffer = nBytes * 3 / 2;  // 50% headroom for future packets

            if (props.cBuffers > 1) {
                if (S_OK != (hr = __super::DeliverBeginFlush())) {
                    break;
                }
                if (S_OK != (hr = __super::DeliverEndFlush())) {
                    break;
                }
            }

            if (S_OK != (hr = m_pAllocator->Decommit())) {
                break;
            }
            if (S_OK != (hr = m_pAllocator->SetProperties(&props, &actual))) {
                break;
            }
            if (S_OK != (hr = m_pAllocator->Commit())) {
                break;
            }
            if (S_OK != (hr = GetDeliveryBuffer(&pSample, nullptr, nullptr, 0))) {
                break;
            }
        }

        // A packet-attached media type forces a format change downstream.
        if (p->pmt) {
            pSample->SetMediaType(p->pmt);
            p->bDiscontinuity = true;

            CAutoLock cAutoLock(m_pLock);
            m_mts.RemoveAll();
            m_mts.Add(*p->pmt);
        }

        bool fTimeValid = p->rtStart != Packet::INVALID_TIME;

#if defined(_DEBUG) && 0
        TRACE(_T("[%d]: d%d s%d p%d, b=%d, [%20I64d - %20I64d]\n"),
              p->TrackNumber,
              p->bDiscontinuity, p->bSyncPoint, fTimeValid && p->rtStart < 0,
              nBytes, p->rtStart, p->rtStop);
#endif

        ASSERT(!p->bSyncPoint || fTimeValid);

        BYTE* pData = nullptr;
        if (S_OK != (hr = pSample->GetPointer(&pData)) || !pData) {
            break;
        }
        memcpy(pData, p->GetData(), nBytes);

        if (S_OK != (hr = pSample->SetActualDataLength(nBytes))) {
            break;
        }

        if (S_OK != (hr = pSample->SetTime(fTimeValid ? &p->rtStart : nullptr, fTimeValid ? &p->rtStop : nullptr))) {
            break;
        }
        if (S_OK != (hr = pSample->SetMediaTime(nullptr, nullptr))) {
            break;
        }

        if (S_OK != (hr = pSample->SetDiscontinuity(p->bDiscontinuity))) {
            break;
        }
        if (S_OK != (hr = pSample->SetSyncPoint(p->bSyncPoint))) {
            break;
        }
        // Negative start times are preroll samples.
        if (S_OK != (hr = pSample->SetPreroll(fTimeValid && p->rtStart < 0))) {
            break;
        }

        if (S_OK != (hr = Deliver(pSample))) {
            break;
        }
    } while (false);

    return hr;
}
// Streaming-thread loop for the IPTV source pin: opens a (multicast-capable)
// UDP socket, accumulates datagrams into a buffer for up to ~100ms or 3/4 of
// the buffer capacity, then fills and delivers media samples until CMD_STOP.
// With FILL_DIRECTLY_INTO_BUFFER defined, datagrams are received straight
// into the media sample's buffer; otherwise a member buffer is used.
// NOTE(review): the visible text ends at the `while (com != CMD_STOP);` —
// the trailing return/closing of this function is not visible in this chunk.
HRESULT CMPIptvSourceStream::DoBufferProcessingLoop(void)
{
    Command com;
    OnThreadStartPlay();
    WSADATA wsaData;
    WSAStartup(MAKEWORD(2, 2), &wsaData);
#ifdef logging
    LogDebug("Starting grabber thread");
#endif
    // Bind address: either the configured local interface or any.
    sockaddr_in addr;
    memset(&addr, 0, sizeof(addr));
    addr.sin_family = AF_INET;
    if (localip) {
        addr.sin_addr.s_addr = inet_addr(localip);
    } else {
        addr.sin_addr.s_addr = htonl(INADDR_ANY);
    }
    addr.sin_port = htons((u_short)port);

    // Multicast membership request for the stream address.
    ip_mreq imr;
    imr.imr_multiaddr.s_addr = inet_addr(ip);
    if (localip) {
        imr.imr_interface.s_addr = inet_addr(localip);
    } else {
        imr.imr_interface.s_addr = INADDR_ANY;
    }

    unsigned long nonblocking = 1;
    if((m_socket = socket(AF_INET, SOCK_DGRAM, 0)) >= 0) {
        /*
        u_long argp = 1;
        ioctlsocket(m_socket, FIONBIO, &argp);
        */
        DWORD dw = TRUE;
        int dwLen = sizeof(dw);
        // Any failed socket option invalidates the socket (m_socket = -1).
        if(setsockopt(m_socket, SOL_SOCKET, SO_REUSEADDR, (const char*)&dw, sizeof(dw)) < 0) {
            closesocket(m_socket);
            m_socket = -1;
        }
        if(setsockopt(m_socket, SOL_SOCKET, SO_BROADCAST, (const char*)&dw, sizeof(dw)) < 0) {
            closesocket(m_socket);
            m_socket = -1;
        }
        getsockopt(m_socket, SOL_SOCKET, SO_RCVBUF, (char *)&dw, &dwLen);
#ifdef logging
        LogDebug("Socket receive buffer is: %d (%d)", dw, dwLen);
        LogDebug("Trying to set receive buffer to %d", IPTV_SOCKET_BUFFER_SIZE);
#endif
        // Enlarge the OS receive buffer to survive bursty streams.
        dw = IPTV_SOCKET_BUFFER_SIZE;
        if(setsockopt(m_socket, SOL_SOCKET, SO_RCVBUF, (const char*)&dw, sizeof(dw)) < 0) {
            closesocket(m_socket);
            m_socket = -1;
        }
        dwLen = sizeof(dw);
        getsockopt(m_socket, SOL_SOCKET, SO_RCVBUF, (char *)&dw, &dwLen);
#ifdef logging
        LogDebug("New socket receive buffer is: %d (%d)", dw, dwLen);
#endif
        if (ioctlsocket(m_socket, FIONBIO, &nonblocking) != 0) {
            closesocket(m_socket);
            m_socket = -1;
        }
        if(bind(m_socket, (struct sockaddr*)&addr, sizeof(addr)) < 0) {
            closesocket(m_socket);
            m_socket = -1;
        }
        // Join the multicast group when the address is in multicast range.
        if(IN_MULTICAST(htonl(imr.imr_multiaddr.s_addr))) {
            int ret = setsockopt(m_socket, IPPROTO_IP, IP_ADD_MEMBERSHIP, (const char*)&imr, sizeof(imr));
            if(ret < 0) ret = ::WSAGetLastError();
            ret = ret;  // join failure is deliberately ignored (best effort)
        }
    }
    SetThreadPriority(m_hThread, THREAD_PRIORITY_TIME_CRITICAL);
    int fromlen = sizeof(addr);
    m_buffsize = 0;
    timeval tv; //Will be used for select() below
    tv.tv_sec = 0;
    tv.tv_usec = 100000; //100 msec
    do
    {
        BOOL requestAvail;
        while ((requestAvail = CheckRequest(&com)) == FALSE)
        {
            DWORD startRecvTime;
            startRecvTime = GetTickCount();
#ifdef FILL_DIRECTLY_INTO_BUFFER
            IMediaSample *pSample;
            char *pData;
            long cbData;
            HRESULT hr = GetDeliveryBuffer(&pSample,NULL,NULL,0);
            if (FAILED(hr)) continue;
            CheckPointer(pSample, E_POINTER);
            // Access the sample's data buffer
            pSample->GetPointer((BYTE **)&pData);
            cbData = pSample->GetSize();
#endif
            do
            {
                //Try to read the complete remaining buffer size
                //But stop reading after 100ms have passed (slow streams like internet radio)
#ifdef FILL_DIRECTLY_INTO_BUFFER
                int len = recvfrom(m_socket, &pData[m_buffsize], cbData - m_buffsize, 0, (SOCKADDR*)&addr, &fromlen);
#else
                int len = recvfrom(m_socket, &m_buffer[m_buffsize], IPTV_BUFFER_SIZE - m_buffsize, 0, (SOCKADDR*)&addr, &fromlen);
#endif
                if(len <= 0)
                {
                    //Wait until there's something in the receive buffer
                    fd_set myFDsocket;
                    myFDsocket.fd_count = 1;
                    myFDsocket.fd_array[0] = m_socket;
                    int selectRet = select(0, &myFDsocket, NULL, NULL, &tv);
#ifdef logging
                    LogDebug("select return code: %d", selectRet);
#endif
                    continue; //On error or nothing read just repeat the loop
                }
#ifdef logging
                LogDebug("Read %d bytes at pos %d of %d", len, m_buffsize, IPTV_BUFFER_SIZE);
#endif
                m_buffsize += len;
#ifdef FILL_DIRECTLY_INTO_BUFFER
            } while ((requestAvail = CheckRequest(&com)) == FALSE && m_buffsize < (cbData * 3 / 4) && abs((signed long)(GetTickCount() - startRecvTime)) < 100);
#else
            } while ((requestAvail = CheckRequest(&com)) == FALSE && m_buffsize < (IPTV_BUFFER_SIZE * 3 / 4) && abs((signed long)(GetTickCount() - startRecvTime)) < 100);
#endif
            if (requestAvail) break;
#ifndef FILL_DIRECTLY_INTO_BUFFER
            if (m_buffsize == 0) continue; //100ms passed but no buffer received
            IMediaSample *pSample;
            HRESULT hr = GetDeliveryBuffer(&pSample,NULL,NULL,0);
            if (FAILED(hr))
            {
                continue;   // go round again. Perhaps the error will go away
                            // or the allocator is decommited & we will be asked to
                            // exit soon.
            }
#endif
            // fill buffer
            hr = FillBuffer(pSample);

            if (hr == S_OK)
            {
                hr = Deliver(pSample);
                pSample->Release();

                // downstream filter returns S_FALSE if it wants us to
                // stop or an error if it's reporting an error.
                if(hr != S_OK)
                {
#ifdef logging
                    LogDebug("Deliver() returned %08x; stopping", hr);
#endif
                    if(m_socket >= 0) {closesocket(m_socket); m_socket = -1;}
                    WSACleanup();
                    return S_OK;
                }
            }
            else if (hr == S_FALSE)
            {
                // derived class wants us to stop pushing data
                pSample->Release();
                DeliverEndOfStream();
                if(m_socket >= 0) {closesocket(m_socket); m_socket = -1;}
                WSACleanup();
                return S_OK;
            }
            else
            {
                // derived class encountered an error
                pSample->Release();
#ifdef logging
                LogDebug("Error %08lX from FillBuffer!!!", hr);
#endif
                DeliverEndOfStream();
                m_pFilter->NotifyEvent(EC_ERRORABORT, hr, 0);
                if(m_socket >= 0) {closesocket(m_socket); m_socket = -1;}
                WSACleanup();
                return hr;
            }

            // all paths release the sample
        }

        // For all commands sent to us there must be a Reply call!
        if (com == CMD_RUN || com == CMD_PAUSE)
        {
            Reply(NOERROR);
        }
        else if (com != CMD_STOP)
        {
            Reply((DWORD) E_UNEXPECTED);
#ifdef logging
            LogDebug("Unexpected command %d!!!", com);
#endif
        }
    } while (com != CMD_STOP);
// Streaming-thread loop for the audio pin: pulls samples via FillBuffer()
// and delivers non-empty ones downstream until a stop command arrives.
// m_bThreadRunning mirrors whether this loop is active.
HRESULT CAudioPin::DoBufferProcessingLoop(void)
{
  if (!m_bConnected)
  {
    // fix: this flag reset was placed AFTER the return statement in the old
    // code and therefore never executed (dead code); clear it before bailing.
    m_bThreadRunning = false;
    return S_OK;
  }

  Command com;
  OnThreadStartPlay();
  m_bThreadRunning = true;
  SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_NORMAL);

  do
  {
    while (!CheckRequest(&com))
    {
      IMediaSample *pSample;
      HRESULT hr = GetDeliveryBuffer(&pSample,NULL,NULL,0);
      if (FAILED(hr))
      {
        Sleep(1);
        continue;   // go round again. Perhaps the error will go away
                    // or the allocator is decommited & we will be asked to
                    // exit soon.
      }

      // Virtual function user will override.
      hr = FillBuffer(pSample);

      if (hr == S_OK)
      {
        // Some decoders seem to crash when we provide empty samples
        if ((pSample->GetActualDataLength() > 0) && !m_pTsReaderFilter->IsStopping() && m_bConnected)
        {
          hr = Deliver(pSample);
          m_sampleCount++ ;
        }
        else
        {
          // Skipped sample: flag a discontinuity for the next delivery.
          m_bDiscontinuity = true;
        }

        pSample->Release();

        // downstream filter returns S_FALSE if it wants us to
        // stop or an error if it's reporting an error.
        if(hr != S_OK)
        {
          DbgLog((LOG_TRACE, 2, TEXT("Deliver() returned %08x; stopping"), hr));
          m_bThreadRunning = false;
          return S_OK;
        }
      }
      else if (hr == S_FALSE)
      {
        // derived class wants us to stop pushing data
        pSample->Release();
        DeliverEndOfStream();
        m_bThreadRunning = false;
        return S_OK;
      }
      else
      {
        // derived class encountered an error
        pSample->Release();
        DbgLog((LOG_ERROR, 1, TEXT("Error %08lX from FillBuffer!!!"), hr));
        DeliverEndOfStream();
        m_pFilter->NotifyEvent(EC_ERRORABORT, hr, 0);
        m_bThreadRunning = false;
        return hr;
      }

      // all paths release the sample
    }

    // For all commands sent to us there must be a Reply call!
    if (com == CMD_RUN || com == CMD_PAUSE)
    {
      Reply(NOERROR);
    }
    else if (com != CMD_STOP)
    {
      Reply((DWORD) E_UNEXPECTED);
      DbgLog((LOG_ERROR, 1, TEXT("Unexpected command!!!")));
    }
  } while (com != CMD_STOP);

  m_bThreadRunning = false;
  return S_FALSE;
}
// // DoBufferProcessingLoop // // Grabs a buffer and calls the users processing function. // Overridable, so that different delivery styles can be catered for. HRESULT CSourceStream::DoBufferProcessingLoop(void) { Command com; OnThreadStartPlay(); do { while (!CheckRequest(&com)) { IMediaSample *pSample; HRESULT hr = GetDeliveryBuffer(&pSample,NULL,NULL,0); if (FAILED(hr)) { Sleep(1); continue; // go round again. Perhaps the error will go away // or the allocator is decommited & we will be asked to // exit soon. } // Virtual function user will override. hr = FillBuffer(pSample); if (hr == S_OK) { hr = Deliver(pSample); pSample->Release(); // downstream filter returns S_FALSE if it wants us to // stop or an error if it's reporting an error. if(hr != S_OK) { DbgLog((LOG_TRACE, 2, TEXT("Deliver() returned %08x; stopping"), hr)); return S_OK; } } else if (hr == S_FALSE) { // derived class wants us to stop pushing data pSample->Release(); DeliverEndOfStream(); return S_OK; } else { // derived class encountered an error pSample->Release(); DbgLog((LOG_ERROR, 1, TEXT("Error %08lX from FillBuffer!!!"), hr)); DeliverEndOfStream(); m_pFilter->NotifyEvent(EC_ERRORABORT, hr, 0); return hr; } // all paths release the sample } // For all commands sent to us there must be a Reply call! if (com == CMD_RUN || com == CMD_PAUSE) { Reply(NOERROR); } else if (com != CMD_STOP) { Reply((DWORD) E_UNEXPECTED); DbgLog((LOG_ERROR, 1, TEXT("Unexpected command!!!"))); } } while (com != CMD_STOP); return S_FALSE; }
// Drains one decoded frame from the WMV9 MFT and delivers it downstream.
// Handles dynamic format changes (re-selects the output type and retries),
// fills a LAVFrame with geometry/aspect/interlacing/color metadata, restores
// timestamps (manual reorder queue or sample time), and either copies the
// pixel data into aligned LAVFrame buffers or hands the locked MF buffer
// downstream directly when the width is already aligned.
// Returns S_FALSE when the MFT produced no output, E_FAIL on errors,
// otherwise the result of ProcessOutput()/Deliver().
STDMETHODIMP CDecWMV9MFT::ProcessOutput()
{
  HRESULT hr = S_OK;
  DWORD dwStatus = 0;

  MFT_OUTPUT_STREAM_INFO outputInfo = {0};
  m_pMFT->GetOutputStreamInfo(0, &outputInfo);

  IMFMediaBuffer *pMFBuffer = nullptr;
  ASSERT(!(outputInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES));

  MFT_OUTPUT_DATA_BUFFER OutputBuffer = {0};
  if (!(outputInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES)) {
    pMFBuffer = GetBuffer(outputInfo.cbSize);
    if (!pMFBuffer) {
      // FIX: corrected typo "buffere" -> "buffer" in the log message
      DbgLog((LOG_TRACE, 10, L"Unable to allocate media buffer"));
      return E_FAIL;
    }

    IMFSample *pSampleOut = nullptr;
    hr = MF.CreateSample(&pSampleOut);
    if (FAILED(hr)) {
      DbgLog((LOG_TRACE, 10, L"Unable to allocate MF sample, hr: 0x%x", hr));
      ReleaseBuffer(pMFBuffer);
      return E_FAIL;
    }

    pSampleOut->AddBuffer(pMFBuffer);
    OutputBuffer.pSample = pSampleOut;
  }
  hr = m_pMFT->ProcessOutput(0, 1, &OutputBuffer, &dwStatus);

  // We don't process events, just release them
  SafeRelease(&OutputBuffer.pEvents);

  // handle stream format changes
  if (hr == MF_E_TRANSFORM_STREAM_CHANGE || OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE) {
    SafeRelease(&OutputBuffer.pSample);
    ReleaseBuffer(pMFBuffer);
    hr = SelectOutputType();
    if (FAILED(hr)) {
      DbgLog((LOG_TRACE, 10, L"-> Failed to handle stream change, hr: %x", hr));
      return E_FAIL;
    }
    // try again with the new type, it should work now!
    return ProcessOutput();
  }

  // the MFT generated no output, discard the sample and return
  if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT || OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_NO_SAMPLE) {
    SafeRelease(&OutputBuffer.pSample);
    ReleaseBuffer(pMFBuffer);
    return S_FALSE;
  }

  // unknown error condition
  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> ProcessOutput failed with hr: %x", hr));
    SafeRelease(&OutputBuffer.pSample);
    ReleaseBuffer(pMFBuffer);
    return E_FAIL;
  }

  LAVFrame *pFrame = nullptr;
  AllocateFrame(&pFrame);

  // Pull geometry and aspect ratio from the current output type.
  IMFMediaType *pMTOut = nullptr;
  m_pMFT->GetOutputCurrentType(0, &pMTOut);
  MFGetAttributeSize(pMTOut, MF_MT_FRAME_SIZE, (UINT32 *)&pFrame->width, (UINT32 *)&pFrame->height);
  pFrame->format = m_OutPixFmt;

  AVRational pixel_aspect_ratio = {1, 1};
  MFGetAttributeRatio(pMTOut, MF_MT_PIXEL_ASPECT_RATIO, (UINT32*)&pixel_aspect_ratio.num, (UINT32*)&pixel_aspect_ratio.den);

  AVRational display_aspect_ratio = {0, 0};
  av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den, (int64_t)pixel_aspect_ratio.num * pFrame->width, (int64_t)pixel_aspect_ratio.den * pFrame->height, INT_MAX);
  pFrame->aspect_ratio = display_aspect_ratio;

  pFrame->interlaced = MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_Interlaced, FALSE);
  pFrame->repeat = MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_RepeatFirstField, FALSE);

  // Field order: auto-detect from the sample, or honor the user override.
  LAVDeintFieldOrder fo = m_pSettings->GetDeintFieldOrder();
  pFrame->tff = (fo == DeintFieldOrder_Auto) ? !MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_BottomFieldFirst, FALSE) : (fo == DeintFieldOrder_TopFieldFirst);

  // Latch interlaced state once seen; combine with the deinterlacing mode.
  if (pFrame->interlaced && !m_bInterlaced)
    m_bInterlaced = TRUE;

  pFrame->interlaced = (pFrame->interlaced || (m_bInterlaced && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive) || m_pSettings->GetDeinterlacingMode() == DeintMode_Force) && !(m_pSettings->GetDeinterlacingMode() == DeintMode_Disable);

  pFrame->ext_format.VideoPrimaries = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_PRIMARIES, MFVideoPrimaries_Unknown);
  pFrame->ext_format.VideoTransferFunction = MFGetAttributeUINT32(pMTOut, MF_MT_TRANSFER_FUNCTION, MFVideoTransFunc_Unknown);
  pFrame->ext_format.VideoTransferMatrix = MFGetAttributeUINT32(pMTOut, MF_MT_YUV_MATRIX, MFVideoTransferMatrix_Unknown);
  pFrame->ext_format.VideoChromaSubsampling = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_CHROMA_SITING, MFVideoChromaSubsampling_Unknown);
  pFrame->ext_format.NominalRange = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_NOMINAL_RANGE, MFNominalRange_Unknown);

  // HACK: don't flag range=limited if its the only value set, since its also the implied default, this helps to avoid a reconnect
  // The MFT always sets this value, even if the bitstream says nothing about it, causing a reconnect on every vc1/wmv3 file
  if (pFrame->ext_format.value == 0x2000)
    pFrame->ext_format.value = 0;

  // Timestamps: either from our manual reorder queue, or from the MF sample.
  if (m_bManualReorder) {
    if (!m_timestampQueue.empty()) {
      pFrame->rtStart = m_timestampQueue.front();
      m_timestampQueue.pop();

      LONGLONG llDuration = 0;
      hr = OutputBuffer.pSample->GetSampleDuration(&llDuration);
      if (SUCCEEDED(hr) && llDuration > 0) {
        pFrame->rtStop = pFrame->rtStart + llDuration;
      }
    }
  } else {
    LONGLONG llTimestamp = 0;
    hr = OutputBuffer.pSample->GetSampleTime(&llTimestamp);
    if (SUCCEEDED(hr)) {
      pFrame->rtStart = llTimestamp;

      LONGLONG llDuration = 0;
      hr = OutputBuffer.pSample->GetSampleDuration(&llDuration);
      if (SUCCEEDED(hr) && llDuration > 0) {
        pFrame->rtStop = pFrame->rtStart + llDuration;
      }
    }
  }

  SafeRelease(&pMTOut);

  // Lock memory in the buffer
  BYTE *pBuffer = nullptr;
  pMFBuffer->Lock(&pBuffer, NULL, NULL);

  // Check alignment
  // If not properly aligned, we need to make the data aligned.
  int alignment = (m_OutPixFmt == LAVPixFmt_NV12) ? 16 : 32;
  if ((pFrame->width % alignment) != 0) {
    hr = AllocLAVFrameBuffers(pFrame);
    if (FAILED(hr)) {
      pMFBuffer->Unlock();
      ReleaseBuffer(pMFBuffer);
      SafeRelease(&OutputBuffer.pSample);
      // FIX: the allocated LAVFrame was leaked on this path; release it like
      // every other resource before bailing out.
      ReleaseFrame(&pFrame);
      return hr;
    }
    // Copy plane-by-plane into the aligned LAVFrame buffers.
    size_t ySize = pFrame->width * pFrame->height;
    memcpy_plane(pFrame->data[0], pBuffer, pFrame->width, pFrame->stride[0], pFrame->height);
    if (m_OutPixFmt == LAVPixFmt_NV12) {
      memcpy_plane(pFrame->data[1], pBuffer + ySize, pFrame->width, pFrame->stride[1], pFrame->height / 2);
    } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
      size_t uvSize = ySize / 4;
      // Note: source chroma planes are in V,U order (YV12 layout), hence
      // data[2] is filled before data[1].
      memcpy_plane(pFrame->data[2], pBuffer + ySize, pFrame->width / 2, pFrame->stride[2], pFrame->height / 2);
      memcpy_plane(pFrame->data[1], pBuffer + ySize + uvSize, pFrame->width / 2, pFrame->stride[1], pFrame->height / 2);
    }
    pMFBuffer->Unlock();
    ReleaseBuffer(pMFBuffer);
  } else {
    // Aligned case: point the LAVFrame directly into the locked MF buffer
    // and let wmv9_buffer_destruct release it later.
    if (m_OutPixFmt == LAVPixFmt_NV12) {
      pFrame->data[0] = pBuffer;
      pFrame->data[1] = pBuffer + pFrame->width * pFrame->height;
      pFrame->stride[0] = pFrame->stride[1] = pFrame->width;
    } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
      pFrame->data[0] = pBuffer;
      pFrame->data[2] = pBuffer + pFrame->width * pFrame->height;
      pFrame->data[1] = pFrame->data[2] + (pFrame->width / 2) * (pFrame->height / 2);
      pFrame->stride[0] = pFrame->width;
      pFrame->stride[1] = pFrame->stride[2] = pFrame->width / 2;
    }
    pFrame->data[3] = (BYTE *)pMFBuffer;
    pFrame->destruct = wmv9_buffer_destruct;
    pFrame->priv_data = this;
  }
  pFrame->flags |= LAV_FRAME_FLAG_BUFFER_MODIFY;
  Deliver(pFrame);

  SafeRelease(&OutputBuffer.pSample);

  // More output pending in the MFT -- drain it.
  if (OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_INCOMPLETE)
    return ProcessOutput();
  return hr;
}
// the loop executed while running HRESULT FCapturePin::DoBufferProcessingLoop(void) { Command com; OnThreadStartPlay(); int32 LastFrame = -1; do { while (!CheckRequest(&com)) { // Wait for the next frame from the game thread if ( !GCaptureSyncEvent->Wait(1000) ) { FPlatformProcess::Sleep( 0.01f ); continue; // Reevaluate request } IMediaSample *pSample; int32 FrameNumber = FAVIWriter::GetInstance()->GetFrameNumber(); if (FrameNumber > LastFrame) { UE_LOG(LogMovieCapture, Log, TEXT(" FrameNumber > LastFrame = %d > %d"), FrameNumber, LastFrame); HRESULT hr = GetDeliveryBuffer(&pSample,NULL,NULL,0); if (FAILED(hr)) { if (pSample) { pSample->Release(); } } else { LastFrame = FrameNumber; hr = FillBuffer(pSample); if (hr == S_OK) { hr = Deliver(pSample); pSample->Release(); // downstream filter returns S_FALSE if it wants us to // stop or an error if it's reporting an error. if(hr != S_OK) { UE_LOG(LogMovieCapture, Log, TEXT("Deliver() returned %08x; stopping"), hr); return S_OK; } } } } // Allow the game thread read more data GCaptureSyncEvent->Trigger(); } // For all commands sent to us there must be a Reply call! if (com == CMD_RUN || com == CMD_PAUSE) { Reply(NOERROR); } else if (com != CMD_STOP) { Reply((uint32) E_UNEXPECTED); } } while (com != CMD_STOP); return S_FALSE; }
// Delivers one demuxed packet downstream as an IMediaSample.
// Responsibilities, in order: compensate large PTS jumps via m_rtOffset,
// maintain current/average bitrate statistics, renegotiate the allocator if
// the packet is larger than the current sample buffers, then copy the payload
// and stamp/deliver the sample. Returns the first failing HRESULT, or the
// result of Deliver().
HRESULT CBaseSplitterOutputPin::DeliverPacket(CAutoPtr<Packet> p)
{
    HRESULT hr;
    long nBytes = (long)p->GetCount();

    // Nothing to deliver for an empty packet.
    if (nBytes == 0) {
        return S_OK;
    }

    DWORD nFlag = (static_cast<CBaseSplitterFilter*>(m_pFilter))->GetFlag();
    if (p->rtStart != INVALID_TIME && (nFlag & PACKET_PTS_DISCONTINUITY)) {
        // Filter invalid PTS value (if too different from previous packet)
        if (!IsDiscontinuous() && !((nFlag & PACKET_PTS_VALIDATE_POSITIVE) && p->rtStart < 0)) {
            REFERENCE_TIME rt = p->rtStart + m_rtOffset;
            // A jump beyond MAX_PTS_SHIFT is treated as a timestamp
            // discontinuity; fold the jump into m_rtOffset so the delivered
            // timeline stays continuous.
            if (_abs64(rt - m_rtPrev) > MAX_PTS_SHIFT) {
                m_rtOffset += m_rtPrev - rt;
                DbgLog((LOG_TRACE, 3, L"CBaseSplitterOutputPin::DeliverPacket() : Packet discontinuity detected, adjusting offset to %I64d", m_rtOffset));
            }
        }
        p->rtStart += m_rtOffset;
        p->rtStop += m_rtOffset;
        m_rtPrev = p->rtStart;
    }

    m_brs.nBytesSinceLastDeliverTime += nBytes;

    if (p->rtStart != INVALID_TIME) {
        if (m_brs.rtLastDeliverTime == INVALID_TIME) {
            m_brs.rtLastDeliverTime = p->rtStart;
            m_brs.nBytesSinceLastDeliverTime = 0;
        }

        // Update bitrate statistics at most once per second
        // (10000000 = one second in 100ns REFERENCE_TIME units).
        if (m_brs.rtLastDeliverTime + 10000000 < p->rtStart) {
            REFERENCE_TIME rtDiff = p->rtStart - m_brs.rtLastDeliverTime;

            double secs, bits;

            // Current bitrate: bytes accumulated since the last update.
            secs = (double)rtDiff / 10000000;
            bits = 8.0 * m_brs.nBytesSinceLastDeliverTime;
            m_brs.nCurrentBitRate = (DWORD)(bits / secs);

            // Average bitrate: over everything delivered so far.
            m_brs.rtTotalTimeDelivered += rtDiff;
            m_brs.nTotalBytesDelivered += m_brs.nBytesSinceLastDeliverTime;

            secs = (double)m_brs.rtTotalTimeDelivered / 10000000;
            bits = 8.0 * m_brs.nTotalBytesDelivered;
            m_brs.nAverageBitRate = (DWORD)(bits / secs);

            m_brs.rtLastDeliverTime = p->rtStart;
            m_brs.nBytesSinceLastDeliverTime = 0;
            /*
            TRACE(_T("[%d] c: %d kbps, a: %d kbps\n"), p->TrackNumber, (m_brs.nCurrentBitRate+500)/1000, (m_brs.nAverageBitRate+500)/1000);
            */
        }

        // Scale timestamps by the current playback rate.
        double dRate = 1.0;
        if (SUCCEEDED((static_cast<CBaseSplitterFilter*>(m_pFilter))->GetRate(&dRate))) {
            p->rtStart = (REFERENCE_TIME)((double)p->rtStart / dRate);
            p->rtStop = (REFERENCE_TIME)((double)p->rtStop / dRate);
        }
    }

    // do/while(false) so every failing step can `break` to the single return.
    do {
        CComPtr<IMediaSample> pSample;
        if (S_OK != (hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0))) {
            break;
        }

        // Packet doesn't fit in the allocator's buffers: renegotiate the
        // allocator with buffers 1.5x the packet size, then re-acquire a sample.
        if (nBytes > pSample->GetSize()) {
            pSample.Release();

            ALLOCATOR_PROPERTIES props, actual;
            if (S_OK != (hr = m_pAllocator->GetProperties(&props))) {
                break;
            }
            props.cbBuffer = nBytes*3/2;

            // Flush first so no outstanding samples pin the old buffers
            // while the allocator is decommitted.
            if (props.cBuffers > 1) {
                if (S_OK != (hr = __super::DeliverBeginFlush())) {
                    break;
                }
                if (S_OK != (hr = __super::DeliverEndFlush())) {
                    break;
                }
            }

            if (S_OK != (hr = m_pAllocator->Decommit())) {
                break;
            }
            if (S_OK != (hr = m_pAllocator->SetProperties(&props, &actual))) {
                break;
            }
            if (S_OK != (hr = m_pAllocator->Commit())) {
                break;
            }
            if (S_OK != (hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0))) {
                break;
            }
        }

        // Dynamic format change: attach the new media type to the sample and
        // force a discontinuity so downstream re-reads the format.
        if (p->pmt) {
            pSample->SetMediaType(p->pmt);
            p->bDiscontinuity = true;

            // CAutoLock cAutoLock(m_pLock); // this can cause the lock
            m_mts.RemoveAll();
            m_mts.Add(*p->pmt);
        }

        bool fTimeValid = p->rtStart != INVALID_TIME;

#if defined(_DEBUG) && 0
        TRACE(_T("[%d]: d%d s%d p%d, b=%d, [%20I64d - %20I64d]\n"), p->TrackNumber, p->bDiscontinuity, p->bSyncPoint, fTimeValid && p->rtStart < 0, nBytes, p->rtStart, p->rtStop);
#endif

        // A sync point (keyframe) must always carry a valid timestamp.
        ASSERT(!p->bSyncPoint || fTimeValid);

        BYTE* pData = NULL;
        if (S_OK != (hr = pSample->GetPointer(&pData)) || !pData) {
            break;
        }
        memcpy(pData, p->GetData(), nBytes);

        if (S_OK != (hr = pSample->SetActualDataLength(nBytes))) {
            break;
        }
        if (S_OK != (hr = pSample->SetTime(fTimeValid ? &p->rtStart : NULL, fTimeValid ? &p->rtStop : NULL))) {
            break;
        }
        if (S_OK != (hr = pSample->SetMediaTime(NULL, NULL))) {
            break;
        }
        if (S_OK != (hr = pSample->SetDiscontinuity(p->bDiscontinuity))) {
            break;
        }
        if (S_OK != (hr = pSample->SetSyncPoint(p->bSyncPoint))) {
            break;
        }
        // Samples with a valid but negative start time are preroll
        // (decoded but not rendered).
        if (S_OK != (hr = pSample->SetPreroll(fTimeValid && p->rtStart < 0))) {
            break;
        }

        if (S_OK != (hr = Deliver(pSample))) {
            break;
        }
    } while (false);

    return hr;
}
// Drains one decoded frame from the WMV9 DMO and delivers it downstream.
// Mirrors CDecWMV9MFT::ProcessOutput: fills a LAVFrame with geometry and
// interlacing metadata, restores timestamps (manual reorder queue or DMO
// timestamps), then copies into aligned buffers or passes the raw buffer
// through. Returns S_FALSE when the DMO produced no output, otherwise the
// result of Deliver()/ProcessOutput().
STDMETHODIMP CDecWMV9::ProcessOutput()
{
  HRESULT hr = S_OK;
  DWORD dwStatus = 0;

  BYTE *pBuffer = GetBuffer(m_pRawBufferSize);
  // CMediaBuffer is created with refcount 1; it must be released on every
  // exit path. The `true` flag keeps buffer memory management manual
  // (ReleaseBuffer / wmv9_buffer_destruct own pBuffer).
  CMediaBuffer *pOutBuffer = new CMediaBuffer(pBuffer, m_pRawBufferSize, true);
  pOutBuffer->SetLength(0);

  DMO_OUTPUT_DATA_BUFFER OutputBufferStructs[1];
  memset(&OutputBufferStructs[0], 0, sizeof(DMO_OUTPUT_DATA_BUFFER));
  OutputBufferStructs[0].pBuffer = pOutBuffer;

  hr = m_pDMO->ProcessOutput(0, 1, OutputBufferStructs, &dwStatus);
  if (FAILED(hr)) {
    ReleaseBuffer(pBuffer);
    // FIX: pOutBuffer was leaked on this path; release the COM wrapper too.
    SafeRelease(&pOutBuffer);
    DbgLog((LOG_TRACE, 10, L"-> ProcessOutput failed with hr: %x", hr));
    return S_FALSE;
  }

  // S_FALSE from the DMO means "no output produced".
  if (hr == S_FALSE) {
    ReleaseBuffer(pBuffer);
    // FIX: pOutBuffer was leaked on this path as well.
    SafeRelease(&pOutBuffer);
    return S_FALSE;
  }

  LAVFrame *pFrame = NULL;
  AllocateFrame(&pFrame);

  // Geometry comes from the negotiated output media type.
  BITMAPINFOHEADER *pBMI = NULL;
  videoFormatTypeHandler(mtOut, &pBMI);
  pFrame->width  = pBMI->biWidth;
  pFrame->height = pBMI->biHeight;
  pFrame->format = m_OutPixFmt;
  pFrame->key_frame = (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_SYNCPOINT);

  AVRational display_aspect_ratio;
  int64_t num = (int64_t)m_StreamAR.num * pBMI->biWidth;
  int64_t den = (int64_t)m_StreamAR.den * pBMI->biHeight;
  av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den, num, den, 1 << 30);

  // Interlacing flags arrive as a WM sample-extension property on the buffer.
  BYTE contentType = 0;
  DWORD dwPropSize = 1;
  pOutBuffer->GetProperty(WM_SampleExtensionGUID_ContentType, &contentType, &dwPropSize);
  pFrame->interlaced = !!(contentType & WM_CT_INTERLACED);
  pFrame->repeat     = !!(contentType & WM_CT_REPEAT_FIRST_FIELD);

  // Field order: auto-detect from the sample, or honor the user override.
  LAVDeintFieldOrder fo = m_pSettings->GetDeintFieldOrder();
  pFrame->tff = (fo == DeintFieldOrder_Auto) ? !!(contentType & WM_CT_TOP_FIELD_FIRST) : (fo == DeintFieldOrder_TopFieldFirst);

  // Latch interlaced state once seen; combine with the deinterlacing mode.
  if (pFrame->interlaced && !m_bInterlaced)
    m_bInterlaced = TRUE;

  pFrame->interlaced = (pFrame->interlaced || (m_bInterlaced && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive) || m_pSettings->GetDeinterlacingMode() == DeintMode_Force) && !(m_pSettings->GetDeinterlacingMode() == DeintMode_Disable);

  // Timestamps: either from our manual reorder queue, or from the DMO buffer.
  if (m_bManualReorder) {
    if (!m_timestampQueue.empty()) {
      pFrame->rtStart = m_timestampQueue.front();
      m_timestampQueue.pop();
      if (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_TIMELENGTH) {
        pFrame->rtStop = pFrame->rtStart + OutputBufferStructs[0].rtTimelength;
      }
    }
  } else {
    if (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_TIME) {
      pFrame->rtStart = OutputBufferStructs[0].rtTimestamp;
      if (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_TIMELENGTH) {
        pFrame->rtStop = pFrame->rtStart + OutputBufferStructs[0].rtTimelength;
      }
    }
  }

  // Check alignment
  // If not properly aligned, we need to make the data aligned.
  int alignment = (m_OutPixFmt == LAVPixFmt_NV12) ? 16 : 32;
  if ((pFrame->width % alignment) != 0) {
    // FIX: check the allocation result (consistent with the MFT decoder) and
    // clean up all resources on failure instead of writing through NULL planes.
    hr = AllocLAVFrameBuffers(pFrame);
    if (FAILED(hr)) {
      ReleaseBuffer(pBuffer);
      SafeRelease(&pOutBuffer);
      ReleaseFrame(&pFrame);
      return hr;
    }
    // Copy plane-by-plane into the aligned LAVFrame buffers.
    size_t ySize = pFrame->width * pFrame->height;
    memcpy_plane(pFrame->data[0], pBuffer, pFrame->width, pFrame->stride[0], pFrame->height);
    if (m_OutPixFmt == LAVPixFmt_NV12) {
      memcpy_plane(pFrame->data[1], pBuffer+ySize, pFrame->width, pFrame->stride[1], pFrame->height / 2);
    } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
      size_t uvSize = ySize / 4;
      // Note: source chroma planes are in V,U order (YV12 layout), hence
      // data[2] is filled before data[1].
      memcpy_plane(pFrame->data[2], pBuffer+ySize, pFrame->width / 2, pFrame->stride[2], pFrame->height / 2);
      memcpy_plane(pFrame->data[1], pBuffer+ySize+uvSize, pFrame->width / 2, pFrame->stride[1], pFrame->height / 2);
    }
    ReleaseBuffer(pBuffer);
  } else {
    // Aligned case: point the LAVFrame directly into the raw buffer and let
    // wmv9_buffer_destruct release it later.
    if (m_OutPixFmt == LAVPixFmt_NV12) {
      pFrame->data[0] = pBuffer;
      pFrame->data[1] = pBuffer + pFrame->width * pFrame->height;
      pFrame->stride[0] = pFrame->stride[1] = pFrame->width;
    } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
      pFrame->data[0] = pBuffer;
      pFrame->data[2] = pBuffer + pFrame->width * pFrame->height;
      pFrame->data[1] = pFrame->data[2] + (pFrame->width / 2) * (pFrame->height / 2);
      pFrame->stride[0] = pFrame->width;
      pFrame->stride[1] = pFrame->stride[2] = pFrame->width / 2;
    }
    pFrame->destruct = wmv9_buffer_destruct;
    pFrame->priv_data = this;
  }
  pFrame->flags |= LAV_FRAME_FLAG_BUFFER_MODIFY;
  Deliver(pFrame);

  SafeRelease(&pOutBuffer);

  // More output pending in the DMO -- drain it.
  if (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE)
    return ProcessOutput();
  return hr;
}
// Forwards a sample from the bridge into this graph's output pin.
// Gates delivery on the pin being active (semaphore + critical section),
// rebases sample times and media times onto this graph's timeline, drops
// uncompressed preroll at a new baseline, and delivers either through the
// output queue or directly. Returns S_FALSE when inactive/disconnected,
// otherwise the downstream delivery result.
HRESULT BridgeSourceOutput::Send(IMediaSample* pSample)
{
    // check for inactive before attempting to acquire
    // the mutex
    {
        CAutoLock lock(&m_csActive);
        if (!m_bActive || !IsConnected())
        {
            return S_FALSE;
        }
    }

    // ensure we are active
    WaitForSingleObject(m_hsemActive, INFINITE);
    {
        // check that we are not shutting down
        // (state may have changed while waiting on the semaphore)
        CAutoLock lock(&m_csActive);
        if (!m_bActive || !IsConnected())
        {
            // release the activity token before bailing out
            ReleaseSemaphore(m_hsemActive, 1, NULL);
            return S_FALSE;
        }
    }

    //LOG((TEXT("Source pin 0x%x Send 0x%x", this, pSample));

    bool bDiscard = false;

    // adjust times so that segments abut correctly
    REFERENCE_TIME tStart, tEnd;
    if (SUCCEEDED(pSample->GetTime(&tStart, &tEnd)))
    {
        CAutoLock lock(&m_csTime);

        // At a new baseline, uncompressed preroll (or negative-start) samples
        // are dropped rather than rebased.
        if (m_bNewBaseline && (m_tBase > 0) && (m_pStream->AllowedTypes() == eUncompressed))
        {
            if ((pSample->IsPreroll() == S_OK) || (tStart < 0))
            {
                LOG((TEXT("Dropping preroll %d..%dms"), long(tStart/10000), long(tEnd/10000)));
                bDiscard = true;
            }
        }

        if (!bDiscard)
        {
            REFERENCE_TIME tStream = Filter()->RealStreamTime();
            LOG((TEXT("Sample 0x%x adjust from %d ms to %d ms, latency %d ms %c"), pSample, long(tStart/10000), long((tStart+m_tBase)/10000), long((tStart+m_tBase-tStream)/10000), m_bDiscont ? TEXT('D') : TEXT(' ')));

            // Shift presentation times onto this graph's timebase.
            tStart += m_tBase;
            tEnd += m_tBase;
            m_tLastStop = tEnd;
            pSample->SetTime(&tStart, &tEnd);

            // Rebase media times the same way, keeping them continuous with
            // the last delivered media time.
            REFERENCE_TIME tMT = 0, tMTEnd = 0;
            if (SUCCEEDED(pSample->GetMediaTime(&tMT, &tMTEnd)))
            {
                if (m_bNewBaseline)
                {
                    m_mtBase = m_mtLast - tMT;
                }
                if (m_mtBase != 0)
                {
                    tMT += m_mtBase;
                    tMTEnd += m_mtBase;
                    pSample->SetMediaTime(&tMT, &tMTEnd);
                }
                m_mtLast = tMTEnd;
            }
            LOG((TEXT("Duration: %d, MT %d..%d"), long((tEnd - tStart) / 10000), long(tMT), long(tMTEnd)));

            // ensure disconts are only set on a timebase jump -- this
            // allows filters later in the graph to detect the jump
            pSample->SetDiscontinuity(m_bDiscont);
            m_bDiscont = false;
        }
    }
    m_bNewBaseline = false;

    // the sample is from the correct allocator already
    HRESULT hr = S_OK;
    if (!bDiscard)
    {
        if (m_pQueue != NULL)
        {
            // the output queue takes its own reference and releases it after delivery
            pSample->AddRef();
            hr = m_pQueue->Receive(pSample);
        }
        else
        {
            hr = Deliver(pSample);
        }
    }

    // hand back the activity token acquired above
    ReleaseSemaphore(m_hsemActive, 1, NULL);

    if (hr != S_OK)
    {
        LOG((TEXT("Source pin 0x%x, sample 0x%x, HRESULT 0x%x"), this, pSample, hr));
    }
    return hr;
}