// Copies `size` bytes into a fresh delivery buffer and pushes it downstream.
// `start`/`stop` arrive in units 100x finer than REFERENCE_TIME and are
// divided by 100; start == -1 means "no timestamp".
// Always returns S_OK: delivery failures are intentionally swallowed so the
// upstream producer keeps running.
STDMETHODIMP FakeOutputPin::PushBuffer(byte *buffer, __int64 start, __int64 stop, unsigned int size, bool discont)
{
    IMediaSample *pSample = NULL;

    if (start != -1) {
        start /= 100;
        stop /= 100;
    }

    HRESULT hres = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
    if (hres == S_OK && pSample) {
        // fix: initialize the pointer — GetPointer may fail and leave it
        // untouched, after which the old code tested an uninitialized value.
        BYTE *sample_buffer = NULL;
        pSample->GetPointer(&sample_buffer);
        if (sample_buffer) {
            // fix: clamp to the allocator's buffer size so an oversized
            // input cannot overrun the media sample.
            long sample_size = pSample->GetSize();
            if (sample_size >= 0 && size > (unsigned int)sample_size)
                size = (unsigned int)sample_size;
            memcpy(sample_buffer, buffer, size);
            pSample->SetActualDataLength(size);
        }
        pSample->SetDiscontinuity(discont);
        pSample->SetSyncPoint(TRUE);   // every pushed buffer is a sync point
        pSample->SetPreroll(FALSE);
        if (start != -1)
            pSample->SetTime(&start, &stop);
        hres = Deliver(pSample);
        pSample->Release();
    }
    return S_OK;
}
// Delivers up to dwBytes of pData downstream as one media sample.
// nGroupFlag is currently unused. On return *dwUsedBytes holds the number
// of bytes actually consumed (clamped to the allocator's buffer size).
// Returns E_FAIL if no buffer/pointer could be obtained, otherwise the
// result of Deliver().
HRESULT CTSParserOutputPin::SendOut( DWORD nGroupFlag, const BYTE* pData, DWORD dwBytes, DWORD* dwUsedBytes )
{
    IMediaSample *pSample;
    BYTE *pSampleData;
    DWORD cbData;

    *dwUsedBytes = 0;
    HRESULT hr = GetDeliveryBuffer(&pSample,NULL,NULL,0);
    if (FAILED(hr))
        return E_FAIL;

    // fix: check GetPointer before writing through the pointer.
    hr = pSample->GetPointer(&pSampleData);
    if (FAILED(hr)) {
        pSample->Release();
        return E_FAIL;
    }

    cbData = pSample->GetSize();
    if ( cbData < dwBytes )
    {
        // Caller handed us more than one buffer can hold; clamp and flag it.
        dwBytes = cbData;
        ASSERT( 0 );
    }
    memcpy( pSampleData, pData, dwBytes );
    pSample->SetActualDataLength( dwBytes );
    *dwUsedBytes = dwBytes;

    hr = Deliver(pSample);
    pSample->Release();
    // fix: propagate a delivery failure instead of always returning S_OK.
    return SUCCEEDED(hr) ? S_OK : hr;
}
// Decodes one compressed frame by pushing it through the hosted DirectShow
// decoder (m_pImp->Receive).
//  src/size  : compressed input frame
//  pts       : presentation time, converted via PTS2RT
//  pImage    : destination for the decoded image; NULL marks a preroll frame
//  keyframe  : nonzero if this sample is a sync point
//  *newpts   : receives the PTS reported back by our output pin after decode
// Returns DSN_OK, or a DSN_FAIL_* code via the DSN_CHECK macro on any
// failing COM call. NOTE(review): DSN_CHECK appears to return early; on
// those paths `sample` is not released — confirm the macro's behavior.
dsnerror_t Decode(const BYTE *src, int size, double pts, double *newpts, BYTE *pImage, int keyframe)
{
    IMediaSample* sample = NULL;
    REFERENCE_TIME start = PTS2RT(pts);
    /* sometimes I get x99999 instead of y00000 */
    REFERENCE_TIME stoptime = start + m_frametime;
    BYTE *ptr;

    DSN_CHECK(m_pAll->GetBuffer(&sample, 0, 0, 0), DSN_FAIL_DECODESAMPLE);
    DSN_CHECK(sample->SetActualDataLength(size), DSN_FAIL_DECODESAMPLE);
    DSN_CHECK(sample->GetPointer(&ptr), DSN_FAIL_DECODESAMPLE);
    memcpy(ptr, src, size);
    DSN_CHECK(sample->SetTime(&start, &stoptime), DSN_FAIL_DECODESAMPLE);
    DSN_CHECK(sample->SetSyncPoint(keyframe), DSN_FAIL_DECODESAMPLE);
    // A NULL pImage means this frame is decode-only (preroll).
    DSN_CHECK(sample->SetPreroll(pImage ? 0 : 1), DSN_FAIL_DECODESAMPLE);
    DSN_CHECK(sample->SetDiscontinuity(m_discontinuity), DSN_FAIL_DECODESAMPLE);
    m_discontinuity = 0;

    // Point our output pin at the caller's image buffer, then feed the
    // sample into the decoder; the decoded image lands in pImage.
    m_pOurOutput->SetPointer(pImage);
    DSN_CHECK(m_pImp->Receive(sample), DSN_FAIL_RECEIVE);
    sample->Release();

    *newpts = RT2PTS(m_pOurOutput->GetPTS());
    return DSN_OK;
}
bool CFilterNetReceiver::DeliverBuffer(const unsigned char * pData, long nLen, unsigned int timestamp) { if (pData == NULL || nLen <= 0) return false; PBYTE pSampleBuffer = NULL; IMediaSample * pSample = NULL; // Receive media data BOOL pass = FALSE; do { pSample = GetMediaSample(); if (pSample != NULL) { pSample->GetPointer(&pSampleBuffer); if (pSampleBuffer != NULL) pass = TRUE; } Sleep(10); if (m_bIsStoped) break; } while (!pass); if (m_bIsStoped) return false; //MessageBox(NULL, L"CFilterNetReceiver::doSampleBuffer memcpy()", L"test", MB_OK); memcpy(pSampleBuffer, pData, nLen); // Deliver this sample BOOL ret = DeliverHoldingSample(pSample, nLen, timestamp); return ret ? true : false; }
// Reads the next WavPack frame from the parser and delivers it downstream
// as one timestamped media sample.
// Returns S_OK on success, a failing hr when no buffer/pointer could be
// obtained or delivery failed; on parser EOF/error it delivers
// end-of-stream and returns the (successful) hr from GetDeliveryBuffer.
HRESULT CWavPackSplitterFilterInputPin::DeliverOneFrame(WavPack_parser* wpp)
{
    IMediaSample *pSample;
    BYTE *Buffer = NULL;
    HRESULT hr;
    unsigned long FrameLenBytes = 0, FrameLenSamples = 0, FrameIndex = 0;

    // Get a new media sample
    hr = m_pParentFilter->m_pOutputPin->GetDeliveryBuffer(&pSample, NULL, NULL, 0);
    if (FAILED(hr)) {
        return hr;
    }

    hr = pSample->GetPointer(&Buffer);
    if (FAILED(hr)) {
        pSample->Release();
        return hr;
    }

    // Decode one frame straight into the sample's buffer.
    FrameLenBytes = wavpack_parser_read_frame(wpp, Buffer, &FrameIndex, &FrameLenSamples);
    if (!FrameLenBytes) {
        // Something bad happened, let's end here
        pSample->Release();
        m_pParentFilter->m_pOutputPin->DeliverEndOfStream();
        // TODO : check if we need to stop the thread
        return hr;
    }
    pSample->SetActualDataLength(FrameLenBytes);

    // Convert the frame's position/length from sample counts to
    // REFERENCE_TIME (100ns units), relative to the filter's start time.
    REFERENCE_TIME rtStart, rtStop;
    rtStart = FrameIndex;
    rtStop = rtStart + FrameLenSamples;
    rtStart = (rtStart * 10000000) / wpp->sample_rate;
    rtStop = (rtStop * 10000000) / wpp->sample_rate;
    rtStart -= m_pParentFilter->m_rtStart;
    rtStop -= m_pParentFilter->m_rtStart;

    pSample->SetTime(&rtStart, &rtStop);
    pSample->SetPreroll(FALSE);
    // Propagate (and clear) a pending discontinuity; every WavPack frame
    // is independently decodable, hence always a sync point.
    pSample->SetDiscontinuity(m_bDiscontinuity);
    if (m_bDiscontinuity) {
        m_bDiscontinuity = FALSE;
    }
    pSample->SetSyncPoint(TRUE);

    // Deliver the sample
    hr = m_pParentFilter->m_pOutputPin->Deliver(pSample);
    pSample->Release();
    pSample = NULL;
    if (FAILED(hr)) {
        return hr;
    }

    return S_OK;
}
// Pushes one video frame downstream, converting the source stride
// (rcSource.right pixels) to the connection's stride (biWidth pixels)
// when they differ.
//  buf/size : raw frame data sized for the *source* stride
// Returns the hr from GetDeliveryBuffer or Deliver.
HRESULT OutputPin::Push(void *buf, long size)
{
    HRESULT hr;
    IMediaSample *pSample;
    VIDEOINFOHEADER *vi;
    AM_MEDIA_TYPE *pmt;
    BYTE *dst_buf;

    /**
     * Hold the critical section here as the pin might get disconnected
     * during the Deliver() method call.
     */
    m_pLock->Lock();

    hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
    if (FAILED(hr))
        goto on_error;

    // Pick up a dynamic format change, if the allocator attached one.
    pSample->GetMediaType(&pmt);
    if (pmt) {
        mediaType.Set(*pmt);
        bufSize = pmt->lSampleSize;
        // fix: GetMediaType returns an allocated copy the caller must
        // free; this leaked one AM_MEDIA_TYPE per format change before.
        DeleteMediaType(pmt);
    }

    pSample->GetPointer(&dst_buf);

    vi = (VIDEOINFOHEADER *)mediaType.pbFormat;
    if (vi->rcSource.right == vi->bmiHeader.biWidth) {
        // Strides match: one flat copy.
        assert(pSample->GetSize() >= size);
        memcpy(dst_buf, buf, size);
    } else {
        // Stride mismatch: copy row by row into the wider target.
        unsigned i, bpp;
        unsigned dststride, srcstride;
        BYTE *src_buf = (BYTE *)buf;

        bpp = size / abs(vi->bmiHeader.biHeight) / vi->rcSource.right;
        dststride = vi->bmiHeader.biWidth * bpp;
        srcstride = vi->rcSource.right * bpp;
        for (i = abs(vi->bmiHeader.biHeight); i > 0; i--) {
            memcpy(dst_buf, src_buf, srcstride);
            dst_buf += dststride;
            src_buf += srcstride;
        }
    }
    pSample->SetActualDataLength(size);

    hr = Deliver(pSample);
    pSample->Release();

on_error:
    m_pLock->Unlock();
    return hr;
}
// Write callback (opaque carries the pin): delivers buf/buf_size downstream
// as one media sample timestamped with byte offsets (rtStart = current
// offset, rtStop = offset + bytes written).
// If the pin's allocator is unavailable (e.g. decommitted on stop), falls
// back to seeking and writing through m_stream instead.
// Returns the number of bytes consumed; CHECK_HR jumps to `done` on failure.
int StreamOutputPin::WritePackets(void * opaque, uint8_t * buf, int buf_size)
{
    HRESULT hr = S_OK;
    BYTE * pBuffer = NULL;
    IMediaSample * pSample = NULL;
    StreamOutputPin * pThis = (StreamOutputPin *)opaque;
    REFERENCE_TIME newOffset = pThis->m_offset;

    hr = pThis->GetDeliveryBuffer(&pSample, NULL, NULL, 0);
    if (hr != S_OK) {
        // (translated) Try to write the buffer via IStream — the allocator
        // has been deallocated.
        if (pThis->m_stream) {
            ULONG written;
            LARGE_INTEGER move;
            move.QuadPart = pThis->m_offset;
            CHECK_HR(hr = pThis->m_stream->Seek(move, STREAM_SEEK_SET, NULL));
            CHECK_HR(hr = pThis->m_stream->Write(buf, buf_size, &written));
            pThis->m_offset += written;
        }
        goto done;
    }

    // Never copy more than the sample's buffer can hold.
    buf_size = min(pSample->GetSize(), buf_size);
    newOffset += buf_size;

    CHECK_HR(hr = pSample->GetPointer(&pBuffer));
    // (translated) Careful if the size does not match.
    memcpy(pBuffer, buf, buf_size);
    CHECK_HR(hr = pSample->SetActualDataLength(buf_size));
    CHECK_HR(hr = pSample->SetTime(&pThis->m_offset, &newOffset));
    pThis->m_offset = newOffset;
    CHECK_HR(hr = pThis->Deliver(pSample));

done:
    SAFE_RELEASE(pSample);
    return buf_size;
}
// Delivers one parsed AMR packet downstream as a media sample, carrying
// over the packet's sync-point, discontinuity and (optional) timestamps.
// Returns E_FAIL when no delivery buffer is available, otherwise the
// result of Deliver().
HRESULT CAMROutputPin::DeliverDataPacketAMR(DataPacketAMR &packet)
{
    IMediaSample *sample;
    HRESULT hr = GetDeliveryBuffer(&sample, NULL, NULL, 0);
    if (FAILED(hr)) {
        return E_FAIL;
    }

    // The allocator's buffers are expected to hold any packet.
    long lsize = sample->GetSize();
    ASSERT(lsize >= packet.size);

    // Copy the payload into the sample.
    BYTE *buf;
    sample->GetPointer(&buf);
    memcpy(buf, packet.buf, packet.size);
    sample->SetActualDataLength(packet.size);

    // Propagate the per-packet flags.
    sample->SetSyncPoint(packet.sync_point ? TRUE : FALSE);
    sample->SetDiscontinuity(packet.discontinuity ? TRUE : FALSE);

    // Timestamp only when the packet carries one.
    if (packet.has_time) {
        sample->SetTime(&packet.rtStart, &packet.rtStop);
    }

    // Hand off downstream and drop our reference.
    hr = Deliver(sample);
    sample->Release();
    return hr;
}
// Copies inNumBytes from inBuf into a delivery buffer stamped with
// [inStart, inEnd] and queues it on the output pin's data queue.
// Returns S_OK on success; the failing hr from GetDeliveryBuffer (typical
// when the graph enters stop mode) or the queue's Receive(); E_FAIL when
// the allocated sample is too small for the payload.
HRESULT TheoraEncodeInputPin::deliverData(LONGLONG inStart, LONGLONG inEnd, unsigned char* inBuf, unsigned long inNumBytes)
{
    // Get a pointer to a new sample stamped with our time.
    IMediaSample* locSample;
    HRESULT locHR = mOutputPin->GetDeliveryBuffer(&locSample, &inStart, &inEnd, NULL);
    if (locHR != S_OK) {
        // We get here when the application goes into stop mode usually.
        return locHR;
    }

    BYTE* locBuffer = NULL;
    // Make our pointer point at the sample's buffer.
    locSample->GetPointer(&locBuffer);

    if (locSample->GetSize() >= inNumBytes) {
        memcpy((void*)locBuffer, (const void*)inBuf, inNumBytes);
        // Set the sample parameters (length and time stamps).
        SetSampleParams(locSample, inNumBytes, &inStart, &inEnd);
        {
            CAutoLock locLock(m_pLock);
            // The data queue takes ownership of the sample on Receive().
            HRESULT locQueueHR = ((TheoraEncodeOutputPin*)mOutputPin)->mDataQueue->Receive(locSample);
            if (locQueueHR != S_OK) {
                return locQueueHR;
            }
        }
        return S_OK;
    } else {
        // fix: this path used to `throw 0`, leaking the sample and raising
        // a bare int no caller catches. Release the sample and report a
        // proper COM failure instead.
        locSample->Release();
        return E_FAIL;
    }
}
// Streaming worker thread: until m_hEndEvent is signalled, pulls TS data
// from m_SrcStream into fresh media samples and delivers them downstream.
// Polls with zero wait while data is flowing, backing off to 5ms when the
// source has nothing available. Returns 0 on normal shutdown.
unsigned int __stdcall CBonSrcPin::StreamThread(LPVOID lpParameter)
{
    CBonSrcPin *pThis = static_cast<CBonSrcPin*>(lpParameter);

    TRACE(TEXT("CBonSrcPin::StreamThread() Start\n"));

    ::CoInitialize(NULL);

    DWORD Wait = 0;
    while (::WaitForSingleObject(pThis->m_hEndEvent, Wait) == WAIT_TIMEOUT) {
        if (pThis->m_SrcStream.IsDataAvailable()) {
            // Request an empty media sample.
            IMediaSample *pSample = NULL;
            HRESULT hr = pThis->GetDeliveryBuffer(&pSample, NULL, NULL, 0);
            if (SUCCEEDED(hr)) {
                // Get the destination pointer and fill it from the source.
                BYTE *pSampleData = NULL;
                hr = pSample->GetPointer(&pSampleData);
                if (SUCCEEDED(hr)) {
                    DWORD Size = SAMPLE_PACKETS;
                    if (pThis->m_SrcStream.GetData(pSampleData, &Size)) {
                        pSample->SetActualDataLength(Size * TS_PACKETSIZE);
                        pThis->Deliver(pSample);
                    }
                }
                // fix: the sample was leaked when GetPointer failed —
                // always release the delivery buffer we obtained.
                pSample->Release();
            }
            Wait = 0;
        } else {
            Wait = 5;
        }
    }

    ::CoUninitialize();

    TRACE(TEXT("CBonSrcPin::StreamThread() End\n"));
    return 0;
}
//
// Copy
//
// return a pointer to an identical copy of pSample
// Pulls a buffer from the output allocator (propagating the source's
// timestamps and skipped-frame hint), copies all sample properties —
// via IMediaSample2::SetProperties when available, field by field
// otherwise — then copies the payload. Returns NULL if no buffer could
// be obtained or the properties could not be set.
IMediaSample * CTransInPlaceFilter::Copy(IMediaSample *pSource)
{
    IMediaSample * pDest;
    HRESULT hr;
    REFERENCE_TIME tStart, tStop;
    const BOOL bTime = S_OK == pSource->GetTime( &tStart, &tStop);

    // this may block for an indeterminate amount of time
    hr = OutputPin()->PeekAllocator()->GetBuffer(
              &pDest
              , bTime ? &tStart : NULL
              , bTime ? &tStop : NULL
              , m_bSampleSkipped ? AM_GBF_PREVFRAMESKIPPED : 0
              );
    if (FAILED(hr)) {
        return NULL;
    }

    ASSERT(pDest);
    IMediaSample2 *pSample2;
    if (SUCCEEDED(pDest->QueryInterface(IID_IMediaSample2, (void **)&pSample2))) {
        // Fast path: copy every property (times, flags, media type) in one
        // call — the length argument stops just short of the buffer fields.
        HRESULT hr = pSample2->SetProperties(
            FIELD_OFFSET(AM_SAMPLE2_PROPERTIES, pbBuffer),
            (PBYTE)m_pInput->SampleProps());
        pSample2->Release();
        if (FAILED(hr)) {
            pDest->Release();
            return NULL;
        }
    } else {
        // Fallback: propagate each property individually.
        if (bTime) {
            pDest->SetTime(&tStart, &tStop);
        }
        if (S_OK == pSource->IsSyncPoint()) {
            pDest->SetSyncPoint(TRUE);
        }
        if (S_OK == pSource->IsDiscontinuity() || m_bSampleSkipped) {
            pDest->SetDiscontinuity(TRUE);
        }
        if (S_OK == pSource->IsPreroll()) {
            pDest->SetPreroll(TRUE);
        }
        // Copy the media type
        AM_MEDIA_TYPE *pMediaType;
        if (S_OK == pSource->GetMediaType(&pMediaType)) {
            pDest->SetMediaType(pMediaType);
            DeleteMediaType( pMediaType );
        }
    }

    m_bSampleSkipped = FALSE;

    // Copy the sample media times
    REFERENCE_TIME TimeStart, TimeEnd;
    if (pSource->GetMediaTime(&TimeStart,&TimeEnd) == NOERROR) {
        pDest->SetMediaTime(&TimeStart,&TimeEnd);
    }

    // Copy the actual data length and the actual data.
    {
        const long lDataLength = pSource->GetActualDataLength();
        pDest->SetActualDataLength(lDataLength);
        // Copy the sample data
        {
            BYTE *pSourceBuffer, *pDestBuffer;
            long lSourceSize = pSource->GetSize();
            long lDestSize = pDest->GetSize();

            ASSERT(lDestSize >= lSourceSize && lDestSize >= lDataLength);

            pSource->GetPointer(&pSourceBuffer);
            pDest->GetPointer(&pDestBuffer);
            ASSERT(lDestSize == 0 || pSourceBuffer != NULL && pDestBuffer != NULL);

            CopyMemory( (PVOID) pDestBuffer, (PVOID) pSourceBuffer, lDataLength );
        }
    }

    return pDest;

} // Copy
// Reads the next WavPack frame from the parser and delivers it downstream
// as one timestamped media sample. Frames from the hybrid correction
// stream are tagged with stream id AM_STREAM_BLOCK_ADDITIONNAL via
// IMediaSample2 so the decoder can tell them apart from the lossy stream.
// Returns S_OK on success; on parser EOF/error delivers end-of-stream and
// returns the (successful) hr from GetDeliveryBuffer.
HRESULT CWavPackDSSplitterInputPin::DeliverOneFrame(WavPack_parser* wpp)
{
    IMediaSample *pSample;
    BYTE *Buffer = NULL;
    HRESULT hr;
    unsigned long FrameLenBytes = 0, FrameLenSamples = 0, FrameIndex = 0;

    // Get a new media sample
    hr = m_pParentFilter->m_pOutputPin->GetDeliveryBuffer(&pSample, NULL, NULL, 0);
    if (FAILED(hr))
    {
        DebugLog("CWavPackDSSplitterInputPin::DoProcessingLoop GetDeliveryBuffer failed 0x%08X",hr);
        return hr;
    }

    hr = pSample->GetPointer(&Buffer);
    if (FAILED(hr))
    {
        DebugLog("CWavPackDSSplitterInputPin::DoProcessingLoop GetPointer failed 0x%08X",hr);
        pSample->Release();
        return hr;
    }

    // Decode one frame straight into the sample's buffer.
    FrameLenBytes = wavpack_parser_read_frame(wpp, Buffer, &FrameIndex, &FrameLenSamples);
    if(!FrameLenBytes)
    {
        // Something bad happened, let's end here
        pSample->Release();
        m_pParentFilter->m_pOutputPin->DeliverEndOfStream();
        // TODO : check if we need to stop the thread
        DebugLog("CWavPackDSSplitterInputPin::DoProcessingLoop wavpack_parser_read_frame error");
        return hr;
    }
    pSample->SetActualDataLength(FrameLenBytes);

    if(wpp->is_correction == TRUE)
    {
        // Mark correction-stream data with a dedicated stream id.
        IMediaSample2 *pSample2;
        if (SUCCEEDED(pSample->QueryInterface(IID_IMediaSample2, (void **)&pSample2)))
        {
            AM_SAMPLE2_PROPERTIES ams2p;
            ZeroMemory(&ams2p, sizeof(AM_SAMPLE2_PROPERTIES));
            hr = pSample2->GetProperties(sizeof(AM_SAMPLE2_PROPERTIES), (PBYTE)&ams2p);
            if(SUCCEEDED(hr))
            {
                ams2p.dwStreamId = AM_STREAM_BLOCK_ADDITIONNAL;
                pSample2->SetProperties(sizeof(AM_SAMPLE2_PROPERTIES), (PBYTE)&ams2p);
            }
            pSample2->Release();
            pSample2 = NULL;
        }
    }

    // Convert the frame's position/length from sample counts to
    // REFERENCE_TIME (100ns units), relative to the filter's start time.
    REFERENCE_TIME rtStart, rtStop;
    rtStart = FrameIndex;
    rtStop = rtStart + FrameLenSamples;
    rtStart = (rtStart * 10000000) / wpp->sample_rate;
    rtStop = (rtStop * 10000000) / wpp->sample_rate;
    rtStart -= m_pParentFilter->m_rtStart;
    rtStop -= m_pParentFilter->m_rtStart;

    pSample->SetTime(&rtStart, &rtStop);
    pSample->SetPreroll(FALSE);
    // Propagate (and clear) a pending discontinuity; every frame is a
    // sync point.
    pSample->SetDiscontinuity(m_bDiscontinuity);
    if(m_bDiscontinuity)
    {
        m_bDiscontinuity = FALSE;
    }
    pSample->SetSyncPoint(TRUE);

    // Deliver the sample
    hr = m_pParentFilter->m_pOutputPin->Deliver(pSample);
    pSample->Release();
    pSample = NULL;
    if (FAILED(hr))
    {
        DebugLog("CWavPackDSSplitterInputPin::DoProcessingLoop Deliver failed 0x%08X",hr);
        return hr;
    }

    return S_OK;
}
// Clones `sample` into a new media sample drawn from `alloc`, copying data,
// times, media times, media type and all flags. Commits the allocator
// first, sizing its buffers from the source sample if no size was set yet.
// Returns the new sample; the caller owns the returned reference.
IMediaSample *QMemInputPin::duplicateSampleForOutput(IMediaSample *sample, IMemAllocator *alloc)
{
    LONG length = sample->GetActualDataLength();

    HRESULT hr = alloc->Commit();
    if (hr == HRESULT(VFW_E_SIZENOTSET)) {
        ALLOCATOR_PROPERTIES prop = getDefaultAllocatorProperties();
        prop.cbBuffer = qMax(prop.cbBuffer, length);
        ALLOCATOR_PROPERTIES actual;
        //we just try to set the properties...
        alloc->SetProperties(&prop, &actual);
        hr = alloc->Commit();
    }
    Q_ASSERT(SUCCEEDED(hr));

    IMediaSample *out;
    hr = alloc->GetBuffer(&out, 0, 0, AM_GBF_NOTASYNCPOINT);
    Q_ASSERT(SUCCEEDED(hr));

    //let's copy the sample
    {
        REFERENCE_TIME start, end;
        // fix: GetTime was unchecked — when no timestamp is set the
        // out-params are undefined and garbage times were propagated.
        hr = sample->GetTime(&start, &end);
        if (SUCCEEDED(hr)) {
            out->SetTime(&start, &end);
        }
    }

    hr = out->SetActualDataLength(length);
    Q_ASSERT(SUCCEEDED(hr));

    hr = out->SetDiscontinuity(sample->IsDiscontinuity());
    Q_ASSERT(SUCCEEDED(hr));

    {
        LONGLONG start, end;
        hr = sample->GetMediaTime(&start, &end);
        if (hr != HRESULT(VFW_E_MEDIA_TIME_NOT_SET)) {
            hr = out->SetMediaTime(&start, &end);
            Q_ASSERT(SUCCEEDED(hr));
        }
    }

    AM_MEDIA_TYPE *type = 0;
    hr = sample->GetMediaType(&type);
    Q_ASSERT(SUCCEEDED(hr));
    hr = out->SetMediaType(type);
    Q_ASSERT(SUCCEEDED(hr));
    if (type) {
        // fix: GetMediaType hands back a task-allocated copy (SetMediaType
        // copies it again); it leaked here before. Free format block then
        // the struct, as DeleteMediaType would.
        if (type->pbFormat) {
            CoTaskMemFree(type->pbFormat);
        }
        CoTaskMemFree(type);
    }

    hr = out->SetPreroll(sample->IsPreroll());
    Q_ASSERT(SUCCEEDED(hr));

    hr = out->SetSyncPoint(sample->IsSyncPoint());
    Q_ASSERT(SUCCEEDED(hr));

    BYTE *dest = 0, *src = 0;
    hr = out->GetPointer(&dest);
    Q_ASSERT(SUCCEEDED(hr));
    hr = sample->GetPointer(&src);
    Q_ASSERT(SUCCEEDED(hr));

    qMemCopy(dest, src, sample->GetActualDataLength());
    return out;
}
// UDP receive thread: reassembles media samples broadcast over UDP.
// Each datagram is a MediaPacketHeader followed by payload: type 0
// announces a new sample and carries its AM_SAMPLE2_PROPERTIES; type 1
// carries a chunk of that sample's data at a given offset. Completed
// samples are queued into SampleList (keyed by start time) for the push
// thread, which is coordinated via PushSemaphore/PushDataMutex.
// Returns 1 on any fatal socket/COM error; otherwise loops forever.
DWORD WINAPI UdpReceiveThread(LPVOID param)
{
    HRESULT hr;
    ReceiveParam *receiveParam = (ReceiveParam*)param;
    HANDLE PushSemaphore = receiveParam->PushSemaphore;
    HANDLE PushDataMutex = receiveParam->PushDataMutex;
    std::map<REFERENCE_TIME,IMediaSample*>& SampleList = *receiveParam->SampleList;
    NetReceiveFilter* filter = receiveParam->filter;
    delete receiveParam; // ownership of the param struct passed to this thread

    NetReceiveOutputPin* outputPin = reinterpret_cast<NetReceiveOutputPin*>(filter->GetPin(0));
    assert(outputPin != NULL);

    // Wait until the output pin is connected and reports a media type.
    AM_MEDIA_TYPE mediaType;
    while (true)
    {
        outputPin->ConnectionMediaType(&mediaType);
        if (mediaType.majortype == GUID_NULL)
        {
            Sleep(300);
        }
        else
            break;
    }

    SOCKET udpSocket;
    udpSocket = ::socket(AF_INET, SOCK_DGRAM, 0);
    if (udpSocket == INVALID_SOCKET)
    {
        ErrorPrint("Create udp socket error");
        return 1;
    }

    // Video and audio streams arrive on separate well-known ports.
    sockaddr_in bindAddress;
    bindAddress.sin_family = AF_INET;
    bindAddress.sin_addr.s_addr = htonl(INADDR_ANY);
    if(mediaType.majortype == MEDIATYPE_Video)
    {
        bindAddress.sin_port = htons(VideoBroadcastPort);
    }
    else
    {
        bindAddress.sin_port = htons(AudioBroadcastPort);
    }

    int option = 1;
    int ret = setsockopt(udpSocket, SOL_SOCKET, SO_REUSEADDR, (char*)&option, sizeof(option));
    if (ret == SOCKET_ERROR)
    {
        ErrorPrint("Set socket reuse address error");
        return 1;
    }

    // Large (10MB) kernel receive buffer so bursts are not dropped;
    // failure here is non-fatal.
    int recvSystemBufferSize = 1024 * 1024 * 10;
    ret = setsockopt(udpSocket, SOL_SOCKET, SO_RCVBUF, (char*)&recvSystemBufferSize, sizeof(recvSystemBufferSize));
    if (ret == SOCKET_ERROR)
    {
        ErrorPrint("Set socket receive system buffer size error");
    }

    ret = ::bind(udpSocket, (sockaddr*)&bindAddress, sizeof(bindAddress));
    if(ret == SOCKET_ERROR)
    {
        ErrorPrint("Bind udp receive socket error");
        return 1;
    }

    sockaddr_in fromAddress;
    fromAddress.sin_family = AF_INET;
    int addressLen = sizeof(fromAddress);

    // Samples currently being reassembled, keyed by sender sequence id.
    std::map<long long, IMediaSample*> idToSampleMap;

    // One reusable datagram buffer: header immediately followed by payload.
    const int packetMaxSize = 10 * 1024;
    MediaPacketHeader* mediaPacketHeader = (MediaPacketHeader*)new char[sizeof(MediaPacketHeader) + packetMaxSize];
    boost::scoped_array<char> bufferContainer((char*)mediaPacketHeader);
    char* dataStart = (char*)mediaPacketHeader;
    char* dataBuffer = (char*)mediaPacketHeader + sizeof(MediaPacketHeader);

    while (true)
    {
        int recvedSize = recvfrom(udpSocket, dataStart, sizeof(MediaPacketHeader) + packetMaxSize, 0, (sockaddr*)&fromAddress, &addressLen);
        if (recvedSize == SOCKET_ERROR)
        {
            ErrorPrint("Receive from udp error");
            return 1;
        }

        if (g_IsBroadcasting) // (translated) packet from our own broadcast — discard it
        {
            continue;
        }

        if (mediaPacketHeader->type == 0) // (translated) this is a sample header
        {
#ifdef UDP_PRINT
            std::cout<<"Receive media packet header:"<<mediaPacketHeader->id<<std::endl;
#endif
            // (translated) Handle samples that lost packets: any pending
            // sample with a smaller id than this header can never complete,
            // so release and discard it.
            std::map<long long, IMediaSample*>::iterator it = idToSampleMap.begin();
            while (it != idToSampleMap.end())
            {
                std::map<long long, IMediaSample*>::iterator tmp = it++;
                if (tmp->first < mediaPacketHeader->id)
                {
                    std::cout<<"Lose packet:"<<mediaPacketHeader->id<<std::endl;
                    tmp->second->Release(); // (translated) the sample must be released
                    idToSampleMap.erase(tmp);
                }
                else // (translated) all droppable entries have been handled
                    break;
            }

            // (translated) Request a fresh sample for this sample header.
            IMediaSample *sample = filter->GetFreeSample();
            if (sample == NULL)
            {
                ErrorPrint("Get free sample error");
                return 1;
            }

            // The payload is the sender's AM_SAMPLE2_PROPERTIES snapshot.
            AM_SAMPLE2_PROPERTIES* sample2Properties = (AM_SAMPLE2_PROPERTIES*)dataBuffer;
            // NOTE(review): the "-9" appears intended to stop SetProperties
            // short of the trailing buffer fields — confirm against the
            // sender's serialization.
            sample2Properties->cbData = sizeof(AM_SAMPLE2_PROPERTIES) - 9;
            IMediaSample2 *mediaSample2;
            hr = sample->QueryInterface(IID_IMediaSample2, (void**)&mediaSample2);
            if (FAILED(hr))
            {
                ErrorPrint("Get media sample2 interface error",hr);
                sample->Release();
                return 1;
            }
            ComReleaser mediaSample2Releaser(mediaSample2);

            hr = mediaSample2->SetProperties(sample2Properties->cbData, (BYTE*)sample2Properties); // (translated) apply the sample properties
            if (FAILED(hr))
            {
                ErrorPrint("Set sample properties error");
            }

            sample->SetTime(&(sample2Properties->tStart), &(sample2Properties->tStop));
            sample->SetActualDataLength(sample2Properties->lActual);

            // (translated) Park it in the map until all payload arrives.
            idToSampleMap.insert(std::make_pair(mediaPacketHeader->id, sample));
        }
        else if (mediaPacketHeader->type == 1) // (translated) sample payload data
        {
#ifdef UDP_PRINT
            std::cout<<"Receive sample data:"<<mediaPacketHeader->id<<std::endl;
#endif
            // (translated) If the id is unknown, the header was lost or has
            // expired — drop this chunk.
            std::map<long long, IMediaSample*>::iterator it = idToSampleMap.find(mediaPacketHeader->id);
            if (it != idToSampleMap.end())
            {
                IMediaSample* sample = it->second;
                PBYTE dataPointer = NULL;
                hr = sample->GetPointer(&dataPointer);
                if (FAILED(hr))
                {
                    ErrorPrint("Get data pointer error",hr);
                    idToSampleMap.erase(it);
                    sample->Release();
                    continue;
                }
                memcpy(dataPointer + mediaPacketHeader->offset, dataBuffer, mediaPacketHeader->size);
                // (translated) Fully received when this chunk reaches the
                // declared length (mid-stream chunk loss is not detected).
                if ( (mediaPacketHeader->offset + mediaPacketHeader->size) == sample->GetActualDataLength())
                {
                    idToSampleMap.erase(it);
                    REFERENCE_TIME startTime,endTime;
                    sample->GetTime(&startTime,&endTime);

                    // (translated) Hand the finished sample to the push
                    // thread for delivery.
                    WaitForSingleObject(PushDataMutex, INFINITE);
                    SampleList.insert(std::make_pair(startTime,sample));
                    if (SampleList.size() >= 24 * 10)
                    {
                        ReleaseSemaphore(PushSemaphore, 1, NULL);
                    }
                    ReleaseMutex(PushDataMutex);
                }
            }
            else
                std::cout<<"Lose packet header:"<<mediaPacketHeader->id<<std::endl;
        }
    }
}
/* Delivers one GstBuffer to the connected DirectShow sink as a media
 * sample. GetDeliveryBuffer() and Deliver() share a retry budget of
 * MAX_ATTEMPTS total attempts, pausing 100ms between tries.
 * Returns GST_FLOW_OK on success, GST_FLOW_NOT_LINKED if the pin was
 * disconnected, GST_FLOW_ERROR otherwise. */
GstFlowReturn VideoFakeSrcPin::PushBuffer(GstBuffer *buffer)
{
    IMediaSample *pSample = NULL;
    byte *data = GST_BUFFER_DATA (buffer);
    int attempts = 0;
    HRESULT hres;
    BYTE *sample_buffer;
    AM_MEDIA_TYPE *mediatype;

    StartUsingOutputPin();

    while (attempts < MAX_ATTEMPTS)
    {
        hres = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
        if (SUCCEEDED (hres))
            break;
        attempts++;
        Sleep(100);
    }

    if (FAILED (hres))
    {
        StopUsingOutputPin();
        GST_WARNING ("Could not get sample for delivery to sink: %x", hres);
        return GST_FLOW_ERROR;
    }

    pSample->GetPointer(&sample_buffer);
    pSample->GetMediaType(&mediatype);
    if (mediatype) {
        SetMediaType (mediatype);
        /* fix: GetMediaType returns an allocated copy the caller must free;
         * it leaked on every dynamic format change before. */
        DeleteMediaType (mediatype);
    }

    if(sample_buffer)
    {
        /* Copy to the destination stride.
         * This is not just a simple memcpy because of the different strides.
         * TODO: optimise for the same-stride case and avoid the copy entirely.
         */
        CopyToDestinationBuffer (data, sample_buffer);
    }

    pSample->SetDiscontinuity(FALSE); /* Decoded frame; unimportant */
    pSample->SetSyncPoint(TRUE); /* Decoded frame; always a valid syncpoint */
    pSample->SetPreroll(FALSE); /* For non-displayed frames. Not used in GStreamer */

    /* Disable synchronising on this sample. We instead let GStreamer handle
     * this at a higher level, inside BaseSink. */
    pSample->SetTime(NULL, NULL);

    while (attempts < MAX_ATTEMPTS)
    {
        hres = Deliver(pSample);
        if (SUCCEEDED (hres))
            break;
        attempts++;
        Sleep(100);
    }
    pSample->Release();
    StopUsingOutputPin();

    if (SUCCEEDED (hres))
        return GST_FLOW_OK;
    else {
        GST_WARNING_OBJECT (this, "Failed to deliver sample: %x", hres);
        if (hres == VFW_E_NOT_CONNECTED)
            return GST_FLOW_NOT_LINKED;
        else
            return GST_FLOW_ERROR;
    }
}
// Decodes the AAC payload of pIn frame by frame and delivers each decoded
// frame downstream on m_pOutput (pOut itself is not used for delivery).
// Maintains the output timeline in m_StartTime with optional jitter
// resynchronization (m_bJitterCorrection / MAX_JITTER) and applies the A/V
// delay correction in m_DelayAdjustment by stretching/shrinking sample
// durations. Returns the last failing hr, or S_OK/last success.
HRESULT CAudioDecFilter::Transform(IMediaSample *pIn, IMediaSample *pOut)
{
    // (translated) Get the input data pointer.
    const DWORD InSize = pIn->GetActualDataLength();
    BYTE *pInData = NULL;
    HRESULT hr = pIn->GetPointer(&pInData);
    if (FAILED(hr))
        return hr;

    {
        CAutoLock Lock(&m_cPropLock);

        /* (translated) If multiple audio formats were to be supported,
           format detection would go around here. */
        if (!m_pDecoder) {
            m_pDecoder = new CAacDecoder();
            m_pDecoder->Open();
        }

        // Track the input timestamp: flag upstream discontinuities, and
        // resync the stream clock when jitter exceeds MAX_JITTER.
        REFERENCE_TIME rtStart, rtEnd;
        hr = pIn->GetTime(&rtStart, &rtEnd);
        if (FAILED(hr))
            rtStart = -1;
        if (pIn->IsDiscontinuity() == S_OK) {
            m_bDiscontinuity = true;
            m_bInputDiscontinuity = true;
        } else if (hr == S_OK || hr == VFW_S_NO_STOP_TIME) {
            if (!m_bJitterCorrection) {
                m_StartTime = rtStart;
            } else if (m_StartTime >= 0 && _abs64(rtStart - m_StartTime) > MAX_JITTER) {
                TRACE(TEXT("Resync audio stream time (%lld -> %lld [%f])\n"), m_StartTime, rtStart, (double)(rtStart - m_StartTime) / (double)REFERENCE_TIME_SECOND);
                m_StartTime = rtStart;
            }
        }
        if (m_StartTime < 0 || m_bDiscontinuity) {
            TRACE(TEXT("Initialize audio stream time (%lld)\n"), rtStart);
            m_StartTime = rtStart;
        }

        m_BitRateCalculator.Update(InSize);
    }

    // Decode loop: consume the input buffer one frame at a time.
    DWORD InDataPos = 0;
    FrameSampleInfo SampleInfo;
    SampleInfo.pData = &m_OutData;

    hr = S_OK;

    while (InDataPos < InSize) {
        {
            CAutoLock Lock(&m_cPropLock);

            CAudioDecoder::DecodeFrameInfo FrameInfo;
            const DWORD DataSize = InSize - InDataPos;
            DWORD DecodeSize = DataSize;
            if (!m_pDecoder->Decode(&pInData[InDataPos], &DecodeSize, &FrameInfo)) {
                // Partial progress: skip what was consumed and retry;
                // no progress at all ends the loop.
                if (DecodeSize < DataSize) {
                    InDataPos += DecodeSize;
                    continue;
                }
                break;
            }
            InDataPos += DecodeSize;

            if (FrameInfo.bDiscontinuity)
                m_bDiscontinuity = true;

            SampleInfo.bMediaTypeChanged = false;
            hr = OnFrame(FrameInfo.pData, FrameInfo.Samples, FrameInfo.Info, &SampleInfo);
        }

        if (SUCCEEDED(hr)) {
            // Renegotiate the output connection if the decoded format changed.
            if (SampleInfo.bMediaTypeChanged) {
                hr = ReconnectOutput(SampleInfo.MediaBufferSize, SampleInfo.MediaType);
                if (FAILED(hr))
                    break;
                OutputLog(TEXT("出力メディアタイプを更新します。\r\n"));
                hr = m_pOutput->SetMediaType(&SampleInfo.MediaType);
                if (FAILED(hr)) {
                    OutputLog(TEXT("出力メディアタイプを設定できません。(%08x)\r\n"), hr);
                    break;
                }
                m_MediaType = SampleInfo.MediaType;
                m_bDiscontinuity = true;
                m_bInputDiscontinuity = true;
            }

            IMediaSample *pOutSample = NULL;
            hr = m_pOutput->GetDeliveryBuffer(&pOutSample, NULL, NULL, 0);
            if (FAILED(hr)) {
                OutputLog(TEXT("出力メディアサンプルを取得できません。(%08x)\r\n"), hr);
                break;
            }

            if (SampleInfo.bMediaTypeChanged)
                pOutSample->SetMediaType(&m_MediaType);

            // (translated) Get the output buffer pointer.
            BYTE *pOutBuff = NULL;
            hr = pOutSample->GetPointer(&pOutBuff);
            if (FAILED(hr)) {
                OutputLog(TEXT("出力サンプルのバッファを取得できません。(%08x)\r\n"), hr);
                pOutSample->Release();
                break;
            }

            ::CopyMemory(pOutBuff, m_OutData.GetData(), m_OutData.GetSize());
            pOutSample->SetActualDataLength(m_OutData.GetSize());

            if (m_StartTime >= 0) {
                REFERENCE_TIME rtDuration, rtStart, rtEnd;
                rtDuration = REFERENCE_TIME_SECOND * (LONGLONG)SampleInfo.Samples / FREQUENCY;
                rtStart = m_StartTime;
                m_StartTime += rtDuration;
                // (translated) Time shift for audio-lag (A/V sync) correction.
                if (m_DelayAdjustment > 0) {
                    // (translated) Delay time by at most a factor of 2.
                    if (rtDuration >= m_DelayAdjustment) {
                        rtDuration += m_DelayAdjustment;
                        m_DelayAdjustment = 0;
                    } else {
                        m_DelayAdjustment -= rtDuration;
                        rtDuration *= 2;
                    }
                } else if (m_DelayAdjustment < 0) {
                    // (translated) Advance time by at most a factor of 1/2.
                    if (rtDuration >= -m_DelayAdjustment * 2) {
                        rtDuration += m_DelayAdjustment;
                        m_DelayAdjustment = 0;
                    } else {
                        m_DelayAdjustment += rtDuration;
                        rtDuration /= 2;
                    }
                } else {
                    rtStart += m_Delay;
                }
                rtEnd = rtStart + rtDuration;
                pOutSample->SetTime(&rtStart, &rtEnd);
            }
            pOutSample->SetMediaTime(NULL, NULL);
            pOutSample->SetPreroll(FALSE);
#if 0
            // (translated) Setting Discontinuity here seems to break
            // double-speed playback.
            pOutSample->SetDiscontinuity(m_bDiscontinuity);
#else
            pOutSample->SetDiscontinuity(m_bInputDiscontinuity);
#endif
            m_bDiscontinuity = false;
            m_bInputDiscontinuity = false;
            pOutSample->SetSyncPoint(TRUE);

            hr = m_pOutput->Deliver(pOutSample);
#ifdef _DEBUG
            if (FAILED(hr)) {
                OutputLog(TEXT("サンプルを送信できません。(%08x)\r\n"), hr);
                if (m_bPassthrough && !m_bPassthroughError) {
                    m_bPassthroughError = true;
                    if (m_pEventHandler)
                        m_pEventHandler->OnSpdifPassthroughError(hr);
                }
            }
#endif
            pOutSample->Release();
            if (FAILED(hr))
                break;
        }
    }

    return hr;
}
// Worker-thread loop for the IPTV source pin: opens a non-blocking UDP
// socket (joining a multicast group when `ip` is multicast), accumulates
// datagrams into a buffer — either directly into delivery buffers
// (FILL_DIRECTLY_INTO_BUFFER) or into m_buffer — and pushes samples
// downstream until CMD_STOP arrives.
// Returns S_OK on normal/downstream-requested stop, or the failing hr
// when FillBuffer reports an error.
// NOTE(review): this definition appears truncated at the end of this
// chunk (the closing of the function is not visible here).
HRESULT CMPIptvSourceStream::DoBufferProcessingLoop(void)
{
    Command com;
    OnThreadStartPlay();
    WSADATA wsaData;
    WSAStartup(MAKEWORD(2, 2), &wsaData);
#ifdef logging
    LogDebug("Starting grabber thread");
#endif
    // Local bind address: the configured local interface, or any.
    sockaddr_in addr;
    memset(&addr, 0, sizeof(addr));
    addr.sin_family = AF_INET;
    if (localip) {
        addr.sin_addr.s_addr = inet_addr(localip);
    } else {
        addr.sin_addr.s_addr = htonl(INADDR_ANY);
    }
    addr.sin_port = htons((u_short)port);
    // Multicast membership request for the stream address.
    ip_mreq imr;
    imr.imr_multiaddr.s_addr = inet_addr(ip);
    if (localip) {
        imr.imr_interface.s_addr = inet_addr(localip);
    } else {
        imr.imr_interface.s_addr = INADDR_ANY;
    }
    unsigned long nonblocking = 1;
    if((m_socket = socket(AF_INET, SOCK_DGRAM, 0)) >= 0) {
        /*
        u_long argp = 1;
        ioctlsocket(m_socket, FIONBIO, &argp);
        */
        DWORD dw = TRUE;
        int dwLen = sizeof(dw);
        if(setsockopt(m_socket, SOL_SOCKET, SO_REUSEADDR, (const char*)&dw, sizeof(dw)) < 0) {
            closesocket(m_socket);
            m_socket = -1;
        }
        if(setsockopt(m_socket, SOL_SOCKET, SO_BROADCAST, (const char*)&dw, sizeof(dw)) < 0) {
            closesocket(m_socket);
            m_socket = -1;
        }
        // Grow the kernel receive buffer so stream bursts are not dropped.
        getsockopt(m_socket, SOL_SOCKET, SO_RCVBUF, (char *)&dw, &dwLen);
#ifdef logging
        LogDebug("Socket receive buffer is: %d (%d)", dw, dwLen);
        LogDebug("Trying to set receive buffer to %d", IPTV_SOCKET_BUFFER_SIZE);
#endif
        dw = IPTV_SOCKET_BUFFER_SIZE;
        if(setsockopt(m_socket, SOL_SOCKET, SO_RCVBUF, (const char*)&dw, sizeof(dw)) < 0) {
            closesocket(m_socket);
            m_socket = -1;
        }
        dwLen = sizeof(dw);
        getsockopt(m_socket, SOL_SOCKET, SO_RCVBUF, (char *)&dw, &dwLen);
#ifdef logging
        LogDebug("New socket receive buffer is: %d (%d)", dw, dwLen);
#endif
        // Non-blocking mode; the receive loop polls with select() below.
        if (ioctlsocket(m_socket, FIONBIO, &nonblocking) != 0) {
            closesocket(m_socket);
            m_socket = -1;
        }
        if(bind(m_socket, (struct sockaddr*)&addr, sizeof(addr)) < 0) {
            closesocket(m_socket);
            m_socket = -1;
        }
        if(IN_MULTICAST(htonl(imr.imr_multiaddr.s_addr))) {
            int ret = setsockopt(m_socket, IPPROTO_IP, IP_ADD_MEMBERSHIP, (const char*)&imr, sizeof(imr));
            if(ret < 0) ret = ::WSAGetLastError();
            ret = ret; // membership errors are deliberately ignored
        }
    }
    SetThreadPriority(m_hThread, THREAD_PRIORITY_TIME_CRITICAL);
    int fromlen = sizeof(addr);
    m_buffsize = 0;
    timeval tv; //Will be used for select() below
    tv.tv_sec = 0;
    tv.tv_usec = 100000; //100 msec
    do {
        BOOL requestAvail;
        // Inner loop: keep receiving until a thread command arrives.
        while ((requestAvail = CheckRequest(&com)) == FALSE) {
            DWORD startRecvTime;
            startRecvTime = GetTickCount();
#ifdef FILL_DIRECTLY_INTO_BUFFER
            // Receive straight into a delivery buffer, bypassing m_buffer.
            IMediaSample *pSample;
            char *pData;
            long cbData;
            HRESULT hr = GetDeliveryBuffer(&pSample,NULL,NULL,0);
            if (FAILED(hr)) continue;
            CheckPointer(pSample, E_POINTER);
            // Access the sample's data buffer
            pSample->GetPointer((BYTE **)&pData);
            cbData = pSample->GetSize();
#endif
            do {
                //Try to read the complete remaining buffer size
                //But stop reading after 100ms have passed (slow streams like internet radio)
#ifdef FILL_DIRECTLY_INTO_BUFFER
                int len = recvfrom(m_socket, &pData[m_buffsize], cbData - m_buffsize, 0, (SOCKADDR*)&addr, &fromlen);
#else
                int len = recvfrom(m_socket, &m_buffer[m_buffsize], IPTV_BUFFER_SIZE - m_buffsize, 0, (SOCKADDR*)&addr, &fromlen);
#endif
                if(len <= 0) {
                    //Wait until there's something in the receive buffer
                    fd_set myFDsocket;
                    myFDsocket.fd_count = 1;
                    myFDsocket.fd_array[0] = m_socket;
                    int selectRet = select(0, &myFDsocket, NULL, NULL, &tv);
#ifdef logging
                    LogDebug("select return code: %d", selectRet);
#endif
                    continue; //On error or nothing read just repeat the loop
                }
#ifdef logging
                LogDebug("Read %d bytes at pos %d of %d", len, m_buffsize, IPTV_BUFFER_SIZE);
#endif
                m_buffsize += len;
                // Keep reading until a command arrives, the buffer is 3/4
                // full, or ~100ms have elapsed.
#ifdef FILL_DIRECTLY_INTO_BUFFER
            } while ((requestAvail = CheckRequest(&com)) == FALSE && m_buffsize < (cbData * 3 / 4) && abs((signed long)(GetTickCount() - startRecvTime)) < 100);
#else
            } while ((requestAvail = CheckRequest(&com)) == FALSE && m_buffsize < (IPTV_BUFFER_SIZE * 3 / 4) && abs((signed long)(GetTickCount() - startRecvTime)) < 100);
#endif
            if (requestAvail) break;
#ifndef FILL_DIRECTLY_INTO_BUFFER
            if (m_buffsize == 0) continue; //100ms passed but no buffer received
            IMediaSample *pSample;
            HRESULT hr = GetDeliveryBuffer(&pSample,NULL,NULL,0);
            if (FAILED(hr)) {
                continue; // go round again. Perhaps the error will go away
                // or the allocator is decommited & we will be asked to
                // exit soon.
            }
#endif
            // fill buffer
            hr = FillBuffer(pSample);

            if (hr == S_OK) {
                hr = Deliver(pSample);
                pSample->Release();
                // downstream filter returns S_FALSE if it wants us to
                // stop or an error if it's reporting an error.
                if(hr != S_OK) {
#ifdef logging
                    LogDebug("Deliver() returned %08x; stopping", hr);
#endif
                    if(m_socket >= 0) {closesocket(m_socket); m_socket = -1;}
                    WSACleanup();
                    return S_OK;
                }
            } else if (hr == S_FALSE) {
                // derived class wants us to stop pushing data
                pSample->Release();
                DeliverEndOfStream();
                if(m_socket >= 0) {closesocket(m_socket); m_socket = -1;}
                WSACleanup();
                return S_OK;
            } else {
                // derived class encountered an error
                pSample->Release();
#ifdef logging
                LogDebug("Error %08lX from FillBuffer!!!", hr);
#endif
                DeliverEndOfStream();
                m_pFilter->NotifyEvent(EC_ERRORABORT, hr, 0);
                if(m_socket >= 0) {closesocket(m_socket); m_socket = -1;}
                WSACleanup();
                return hr;
            }
            // all paths release the sample
        }
        // For all commands sent to us there must be a Reply call!
        if (com == CMD_RUN || com == CMD_PAUSE) {
            Reply(NOERROR);
        } else if (com != CMD_STOP) {
            Reply((DWORD) E_UNEXPECTED);
#ifdef logging
            LogDebug("Unexpected command %d!!!", com);
#endif
        }
    } while (com != CMD_STOP);
void DeviceSource::Preprocess()
{
    // Per-frame preprocessing: applies any pending volume change, then takes
    // the most recently captured sample and converts/uploads it into the
    // texture according to the device's output color format.
    if(!bCapturing)
        return;

    //----------------------------------------

    if(bRequestVolume)
    {
        // Apply the requested volume either through our own audio output or,
        // failing that, through the capture filter's IBasicAudio interface.
        if(audioOut)
            audioOut->SetVolume(fNewVol);
        else if(audioFilter)
        {
            IBasicAudio *basicAudio;
            if(SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
            {
                // maps fNewVol onto the IBasicAudio 1/100-dB scale
                // (presumably fNewVol is in [0,1] -- confirm with callers)
                long lVol = long((double(fNewVol)*NEAR_SILENTf)-NEAR_SILENTf);
                if(lVol <= -NEAR_SILENT)
                    lVol = -10000; // below the near-silent threshold: full mute
                basicAudio->put_Volume(lVol);
                basicAudio->Release();
            }
        }
        bRequestVolume = false;
    }

    //----------------------------------------

    // Take the latest sample under the lock; the capture callback may swap in
    // a newer one at any time.
    IMediaSample *lastSample = NULL;
    OSEnterMutex(hSampleMutex);
    if(curSample)
    {
        lastSample = curSample;
        curSample = NULL;
    }
    OSLeaveMutex(hSampleMutex);

    int numThreads = MAX(OSGetTotalCores()-2, 1);

    if(lastSample)
    {
        REFERENCE_TIME refTimeStart, refTimeFinish;
        lastSample->GetTime(&refTimeStart, &refTimeFinish);

        // NOTE(review): per-frame timestamp logging -- looks like leftover
        // debug output; consider removing or gating behind a verbose flag.
        static REFERENCE_TIME lastRefTime = 0;
        Log(TEXT("refTimeStart: %llu, refTimeFinish: %llu, offset = %llu"), refTimeStart, refTimeFinish, refTimeStart-lastRefTime);
        lastRefTime = refTimeStart;

        BYTE *lpImage = NULL;
        if(colorType == DeviceOutputType_RGB)
        {
            // RGB frames can be uploaded to the texture directly.
            if(texture)
            {
                if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
                    texture->SetImage(lpImage, GS_IMAGEFORMAT_BGRX, renderCX*4);

                bReadyToDraw = true;
            }
        }
        else if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
        {
            // Planar YUV: needs conversion, optionally across worker threads.
            if(bUseThreadedConversion)
            {
                if(!bFirstFrame)
                {
                    // Wait for the previous frame's conversion workers to
                    // finish, then upload their combined output buffer.
                    List<HANDLE> events;
                    for(int i=0; i<numThreads; i++)
                        events << convertData[i].hSignalComplete;

                    WaitForMultipleObjects(numThreads, events.Array(), TRUE, INFINITE);
                    texture->SetImage(lpImageBuffer, GS_IMAGEFORMAT_RGBX, texturePitch);

                    bReadyToDraw = true;
                }
                else
                    bFirstFrame = false;

                if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
                {
                    // Each worker thread holds its own reference to the
                    // sample until its slice of the conversion completes.
                    for(int i=0; i<numThreads; i++)
                        lastSample->AddRef();

                    for(int i=0; i<numThreads; i++)
                    {
                        convertData[i].input = lpImage;
                        convertData[i].pitch = texturePitch;
                        convertData[i].output = lpImageBuffer;
                        convertData[i].sample = lastSample;
                        SetEvent(convertData[i].hSignalConvert);
                    }
                }
            }
            else
            {
                // Synchronous conversion straight into the mapped texture.
                if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
                {
                    LPBYTE lpData;
                    UINT pitch;

                    if(texture->Map(lpData, pitch))
                    {
                        PackPlanar(lpData, lpImage, renderCX, renderCY, pitch, 0, renderCY);
                        texture->Unmap();
                    }
                }

                bReadyToDraw = true;
            }
        }
        else if(colorType == DeviceOutputType_YVYU || colorType == DeviceOutputType_YUY2)
        {
            // Packed 4:2:2, luma-first ordering.
            if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
            {
                LPBYTE lpData;
                UINT pitch;

                if(texture->Map(lpData, pitch))
                {
                    Convert422To444(lpData, lpImage, pitch, true);
                    texture->Unmap();
                }
            }

            bReadyToDraw = true;
        }
        else if(colorType == DeviceOutputType_UYVY || colorType == DeviceOutputType_HDYC)
        {
            // Packed 4:2:2, chroma-first ordering.
            if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
            {
                LPBYTE lpData;
                UINT pitch;

                if(texture->Map(lpData, pitch))
                {
                    Convert422To444(lpData, lpImage, pitch, false);
                    texture->Unmap();
                }
            }

            bReadyToDraw = true;
        }

        lastSample->Release();
    }
}
HRESULT CLAVOutputPin::DeliverPacket(Packet *pPacket)
{
  // Deliver one demuxed packet downstream as an IMediaSample.
  // Takes ownership of pPacket: it is either handed to the sample (packet
  // allocator mode) or deleted before returning. Also updates IBitRateInfo
  // statistics. CHECK_HR jumps to done: on failure.
  HRESULT hr = S_OK;
  IMediaSample *pSample = nullptr;
  long nBytes = (long)pPacket->GetDataSize();

  if(nBytes == 0) {
    goto done;
  }

  CHECK_HR(hr = GetDeliveryBuffer(&pSample, nullptr, nullptr, 0));

  if (m_bPacketAllocator) {
    // Zero-copy path: attach the packet to the sample; the sample owns it
    // from here on.
    ILAVMediaSample *pLAVSample = nullptr;
    CHECK_HR(hr = pSample->QueryInterface(&pLAVSample));
    CHECK_HR(hr = pLAVSample->SetPacket(pPacket));
    SafeRelease(&pLAVSample);
  } else {
    // Resize buffer if it is too small
    // This can cause a playback hick-up, we should avoid this if possible by setting a big enough buffer size
    if(nBytes > pSample->GetSize()) {
      SafeRelease(&pSample);
      ALLOCATOR_PROPERTIES props, actual;
      CHECK_HR(hr = m_pAllocator->GetProperties(&props));
      // Give us 2 times the requested size, so we don't resize every time
      props.cbBuffer = nBytes*2;
      if(props.cBuffers > 1) {
        // Flush so all outstanding buffers return before re-allocating.
        CHECK_HR(hr = __super::DeliverBeginFlush());
        CHECK_HR(hr = __super::DeliverEndFlush());
      }
      CHECK_HR(hr = m_pAllocator->Decommit());
      CHECK_HR(hr = m_pAllocator->SetProperties(&props, &actual));
      CHECK_HR(hr = m_pAllocator->Commit());
      CHECK_HR(hr = GetDeliveryBuffer(&pSample, nullptr, nullptr, 0));
    }

    // Fill the sample
    BYTE* pData = nullptr;
    if(FAILED(hr = pSample->GetPointer(&pData)) || !pData) goto done;
    memcpy(pData, pPacket->GetData(), nBytes);
  }

  if(pPacket->pmt) {
    // A new media type rides along with the packet: attach it to the sample
    // and make it the pin's current media type.
    DbgLog((LOG_TRACE, 10, L"::DeliverPacket() - sending new media type to decoder"));
    pSample->SetMediaType(pPacket->pmt);
    pPacket->bDiscontinuity = true;

    CAutoLock cAutoLock(m_pLock);
    CMediaType pmt = *(pPacket->pmt);
    m_mts.clear();
    m_mts.push_back(pmt);
    pPacket->pmt = nullptr;

    SetMediaType(&pmt);
  }

  bool fTimeValid = pPacket->rtStart != Packet::INVALID_TIME;

  // IBitRateInfo: accumulate bytes/time for bitrate reporting.
  m_BitRate.nBytesSinceLastDeliverTime += nBytes;

  if (fTimeValid) {
    if (m_BitRate.rtLastDeliverTime == Packet::INVALID_TIME) {
      m_BitRate.rtLastDeliverTime = pPacket->rtStart;
      m_BitRate.nBytesSinceLastDeliverTime = 0;
    }

    // Recompute current/average bitrate once per second of stream time.
    if (m_BitRate.rtLastDeliverTime + 10000000 < pPacket->rtStart) {
      REFERENCE_TIME rtDiff = pPacket->rtStart - m_BitRate.rtLastDeliverTime;

      double dSecs, dBits;

      dSecs = rtDiff / 10000000.0;
      dBits = 8.0 * m_BitRate.nBytesSinceLastDeliverTime;
      m_BitRate.nCurrentBitRate = (DWORD)(dBits / dSecs);

      m_BitRate.rtTotalTimeDelivered += rtDiff;
      m_BitRate.nTotalBytesDelivered += m_BitRate.nBytesSinceLastDeliverTime;

      dSecs = m_BitRate.rtTotalTimeDelivered / 10000000.0;
      dBits = 8.0 * m_BitRate.nTotalBytesDelivered;
      m_BitRate.nAverageBitRate = (DWORD)(dBits / dSecs);

      m_BitRate.rtLastDeliverTime = pPacket->rtStart;
      m_BitRate.nBytesSinceLastDeliverTime = 0;
    }
  }

  // Stamp the sample (length, times, flags) and send it downstream.
  CHECK_HR(hr = pSample->SetActualDataLength(nBytes));
  CHECK_HR(hr = pSample->SetTime(fTimeValid ? &pPacket->rtStart : nullptr, fTimeValid ? &pPacket->rtStop : nullptr));
  CHECK_HR(hr = pSample->SetMediaTime(nullptr, nullptr));
  CHECK_HR(hr = pSample->SetDiscontinuity(pPacket->bDiscontinuity));
  CHECK_HR(hr = pSample->SetSyncPoint(pPacket->bSyncPoint));
  CHECK_HR(hr = pSample->SetPreroll(fTimeValid && pPacket->rtStart < 0));

  // Deliver
  CHECK_HR(hr = Deliver(pSample));

done:
  // In packet-allocator mode the sample owns the packet -- unless we never
  // obtained a sample, in which case ownership never transferred and we must
  // free the packet ourselves.
  if (!m_bPacketAllocator || !pSample)
    SAFE_DELETE(pPacket);
  SafeRelease(&pSample);
  return hr;
}
HRESULT CMPVlcSourceStream::DoBufferProcessingLoop(void)
{
    // Pump loop: starts the VLC VLM stream, reads its output from a named
    // pipe via overlapped I/O, and delivers the data downstream in media
    // samples until CMD_STOP arrives or delivery fails.
    Command com;
    HRESULT result = S_OK;
    BOOL bStop = false;
    OnThreadStartPlay();
    LogInfo("Starting grabber thread");

    if (m_exec) {
        // Optional external helper command launched before streaming begins.
        LogInfo("Executing external command: %s %s", m_exec, m_exec_opt);
        ::ShellExecuteA(0, NULL, m_exec, m_exec_opt, NULL, SW_HIDE);
        Sleep(m_exec_wait);
    }

    //libvlc_vlm_seek_media(m_vlc, "vlc_ds_stream", 0);
    if (libvlc_vlm_play_media (m_vlc, "vlc_ds_stream") != 0)
    {
        LogError("libvlc_vlm_play_media failed");
        return S_FALSE;
    }

    // Wait (up to 20s) for VLC to connect to our pipe.
    OVERLAPPED o;
    o.hEvent = CreateEvent( NULL, FALSE, FALSE, NULL);
    o.Internal = o.InternalHigh = o.Offset = o.OffsetHigh = 0;
    ConnectNamedPipe(m_hPipe, &o);
    WaitForSingleObject(o.hEvent, 20000);
    BOOL fConnected = HasOverlappedIoCompleted(&o);
    SetThreadPriority(m_hThread, THREAD_PRIORITY_TIME_CRITICAL);

    if (!fConnected)
    {
        LogError("ConnectNamedPipe failed");
        CancelIo(m_hPipe);
        CloseHandle(o.hEvent);
        return S_FALSE;
    }
    else
    do
    {
        BOOL requestAvail = FALSE;
        while ((requestAvail = CheckRequest(&com)) == FALSE)
        {
            //LogDebug ("Command: %d", com);
            IMediaSample *pSample;
            HRESULT hr = GetDeliveryBuffer(&pSample,NULL,NULL,0);
            if (FAILED(hr))
                continue;

            // fill buffer
            // ------------------------------------------------------------------------------------
            hr = S_OK;
            BYTE *pData;
            DWORD cbData;
            CheckPointer(pSample, E_POINTER);

            // Access the sample's data buffer
            pSample->GetPointer((BYTE **)&pData);
            cbData = pSample->GetSize();
            DWORD startRecvTime = GetTickCount();
            m_buffsize = 0;
            do
            {
                // Read from the pipe until the buffer is ~3/4 full, a command
                // arrives, or ~100ms have elapsed (bounds latency for slow streams).
                DWORD cbBytesRead = 0;
                ResetEvent(o.hEvent);
                o.Internal = o.InternalHigh = o.Offset = o.OffsetHigh = 0;
                BOOL fSuccess = ReadFile( m_hPipe, pData + m_buffsize, cbData - m_buffsize, &cbBytesRead, &o);
                if (GetLastError() == ERROR_IO_PENDING)
                {
                    // Poll in 1s slices so pending commands are noticed.
                    for (int n=0; n < 20; n++)
                    {
                        if ((requestAvail = CheckRequest(&com)) == TRUE)
                            break;
                        if (WaitForSingleObject(o.hEvent, 1000) == WAIT_OBJECT_0)
                            break;
                    }
                    fSuccess = GetOverlappedResult(m_hPipe, &o, &cbBytesRead, false);
                }
                if (!fSuccess || cbBytesRead == 0)
                {
                    CancelIo(m_hPipe);
                    break;
                }
                m_buffsize += cbBytesRead;
            } while ( !requestAvail && m_buffsize < (cbData * 3 / 4) && abs((signed long)(GetTickCount() - startRecvTime)) < 100);
            // ------------------------------------------------------------------------------------

            if (m_buffsize != 0 && !(requestAvail && com == CMD_STOP))
            {
                LogDebug("Posting %d / %d bytes", m_buffsize, pSample->GetSize());
                // NOTE(review): tick counts (ms) are stored directly as
                // REFERENCE_TIME (100ns units) -- looks suspect; confirm the
                // downstream filter ignores these timestamps.
                REFERENCE_TIME rtStart = startRecvTime;
                REFERENCE_TIME rtStop = GetTickCount();
                pSample->SetTime(&rtStart, &rtStop);
                pSample->SetActualDataLength(m_buffsize);
                pSample->SetSyncPoint(TRUE);
                hr = Deliver(pSample);

                // downstream filter returns S_FALSE if it wants us to
                // stop or an error if it's reporting an error.
                if(hr != S_OK)
                {
                    LogInfo("Deliver() returned %08x; stopping", hr);
                    bStop = true;
                }
            } else {
                // FillBuffer returned false
                bStop = true;
                DeliverEndOfStream();
            }
            pSample->Release();
            if (bStop)
                break;
        }
        if (requestAvail)
        {
            // Every command sent to us must be answered with a Reply call.
            LogInfo("Received command: %d", com);
            if (com == CMD_RUN || com == CMD_PAUSE)
            {
                Reply(NOERROR);
            } else if (com != CMD_STOP)
            {
                Reply((DWORD) E_UNEXPECTED);
                LogDebug("Unexpected command %d!!!", com);
            }
        }
    } while (com != CMD_STOP && bStop == false);

    //DeliverEndOfStream();
    LogDebug("end loop");

    // Drain the pipe on a helper thread while VLC shuts down, so
    // libvlc_vlm_stop_media cannot block on a full pipe.
    HANDLE hSDThread = CreateThread(NULL, 0, &VlcStreamDiscardThread, LPVOID(m_hPipe), 0, 0);
    libvlc_vlm_stop_media(m_vlc, "vlc_ds_stream");
    LogDebug("libvlc_vlm_stop_media");
    if (WaitForSingleObject(hSDThread, 30000) == WAIT_TIMEOUT)
    {
        DWORD ec;
        LogError("Terminating StreamDiscardThread!");
        GetExitCodeThread(hSDThread, &ec);
        TerminateThread(hSDThread, ec);
    }
    DisconnectNamedPipe(m_hPipe);
    LogDebug("DoBufferProcessingLoop end");
    CloseHandle(o.hEvent);
    return result;
}
HRESULT MediaChunk::Write(Atom* patm)
{
    // Write this chunk's queued samples into the container atom and record
    // their positions/durations on the owning track's index.

    // record chunk start position
    LONGLONG posChunk = patm->Position() + patm->Length();

    if (m_bOldIndexFormat)
    {
        // Old-style index: write fixed-size chunks, splitting the queued
        // buffers so that no chunk break falls inside a sample.
        long cBytes = 0;

        // ensure that we don't break in the middle of a sample (Maxim Kartavenkov)
        const int MAX_PCM_SIZE = 22050;
        int max_bytes = MAX_PCM_SIZE - (MAX_PCM_SIZE % m_pTrack->Handler()->BlockAlign());

        list<IMediaSample*>::iterator it = m_Samples.begin();
        long cAvail = 0;
        BYTE* pBuffer = NULL;

        for (;;)
        {
            if (!cAvail)
            {
                // Current buffer exhausted: advance to the next queued sample.
                if (it == m_Samples.end())
                {
                    break;
                }
                IMediaSample* pSample = *it++;
                pSample->GetPointer(&pBuffer);
                cAvail = pSample->GetActualDataLength();
                REFERENCE_TIME tStart, tStop;
                if (SUCCEEDED(pSample->GetTime(&tStart, &tStop)))
                {
                    m_pTrack->SetOldIndexStart(tStart);
                }
            }

            // Copy as much of this buffer as still fits in the current chunk.
            long cThis = max_bytes - cBytes;
            if (cThis > cAvail)
            {
                cThis = cAvail;
            }

            int cActual = 0;
            m_pTrack->Handler()->WriteData(patm, pBuffer, cThis, &cActual);
            cBytes += cActual;
            cAvail -= cActual;
            pBuffer += cActual;

            if (cBytes >= max_bytes)
            {
                // Chunk full: index it and start a new one.
                m_pTrack->OldIndex(posChunk, cBytes);
                posChunk = patm->Position() + patm->Length();
                cBytes = 0;
            }
        }
        if (cBytes)
        {
            // Index the final partial chunk.
            m_pTrack->OldIndex(posChunk, cBytes);
        }
        return S_OK;
    }

    // Remember that large H264 samples may be broken
    // across several buffers, with Sync flag at start and
    // time on last buffer.
    bool bSync = false;
    long cBytes = 0;
    long nSamples = 0;

    // loop once through the samples writing the data
    list<IMediaSample*>::iterator it;
    for (it = m_Samples.begin(); it != m_Samples.end(); it++)
    {
        IMediaSample* pSample = *it;

        // record positive sync flag, but for
        // multiple-buffer samples, only one sync flag will be present
        // so don't overwrite with later negatives.
        if (pSample->IsSyncPoint() == S_OK)
        {
            bSync = true;
        }

        // write payload, including any transformation (eg BSF to length-prepended)
        BYTE* pBuffer;
        pSample->GetPointer(&pBuffer);
        int cActual = 0;
        m_pTrack->Handler()->WriteData(patm, pBuffer, pSample->GetActualDataLength(), &cActual);
        cBytes += cActual;

        REFERENCE_TIME tStart, tEnd;
        HRESULT hr = pSample->GetTime(&tStart, &tEnd);
        if (SUCCEEDED(hr))
        {
            // this is the last buffer in the sample
            m_pTrack->IndexSample(bSync, tStart, tEnd, cBytes);
            // reset for new sample
            bSync = false;
            cBytes = 0;
            nSamples++;
        }
    }

    // add chunk position to index
    m_pTrack->IndexChunk(posChunk, nSamples);

    return S_OK;
}
DWORD WINAPI ReceiveThread(PVOID param)
{
    // Network receive thread: reads AM_SAMPLE2_PROPERTIES headers plus their
    // payloads from the filter's socket, copies them into free media samples
    // and queues them for the push thread. Returns 1 on fatal error.
    HRESULT hr;
    ReceiveParam *receiveParam = (ReceiveParam*)param;
    HANDLE PushSemaphore = receiveParam->PushSemaphore;
    HANDLE PushDataMutex = receiveParam->PushDataMutex;
    std::map<REFERENCE_TIME,IMediaSample*>& SampleList = *receiveParam->SampleList;
    NetReceiveFilter* filter = receiveParam->filter;
    SOCKET socket ;
    delete receiveParam; // this thread owns the parameter struct

    LONG packSize;
    // CMediaSample *tmpSample = (CMediaSample*) malloc(sizeof(CMediaSample));
    REFERENCE_TIME startTime = 0,endTime = 0; // zero times = play immediately
    REFERENCE_TIME mediaStartTime = 0,mediaEndTime = 0; // NOTE(review): unused
    AM_SAMPLE2_PROPERTIES sample2Properties;
    NetReceiveOutputPin* outputPin = reinterpret_cast<NetReceiveOutputPin*>(filter->GetPin(0));
    assert(outputPin != NULL);
    filter->waitForNewSocket();
    while(true)
    {
        // Take a free sample to receive the next packet into.
        IMediaSample *sample = filter->GetFreeSample();
        if (sample == NULL)
        {
            ErrorPrint("Get free sample error");
            return 1;
        }
        PBYTE dataPointer = NULL;
        hr = sample->GetPointer(&dataPointer);
        if (FAILED(hr))
        {
            ErrorPrint("Get data pointer error",hr);
            sample->Release();
            return 1;
        }

        CAutoLock lock(filter->getSocketLock());
        socket = filter->getSocket();

        // Read the fixed-size sample property header first.
        if (!receiveData(socket, (char*)&sample2Properties, sizeof(sample2Properties)))
        {
            ErrorPrint("Get pack Properties error");
            sample->Release();
            filter->waitForNewSocket(); // socket broken: wait for a reconnect
            continue;
        }

        packSize = sample2Properties.lActual;
        if (packSize > 100 * 1024)
        {
            std::cout<<"Exceed 100K:"<<packSize/1024<<std::endl;
        }

        AM_MEDIA_TYPE mediaType;
        filter->GetPin(0)->ConnectionMediaType(&mediaType);
        // NOTE(review): ConnectionMediaType() allocates the format block, but
        // mediaType is never freed (FreeMediaType) -- looks like a per-packet
        // leak; confirm and fix.

        if (filter->getPlayMode() == 1) // timestamped playback
        {
            // static const unsigned long offset = 10000000; // increase latency so more data gets buffered
            // sample2Properties.tStart +=offset;
            // sample2Properties.tStop += offset;
            // presumably excludes trailing members not sent over the wire -- confirm
            sample2Properties.cbData = sizeof(sample2Properties) - 9;
            sample2Properties.pbBuffer= dataPointer;
            IMediaSample2 *mediaSample2;
            hr = sample->QueryInterface(IID_IMediaSample2, (void**)&mediaSample2);
            if (FAILED(hr))
            {
                ErrorPrint("Get media sample2 interface error",hr);
                sample->Release();
                return 1;
            }
            ComReleaser mediaSample2Releaser(mediaSample2);

            hr = mediaSample2->SetProperties(sample2Properties.cbData, (BYTE*)&sample2Properties);
            if (FAILED(hr))
            {
                ErrorPrint("Set sample properties error");
            }
            sample->SetTime(&sample2Properties.tStart, &sample2Properties.tStop);
            sample->GetTime(&startTime,&endTime);
        }
        else
        {
            startTime = 0;
            endTime = 0;
        }

        ASSERT(packSize <= sample->GetSize());
        sample->SetActualDataLength(packSize);
        sample->SetTime(&startTime, &endTime);

        // Now read the payload itself.
        if(!receiveData(socket, (char*)dataPointer, packSize))
        {
            ErrorPrint("Receive pack errors");
            sample->Release();
            filter->waitForNewSocket();
            continue;
        }

        // Notify the push thread that data is available.
        WaitForSingleObject(PushDataMutex, INFINITE);
        SampleList.insert(std::make_pair(startTime, sample));
        if(filter->getPlayMode() == 0) // play-ASAP mode: a single queued sample is enough to wake the push thread
        {
            if (SampleList.size() == 1)
            {
                ReleaseSemaphore(PushSemaphore, 1, NULL);
            }
        }
        else if (filter->getPlayMode() == 1) // timestamped mode: buffer as many samples as reasonable, but not too many
        {
            if (SampleList.size() >= 24 * 10)
            {
                ReleaseSemaphore(PushSemaphore, 1, NULL);
            }
        }
        ReleaseMutex(PushDataMutex);

        outputPin->newTransSample(sample2Properties, dataPointer); // notify so the sample can be forwarded
    }
    return 0;
}
HRESULT CWavPackDSDecoder::Receive(IMediaSample *pSample)
{
    // Decode one WavPack block (plus its optional hybrid correction block)
    // and deliver the resulting PCM samples downstream in one or more
    // output media samples.

    // Check for other streams and pass them on
    AM_SAMPLE2_PROPERTIES * const pProps = m_pInput->SampleProps();
    if ((pProps->dwStreamId != AM_STREAM_MEDIA) &&
        (pProps->dwStreamId != AM_STREAM_BLOCK_ADDITIONNAL))
    {
        return m_pOutput->Deliver(pSample);
    }

    ASSERT(pSample);
    // If no output to deliver to then no point sending us data
    ASSERT(m_pOutput != NULL);

    HRESULT hr = S_OK;
    BYTE *pSrc, *pDst;
    DWORD SrcLength = pSample->GetActualDataLength();
    hr = pSample->GetPointer(&pSrc);
    if(FAILED(hr))
        return hr;

    // Check for minimal block size
    if(SrcLength < (3 * sizeof(uint32_t)))
    {
        return S_OK;
    }

    WAVEFORMATEX* pwfx = (WAVEFORMATEX*)m_pInput->CurrentMediaType().Format();
    BOOL bSeveralBlocks = (pwfx->nChannels > 2);

    if(pProps->dwStreamId == AM_STREAM_MEDIA)
    {
        REFERENCE_TIME rtStop;
        if(pSample->IsSyncPoint() == S_OK)
        {
            // New frame start: latch its timestamp and restart the running
            // sample count used for output timestamps.
            pSample->GetTime(&m_rtFrameStart, &rtStop);
            m_TotalSamples = 0;
        }
        m_MainBlockDiscontinuity = (pSample->IsDiscontinuity() == S_OK);
        reconstruct_wavpack_frame(
            m_MainFrame,
            &m_CommonFrameData,
            (char*)pSrc,
            SrcLength,
            TRUE,
            bSeveralBlocks,
            m_PrivateData.version);
        if(m_HybridMode == TRUE)
        {
            // Stop here and wait for correction data
            return S_OK;
        }
    }

    if((m_HybridMode == TRUE) &&
        (pProps->dwStreamId == AM_STREAM_BLOCK_ADDITIONNAL))
    {
        // rebuild correction data block
        reconstruct_wavpack_frame(
            m_CorrectionFrame,
            &m_CommonFrameData,
            (char*)pSrc,
            SrcLength,
            FALSE,
            bSeveralBlocks,
            m_PrivateData.version);
    }

    // BUGFIX: only reference the correction frame when hybrid mode is active.
    // The previous code read m_CorrectionFrame->len unconditionally, so in
    // pure lossless/lossy mode it dereferenced the correction frame (and
    // could pass a stale non-zero length alongside a NULL data pointer).
    if(wavpack_buffer_decoder_load_frame(m_Codec, m_MainFrame->data, m_MainFrame->len,
        m_HybridMode ? m_CorrectionFrame->data : NULL,
        m_HybridMode ? m_CorrectionFrame->len : 0) == 0)
    {
        // Something is wrong
        return S_FALSE;
    }

    // We can precise the decoding mode now
    if(m_HybridMode == FALSE)
    {
        if(m_CommonFrameData.array_flags[0] & WV_HYBRID_FLAG)
        {
            m_DecodingMode = DECODING_MODE_LOSSY;
        }
        else
        {
            m_DecodingMode = DECODING_MODE_LOSSLESS;
        }
    }

    uint32_t samplesLeft = m_CommonFrameData.block_samples;
    while(samplesLeft > 0)
    {
        // Set up the output sample
        IMediaSample *pOutSample;
        hr = InitializeOutputSample(pSample, &pOutSample);
        if(FAILED(hr))
        {
            break;
        }

        DWORD DstLength = pOutSample->GetSize();
        hr = pOutSample->GetPointer(&pDst);
        if(FAILED(hr))
        {
            pOutSample->Release();
            break;
        }
        DstLength &= 0xFFFFFFF8; // align down to whole 8-byte units

        long samples = wavpack_buffer_decoder_unpack(m_Codec,(int32_t *)pDst, m_SamplesPerBuffer);
        if(samples)
        {
            // Convert the unpacked 32-bit samples in place to the output format.
            wavpack_buffer_format_samples(m_Codec,
                (uchar *) pDst,
                (long*) pDst,
                samples);

            DstLength = samples *
                WavpackGetBytesPerSample(m_Codec->wpc) *
                WavpackGetNumChannels (m_Codec->wpc);

            pOutSample->SetActualDataLength(DstLength);

            // Timestamps derived from the running sample count of this frame.
            REFERENCE_TIME rtStart, rtStop;
            rtStart = m_rtFrameStart + (REFERENCE_TIME)(((double)m_TotalSamples / WavpackGetSampleRate(m_Codec->wpc)) * 10000000);
            m_TotalSamples += samples;
            rtStop = m_rtFrameStart + (REFERENCE_TIME)(((double)m_TotalSamples / WavpackGetSampleRate(m_Codec->wpc)) * 10000000);

            if(rtStart < 0 && rtStop < 0)
            {
                // No need to deliver this sample it will be skipped
                pOutSample->Release();
                continue;
            }

            pOutSample->SetTime(&rtStart, &rtStop);
            pOutSample->SetSyncPoint(TRUE);
            // Flag the discontinuity only on the first delivered sample of the block.
            pOutSample->SetDiscontinuity(m_MainBlockDiscontinuity);
            if(m_MainBlockDiscontinuity == TRUE)
            {
                m_MainBlockDiscontinuity = FALSE;
            }

            hr = m_pOutput->Deliver(pOutSample);
            if(FAILED(hr))
            {
                pOutSample->Release();
                break;
            }
            pOutSample->Release();
        }
        else
        {
            // Decoder produced nothing: the frame is exhausted (or bad).
            pOutSample->Release();
            break;
        }
        samplesLeft -= samples;
    }

    m_DecodedFrames++;
    m_CrcError = WavpackGetNumErrors(m_Codec->wpc);

    return S_OK;
}
HRESULT CLAVOutputPin::DeliverPacket(Packet *pPacket)
{
  // Deliver one demuxed packet downstream as an IMediaSample.
  // Takes ownership of pPacket: it is either attached to the sample (packet
  // allocator mode) or deleted before returning. CHECK_HR jumps to done: on
  // failure.
  HRESULT hr = S_OK;
  IMediaSample *pSample = NULL;
  long nBytes = (long)pPacket->GetDataSize();

  if(nBytes == 0) {
    goto done;
  }

  CHECK_HR(hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0));

  if (m_bPacketAllocator) {
    // Zero-copy path: hand the packet to the sample, which owns it from now on.
    ILAVMediaSample *pLAVSample = NULL;
    CHECK_HR(hr = pSample->QueryInterface(&pLAVSample));
    CHECK_HR(hr = pLAVSample->SetPacket(pPacket));
    SafeRelease(&pLAVSample);
  } else {
    // Resize buffer if it is too small
    // This can cause a playback hick-up, we should avoid this if possible by setting a big enough buffer size
    if(nBytes > pSample->GetSize()) {
      SafeRelease(&pSample);
      ALLOCATOR_PROPERTIES props, actual;
      CHECK_HR(hr = m_pAllocator->GetProperties(&props));
      // Give us 2 times the requested size, so we don't resize every time
      props.cbBuffer = nBytes*2;
      if(props.cBuffers > 1) {
        // Flush so outstanding buffers are returned before re-allocating.
        CHECK_HR(hr = __super::DeliverBeginFlush());
        CHECK_HR(hr = __super::DeliverEndFlush());
      }
      CHECK_HR(hr = m_pAllocator->Decommit());
      CHECK_HR(hr = m_pAllocator->SetProperties(&props, &actual));
      CHECK_HR(hr = m_pAllocator->Commit());
      CHECK_HR(hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0));
    }

    // Fill the sample
    BYTE* pData = NULL;
    if(FAILED(hr = pSample->GetPointer(&pData)) || !pData) goto done;
    memcpy(pData, pPacket->GetData(), nBytes);
  }

  if(pPacket->pmt) {
    // A new media type accompanies the packet: attach it to the sample and
    // make it the pin's current media type.
    DbgLog((LOG_TRACE, 10, L"::DeliverPacket() - sending new media type to decoder"));
    pSample->SetMediaType(pPacket->pmt);
    pPacket->bDiscontinuity = true;

    CAutoLock cAutoLock(m_pLock);
    CMediaType pmt = *(pPacket->pmt);
    m_mts.clear();
    m_mts.push_back(pmt);
    pPacket->pmt = NULL;

    SetMediaType(&pmt);
  }

  bool fTimeValid = pPacket->rtStart != Packet::INVALID_TIME;

  // Stamp the sample (length, times, flags) and send it downstream.
  CHECK_HR(hr = pSample->SetActualDataLength(nBytes));
  CHECK_HR(hr = pSample->SetTime(fTimeValid ? &pPacket->rtStart : NULL, fTimeValid ? &pPacket->rtStop : NULL));
  CHECK_HR(hr = pSample->SetMediaTime(NULL, NULL));
  CHECK_HR(hr = pSample->SetDiscontinuity(pPacket->bDiscontinuity));
  CHECK_HR(hr = pSample->SetSyncPoint(pPacket->bSyncPoint));
  CHECK_HR(hr = pSample->SetPreroll(fTimeValid && pPacket->rtStart < 0));

  // Deliver
  CHECK_HR(hr = Deliver(pSample));

done:
  // BUGFIX: also free the packet when no sample was ever obtained in
  // packet-allocator mode (GetDeliveryBuffer failed before SetPacket could
  // transfer ownership) -- previously pPacket leaked on that path.
  if (!m_bPacketAllocator || !pSample)
    SAFE_DELETE(pPacket);
  SafeRelease(&pSample);
  return hr;
}
HRESULT CTTASplitterInputPin::DoProcessingLoop(void)
{
    // Reads TTA frames from the parser and pushes each one downstream as a
    // timestamped media sample until end-of-stream, an error, a pending
    // thread request, or an abort.
    DWORD dwCommand;
    IMediaSample *pOutSample;
    BYTE *pbDest;
    HRESULT hRes;
    unsigned long cbFrame, cFrameSamples, iFrame;

    Reply(NOERROR);
    m_bAbort = FALSE;

    // Announce the segment we are about to play.
    m_pParentFilter->m_pOutputPin->DeliverNewSegment(0, m_pParentFilter->m_rtStop - m_pParentFilter->m_rtStart, m_pParentFilter->m_dRateSeeking);

    do
    {
        // Stream exhausted?
        if (m_pIACBW->StreamPos >= m_pIACBW->StreamLen || tta_parser_eof(m_pTTAParser))
        {
            m_pParentFilter->m_pOutputPin->DeliverEndOfStream();
            return NOERROR;
        }

        // Obtain an output buffer and its data pointer.
        hRes = m_pParentFilter->m_pOutputPin->GetDeliveryBuffer(&pOutSample, NULL, NULL, 0);
        if (FAILED(hRes))
        {
            return hRes;
        }
        hRes = pOutSample->GetPointer(&pbDest);
        if (FAILED(hRes))
        {
            pOutSample->Release();
            return hRes;
        }

        // Pull the next frame out of the parser.
        cbFrame = tta_parser_read_frame(m_pTTAParser, pbDest, &iFrame, &cFrameSamples);
        if (!cbFrame)
        {
            pOutSample->Release();
            m_pParentFilter->m_pOutputPin->DeliverEndOfStream();
            return hRes;
        }
        pOutSample->SetActualDataLength(cbFrame);

        // Convert the frame's sample range into reference time (100ns units),
        // rebased onto the current seek start position.
        REFERENCE_TIME tStart = (iFrame * m_pTTAParser->FrameLen);
        REFERENCE_TIME tStop = tStart + cFrameSamples;
        tStart = (tStart * 10000000) / m_pTTAParser->TTAHeader.SampleRate;
        tStop = (tStop * 10000000) / m_pTTAParser->TTAHeader.SampleRate;
        tStart -= m_pParentFilter->m_rtStart;
        tStop -= m_pParentFilter->m_rtStart;
        pOutSample->SetTime(&tStart, &tStop);

        // Flag the sample: never preroll, discontinuity only once after a
        // seek/break, and every TTA frame is independently decodable.
        pOutSample->SetPreroll(FALSE);
        pOutSample->SetDiscontinuity(m_bDiscontinuity);
        if (m_bDiscontinuity)
        {
            m_bDiscontinuity = FALSE;
        }
        pOutSample->SetSyncPoint(TRUE);

        hRes = m_pParentFilter->m_pOutputPin->Deliver(pOutSample);
        pOutSample->Release();
        pOutSample = NULL;
        if (FAILED(hRes))
        {
            return hRes;
        }
    } while (!CheckRequest((DWORD*)&dwCommand) && !m_bAbort);

    return NOERROR;
}