// Reads a blob from the packet, zlib-inflates it, and stores the resulting
// text in 'str'. On an empty blob or a failed decompression, 'str' is left
// empty — the caller sees no error, only an empty string (best-effort read).
void CUdpBizPacket::ReadStringWithDecompress(string& str)
{
  str.clear();

  CBuffer blob;
  ReadBlob(blob);

  // Nothing to decode — keep the cleared string.
  if (blob.GetSize() <= 0)
    return;

  // Inflate in place; only publish the text if decompression succeeded.
  if (ZlibUncompress(blob))
    str.assign(blob.Data(), blob.GetSize());
}
// Source-pin worker: fills one downstream media sample with the next video
// buffer from the demultiplexer.
//
// Loops until a buffer has been copied into pSample (or an empty sample is
// delivered). Per iteration it:
//   1. Rate-limits itself via m_FillBuffSleepTime / m_LastFillBuffTime.
//   2. Returns S_FALSE at end-of-file once all buffered data is consumed
//      (S_FALSE signals EOS to the graph).
//   3. Delivers an empty sample while seeking/stopping/flushing.
//   4. Otherwise fetches a buffer, adjusts its timestamp by the filter's
//      compensation and "fast start" logic, drops/stalls samples that are
//      too late/too early, and copies accepted data into pSample.
//
// @param pSample  Downstream media sample to fill (timestamp, discontinuity,
//                 sync-point and payload are all set here).
// @return S_FALSE at end-of-stream, otherwise NOERROR (also after a caught
//         exception, in which case an empty discontinuous sample is sent).
//
// NOTE(review): presumably runs on the pin's streaming thread (m_bInFillBuffer
// acts as a "busy" flag read elsewhere) — confirm against the base class.
HRESULT CVideoPin::FillBuffer(IMediaSample *pSample)
{
  try
  {
    CDeMultiplexer& demux = m_pTsReaderFilter->GetDemultiplexer();
    CBuffer* buffer = NULL;
    bool earlyStall = false;

    //get file-duration and set m_rtDuration
    GetDuration(NULL);

    do
    {
      //Check if we need to wait for a while
      // (self-throttle: m_FillBuffSleepTime is raised when there is nothing
      // useful to do, so this loop doesn't spin on the demultiplexer)
      DWORD timeNow = GET_TIME_NOW();
      while (timeNow < (m_LastFillBuffTime + m_FillBuffSleepTime))
      {
        Sleep(1);
        timeNow = GET_TIME_NOW();
      }
      m_LastFillBuffTime = timeNow;

      //did we reach the end of the file
      if (demux.EndOfFile())
      {
        int ACnt, VCnt;
        demux.GetBufferCounts(&ACnt, &VCnt);
        if (ACnt <= 0 && VCnt <= 0) //have we used all the data ?
        {
          LogDebug("vidPin:set eof");
          m_FillBuffSleepTime = 5;
          CreateEmptySample(pSample);
          m_bInFillBuffer = false;
          return S_FALSE; //S_FALSE will notify the graph that end of file has been reached
        }
      }

      //if the filter is currently seeking to a new position
      //or this pin is currently seeking to a new position then
      //we dont try to read any packets, but simply return...
      if (m_pTsReaderFilter->IsSeeking() || m_pTsReaderFilter->IsStopping() || demux.m_bFlushRunning || !m_pTsReaderFilter->m_bStreamCompensated)
      {
        m_FillBuffSleepTime = 5;
        CreateEmptySample(pSample);
        m_bInFillBuffer = false;
        if (demux.m_bFlushRunning || !m_pTsReaderFilter->m_bStreamCompensated)
        {
          //Force discon on next good sample
          m_sampleCount = 0;
          m_bDiscontinuity=true;
        }
        return NOERROR;
      }
      else
      {
        m_FillBuffSleepTime = 1;
        m_bInFillBuffer = true;
      }

      // Get next video buffer from demultiplexer
      buffer=demux.GetVideo(earlyStall);

      if (buffer == NULL)
      {
        // Nothing available yet — back off a little and retry.
        m_FillBuffSleepTime = 5;
      }
      else if (buffer->Length() > m_bufferSize)
      {
        //discard buffer
        // (it can never fit into a media sample of m_bufferSize bytes)
        delete buffer;
        demux.EraseVideoBuff();
        m_bDiscontinuity = TRUE; //Next good sample will be discontinuous
        buffer = NULL;
        m_FillBuffSleepTime = 1;
        LogDebug("vidPin : Error - buffer too large for sample") ;
      }
      else
      {
        m_bPresentSample = true ;
        CRefTime RefTime, cRefTime;
        double fTime = 0.0;
        double clock = 0.0;
        double stallPoint = VIDEO_STALL_POINT;

        //check if it has a timestamp
        bool HasTimestamp=buffer->MediaTime(RefTime);
        if (HasTimestamp)
        {
          bool ForcePresent = false;

          // A compensation change with FFDShow fast-sync forces a
          // discontinuity so downstream re-syncs to the new timeline.
          CRefTime compTemp = m_pTsReaderFilter->GetCompensation();
          if (m_pTsReaderFilter->m_bFastSyncFFDShow && (compTemp != m_llLastComp))
          {
            m_bDiscontinuity = true;
          }
          m_llLastComp = compTemp;

          // Convert the stream timestamp to a graph-relative time:
          // subtract start offset, compensation and the clock-on-start base
          // (the base is added back after the fast-start adjustment below).
          cRefTime = RefTime;
          cRefTime -= m_rtStart;
          //adjust the timestamp with the compensation
          cRefTime -= compTemp;
          cRefTime -= m_pTsReaderFilter->m_ClockOnStart.m_time;

          // 'fast start' timestamp modification, during first (AddVideoComp + 1 sec) of play
          double fsAdjLimit = (1.0 * (double)m_pTsReaderFilter->AddVideoComp.m_time) + (double)FS_ADDON_LIM; //(1 * vid comp) + 1 second
          if (m_pTsReaderFilter->m_EnableSlowMotionOnZapping && ((double)cRefTime.m_time < fsAdjLimit) )
          {
            //float startCref = (float)cRefTime.m_time/(1000*10000); //used in LogDebug below only
            //Assume desired timestamp span is zero to fsAdjLimit, actual span is AddVideoComp to fsAdjLimit
            // Linearly remap timestamps so playback starts immediately and
            // catches up to real time by the end of the fast-start window.
            double offsetRatio = fsAdjLimit/(double)FS_ADDON_LIM; // == fsAdjLimit/(fsAdjLimit - (double)m_pTsReaderFilter->AddVideoComp.m_time);
            double currOffset = fsAdjLimit - (double)cRefTime.m_time;
            double newOffset = currOffset * offsetRatio;
            cRefTime = (fsAdjLimit > newOffset) ? (REFERENCE_TIME)(fsAdjLimit - newOffset) : 0; //Don't allow negative cRefTime
            ForcePresent = true;
            //LogDebug("VFS cOfs %03.3f, nOfs %03.3f, cRefTimeS %03.3f, cRefTimeN %03.3f", (float)currOffset/(1000*10000), (float)newOffset/(1000*10000), startCref, (float)cRefTime.m_time/(1000*10000));
            if (m_pTsReaderFilter->m_bFastSyncFFDShow)
            {
              m_delayedDiscont = 2; //Force I-frame timestamp updates for FFDShow
            }
          }

          // fTime = how far ahead of the render clock this sample is
          // (seconds); negative means the sample is already late.
          REFERENCE_TIME RefClock = 0;
          m_pTsReaderFilter->GetMediaPosition(&RefClock) ;
          clock = (double)(RefClock-m_rtStart.m_time)/10000000.0 ;
          fTime = ((double)(cRefTime.m_time + m_pTsReaderFilter->m_ClockOnStart.m_time)/10000000.0) - clock ;

          if (m_dRateSeeking == 1.0)
          {
            if ((fTime < -2.0) && (m_pTsReaderFilter->State() == State_Running) && (clock > 8.0) && !ForcePresent && !demux.m_bFlushDelegated)
            {
              //Very late - request internal flush and re-sync to stream
              demux.DelegatedFlush(false, false);
              LogDebug("vidPin : Video to render very late, flushing") ;
            }

            //Discard late samples at start of play,
            //and samples outside a sensible timing window during play
            //(helps with signal corruption recovery)
            if ((fTime > (ForcePresent ? -1.0 : -0.3)) && (fTime < (demux.m_dVidPTSJumpLimit + 1.0)) )
            {
              if ((fTime > stallPoint) && (m_sampleCount > 10))
              {
                //Too early - stall for a while to avoid over-filling of video pipeline buffers,
                //but don't enable at start of play to make sure graph starts properly
                // (buffer stays queued in the demux; 'earlyStall' tells
                // GetVideo to hand the same buffer back next iteration)
                m_FillBuffSleepTime = 10;
                buffer = NULL;
                earlyStall = true;
                continue;
              }
            }
            else
            {
              // Sample is too late.
              m_bPresentSample = false ;
            }
          }
          else if ((fTime < -1.0) || (fTime > 3.0)) //Fast-forward limits
          {
            // Sample is too late.
            m_bPresentSample = false ;
          }

          // Restore the clock-on-start base subtracted above; cRefTime is
          // now the final (pre-rate-scaling) sample timestamp.
          cRefTime += m_pTsReaderFilter->m_ClockOnStart.m_time;
        }

        if (m_bPresentSample && (buffer->Length() > 0))
        {
          //do we need to set the discontinuity flag?
          if (m_bDiscontinuity || buffer->GetDiscontinuity())
          {
            if ((m_sampleCount == 0) && m_bAddPMT && !m_pTsReaderFilter->m_bDisableAddPMT && !m_bPinNoAddPMT)
            {
              //Add MediaType info to first sample after OnThreadStartPlay()
              CMediaType mt;
              if (demux.GetVideoStreamType(mt))
              {
                pSample->SetMediaType(&mt);
                SetMediaType(&mt);
                LogDebug("vidPin: Add pmt and set discontinuity L:%d B:%d fTime:%03.3f SampCnt:%d", m_bDiscontinuity, buffer->GetDiscontinuity(), (float)fTime, m_sampleCount);
              }
              else
              {
                LogDebug("vidPin: Add pmt failed - set discontinuity L:%d B:%d fTime:%03.3f SampCnt:%d", m_bDiscontinuity, buffer->GetDiscontinuity(), (float)fTime, m_sampleCount);
              }
              m_bAddPMT = false; //Only add once each time
            }
            else
            {
              LogDebug("vidPin: Set discontinuity L:%d B:%d fTime:%03.3f SampCnt:%d", m_bDiscontinuity, buffer->GetDiscontinuity(), (float)fTime, m_sampleCount);
            }
            pSample->SetDiscontinuity(TRUE);
            m_bDiscontinuity=FALSE;
          }

          //LogDebug("vidPin: video buffer type = %d", buffer->GetVideoServiceType());

          if (HasTimestamp)
          {
            //now we have the final timestamp, set timestamp in sample
            REFERENCE_TIME refTime=(REFERENCE_TIME)cRefTime;
            pSample->SetSyncPoint(TRUE);
            bool stsDiscon = TimestampDisconChecker(refTime); //Update with current timestamp
            // Scale by the seek rate (trick-play) before stamping the sample.
            refTime = (REFERENCE_TIME)((double)refTime/m_dRateSeeking);
            pSample->SetTime(&refTime,&refTime);

            if (m_pTsReaderFilter->m_bFastSyncFFDShow && (m_dRateSeeking == 1.0))
            {
              if (stsDiscon || (pSample->IsDiscontinuity()==S_OK))
              {
                pSample->SetDiscontinuity(TRUE);
                m_delayedDiscont = 2;
              }

              // Re-flag the next I-frames as discontinuous for FFDShow
              // (MPEG-1/2 only); other codecs cancel the pending flags.
              if ((m_delayedDiscont > 0) && (buffer->GetFrameType() == 'I'))
              {
                if ((buffer->GetVideoServiceType() == SERVICE_TYPE_VIDEO_MPEG1 || buffer->GetVideoServiceType() == SERVICE_TYPE_VIDEO_MPEG2))
                {
                  //Use delayed discontinuity
                  pSample->SetDiscontinuity(TRUE);
                  m_delayedDiscont--;
                  LogDebug("vidPin:set I-frame discontinuity, count %d", m_delayedDiscont);
                }
                else
                {
                  m_delayedDiscont = 0;
                }
              }
            }

            // Diagnostic logging: always for the first samples, when the
            // sample is (nearly) late, or while m_ShowBufferVideo is armed.
            if (m_pTsReaderFilter->m_ShowBufferVideo || ((fTime < 0.02) && (m_dRateSeeking == 1.0)) || (m_sampleCount < 3))
            {
              int cntA, cntV;
              CRefTime firstAudio, lastAudio;
              CRefTime firstVideo, lastVideo, zeroVideo;
              cntA = demux.GetAudioBufferPts(firstAudio, lastAudio);
              cntV = demux.GetVideoBufferPts(firstVideo, lastVideo, zeroVideo);

              LogDebug("Vid/Ref : %03.3f, %c-frame(%02d), Compensated = %03.3f ( %0.3f A/V buffers=%02d/%02d), Clk : %f, SampCnt %d, stallPt %03.3f", (float)RefTime.Millisecs()/1000.0f,buffer->GetFrameType(),buffer->GetFrameCount(), (float)cRefTime.Millisecs()/1000.0f, fTime, cntA,cntV,clock, m_sampleCount, (float)stallPoint);
            }
            if (m_pTsReaderFilter->m_ShowBufferVideo) m_pTsReaderFilter->m_ShowBufferVideo--;
          }
          else
          {
            //buffer has no timestamp
            pSample->SetTime(NULL,NULL);
            pSample->SetSyncPoint(FALSE);
          }

          // copy buffer into the sample
          // (buffer->Length() <= m_bufferSize was verified above)
          BYTE* pSampleBuffer;
          pSample->SetActualDataLength(buffer->Length());
          pSample->GetPointer(&pSampleBuffer);
          memcpy(pSampleBuffer,buffer->Data(),buffer->Length());

          // delete the buffer
          delete buffer;
          demux.EraseVideoBuff();
          //m_sampleCount++ ;
        }
        else
        {
          // Buffer was not displayed because it was out of date, search for next.
          delete buffer;
          demux.EraseVideoBuff();
          m_bDiscontinuity = TRUE; //Next good sample will be discontinuous
          buffer = NULL;
          m_FillBuffSleepTime = 1;
        }
      }
      earlyStall = false;
    } while (buffer == NULL);

    m_bInFillBuffer = false;
    return NOERROR;
  }
  catch(...)
  {
    LogDebug("vidPin:fillbuffer exception");
  }
  // Exception fallback: deliver an empty sample and flag a discontinuity so
  // the graph keeps running and the next good sample re-syncs downstream.
  m_FillBuffSleepTime = 5;
  CreateEmptySample(pSample);
  m_bDiscontinuity = TRUE; //Next good sample will be discontinuous
  m_bInFillBuffer = false;
  return NOERROR;
}