// Signals end-of-stream: a converting (non-pass-through) instance must
// drain its locally buffered audio before EOS is propagated downstream.
HRESULT CChannelMixer::EndOfStream()
{
  if (m_bPassThrough == false)
  {
    FlushStream();
  }

  return CBaseAudioSink::EndOfStream();
}
// Tears down the PulseAudio playback objects in dependency order:
// stream first, then context, then the threaded mainloop itself.
void AudioOutputPulseAudio::CloseDevice()
{
  // Stream/context operations must run with the threaded mainloop locked.
  if (mainloop)
    pa_threaded_mainloop_lock(mainloop);

  if (pstream)
  {
    // Push any queued audio out before disconnecting the stream.
    FlushStream("CloseDevice");
    pa_stream_disconnect(pstream);
    pa_stream_unref(pstream);
    pstream = NULL;
  }

  if (pcontext)
  {
    // NOTE(review): the pa_operation* returned by pa_context_drain is not
    // unref'd and the drain is never waited on — presumably acceptable
    // because the context is disconnected immediately after; confirm
    // against the PulseAudio API docs.
    pa_context_drain(pcontext, NULL, NULL);
    pa_context_disconnect(pcontext);
    pa_context_unref(pcontext);
    pcontext = NULL;
  }

  if (mainloop)
  {
    // Release the lock before stopping the mainloop thread
    // (pa_threaded_mainloop_stop must not be called with the lock held).
    pa_threaded_mainloop_unlock(mainloop);
    pa_threaded_mainloop_stop(mainloop);
    // NOTE(review): the mainloop object is only NULLed here, never
    // pa_threaded_mainloop_free'd — looks like a leak; verify whether a
    // caller frees it elsewhere.
    mainloop = NULL;
  }
}
// Finalizes the FFmpeg encode: drains the sample buffer, flushes the
// encoder, writes the container trailer, closes the file and releases
// all libav objects and DLLs. Always returns true.
bool CEncoderFFmpeg::Close()
{
  if (m_Format)
  {
    /* if there is anything still in the buffer */
    if (m_BufferSize > 0)
    {
      /* zero the unused space so we dont encode random junk */
      memset(&m_Buffer[m_BufferSize], 0, m_NeededBytes - m_BufferSize);
      /* write any remaining data */
      WriteFrame();
    }

    /* write the eof flag */
    // NOTE(review): the buffer is deleted *before* this WriteFrame call —
    // presumably WriteFrame treats m_Buffer == NULL as the "flush the
    // encoder / emit EOF frame" case; confirm in WriteFrame.
    delete[] m_Buffer;
    m_Buffer = NULL;
    WriteFrame();

    /* write the trailer */
    m_dllAvFormat.av_write_trailer(m_Format);
    FlushStream();
    FileClose();

    /* cleanup */
    m_dllAvCodec.avcodec_close(m_CodecCtx);
    m_dllAvUtil.av_freep(&m_Stream);
    // Free the custom I/O context before the format context that owns it.
    m_dllAvUtil.av_freep(&m_Format->pb);
    m_dllAvUtil.av_freep(&m_Format);
  }

  m_BufferSize = 0;

  // Unload the dynamically loaded libav wrappers in all cases,
  // even when no format context was ever created.
  m_dllAvFormat.Unload();
  m_dllAvUtil.Unload();
  m_dllAvCodec.Unload();
  return true;
}
// Seeks within the output file. Buffered encoder output is flushed
// first so the on-disk position is consistent before the seek.
// Returns the result of the underlying Seek, or -1 when no file is open.
int64_t CEncoder::FileSeek(int64_t iFilePosition, int iWhence)
{
  if (!m_file)
  {
    return -1;
  }

  FlushStream();

  return m_file->Seek(iFilePosition, iWhence);
}
bool CEncoderFlac::Close() { FLAC__bool ok = 0; if (m_encoder) { // finish encoding ok = m_dll.FLAC__stream_encoder_finish(m_encoder); if (!ok) CLog::Log(LOGERROR, "FLAC encoder finish error"); // now that encoding is finished, the metadata can be freed if (m_metadata[0]) m_dll.FLAC__metadata_object_delete(m_metadata[0]); if (m_metadata[1]) m_dll.FLAC__metadata_object_delete(m_metadata[1]); // delete encoder m_dll.FLAC__stream_encoder_delete(m_encoder); } FlushStream(); FileClose(); // unload the flac dll m_dll.Unload(); return ok ? true : false; }
// Pushes pending outgoing data: no-op when disconnected; otherwise lets
// the subclass serialize its packets and, on success, flushes the stream.
void NetChannelBase::HandleOutput()
{
  if (m_state == CS_DISCONNECTED)
    return;

  if (OnWritePackets2Stream())
    FlushStream();
}
// Finalizes the Ogg/Vorbis encode: signals end-of-stream to the
// analysis engine, drains all remaining packets/pages to the output
// file, then tears down every libvorbis/libogg structure and unloads
// the DLLs. Always returns true.
bool CEncoderVorbis::Close()
{
  int eos = 0;

  // tell vorbis we are encoding the end of the stream
  m_VorbisDll.vorbis_analysis_wrote(&m_sVorbisDspState, 0);

  // Drain: pull every remaining analysis block out of the DSP state.
  while (m_VorbisDll.vorbis_analysis_blockout(&m_sVorbisDspState, &m_sVorbisBlock) == 1)
  {
    /* analysis, assume we want to use bitrate management */
    m_VorbisDll.vorbis_analysis(&m_sVorbisBlock, NULL);
    m_VorbisDll.vorbis_bitrate_addblock(&m_sVorbisBlock);

    while (m_VorbisDll.vorbis_bitrate_flushpacket(&m_sVorbisDspState, &m_sOggPacket))
    {
      /* weld the packet into the bitstream */
      m_OggDll.ogg_stream_packetin(&m_sOggStreamState, &m_sOggPacket);

      /* write out pages (if any) */
      while (!eos)
      {
        int result = m_OggDll.ogg_stream_pageout(&m_sOggStreamState, &m_sOggPage);
        if (result == 0) break; // no complete page available yet
        WriteStream(m_sOggPage.header, m_sOggPage.header_len);
        WriteStream(m_sOggPage.body, m_sOggPage.body_len);
        /* this could be set above, but for illustrative purposes, I do
           it here (to show that vorbis does know where the stream ends) */
        if (m_OggDll.ogg_page_eos(&m_sOggPage)) eos = 1;
      }
    }
  }

  /* clean up and exit. vorbis_info_clear() must be called last */
  m_OggDll.ogg_stream_clear(&m_sOggStreamState);
  m_VorbisDll.vorbis_block_clear(&m_sVorbisBlock);
  m_VorbisDll.vorbis_dsp_clear(&m_sVorbisDspState);
  m_VorbisDll.vorbis_comment_clear(&m_sVorbisComment);
  m_VorbisDll.vorbis_info_clear(&m_sVorbisInfo);

  /* ogg_page and ogg_packet structs always point to storage in
     libvorbis. They're never freed or manipulated directly */
  FlushStream();
  FileClose();

  delete []m_pBuffer;
  m_pBuffer = NULL;

  // Unload the dynamically loaded vorbis/ogg libraries.
  m_VorbisEncDll.Unload();
  m_OggDll.Unload();
  m_VorbisDll.Unload();
  return true;
}
bool CEncoder::CloseEncode() { int iBytes = m_impl->Flush(m_buffer); if (iBytes < 0) { CLog::Log(LOGERROR, "Internal encoder error: %i", iBytes); return false; } WriteStream(m_buffer, iBytes); FlushStream(); FileClose(); return m_impl->Close(); }
// Processing

// Accepts one input media sample: handles dynamic format changes and
// discontinuities, forwards untouched in pass-through mode, otherwise
// feeds the sample data through ProcessData in chunks.
HRESULT CChannelMixer::PutSample(IMediaSample *pSample)
{
  if (!pSample) return S_OK;

  AM_MEDIA_TYPE *pmt = NULL;
  bool bFormatChanged = false;
  HRESULT hr = S_OK;

  // A sample may carry a new media type; compare against our current input format.
  if (SUCCEEDED(pSample->GetMediaType(&pmt)) && pmt)
    bFormatChanged = !FormatsEqual((WAVEFORMATEXTENSIBLE*)pmt->pbFormat, m_pInputFormat);

  if (pSample->IsDiscontinuity() == S_OK)
    m_bDiscontinuity = true;

  CAutoLock lock (&m_csOutputSample);

  if (m_bFlushing)
    return S_OK;

  if (bFormatChanged)
  {
    // Process any remaining input
    if (!m_bPassThrough)
      hr = ProcessData(NULL, 0, NULL);
    // NOTE(review): hr from ProcessData above is overwritten by
    // NegotiateFormat below, so a ProcessData failure is silently
    // dropped — confirm this is intended.

    // Apply format change locally,
    // next filter will evaluate the format change when it receives the sample
    Log("CChannelMixer::PutSample: Processing format change");
    ChannelOrder chOrder;
    // bFormatChanged is only true when pmt is non-NULL, so this deref is safe.
    hr = NegotiateFormat((WAVEFORMATEXTENSIBLE*)pmt->pbFormat, 1, &chOrder);
    if (FAILED(hr))
    {
      DeleteMediaType(pmt);
      Log("CChannelMixer: PutSample failed to change format: 0x%08x", hr);
      return hr;
    }
    m_chOrder = chOrder;
  }

  if (pmt)
    DeleteMediaType(pmt);

  if (m_bPassThrough)
  {
    if (m_pNextSink)
      return m_pNextSink->PutSample(pSample);
    return S_OK; // perhaps we should return S_FALSE to indicate sample was dropped
  }

  long nOffset = 0;
  long cbSampleData = pSample->GetActualDataLength();
  BYTE *pData = NULL;
  REFERENCE_TIME rtStop = 0;
  REFERENCE_TIME rtStart = 0;
  pSample->GetTime(&rtStart, &rtStop);

  // Detect discontinuity in stream timeline
  // NOTE(review): abs() on a 64-bit REFERENCE_TIME difference relies on
  // an __int64 abs overload being selected — confirm no truncation to int.
  if ((abs(m_rtNextIncomingSampleTime - rtStart) > MAX_SAMPLE_TIME_ERROR) && m_nSampleNum != 0)
  {
    Log("CChannelMixer - stream discontinuity: %6.3f", (rtStart - m_rtNextIncomingSampleTime) / 10000000.0);
    m_rtInSampleTime = rtStart;
    if (m_nSampleNum > 0)
    {
      Log("CChannelMixer - using buffered sample data");
      FlushStream();
    }
    else
      Log("CChannelMixer - discarding buffered sample data");
  }

  // First sample after a reset: anchor the input timeline on it.
  if (m_nSampleNum == 0)
    m_rtInSampleTime = rtStart;

  // Predict where the next sample should start, based on this sample's
  // frame count and the input sample rate.
  UINT nFrames = cbSampleData / m_pInputFormat->Format.nBlockAlign;
  REFERENCE_TIME duration = nFrames * UNITS / m_pInputFormat->Format.nSamplesPerSec;
  m_rtNextIncomingSampleTime = rtStart + duration;
  m_nSampleNum++;

  hr = pSample->GetPointer(&pData);
  ASSERT(pData);
  if (FAILED(hr))
  {
    Log("CChannelMixer::PutSample - failed to get sample's data pointer: 0x%08x", hr);
    return hr;
  }

  // Feed the payload to the mixer in as many passes as ProcessData needs.
  while (nOffset < cbSampleData && SUCCEEDED(hr))
  {
    long cbProcessed = 0;
    hr = ProcessData(pData + nOffset, cbSampleData - nOffset, &cbProcessed);
    nOffset += cbProcessed;
  }

  return hr;
}
// End-of-stream handler: pass-through keeps no local buffer, so only a
// converting instance drains pending samples before forwarding EOS.
HRESULT CSampleRateConverter::EndOfStream()
{
  if (!m_bPassThrough)
  {
    FlushStream();
  }

  return CBaseAudioSink::EndOfStream();
}
void FHttpNetworkReplayStreamer::Tick( float DeltaTime ) { if ( IsHttpBusy() ) { return; } if ( SessionName.IsEmpty() ) { check( StreamerState == EStreamerState::Idle ); return; } if ( bStopStreamingCalled ) { // If http isn't busy and we need to flush the last chunk, then go to that state now if ( StreamerState == EStreamerState::StreamingUp ) { StreamerState = EStreamerState::StreamingUpFinal; bStopStreamingCalled = false; } else if ( StreamerState == EStreamerState::StreamingDown ) { StreamerState = EStreamerState::StreamingDownFinal; bStopStreamingCalled = false; } } if ( StreamerState == EStreamerState::NeedToUploadHeader ) { // If we're waiting on the header, don't do anything until the header has data and is uploaded UploadHeader(); } else if ( StreamerState == EStreamerState::StreamingUp ) { const double FLUSH_TIME_IN_SECONDS = 10; if ( FPlatformTime::Seconds() - LastChunkTime > FLUSH_TIME_IN_SECONDS ) { FlushStream(); } } else if ( StreamerState == EStreamerState::StreamingUpFinal ) { FlushStream(); } else if ( StreamerState == EStreamerState::NeedToDownloadHeader ) { // If we're waiting on the header to download, don't do anything until the header has been downloaded DownloadHeader(); } else if ( StreamerState == EStreamerState::StreamingDown ) { DownloadNextChunk(); RefreshViewer(); } else if ( StreamerState == EStreamerState::StreamingDownFinal ) { RefreshViewer(); } }
// Renders all static and dynamic (skeleton-attached) wallmarks.
// Temporarily biases the projection/view matrices toward the camera to
// avoid z-fighting with the underlying geometry, streams wallmark
// vertices into a dynamic vertex buffer per shader slot, and restores
// the matrices afterwards.
void CWallmarksEngine::Render()
{
	// if (marks.empty()) return;

	// Projection and xform
	// Save and shift the projection's _43 term to pull wallmarks
	// slightly toward the viewer (depth-bias against z-fighting).
	float _43 = Device.mProject._43;
	Device.mProject._43 -= ps_r__WallmarkSHIFT;
	RCache.set_xform_world (Fidentity);
	RCache.set_xform_project (Device.mProject);
	// Also move the camera position forward by ps_r__WallmarkSHIFT_V
	// along the view direction; the original view matrix is restored at the end.
	Fmatrix mSavedView = Device.mView;
	Fvector mViewPos;
	mViewPos.mad (Device.vCameraPosition, Device.vCameraDirection,ps_r__WallmarkSHIFT_V);
	Device.mView.build_camera_dir (mViewPos,Device.vCameraDirection,Device.vCameraTop);
	RCache.set_xform_view (Device.mView);

	Device.Statistic->RenderDUMP_WM.Begin ();
	Device.Statistic->RenderDUMP_WMS_Count = 0;
	Device.Statistic->RenderDUMP_WMD_Count = 0;
	Device.Statistic->RenderDUMP_WMT_Count = 0;

	// Screen-space-area threshold below which a wallmark is not drawn.
	float ssaCLIP = r_ssaDISCARD/4;

	lock.Enter (); // Physics may add wallmarks in parallel with rendering
	for (WMSlotVecIt slot_it=marks.begin(); slot_it!=marks.end(); slot_it++){
		u32 w_offset;
		FVF::LIT *w_verts, *w_start;
		BeginStream (hGeom,w_offset,w_verts,w_start);
		wm_slot* slot = *slot_it;

		// static wallmarks
		// No w_it++ in the for-header: removal uses swap-with-back +
		// pop_back, which revisits the current index.
		for (StaticWMVecIt w_it=slot->static_items.begin(); w_it!=slot->static_items.end(); ){
			static_wallmark* W = *w_it;
			if (RImplementation.ViewBase.testSphere_dirty(W->bounds.P,W->bounds.R)){
				Device.Statistic->RenderDUMP_WMS_Count++;
				// Screen-space-area estimate: radius^2 over squared distance.
				float dst = Device.vCameraPosition.distance_to_sqr(W->bounds.P);
				float ssa = W->bounds.R * W->bounds.R / dst;
				if (ssa>=ssaCLIP) {
					// Flush + restart the stream when this wallmark's
					// vertices would overflow the vertex budget.
					u32 w_count = u32(w_verts-w_start);
					if ((w_count+W->verts.size())>=(MAX_TRIS*3)){
						FlushStream (hGeom,slot->shader,w_offset,w_verts,w_start,FALSE);
						BeginStream (hGeom,w_offset,w_verts,w_start);
					}
					static_wm_render (W,w_verts);
				}
				W->ttl -= 0.1f*Device.fTimeDelta; // visible wallmarks fade much slower
			} else {
				W->ttl -= Device.fTimeDelta;
			}
			if (W->ttl<=EPS){
				// Expired: destroy and remove via swap-with-last (order not preserved).
				static_wm_destroy (W);
				*w_it = slot->static_items.back();
				slot->static_items.pop_back();
			}else{
				w_it++;
			}
		}
		// Flush stream
		FlushStream (hGeom,slot->shader,w_offset,w_verts,w_start,FALSE); //. remove line if !(suppress cull needed)
		BeginStream (hGeom,w_offset,w_verts,w_start);

		// dynamic wallmarks
		for (xr_vector<intrusive_ptr<CSkeletonWallmark> >::iterator w_it=slot->skeleton_items.begin(); w_it!=slot->skeleton_items.end(); w_it++){
			intrusive_ptr<CSkeletonWallmark> W = *w_it;
			if (!W){ continue ; }
#ifdef DEBUG
			// Sanity check: the wallmark must have been touched by this frame's update pass.
			if(W->used_in_render != Device.dwFrame)
			{
				Log("W->used_in_render",W->used_in_render);
				Log("Device.dwFrame",Device.dwFrame);
				VERIFY(W->used_in_render == Device.dwFrame);
			}
#endif
			float dst = Device.vCameraPosition.distance_to_sqr(W->m_Bounds.P);
			float ssa = W->m_Bounds.R * W->m_Bounds.R / dst;
			if (ssa>=ssaCLIP){
				Device.Statistic->RenderDUMP_WMD_Count++;
				u32 w_count = u32(w_verts-w_start);
				if ((w_count+W->VCount())>=(MAX_TRIS*3)){
					FlushStream (hGeom,slot->shader,w_offset,w_verts,w_start,TRUE);
					BeginStream (hGeom,w_offset,w_verts,w_start);
				}
				// On failure, roll the write pointer back so a partially
				// written wallmark is not flushed.
				FVF::LIT *w_save = w_verts;
				try {
					W->Parent()->RenderWallmark (W,w_verts);
				} catch (...)
				{
					Msg ("! Failed to render dynamic wallmark");
					w_verts = w_save;
				}
			}
#ifdef DEBUG
			W->used_in_render = u32(-1);
#endif
		}
		// Dynamic wallmarks are re-collected every frame.
		slot->skeleton_items.clear();
		// Flush stream
		FlushStream (hGeom,slot->shader,w_offset,w_verts,w_start,TRUE);
	}
	lock.Leave(); // Physics may add wallmarks in parallel with rendering

	// Level-wmarks
	RImplementation.r_dsgraph_render_wmarks ();
	Device.Statistic->RenderDUMP_WM.End ();

	// Projection
	// Restore the saved view/projection state.
	Device.mView = mSavedView;
	Device.mProject._43 = _43;
	RCache.set_xform_view (Device.mView);
	RCache.set_xform_project (Device.mProject);
}