Example no. 1
MediaSourceImpl::State
MediaSourceImpl::EnsureBufferingInternal()
{
	MediaByteRange request;
	CalcRequest(request);

	if (!request.IsEmpty())
	{
		m_clamp_request = FALSE;

		if (NeedRestart(request))
		{
			StopBuffering();
			return StartBuffering(request);
		}

		// The request wasn't restarted, so it may need to be clamped
		// by aborting it once enough data has become available.
		if (request.IsFinite() && !IsStreaming())
		{
			OpFileLength loading_end = FILE_LENGTH_NONE;
			m_use_url->GetAttribute(URL::KHTTPRangeEnd, &loading_end);
			if (loading_end == FILE_LENGTH_NONE || request.end < loading_end)
				m_clamp_request = TRUE;
		}
	}
	else
	{
		// We have all the data we wanted, so stop buffering if possible.
		switch (m_state)
		{
		case NONE:
			// Already loaded (data: URL or in cache).
			return IDLE;

		case IDLE:
		case FAILED:
			// not loading
			break;

		case STARTED:
		case HEADERS:
		case LOADING:
		case PAUSED:
			// Only stop a load if it's in fact already complete or if
			// it's one that we can later resume. However, when using
			// the streaming cache, continue loading until either the
			// cache fills up and PauseBuffering() is called or (if
			// the request fits in cache) IsLoadedURL() is true.
			if (IsLoadedURL(m_use_url) || (IsResumableURL(m_use_url) && !IsStreaming()))
			{
				StopBuffering();
				return IDLE;
			}
			break;
		}
	}

	return NONE;
}
Example no. 2
SIZE_T USoundWave::GetResourceSize(EResourceSizeMode::Type Mode)
{
	int32 CalculatedResourceSize = 0;

	if (DecompressionType == DTYPE_Native)
	{
		// If we've been decompressed, need to account for decompressed and also compressed
		CalculatedResourceSize += RawPCMDataSize;
	}
	else if (DecompressionType == DTYPE_RealTime)
	{
		if (CachedRealtimeFirstBuffer)
		{
			CalculatedResourceSize += MONO_PCM_BUFFER_SIZE * NumChannels;
		}
	}

	if (GEngine && GEngine->GetMainAudioDevice())
	{
		// Don't add compressed data to size of streaming sounds
		if (!FPlatformProperties::SupportsAudioStreaming() || !IsStreaming())
		{
			CalculatedResourceSize += GetCompressedDataSize(GEngine->GetMainAudioDevice()->GetRuntimeFormat(this));
		}
	}

	return CalculatedResourceSize;
}
Example no. 3
void USoundWave::PostLoad()
{
	Super::PostLoad();

	if (GetOutermost()->HasAnyPackageFlags(PKG_ReloadingForCooker))
	{
		return;
	}

	// Compress to whatever formats the active target platforms want
	// static here as an optimization
	ITargetPlatformManagerModule* TPM = GetTargetPlatformManager();
	if (TPM)
	{
		const TArray<ITargetPlatform*>& Platforms = TPM->GetActiveTargetPlatforms();

		for (int32 Index = 0; Index < Platforms.Num(); Index++)
		{
			GetCompressedData(Platforms[Index]->GetWaveFormat(this));
		}
	}

	// We don't precache default objects and we don't precache in the Editor as the latter will
	// most likely cause us to run out of memory.
	if (!GIsEditor && !IsTemplate( RF_ClassDefaultObject ) && GEngine)
	{
		FAudioDevice* AudioDevice = GEngine->GetMainAudioDevice();
		if (AudioDevice && AudioDevice->AreStartupSoundsPreCached())
		{
			// Upload the data to the hardware, but only if we've precached startup sounds already
			AudioDevice->Precache(this);
		}
		// remove bulk data if no AudioDevice is used and no sounds were initialized
		else if(IsRunningGame())
		{
			RawData.RemoveBulkData();
		}
	}

	// Only add this streaming sound if we're not a dedicated server or if there is an audio device manager
	if (IsStreaming() && !IsRunningDedicatedServer() && GEngine && GEngine->GetAudioDeviceManager())
	{
#if WITH_EDITORONLY_DATA
		FinishCachePlatformData();
#endif // #if WITH_EDITORONLY_DATA
		IStreamingManager::Get().GetAudioStreamingManager().AddStreamingSoundWave(this);
	}

#if WITH_EDITORONLY_DATA
	if (!SourceFilePath_DEPRECATED.IsEmpty() && AssetImportData)
	{
		FAssetImportInfo Info;
		Info.Insert(FAssetImportInfo::FSourceFile(SourceFilePath_DEPRECATED));
		AssetImportData->SourceData = MoveTemp(Info);
	}
#endif // #if WITH_EDITORONLY_DATA

	INC_FLOAT_STAT_BY( STAT_AudioBufferTime, Duration );
	INC_FLOAT_STAT_BY( STAT_AudioBufferTimeChannels, NumChannels * Duration );
}
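Example no. 4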
// -------------------------------------------------------------------------- //
// Overridden functions                                                 
// -------------------------------------------------------------------------- //
BOOL VFmodSoundResource::Reload()
{
  VFmodManager &manager = VFmodManager::GlobalManager();
  if (!manager.IsInitialized())
    return FALSE;

  VASSERT(!m_pSound);

  int iFMODFlags = FMOD_SOFTWARE; // software must be used when geometry is used
  iFMODFlags |= Is3D() ? FMOD_3D:FMOD_2D;
  iFMODFlags |= IsStreaming() ? FMOD_CREATESTREAM : FMOD_CREATESAMPLE;
  if (!LogAttenuation())
    iFMODFlags |= FMOD_3D_LINEARROLLOFF;
  if(IsCompressedData())
    iFMODFlags |= FMOD_CREATECOMPRESSEDSAMPLE;
  iFMODFlags |= LoadsAsyncronous() ? FMOD_NONBLOCKING : FMOD_DEFAULT;

  FMOD_WARNINGCHECK(manager.m_pSystem->createSound(GetFilename(), iFMODFlags, NULL, &m_pSound));
  if (!m_pSound)
    return FALSE;

  // get the size in memory
  unsigned int iSize = 0;
  FMOD_WARNINGCHECK(m_pSound->getMemoryInfo(FMOD_MEMBITS_ALL, 0, &iSize, NULL));

  SetNewMemSize(VRESOURCEMEMORY_SYSTEM,iSize);  
  m_iSysMem = (int)iSize;

  return TRUE;
}
Example no. 5
void USoundWave::UpdatePlatformData()
{
	if (IsStreaming())
	{
		// Make sure there are no pending requests in flight.
		while (IStreamingManager::Get().GetAudioStreamingManager().IsStreamingInProgress(this))
		{
			// Give up timeslice.
			FPlatformProcess::Sleep(0);
		}

#if WITH_EDITORONLY_DATA
		// Temporarily remove from streaming manager to release currently used data chunks
		IStreamingManager::Get().GetAudioStreamingManager().RemoveStreamingSoundWave(this);
		// Recache platform data if the source has changed.
		CachePlatformData();
		// Add back to the streaming manager to reload first chunk
		IStreamingManager::Get().GetAudioStreamingManager().AddStreamingSoundWave(this);
#endif
	}
	else
	{
		IStreamingManager::Get().GetAudioStreamingManager().RemoveStreamingSoundWave(this);
	}
}
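The wait loop above burns a timeslice per iteration via FPlatformProcess::Sleep(0); the same wait-until-idle shape in portable standard C++ (IsInFlight is an illustrative stand-in for the streaming-manager query, not Unreal's API):

#include <functional>
#include <thread>

// Spin until the predicate reports no work in flight, yielding the
// timeslice on each pass rather than blocking.
void WaitUntilIdle(const std::function<bool()>& IsInFlight)
{
    while (IsInFlight())
        std::this_thread::yield();  // the portable analogue of Sleep(0)
}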
Example no. 6
bool CMOOSSerialPort::GetTelegramOrAccumulate(std::string &sTelegram,double dfTimeOut,double *pTime)
{
    if(IsStreaming())
    {
        MOOSTrace("don't call GetTelegram on a streaming device!\n");
        return false;
    }
    
    static char telegramBuffer[TELEGRAM_LEN];
    static int nTelegramBufferRead = 0;              //total number of chars read
    
    double dfTimeWaited = 0.0;              //haven't waited any time yet
    double dfInterval = 0.01;             //10ms
    
    
    while ((dfTimeWaited<dfTimeOut) && nTelegramBufferRead<TELEGRAM_LEN)
    {
        int nGrabbed = 0;
        
        //try the read
        nGrabbed = GrabN(telegramBuffer+nTelegramBufferRead,1);
        
        if (nGrabbed == 0)
        {
            //OK wait a while...maybe it is on its way!
            dfTimeWaited+=dfInterval;
            
            MOOSPause((int)(dfInterval*1000.0));
        }
        else
        {
            if(nTelegramBufferRead==0 && pTime!=NULL)
            {
                //grab the time..                        
                *pTime = MOOSTime();
            }
            
            
            nTelegramBufferRead+=nGrabbed;
            
            //have we reached the end of the message?
            if(IsCompleteReply(telegramBuffer,TELEGRAM_LEN,nTelegramBufferRead))
            {
                telegramBuffer[nTelegramBufferRead]='\0';
                nTelegramBufferRead = 0;
                sTelegram = telegramBuffer;
                MOOSRemoveChars(sTelegram,"\r\n");
                
                if(IsVerbose())
                {
                    MOOSTrace("Telegram = %s\n",sTelegram.c_str());
                }
                //MOOSTrace("Required %d retries and %d accumulates\n",nRetries,nAccumulates);
                return true;
            }            
        }
    }
    
    return false;
}
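Example no. 7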
bool USoundWave::IsCachedCookedPlatformDataLoaded( const ITargetPlatform* TargetPlatform ) 
{
	if (TargetPlatform->SupportsFeature(ETargetPlatformFeatures::AudioStreaming) && IsStreaming())
	{
		// Retrieve format to cache for targetplatform.
		FName PlatformFormat = TargetPlatform->GetWaveFormat(this);

		// find format data by comparing derived data keys.
		FString DerivedDataKey;
		GetStreamedAudioDerivedDataKeySuffix(*this, PlatformFormat, DerivedDataKey);

		FStreamedAudioPlatformData *PlatformData = CookedPlatformData.FindRef(DerivedDataKey);
		if (PlatformData == NULL)
		{
			// we haven't called BeginCacheForCookedPlatformData yet
			return false;
		}

		if (PlatformData->AsyncTask && PlatformData->AsyncTask->IsWorkDone())
		{
			PlatformData->FinishCache();
		}

		return PlatformData->IsFinishedCache();
	}
	return true; 
}
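The AsyncTask handling above (finalize the cache only once the background work reports done, then return the cached state) has the same shape as a non-blocking future poll; a generic standard-library sketch, not Unreal's API:

#include <chrono>
#include <future>

// Non-blocking completion check: harvest the result only if the
// asynchronous work has already finished; otherwise report not-ready.
template <typename T>
bool TryFinish(std::future<T>& task, T& out)
{
    if (task.valid() &&
        task.wait_for(std::chrono::seconds(0)) == std::future_status::ready)
    {
        out = task.get();
        return true;
    }
    return false;
}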
Example no. 8
BOOL
MediaSourceImpl::NeedRestart(const MediaByteRange& request)
{
	OP_ASSERT(!request.IsEmpty());
	// Note: this function assumes that request is not in cache

	// If not loading we certainly need to start.
	if (m_state == NONE || m_state == IDLE)
		return TRUE;

	// Only restart resumable resources.
	if (!IsResumableURL(m_use_url))
		return FALSE;

	// Get the currently loading range.
	MediaByteRange loading;
	m_use_url->GetAttribute(URL::KHTTPRangeStart, &loading.start);
	m_use_url->GetAttribute(URL::KHTTPRangeEnd, &loading.end);
	OP_ASSERT(!loading.IsEmpty());

	// When streaming, adjust the loading range to not include what
	// has already been evicted from the cache. Note: This must not be
	// done for a request that was just started, as the cache can then
	// contain data from the previous request which is not relevant.
	if (m_state >= LOADING && IsStreaming())
	{
		BOOL available = FALSE;
		OpFileLength length = 0;
		GetPartialCoverage(loading.start, available, length);
		if (!available && (!loading.IsFinite() || length < loading.Length()))
			loading.start += length;
	}

	// Restart if request is before currently loading range.
	if (request.start < loading.start)
		return TRUE;

	// Restart if request is after currently loading range.
	if (loading.IsFinite() && request.start > loading.end)
		return TRUE;

	// request is now a subset of loading, check how much we have left
	// to load until request.start.
	BOOL available = FALSE;
	OpFileLength length = 0;
	GetPartialCoverage(loading.start, available, length);
	if (!available)
		length = 0;
	if (request.start > loading.start + length)
	{
		// FIXME: calculate download rate and time taken to reach offset (CORE-27952)
		OpFileLength remaining = request.start - (loading.start + length);
		if (remaining > MEDIA_SOURCE_MAX_WAIT)
			return TRUE;
	}

	return FALSE;
}
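A condensed restatement of the decision above, with a plain integer range standing in for MediaByteRange and maxWait standing in for MEDIA_SOURCE_MAX_WAIT (illustrative names, not Opera's types):

#include <cstdint>

struct Range
{
    std::uint64_t start;
    std::uint64_t end;    // meaningful only when finite is true
    bool finite;
};

// Mirrors NeedRestart(): restart when the request falls outside the range
// being loaded, or when too many unloaded bytes separate the buffered data
// from the requested offset.
bool NeedRestartSketch(const Range& request, const Range& loading,
                       std::uint64_t bytesLoadedFromStart, std::uint64_t maxWait)
{
    if (request.start < loading.start)
        return true;  // request precedes the current load
    if (loading.finite && request.start > loading.end)
        return true;  // request follows the current load
    const std::uint64_t covered = loading.start + bytesLoadedFromStart;
    return request.start > covered && request.start - covered > maxWait;
}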
Example no. 9
//
// OnReceiveFirstSample
//
// Display an image if not streaming
//
void CTextOutFilter::OnReceiveFirstSample(IMediaSample *pMediaSample)
{
    if(IsStreaming() == FALSE)
    {
        ASSERT(pMediaSample);
        DrawText(pMediaSample);
    }

} // OnReceiveFirstSample
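Example no. 10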
// -------------------------------------------------------------------------- //
// Constructor/ Destructor                                                 
// -------------------------------------------------------------------------- //
VFmodSoundResource::VFmodSoundResource(VFmodSoundResourceManager *pManager, int iUsageFlags) : VManagedResource(pManager)
{
  m_iSysMem = 0;
  m_iSoundFlags = iUsageFlags;
  m_pSound = NULL;

  if (IsStreaming()) // since unique
    SetResourceFlag(VRESOURCEFLAG_AUTODELETE);
}
Example no. 11
OpFileLength
MediaSourceImpl::GetTotalBytes()
{
	// For non-resumable, non-streaming resources we know the total size
	// for sure once it is fully loaded. Otherwise we have to trust the
	// Content-Length header, or return 0 if there is none.
	if (!IsResumableURL(m_use_url) && !IsStreaming() && IsLoadedURL(m_use_url))
		return m_use_url->ContentLoaded();
	return m_use_url->GetContentSize();
}
Example no. 12
void
MediaSourceImpl::ResumeBuffering()
{
	OP_NEW_DBG("ResumeBuffering", "MediaSource");
	OP_DBG((""));

	OP_ASSERT(m_state == PAUSED && IsStreaming());

	m_use_url->SetAttribute(URL::KPauseDownload, FALSE);
	m_state = LOADING;
}
Example no. 13
void
MediaSourceImpl::PauseBuffering()
{
	OP_NEW_DBG("PauseBuffering", "MediaSource");
	OP_DBG((""));

	OP_ASSERT(m_state == LOADING && IsStreaming());

	m_use_url->SetAttribute(URL::KPauseDownload, TRUE);
	m_state = PAUSED;
	for (OpListenersIterator iter(m_listeners); m_listeners.HasNext(iter);)
		m_listeners.GetNext(iter)->OnIdle(this);
}
Example no. 14
OP_STATUS
MediaSourceImpl::Init()
{
	m_use_url.SetURL(m_key_url);
	// Purge the resource from cache if it is expired or we are using
	// the streaming cache. Cache invalidation during the lifetime of
	// the MediaSourceImpl instance is not supported, see CORE-27748.
	if (m_use_url->Expired(TRUE) || IsStreaming())
		m_use_url->Unload();
	m_use_url->SetAttribute(URL::KMultimedia, TRUE);
	m_use_url->SetAttribute(URL::KSendAcceptEncoding, FALSE);
	return SetCallBacks();
}
Example no. 15
void USoundWave::PostLoad()
{
	Super::PostLoad();

	if (GetOutermost()->PackageFlags & PKG_ReloadingForCooker)
	{
		return;
	}

	// Compress to whatever formats the active target platforms want
	// static here as an optimization
	ITargetPlatformManagerModule* TPM = GetTargetPlatformManager();
	if (TPM)
	{
		const TArray<ITargetPlatform*>& Platforms = TPM->GetActiveTargetPlatforms();

		for (int32 Index = 0; Index < Platforms.Num(); Index++)
		{
			GetCompressedData(Platforms[Index]->GetWaveFormat(this));
		}
	}

	// We don't precache default objects and we don't precache in the Editor as the latter will
	// most likely cause us to run out of memory.
	if( !GIsEditor && !IsTemplate( RF_ClassDefaultObject ) && GEngine )
	{
		FAudioDevice* AudioDevice = GEngine->GetAudioDevice();
		if( AudioDevice && AudioDevice->bStartupSoundsPreCached)
		{
			// Upload the data to the hardware, but only if we've precached startup sounds already
			AudioDevice->Precache( this );
		}
		// remove bulk data if no AudioDevice is used and no sounds were initialized
		else if( IsRunningGame() )
		{
			RawData.RemoveBulkData();
		}
	}

	if (IsStreaming())
	{
#if WITH_EDITORONLY_DATA
		FinishCachePlatformData();
#endif // #if WITH_EDITORONLY_DATA
		IStreamingManager::Get().GetAudioStreamingManager().AddStreamingSoundWave(this);
	}

	INC_FLOAT_STAT_BY( STAT_AudioBufferTime, Duration );
	INC_FLOAT_STAT_BY( STAT_AudioBufferTimeChannels, NumChannels * Duration );
}
Example no. 16
bool USoundWave::InitAudioResource(FName Format)
{
	if( !ResourceSize && (!FPlatformProperties::SupportsAudioStreaming() || !IsStreaming()) )
	{
		FByteBulkData* Bulk = GetCompressedData(Format);
		if (Bulk)
		{
			ResourceSize = Bulk->GetBulkDataSize();
			check(ResourceSize > 0);
			check(!ResourceData);
			Bulk->GetCopy((void**)&ResourceData, true);
		}
	}

	return ResourceSize > 0;
}
Example no. 17
bool CStreamingEncoder::StartRecording(void)
{
	CMediaBuffer* pApp = (CMediaBuffer*)AfxGetApp();
	bool bReturn = true;

	DebugTell(_T("CStreamingEncoder[%d,%d]::StartRecording: starting encoder at %s:%d."),m_pParentRoomObj->m_pRoom->ID,m_pStreamingEncoder->ID,
		(LPCTSTR)m_pStreamingEncoder->StreamingAddress,
		m_pStreamingEncoder->StreamingPort);
	m_iMediaBufferServerRecordingIndex = 
		pApp->m_pMediaBufferServer->StartRecording((LPCTSTR)m_pStreamingEncoder->StreamingAddress,m_pStreamingEncoder->StreamingPort);

	if (m_iMediaBufferServerRecordingIndex >= 0)
	{
		DebugTell(_T("CStreamingEncoder[%d,%d]::StartRecording: encoder started. Ingest Index: %d"),m_pParentRoomObj->m_pRoom->ID,m_pStreamingEncoder->ID,
			m_iMediaBufferServerRecordingIndex);
		DebugTell(_T("CStreamingEncoder[%d,%d]::StartRecording: checking for stream at %s:%d."),m_pParentRoomObj->m_pRoom->ID,m_pStreamingEncoder->ID,
			(LPCTSTR)m_pStreamingEncoder->StreamingAddress,
			m_pStreamingEncoder->StreamingPort);
		pApp->m_pMediaBufferServer->GetFilename(m_iMediaBufferServerRecordingIndex,m_sFilename);
		pApp->m_pMediaBufferServer->GetFileTitle(m_iMediaBufferServerRecordingIndex,m_sFileTitle);
		if (!IsStreaming())
		{
			DebugTell(_T("CStreamingEncoder[%d,%d]::StartRecording: encoder at %s:%d is not streaming."),m_pParentRoomObj->m_pRoom->ID,m_pStreamingEncoder->ID,
				(LPCTSTR)m_pStreamingEncoder->StreamingAddress,
				m_pStreamingEncoder->StreamingPort);
//			StopRecording(false);
//			DeleteFile(m_sFilename);
			bReturn = false;
		}
/*
		else
		{
			pApp->m_pMediaBufferServer->GetFilename(m_iMediaBufferServerRecordingIndex,m_sFilename);
			pApp->m_pMediaBufferServer->GetFileTitle(m_iMediaBufferServerRecordingIndex,m_sFileTitle);
		}
*/
	}
	else
	{
		DebugTell(_T("CStreamingEncoder[%d,%d]::StartRecording: failed to start encoder at %s:%d."),m_pParentRoomObj->m_pRoom->ID,m_pStreamingEncoder->ID,(LPCTSTR)m_pStreamingEncoder->StreamingAddress,
			m_pStreamingEncoder->StreamingPort);
		bReturn = false;
	}
	return bReturn;
}
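Example no. 18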
int VFmodSoundResource::GetAdditionalOutputString(char *szDestBuffer, int iMaxChars)
{
  szDestBuffer[0] = 0;
  char *szStart = szDestBuffer;

  if (Is2D()) 
    szDestBuffer += sprintf(szDestBuffer,"2D;"); 
  else 
    szDestBuffer += sprintf(szDestBuffer,"3D;");
  if (LinearAttenuation()) 
    szDestBuffer += sprintf(szDestBuffer,"linear;"); 
  else 
    szDestBuffer += sprintf(szDestBuffer,"log;");
  if (IsStreaming()) 
    szDestBuffer += sprintf(szDestBuffer,"streaming;");

  return int(szDestBuffer-szStart);
}
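Note that iMaxChars in GetAdditionalOutputString above is accepted but never enforced; a bounds-checked sketch of the same append, using standard snprintf (names are illustrative, not part of the Vision API):

#include <cstdio>

// Build the same "2D;/3D;", "linear;/log;", "streaming;" summary while
// honoring the destination capacity. snprintf always NUL-terminates for
// a positive buffer size, unlike the unchecked sprintf chain above.
int DescribeFlagsSketch(char* szDest, int iMaxChars,
                        bool bIs2D, bool bLinear, bool bStreaming)
{
    if (iMaxChars <= 0)
        return 0;
    int written = std::snprintf(szDest, static_cast<std::size_t>(iMaxChars),
                                "%s%s%s",
                                bIs2D ? "2D;" : "3D;",
                                bLinear ? "linear;" : "log;",
                                bStreaming ? "streaming;" : "");
    return (written < 0 || written >= iMaxChars) ? iMaxChars - 1 : written;
}

Example no. 19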
void VFmodSoundResource::GetDependencies(VResourceSnapshot &snapshot)
{
  if (!IsStreaming())
  {
    VManagedResource::GetDependencies(snapshot);
    IVFileInStream *pFile = Vision::File.Open(GetFilename());
    if (pFile)
    {
      // patch the file size afterwards
      VResourceSnapshotEntry *pEntry = snapshot.FindResourceEntry(this);
      VASSERT(pEntry != NULL);
      if (pEntry)
      {
        pEntry->m_iFileSize = pFile->GetSize();
        pEntry->SetCustomIntValue(0, m_iSoundFlags);
      }
      pFile->Close();
    }
  }
}
Example no. 20
void USoundWave::ClearCachedCookedPlatformData( const ITargetPlatform* TargetPlatform )
{
	Super::ClearCachedCookedPlatformData(TargetPlatform);

	if (TargetPlatform->SupportsFeature(ETargetPlatformFeatures::AudioStreaming) && IsStreaming())
	{
		// Retrieve format to cache for targetplatform.
		FName PlatformFormat = TargetPlatform->GetWaveFormat(this);

		// find format data by comparing derived data keys.
		FString DerivedDataKey;
		GetStreamedAudioDerivedDataKeySuffix(*this, PlatformFormat, DerivedDataKey);

		
		if ( CookedPlatformData.Contains(DerivedDataKey) )
		{
			FStreamedAudioPlatformData *PlatformData = CookedPlatformData.FindAndRemoveChecked( DerivedDataKey );
			delete PlatformData;
		}	
	}
}
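Example no. 21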
void StreamService::operator()()
{
	Thread loadThread("Loader");
	loadThread.startThread(std::ref(loader));

	currentBuffer = &streamBufferA;

	UInt32 bufferOffset = 0;
	loader.ReadyForData();
	InitialDataMove(bufferOffset);
	StartProcessing();

	while (IsStreaming())
	{
		coordCV.wait(lock);

		StreamBuffer* currBuff = GetActiveBuffer();
		
		musicRef->PushData(currBuff, ByteSubmissionAmount, bufferOffset);

		bufferOffset += ByteSubmissionAmount;

		if (bufferOffset >= currBuff->numBytesInBuffer)
		{
			if (LastBuffer)
			{
				musicRef->StopProcessing();
				SetStreaming(FALSE);
			}
			currBuff->hasData = FALSE;
			SwapActiveBuffer();
			bufferOffset = 0;
			loader.ReadyForData();
		}
	}

	loadThread.join();

	streamer->StreamEnded(this);
}
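A single-threaded sketch of the ping-pong hand-off that drives the loop above; StreamBufferSketch and DrainSketch are illustrative (the real code swaps buffers under coordCV with a loader thread refilling the inactive one):

#include <cstddef>

struct StreamBufferSketch
{
    unsigned char data[4096];
    std::size_t numBytesInBuffer = 0;
    bool hasData = false;
};

// Drain 'chunk' bytes per pass from the active buffer; when it runs dry,
// mark it refillable and switch to the other buffer.
void DrainSketch(StreamBufferSketch& a, StreamBufferSketch& b, std::size_t chunk)
{
    StreamBufferSketch* active = &a;
    std::size_t offset = 0;
    while (active->hasData)
    {
        // ... submit data[offset, offset + chunk) to the consumer ...
        offset += chunk;
        if (offset >= active->numBytesInBuffer)
        {
            active->hasData = false;            // loader may now refill it
            active = (active == &a) ? &b : &a;  // swap the active buffer
            offset = 0;
        }
    }
}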
Example no. 22
SIZE_T USoundWave::GetResourceSize(EResourceSizeMode::Type Mode)
{
	if (!GEngine)
	{
		return 0;
	}

	SIZE_T CalculatedResourceSize = 0;

	if (FAudioDevice* LocalAudioDevice = GEngine->GetMainAudioDevice())
	{
		if (LocalAudioDevice->HasCompressedAudioInfoClass(this) && DecompressionType == DTYPE_Native)
		{
			// In non-editor builds ensure that the "native" sound wave has unloaded its compressed asset at this point.
			// DTYPE_Native assets fully decompress themselves on load and are supposed to unload the compressed asset when it finishes.
			// However, in the editor, it's possible for an asset to be DTYPE_Native and not referenced by the currently loaded level and thus not
			// actually loaded (and fully decompressed) before its ResourceSize is queried.
			if (!GIsEditor)
			{
				ensureMsgf(ResourceSize == 0, TEXT("ResourceSize for DTYPE_Native USoundWave '%s' was not 0 (%d)."), *GetName(), ResourceSize);
			}
			CalculatedResourceSize = RawPCMDataSize;
		}
		else 
		{
			if (DecompressionType == DTYPE_RealTime && CachedRealtimeFirstBuffer)
			{
				CalculatedResourceSize = MONO_PCM_BUFFER_SIZE * NumChannels;
			}
			
			if ((!FPlatformProperties::SupportsAudioStreaming() || !IsStreaming()))
			{
				CalculatedResourceSize += GetCompressedDataSize(LocalAudioDevice->GetRuntimeFormat(this));
			}
		}
	}

	return CalculatedResourceSize;
}
Example no. 23
void USoundWave::BeginCacheForCookedPlatformData(const ITargetPlatform *TargetPlatform)
{
	if (TargetPlatform->SupportsFeature(ETargetPlatformFeatures::AudioStreaming) && IsStreaming())
	{
		// Retrieve format to cache for targetplatform.
		FName PlatformFormat = TargetPlatform->GetWaveFormat(this);

		uint32 CacheFlags = EStreamedAudioCacheFlags::Async | EStreamedAudioCacheFlags::InlineChunks;

		// If source data is resident in memory then allow the streamed audio to be built
		// in a background thread.
		if (GetCompressedData(PlatformFormat)->IsBulkDataLoaded())
		{
			CacheFlags |= EStreamedAudioCacheFlags::AllowAsyncBuild;
		}

		// find format data by comparing derived data keys.
		FString DerivedDataKey;
		GetStreamedAudioDerivedDataKeySuffix(*this, PlatformFormat, DerivedDataKey);

		FStreamedAudioPlatformData *PlatformData = CookedPlatformData.FindRef(DerivedDataKey);

		if (PlatformData == NULL)
		{
			PlatformData = new FStreamedAudioPlatformData();
			PlatformData->Cache(
				*this,
				PlatformFormat,
				CacheFlags
				);
			CookedPlatformData.Add(DerivedDataKey, PlatformData);
		}
	}

	Super::BeginCacheForCookedPlatformData(TargetPlatform);
}
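Example no. 24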
void FHttpNetworkReplayStreamer::StartStreaming( const FString& StreamName, bool bRecord, const FString& VersionString, const FOnStreamReadyDelegate& Delegate )
{
	if ( !SessionName.IsEmpty() )
	{
		UE_LOG( LogHttpReplay, Warning, TEXT( "FHttpNetworkReplayStreamer::StartStreaming. SessionName already set." ) );
		return;
	}

	if ( IsStreaming() )
	{
		UE_LOG( LogHttpReplay, Warning, TEXT( "FHttpNetworkReplayStreamer::StartStreaming. IsStreaming == true." ) );
		return;
	}

	if ( IsHttpBusy() )
	{
		UE_LOG( LogHttpReplay, Warning, TEXT( "FHttpNetworkReplayStreamer::StartStreaming. IsHttpBusy == true." ) );
		return;
	}

	SessionVersion = VersionString;

	// Remember the delegate, which we'll call as soon as the header is available
	StartStreamingDelegate = Delegate;

	// Setup the archives
	StreamArchive.ArIsLoading = !bRecord;
	StreamArchive.ArIsSaving = !StreamArchive.ArIsLoading;

	HeaderArchive.ArIsLoading = !bRecord;
	HeaderArchive.ArIsSaving = !StreamArchive.ArIsLoading;

	LastChunkTime = FPlatformTime::Seconds();

	const bool bOverrideRecordingSession = false;

	// Override session name if requested
	if ( StreamArchive.ArIsLoading || bOverrideRecordingSession )
	{
		SessionName = StreamName;
	}

	// Create the Http request and add to pending request list
	TSharedRef<class IHttpRequest> HttpRequest = FHttpModule::Get().CreateRequest();

	StreamFileCount = 0;

	if ( StreamArchive.ArIsLoading )
	{
		// Notify the http server that we want to start downloading a replay
		HttpRequest->SetURL( FString::Printf( TEXT( "%sstartdownloading?Version=%s&Session=%s" ), *ServerURL, *SessionVersion, *SessionName ) );
		HttpRequest->SetVerb( TEXT( "POST" ) );

		HttpRequest->OnProcessRequestComplete().BindRaw( this, &FHttpNetworkReplayStreamer::HttpStartDownloadingFinished );

		HttpState = EHttptate::StartDownloading;
		
		// Set the next streamer state to download the header
		StreamerState = EStreamerState::NeedToDownloadHeader;
	}
	else
	{
		// Notify the http server that we want to start uploading a replay
		if ( !SessionName.IsEmpty() )
		{
			HttpRequest->SetURL( FString::Printf( TEXT( "%sstartuploading?Version=%s&Session=%s" ), *ServerURL, *SessionVersion, *SessionName ) );
		}
		else
		{
			HttpRequest->SetURL( FString::Printf( TEXT( "%sstartuploading?Version=%s" ), *ServerURL, *SessionVersion ) );
		}

		HttpRequest->SetVerb( TEXT( "POST" ) );

		HttpRequest->OnProcessRequestComplete().BindRaw( this, &FHttpNetworkReplayStreamer::HttpStartUploadingFinished );

		SessionName.Empty();

		HttpState = EHttptate::StartUploading;

		// Set the next streamer state to upload the header
		StreamerState = EStreamerState::NeedToUploadHeader;
	}
	
	HttpRequest->ProcessRequest();
}
Example no. 25
HRESULT CMpegSplitterFile::Init(IAsyncReader* pAsyncReader)
{
    HRESULT hr;

    // get the type first

    m_type = mpeg_us;

    Seek(0);

    if (m_type == mpeg_us) {
        if (BitRead(32, true) == 'TFrc') {
            Seek(0x67c);
        }
        int cnt = 0, limit = 4;
        for (tshdr h; cnt < limit && Read(h); cnt++) {
            Seek(h.next);
        }
        if (cnt >= limit) {
            m_type = mpeg_ts;
        }
    }

    Seek(0);

    if (m_type == mpeg_us) {
        if (BitRead(32, true) == 'TFrc') {
            Seek(0xE80);
        }
        int cnt = 0, limit = 4;
        for (tshdr h; cnt < limit && Read(h); cnt++) {
            Seek(h.next);
        }
        if (cnt >= limit) {
            m_type = mpeg_ts;
        }
    }

    Seek(0);

    if (m_type == mpeg_us) {
        int cnt = 0, limit = 4;
        for (pvahdr h; cnt < limit && Read(h); cnt++) {
            Seek(GetPos() + h.length);
        }
        if (cnt >= limit) {
            m_type = mpeg_pva;
        }
    }

    Seek(0);

    if (m_type == mpeg_us) {
        BYTE b;
        for (int i = 0; (i < 4 || GetPos() < MAX_PROBE_SIZE) && m_type == mpeg_us && NextMpegStartCode(b); i++) {
            if (b == 0xba) {
                pshdr h;
                if (Read(h)) {
                    m_type = mpeg_ps;
                    m_rate = int(h.bitrate / 8);
                    break;
                }
            } else if ((b & 0xe0) == 0xc0 // audio, 110xxxxx, mpeg1/2/3
                       || (b & 0xf0) == 0xe0 // video, 1110xxxx, mpeg1/2
                       // || (b&0xbd) == 0xbd) // private stream 1, 0xbd, ac3/dts/lpcm/subpic
                       || b == 0xbd) { // private stream 1, 0xbd, ac3/dts/lpcm/subpic
                peshdr h;
                if (Read(h, b) && BitRead(24, true) == 0x000001) {
                    m_type = mpeg_es;
                }
            }
        }
    }

    Seek(0);

    if (m_type == mpeg_us) {
        return E_FAIL;
    }

    // min/max pts & bitrate
    m_rtMin = m_posMin = _I64_MAX;
    m_rtMax = m_posMax = 0;
    m_rtPrec = _I64_MIN;
    m_bPTSWrap = false;

    m_init = true;

    if (IsRandomAccess() || IsStreaming()) {
        if (IsStreaming()) {
            for (int i = 0; i < 20 || i < 50 && S_OK != HasMoreData(MEGABYTE, 100); i++) {
                ;
            }
        }

        SearchPrograms(0, min(GetLength(), MEGABYTE * 5)); // search at most 5 MB for a valid Program Map Table

        __int64 pfp = 0;
        const int k = 5;
        for (int i = 0; i <= k; i++) {
            __int64 fp = i * GetLength() / k;
            fp = min(GetLength() - MEGABYTE / 8, fp);
            fp = max(pfp, fp);
            __int64 nfp = fp + (pfp == 0 ? 10 * MEGABYTE : MEGABYTE / 8);
            if (FAILED(hr = SearchStreams(fp, nfp, pAsyncReader))) {
                return hr;
            }
            pfp = nfp;
        }
    } else {
        if (FAILED(hr = SearchStreams(0, MEGABYTE / 8, pAsyncReader))) {
            return hr;
        }
    }

    if (m_type == mpeg_ts) {
        if (IsRandomAccess() || IsStreaming()) {
            if (IsStreaming()) {
                for (int i = 0; i < 20 || i < 50 && S_OK != HasMoreData(1024 * 100, 100); i++) {
                    ;
                }
            }

            __int64 pfp = 0;
            const int k = 5;
            for (int i = 0; i <= k; i++) {
                __int64 fp = i * GetLength() / k;
                fp = min(GetLength() - MEGABYTE / 8, fp);
                fp = max(pfp, fp);
                __int64 nfp = fp + (pfp == 0 ? 10 * MEGABYTE : MEGABYTE / 8);
                if (FAILED(hr = SearchStreams(fp, nfp, pAsyncReader, TRUE))) {
                    return hr;
                }
                pfp = nfp;
            }
        } else {
            if (FAILED(hr = SearchStreams(0, MEGABYTE / 8, pAsyncReader, TRUE))) {
                return hr;
            }
        }
    }

    if (m_posMax - m_posMin <= 0 || (m_rtMax - m_rtMin <= 0 && !m_bPTSWrap)) {
        return E_FAIL;
    }

    m_init = false;

    int indicated_rate = m_rate;
    REFERENCE_TIME dur = !m_bPTSWrap ? (m_rtMax - m_rtMin) : (PTS_MAX_BEFORE_WRAP - m_rtMin + m_rtMax);
    int detected_rate = int(10000000i64 * (m_posMax - m_posMin) / dur);

    m_rate = detected_rate ? detected_rate : m_rate;
#if (0)
    // normally "detected" should always be less than "indicated", but sometimes it can be a few percent higher (+10% is allowed here)
    // (update: also allowing +/-50k/s)
    if (indicated_rate == 0 || ((float)detected_rate / indicated_rate) < 1.1 || abs(detected_rate - indicated_rate) < 50 * 1024) {
        m_rate = detected_rate;
    } else {
        ;    // TODO: in this case disable seeking, or try doing something less drastic...
    }
#endif

    // Add fake subtitle stream...
    if (m_streams[video].GetCount() && m_streams[subpic].GetCount()) {
        if (m_type == mpeg_ts && m_bIsHdmv) {
            AddHdmvPGStream(NO_SUBTITLE_PID, "---");
        } else {
            stream s;
            s.pid = NO_SUBTITLE_PID;
            s.mt.majortype = m_streams[subpic].GetHead().mt.majortype;
            s.mt.subtype = m_streams[subpic].GetHead().mt.subtype;
            s.mt.formattype = m_streams[subpic].GetHead().mt.formattype;
            m_streams[subpic].Insert(s, this);
        }
    }

    Seek(0);

    return S_OK;
}
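The container sniffing above walks MPEG start codes through the splitter's bit reader; for reference, a flat-buffer scanner for the pack-header start code 00 00 01 BA (the pattern behind the b == 0xba branch), illustrative only:

#include <cstddef>
#include <cstdint>

// Return the offset of the first MPEG-PS pack-header start code
// (00 00 01 BA) in buf, or -1 if none is found.
long FindPackStartCode(const std::uint8_t* buf, std::size_t len)
{
    for (std::size_t i = 0; i + 3 < len; ++i)
        if (buf[i] == 0x00 && buf[i + 1] == 0x00 &&
            buf[i + 2] == 0x01 && buf[i + 3] == 0xBA)
            return static_cast<long>(i);
    return -1;
}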
Example no. 26
void USoundWave::Serialize( FArchive& Ar )
{
	Super::Serialize( Ar );

	bool bCooked = Ar.IsCooking();
	Ar << bCooked;

	if (FPlatformProperties::RequiresCookedData() && !bCooked && Ar.IsLoading())
	{
		UE_LOG(LogAudio, Fatal, TEXT("This platform requires cooked packages, and audio data was not cooked into %s."), *GetFullName());
	}

	if (Ar.IsCooking())
	{
		CompressionName = Ar.CookingTarget()->GetWaveFormat(this);
	}
	
	if (Ar.UE4Ver() >= VER_UE4_SOUND_COMPRESSION_TYPE_ADDED)
	{
		Ar << CompressionName;
	}

	if (bCooked)
	{
		// Only want to cook/load full data if we don't support streaming
		if (!IsStreaming() ||
			(Ar.IsLoading() && !FPlatformProperties::SupportsAudioStreaming()) ||
			(Ar.IsCooking() && !Ar.CookingTarget()->SupportsFeature(ETargetPlatformFeatures::AudioStreaming)))
		{
			if (Ar.IsCooking())
			{
#if WITH_ENGINE
				TArray<FName> ActualFormatsToSave;
				if (!Ar.CookingTarget()->IsServerOnly())
				{
					// for now we only support one format per wav
					FName Format = Ar.CookingTarget()->GetWaveFormat(this);
					GetCompressedData(Format); // Get the data from the DDC or build it

					ActualFormatsToSave.Add(Format);
				}
				CompressedFormatData.Serialize(Ar, this, &ActualFormatsToSave);
#endif
			}
			else
			{
				CompressedFormatData.Serialize(Ar, this);
			}
		}
	}
	else
	{
		// only save the raw data for non-cooked packages
		RawData.Serialize( Ar, this );
	}

	Ar << CompressedDataGuid;

	if (IsStreaming())
	{
		if (bCooked)
		{
			// only cook/load streaming data if it's supported
			if ((Ar.IsLoading() && FPlatformProperties::SupportsAudioStreaming()) ||
				(Ar.IsCooking() && Ar.CookingTarget()->SupportsFeature(ETargetPlatformFeatures::AudioStreaming)))
			{
				SerializeCookedPlatformData(Ar);
			}
		}

#if WITH_EDITORONLY_DATA	
		if (Ar.IsLoading() && !Ar.IsTransacting() && !bCooked && !(GetOutermost()->PackageFlags & PKG_ReloadingForCooker))
		{
			BeginCachePlatformData();
		}
#endif // #if WITH_EDITORONLY_DATA
	}
}
Example no. 27
HRESULT CMpegSplitterFile::Init()
{
	HRESULT hr;

    SVP_LogMsg5(L"HRESULT CMpegSplitterFile::Init()");
	// get the type first

	m_type = us;

	Seek(0);

	if(m_type == us)
	{
		if(BitRead(32, true) == 'TFrc') Seek(0x67c);
		int cnt = 0, limit = 4;
		for(trhdr h; cnt < limit && Read(h); cnt++) Seek(h.next);
		if(cnt >= limit) m_type = ts;
	}

	Seek(0);

	if(m_type == us)
	{
		int cnt = 0, limit = 4;
		for(pvahdr h; cnt < limit && Read(h); cnt++) Seek(GetPos() + h.length);
		if(cnt >= limit) m_type = pva;
	}

	Seek(0);

	if(m_type == us)
	{
		BYTE b;
		for(int i = 0; (i < 4 || GetPos() < 65536) && m_type == us && NextMpegStartCode(b); i++)
		{
			if(b == 0xba)
			{
				pshdr h;
				if(Read(h)) 
				{
					m_type = ps;
					m_rate = int(h.bitrate/8);
					break;
				}
			}
			else if((b&0xe0) == 0xc0 // audio, 110xxxxx, mpeg1/2/3
				|| (b&0xf0) == 0xe0 // video, 1110xxxx, mpeg1/2
				// || (b&0xbd) == 0xbd) // private stream 1, 0xbd, ac3/dts/lpcm/subpic
				|| b == 0xbd) // private stream 1, 0xbd, ac3/dts/lpcm/subpic
			{
				peshdr h;
				if(Read(h, b) && BitRead(24, true) == 0x000001)
				{
					m_type = es;
				}
			}
		}
	}

	Seek(0);

	if(m_type == us)
	{
		return E_FAIL;
	}

	// min/max pts & bitrate
	m_rtMin = m_posMin = _I64_MAX;
	m_rtMax = m_posMax = 0;

	if(IsRandomAccess() || IsStreaming())
	{
		if(IsStreaming())
		{
			for(int i = 0; i < 20 || i < 50 && S_OK != HasMoreData(1024*100, 100); i++);
		}

		CAtlList<__int64> fps;
		for(int i = 0, j = 5; i <= j; i++)
			fps.AddTail(i*GetLength()/j);

		for(__int64 pfp = 0; fps.GetCount(); )
		{
			__int64 fp = fps.RemoveHead();
			fp = min(GetLength() - MEGABYTE/8, fp);
			fp = max(pfp, fp);
			__int64 nfp = fp + (pfp == 0 ? 5*MEGABYTE : MEGABYTE/8);
			if(FAILED(hr = SearchStreams(fp, nfp)))
				return hr;
			pfp = nfp;
		}
	}
	else
	{
		if(FAILED(hr = SearchStreams(0, MEGABYTE/8)))
			return hr;
	}

	if(m_posMax - m_posMin <= 0 || m_rtMax - m_rtMin <= 0)
		return E_FAIL;

	int indicated_rate = m_rate;
	int detected_rate = int(10000000i64 * (m_posMax - m_posMin) / (m_rtMax - m_rtMin));
	// normally "detected" should always be less than "indicated", but sometimes it can be a few percent higher (+10% is allowed here)
	// (update: also allowing +/-50k/s)
	if(indicated_rate == 0 || ((float)detected_rate / indicated_rate) < 1.1
	|| abs(detected_rate - indicated_rate) < 50*1024)
		m_rate = detected_rate;
	else ; // TODO: in this case disable seeking, or try doing something less drastic...

//#ifndef DEBUG
	if(m_streams[audio].GetCount() < 1 && m_type == ts){
		SVP_LogMsg5(_T("ts and no audio %d %d"), m_streams[audio].GetCount(), m_streams[unknown].GetCount());
		return E_FAIL;
	}
	if(m_streams[video].GetCount())
	{
		if (!m_bIsHdmv && m_streams[subpic].GetCount())
		{
			stream s;
			s.mt.majortype = MEDIATYPE_Video;
			s.mt.subtype = MEDIASUBTYPE_DVD_SUBPICTURE;
			s.mt.formattype = FORMAT_None;
			m_streams[subpic].Insert(s, this);
		}
		else
		{
			// Add fake stream for "No subtitle"
			AddHdmvPGStream (NO_SUBTITLE_PID, "---");
		}
	}else if(m_type == ts){
		SVP_LogMsg5(_T("ts and no video"));
		return E_FAIL;
	}
//#endif

	Seek(0);

    SVP_LogMsg5(_T("ts %d %d %d"),  m_streams[video].GetCount() , m_streams[audio].GetCount(), m_bIsHdmv);
	return S_OK;
}
Example no. 28
/**
 * The window procedure which handles the application events.
 */
LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
{
	switch (message)
	{
		case WM_SETFOCUS:
		{
			GetCursorPos(&gLastMousePos);
			gFocused = true;
			break;
		}
		case WM_KILLFOCUS:
		{
		  gFocused = false;
		  break;
		}
		// An explicit paint message
		case WM_PAINT:
		{
			if (!gReinitializeRequired)
			{
				// This is only really called when something is dragged over top of the window
				RenderScene();
			}

			ValidateRect(hWnd, nullptr);
			break;
		}
		// Handle window size changes and pause streaming when minimized since the back buffer might not be available
		case WM_SIZE:
		{
			// Update the pause state
			int wmEvent = LOWORD(wParam);
			if (wmEvent == SIZE_MINIMIZED)
			{
				gPaused = true;
				Pause();
			}
			else if (wmEvent == SIZE_RESTORED)
			{
				gPaused = false;
			}

			break;
		}
		// Handle key presses
		case WM_KEYDOWN:
		{
			switch (wParam)
			{
				// Toggle streaming
				case VK_F5:
				{
					if (IsStreaming())
					{
						gStreamingDesired = false;
						StopStreaming();
					}
					else
					{
						gStreamingDesired = true;
						StartStreaming(gBroadcastWidth, gBroadcastHeight, gBroadcastFramesPerSecond);
					}
					break;
				}
				// Toggle fullscreen
				case VK_F12:
				{
					gFullscreen = !gFullscreen;
					gReinitializeRequired = true;
					break;
				}
				// Toggle broadcast resolution
				case VK_F1:
				{
					bool streaming = IsStreaming();
					if (streaming)
					{
						StopStreaming();
					}

					if (gBroadcastWidth == 640)
					{
						gBroadcastWidth = 1024;
						gBroadcastHeight = 768;
					}
					else
					{
						gBroadcastWidth = 640;
						gBroadcastHeight = 368;
					}

					if (streaming)
					{
						StartStreaming(gBroadcastWidth, gBroadcastHeight, gBroadcastFramesPerSecond);
					}

					break;
				}
			}
			break;
		}
		// Close the application
		case WM_DESTROY:
		{
			PostQuitMessage(0);
			break;
		}
		default:
		{
			return DefWindowProc(hWnd, message, wParam, lParam);
		}
	}
	return 0;
}
Example no. 29
void USoundWave::Serialize( FArchive& Ar )
{
	DECLARE_SCOPE_CYCLE_COUNTER( TEXT("USoundWave::Serialize"), STAT_SoundWave_Serialize, STATGROUP_LoadTime );

	Super::Serialize( Ar );

	bool bCooked = Ar.IsCooking();
	Ar << bCooked;

	if (FPlatformProperties::RequiresCookedData() && !bCooked && Ar.IsLoading())
	{
		UE_LOG(LogAudio, Fatal, TEXT("This platform requires cooked packages, and audio data was not cooked into %s."), *GetFullName());
	}

	Ar.UsingCustomVersion(FFrameworkObjectVersion::GUID);

	if (Ar.IsLoading() && (Ar.UE4Ver() >= VER_UE4_SOUND_COMPRESSION_TYPE_ADDED) && (Ar.CustomVer(FFrameworkObjectVersion::GUID) < FFrameworkObjectVersion::RemoveSoundWaveCompressionName))
	{
		FName DummyCompressionName;
		Ar << DummyCompressionName;
	}

	bool bSupportsStreaming = false;
	if (Ar.IsLoading() && FPlatformProperties::SupportsAudioStreaming())
	{
		bSupportsStreaming = true;
	}
	else if (Ar.IsCooking() && Ar.CookingTarget()->SupportsFeature(ETargetPlatformFeatures::AudioStreaming))
	{
		bSupportsStreaming = true;
	}

	if (bCooked)
	{
		// Only want to cook/load full data if we don't support streaming
		if (!IsStreaming() || !bSupportsStreaming)
		{
			if (Ar.IsCooking())
			{
#if WITH_ENGINE
				TArray<FName> ActualFormatsToSave;
				if (!Ar.CookingTarget()->IsServerOnly())
				{
					// for now we only support one format per wav
					FName Format = Ar.CookingTarget()->GetWaveFormat(this);
					GetCompressedData(Format); // Get the data from the DDC or build it

					ActualFormatsToSave.Add(Format);
				}
				CompressedFormatData.Serialize(Ar, this, &ActualFormatsToSave);
#endif
			}
			else
			{
				CompressedFormatData.Serialize(Ar, this);
			}
		}
	}
	else
	{
		// only save the raw data for non-cooked packages
		RawData.Serialize( Ar, this );
	}

	Ar << CompressedDataGuid;

	if (IsStreaming())
	{
		if (bCooked)
		{
			// only cook/load streaming data if it's supported
			if (bSupportsStreaming)
			{
				SerializeCookedPlatformData(Ar);
			}
		}

#if WITH_EDITORONLY_DATA	
		if (Ar.IsLoading() && !Ar.IsTransacting() && !bCooked && !GetOutermost()->HasAnyPackageFlags(PKG_ReloadingForCooker))
		{
			BeginCachePlatformData();
		}
#endif // #if WITH_EDITORONLY_DATA
	}
}
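Both Serialize() variants above apply the same gate on compressed data; restated as standalone predicates (a sketch, not engine code):

// Full compressed data is cooked/loaded whenever streamed chunks cannot
// be used; streamed chunks ship only when the wave streams and the
// platform (or cooking target) supports audio streaming.
bool CooksFullCompressedData(bool bIsStreaming, bool bSupportsStreaming)
{
    return !bIsStreaming || !bSupportsStreaming;
}

bool CooksStreamedChunks(bool bIsStreaming, bool bSupportsStreaming)
{
    return bIsStreaming && bSupportsStreaming;
}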
Example no. 30
/**
 * The main entry point for the application.
 */
int APIENTRY _tWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPTSTR lpCmdLine, int nCmdShow)
{
	UNREFERENCED_PARAMETER(hPrevInstance);
	UNREFERENCED_PARAMETER(lpCmdLine);

	// Initialize global strings
	// Note: LoadString takes the buffer size in characters, so use _countof
	// rather than sizeof (they differ in Unicode builds).
	LoadString(hInstance, IDS_APP_TITLE, gWindowTitle, _countof(gWindowTitle));
	LoadString(hInstance, IDC_STREAMING, gWindowClass, _countof(gWindowClass));

	// Register the window class
	RegisterWindowClass(hInstance);

	// Perform application initialization:
	if ( !InitInstance(hInstance, nCmdShow) )
	{
		return FALSE;
	}

	// Set the view to the default position
	ResetView();

	// Cache the last mouse position
	GetCursorPos(&gLastMousePos);

	// Initialize the Twitch SDK
	InitializeStreaming("<username>", "<password>", "<clientId>", "<clientSecret>", GetIntelDllPath());

	// Main message loop
	MSG msg;
	while (true)
	{
		// Check to see if any messages are waiting in the queue
		while (PeekMessage(&msg, nullptr, 0, 0, PM_REMOVE))
		{
			// Process window messages
			TranslateMessage(&msg);
			DispatchMessage(&msg);

			// Received a quit message
			if (msg.message == WM_QUIT)
			{
				break;
			}
		}

		// Received a quit message so exit the app
		if (msg.message == WM_QUIT)
		{
			break;
		}

		if (gReinitializeRequired)
		{
			gReinitializeRequired = false;
			InitializeRendering();
		}

		// Draw the scene
		RenderScene();

		UpdateWaveMesh();

		// Process user input independent of the event queue
		if (gFocused)
		{
			HandleInput();
		}

		// Record the frame time
		unsigned __int64 curTime = GetSystemTimeMs();

		// Begin streaming when ready
		if (gStreamingDesired && 
			!IsStreaming() &&
			IsReadyToStream())
		{
			StartStreaming(gBroadcastWidth, gBroadcastHeight, gBroadcastFramesPerSecond);

			gLastCaptureTime = 0;
		}

		// If you send frames too quickly to the SDK (based on the broadcast FPS you configured) it will not be able 
		// to make use of them all.  In that case, it will simply release buffers without using them which means the
		// game wasted time doing the capture.  To mitigate this, the app should pace the captures to the broadcast FPS.
		unsigned __int64 captureDelta = curTime - gLastCaptureTime;
		bool isTimeForNextCapture = (captureDelta / 1000.0) >= (1.0 / gBroadcastFramesPerSecond);

		// streaming is in progress so try and capture a frame
		if (IsStreaming() && 
			!gPaused &&
			isTimeForNextCapture)
		{
			// capture a snapshot of the back buffer
			unsigned char* pBgraFrame = nullptr;
			int width = 0;
			int height = 0;
			bool gotFrame = false;

			switch (gCaptureMethod)
			{
			case CaptureMethod::Slow:
				gotFrame = CaptureFrame_Slow(gBroadcastWidth, gBroadcastHeight, pBgraFrame);
				break;
			case CaptureMethod::Fast:
				gotFrame = CaptureFrame_Fast(gBroadcastWidth, gBroadcastHeight, pBgraFrame, width, height);
				break;
			}

			// send a frame to the stream
			if (gotFrame)
			{
				SubmitFrame(pBgraFrame);
			}
		}

		// The SDK may generate events that need to be handled by the main thread so we should handle them
		FlushStreamingEvents();

		unsigned __int64 timePerFrame = curTime - gLastFrameTime;
		unsigned int fps = 0;
		if (timePerFrame > 0)
		{
			fps = static_cast<int>(1000 / timePerFrame);
		}
		gLastFrameTime = curTime;

		// Update the window title to show the state
		#undef STREAM_STATE
		#define STREAM_STATE(__state__) #__state__,

		char buffer[128];
		const char* streamStates[] = 
		{
			STREAM_STATE_LIST
		};
		#undef STREAM_STATE

		sprintf_s(buffer, sizeof(buffer), "Twitch Direct3D Streaming Sample - %s - %s    FPS=%d", GetUsername().c_str(), streamStates[GetStreamState()], fps);
		SetWindowTextA(gWindowHandle, buffer);
	}

	// Shutdown the Twitch SDK
	StopStreaming();
	ShutdownStreaming();

	// Cleanup the rendering method
	switch (gCaptureMethod)
	{
	case CaptureMethod::Slow:
		DeinitRendering_Slow();
		break;
	case CaptureMethod::Fast:
		DeinitRendering_Fast();
		break;
	}

	// Shutdown the app
	gGraphicsDevice->Release();
	gDirect3D->Release();

	// Cleanup the mesh
	DestroyWaveMesh();

	return (int)msg.wParam;
}
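Finally, the capture throttle in the main loop above reduces to a one-line predicate; isolated here with illustrative names:

// A capture is due once the elapsed milliseconds cover one broadcast frame
// period: at broadcastFps = 30 that is 1000 / 30, i.e. about 33.3 ms.
bool IsTimeForNextCapture(unsigned long long elapsedMs, int broadcastFps)
{
    return (elapsedMs / 1000.0) >= (1.0 / broadcastFps);
}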