//-------------------------------------------------------------------------------------- // Name: ReadSampleRaw() // Desc: Reads data from the audio file. No endianness conversion is performed. //-------------------------------------------------------------------------------------- HRESULT WaveFile::ReadSampleRaw( DWORD dwPosition, VOID* pBuffer, DWORD dwBufferSize, DWORD* pdwRead ) const { // Don't read past the end of the data chunk DWORD dwDuration; GetDuration( &dwDuration ); if( dwPosition + dwBufferSize > dwDuration ) dwBufferSize = dwDuration - dwPosition; HRESULT hr = S_OK; if( dwBufferSize ) hr = m_DataChunk.ReadData( ( LONG )dwPosition, pBuffer, dwBufferSize, NULL ); if( pdwRead ) *pdwRead = dwBufferSize; return hr; }
// Returns the timeline's effective duration: an explicitly set duration is
// used as-is, while Duration::Automatic is resolved by asking the subclass
// via GetNaturalDurationCore().
Duration Timeline::GetNaturalDuration (Clock *clock)
{
	Duration *explicit_duration = GetDuration ();

	// A non-automatic duration was set by the user; it takes precedence.
	if (!(*explicit_duration == Duration::Automatic))
		return *explicit_duration;

	// Automatic: compute the natural duration for this clock.
	return GetNaturalDurationCore (clock);
}
// Advances the animated value by one tick and returns it. Once the end of
// the animation is reached the value is clamped to the endpoint; with
// auto-reverse enabled the direction flips instead of finishing.
double DoubleAnimation::GetCurrentValue()
{
    // Finished animations report the rest value.
    if (_isEndOf)
        return 0;

    // Per-tick increment: total travel scaled by speed ratio over duration.
    const double step = (_dTo - _dFrom) * GetSpeedRatio() / GetDuration();

    if (_bReverse)
    {
        _dCurrent -= step;

        // Clamp once we pass _dFrom in the direction of travel.
        const bool passedStart =
            (step > 0 && _dCurrent <= _dFrom) ||
            (step < 0 && _dCurrent >= _dFrom);
        if (passedStart)
        {
            _dCurrent = _dFrom;
            _isEndOf = true;
        }
    }
    else
    {
        _dCurrent += step;

        // Clamp once we pass _dTo in the direction of travel.
        const bool passedEnd =
            (step > 0 && _dCurrent >= _dTo) ||
            (step < 0 && _dCurrent <= _dTo);
        if (passedEnd)
        {
            _dCurrent = _dTo;
            if (GetAutoReverse())
                _bReverse = true;
            else
                _isEndOf = true;
        }
    }

    return _dCurrent;
}
// Computes the per-tone/per-silence sample counts for the DTMF sequence and
// resets the generator state. Under-estimates each block, then redistributes
// the leftover samples so the total exactly matches the selection length.
bool EffectDtmf::ProcessInitialize(sampleCount WXUNUSED(totalLen), ChannelNames WXUNUSED(chanMap))
{
   double duration = GetDuration();

   // all dtmf sequence durations in samples from seconds
   // MJS: Note that mDuration is in seconds but will have been quantised to the units of the TTC.
   // If this was 'samples' and the project rate was lower than the track rate,
   // extra samples may get created as mDuration may now be > mT1 - mT0;
   // However we are making our best efforts at creating what was asked for.
   auto nT0 = (sampleCount)floor(mT0 * mSampleRate + 0.5);
   auto nT1 = (sampleCount)floor((mT0 + duration) * mSampleRate + 0.5);
   numSamplesSequence = nT1 - nT0;  // needs to be exact number of samples selected

   //make under-estimates if anything, and then redistribute the few remaining samples
   numSamplesTone = sampleCount( floor(dtmfTone * mSampleRate) );
   numSamplesSilence = sampleCount( floor(dtmfSilence * mSampleRate) );

   // recalculate the sum, and spread the difference - due to approximations.
   // Since diff should be in the order of "some" samples, a division (resulting in zero)
   // is not sufficient, so we add the additional remaining samples in each tone/silence block,
   // at least until available.
   diff = numSamplesSequence - (dtmfNTones*numSamplesTone) - (dtmfNTones-1)*numSamplesSilence;
   while (diff > 2*dtmfNTones - 1) {
      // more than one per thingToBeGenerated
      // in this case, both numSamplesTone and numSamplesSilence would change, so it makes sense
      // to recalculate diff here, otherwise just keep the value we already have
      // should always be the case that dtmfNTones>1, as if 0, we don't even start processing,
      // and with 1 there is no difference to spread (no silence slot)...
      wxASSERT(dtmfNTones > 1);
      numSamplesTone += (diff/(dtmfNTones));
      numSamplesSilence += (diff/(dtmfNTones-1));
      diff = numSamplesSequence - (dtmfNTones*numSamplesTone) - (dtmfNTones-1)*numSamplesSilence;
   }
   wxASSERT(diff >= 0);  // should never be negative

   // Reset generation state for a fresh pass over the DTMF sequence:
   // curSeqPos is pre-incremented by the generator, hence -1 here.
   curSeqPos = -1;   // pointer to string in dtmfSequence
   isTone = false;
   numRemaining = 0;

   return true;
}
// Returns the current media time (microseconds) via aMediaTime, caching it
// in iMediaTime. Re-queries the controller only while playing or when the
// cache is unknown; on controller error the result is KTimeUnknown.
void CMMAMMFPlayerBase::GetMediaTime(TInt64* aMediaTime)
{
    LOG(EJavaMMAPI, EInfo, "CMMAMMFPlayerBase::GetMediaTime +");

    TTimeIntervalMicroSeconds position(0);

    // Outside EStarted the cached value is still valid, so avoid the query.
    if (iMediaTime == KTimeUnknown || iState == EStarted)
    {
        // The controller must be in the PRIMED or PLAYING state
        TInt error(iController.GetPosition(position));

        if (error == KErrNone)
        {
            TInt64 newTime = position.Int64();
            LOG1(EJavaMMAPI, EInfo, "CMMAMMFPlayerBase::GetMediaTime iController.GetPosition : %d", newTime);

            // Sanity check for media time going backwards or beyond the
            // duration.
            // Some native controls may return zero media time for
            // a few moments just before playback will complete.
            if (newTime < iMediaTime ||
                    (iDuration > 0 && newTime > iDuration))
            {
                LOG(EJavaMMAPI, EInfo, "CMMAMMFPlayerBase::GetMediaTime.GetDuration ");
                // Fall back to the duration as the best estimate.
                GetDuration(&iMediaTime);
            }
            else
            {
                LOG(EJavaMMAPI, EInfo, "CMMAMMFPlayerBase::GetMediaTime.else ");
                // set return value
                iMediaTime = newTime;
            }
        }
        else
        {
            LOG1(EJavaMMAPI, EInfo, "CMMAMMFPlayerBase::GetMediaTime: error=%d, returning TIME_UNKNOWN", error);
            // cannot get media time
            iMediaTime = KTimeUnknown;
        }
    }
    *aMediaTime = iMediaTime;
    LOG1(EJavaMMAPI, EInfo, "CMMAMMFPlayerBase::GetMediaTime - %d", *aMediaTime);
}
// Reports the current seek window and rate. Every out-parameter is optional;
// only the pointers the caller supplies are filled in.
void CAVISplitter::GetSeekingParams(REFERENCE_TIME* ptStart, REFERENCE_TIME* ptStop, double* pdRate)
{
    if (ptStart)
        *ptStart = m_tStart;

    if (ptStop)
    {
        // An unresolved stop time (MAX_TIME) is lazily replaced with the
        // actual stream duration before being reported.
        if (m_tStop == MAX_TIME)
            m_tStop = GetDuration();
        *ptStop = m_tStop;
    }

    if (pdRate)
        *pdRate = m_dRate;
}
// Applies new seek parameters and repositions the scanner.
// Always returns S_OK.
HRESULT CAVISplitter::Seek(REFERENCE_TIME tStart, REFERENCE_TIME tStop, double dRate)
{
    // We must stop the pushing thread before we change
    // the seek parameters -- especially for VBR seeking,
    // where we might re-seek based on the incoming data.
    CAutoLock lock(&m_csSeeking);

    // Treat an unset or inverted stop time as "play to the end".
    if (tStop == 0 || tStop < tStart)
        tStop = GetDuration();

    const bool bWasRunning = Suspend();

    m_tStart = tStart;
    m_tStop = tStop;
    m_dRate = dRate;
    m_scanner.Seek(m_tStart);

    if (bWasRunning)
        Resume();

    return S_OK;
}
// Seeks the pipeline to a position expressed as a percentage (0-100) of the
// total duration. Fails when there is no pipeline or no known duration.
STDMETHODIMP IKGSTAudioPlayer::SetPosition(STREAM_TIME pos)//0-100
{
	if(!pipeline)
		return E_FAIL;

	GstElement *element = pipeline;
	gint64 length = GetDuration();
	gint64 start = 0;

	if (!length)
		return E_FAIL;

	// Map the percentage onto [start, start + length].
	gint64 target = length;
	target *= (double)pos/100;
	target += start;

	bool seeked = gst_element_seek(element, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
	                               GST_SEEK_TYPE_SET, target,
	                               GST_SEEK_TYPE_SET, start + length);
	return seeked ? S_OK : E_FAIL;
}
//======================================================= // FUNCTIONS: Update@Buf // PURPOSE: Update All Kinds Of Buffers //======================================================= int UpdateRspBuf(MobileNode *node) { WGN_802_11_Mac_Frame *frame; unsigned int timeout = 0; int subtype; if (PktRspIsEmpty(node) == TRUE) return -1; if (GetTxStatus(node) == MAC_CTS || GetTxStatus(node) == MAC_ACK) return -1; frame = node->nodeMac->pktRsp; subtype = GetFcSubtype (frame); switch(subtype) { case MAC_SUBTYPE_CTS: if( CheckIfChannelIdle(node) == FALSE ) { Dot11FrameFree(frame); node->nodeMac->pktRsp=NULL; return 0; } SetTxStatus(node,MAC_CTS); //Set timeout period. timeout = UsecToNsec(GetDuration(frame))+ SecToNsec(CTS_Time); break; //IEEE 802.11 specs, section 9.2.8 //Acknowledment should be sent after an SIFS, regardless to //the busy/idle state of the medium. case MAC_SUBTYPE_ACK: SetTxStatus(node,MAC_ACK); timeout = SecToNsec(ACK_Time); break; default: printf ("[UpdateRspBuf]:: Error, Invalid frame subtype!\n"); exit(1); break; } //printf("In UpdateRspBuf ..\n"); MacSendPkt(node, frame, timeout); return 0; }
// Opens the media file with a Media Foundation source reader and caches
// duration, seek flags, frame size, key-frame delta, frame rate, and an
// estimated frame count, then configures the video and audio decoders.
// NOTE(review): the success path returns E_NOTIMPL unconditionally — looks
// like a development leftover; confirm callers expect this before changing.
HRESULT MFMovieSource::Open( const wchar_t* pszFilePath )
{
	HRESULT hr;
	hr=MFCreateSourceReaderFromURL( pszFilePath, CreateSourceReaderAttribute(), &_reader );
	PKY_IF_ERR_RETURN( hr, "Create SourceReader Failed" );

	hr=GetDuration( _reader, m_Duration );
	PKY_IF_ERR_RETURN( hr, "GetDuration Failed" );

	hr=GetSourceFlags( _reader, &m_SeekingFlag );

	// Probe the first video stream's media types for size, key-frame delta
	// and frame rate; the lambda returns true to keep enumerating.
	hr=EnumerateTypsForStream( _reader,(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, [this]( IMFMediaType*pType )->bool
	{
		UINT32 wd;
		UINT32 ht;
		if( SUCCEEDED( GetFrameSize( pType, &wd, &ht ) ) )
		{
			this->_wd=wd;
			this->_ht=ht;
		}
		if( SUCCEEDED( GetKeyFrameDelta( pType, &this->_keyFrameDelta ) ) )
		{
		}
		uint32_t numer, denum;
		if( SUCCEEDED(GetFrameRate(pType, numer, denum)) )
		{
			this->_numer = numer;
			this->_denum = denum;
		}
		return true;
	} );

	// frames = seconds * fps (+ epsilon); the 1e7 divisor implies m_Duration
	// is in 100ns units -- TODO confirm against GetDuration's contract.
	_frameCount = (UINT64)(m_Duration * _numer/_denum/10000000.0 + 0.001);

	hr=ConfigureDecoder( _reader, (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM );
	hr=ConfigureDecoder( _reader, (DWORD)MF_SOURCE_READER_FIRST_AUDIO_STREAM );
	//Jump(0);
	return E_NOTIMPL;
}
// Returns the current playback position as a percentage (0-100) of the total
// duration; 0 when there is no pipeline, no duration, or the query fails.
STDMETHODIMP_(STREAM_TIME) IKGSTAudioPlayer::GetPosition()//0-100
{
	if(!pipeline)
		return 0;

	GstElement *element = pipeline;
	gint64 length = GetDuration();
	gint64 start = 0;
	gint64 current;
	GstFormat fmt = GST_FORMAT_TIME;

	// Need both a known duration and a queryable position.
	if (!length || !gst_element_query_position(element, &fmt, &current))
		return 0;

	// Convert the absolute position into a percentage of the duration.
	current -= start;
	current *= 100;
	current /= length;
	return current;
}
// Builds a short human-readable description of the cue: its duration
// ("%3.2fs" or "Forever") followed by the sound class name in brackets.
FString USoundCue::GetDesc()
{
	FString Description = TEXT( "" );

	// Display duration
	const float Duration = GetDuration();
	if( Duration < INDEFINITELY_LOOPING_DURATION )
	{
		Description = FString::Printf( TEXT( "%3.2fs" ), Duration );
	}
	else
	{
		Description = TEXT( "Forever" );
	}

	// Display group. Guard against a cue with no sound class assigned:
	// the original code dereferenced GetSoundClass() unconditionally and
	// would crash on a null return.
	Description += TEXT( " [" );
	if( USoundClass* SoundClass = GetSoundClass() )
	{
		Description += *SoundClass->GetName();
	}
	Description += TEXT( "]" );

	return Description;
}
//****************************************************** /// Called when thread is about to start delivering data to the codec /// HRESULT CVideoPin::OnThreadStartPlay() { DWORD thrdID = GetCurrentThreadId(); LogDebug("vidPin:OnThreadStartPlay(%f), rate:%02.2f, threadID:0x%x, GET_TIME_NOW:0x%x", (float)m_rtStart.Millisecs()/1000.0f, m_dRateSeeking, thrdID, GET_TIME_NOW()); //set discontinuity flag indicating to codec that the new data //is not belonging to any previous data m_bDiscontinuity=TRUE; m_bPresentSample=false; m_delayedDiscont = 0; m_FillBuffSleepTime = 1; m_LastFillBuffTime = GET_TIME_NOW(); m_sampleCount = 0; m_bInFillBuffer=false; m_pTsReaderFilter->m_ShowBufferVideo = INIT_SHOWBUFFERVIDEO; m_llLastComp = 0; m_llLastMTDts = 0; m_nNextMTD = 0; m_fMTDMean = 0; m_llMTDSumAvg = 0; ZeroMemory((void*)&m_pllMTD, sizeof(REFERENCE_TIME) * NB_MTDSIZE); //get file-duration and set m_rtDuration GetDuration(NULL); if( !m_bPinNoNewSegFlush ) //MS DTV video decoder can hang if we flush here... { //Downstream flush DeliverBeginFlush(); DeliverEndFlush(); } //start playing DeliverNewSegment(m_rtStart, m_rtStop, m_dRateSeeking); return CSourceStream::OnThreadStartPlay( ); }
bool CCinemaPath::Validate() { if ( m_TimeElapsed <= GetDuration() && m_TimeElapsed >= 0.0f ) { //Find current node and past "node time" float previousTime = 0.0f, cumulation = 0.0f; //Ignore the last node, since it is a blank (node time values are shifted down one from interface) for ( size_t i = 0; i < Node.size() - 1; ++i ) { cumulation += Node[i].Distance; if ( m_TimeElapsed <= cumulation ) { m_PreviousNodeTime = previousTime; m_PreviousRotation = Node[i].Rotation; m_CurrentNode = i; //We're moving toward this next node, so use its rotation return true; } else previousTime += Node[i].Distance; } } return false; }
/*--------------------------------------------------------------------------------*/ bool AudioObjectParameters::GetJumpPosition(bool& jumpPosition, double *interpolationLength) const { bool interpolate = false; bool valid = GetInterpolate(interpolate); if (valid) { jumpPosition = (!interpolate || (interpolate && (GetInterpolationTime() != GetDuration()))); if (interpolationLength) *interpolationLength = interpolate ? GetInterpolationTimeS() : 0.0; } else { jumpPosition = false; if (interpolationLength) *interpolationLength = 0.0; } BBCDEBUG3(("GetJumpPosition(): %s/%s -> %s/%s", StringFrom(GetInterpolate()).c_str(), StringFrom(GetInterpolationTimeS()).c_str(), StringFrom(jumpPosition).c_str(), interpolationLength ? StringFrom(*interpolationLength).c_str() : "<notset>")); return valid; }
// Blocks until the filter graph finishes, polling in one-second slices.
// While waiting, prints the current progress to the debugger and stdout.
// Returns the final HRESULT from IMediaEvent::WaitForCompletion.
HRESULT CConvert::WaitForCompletion()
{
    long evCode = 0;
    HRESULT hr = m_pEvent->WaitForCompletion(1000, &evCode);

    for (;;)
    {
        // E_ABORT means the 1s timeout elapsed without completion.
        if (hr != E_ABORT)
            break;

        hr = m_pEvent->WaitForCompletion(1000, &evCode);
        if (evCode == EC_COMPLETE || evCode == EC_ERRORABORT || evCode == EC_USERABORT)
            break;

        // Report progress once per polling interval.
        TCHAR szProgress[512];
        _stprintf(szProgress, _T("%.2f of %.2f\n"), GetProgress(), GetDuration());
        OutputDebugString(szProgress);
        _tprintf(szProgress);
    }

    return hr;
}
//****************************************************** /// Called when thread is about to start delivering data to the codec /// HRESULT CAudioPin::OnThreadStartPlay() { DWORD thrdID = GetCurrentThreadId(); LogDebug("audPin:OnThreadStartPlay(%f), rate:%02.2f, threadID:0x%x, GET_TIME_NOW:0x%x", (float)m_rtStart.Millisecs()/1000.0f, m_dRateSeeking, thrdID, GET_TIME_NOW()); //set flag to compensate any differences in the stream time & file time m_pTsReaderFilter->m_bStreamCompensated = false; m_pTsReaderFilter->m_bForcePosnUpdate = true; m_pTsReaderFilter->WakeThread(); m_pTsReaderFilter->m_ShowBufferAudio = INIT_SHOWBUFFERAUDIO; //set discontinuity flag indicating to codec that the new data //is not belonging to any previous data m_bDiscontinuity = TRUE; m_bPresentSample = false; m_sampleCount = 0; m_bInFillBuffer=false; m_pTsReaderFilter->m_audioReady = false; m_FillBuffSleepTime = 1; m_LastFillBuffTime = GET_TIME_NOW(); ClearAverageFtime(); //get file-duration and set m_rtDuration GetDuration(NULL); //Downstream flush DeliverBeginFlush(); DeliverEndFlush(); //start playing DeliverNewSegment(m_rtStart, m_rtStop, m_dRateSeeking); return CSourceStream::OnThreadStartPlay( ); }
// Per-tick update: ticks the owning aura (or the raw duration when there is
// no aura) and removes the object once it has expired or lost its caster.
void DynamicObject::Update(uint32 p_time)
{
    // caster can be not in world at time dynamic object update, but dynamic object not yet deleted in Unit destructor
    Unit* caster = GetCaster();
    if (!caster)
    {
        Delete();
        return;
    }

    bool expired = false;

    if (m_aura)
    {
        if (!m_aura->IsRemoved())
            m_aura->UpdateOwner(p_time, this);

        // m_aura may be set to null in Unit::RemoveGameObject call
        if (m_aura && (m_aura->IsRemoved() || m_aura->IsExpired()))
            expired = true;
    }
    else if (GetDuration() > int32(p_time))
    {
        // No aura: count down the remaining lifetime ourselves.
        m_duration -= p_time;
    }
    else
    {
        expired = true;
    }

    if (!expired)
    {
        sScriptMgr.OnDynamicObjectUpdate(this, p_time);
        return;
    }

    // Lifetime over: detach from the caster and remove the object.
    caster->RemoveDynObjectWithGUID(GetGUID());
    Delete();
}
// Timer callback: refreshes the duration label and moves the seek slider to
// reflect the current playback position.
void CDlgVidSeek::OnTimer(UINT nIDEvent)
{
	LONGLONG cur, stop;

	if (m_seek == NULL)
		return;

	if (m_hWndDuration)
		::SetWindowText (m_hWndDuration, GetDuration ());

	m_seek->GetPositions (&cur, &stop);

	// Guard against m_llDuration == 0 (duration not yet known): the original
	// code divided by it unconditionally.
	if (m_llDuration > 0)
	{
		double pos = (double) cur / m_llDuration;

		// Scale the 0..1 fraction into the slider's range: 0..1000 for long
		// media, 0..duration for media shorter than the slider resolution.
		if (m_llDuration > 1000)
			pos *= 1000.0;
		else
			pos *= m_llDuration;

		m_wndSeek.SetPos (int (pos));
	}

	CDialog::OnTimer(nIDEvent);
}
// Prepares the analytic gradient pulse: initialises the GiNaC evaluation of
// the pulse shape, runs the base-class prepare, and computes the pulse area
// numerically. Returns the combined success flag.
bool AnalyticGradPulse::Prepare  (const PrepareMode mode)   {

    //set attributes "Shape", "Diff", "Constants" and initialize GiNaC evaluation
    if (mode != PREP_UPDATE) m_pulse_shape.PrepareInit(mode==PREP_VERBOSE);

    // Base class Prepare *before* analytic prepare of pulse shape
    bool btag = ( GradPulse::Prepare(mode) && mode != PREP_UPDATE && m_pulse_shape.PrepareAnalytic(mode==PREP_VERBOSE) );

    //Calculate area
    //if (btag) m_area = ( (HasDOMattribute("Diff") && GetDOMattribute("Diff")=="1") ? m_pulse_shape.GetAnalyticIntegral(mode==PREP_VERBOSE) : GetAreaNumeric(2000) );
    // Sample count scales with duration (1000 points per duration unit).
    m_area = GetAreaNumeric((int) (1000*GetDuration()));
    //TMP AnalyticIntegral is currently not working => always numeric evaluation !

    if (!btag && mode == PREP_VERBOSE)
        cout << "\n warning in Prepare(1) of AnalyticGradPulse " << GetName() << endl;

    if (mode != PREP_UPDATE)
    {
        // Area is always computed here, so hide it from the UI entirely;
        // the others stay visible but read-only (false flag).
        HideAttribute("Area"    );
        HideAttribute("MaxAmpl", false);
        HideAttribute("SlewRate", false);
    }

    return btag;
}
// Shortens the object's remaining lifetime by the given amount.
void DynamicObject::Delay(int32 delaytime)
{
    const int32 remaining = GetDuration() - delaytime;
    SetDuration(remaining);
}
// Copies the value backing the requested sort field into 'sortable'.
// Unknown fields are silently ignored.
void CVideoInfoTag::ToSortable(SortItem& sortable, Field field) const
{
  switch (field)
  {
  case FieldDirector:                 sortable[FieldDirector] = m_director; break;
  case FieldWriter:                   sortable[FieldWriter] = m_writingCredits; break;
  case FieldGenre:                    sortable[FieldGenre] = m_genre; break;
  case FieldCountry:                  sortable[FieldCountry] = m_country; break;
  case FieldTagline:                  sortable[FieldTagline] = m_strTagLine; break;
  case FieldPlotOutline:              sortable[FieldPlotOutline] = m_strPlotOutline; break;
  case FieldPlot:                     sortable[FieldPlot] = m_strPlot; break;
  case FieldTitle:
  {
    // make sure not to overwrite an existing title with an empty one
    std::string title = m_strTitle;
    if (!title.empty() || sortable.find(FieldTitle) == sortable.end())
      sortable[FieldTitle] = title;
    break;
  }
  case FieldVotes:                    sortable[FieldVotes] = GetRating().votes; break;
  case FieldStudio:                   sortable[FieldStudio] = m_studio; break;
  case FieldTrailer:                  sortable[FieldTrailer] = m_strTrailer; break;
  case FieldSet:                      sortable[FieldSet] = m_strSet; break;
  case FieldTime:                     sortable[FieldTime] = GetDuration(); break;
  case FieldFilename:                 sortable[FieldFilename] = m_strFile; break;
  case FieldMPAA:                     sortable[FieldMPAA] = m_strMPAARating; break;
  case FieldPath:
  {
    // make sure not to overwrite an existing path with an empty one
    std::string path = GetPath();
    if (!path.empty() || sortable.find(FieldPath) == sortable.end())
      sortable[FieldPath] = path;
    break;
  }
  case FieldSortTitle:
  {
    // seasons with a custom name/title need special handling as they should be sorted by season number
    if (m_type == MediaTypeSeason && !m_strSortTitle.empty())
      sortable[FieldSortTitle] = StringUtils::Format(g_localizeStrings.Get(20358).c_str(), m_iSeason);
    else
      sortable[FieldSortTitle] = m_strSortTitle;
    break;
  }
  case FieldTvShowStatus:             sortable[FieldTvShowStatus] = m_strStatus; break;
  case FieldProductionCode:           sortable[FieldProductionCode] = m_strProductionCode; break;
  // Air date falls back to the premiere date when first-aired is unset.
  case FieldAirDate:                  sortable[FieldAirDate] = m_firstAired.IsValid() ? m_firstAired.GetAsDBDate() : (m_premiered.IsValid() ? m_premiered.GetAsDBDate() : StringUtils::Empty); break;
  case FieldTvShowTitle:              sortable[FieldTvShowTitle] = m_strShowTitle; break;
  case FieldAlbum:                    sortable[FieldAlbum] = m_strAlbum; break;
  case FieldArtist:                   sortable[FieldArtist] = m_artist; break;
  case FieldPlaycount:                sortable[FieldPlaycount] = m_playCount; break;
  case FieldLastPlayed:               sortable[FieldLastPlayed] = m_lastPlayed.IsValid() ? m_lastPlayed.GetAsDBDateTime() : StringUtils::Empty; break;
  case FieldTop250:                   sortable[FieldTop250] = m_iTop250; break;
  case FieldYear:                     sortable[FieldYear] = m_premiered.GetYear(); break;
  case FieldSeason:                   sortable[FieldSeason] = m_iSeason; break;
  case FieldEpisodeNumber:            sortable[FieldEpisodeNumber] = m_iEpisode; break;
  // NOTE(review): the three episode-count fields below all map to m_iEpisode;
  // looks intentional (same backing value) but worth confirming.
  case FieldNumberOfEpisodes:         sortable[FieldNumberOfEpisodes] = m_iEpisode; break;
  case FieldNumberOfWatchedEpisodes:  sortable[FieldNumberOfWatchedEpisodes] = m_iEpisode; break;
  case FieldEpisodeNumberSpecialSort: sortable[FieldEpisodeNumberSpecialSort] = m_iSpecialSortEpisode; break;
  case FieldSeasonSpecialSort:        sortable[FieldSeasonSpecialSort] = m_iSpecialSortSeason; break;
  case FieldRating:                   sortable[FieldRating] = GetRating().rating; break;
  case FieldUserRating:               sortable[FieldUserRating] = m_iUserRating; break;
  case FieldId:                       sortable[FieldId] = m_iDbId; break;
  case FieldTrackNumber:              sortable[FieldTrackNumber] = m_iTrack; break;
  case FieldTag:                      sortable[FieldTag] = m_tags; break;
  case FieldVideoResolution:          sortable[FieldVideoResolution] = m_streamDetails.GetVideoHeight(); break;
  case FieldVideoAspectRatio:         sortable[FieldVideoAspectRatio] = m_streamDetails.GetVideoAspect(); break;
  case FieldVideoCodec:               sortable[FieldVideoCodec] = m_streamDetails.GetVideoCodec(); break;
  case FieldStereoMode:               sortable[FieldStereoMode] = m_streamDetails.GetStereoMode(); break;
  case FieldAudioChannels:            sortable[FieldAudioChannels] = m_streamDetails.GetAudioChannels(); break;
  case FieldAudioCodec:               sortable[FieldAudioCodec] = m_streamDetails.GetAudioCodec(); break;
  case FieldAudioLanguage:            sortable[FieldAudioLanguage] = m_streamDetails.GetAudioLanguage(); break;
  case FieldSubtitleLanguage:         sortable[FieldSubtitleLanguage] = m_streamDetails.GetSubtitleLanguage(); break;
  case FieldInProgress:               sortable[FieldInProgress] = m_resumePoint.IsPartWay(); break;
  case FieldDateAdded:                sortable[FieldDateAdded] = m_dateAdded.IsValid() ? m_dateAdded.GetAsDBDateTime() : StringUtils::Empty; break;
  case FieldMediaType:                sortable[FieldMediaType] = m_type; break;
  case FieldRelevance:                sortable[FieldRelevance] = m_relevance; break;
  default: break;
  }
}
// Serializes every field of the video info tag into the CVariant tree
// 'value' (nested objects for cast, ratings, stream details and resume).
void CVideoInfoTag::Serialize(CVariant& value) const
{
  value["director"] = m_director;
  value["writer"] = m_writingCredits;
  value["genre"] = m_genre;
  value["country"] = m_country;
  value["tagline"] = m_strTagLine;
  value["plotoutline"] = m_strPlotOutline;
  value["plot"] = m_strPlot;
  value["title"] = m_strTitle;
  value["votes"] = StringUtils::Format("%i", GetRating().votes);
  value["studio"] = m_studio;
  value["trailer"] = m_strTrailer;
  // Cast entries are objects with name/role/order and an optional thumbnail.
  value["cast"] = CVariant(CVariant::VariantTypeArray);
  for (unsigned int i = 0; i < m_cast.size(); ++i)
  {
    CVariant actor;
    actor["name"] = m_cast[i].strName;
    actor["role"] = m_cast[i].strRole;
    actor["order"] = m_cast[i].order;
    if (!m_cast[i].thumb.empty())
      actor["thumbnail"] = CTextureUtils::GetWrappedImageURL(m_cast[i].thumb);
    value["cast"].push_back(actor);
  }
  value["set"] = m_strSet;
  value["setid"] = m_iSetId;
  value["setoverview"] = m_strSetOverview;
  value["tag"] = m_tags;
  value["runtime"] = GetDuration();
  value["file"] = m_strFile;
  value["path"] = m_strPath;
  value["imdbnumber"] = GetUniqueID();
  value["mpaa"] = m_strMPAARating;
  value["filenameandpath"] = m_strFileNameAndPath;
  value["originaltitle"] = m_strOriginalTitle;
  value["sorttitle"] = m_strSortTitle;
  value["episodeguide"] = m_strEpisodeGuide;
  // Dates serialize to the DB format when valid, empty string otherwise.
  value["premiered"] = m_premiered.IsValid() ? m_premiered.GetAsDBDate() : StringUtils::Empty;
  value["status"] = m_strStatus;
  value["productioncode"] = m_strProductionCode;
  value["firstaired"] = m_firstAired.IsValid() ? m_firstAired.GetAsDBDate() : StringUtils::Empty;
  value["showtitle"] = m_strShowTitle;
  value["album"] = m_strAlbum;
  value["artist"] = m_artist;
  value["playcount"] = m_playCount;
  value["lastplayed"] = m_lastPlayed.IsValid() ? m_lastPlayed.GetAsDBDateTime() : StringUtils::Empty;
  value["top250"] = m_iTop250;
  value["year"] = m_premiered.GetYear();
  value["season"] = m_iSeason;
  value["episode"] = m_iEpisode;
  for (const auto& i : m_uniqueIDs)
    value["uniqueid"][i.first] = i.second;
  value["rating"] = GetRating().rating;
  // All named ratings, with the default one flagged.
  CVariant ratings = CVariant(CVariant::VariantTypeObject);
  for (const auto& i : m_ratings)
  {
    CVariant rating;
    rating["rating"] = i.second.rating;
    rating["votes"] = i.second.votes;
    rating["default"] = i.first == m_strDefaultRating;
    ratings[i.first] = rating;
  }
  value["ratings"] = ratings;
  value["userrating"] = m_iUserRating;
  value["dbid"] = m_iDbId;
  value["fileid"] = m_iFileId;
  value["track"] = m_iTrack;
  value["showlink"] = m_showLink;
  m_streamDetails.Serialize(value["streamdetails"]);
  // Resume point as position/total pair.
  CVariant resume = CVariant(CVariant::VariantTypeObject);
  resume["position"] = (float)m_resumePoint.timeInSeconds;
  resume["total"] = (float)m_resumePoint.totalTimeInSeconds;
  value["resume"] = resume;
  value["tvshowid"] = m_iIdShow;
  value["dateadded"] = m_dateAdded.IsValid() ? m_dateAdded.GetAsDBDateTime() : StringUtils::Empty;
  value["type"] = m_type;
  value["seasonid"] = m_iIdSeason;
  value["specialsortseason"] = m_iSpecialSortSeason;
  value["specialsortepisode"] = m_iSpecialSortEpisode;
}
// Writes this tag as an XML element named 'tag' (NFO-style) appended under
// 'node'. savePathInfo controls whether file/path fields are written;
// additionalNode, if given, is appended verbatim at the end.
// Returns false when 'node' is null or the element cannot be created.
bool CVideoInfoTag::Save(TiXmlNode *node, const std::string &tag, bool savePathInfo, const TiXmlElement *additionalNode)
{
  if (!node)
    return false;

  // we start with a <tag> tag
  TiXmlElement movieElement(tag.c_str());
  TiXmlNode *movie = node->InsertEndChild(movieElement);

  if (!movie)
    return false;

  XMLUtils::SetString(movie, "title", m_strTitle);
  // Optional titles are only written when non-empty.
  if (!m_strOriginalTitle.empty())
    XMLUtils::SetString(movie, "originaltitle", m_strOriginalTitle);
  if (!m_strShowTitle.empty())
    XMLUtils::SetString(movie, "showtitle", m_strShowTitle);
  if (!m_strSortTitle.empty())
    XMLUtils::SetString(movie, "sorttitle", m_strSortTitle);
  if (!m_ratings.empty())
  {
    // <ratings><rating name=.. max=10 [default="true"]><value/><votes/></rating>...</ratings>
    TiXmlElement ratings("ratings");
    for (const auto& it : m_ratings)
    {
      TiXmlElement rating("rating");
      rating.SetAttribute("name", it.first.c_str());
      XMLUtils::SetFloat(&rating, "value", it.second.rating);
      XMLUtils::SetInt(&rating, "votes", it.second.votes);
      rating.SetAttribute("max", 10);
      if (it.first == m_strDefaultRating)
        rating.SetAttribute("default", "true");
      ratings.InsertEndChild(rating);
    }
    movie->InsertEndChild(ratings);
  }
  XMLUtils::SetInt(movie, "userrating", m_iUserRating);

  if (m_EpBookmark.timeInSeconds > 0)
  {
    TiXmlElement epbookmark("episodebookmark");
    XMLUtils::SetFloat(&epbookmark, "position", (float)m_EpBookmark.timeInSeconds);
    if (!m_EpBookmark.playerState.empty())
    {
      // The player state is stored as an XML fragment; re-parse and embed it.
      TiXmlElement playerstate("playerstate");
      CXBMCTinyXML doc;
      doc.Parse(m_EpBookmark.playerState);
      playerstate.InsertEndChild(*doc.RootElement());
      epbookmark.InsertEndChild(playerstate);
    }
    movie->InsertEndChild(epbookmark);
  }

  XMLUtils::SetInt(movie, "top250", m_iTop250);
  // Episode/season numbering only applies to TV content.
  if (tag == "episodedetails" || tag == "tvshow")
  {
    XMLUtils::SetInt(movie, "season", m_iSeason);
    XMLUtils::SetInt(movie, "episode", m_iEpisode);
    XMLUtils::SetInt(movie, "displayseason",m_iSpecialSortSeason);
    XMLUtils::SetInt(movie, "displayepisode",m_iSpecialSortEpisode);
  }
  if (tag == "musicvideo")
  {
    XMLUtils::SetInt(movie, "track", m_iTrack);
    XMLUtils::SetString(movie, "album", m_strAlbum);
  }
  XMLUtils::SetString(movie, "outline", m_strPlotOutline);
  XMLUtils::SetString(movie, "plot", m_strPlot);
  XMLUtils::SetString(movie, "tagline", m_strTagLine);
  // Runtime is written in minutes (duration divided by 60).
  XMLUtils::SetInt(movie, "runtime", GetDuration() / 60);
  if (!m_strPictureURL.m_xml.empty())
  {
    // Copy every <thumb> child from the stored picture-URL XML fragment.
    CXBMCTinyXML doc;
    doc.Parse(m_strPictureURL.m_xml);
    const TiXmlNode* thumb = doc.FirstChild("thumb");
    while (thumb)
    {
      movie->InsertEndChild(*thumb);
      thumb = thumb->NextSibling("thumb");
    }
  }
  if (m_fanart.m_xml.size())
  {
    CXBMCTinyXML doc;
    doc.Parse(m_fanart.m_xml);
    movie->InsertEndChild(*doc.RootElement());
  }
  XMLUtils::SetString(movie, "mpaa", m_strMPAARating);
  XMLUtils::SetInt(movie, "playcount", m_playCount);
  XMLUtils::SetDate(movie, "lastplayed", m_lastPlayed);
  if (savePathInfo)
  {
    XMLUtils::SetString(movie, "file", m_strFile);
    XMLUtils::SetString(movie, "path", m_strPath);
    XMLUtils::SetString(movie, "filenameandpath", m_strFileNameAndPath);
    XMLUtils::SetString(movie, "basepath", m_basePath);
  }
  if (!m_strEpisodeGuide.empty())
  {
    // Prefer embedding the guide as parsed XML; fall back to plain text.
    CXBMCTinyXML doc;
    doc.Parse(m_strEpisodeGuide);
    if (doc.RootElement())
      movie->InsertEndChild(*doc.RootElement());
    else
      XMLUtils::SetString(movie, "episodeguide", m_strEpisodeGuide);
  }

  XMLUtils::SetString(movie, "id", GetUniqueID());
  for (const auto& uniqueid : m_uniqueIDs)
  {
    TiXmlElement uniqueID("uniqueid");
    uniqueID.SetAttribute("type", uniqueid.first);
    if (uniqueid.first == m_strDefaultUniqueID)
      uniqueID.SetAttribute("default", "true");
    TiXmlText value(uniqueid.second);
    uniqueID.InsertEndChild(value);
    movie->InsertEndChild(uniqueID);
  }
  XMLUtils::SetStringArray(movie, "genre", m_genre);
  XMLUtils::SetStringArray(movie, "country", m_country);
  if (!m_strSet.empty())
  {
    TiXmlElement set("set");
    XMLUtils::SetString(&set, "name", m_strSet);
    if (!m_strSetOverview.empty())
      XMLUtils::SetString(&set, "overview", m_strSetOverview);
    movie->InsertEndChild(set);
  }
  XMLUtils::SetStringArray(movie, "tag", m_tags);
  XMLUtils::SetStringArray(movie, "credits", m_writingCredits);
  XMLUtils::SetStringArray(movie, "director", m_director);
  if (HasPremiered())
    XMLUtils::SetDate(movie, "premiered", m_premiered);
  if (HasYear())
    XMLUtils::SetInt(movie, "year", GetYear());
  XMLUtils::SetString(movie, "status", m_strStatus);
  XMLUtils::SetString(movie, "code", m_strProductionCode);
  XMLUtils::SetDate(movie, "aired", m_firstAired);
  XMLUtils::SetStringArray(movie, "studio", m_studio);
  XMLUtils::SetString(movie, "trailer", m_strTrailer);

  if (m_streamDetails.HasItems())
  {
    // it goes fileinfo/streamdetails/[video|audio|subtitle]
    // Stream indices are 1-based in CStreamDetails.
    TiXmlElement fileinfo("fileinfo");
    TiXmlElement streamdetails("streamdetails");
    for (int iStream=1; iStream<=m_streamDetails.GetVideoStreamCount(); iStream++)
    {
      TiXmlElement stream("video");
      XMLUtils::SetString(&stream, "codec", m_streamDetails.GetVideoCodec(iStream));
      XMLUtils::SetFloat(&stream, "aspect", m_streamDetails.GetVideoAspect(iStream));
      XMLUtils::SetInt(&stream, "width", m_streamDetails.GetVideoWidth(iStream));
      XMLUtils::SetInt(&stream, "height", m_streamDetails.GetVideoHeight(iStream));
      XMLUtils::SetInt(&stream, "durationinseconds", m_streamDetails.GetVideoDuration(iStream));
      XMLUtils::SetString(&stream, "stereomode", m_streamDetails.GetStereoMode(iStream));
      streamdetails.InsertEndChild(stream);
    }
    for (int iStream=1; iStream<=m_streamDetails.GetAudioStreamCount(); iStream++)
    {
      TiXmlElement stream("audio");
      XMLUtils::SetString(&stream, "codec", m_streamDetails.GetAudioCodec(iStream));
      XMLUtils::SetString(&stream, "language", m_streamDetails.GetAudioLanguage(iStream));
      XMLUtils::SetInt(&stream, "channels", m_streamDetails.GetAudioChannels(iStream));
      streamdetails.InsertEndChild(stream);
    }
    for (int iStream=1; iStream<=m_streamDetails.GetSubtitleStreamCount(); iStream++)
    {
      TiXmlElement stream("subtitle");
      XMLUtils::SetString(&stream, "language", m_streamDetails.GetSubtitleLanguage(iStream));
      streamdetails.InsertEndChild(stream);
    }
    fileinfo.InsertEndChild(streamdetails);
    movie->InsertEndChild(fileinfo);
  }  /* if has stream details */

  // cast
  for (iCast it = m_cast.begin(); it != m_cast.end(); ++it)
  {
    // add a <actor> tag
    TiXmlElement cast("actor");
    // NOTE(review): this local shadows the 'node' parameter. It works, but
    // is easy to misread.
    TiXmlNode *node = movie->InsertEndChild(cast);
    XMLUtils::SetString(node, "name", it->strName);
    XMLUtils::SetString(node, "role", it->strRole);
    XMLUtils::SetInt(node, "order", it->order);
    XMLUtils::SetString(node, "thumb", it->thumbUrl.GetFirstThumb().m_url);
  }
  XMLUtils::SetStringArray(movie, "artist", m_artist);
  XMLUtils::SetStringArray(movie, "showlink", m_showLink);

  for (const auto& namedSeason : m_namedSeasons)
  {
    TiXmlElement season("namedseason");
    season.SetAttribute("number", namedSeason.first);
    // NOTE(review): TiXmlElement::SetValue changes the element *name*, not
    // its text content — confirm this produces the intended XML output.
    season.SetValue(namedSeason.second);
    movie->InsertEndChild(season);
  }

  TiXmlElement resume("resume");
  XMLUtils::SetFloat(&resume, "position", (float)m_resumePoint.timeInSeconds);
  XMLUtils::SetFloat(&resume, "total", (float)m_resumePoint.totalTimeInSeconds);
  movie->InsertEndChild(resume);

  XMLUtils::SetDateTime(movie, "dateadded", m_dateAdded);

  if (additionalNode)
    movie->InsertEndChild(*additionalNode);

  return true;
}
// Generates audio of length GetDuration() into every selected wave track,
// via a temporary track that is pasted over the selection; sync-locked
// tracks are adjusted to match. Returns false on any failure.
bool Generator::Process()
{
   if (GetDuration() < 0.0)
      return false;

   // Set up mOutputTracks.
   // This effect needs Track::All for sync-lock grouping.
   this->CopyInputTracks(Track::All);

   // Iterate over the tracks
   bool bGoodResult = true;
   int ntrack = 0;
   TrackListIterator iter(mOutputTracks);
   Track* t = iter.First();

   while (t != NULL)
   {
      if (t->GetKind() == Track::Wave && t->GetSelected())
      {
         WaveTrack* track = (WaveTrack*)t;

         bool editClipCanMove;
         gPrefs->Read(wxT("/GUI/EditClipCanMove"), &editClipCanMove, true);

         //if we can't move clips, and we're generating into an empty space,
         //make sure there's room.
         if (!editClipCanMove &&
             track->IsEmpty(mT0, mT1+1.0/track->GetRate()) &&
             !track->IsEmpty(mT0, mT0+GetDuration()-(mT1-mT0)-1.0/track->GetRate()))
         {
            wxMessageBox(
                  _("There is not enough room available to generate the audio"),
                  _("Error"), wxICON_STOP);
            Failure();
            return false;
         }

         if (GetDuration() > 0.0)
         {
            AudacityProject *p = GetActiveProject();
            // Create a temporary track
            std::unique_ptr<WaveTrack> tmp(
               mFactory->NewWaveTrack(track->GetSampleFormat(), track->GetRate()));
            BeforeTrack(*track);
            BeforeGenerate();

            // Fill it with data
            if (!GenerateTrack(&*tmp, *track, ntrack))
               bGoodResult = false;
            else
            {
               // Transfer the data from the temporary track to the actual one
               tmp->Flush();
               // NOTE(review): raw new handed to SetTimeWarper — presumably
               // it takes ownership; confirm, otherwise this leaks.
               SetTimeWarper(new StepTimeWarper(mT0+GetDuration(), GetDuration()-(mT1-mT0)));
               bGoodResult = track->ClearAndPaste(p->GetSel0(), p->GetSel1(), &*tmp, true, false, GetTimeWarper());
            }

            if (!bGoodResult)
            {
               Failure();
               return false;
            }
         }
         else
         {
            // If the duration is zero, there's no need to actually
            // generate anything
            track->Clear(mT0, mT1);
         }

         ntrack++;
      }
      else if (t->IsSyncLockSelected())
      {
         // Keep sync-locked (unselected) tracks aligned with the new length.
         t->SyncLockAdjust(mT1, mT0 + GetDuration());
      }
      // Move on to the next track
      t = iter.Next();
   }

   Success();

   this->ReplaceProcessedTracks(bGoodResult);

   mT1 = mT0 + GetDuration(); // Update selection.

   return true;
}
// IMediaSeeking: the stop position for this filter is always the full
// stream duration.
STDMETHODIMP CBaseSplitterFilter::GetStopPosition(LONGLONG* pStop) { return GetDuration(pStop); }
void OnRemove(AuraEffect const* /*aurEff*/, AuraEffectHandleModes /*mode*/)
{
    //if (GetTargetApplication()->GetRemoveMode() != AURA_REMOVE_BY_DEFAULT)
    // Zero remaining duration is used here as the signal that the aura ran
    // its full course (cf. the commented-out remove-mode check above);
    // only then is the target's threat redirection cleared.
    bool const expired = (GetDuration() == 0);
    if (expired)
        GetTarget()->ResetRedirectThreat();
}
void OnRemove(AuraEffect const* /*aurEff*/, AuraEffectHandleModes /*mode*/)
{
    // When the aura is removed with no duration left, reset the caster's
    // threat reduction back to zero. Caster may already be gone, so check
    // it first (GetDuration() is only evaluated when a caster exists).
    Unit* caster = GetCaster();
    if (caster && GetDuration() == 0)
        caster->SetReducedThreatPercent(0, 0);
}
// Returns the time at which this notify's end is triggered: the start
// trigger time plus the notify duration, shifted by the end-offset.
float FAnimNotifyEvent::GetEndTriggerTime() const
{
	const float StartTime = GetTriggerTime();
	const float NotifyLength = GetDuration();
	return StartTime + NotifyLength + EndTriggerTimeOffset;
}
// Fill one downstream media sample with the next audio buffer from the
// demultiplexer. Loops (with pacing sleeps) until a presentable buffer is
// found, handling end-of-file, in-progress seeks/flushes, timestamp
// compensation, early-stall throttling and late-sample discarding along the
// way. Returns S_FALSE at end of stream, otherwise NOERROR (also after a
// caught exception, in which case an empty sample is delivered).
HRESULT CAudioPin::FillBuffer(IMediaSample *pSample)
{
  try
  {
    CDeMultiplexer& demux=m_pTsReaderFilter->GetDemultiplexer();
    CBuffer* buffer=NULL;
    bool earlyStall = false;
    //get file-duration and set m_rtDuration
    GetDuration(NULL);

    do
    {
      //Check if we need to wait for a while
      DWORD timeNow = GET_TIME_NOW();
      while (timeNow < (m_LastFillBuffTime + m_FillBuffSleepTime))
      {
        Sleep(1);
        timeNow = GET_TIME_NOW();
      }
      m_LastFillBuffTime = timeNow;

      //did we reach the end of the file
      if (demux.EndOfFile())
      {
        int ACnt, VCnt;
        demux.GetBufferCounts(&ACnt, &VCnt);
        if (ACnt <= 0 && VCnt <= 0) //have we used all the data ?
        {
          LogDebug("audPin:set eof");
          m_FillBuffSleepTime = 5;
          CreateEmptySample(pSample);
          m_bInFillBuffer = false;
          return S_FALSE; //S_FALSE will notify the graph that end of file has been reached
        }
      }

      //if the filter is currently seeking to a new position
      //or this pin is currently seeking to a new position then
      //we dont try to read any packets, but simply return...
      if (m_pTsReaderFilter->IsSeeking() || m_pTsReaderFilter->IsStopping() || demux.m_bFlushRunning || !m_pTsReaderFilter->m_bStreamCompensated)
      {
        m_FillBuffSleepTime = 5;
        CreateEmptySample(pSample);
        m_bInFillBuffer = false;
        if (demux.m_bFlushRunning || !m_pTsReaderFilter->m_bStreamCompensated)
        {
          //Force discon on next good sample
          m_sampleCount = 0;
          m_bDiscontinuity=true;
        }
        if (!m_pTsReaderFilter->m_bStreamCompensated && (m_nNextAFT != 0))
        {
          ClearAverageFtime();
        }
        return NOERROR;
      }
      else
      {
        m_FillBuffSleepTime = 1;
        m_bInFillBuffer = true;
      }

      // Get next audio buffer from demultiplexer
      buffer=demux.GetAudio(earlyStall, m_rtStart);

      if (buffer==NULL)
      {
        // Nothing available yet - back off a little before retrying.
        m_FillBuffSleepTime = 5;
      }
      else
      {
        m_bPresentSample = true ;
        if (buffer->GetForcePMT())
        {
          m_bAddPMT = true;
        }
        if (buffer->GetDiscontinuity())
        {
          m_bDiscontinuity = true;
        }

        CRefTime RefTime,cRefTime ;
        bool HasTimestamp ;
        double fTime = 0.0;   // sample time relative to the reference clock (seconds)
        double clock = 0.0;   // media position relative to m_rtStart (seconds)
        double stallPoint = AUDIO_STALL_POINT;
        //check if it has a timestamp
        if ((HasTimestamp=buffer->MediaTime(RefTime)))
        {
          cRefTime = RefTime ;
          cRefTime -= m_rtStart ;
          //adjust the timestamp with the compensation
          cRefTime-= m_pTsReaderFilter->GetCompensation() ;

          //Check if total compensation offset is more than +/-10ms
          if (abs(m_pTsReaderFilter->GetTotalDeltaComp()) > 100000)
          {
            if (!m_bDisableSlowPlayDiscontinuity)
            {
              //Force downstream filters to resync by setting discontinuity flag
              pSample->SetDiscontinuity(TRUE);
            }
            m_pTsReaderFilter->ClearTotalDeltaComp();
          }

          REFERENCE_TIME RefClock = 0;
          m_pTsReaderFilter->GetMediaPosition(&RefClock) ;
          // REFERENCE_TIME is in 100ns units; divide by 1e7 for seconds.
          clock = (double)(RefClock-m_rtStart.m_time)/10000000.0 ;
          fTime = ((double)cRefTime.m_time/10000000.0) - clock ;

          //Calculate a mean 'fTime' value using 'raw' fTime data
          CalcAverageFtime(fTime);
          if (timeNow < (m_pTsReaderFilter->m_lastPauseRun + (30*1000)))
          {
            //do this for 30s after start of play, a flush or pause
            m_fAFTMeanRef = m_fAFTMean;
          }

          //Add compensation time for external downstream audio delay
          //to stop samples becoming 'late' (note: this does NOT change the actual sample timestamps)
          fTime -= m_fAFTMeanRef; //remove the 'mean' offset
          fTime += ((AUDIO_STALL_POINT/2.0) + 0.2); //re-centre the timing

          //Discard late samples at start of play,
          //and samples outside a sensible timing window during play
          //(helps with signal corruption recovery)
          cRefTime -= m_pTsReaderFilter->m_ClockOnStart.m_time;

          if (fTime < -2.0)
          {
            if ((m_dRateSeeking == 1.0) && (m_pTsReaderFilter->State() == State_Running) && (clock > 8.0) && !demux.m_bFlushDelegated)
            {
              //Very late - request internal flush and re-sync to stream
              demux.DelegatedFlush(false, false);
              LogDebug("audPin : Audio to render very late, flushing") ;
            }
          }

          // Presentation window: sample must be past the initial delay and
          // within [-0.3 or -0.5, 2.5] seconds of the reference clock.
          if ((cRefTime.m_time >= PRESENT_DELAY) &&
              (fTime > ((cRefTime.m_time >= FS_TIM_LIM) ? -0.3 : -0.5)) && (fTime < 2.5))
          {
            if ((fTime > stallPoint) && (m_sampleCount > 2))
            {
              //Too early - stall to avoid over-filling of audio decode/renderer buffers,
              //but don't enable at start of play to make sure graph starts properly
              // (the demux still owns the buffer - it is not erased, so it
              // will be fetched again on the next iteration)
              m_FillBuffSleepTime = 10;
              buffer = NULL;
              earlyStall = true;
              continue;
            }
          }
          else //Don't drop samples normally - it upsets the rate matching in the audio renderer
          {
            // Sample is too late.
            m_bPresentSample = false ;
          }
          cRefTime += m_pTsReaderFilter->m_ClockOnStart.m_time;
        }

        if (m_bPresentSample && (m_dRateSeeking == 1.0) && (buffer->Length() > 0))
        {
          //do we need to set the discontinuity flag?
          if (m_bDiscontinuity)
          {
            //ifso, set it
            pSample->SetDiscontinuity(TRUE);
            LogDebug("audPin: Set discontinuity L:%d B:%d fTime:%03.3f SampCnt:%d", m_bDiscontinuity, buffer->GetDiscontinuity(), (float)fTime, m_sampleCount);
            m_bDiscontinuity=FALSE;
          }

          if (m_bAddPMT && !m_pTsReaderFilter->m_bDisableAddPMT && !m_bPinNoAddPMT)
          {
            //Add MediaType info to sample
            CMediaType mt;
            int audioIndex = 0;
            demux.GetAudioStream(audioIndex);
            demux.GetAudioStreamType(audioIndex, mt, m_iPosition);
            pSample->SetMediaType(&mt);
            SetMediaType(&mt);
            WAVEFORMATEX* wfe = (WAVEFORMATEX*)mt.Format();
            LogDebug("audPin: Add pmt, fTime:%03.3f SampCnt:%d, Ch:%d, Sr:%d", (float)fTime, m_sampleCount, wfe->nChannels, wfe->nSamplesPerSec);
            m_bAddPMT = false; //Only add once
          }

          if (HasTimestamp)
          {
            //now we have the final timestamp, set timestamp in sample
            // (scaled by the seek rate so trick-play positions stay correct)
            REFERENCE_TIME refTime=(REFERENCE_TIME)cRefTime;
            refTime = (REFERENCE_TIME)((double)refTime/m_dRateSeeking);
            refTime += m_pTsReaderFilter->m_regAudioDelay; //add offset (to produce delay relative to video)
            pSample->SetSyncPoint(TRUE);
            pSample->SetTime(&refTime,&refTime);

            if (m_pTsReaderFilter->m_ShowBufferAudio || fTime < 0.02 || (m_sampleCount < 3))
            {
              // Diagnostic logging of buffer levels and timing.
              int cntA, cntV;
              CRefTime firstAudio, lastAudio;
              CRefTime firstVideo, lastVideo, zeroVideo;
              cntA = demux.GetAudioBufferPts(firstAudio, lastAudio);
              cntV = demux.GetVideoBufferPts(firstVideo, lastVideo, zeroVideo);

              LogDebug("Aud/Ref : %03.3f, Compensated = %03.3f ( %0.3f A/V buffers=%02d/%02d), Clk : %f, SampCnt %d, Sleep %d ms, stallPt %03.3f", (float)RefTime.Millisecs()/1000.0f, (float)cRefTime.Millisecs()/1000.0f, fTime,cntA,cntV, clock, m_sampleCount, m_FillBuffSleepTime, (float)stallPoint);
            }
            if (m_pTsReaderFilter->m_ShowBufferAudio) m_pTsReaderFilter->m_ShowBufferAudio--;

            // CalcAverageFtime(fTime);

            if (((float)cRefTime.Millisecs()/1000.0f) > AUDIO_READY_POINT)
            {
              m_pTsReaderFilter->m_audioReady = true;
            }
          }
          else
          {
            //buffer has no timestamp
            pSample->SetTime(NULL,NULL);
            pSample->SetSyncPoint(FALSE);
          }

          //copy buffer in sample
          BYTE* pSampleBuffer;
          pSample->SetActualDataLength(buffer->Length());
          pSample->GetPointer(&pSampleBuffer);
          memcpy(pSampleBuffer,buffer->Data(),buffer->Length());

          //delete the buffer and return
          delete buffer;
          demux.EraseAudioBuff();
        }
        else
        { // Buffer was not displayed because it was out of date, search for next.
          delete buffer;
          demux.EraseAudioBuff();
          buffer=NULL ;
          m_FillBuffSleepTime = (m_dRateSeeking == 1.0) ? 1 : 2;
          m_bDiscontinuity = TRUE; //Next good sample will be discontinuous
        }
      }
      earlyStall = false;
    } while (buffer==NULL);

    m_bInFillBuffer = false;
    return NOERROR;
  }
  // Should we return something else than NOERROR when hitting an exception?
  catch(int e)
  {
    LogDebug("audPin:fillbuffer exception %d", e);
  }
  catch(...)
  {
    LogDebug("audPin:fillbuffer exception ...");
  }
  // Exception fall-through: deliver an empty sample and flag a discontinuity
  // so downstream filters resync on the next good sample.
  m_FillBuffSleepTime = 5;
  CreateEmptySample(pSample);
  m_bDiscontinuity = TRUE; //Next good sample will be discontinuous
  m_bInFillBuffer = false;
  return NOERROR;
}