// Milliseconds of media left to play; clamped so a position past the
// reported length never yields a negative remaining time.
qint64 MediaObject::remainingTime() const
{
    const qint64 total = totalTime();
    const qint64 current = currentTime();
    return (current > total) ? 0 : total - current;
}
/**
 * Set the property of this metajoint to the value passed in
 *
 * @param property The property to set
 * @param value The value to set it to
 * @return True on success
 */
bool MetaJoint::set(PROPERTY property, double value){
    double currVel;
    double newVia;
    switch (property){
    case META_VALUE:
        // Directly set the joint's reported position; no trajectory planning.
        position = value;
        break;
    case POSITION:
    case GOAL:
        controller->update();
        // Already at (or commanded to) this target; nothing to replan.
        if (value == interStep){
            break;
        }
        // Current velocity estimate: one-step finite difference along the
        // active fourth-order trajectory, or zero if no trajectory is active.
        currVel = (currStepCount != 0 && currParams.valid) ? (interpolateFourthOrder(currParams, currStepCount) - interpolateFourthOrder(currParams, currStepCount - 1)) : 0;
        if (!startParams.valid){
            // No trajectory in progress: plan a fresh one from the current
            // commanded position (interStep) to the new goal, with the
            // midpoint as the via point at half the total step count.
            currStepCount = 0;
            totalStepCount = totalTime(interStep, value, currVel, interVel) * frequency;
            if (totalStepCount > 0) {
                startParams = initFourthOrder(interStep, currVel, (value + interStep)/2, (double)totalStepCount / 2, value, totalStepCount);
                currParams = startParams;
            }
            lastGoal = interStep;
        } else if (currStepCount != currParams.tv) {
            // Mid-trajectory retarget: use the current interpolated offset
            // from the trajectory start (ths appears to be the start
            // position -- TODO confirm) as the new via point, and extend the
            // total step count from where we are now.
            newVia = (startParams.ths + interpolateFourthOrder(currParams, currStepCount));
            totalStepCount = currStepCount + (totalTime(newVia, value, currVel, interVel) * frequency);
            currParams = initFourthOrder( startParams.ths, currVel, newVia, currStepCount, value, totalStepCount );
        }
        currGoal = value;
        break;
    case SPEED:
    case VELOCITY:
        // Only positive speeds are accepted; non-positive values are ignored.
        if (value > 0) interVel = value;
        break;
    case READY:
        ready = (bool)value;
        break;
    case INTERPOLATION_STEP:
        // Invalidate any planned trajectory and reset step bookkeeping.
        currParams.valid = false;
        startParams.valid = false;
        totalStepCount = 0;
        currStepCount = 0;
        break;
    default:
        // Unknown property.
        return false;
    }
    return true;
}
// Dump out performance metrics over some time interval void LLPerfStats::dumpIntervalPerformanceStats() { // Ensure output file is OK openPerfStatsFile(); if ( mFrameStatsFile ) { LLSD stats = LLSD::emptyMap(); LLStatAccum::TimeScale scale; if ( getReportPerformanceInterval() == 0.f ) { scale = LLStatAccum::SCALE_PER_FRAME; } else if ( getReportPerformanceInterval() < 0.5f ) { scale = LLStatAccum::SCALE_100MS; } else { scale = LLStatAccum::SCALE_SECOND; } // Write LLSD into log stats["utc_time"] = (LLSD::String) LLError::utcTime(); stats["timestamp"] = U64_to_str((totalTime() / 1000) + (gUTCOffset * 1000)); // milliseconds since epoch stats["frame_number"] = (LLSD::Integer) LLFrameTimer::getFrameCount(); // Add process-specific frame info. addProcessFrameInfo(stats, scale); LLPerfBlock::addStatsToLLSDandReset( stats, scale ); mFrameStatsFile << LLSDNotationStreamer(stats) << std::endl; } }
void PlaybackWidget::slotTimeUpdaterTimeout() { if ( m_mediaObject->state() == Phonon::ErrorState ) { slotError(); return; } long int current = m_mediaObject->currentTime(); int hours = (int)(current / (long int)( 60 * 60 * 1000 )); int mins = (int)((current / (long int)( 60 * 1000 )) - (long int)(hours * 60)); int secs = (int)((current / (long int)1000) - (long int)(hours * 60 + mins * 60)); QTime elapsedTime(hours, mins, secs); if ( m_isZeroTime ) { m_isZeroTime = false; long int total = m_mediaObject->totalTime(); hours = (int)(total / (long int)( 60 * 60 * 1000 )); mins = (int)((total / (long int)( 60 * 1000 )) - (long int)(hours * 60)); secs = (int)((total / (long int)1000) - (long int)(hours * 60 + mins * 60)); QTime totalTime(hours, mins, secs); m_totalTimeLabel->setText(totalTime.toString("H:mm:ss")); } m_elapsedTimeLabel->setText(elapsedTime.toString("H:mm:ss")); }
// Track a playback-position change: move the progress slider and refresh
// the current-time label (mm:ss; minutes wrap at 60).
void widget::positionChanged(qint64 position)
{
    ui->hSlider_SongProgress->setValue(position);

    const int minutes = (position / 60000) % 60;
    const int seconds = (position / 1000) % 60;
    const int msecs   = position % 1000;
    QTime totalTime(0, minutes, seconds, msecs);
    ui->label_Curtime->setText(tr("%1").arg(totalTime.toString("mm:ss")));
    // showTime(position);
}
void PresentationAudioWidget::slotTimeUpdaterTimeout() { if (d->mediaObject->error() != QMediaPlayer::NoError) { slotError(); return; } qint64 current = d->mediaObject->position(); int hours = (int)(current / (qint64)(60 * 60 * 1000)); int mins = (int)((current / (qint64)(60 * 1000)) - (qint64)(hours * 60)); int secs = (int)((current / (qint64)1000) - (qint64)(hours * 60 + mins * 60)); QTime elapsedTime(hours, mins, secs); if (d->isZeroTime && d->mediaObject->duration() > 0) { d->isZeroTime = false; qint64 total = d->mediaObject->duration(); hours = (int)(total / (qint64)(60 * 60 * 1000)); mins = (int)((total / (qint64)(60 * 1000)) - (qint64)(hours * 60)); secs = (int)((total / (qint64)1000) - (qint64)(hours * 60 + mins * 60)); QTime totalTime(hours, mins, secs); m_totalTimeLabel->setText(totalTime.toString(QString::fromLatin1("H:mm:ss"))); } m_elapsedTimeLabel->setText(elapsedTime.toString(QString::fromLatin1("H:mm:ss"))); }
// Total media length in milliseconds, or -1 when no backend object exists
// (i.e. nothing is loaded, so the length is unknown).
qint64 MediaObject::totalTime() const
{
    K_D(const MediaObject);
    if (!d->m_backendObject)
        return -1;
    return INTERFACE_CALL(totalTime());
}
// static void LLFrameTimer::updateFrameTime() { U64 total_time = totalTime(); sFrameDeltaTime = total_time - sTotalTime; sTotalTime = total_time; sTotalSeconds = U64_to_F64(sTotalTime) * USEC_TO_SEC_F64; sFrameTime = U64_to_F64(sTotalTime - sStartTotalTime) * USEC_TO_SEC_F64; }
// 更新timeLabel标签显示的播放时间 void MyWidget::updateTime(qint64 time) { qint64 totalTimeValue = mediaObject->totalTime(); QTime totalTime(0, (totalTimeValue / 60000) % 60, (totalTimeValue / 1000) % 60); QTime currentTime(0, (time / 60000) % 60, (time / 1000) % 60); QString str = currentTime.toString("mm:ss") + " / " + totalTime.toString("mm:ss"); timeLabel->setText(str); }
/// Print a report with current and total time etc. void SimulatorTimer::report(std::ostream& os) const { os << "\n\n--------------- Simulation step number " << currentStepNum() << " ---------------" << "\n Current time (days) " << Opm::unit::convert::to(simulationTimeElapsed(), Opm::unit::day) << "\n Current stepsize (days) " << Opm::unit::convert::to(currentStepLength(), Opm::unit::day) << "\n Total time (days) " << Opm::unit::convert::to(totalTime(), Opm::unit::day) << "\n" << std::endl; }
void PerfGraph::recursivelyPrintHeaviestGraph(PerfNode * current_node, FullTable & vtable, unsigned int current_depth) { mooseAssert(!_section_time_ptrs.empty(), "updateTiming() must be run before recursivelyPrintGraph!"); auto & name = _id_to_section_name[current_node->id()]; auto section = std::string(current_depth * 2, ' ') + name; // The total time of the root node auto total_root_time = _section_time_ptrs[0]->_total; auto num_calls = current_node->numCalls(); auto self = std::chrono::duration<double>(current_node->selfTime()).count(); auto self_avg = self / static_cast<Real>(num_calls); auto self_percent = 100. * self / total_root_time; auto children = std::chrono::duration<double>(current_node->childrenTime()).count(); auto children_avg = children / static_cast<Real>(num_calls); auto children_percent = 100. * children / total_root_time; auto total = std::chrono::duration<double>(current_node->totalTime()).count(); auto total_avg = total / static_cast<Real>(num_calls); auto total_percent = 100. * total / total_root_time; vtable.addRow(section, num_calls, self, self_avg, self_percent, children, children_avg, children_percent, total, total_avg, total_percent); current_depth++; if (!current_node->children().empty()) { PerfNode * heaviest_child = nullptr; for (auto & child_it : current_node->children()) { auto current_child = child_it.second.get(); if (!heaviest_child || (current_child->totalTime() > heaviest_child->totalTime())) heaviest_child = current_child; } recursivelyPrintHeaviestGraph(heaviest_child, vtable, current_depth); } }
// Called when a seek completes on a graph; only the current graph matters.
void MediaObject::seekingFinished(MediaGraph *mg)
{
    if (mg != currentGraph()) {
        return;
    }

    updateTargetTick();

    // Re-arm the one-shot end-of-media notifications if the seek moved the
    // position back before their trigger points.
    if (currentTime() < totalTime() - m_prefinishMark) {
        m_prefinishMarkSent = false;
    }
    if (currentTime() < totalTime() - PRELOAD_TIME + m_transitionTime) {
        m_aboutToFinishSent = false;
    }

    //this helps the update of the application (seekslider for example)
    if (m_state == PausedState || m_state == PlayingState) {
        emit tick(currentTime());
    }
}
// Default-construct an HTTP operation: no reply queue or user handler yet,
// default policy class, zero priority, tracing off.  The reference count
// starts held (RefCounted(true)).
HttpOperation::HttpOperation()
	: LLCoreInt::RefCounted(true),
	  mReplyQueue(NULL),
	  mUserHandler(NULL),
	  mReqPolicy(HttpRequest::DEFAULT_POLICY_ID),
	  mReqPriority(0U),
	  mTracing(HTTP_TRACE_OFF)
{
	// Record the clock at construction; presumably used as the creation
	// timestamp for metrics reporting -- confirm against consumers.
	mMetricCreated = totalTime();
}
// Current time in microseconds, taken either from the shared frame timer
// or from the raw clock depending on how this accumulator was configured.
U64 LLStatAccum::getCurrentUsecs() const
{
    return mUseFrameTimer ? LLFrameTimer::getTotalTime() : totalTime();
}
// Update the duration label with "current / total". Values appear to be in
// seconds (split into h/m/s/ms fields); an empty string is shown when both
// position and duration are zero.
void Player::updateDurationInfo(qint64 currentInfo)
{
    QString tStr;
    if (currentInfo || duration) {
        QTime currentTime((currentInfo / 3600) % 60, (currentInfo / 60) % 60,
                          currentInfo % 60, (currentInfo * 1000) % 1000);
        QTime totalTime((duration / 3600) % 60, (duration / 60) % 60,
                        duration % 60, (duration * 1000) % 1000);
        QString format = (duration > 3600) ? "hh:mm:ss" : "mm:ss";
        tStr = currentTime.toString(format) + " / " + totalTime.toString(format);
    }
    labelDuration->setText(tStr);
}
// Simple read-eval loop: prompt, read a line, and hand it to execute()
// together with the command history and accumulated timing; stop once
// execute() returns false.
int main()
{
    std::string input;
    std::vector<std::string> history{};
    std::chrono::microseconds totalTime(0);

    while (true) {
        std::cout << "> ";
        std::getline(std::cin, input);
        if (!execute(input, history, totalTime))
            break;
    }
    return 0;
}
// Rebuild the "current / total" time string and broadcast it via
// timeChanged(). Falls back to "00:00 / 00:00" when both values are zero.
void GrlMedia::updateDurationInfo(qint64 currentInfo)
{
    lb_tiempo = "00:00 / 00:00";
    if (currentInfo || m_duration) {
        QTime currentTime((currentInfo / 3600) % 60, (currentInfo / 60) % 60,
                          currentInfo % 60, (currentInfo * 1000) % 1000);
        QTime totalTime((m_duration / 3600) % 60, (m_duration / 60) % 60,
                        m_duration % 60, (m_duration * 1000) % 1000);
        QString format = (m_duration > 3600) ? "hh:mm:ss" : "mm:ss";
        lb_tiempo = currentTime.toString(format) + " / " + totalTime.toString(format);
    }
    emit timeChanged(lb_tiempo);
}
// A graph finished loading its source: publish the new media's properties
// (video presence, metadata, length) and apply the state that was queued
// while loading. Notifications from a non-current graph are ignored.
void MediaObject::loadingFinished(MediaGraph *mg)
{
    if (mg != currentGraph()) {
        return;
    }

#ifndef QT_NO_PHONON_MEDIACONTROLLER
    //Title interface
    m_currentTitle = 0;
    setTitles(currentGraph()->titles());
#endif //QT_NO_PHONON_MEDIACONTROLLER

    HRESULT hr = mg->renderResult();
    if (catchComError(hr)) {
        return;
    }

    if (m_oldHasVideo != currentGraph()->hasVideo()) {
        emit hasVideoChanged(currentGraph()->hasVideo());
    }

#ifndef QT_NO_PHONON_VIDEO
    if (currentGraph()->hasVideo()) {
        updateVideoGeometry();
    }
#endif //QT_NO_PHONON_VIDEO

    emit metaDataChanged(currentGraph()->metadata());
    emit totalTimeChanged(totalTime());

    // Apply the state that was requested while the media was still loading.
    switch (m_nextState) {
    case Phonon::PausedState:
        pause();
        break;
    case Phonon::PlayingState:
        play();
        break;
    case Phonon::ErrorState:
        setState(Phonon::ErrorState);
        break;
    case Phonon::StoppedState:
    default:
        stop();
        break;
    }
}
// Snapshot an outgoing packet so it can be retransmitted if no ack arrives:
// copy the retry/timeout parameters (when supplied), compute the absolute
// expiration time, extract the packet id from the buffer, and keep a
// private copy of the payload when retries are requested.
LLReliablePacket::LLReliablePacket( S32 socket, U8* buf_ptr, S32 buf_len,
                                    LLReliablePacketParams* params) :
	mBuffer(NULL),
	mBufferLength(0)
{
	if (params)
	{
		mHost = params->mHost;
		mRetries = params->mRetries;
		mPingBasedRetry = params->mPingBasedRetry;
		mTimeout = params->mTimeout;
		mCallback = params->mCallback;
		mCallbackData = params->mCallbackData;
		mMessageName = params->mMessageName;
	}
	else
	{
		// No params supplied: a non-retrying packet with no callback.
		mRetries = 0;
		mPingBasedRetry = TRUE;
		mTimeout = 0.f;
		mCallback = NULL;
		mCallbackData = NULL;
		mMessageName = NULL;
	}

	// totalTime() is divided by 1e6 here (i.e. treated as microseconds) to
	// get seconds; adding the timeout yields the absolute expiration time.
	mExpirationTime = (F64)((S64)totalTime())/1000000.0 + mTimeout;
	// The packet id is stored in network byte order at offset PHL_PACKET_ID.
	mPacketID = ntohl(*((U32*)(&buf_ptr[PHL_PACKET_ID])));

	mSocket = socket;
	if (mRetries)
	{
		// Retries requested: keep our own copy of the payload so the packet
		// can be resent after the caller's buffer is gone.
		mBuffer = new U8[buf_len];
		if (mBuffer != NULL)
		{
			memcpy(mBuffer,buf_ptr,buf_len);	/*Flawfinder: ignore*/
			mBufferLength = buf_len;
		}
	}
}
// Swap in the already-prepared next media graph and start playing it.
// The statement order here is load-bearing: flags are reset before the
// swap, and the old graph is only stopped when no cross-fade is wanted.
void MediaObject::switchToNextSource()
{
    // Reset the one-shot end-of-media notifications for the new source.
    m_prefinishMarkSent = false;
    m_aboutToFinishSent = false;
    m_nextSourceReadyToStart = false;

    m_oldHasVideo = currentGraph()->hasVideo();

    qSwap(m_graphs[0], m_graphs[1]); //swap the graphs

    // A negative transition time elsewhere in this file marks cross-fading;
    // a non-negative one means the previous graph must stop immediately.
    if (m_transitionTime >= 0)
        m_graphs[1]->stop(); //make sure we stop the previous graph

    if (currentGraph()->mediaSource().type() != Phonon::MediaSource::Invalid &&
        catchComError(currentGraph()->renderResult())) {
        setState(Phonon::ErrorState);
        return;
    }

    //we need to play the next media
    play();

    //we tell the video widgets to switch now to the new source
#ifndef QT_NO_PHONON_VIDEO
    for (int i = 0; i < m_videoWidgets.count(); ++i) {
        m_videoWidgets.at(i)->setCurrentGraph(currentGraph()->index());
    }
#endif //QT_NO_PHONON_VIDEO

    emit currentSourceChanged(currentGraph()->mediaSource());
    emit metaDataChanged(currentGraph()->metadata());

    if (nextGraph()->hasVideo() != currentGraph()->hasVideo()) {
        emit hasVideoChanged(currentGraph()->hasVideo());
    }

    emit tick(0);
    emit totalTimeChanged(totalTime());

#ifndef QT_NO_PHONON_MEDIACONTROLLER
    setTitles(currentGraph()->titles());
#endif //QT_NO_PHONON_MEDIACONTROLLER
}
void MediaObject::_iface_setCurrentTitle(int title, bool bseek) { #ifdef GRAPH_DEBUG qDebug() << "_iface_setCurrentTitle" << title; #endif const int oldTitle = m_currentTitle; m_currentTitle = title; updateStopPosition(); if (bseek) { //let's seek to the beginning of the song seek(0); } else { updateTargetTick(); } if (oldTitle != title) { emit titleChanged(title); emit totalTimeChanged(totalTime()); } }
// Copy each internally-computed cost metric into the caller-supplied
// external struct.
void QueryCostInfo::translateToExternalFormat(SQL_QUERY_COST_INFO *query_cost_info)
{
    SQL_QUERY_COST_INFO &out = *query_cost_info;
    out.cpuTime           = cpuTime();
    out.ioTime            = ioTime();
    out.msgTime           = msgTime();
    out.idleTime          = idleTime();
    out.totalTime         = totalTime();
    out.cardinality       = cardinality();
    out.estimatedTotalMem = totalMem();
    out.resourceUsage     = resourceUsage();
    out.maxCpuUsage       = maxCpuUsage();
}
// Dispatch DirectShow filter-graph event notifications. Only four events
// carry real behavior (buffering, length change, completion, video resize);
// everything else is debug logging compiled in only under GRAPH_DEBUG.
void MediaObject::handleEvents(Graph graph, long eventCode, long param1)
{
    QString eventDescription; // NOTE(review): unused in this function
    switch (eventCode)
    {
    case EC_BUFFERING_DATA:
        // param1 is the buffering flag; only the current graph is tracked.
        if (graph == currentGraph()->graph()) {
            m_buffering = param1;
            emit stateChanged(state(), m_state);
        }
        break;
    case EC_LENGTH_CHANGED:
        if (graph == currentGraph()->graph()) {
            emit totalTimeChanged( totalTime() );
        }
        break;
    case EC_COMPLETE:
        handleComplete(graph);
        break;
#ifndef QT_NO_PHONON_VIDEO
    case EC_VIDEO_SIZE_CHANGED:
        if (graph == currentGraph()->graph()) {
            updateVideoGeometry();
        }
        break;
#endif //QT_NO_PHONON_VIDEO
#ifdef GRAPH_DEBUG
    // Everything below is purely informational logging of DirectShow events.
    case EC_ACTIVATE: qDebug() << "EC_ACTIVATE: A video window is being " << (param1 ? "ACTIVATED" : "DEACTIVATED"); break;
    case EC_BUILT: qDebug() << "EC_BUILT: Send by the Video Control when a graph has been built. Not forwarded to applications."; break;
    case EC_CLOCK_CHANGED: qDebug() << "EC_CLOCK_CHANGED"; break;
    case EC_CLOCK_UNSET: qDebug() << "EC_CLOCK_UNSET: The clock provider was disconnected."; break;
    case EC_CODECAPI_EVENT: qDebug() << "EC_CODECAPI_EVENT: Sent by an encoder to signal an encoding event."; break;
    case EC_DEVICE_LOST: qDebug() << "EC_DEVICE_LOST: A Plug and Play device was removed or has become available again."; break;
    case EC_DISPLAY_CHANGED: qDebug() << "EC_DISPLAY_CHANGED: The display mode has changed."; break;
    case EC_END_OF_SEGMENT: qDebug() << "EC_END_OF_SEGMENT: The end of a segment has been reached."; break;
    case EC_ERROR_STILLPLAYING: qDebug() << "EC_ERROR_STILLPLAYING: An asynchronous command to run the graph has failed."; break;
    case EC_ERRORABORT: qDebug() << "EC_ERRORABORT: An operation was aborted because of an error."; break;
    case EC_EXTDEVICE_MODE_CHANGE: qDebug() << "EC_EXTDEVICE_MODE_CHANGE: Not supported."; break;
    case EC_FULLSCREEN_LOST: qDebug() << "EC_FULLSCREEN_LOST: The video renderer is switching out of full-screen mode."; break;
    case EC_GRAPH_CHANGED: qDebug() << "EC_GRAPH_CHANGED: The filter graph has changed."; break;
    case EC_NEED_RESTART: qDebug() << "EC_NEED_RESTART: A filter is requesting that the graph be restarted."; break;
    case EC_NOTIFY_WINDOW: qDebug() << "EC_NOTIFY_WINDOW: Notifies a filter of the video renderer's window."; break;
    case EC_OLE_EVENT: qDebug() << "EC_OLE_EVENT: A filter is passing a text string to the application."; break;
    case EC_OPENING_FILE: qDebug() << "EC_OPENING_FILE: The graph is opening a file, or has finished opening a file."; break;
    case EC_PALETTE_CHANGED: qDebug() << "EC_PALETTE_CHANGED: The video palette has changed."; break;
    case EC_PAUSED: qDebug() << "EC_PAUSED: A pause request has completed."; break;
    case EC_PREPROCESS_COMPLETE: qDebug() << "EC_PREPROCESS_COMPLETE: Sent by the WM ASF Writer filter when it completes the pre-processing for multipass encoding."; break;
    case EC_QUALITY_CHANGE: qDebug() << "EC_QUALITY_CHANGE: The graph is dropping samples, for quality control."; break;
    case EC_REPAINT: qDebug() << "EC_REPAINT: A video renderer requires a repaint."; break;
    case EC_SEGMENT_STARTED: qDebug() << "EC_SEGMENT_STARTED: A new segment has started."; break;
    case EC_SHUTTING_DOWN: qDebug() << "EC_SHUTTING_DOWN: The filter graph is shutting down, prior to being destroyed."; break;
    case EC_SNDDEV_IN_ERROR: qDebug() << "EC_SNDDEV_IN_ERROR: A device error has occurred in an audio capture filter."; break;
    case EC_SNDDEV_OUT_ERROR: qDebug() << "EC_SNDDEV_OUT_ERROR: A device error has occurred in an audio renderer filter."; break;
    case EC_STARVATION: qDebug() << "EC_STARVATION: A filter is not receiving enough data."; break;
    case EC_STATE_CHANGE: qDebug() << "EC_STATE_CHANGE: The filter graph has changed state."; break;
    case EC_STEP_COMPLETE: qDebug() << "EC_STEP_COMPLETE: A filter performing frame stepping has stepped the specified number of frames."; break;
    case EC_STREAM_CONTROL_STARTED: qDebug() << "EC_STREAM_CONTROL_STARTED: A stream-control start command has taken effect."; break;
    case EC_STREAM_CONTROL_STOPPED: qDebug() << "EC_STREAM_CONTROL_STOPPED: A stream-control stop command has taken effect."; break;
    case EC_STREAM_ERROR_STILLPLAYING: qDebug() << "EC_STREAM_ERROR_STILLPLAYING: An error has occurred in a stream. The stream is still playing."; break;
    case EC_STREAM_ERROR_STOPPED: qDebug() << "EC_STREAM_ERROR_STOPPED: A stream has stopped because of an error."; break;
    case EC_TIMECODE_AVAILABLE: qDebug() << "EC_TIMECODE_AVAILABLE: Not supported."; break;
    case EC_UNBUILT: qDebug() << "Sent by the Video Control when a graph has been torn down. Not forwarded to applications."; break;
    case EC_USERABORT: qDebug() << "EC_USERABORT: Send by the Video Control when a graph has been torn down. Not forwarded to applications."; break;
    case EC_VMR_RECONNECTION_FAILED: qDebug() << "EC_VMR_RECONNECTION_FAILED: Sent by the VMR-7 and the VMR-9 when it was unable to accept a dynamic format change request from the upstream decoder."; break;
    case EC_VMR_RENDERDEVICE_SET: qDebug() << "EC_VMR_RENDERDEVICE_SET: Sent when the VMR has selected its rendering mechanism."; break;
    case EC_VMR_SURFACE_FLIPPED: qDebug() << "EC_VMR_SURFACE_FLIPPED: Sent when the VMR-7's allocator presenter has called the DirectDraw Flip method on the surface being presented."; break;
    case EC_WINDOW_DESTROYED: qDebug() << "EC_WINDOW_DESTROYED: The video renderer was destroyed or removed from the graph"; break;
    case EC_WMT_EVENT: qDebug() << "EC_WMT_EVENT: Sent by the Windows Media Format SDK when an application uses the ASF Reader filter to play ASF files protected by digital rights management (DRM)."; break;
    case EC_WMT_INDEX_EVENT: qDebug() << "EC_WMT_INDEX_EVENT: Sent by the Windows Media Format SDK when an application uses the ASF Writer to index Windows Media Video files."; break;

    //documented by Microsoft but not supported in the Platform SDK
    // case EC_BANDWIDTHCHANGE : qDebug() << "EC_BANDWIDTHCHANGE: not supported"; break;
    // case EC_CONTENTPROPERTY_CHANGED: qDebug() << "EC_CONTENTPROPERTY_CHANGED: not supported."; break;
    // case EC_EOS_SOON: qDebug() << "EC_EOS_SOON: not supported"; break;
    // case EC_ERRORABORTEX: qDebug() << "EC_ERRORABORTEX: An operation was aborted because of an error."; break;
    // case EC_FILE_CLOSED: qDebug() << "EC_FILE_CLOSED: The source file was closed because of an unexpected event."; break;
    // case EC_LOADSTATUS: qDebug() << "EC_LOADSTATUS: Notifies the application of progress when opening a network file."; break;
    // case EC_MARKER_HIT: qDebug() << "EC_MARKER_HIT: not supported."; break;
    // case EC_NEW_PIN: qDebug() << "EC_NEW_PIN: not supported."; break;
    // case EC_PLEASE_REOPEN: qDebug() << "EC_PLEASE_REOPEN: The source file has changed."; break;
    // case EC_PROCESSING_LATENCY: qDebug() << "EC_PROCESSING_LATENCY: Indicates the amount of time that a component is taking to process each sample."; break;
    // case EC_RENDER_FINISHED: qDebug() << "EC_RENDER_FINISHED: Not supported."; break;
    // case EC_SAMPLE_LATENCY: qDebug() << "EC_SAMPLE_LATENCY: Specifies how far behind schedule a component is for processing samples."; break;
    // case EC_SAMPLE_NEEDED: qDebug() << "EC_SAMPLE_NEEDED: Requests a new input sample from the Enhanced Video Renderer (EVR) filter."; break;
    // case EC_SCRUB_TIME: qDebug() << "EC_SCRUB_TIME: Specifies the time stamp for the most recent frame step."; break;
    // case EC_STATUS: qDebug() << "EC_STATUS: Contains two arbitrary status strings."; break;
    // case EC_VIDEOFRAMEREADY: qDebug() << "EC_VIDEOFRAMEREADY: A video frame is ready for display."; break;

    default: qDebug() << "Unknown event" << eventCode << "(" << param1 << ")"; break;
#else
    default:
        break;
#endif
    }
}
// Milliseconds between the total media length and the current position.
// Note: no clamping is done here; the result follows whatever totalTime()
// and currentTime() report.
qint64 MediaObject::remainingTime() const
{
    const qint64 total = totalTime();
    const qint64 current = currentTime();
    return total - current;
}
* * ALL LINDEN LAB SOURCE CODE IS PROVIDED "AS IS." LINDEN LAB MAKES NO * WARRANTIES, EXPRESS, IMPLIED OR OTHERWISE, REGARDING ITS ACCURACY, * COMPLETENESS OR PERFORMANCE. * $/LicenseInfo$ */ #include "linden_common.h" #include "u64.h" #include "llframetimer.h" // Static members //LLTimer LLFrameTimer::sInternalTimer; U64 LLFrameTimer::sStartTotalTime = totalTime(); F64 LLFrameTimer::sFrameTime = 0.0; U64 LLFrameTimer::sTotalTime = 0; F64 LLFrameTimer::sTotalSeconds = 0.0; S32 LLFrameTimer::sFrameCount = 0; U64 LLFrameTimer::sFrameDeltaTime = 0; const F64 USEC_PER_SECOND = 1000000.0; const F64 USEC_TO_SEC_F64 = 0.000001; // static void LLFrameTimer::updateFrameTime() { U64 total_time = totalTime(); sFrameDeltaTime = total_time - sTotalTime; sTotalTime = total_time; sTotalSeconds = U64_to_F64(sTotalTime) * USEC_TO_SEC_F64;
// Tick-timer handler: on each tick it (1) emits tick() at the configured
// interval, (2) auto-advances titles when enabled, (3) drives cross-fading
// and the prefinishMarkReached / aboutToFinish one-shot notifications, and
// (4) polls buffering progress while buffering.
// (The old comment here, "utility function to save the graph to a file",
// described a different function and did not match this code.)
void MediaObject::timerEvent(QTimerEvent *e)
{
    if (e->timerId() == m_tickTimer.timerId()) {

        const qint64 current = currentTime();
        const qint64 total = totalTime();

        if ( m_tickInterval != 0 && current > m_targetTick) {
            updateTargetTick();
            emit tick(current);
        }

        //check that the title hasn't changed
#ifndef QT_NO_PHONON_MEDIACONTROLLER
        if (m_autoplayTitles && m_currentTitle < _iface_availableTitles() - 1) {
            if (current >= total) {
                //we go to the next title
                _iface_setCurrentTitle(m_currentTitle + 1, false);
                emit tick(current);
            }
            // While auto-advancing titles, the end-of-media logic below is skipped.
            return;
        }
#endif //QT_NO_PHONON_MEDIACONTROLLER

        if (total) {
            const qint64 remaining = total - current;

            // Negative m_transitionTime encodes a cross-fade duration.
            if (m_transitionTime < 0 && m_nextSourceReadyToStart) {
                if (remaining < -m_transitionTime + TIMER_INTERVAL/2) {
                    //we need to switch graphs to run the next source in the queue (with cross-fading)
                    switchToNextSource();
                    return;
                } else if (current < -m_transitionTime) {
                    //we are currently crossfading
                    for (int i = 0; i < m_audioOutputs.count(); ++i) {
                        m_audioOutputs.at(i)->setCrossFadingProgress( currentGraph()->index(), qMin( qreal(1.), qreal(current) / qreal(-m_transitionTime)));
                    }
                }
            }

            // One-shot prefinish mark, re-armed elsewhere on seek.
            if (m_prefinishMark > 0 && !m_prefinishMarkSent && remaining < m_prefinishMark + TIMER_INTERVAL/2) {
#ifdef GRAPH_DEBUG
                qDebug() << "DS9: emit prefinishMarkReached" << remaining << QTime::currentTime().toString();
#endif
                m_prefinishMarkSent = true;
                emit prefinishMarkReached( remaining );
            }

            // One-shot aboutToFinish, fired early enough to preload the next source.
            if (!m_aboutToFinishSent && remaining < PRELOAD_TIME - m_transitionTime + TIMER_INTERVAL/2) {
                //let's take a 2 seconds time time to actually load the next file
#ifdef GRAPH_DEBUG
                qDebug() << "DS9: emit aboutToFinish" << remaining << QTime::currentTime().toString();
#endif
                m_aboutToFinishSent = true;
                emit aboutToFinish();
            }
        } else {
            //total is 0: the stream is probably live (endless)
        }

        if (m_buffering) {
            // Poll the source filter for its network buffering progress.
            ComPointer<IAMNetworkStatus> status(currentGraph()->realSource(), IID_IAMNetworkStatus);
            if (status) {
                long l;
                status->get_BufferingProgress(&l);
                emit bufferStatus(l);
#ifdef GRAPH_DEBUG
                qDebug() << "emit bufferStatus(" << l << ")";
#endif
            }
        }
    }
}
// static // Return seconds since the current frame started F32 LLFrameTimer::getCurrentFrameTime() { U64 frame_time = totalTime() - sTotalTime; return (F32)(U64_to_F64(frame_time) * USEC_TO_SEC_F64); }
// Begin timing: record the start timestamp. The elapsed time since
// construction is added to *sum by the destructor (see ~XML_Timer).
XML_Timer( U64 * sum ) : mSum( sum )
{
    mStart = totalTime();
}
// Per-frame update of all active viewer objects: refresh the global frame
// clock, run idleUpdate() on a snapshot of the active set, kill objects
// whose idleUpdate() returns false (unless time is frozen), and record
// frame statistics.
void LLViewerObjectList::update(LLAgent &agent, LLWorld &world)
{
	LLMemType mt(LLMemType::MTYPE_OBJECT);

	// Update globals
	gVelocityInterpolate = gSavedSettings.getBOOL("VelocityInterpolate");
	gPingInterpolate = gSavedSettings.getBOOL("PingInterpolate");
	gAnimateTextures = gSavedSettings.getBOOL("AnimateTextures");

	// update global timer
	F32 last_time = gFrameTimeSeconds;
	U64 time = totalTime(); // this will become the new gFrameTime when the update is done
	// Time _can_ go backwards, for example if the user changes the system clock.
	// It doesn't cause any fatal problems (just some oddness with stats), so we shouldn't assert here.
	// llassert(time > gFrameTime);
	F64 time_diff = U64_to_F64(time - gFrameTime)/(F64)SEC_TO_MICROSEC;
	gFrameTime = time;
	F64 time_since_start = U64_to_F64(gFrameTime - gStartTime)/(F64)SEC_TO_MICROSEC;
	gFrameTimeSeconds = (F32)time_since_start;
	// Clamp the interval at zero so a backwards clock step never yields a
	// negative frame interval.
	gFrameIntervalSeconds = gFrameTimeSeconds - last_time;
	if (gFrameIntervalSeconds < 0.f)
	{
		gFrameIntervalSeconds = 0.f;
	}

	//clear avatar LOD change counter
	LLVOAvatar::sNumLODChangesThisFrame = 0;

	const F64 frame_time = LLFrameTimer::getElapsedSeconds();

	std::vector<LLViewerObject*> kill_list;
	S32 num_active_objects = 0;
	LLViewerObject *objectp = NULL;

	// Make a copy of the list in case something in idleUpdate() messes with it
	std::vector<LLViewerObject*> idle_list;
	idle_list.reserve( mActiveObjects.size() );

	for (std::set<LLPointer<LLViewerObject> >::iterator active_iter = mActiveObjects.begin();
		active_iter != mActiveObjects.end(); active_iter++)
	{
		objectp = *active_iter;
		if (objectp)
		{
			idle_list.push_back( objectp );
		}
		else
		{
			// There shouldn't be any NULL pointers in the list, but they have caused
			// crashes before. This may be idleUpdate() messing with the list.
			llwarns << "LLViewerObjectList::update has a NULL objectp" << llendl;
		}
	}

	if (gSavedSettings.getBOOL("FreezeTime"))
	{
		// Time is frozen: only clouds and avatars keep updating, and nothing
		// is killed.
		for (std::vector<LLViewerObject*>::iterator iter = idle_list.begin();
			iter != idle_list.end(); iter++)
		{
			objectp = *iter;
			if (objectp->getPCode() == LLViewerObject::LL_VO_CLOUDS ||
				objectp->isAvatar())
			{
				objectp->idleUpdate(agent, world, frame_time);
			}
		}
	}
	else
	{
		for (std::vector<LLViewerObject*>::iterator idle_iter = idle_list.begin();
			idle_iter != idle_list.end(); idle_iter++)
		{
			objectp = *idle_iter;
			if (!objectp->idleUpdate(agent, world, frame_time))
			{
				// If Idle Update returns false, kill object!
				kill_list.push_back(objectp);
			}
			else
			{
				num_active_objects++;
			}
		}
		// Kill after the iteration so killObject() cannot invalidate the list
		// being walked.
		for (std::vector<LLViewerObject*>::iterator kill_iter = kill_list.begin();
			kill_iter != kill_list.end(); kill_iter++)
		{
			objectp = *kill_iter;
			killObject(objectp);
		}
	}

	mNumSizeCulled = 0;
	mNumVisCulled = 0;

	// compute all sorts of time-based stats
	// don't factor frames that were paused into the stats
	if (! mWasPaused)
	{
		gViewerStats->updateFrameStats(time_diff);
	}

	/*
	// Debugging code for viewing orphans, and orphaned parents
	LLUUID id;
	char id_str[UUID_STR_LENGTH + 20];
	for (i = 0; i < mOrphanParents.count(); i++)
	{
		id = sIndexAndLocalIDToUUID[mOrphanParents[i]];
		LLViewerObject *objectp = findObject(id);
		if (objectp)
		{
			sprintf(id_str, "Par: ");
			objectp->mID.toString(id_str + 5);
			addDebugBeacon(objectp->getPositionAgent(), id_str, LLColor4(1.f,0.f,0.f,1.f), LLColor4(1.f,1.f,1.f,1.f));
		}
	}

	LLColor4 text_color;
	for (i = 0; i < mOrphanChildren.count(); i++)
	{
		OrphanInfo oi = mOrphanChildren[i];
		LLViewerObject *objectp = findObject(oi.mChildInfo);
		if (objectp)
		{
			if (objectp->getParent())
			{
				sprintf(id_str, "ChP: ");
				text_color = LLColor4(0.f, 1.f, 0.f, 1.f);
			}
			else
			{
				sprintf(id_str, "ChNoP: ");
				text_color = LLColor4(1.f, 0.f, 0.f, 1.f);
			}
			id = sIndexAndLocalIDToUUID[oi.mParentInfo];
			objectp->mID.toString(id_str + 8);
			addDebugBeacon(objectp->getPositionAgent() + LLVector3(0.f, 0.f, -0.25f),
				id_str, LLColor4(0.25f,0.25f,0.25f,1.f), text_color);
		}
		i++;
	}
	*/

	mNumObjectsStat.addValue(mObjects.count());
	mNumActiveObjectsStat.addValue(num_active_objects);
	mNumSizeCulledStat.addValue(mNumSizeCulled);
	mNumVisCulledStat.addValue(mNumVisCulled);
}
// End timing: accumulate the elapsed time since construction into the
// caller's sum (mSum was supplied to the constructor).
~XML_Timer()
{
    *mSum += (totalTime() - mStart);
}