static void uploadTextures(QOpenGLContext* context, SharedFrame& frame, GLuint texture[]) { int width = frame.get_image_width(); int height = frame.get_image_height(); const uint8_t* image = frame.get_image(); QOpenGLFunctions* f = context->functions(); // Upload each plane of YUV to a texture. if (texture[0] && texture[1] && texture[2]) f->glDeleteTextures(3, texture); check_error(f); f->glGenTextures(3, texture); check_error(f); f->glPixelStorei(GL_UNPACK_ROW_LENGTH, width); f->glBindTexture (GL_TEXTURE_2D, texture[0]); check_error(f); f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); check_error(f); f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); check_error(f); f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); check_error(f); f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); check_error(f); f->glTexImage2D (GL_TEXTURE_2D, 0, GL_RED, width, height, 0, GL_RED, GL_UNSIGNED_BYTE, image); check_error(f); f->glBindTexture (GL_TEXTURE_2D, texture[1]); check_error(f); f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); check_error(f); f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); check_error(f); f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); check_error(f); f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); check_error(f); int y = context->isOpenGLES() ? 
2 : 4; f->glTexImage2D (GL_TEXTURE_2D, 0, GL_RED, width/2, height/y, 0, GL_RED, GL_UNSIGNED_BYTE, image + width * height); check_error(f); f->glBindTexture (GL_TEXTURE_2D, texture[2]); check_error(f); f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); check_error(f); f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); check_error(f); f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); check_error(f); f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); check_error(f); f->glTexImage2D (GL_TEXTURE_2D, 0, GL_RED, width/2, height/y, 0, GL_RED, GL_UNSIGNED_BYTE, image + width * height + width/2 * height/2); check_error(f); }
void AudioGraphSpectrum::processSpectrum(const SharedFrame&frame) { if (!isVisible()) return; mlt_audio_format format = mlt_audio_s16; int channels = frame.get_audio_channels(); int frequency = frame.get_audio_frequency(); int samples = frame.get_audio_samples(); Mlt::Frame mFrame = frame.clone(true, false, false); m_filter->process(mFrame); mFrame.get_audio( format, frequency, channels, samples ); QVector<double> bands(AUDIBLE_BAND_COUNT); float* bins = (float*)m_filter->get_data("bins"); int bin_count = m_filter->get_int("bin_count"); double bin_width = m_filter->get_double("bin_width"); int band = 0; bool firstBandFound = false; for (int bin = 0; bin < bin_count; bin++) { // Loop through all the FFT bins and align bin frequencies with // band frequencies. double F = bin_width * (double)bin; if (!firstBandFound) { // Skip bins that come before the first band. if (BAND_TAB[band + FIRST_AUDIBLE_BAND_INDEX].low > F) { continue; } else { firstBandFound = true; bands[band] = bins[bin]; } } else if (BAND_TAB[band + FIRST_AUDIBLE_BAND_INDEX].high < F) { // This bin is outside of this band - move to the next band. band++; if ((band + FIRST_AUDIBLE_BAND_INDEX) > LAST_AUDIBLE_BAND_INDEX) { // Skip bins that come after the last band. break; } bands[band] = bins[bin]; } else if (bands[band] < bins[bin] ) { // Pick the highest bin level within this band to represent the // whole band. bands[band] = bins[bin]; } } // At this point, bands contains the magnitude of the signal for each // band. Convert to dB. for (band = 0; band < bands.size(); band++) { double mag = bands[band]; double dB = mag > 0.0 ? 20 * log10( mag ) : -1000.0; bands[band] = dB; } // Update the audio signal widget QMetaObject::invokeMethod(m_graphWidget, "showAudio", Qt::QueuedConnection, Q_ARG(const QVector<double>&, bands)); }
// Track the displayed frame's position while a multitrack producer is active.
void TimelineDock::onShowFrame(const SharedFrame& frame)
{
    if (!MLT.isMultitrack())
        return;
    m_position = frame.get_position();
    emit positionChanged();
}
void AudioPeakMeterScopeWidget::refreshScope(const QSize& /*size*/, bool /*full*/) { SharedFrame sFrame; while (m_queue.count() > 0) { sFrame = m_queue.pop(); if (sFrame.is_valid() && sFrame.get_audio_samples() > 0) { mlt_audio_format format = mlt_audio_s16; int channels = sFrame.get_audio_channels(); int frequency = sFrame.get_audio_frequency(); int samples = sFrame.get_audio_samples(); Mlt::Frame mFrame = sFrame.clone(true, false, false); m_filter->process(mFrame); mFrame.get_audio( format, frequency, channels, samples ); QVector<double> levels; for (int i = 0; i < channels; i++) { QString s = QString("meta.media.audio_level.%1").arg(i); double audioLevel = mFrame.get_double(s.toLatin1().constData()); if (audioLevel == 0.0) { levels << -100.0; } else { levels << 20 * log10(audioLevel); } } QMetaObject::invokeMethod(m_audioMeter, "showAudio", Qt::QueuedConnection, Q_ARG(const QVector<double>&, levels)); } }
void MonitorAudioLevel::refreshScope(const QSize& /*size*/, bool /*full*/) { SharedFrame sFrame; while (m_queue.count() > 0) { sFrame = m_queue.pop(); if (sFrame.is_valid() && sFrame.get_audio_samples() > 0) { mlt_audio_format format = mlt_audio_s16; int channels = sFrame.get_audio_channels(); int frequency = sFrame.get_audio_frequency(); int samples = sFrame.get_audio_samples(); Mlt::Frame mFrame = sFrame.clone(true, false, false); m_filter->process(mFrame); mFrame.get_audio( format, frequency, channels, samples ); if (samples == 0) { // There was an error processing audio from frame continue; } QVector<int> levels; for (int i = 0; i < audioChannels; i++) { QString s = QString("meta.media.audio_level.%1").arg(i); double audioLevel = mFrame.get_double(s.toLatin1().constData()); if (audioLevel == 0.0) { levels << -100; } else { levels << (int) levelToDB(audioLevel); } } QMetaObject::invokeMethod(this, "setAudioValues", Qt::QueuedConnection, Q_ARG(const QVector<int>&, levels)); } }
// Track the displayed frame's position, unless this update was triggered by
// a programmatic seek that asked to be ignored.
void TimelineDock::onShowFrame(const SharedFrame& frame)
{
    // One-shot flag: consume it and skip exactly this update.
    if (m_ignoreNextPositionChange) {
        m_ignoreNextPositionChange = false;
        return;
    }
    if (MLT.isMultitrack()) {
        m_position = frame.get_position();
        emit positionChanged();
    }
}
// Sync the position spinner and scrubber to the frame just shown, or signal
// end-of-stream once the position reaches the producer's duration.
void Player::onFrameDisplayed(const SharedFrame& frame)
{
    const int position = frame.get_position();
    if (position < m_duration) {
        m_position = position;
        // Block signals so setValue() does not trigger a feedback seek.
        m_positionSpinner->blockSignals(true);
        m_positionSpinner->setValue(position);
        m_positionSpinner->blockSignals(false);
        m_scrubber->onSeek(position);
    } else {
        // position >= m_duration: playback ran past the end.
        emit endOfStream();
    }
}