/*------------------------------------------------------------------------------ | OMX_VideoProcessor::cleanup +-----------------------------------------------------------------------------*/ void OMX_MediaProcessor::cleanup() { LOG_INFORMATION(LOG_TAG, "Cleaning up..."); #if 0 if (m_refresh) { m_BcmHost.vc_tv_hdmi_power_on_best( tv_state.width, tv_state.height, tv_state.frame_rate, HDMI_NONINTERLACED, (EDID_MODE_MATCH_FLAG_T)(HDMI_MODE_MATCH_FRAMERATE| HDMI_MODE_MATCH_RESOLUTION|HDMI_MODE_MATCH_SCANMODE) ); } #endif LOG_VERBOSE(LOG_TAG, "Closing players..."); #ifdef ENABLE_SUBTITLES m_player_subtitles->Close(); #endif m_player_video->Close(); m_player_audio->Close(); if (m_omx_pkt) { m_omx_reader->FreePacket(m_omx_pkt); m_omx_pkt = NULL; } LOG_VERBOSE(LOG_TAG, "Closing players..."); m_omx_reader->Close(); m_metadata.clear(); emit metadataChanged(m_metadata); vc_tv_show_info(0); // lcarlon: free the texture. Invoke freeTexture so that it is the user // of the class to do it cause it is commonly required to do it in the // current OpenGL and EGL context. Do it here, after the stop command is // considered finished: this is needed to avoid hardlock in case the // used wants to free the texture in his own thread, which would still // be blocked waiting for the stop command to finish. LOG_VERBOSE(LOG_TAG, "Freeing texture..."); m_provider->freeTexture(m_textureData); m_textureData = NULL; emit textureInvalidated(); LOG_INFORMATION(LOG_TAG, "Cleanup done."); }
/*----------------------------------------------------------------------
|    handlerSigint
+---------------------------------------------------------------------*/
// SIGINT handler: logs and asks the Qt application event loop to quit.
// NOTE(review): the banner previously said "handlerSigterm", which did not
// match the function name; fixed (comment only).
// NOTE(review): LOG_INFORMATION and qApp->quit() are not async-signal-safe;
// Qt recommends the self-pipe/socketpair pattern for Unix signals — confirm
// whether this is acceptable for this application.
void handlerSigint(int sig) {
   Q_UNUSED(sig);
   LOG_INFORMATION(LOG_TAG, "Terminating...");
   qApp->quit();
}
/*------------------------------------------------------------------------------
|    OMX_MediaProcessor::pause
+-----------------------------------------------------------------------------*/
// Pauses playback: sets the paused state, switches the play speed to the
// pause speed and pauses the OMX clock, then blocks until the decoding
// thread acknowledges any pending pause request.
// NOTE(review): another pause() variant elsewhere in this file returns true
// for STATE_INACTIVE/STATE_STOPPED/STATE_PAUSED; this one returns false for
// the first two and re-issues the pause when already paused — confirm which
// semantics are intended.
bool OMX_MediaProcessor::pause() {
   LOG_VERBOSE(LOG_TAG, "Pause");
   // Serializes user commands (play/pause/stop) against each other.
   QMutexLocker locker(&m_sendCmd);
   if (!checkCurrentThread())
      return false;
   switch (m_state) {
   case STATE_INACTIVE:
   case STATE_STOPPED:
      return false;
   case STATE_PAUSED:
   case STATE_PLAYING:
      break;
   default:
      return false;
   }
   // NOTE(review): writes m_state directly instead of setState() as the
   // other command methods do — verify state-change notifications.
   m_state = STATE_PAUSED;
   setSpeed(OMX_PLAYSPEED_PAUSE);
   m_av_clock->OMXPause();

   // Wait for command completion.
   m_mutexPending.lock();
   if (m_pendingPause) {
      LOG_VERBOSE(LOG_TAG, "Waiting for the pause command to finish.");
      m_waitPendingCommand.wait(&m_mutexPending);
   }
   m_mutexPending.unlock();
   LOG_INFORMATION(LOG_TAG, "Pause command issued.");
   return true;
}
/*------------------------------------------------------------------------------ | OMX_MediaProcessor::stop +-----------------------------------------------------------------------------*/ bool OMX_MediaProcessor::stop() { LOG_VERBOSE(LOG_TAG, "Stop"); QMutexLocker locker(&m_sendCmd); if (!checkCurrentThread()) return false; switch (m_state) { case STATE_INACTIVE: return false; case STATE_PAUSED: case STATE_PLAYING: case STATE_STOPPED: break; m_state = STATE_STOPPED; return true; default: return false; } m_pendingStop = true; m_state = STATE_STOPPED; // Wait for command completion. m_mutexPending.lock(); if (m_pendingStop) { LOG_VERBOSE(LOG_TAG, "Waiting for the stop command to finish."); m_waitPendingCommand.wait(&m_mutexPending); } m_mutexPending.unlock(); LOG_INFORMATION(LOG_TAG, "Stop command issued."); return true; }
/*------------------------------------------------------------------------------
|    OMX_MediaProcessor::play
+-----------------------------------------------------------------------------*/
// Starts or resumes playback. From STATE_STOPPED it rewinds the reader,
// flushes the streams, re-arms the OMX clock and (re)launches the decoding
// thread via a queued invocation of mediaDecoding(); from STATE_PAUSED it
// restores the play speed and resumes the clock. Returns true when playback
// is (already) running, false on failure.
bool OMX_MediaProcessor::play() {
   // I need to invoke this in another thread (this object is owned by another
   // thread).
   LOG_VERBOSE(LOG_TAG, "Play");
   QMutexLocker locker(&m_sendCmd);
   if (!checkCurrentThread())
      return false;
   switch (m_state) {
   case STATE_INACTIVE:
      return true;
   case STATE_PAUSED:
      break;
   case STATE_PLAYING:
      return true;
   case STATE_STOPPED: {
      setState(STATE_PLAYING);
#if 1
      // Restart from the beginning of the stream.
      // NOTE(review): startpts is not declared locally — presumably a
      // member/global; confirm it is reset appropriately between plays.
      if (!m_omx_reader->SeekTime(0, true, &startpts)) {
         LOG_WARNING(LOG_TAG, "Failed to seek to the beginning.");
         return false;
      }
      flushStreams(startpts);
#endif
      //m_av_clock->OMXStart(0.0);
      // Pause -> Execute -> Resume sequence re-arms the OMX clock component.
      m_av_clock->OMXPause();
      m_av_clock->OMXStateExecute();
      m_av_clock->OMXResume();
      //m_av_clock->OMXMediaTime(0.0D);
      LOG_VERBOSE(LOG_TAG, "Starting thread.");
      // Queued so mediaDecoding() runs on this object's owning thread.
      return QMetaObject::invokeMethod(this, "mediaDecoding");
   }
   default:
      return false;
   }

   // Resume path (we were STATE_PAUSED).
   setState(STATE_PLAYING);
   if (m_av_clock->OMXPlaySpeed() != DVD_PLAYSPEED_NORMAL && m_av_clock->OMXPlaySpeed() != DVD_PLAYSPEED_PAUSE) {
      // Coming back from trick-play: restore normal speed and flush on seek.
      LOG_VERBOSE(LOG_TAG, "resume\n");
      m_playspeedCurrent = playspeed_normal;
      setSpeed(playspeeds[m_playspeedCurrent]);
      m_seekFlush = true;
   }
   // NOTE(review): setSpeed is invoked again unconditionally here — redundant
   // when the branch above already ran; confirm whether intentional.
   setSpeed(playspeeds[m_playspeedCurrent]);
   m_av_clock->OMXResume();
#ifdef ENABLE_SUBTITLES
   if (m_has_subtitle)
      m_player_subtitles.Resume();
#endif
   LOG_INFORMATION(LOG_TAG, "Play command issued.");
   return true;
}
void GLWidget::makeObject() { static const int coords[6][4][3] = { { { +1, -1, -1 }, { -1, -1, -1 }, { -1, +1, -1 }, { +1, +1, -1 } }, { { +1, +1, -1 }, { -1, +1, -1 }, { -1, +1, +1 }, { +1, +1, +1 } }, { { +1, -1, +1 }, { +1, -1, -1 }, { +1, +1, -1 }, { +1, +1, +1 } }, { { -1, -1, -1 }, { -1, -1, +1 }, { -1, +1, +1 }, { -1, +1, -1 } }, { { +1, -1, +1 }, { -1, -1, +1 }, { -1, -1, -1 }, { +1, -1, -1 } }, { { -1, -1, +1 }, { +1, -1, +1 }, { +1, +1, +1 }, { -1, +1, +1 } } }; QElapsedTimer timer; timer.start(); #ifndef DISABLED_OPENMAX //loadWithOmx(); QPlatformNativeInterface* nativeInterface = QGuiApplicationPrivate::platformIntegration()->nativeInterface(); Q_ASSERT(nativeInterface); EGLDisplay eglDisplay = nativeInterface->nativeResourceForIntegration("egldisplay"); EGLContext eglContext = nativeInterface->nativeResourceForContext("eglcontext", QOpenGLContext::currentContext()); #if 0 eglImageVideo = getEGLImage(1920, 1080, eglDisplay, eglContext, textures[0]); #endif for (int i = 0; i < 5; i++) textures[i] = 0; //QtConcurrent::run(video_decode_test, videoPath, eglImageVideo, eglDisplay); m_videoProc = new OMX_VideoProcessor(eglDisplay, eglContext, m_provider); connect(m_videoProc, SIGNAL(textureReady(uint)), this, SLOT(onTextureChanged(uint))); m_videoProc->setVideoPath("/home/pi/out.h264"); m_videoProc->play(); #else for (int i = 0; i < 6; i++) { QPixmap pixmap(QString("%1%2.jpg").arg(prefix).arg(i)); if (pixmap.isNull()) LOG_ERROR(LOG_TAG, "Failed to load image!"); textures[i] = bindTexture(pixmap, GL_TEXTURE_2D, GL_RGBA); } #endif LOG_INFORMATION(LOG_TAG, "Elapsed: %lld.", timer.elapsed()); for (int i = 0; i < 6; ++i) { for (int j = 0; j < 4; ++j) { texCoords.append (QVector2D(j == 0 || j == 3, j == 0 || j == 1)); vertices.append (QVector3D(0.2 * coords[i][j][0], 0.2 * coords[i][j][1], 0.2 * coords[i][j][2])); } } }
void GLWidget::loadWithOmx() { LOG_VERBOSE(LOG_TAG, "Loading with OMX."); QPlatformNativeInterface* nativeInterface = QGuiApplicationPrivate::platformIntegration()->nativeInterface(); Q_ASSERT(nativeInterface); EGLDisplay eglDisplay = nativeInterface->nativeResourceForIntegration("egldisplay"); EGLContext eglContext = nativeInterface->nativeResourceForContext("eglcontext", QOpenGLContext::currentContext()); for (int i = 5; i < 6; i++) { QString fileAbsPath = QString("%1%2.jpg").arg(prefix).arg(i); OpenMAXILTextureLoader* omTextureLoader = OpenMAXILTextureLoader::intance(); if (!omTextureLoader->loadTextureFromImage(fileAbsPath, eglDisplay, eglContext, textures[i])) { LOG_ERROR(LOG_TAG, "Failed to load image."); } else { LOG_INFORMATION(LOG_TAG, "Image %s successfully decoded and loaded.", qPrintable(fileAbsPath)); } } }
/*------------------------------------------------------------------------------ | OMX_MediaProcessor::pause +-----------------------------------------------------------------------------*/ bool OMX_MediaProcessor::pause() { LOG_VERBOSE(LOG_TAG, "Pause"); QMutexLocker locker(&m_sendCmd); if (!checkCurrentThread()) return false; switch (m_state) { case STATE_INACTIVE: case STATE_STOPPED: return true; case STATE_PAUSED: return true; case STATE_PLAYING: break; default: return false; } #ifdef ENABLE_SUBTITLES if (m_has_subtitle) m_player_subtitles.Pause(); #endif setState(STATE_PAUSED); setSpeed(playspeeds[m_playspeedCurrent]); m_av_clock->OMXPause(); // Wait for command completion. m_mutexPending.lock(); if (m_pendingPause) { LOG_VERBOSE(LOG_TAG, "Waiting for the pause command to finish."); m_waitPendingCommand.wait(&m_mutexPending); } m_mutexPending.unlock(); LOG_INFORMATION(LOG_TAG, "Pause command issued."); return true; }
/*---------------------------------------------------------------------- | main +---------------------------------------------------------------------*/ int main(int argc, char** argv) { QApplication a(argc, argv); QStringList arguments = a.arguments(); if (arguments.size() < 2) { LOG_ERROR(LOG_TAG, "Wrong syntax."); return -1; } if (!QFile(arguments.at(1)).exists()) { LOG_ERROR(LOG_TAG, "Input file does not exist."); return -1; } // Init codecs. av_register_all(); // Open input file. if (avformat_open_input(&fmt_ctx, argv[1], NULL, NULL) < 0) { LOG_ERROR(LOG_TAG, "Could not open source file %s.", argv[1]); return -1; } // Retrieve stream information. if (avformat_find_stream_info(fmt_ctx, NULL) < 0) { LOG_ERROR(LOG_TAG, "Could not find stream information."); return -1; } if (open_codec_context(&video_stream_idx, fmt_ctx, AVMEDIA_TYPE_VIDEO) >= 0) { video_stream = fmt_ctx->streams[video_stream_idx]; video_dec_ctx = video_stream->codec; #if 0 video_dst_file = fopen(video_dst_filename, "wb"); if (!video_dst_file) { fprintf(stderr, "Could not open destination file %s\n", video_dst_filename); ret = 1; goto end; } /* allocate image where the decoded image will be put */ ret = av_image_alloc(video_dst_data, video_dst_linesize, video_dec_ctx->width, video_dec_ctx->height, video_dec_ctx->pix_fmt, 1); if (ret < 0) { fprintf(stderr, "Could not allocate raw video buffer\n"); goto end; } video_dst_bufsize = ret; #endif } if (open_codec_context(&audio_stream_idx, fmt_ctx, AVMEDIA_TYPE_AUDIO) >= 0) { int nb_planes; audio_stream = fmt_ctx->streams[audio_stream_idx]; audio_dec_ctx = audio_stream->codec; #if 0 audio_dst_file = fopen(audio_dst_filename, "wb"); if (!audio_dst_file) { fprintf(stderr, "Could not open destination file %s\n", video_dst_filename); ret = 1; goto end; } nb_planes = av_sample_fmt_is_planar(audio_dec_ctx->sample_fmt) ? 
audio_dec_ctx->channels : 1; audio_dst_data = av_mallocz(sizeof(uint8_t *) * nb_planes); if (!audio_dst_data) { fprintf(stderr, "Could not allocate audio data buffers\n"); ret = AVERROR(ENOMEM); goto end; } #endif } // Dump input information to stderr. av_dump_format(fmt_ctx, 0, argv[1], 0); // initialize packet, set data to NULL, let the demuxer fill it. av_init_packet(&pkt); pkt.data = NULL; pkt.size = 0; QFile outFile(argv[2]); if (!outFile.open(QIODevice::ReadWrite)) { LOG_ERROR(LOG_TAG, "Cannot create output file."); return -1; } while (av_read_frame(fmt_ctx, &pkt) >= 0) { if (pkt.stream_index == audio_stream_idx) { LOG_VERBOSE(LOG_TAG, "Frame read."); } else continue; outFile.write((const char*)pkt.data, pkt.size); } LOG_INFORMATION(LOG_TAG, "Data written successfully."); outFile.close(); // Cleanup. if (video_dec_ctx) avcodec_close(video_dec_ctx); if (audio_dec_ctx) avcodec_close(audio_dec_ctx); avformat_close_input(&fmt_ctx); return 0; }
/*------------------------------------------------------------------------------
|    OMX_MediaProcessor::cleanup
+-----------------------------------------------------------------------------*/
// Full teardown run by the decoding thread when playback ends (EOF or stop):
// optionally drains the players, stops the OMX clock, closes players and
// reader, invalidates the texture, then marks the state STOPPED and wakes
// any thread blocked in stop(). NOTE(review): the banner previously named
// OMX_VideoProcessor; fixed (comment only).
void OMX_MediaProcessor::cleanup() {
   LOG_INFORMATION(LOG_TAG, "Cleaning up...");

   // On natural EOF (no pending stop request) let the players drain their
   // buffered data before tearing anything down.
   if (!m_pendingStop /* && !g_abort */) {
      LOG_VERBOSE(LOG_TAG, "Waiting for audio completion...");
      if (m_has_audio)
         m_player_audio->WaitCompletion();
      LOG_VERBOSE(LOG_TAG, "Waiting for video completion...");
      if (m_has_video)
         m_player_video->WaitCompletion();
   }

#if 0
   // Disabled: restore the best HDMI mode on teardown.
   if (m_refresh) {
      m_BcmHost.vc_tv_hdmi_power_on_best(
               tv_state.width,
               tv_state.height,
               tv_state.frame_rate,
               HDMI_NONINTERLACED,
               (EDID_MODE_MATCH_FLAG_T)(HDMI_MODE_MATCH_FRAMERATE|
               HDMI_MODE_MATCH_RESOLUTION|HDMI_MODE_MATCH_SCANMODE)
               );
   }
#endif

   LOG_VERBOSE(LOG_TAG, "Stopping OMX clock...");
   m_av_clock->OMXStop();
   m_av_clock->OMXStateIdle();

   LOG_VERBOSE(LOG_TAG, "Closing players...");
#ifdef ENABLE_SUBTITLES
   m_player_subtitles->Close();
#endif
   m_player_video->Close();
   m_player_audio->Close();

   // Return the packet that was in flight when decoding stopped.
   if (m_omx_pkt) {
      m_omx_reader.FreePacket(m_omx_pkt);
      m_omx_pkt = NULL;
   }

   // NOTE(review): duplicated message — this closes the reader, not the
   // players (log string left unchanged in this comment-only edit).
   LOG_VERBOSE(LOG_TAG, "Closing players...");
   m_omx_reader.Close();

   vc_tv_show_info(0);

   // lcarlon: this should only be done in object destructor.
   //LOG_VERBOSE(LOG_TAG, "Deinitializing engines...");
   //m_OMX.Deinitialize();
   //m_RBP.Deinitialize();

   // lcarlon: free the texture.
   LOG_VERBOSE(LOG_TAG, "Freeing texture...");
   emit textureInvalidated();
#if 0
   QMetaObject::invokeMethod(
            (QObject*)m_provider,
            "freeTexture",
            Qt::QueuedConnection,
            Q_ARG(OMX_TextureData*, m_textureData)
            );
#endif
   m_textureData = NULL;

   // Actually change the state here and reset flags.
   m_state = STATE_STOPPED;
   // Wake any caller blocked in stop() waiting for the pending stop.
   m_mutexPending.lock();
   if (m_pendingStop) {
      m_pendingStop = false;
      m_waitPendingCommand.wakeAll();
   }
   m_mutexPending.unlock();
   LOG_INFORMATION(LOG_TAG, "Cleanup done.");
}
/*------------------------------------------------------------------------------
|    OMX_MediaProcessor::mediaDecoding
+-----------------------------------------------------------------------------*/
// Main decoding loop, run on this object's thread via play(). Pulls packets
// from the reader and routes them to the video/audio/subtitle players, with
// a simple audio-driven buffering state machine. Exits on a pending stop,
// an audio player error, or EOF with drained caches, then runs cleanup().
void OMX_MediaProcessor::mediaDecoding() {
   LOG_VERBOSE(LOG_TAG, "Decoding thread started.");
   emit playbackStarted();

   // Wall-clock bounds for the buffering timeout below.
   struct timespec starttime, endtime;
   while (!m_pendingStop) {
      // If a request is pending then consider done here.
      m_mutexPending.lock();
      if (m_pendingPause) {
         m_waitPendingCommand.wakeAll();
         m_pendingPause = false;
      }
      m_mutexPending.unlock();

      // TODO: Use a semaphore instead.
      // Busy-wait while paused (2 ms poll).
      if (m_state == STATE_PAUSED) {
         OMXClock::OMXSleep(2);
         continue;
      }

#if 0
      // TODO: Reimplement?
      // Disabled seek handling inherited from omxplayer.
      if (m_incr != 0 && !m_bMpeg) {
         int seek_flags = 0;
         double seek_pos = 0;
         double pts = 0;

         pts = m_av_clock->GetPTS();

         seek_pos = (pts / DVD_TIME_BASE) + m_incr;
         seek_flags = m_incr < 0.0f ? AVSEEK_FLAG_BACKWARD : 0;

         seek_pos *= 1000.0f;

         m_incr = 0;

         if(m_omx_reader.SeekTime(seek_pos, seek_flags, &startpts))
            FlushStreams(startpts);

         m_player_video->Close();
         if(m_has_video && !m_player_video->Open(m_hints_video, m_av_clock, m_Deinterlace, m_bMpeg,
                                                 m_hdmi_clock_sync, m_thread_player, m_display_aspect))
            goto do_exit;
      }
#endif

      // TODO: Better error handling.
      if (m_player_audio->Error()) {
         LOG_ERROR(LOG_TAG, "Audio player error. emergency exit!");
         break;
      }

      // Debug-only stats dump, compiled out via if (false).
      if (false) {
         LOG_INFORMATION(LOG_TAG, "V : %8.02f %8d %8d A : %8.02f %8.02f Cv : %8d Ca : %8d",
                         m_av_clock->OMXMediaTime(), m_player_video->GetDecoderBufferSize(),
                         m_player_video->GetDecoderFreeSpace(), m_player_audio->GetCurrentPTS() / DVD_TIME_BASE,
                         m_player_audio->GetDelay(), m_player_video->GetCached(), m_player_audio->GetCached());
      }

      // EOF with no packet in flight: exit once both players' caches drain.
      if (m_omx_reader.IsEof() && !m_omx_pkt) {
         if (!m_player_audio->GetCached() && !m_player_video->GetCached())
            break;

         // Abort audio buffering, now we're on our own.
         if (m_buffer_empty)
            m_av_clock->OMXResume();

         OMXClock::OMXSleep(10);
         continue;
      }

      /* when the audio buffer runs under 0.1 seconds we buffer up */
      if (m_has_audio) {
         // Pause the clock and start the buffering window.
         if (m_player_audio->GetDelay() < 0.1f && !m_buffer_empty) {
            if (!m_av_clock->OMXIsPaused()) {
               m_av_clock->OMXPause();
               LOG_VERBOSE(LOG_TAG, "Buffering starts.");
               m_buffer_empty = true;
               clock_gettime(CLOCK_REALTIME, &starttime);
            }
         }
         // Enough audio queued again (75% of the target): resume.
         if (m_player_audio->GetDelay() > (AUDIO_BUFFER_SECONDS * 0.75f) && m_buffer_empty) {
            if (m_av_clock->OMXIsPaused()) {
               m_av_clock->OMXResume();
               LOG_VERBOSE(LOG_TAG, "Buffering ends.");
               m_buffer_empty = false;
            }
         }
         // Give up buffering after BUFFERING_TIMEOUT_S seconds.
         if (m_buffer_empty) {
            clock_gettime(CLOCK_REALTIME, &endtime);
            if ((endtime.tv_sec - starttime.tv_sec) > BUFFERING_TIMEOUT_S) {
               m_buffer_empty = false;
               m_av_clock->OMXResume();
               LOG_WARNING(LOG_TAG, "Buffering timed out.");
            }
         }
      }

      // Fetch the next packet only when the previous one was accepted.
      if (!m_omx_pkt)
         m_omx_pkt = m_omx_reader.Read();

      // Route the packet; on a full player queue keep the packet and retry
      // after a short sleep.
      if (m_has_video && m_omx_pkt && m_omx_reader.IsActive(OMXSTREAM_VIDEO, m_omx_pkt->stream_index)) {
         if (m_player_video->AddPacket(m_omx_pkt))
            m_omx_pkt = NULL;
         else
            OMXClock::OMXSleep(10);
#if 0
         // TODO: Reimplement?
         // Disabled on-screen fifo bars from omxplayer.
         if(m_tv_show_info) {
            char response[80];
            vc_gencmd(response, sizeof response, "render_bar 4 video_fifo %d %d %d %d",
                      m_player_video->GetDecoderBufferSize()-m_player_video->GetDecoderFreeSpace(),
                      0 , 0, m_player_video->GetDecoderBufferSize());
            vc_gencmd(response, sizeof response, "render_bar 5 audio_fifo %d %d %d %d",
                      (int)(100.0*m_player_audio->GetDelay()), 0, 0, 100*AUDIO_BUFFER_SECONDS);
         }
#endif
      }
      else if (m_has_audio && m_omx_pkt && m_omx_pkt->codec_type == AVMEDIA_TYPE_AUDIO) {
         if (m_player_audio->AddPacket(m_omx_pkt))
            m_omx_pkt = NULL;
         else
            OMXClock::OMXSleep(10);
      }
#ifdef ENABLE_SUBTITLES
      else if (m_omx_pkt && m_omx_reader.IsActive(OMXSTREAM_SUBTITLE, m_omx_pkt->stream_index)) {
         // Only text/SSA subtitle payloads are forwarded; others dropped.
         if (m_omx_pkt->size && ENABLE_SUBTITLES &&
             (m_omx_pkt->hints.codec == CODEC_ID_TEXT ||
              m_omx_pkt->hints.codec == CODEC_ID_SSA)) {
            if(m_player_subtitles->AddPacket(m_omx_pkt))
               m_omx_pkt = NULL;
            else
               OMXClock::OMXSleep(10);
         }
         else {
            m_omx_reader.FreePacket(m_omx_pkt);
            m_omx_pkt = NULL;
         }
      }
#endif
      else {
         // Packet belongs to no handled stream: release it.
         if (m_omx_pkt) {
            m_omx_reader.FreePacket(m_omx_pkt);
            m_omx_pkt = NULL;
         }
      }
   }

   emit playbackCompleted();
   cleanup();
}