bool FileReader::setFileName(const char *path){ std::lock_guard<std::recursive_mutex> lock(mMutex); if(mFile) sf_close(mFile); mFile = NULL; mAudioInfo = AudioInfo(); if(!path) return false; SF_INFO info; mFile = sf_open(path, SFM_READ, &info); if(!mFile) return false; if(info.frames * info.channels <= 0) return false; mFileName = path; mAudioInfo.setSamplerate(info.samplerate); mAudioInfo.setSamples(info.frames * info.channels); mAudioInfo.setChannels(info.channels); /* std::cerr << "FILEREADER: " << std::endl; std::cerr << mAudioInfo.getSamplerate() << std::endl; std::cerr << mAudioInfo.getSamples() << std::endl; std::cerr << mAudioInfo.getChannels() << std::endl; */ return true; }
/// Preroll the playbin pipeline and extract stream metadata (duration,
/// audio/video presence) into *aInfo. Tries up to three flag combinations
/// (A+V, V only, A only) so a stream with one broken substream can still
/// play. On success the pipeline is left PLAYING so the appsinks start
/// filling. *aTags is always set to nullptr (tags are not extracted here).
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags)
{
  MOZ_ASSERT(OnTaskQueue());
  nsresult ret = NS_OK;

  /*
   * Parse MP3 headers before we kick off the GStreamer pipeline otherwise there
   * might be concurrent stream operations happening on both decoding and gstreamer
   * threads which will screw the GStreamer state machine.
   */
  LOG(LogLevel::Debug, "content-type: %s %s",
      mDecoder->GetResource()->GetContentType().get(),
      mDecoder->GetResource()->GetContentURL().get());
  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    ParseMP3Headers();
  }

  /* We do 3 attempts here: decoding audio and video, decoding video only,
   * decoding audio only. This allows us to play streams that have one broken
   * stream but that are otherwise decodeable.
   */
  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
                    static_cast<guint>(~GST_PLAY_FLAG_AUDIO),
                    static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
  guint default_flags, current_flags;
  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

  GstMessage* message = nullptr;
  for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
    current_flags = default_flags & flags[i];
    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

    /* reset filter caps to ANY */
    GstCaps* caps = gst_caps_new_any();
    GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);

    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);
    gst_caps_unref(caps);

    // Pick the filter belonging to the stream we are disabling this attempt.
    filter = nullptr;
    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");

    if (filter) {
      /* Little trick: set the target caps to "skip" so that playbin2 fails to
       * find a decoder for the stream we want to skip.
       */
      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr);
      g_object_set(filter, "caps", filterCaps, nullptr);
      gst_caps_unref(filterCaps);
      gst_object_unref(filter);
    }

    LOG(LogLevel::Debug, "starting metadata pipeline");
    if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
      LOG(LogLevel::Debug, "metadata pipeline state change failed");
      ret = NS_ERROR_FAILURE;
      continue;
    }

    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
     * prerolled and ready to play. Also watch for errors.
     */
    // Blocking wait (GST_CLOCK_TIME_NONE = no timeout) for preroll or error.
    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                 (GstMessageType)(GST_MESSAGE_ASYNC_DONE |
                                  GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) {
      LOG(LogLevel::Debug, "read metadata pipeline prerolled");
      gst_message_unref(message);
      ret = NS_OK;
      break;
    } else {
      LOG(LogLevel::Debug, "read metadata pipeline failed to preroll: %s",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

      if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
        GError* error;
        gchar* debug;
        gst_message_parse_error(message, &error, &debug);
        LOG(LogLevel::Error, "read metadata error: %s: %s", error->message, debug);
        g_error_free(error);
        g_free(debug);
      }
      /* Unexpected stream close/EOS or other error. We'll give up if all
       * streams are in error/eos. */
      gst_element_set_state(mPlayBin, GST_STATE_NULL);
      gst_message_unref(message);
      ret = NS_ERROR_FAILURE;
    }
  }

  if (NS_SUCCEEDED(ret))
    ret = CheckSupportedFormats();

  if (NS_FAILED(ret))
    /* we couldn't get this to play */
    return ret;

  /* report the duration */
  gint64 duration;

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // The MP3FrameParser has reported a duration; use that over the gstreamer
    // reported duration for inter-platform consistency.
    mUseParserDuration = true;
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mInfo.mMetadataDuration.emplace(TimeUnit::FromMicroseconds(mLastParserDuration));
  } else {
    LOG(LogLevel::Debug, "querying duration");
    // Otherwise use the gstreamer duration.
#if GST_VERSION_MAJOR >= 1
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
          GST_FORMAT_TIME, &duration)) {
#else
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration) && format == GST_FORMAT_TIME) {
#endif
      LOG(LogLevel::Debug, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
      duration = GST_TIME_AS_USECONDS (duration);
      mInfo.mMetadataDuration.emplace(TimeUnit::FromMicroseconds(duration));
    }
  }

  int n_video = 0, n_audio = 0;
  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);

  // A zero stream count means that stream is absent/disabled: reset its info.
  if (!n_video) {
    mInfo.mVideo = VideoInfo();
  }
  if (!n_audio) {
    mInfo.mAudio = AudioInfo();
  }
  *aInfo = mInfo;

  *aTags = nullptr;

  // Watch the pipeline for fatal errors
#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
#else
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
#endif

  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
   * the appsinks */
  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);

  return NS_OK;
}

/// A medium is considered seekable when the MP3 parser produced a duration,
/// or when GStreamer can answer a TIME-format duration query on the playbin.
bool GStreamerReader::IsMediaSeekable()
{
  if (mUseParserDuration) {
    return true;
  }

  gint64 duration;
#if GST_VERSION_MAJOR >= 1
  if (gst_element_query_duration(GST_ELEMENT(mPlayBin), GST_FORMAT_TIME,
                                 &duration)) {
#else
  GstFormat format = GST_FORMAT_TIME;
  if (gst_element_query_duration(GST_ELEMENT(mPlayBin), &format, &duration) &&
      format == GST_FORMAT_TIME) {
#endif
    return true;
  }

  return false;
}

/// Walk every element inside the playbin and verify each demuxer/decoder's
/// negotiated sink caps against GStreamerFormatHelper's whitelist.
/// Stops at the first unsupported element.
/// @return NS_OK when all elements are supported, NS_ERROR_FAILURE otherwise.
nsresult GStreamerReader::CheckSupportedFormats()
{
  bool done = false;
  bool unsupported = false;

  GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin));
  while (!done) {
    GstIteratorResult res;
    GstElement* element;

#if GST_VERSION_MAJOR >= 1
    GValue value = {0,};
    res = gst_iterator_next(it, &value);
#else
    res = gst_iterator_next(it, (void **) &element);
#endif
    switch(res) {
      case GST_ITERATOR_OK:
      {
#if GST_VERSION_MAJOR >= 1
        element = GST_ELEMENT (g_value_get_object (&value));
#endif
        GstElementFactory* factory = gst_element_get_factory(element);
        if (factory) {
          const char* klass = gst_element_factory_get_klass(factory);
          GstPad* pad = gst_element_get_static_pad(element, "sink");
          if (pad) {
            GstCaps* caps;

#if GST_VERSION_MAJOR >= 1
            caps = gst_pad_get_current_caps(pad);
#else
            caps = gst_pad_get_negotiated_caps(pad);
#endif

            if (caps) {
              /* check for demuxers but ignore elements like id3demux */
              if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps);
              else if (strstr (klass, "Decoder") && !strstr(klass, "Generic"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps);

              gst_caps_unref(caps);
            }
            gst_object_unref(pad);
          }
        }

#if GST_VERSION_MAJOR >= 1
        g_value_unset (&value);
#else
        gst_object_unref(element);
#endif
        // Stop iterating as soon as one unsupported element was found.
        done = unsupported;
        break;
      }
      case GST_ITERATOR_RESYNC:
        // Bin changed while iterating; previous verdicts are stale.
        unsupported = false;
        break;
      case GST_ITERATOR_ERROR:
        done = true;
        break;
      case GST_ITERATOR_DONE:
        done = true;
        break;
    }
  }

  gst_iterator_free(it);

  return unsupported ? NS_ERROR_FAILURE : NS_OK;
}

/// Flush both packet queues, clear the appsink buffer counters and EOS
/// flags, and (1.x only) re-arm buffer-pool alignment configuration.
nsresult GStreamerReader::ResetDecode()
{
  nsresult res = NS_OK;

  LOG(LogLevel::Debug, "reset decode");

  if (NS_FAILED(MediaDecoderReader::ResetDecode())) {
    res = NS_ERROR_FAILURE;
  }

  mVideoQueue.Reset();
  mAudioQueue.Reset();

  mVideoSinkBufferCount = 0;
  mAudioSinkBufferCount = 0;
  mReachedAudioEos = false;
  mReachedVideoEos = false;
#if GST_VERSION_MAJOR >= 1
  mConfigureAlignment = true;
#endif

  LOG(LogLevel::Debug, "reset decode done");

  return res;
}

/// Pull one decoded audio buffer from the audio appsink and push its samples
/// into mAudioCompactor. Returns false only at audio EOS with nothing queued;
/// true otherwise (including "nothing consumed, call me again" cases).
bool GStreamerReader::DecodeAudioData()
{
  MOZ_ASSERT(OnTaskQueue());

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedAudioEos && !mAudioSinkBufferCount) {
      return false;
    }

    /* Wait something to be decoded before return or continue */
    if (!mAudioSinkBufferCount) {
      if(!mVideoSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
         */
        mon.Wait();
        if (!mAudioSinkBufferCount) {
          /* There is still no audio data available, so either there is video data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mAudioAppSink);
#endif

    mAudioSinkBufferCount--;
  }

  // Convert the buffer's running time to stream time (monitor protects
  // mAudioSegment, which the streaming thread updates), then to usecs.
  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mAudioSegment, GST_FORMAT_TIME, timestamp);
  }
  timestamp = GST_TIME_AS_USECONDS(timestamp);

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  guint8* data;
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_READ);
  unsigned int size = info.size;
  data = info.data;
#else
  unsigned int size = GST_BUFFER_SIZE(buffer);
  data = GST_BUFFER_DATA(buffer);
#endif
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;

  typedef AudioCompactor::NativeCopy GstCopy;
  mAudioCompactor.Push(offset,
                       timestamp,
                       mInfo.mAudio.mRate,
                       frames,
                       mInfo.mAudio.mChannels,
                       GstCopy(data,
                               size,
                               mInfo.mAudio.mChannels));
#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
#endif

  gst_buffer_unref(buffer);

  return true;
}

/// Pull one decoded video buffer from the video appsink, apply keyframe-skip
/// and time-threshold filtering, wrap it in a VideoData and enqueue it.
/// Returns false only at video EOS with nothing queued; true otherwise.
/// @param aKeyFrameSkip   when true, drop non-keyframes.
/// @param aTimeThreshold  frames with timestamps (usecs) below this are skipped.
bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip, int64_t aTimeThreshold)
{
  MOZ_ASSERT(OnTaskQueue());

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedVideoEos && !mVideoSinkBufferCount) {
      return false;
    }

    /* Wait something to be decoded before return or continue */
    if (!mVideoSinkBufferCount) {
      if (!mAudioSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
         */
        mon.Wait();
        if (!mVideoSinkBufferCount) {
          /* There is still no video data available, so either there is audio data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

    mDecoder->NotifyDecodedFrames(0, 1, 0);

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
#endif
    mVideoSinkBufferCount--;
  }

  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if ((aKeyFrameSkip && !isKeyframe)) {
    // Count the frame as dropped and move on.
    mDecoder->NotifyDecodedFrames(0, 0, 1);
    gst_buffer_unref(buffer);
    return true;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mVideoSegment, GST_FORMAT_TIME, timestamp);
  }
  NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp), "frame has invalid timestamp");

  timestamp = GST_TIME_AS_USECONDS(timestamp);
  int64_t duration = 0;
  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
  else if (fpsNum && fpsDen)
    /* add 1-frame duration */
    duration = gst_util_uint64_scale(GST_USECOND, fpsDen, fpsNum);

  if (timestamp < aTimeThreshold) {
    LOG(LogLevel::Debug, "skipping frame %" GST_TIME_FORMAT " threshold %" GST_TIME_FORMAT,
        GST_TIME_ARGS(timestamp * 1000), GST_TIME_ARGS(aTimeThreshold * 1000));
    gst_buffer_unref(buffer);
    return true;
  }

  // NOTE(review): buffer cannot be null at this point (it was dereferenced
  // above); this guard looks vestigial — confirm before removing.
  if (!buffer)
    /* no more frames */
    return true;

#if GST_VERSION_MAJOR >= 1
  // One-time pickup of the buffer pool's video alignment so mVideoInfo
  // matches padded/aligned planes; re-armed by ResetDecode().
  if (mConfigureAlignment && buffer->pool) {
    GstStructure *config = gst_buffer_pool_get_config(buffer->pool);
    GstVideoAlignment align;
    if (gst_buffer_pool_config_get_video_alignment(config, &align))
      gst_video_info_align(&mVideoInfo, &align);
    gst_structure_free(config);
    mConfigureAlignment = false;
  }
#endif

  nsRefPtr<PlanarYCbCrImage> image = GetImageFromBuffer(buffer);
  if (!image) {
    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fallback to
     * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy.
     */
    GstBuffer* tmp = nullptr;
    CopyIntoImageBuffer(buffer, &tmp, image);
    gst_buffer_unref(buffer);
    buffer = tmp;
  }

  int64_t offset = mResource.Tell(); // Estimate location in media.
  nsRefPtr<VideoData> video = VideoData::CreateFromImage(mInfo.mVideo,
                                                         mDecoder->GetImageContainer(),
                                                         offset, timestamp, duration,
                                                         static_cast<Image*>(image.get()),
                                                         isKeyframe, -1, mPicture);
  mVideoQueue.Push(video);

  gst_buffer_unref(buffer);

  return true;
}
/// Report the audio format negotiated on input slot 0, or a
/// default-constructed AudioInfo when that slot has no valid input.
AudioInfo BaseAudioSink::getInfo() noexcept
{
    return checkInput(0) ? getInfoFromSlot(0) : AudioInfo();
}
/// Decode-and-play loop for Ogg Vorbis sources.
/// type == 0 reads a local file, type == 2 an internet stream via libcurl;
/// anything else is rejected. User-facing strings are Polish (e.g.
/// "Błąd odczytu pliku!" = "File read error!", "Nie można otworzyć
/// strumienia internetowego!" = "Cannot open internet stream!") and are
/// emitted verbatim through the ERROR/ERROR2 macros.
/// The loop runs until ov_read() returns 0 (EOF) or bolStop is raised,
/// then hands off to STOP().
void play( const char *fileE, int type )
{
	// sek/sek2 look like seek-in-progress flags shared with another thread
	// (sek is raised around ov_read below) — TODO confirm against the
	// plugin's seek handler.
	sek = 0; sek2 = 0;
	if ( type != 0 && type != 2 )
		return ERROR2( "Wtyczka obsługuje tylko odczyt plików i strumienia internetowego!" );
	if ( type == 2 )
	{
		// Internet playback needs the curl wrapper library loaded first.
		QString tmp = loadCURL();
		if ( !CURLloaded )
			return ERROR2( "Nie można załadować biblioteki: "+QMPInternetf+libExt+"\n"+tmp );
	}
	// Keep the saved position only when replaying the same source.
	if ( curF != fileE )
		clrPos();
	clrSet();
	curF = fileE;
	if ( type == 0 )
		fE = qmp_fopen(fileE,"rb");
	if ( type == 2 )
	{
		// "Czekaj, trwa otwieranie..." = "Please wait, opening..." (shown as title).
		title = "Czekaj, trwa otwieranie...";
		uF = url_fopen( fileE );
	}
	bool loaded(0);
	if ( type == 0 )
	{
		// ov_open returns 0 on success.
		loaded = !ov_open(fE,&musicfile,NULL,0);
		Internet = false;
	}
	if ( type == 2 )
	{
		if (uF)
			loaded = !ov_open_callbacks(uF, &musicfile, NULL, 0, OV_CALLBACKS_URL);
		else
			loaded = 0;
		Internet = true;
	}
	if ( !loaded )
	{
		if ( type == 2 )
			return ERROR( "Nie można otworzyć strumienia internetowego!" , 1 );
		else
			return ERROR( "Błąd odczytu pliku!" , 1 );
	}
	// Pull rate/channels/title metadata from the opened stream.
	OGGUpdate();
	if ( title.isEmpty() && type == 2 )
		title = fileE;
	// Use the player's own buffering for streams and for '//'-prefixed
	// (network-share-style) paths.
	if ( type == 2 || ( type == 0 && fileE[0] == '/' && fileE[1] == '/' ) )
		QOut->useQMPBuffer();
	// 16 = bits per sample handed to the output plugin.
	QOut->Init( AudioInfo( rate, 16, chn ), true, 0, fileE, title );
	if ( *QOut->PlErr )
	{
		// "Błąd zwraca wtyczka wyjściowa!" = "The output plugin returned an error!"
		*QOut->PlErr = false;
		return ERROR( "Błąd zwraca wtyczka wyjściowa!", 0 );
	}
	long s = 0;
	IsPlaying=true;
	char audio_buffer[BUF_SIZE];
	QTime tim_i_u;
	if ( Internet )
		tim_i_u.start();
	do
	{
		if ( bolStop )
			break;
		if ( !sek2 && !*QOut->mustReset && !IsPaused )
		{
			// Flag the decode call so concurrent seek logic can tell we are
			// inside ov_read (presumably — verify against the seek path).
			sek = 1;
			// Request host-endian 16-bit signed samples.
			s = ov_read( &musicfile, audio_buffer, BUF_SIZE, Q_BYTE_ORDER == Q_BIG_ENDIAN, 2, 1, 0 );
			sek = 0;
			QOut->Play( audio_buffer, s, pltime, false );
			OGGUpdate2(s);
		}
		else
			// Paused/resetting: keep the output plugin ticking with no data.
			QOut->Play(NULL,0,pltime,true);
		// For internet radio, refresh the stream title every 10 seconds.
		if ( Internet && tim_i_u.elapsed() >= 10000 )
		{
			tim_i_u.restart();
			QString tit;
			getMusInfo( musicfile, &tit, &Title, &Artist, NULL, &vorbisInfoStr );
			if ( !tit.isEmpty() )
				title = tit;
			updateF = true;
		}
	} while ( s != 0 ); // ov_read returns 0 at end of stream.
	STOP();
}
/// Decode-and-play loop for MP3 sources via libmpg123.
/// Global `Type` selects the source: 0 = local file (fd-based mpg123_open_fd),
/// 2 = internet stream (mpg123 feed API fed from url_fread). User-facing
/// strings are Polish ("Błąd odczytu pliku!" = "File read error!",
/// "Nie można otworzyć strumienia internetowego!" = "Cannot open internet
/// stream!"). Runs until MPG123_DONE, a hard decode error, a dry stream,
/// or bolStop, then hands off to STOP().
void play( const char *fileE, int )
{
	if ( Type != 0 && Type != 2 )
		return ERROR2( "Wtyczka obsługuje tylko odczyt plików i strumienia internetowego!" );
	if ( Type == 2 )
	{
		// Internet playback needs the curl wrapper library loaded first.
		QString tmp = loadCURL();
		if ( !CURLloaded )
			return ERROR2( "Nie można załadować biblioteki: "+QMPInternetf+libExt+"\n"+tmp );
	}
	// Keep the saved position only when replaying the same source.
	if ( fileE != curF )
		clrPos();
	clrSet();
	// Pre-select the error message ("BLAD" is Polish for "error") used by
	// every failure path below.
	QString BLAD;
	if ( Type == 0 )
		BLAD = "Błąd odczytu pliku!";
	else if ( Type == 2 )
		BLAD = "Nie można otworzyć strumienia internetowego!";
	musicfile = mpg123_new(NULL, NULL);
	if ( Type == 0 )
	{
		fd = qmp_open( fileE, O_RDONLY|O_BINARY );
		mpg123_open_fd( musicfile, fd );
	}
	if ( Type == 2 )
	{
		// "Czekaj, trwa otwieranie..." = "Please wait, opening..." (shown as title).
		title = "Czekaj, trwa otwieranie...";
		mpg123_open_feed( musicfile );
		f = url_fopen(fileE,0,1);
		if ( !f )
			return ERROR(BLAD);
		// Prime `data` with an initial network chunk; the +10/-10 offset
		// presumably skips past bytes getDataBuff already consumed
		// (ID3v2 header size?) — TODO confirm against getDataBuff.
		if ( !getDataBuff( f, url_fread, _DATA_BUFF, &data, &bolStop ) )
			return ERROR(BLAD);
		url_fread(data+10, 1, _DATA_BUFF-10, f);
		if ( getNetInfo( f, FILE_SIZE ) > 0.0 )
			knownSize = true;
		// Feed the first chunk to the decoder (feed mode: no output requested).
		mpg123_decode( musicfile, (const unsigned char*)data, _DATA_BUFF, 0,0,0 );
	}
	if ( !musicfile )
		return ERROR(BLAD);
	curF = fileE;
	// Read stream parameters (rate/chn/metadata) from the decoder.
	if ( !MP3Update() )
		return ERROR(BLAD);
	if ( Type == 2 && title.isEmpty() )
		title = fileE;
	// Use the player's own buffering for streams and for '//'-prefixed
	// (network-share-style) paths.
	if ( Type == 2 || ( Type == 0 && fileE[0] == '/' && fileE[1] == '/' ) )
		QOut->useQMPBuffer();
	// 16 = bits per sample handed to the output plugin.
	QOut->Init( AudioInfo( rate, 16, chn ), true, 0, fileE, /*title*/Title );
	if ( *QOut->PlErr )
	{
		// "Błąd zwraca wtyczka wyjściowa!" = "The output plugin returned an error!"
		*QOut->PlErr = false;
		return ERROR("Błąd zwraca wtyczka wyjściowa!");
	}
	IsPlaying = true;
	size_t bDecoded = 0;
	// Heap buffer (unlike the OGG path's stack buffer); presumably released
	// by STOP() — TODO confirm.
	audio_buffer = new char[BUF_SIZE];
	TMPint = 0;
	int musErr = 0;
	for(;;)
	{
		if ( bolStop )
			break;
		if ( !IsPaused )
		{
			if ( !*QOut->mustReset )
			{
				// Apply a pending seek request before the next read.
				if ( doSeek )
				{
					mpg123_seek( musicfile, doSeek, SEEK_SET );
					QOut->control( QOUT_NEXT_READ );
					doSeek = 0;
				}
				musErr = mpg123_read( musicfile, (unsigned char*)audio_buffer, BUF_SIZE, &bDecoded );
				if ( musErr == MPG123_DONE )
					break;
				// MPG123_ERR is fatal unless searchParts allows resyncing.
				if ( musErr == MPG123_ERR && !searchParts )
					break;
				QOut->Play( audio_buffer, bDecoded, pltime, false );
				MP3Update2( bDecoded );
				// Feed mode ran dry: pull more bytes from the network.
				if ( musErr == MPG123_NEED_MORE && Type != 0 )
				{
					int bread = url_fread( data, 1, _DATA_BUFF, f );
					// Give up on EOF or when no data and not told to wait;
					// otherwise back off briefly and retry.
					if ( ( !bread && !wait4Data ) || ( !bread && url_feof(f) ) )
						break;
					else if ( !bread )
						MYsleep( 25 );
					mpg123_decode( musicfile, (const unsigned char*)data, bread, NULL, 0, NULL );
				}
			}
			else
				// Output plugin resetting: keep it ticking with no data.
				QOut->Play(0,0,pltime,true);
		}
		else
			// Paused: keep the output plugin ticking with no data.
			QOut->Play(0,0,pltime,true);
	}
	STOP();
}