// Feeds one media sample into the appsrc that belongs to the sample's track
// inside the WebKitMediaSrc element. Must be called on the main thread.
// The sample is silently dropped (with a log message) when no stream matches
// the track id, or when the track's SourceBuffer reports it is not ready for
// more samples yet.
void PlaybackPipeline::enqueueSample(RefPtr<MediaSample> mediaSample)
{
    ASSERT(WTF::isMainThread());

    AtomicString trackId = mediaSample->trackID();

    GST_TRACE("enqueing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT " duration: %" GST_TIME_FORMAT,
        trackId.string().utf8().data(), mediaSample->presentationTime().toFloat(),
        mediaSample->presentationSize().width(), mediaSample->presentationSize().height(),
        GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->presentationTime().toDouble())),
        GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->duration().toDouble())));

    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);

    if (!stream) {
        GST_WARNING("No stream!");
        return;
    }

    if (!stream->sourceBuffer->isReadyForMoreSamples(trackId)) {
        GST_DEBUG("enqueueSample: skip adding new sample for trackId=%s, SB is not ready yet", trackId.string().utf8().data());
        return;
    }

    GstElement* appsrc = stream->appsrc;
    GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(mediaSample.get());

    // Only samples that actually carry a GstBuffer are pushed downstream.
    if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
        GRefPtr<GstSample> gstSample = sample->sample();
        GstBuffer* buffer = gst_sample_get_buffer(gstSample.get());

        // The buffer is about to be handed to the playback pipeline, so it
        // must no longer be marked decode-only.
        GST_BUFFER_FLAG_UNSET(buffer, GST_BUFFER_FLAG_DECODE_ONLY);

        pushSample(GST_APP_SRC(appsrc), gstSample.get());
        // gst_app_src_push_sample() uses transfer-none for gstSample.

        // Record the PTS of the sample just handed to the appsrc.
        // (Fix: the previous code first copied stream->lastEnqueuedTime into a
        // local that was always overwritten before use — a dead read; assign
        // the new value directly instead.)
        stream->lastEnqueuedTime = sample->presentationTime();
    }
}
/** Pushes `num` consecutive samples from `inputSamples` one at a time. */
void pushSamples (const float* inputSamples, const int num) noexcept
{
    int index = 0;

    while (index < num)
        pushSample (inputSamples[index++]);
}