void PlaybackPipeline::enqueueSample(PassRefPtr<MediaSample> prsample)
{
    RefPtr<MediaSample> rsample = prsample;
    AtomicString trackId = rsample->trackID();

    TRACE_MEDIA_MESSAGE("enqueuing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT, trackId.string().utf8().data(), rsample->presentationTime().toFloat(), rsample->presentationSize().width(), rsample->presentationSize().height(), GST_TIME_ARGS(floatToGstClockTime(rsample->presentationTime().toDouble())));

    ASSERT(WTF::isMainThread());

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);

    if (!stream) {
        WARN_MEDIA_MESSAGE("No stream!");
        GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
        return;
    }

    GstElement* appsrc = stream->appsrc;
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(rsample.get());
    if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
        GstSample* gstsample = gst_sample_ref(sample->sample());
        GST_BUFFER_FLAG_UNSET(gst_sample_get_buffer(gstsample), GST_BUFFER_FLAG_DECODE_ONLY);
        push_sample(GST_APP_SRC(appsrc), gstsample);
        // gst_app_src_push_sample() uses transfer-none for gstsample
        gst_sample_unref(gstsample);
    }
}
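
The push_sample() helper called above is not included in this excerpt. A minimal, hypothetical sketch of what such a helper could look like follows (the caps refresh and the warning are assumptions, not the actual WebKit helper); the relevant point is that gst_app_src_push_sample() takes the sample with transfer-none semantics, which is why the call site keeps and later drops its own reference:

#include <gst/app/gstappsrc.h>

// Hypothetical sketch, not the real implementation: refresh the appsrc caps from
// the sample when they changed, then forward the sample. gst_app_src_push_sample()
// does not take ownership of the sample (transfer-none), so the caller unrefs it.
static void push_sample(GstAppSrc* appsrc, GstSample* sample)
{
    GstCaps* sampleCaps = gst_sample_get_caps(sample);   // transfer-none
    GstCaps* currentCaps = gst_app_src_get_caps(appsrc); // transfer-full

    if (sampleCaps && (!currentCaps || !gst_caps_is_equal(currentCaps, sampleCaps)))
        gst_app_src_set_caps(appsrc, sampleCaps);
    if (currentCaps)
        gst_caps_unref(currentCaps);

    GstFlowReturn result = gst_app_src_push_sample(appsrc, sample);
    if (result != GST_FLOW_OK)
        GST_WARNING("push_sample: gst_app_src_push_sample() returned %s", gst_flow_get_name(result));
}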
Example #2
void PlaybackPipeline::enqueueSample(RefPtr<MediaSample> mediaSample)
{
    ASSERT(WTF::isMainThread());

    AtomicString trackId = mediaSample->trackID();

    GST_TRACE("enqueing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT " duration: %" GST_TIME_FORMAT,
        trackId.string().utf8().data(), mediaSample->presentationTime().toFloat(),
        mediaSample->presentationSize().width(), mediaSample->presentationSize().height(),
        GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->presentationTime().toDouble())),
        GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->duration().toDouble())));

    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);

    if (!stream) {
        GST_WARNING("No stream!");
        return;
    }

    if (!stream->sourceBuffer->isReadyForMoreSamples(trackId)) {
        GST_DEBUG("enqueueSample: skip adding new sample for trackId=%s, SB is not ready yet", trackId.string().utf8().data());
        return;
    }

    GstElement* appsrc = stream->appsrc;
    MediaTime lastEnqueuedTime = stream->lastEnqueuedTime;

    GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(mediaSample.get());
    if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
        GRefPtr<GstSample> gstSample = sample->sample();
        GstBuffer* buffer = gst_sample_get_buffer(gstSample.get());
        lastEnqueuedTime = sample->presentationTime();

        GST_BUFFER_FLAG_UNSET(buffer, GST_BUFFER_FLAG_DECODE_ONLY);
        pushSample(GST_APP_SRC(appsrc), gstSample.get());
        // gst_app_src_push_sample() uses transfer-none for gstSample.

        stream->lastEnqueuedTime = lastEnqueuedTime;
    }
}
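
The Stream record returned by getStreamByTrackId() is also not shown. A rough sketch of the fields the examples above rely on, assuming a plain per-track struct inside WebKitMediaSrc (member names and types beyond the ones actually used here are assumptions):

// Hypothetical sketch of the per-track bookkeeping; only the members the examples
// touch are listed, everything else about the real struct is an assumption.
struct Stream {
    GstElement* appsrc { nullptr };                                   // per-track appsrc fed by pushSample()
    WebCore::SourceBufferPrivateGStreamer* sourceBuffer { nullptr };  // consulted via isReadyForMoreSamples()
    MediaTime lastEnqueuedTime { MediaTime::invalidTime() };          // PTS of the last sample handed to the appsrc
};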
void PlaybackPipeline::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample> > samples)
{
    ASSERT(WTF::isMainThread());

    if (samples.isEmpty()) {
        LOG_MEDIA_MESSAGE("No samples, trackId unknown");
        return;
    }

    AtomicString trackId = samples[0]->trackID();
    LOG_MEDIA_MESSAGE("flushAndEnqueueNonDisplayingSamples: trackId=%s PTS[0]=%f ... PTS[n]=%f", trackId.string().utf8().data(), samples[0]->presentationTime().toFloat(), samples[samples.size()-1]->presentationTime().toFloat());

    GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Flushing and re-enqueuing %zu samples for stream %s", samples.size(), trackId.string().utf8().data());

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);

    if (!stream) {
        GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
        return;
    }

    GstElement* appsrc = stream->appsrc;
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    // Actually no need to flush. The seek preparations have done it for us.

    for (Vector<RefPtr<MediaSample> >::iterator it = samples.begin(); it != samples.end(); ++it) {
        GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(it->get());
        if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
            GstSample* gstsample = gst_sample_ref(sample->sample());
            GST_BUFFER_FLAG_SET(gst_sample_get_buffer(gstsample), GST_BUFFER_FLAG_DECODE_ONLY);
            push_sample(GST_APP_SRC(appsrc), gstsample);
            // gst_app_src_push_sample() uses transfer-none for gstsample
            gst_sample_unref(gstsample);
        }
    }
}
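
The key difference between the two enqueue paths is GST_BUFFER_FLAG_DECODE_ONLY: enqueueSample() clears it so the sample is presented, while flushAndEnqueueNonDisplayingSamples() sets it so the re-enqueued samples are decoded (keeping decoder state consistent across a seek) but never rendered. A minimal standalone illustration of the flag macros involved:

#include <gst/gst.h>

int main(int argc, char** argv)
{
    gst_init(&argc, &argv);

    GstBuffer* buffer = gst_buffer_new();

    // Re-enqueued, non-displaying sample: decode it, but do not render it.
    GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY);
    g_assert(GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY));

    // Regular sample: clear the flag so sinks present it.
    GST_BUFFER_FLAG_UNSET(buffer, GST_BUFFER_FLAG_DECODE_ONLY);
    g_assert(!GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY));

    gst_buffer_unref(buffer);
    return 0;
}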