/* main loop or GStreamer streaming thread */
static void schedule_frame(SpiceGstDecoder *decoder)
{
    guint32 now = stream_get_time(decoder->base.stream);
    g_mutex_lock(&decoder->queues_mutex);

    /* Keep going until either a timer has been armed for the next frame or
     * the display queue is empty; late frames are dropped along the way.
     */
    while (!decoder->timer_id) {
        SpiceGstFrame *gstframe = g_queue_peek_head(decoder->display_queue);
        if (!gstframe) {
            break;
        }

        /* mm_time is a guint32 clock that wraps around, so compare with
         * spice_mmtime_diff() instead of '<' (consistent with
         * mjpeg_decoder_schedule()): a plain comparison would treat an
         * on-time frame as billions of milliseconds late across the wrap.
         */
        if (spice_mmtime_diff(now, gstframe->frame->mm_time) < 0) {
            /* Frame is in the future: arm a timer to display it on time. */
            decoder->timer_id = g_timeout_add(gstframe->frame->mm_time - now,
                                              display_frame, decoder);
        } else if (g_queue_get_length(decoder->display_queue) == 1) {
            /* Still attempt to display the least out of date frame so the
             * video is not completely frozen for an extended period of time.
             */
            decoder->timer_id = g_timeout_add(0, display_frame, decoder);
        } else {
            SPICE_DEBUG("%s: rendering too late by %u ms (ts: %u, mmtime: %u), dropping",
                        __FUNCTION__, now - gstframe->frame->mm_time,
                        gstframe->frame->mm_time, now);
            stream_dropped_frame_on_playback(decoder->base.stream);
            g_queue_pop_head(decoder->display_queue);
            free_gst_frame(gstframe);
            /* Loop again to reexamine the new queue head. */
        }
    }

    g_mutex_unlock(&decoder->queues_mutex);
}
/* Arranges for the next pending MJPEG frame to be decoded at its
 * presentation time, dropping any frames that are already too late.
 */
static void mjpeg_decoder_schedule(MJpegDecoder *decoder)
{
    /* A decode is already scheduled: nothing to do. */
    if (decoder->timer_id) {
        return;
    }

    guint32 now = stream_get_time(decoder->base.stream);

    /* Start with the frame that was deferred last time, if any, then fall
     * back to the queue of pending frames.
     */
    SpiceFrame *candidate = decoder->cur_frame;
    decoder->cur_frame = NULL;
    if (candidate == NULL) {
        candidate = g_queue_pop_head(decoder->msgq);
    }

    while (candidate != NULL) {
        if (spice_mmtime_diff(now, candidate->mm_time) <= 0) {
            /* On time (or early): keep it and arm a timer to decode it at
             * its presentation time.
             */
            decoder->cur_frame = candidate;
            decoder->timer_id = g_timeout_add(candidate->mm_time - now,
                                              mjpeg_decoder_decode_frame,
                                              decoder);
            return;
        }

        /* Too late to be displayed: drop it and look at the next one. */
        SPICE_DEBUG("%s: rendering too late by %u ms (ts: %u, mmtime: %u), dropping ",
                    __FUNCTION__, now - candidate->mm_time,
                    candidate->mm_time, now);
        stream_dropped_frame_on_playback(decoder->base.stream);
        free_spice_frame(candidate);
        candidate = g_queue_pop_head(decoder->msgq);
    }
}
/* spice_gst_decoder_queue_frame() queues the SpiceFrame for decoding and
 * displaying. The steps it goes through are as follows:
 *
 * 1) A SpiceGstFrame is created to keep track of SpiceFrame and some additional
 *    metadata. The SpiceGstFrame is then pushed to the decoding_queue.
 * 2) frame->data, which contains the compressed frame data, is reffed and
 *    wrapped in a GstBuffer which is pushed to the GStreamer pipeline for
 *    decoding.
 * 3) As soon as the GStreamer pipeline no longer needs the compressed frame it
 *    will call frame->unref_data() to free it.
 * 4) Once the decompressed frame is available the GStreamer pipeline calls
 *    new_sample() in the GStreamer thread.
 * 5) new_sample() then matches the decompressed frame to a SpiceGstFrame from
 *    the decoding queue using the GStreamer timestamp information to deal with
 *    dropped frames. The SpiceGstFrame is popped from the decoding_queue.
 * 6) new_sample() then attaches the decompressed frame to the SpiceGstFrame,
 *    pushes it to the display_queue and calls schedule_frame().
 * 7) schedule_frame() then uses gstframe->frame->mm_time to arrange for
 *    display_frame() to be called, in the main thread, at the right time for
 *    the next frame.
 * 8) display_frame() pops the first SpiceGstFrame from the display_queue and
 *    calls stream_display_frame().
 * 9) display_frame() then frees the SpiceGstFrame, which frees the SpiceFrame
 *    and decompressed frame with it.
 *
 * Ownership: this function takes ownership of @frame and guarantees it is
 * eventually freed on every path. Returns FALSE only on a fatal pipeline
 * error, telling the caller to stop the stream.
 */
static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
                                              SpiceFrame *frame, int latency)
{
    SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;

    if (frame->size == 0) {
        SPICE_DEBUG("got an empty frame buffer!");
        frame->free(frame);
        return TRUE;
    }

    /* mm_time is a wrapping guint32 clock: use spice_mmtime_diff() rather
     * than a plain '<' so a wraparound is not mistaken for a stream reset
     * (consistent with mjpeg_decoder_schedule()).
     */
    if (spice_mmtime_diff(frame->mm_time, decoder->last_mm_time) < 0) {
        SPICE_DEBUG("new-frame-time < last-frame-time (%u < %u):"
                    " resetting stream",
                    frame->mm_time, decoder->last_mm_time);
        /* Let GStreamer deal with the frame anyway */
    }
    decoder->last_mm_time = frame->mm_time;

    if (latency < 0 &&
        decoder->base.codec_type == SPICE_VIDEO_CODEC_TYPE_MJPEG) {
        /* Dropping MJPEG frames has no impact on those that follow and
         * saves CPU so do it.
         */
        SPICE_DEBUG("dropping a late MJPEG frame");
        frame->free(frame);
        return TRUE;
    }

    if (decoder->pipeline == NULL) {
        /* An error occurred, causing the GStreamer pipeline to be freed */
        spice_warning("An error occurred, stopping the video stream");
        /* We own the frame: free it here too, like the other early returns,
         * instead of leaking it.
         */
        frame->free(frame);
        return FALSE;
    }

    /* ref() the frame data for the buffer */
    frame->ref_data(frame->data_opaque);
    GstBuffer *buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS,
                                                    frame->data, frame->size,
                                                    0, frame->size,
                                                    frame->data_opaque,
                                                    frame->unref_data);

    GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
    /* Schedule the buffer at "pipeline now" plus the (non-negative) latency,
     * converted from milliseconds to nanoseconds.
     */
    GST_BUFFER_PTS(buffer) = gst_clock_get_time(decoder->clock) -
                             gst_element_get_base_time(decoder->pipeline) +
                             ((uint64_t)MAX(0, latency)) * 1000 * 1000;

    g_mutex_lock(&decoder->queues_mutex);
    g_queue_push_tail(decoder->decoding_queue, create_gst_frame(buffer, frame));
    g_mutex_unlock(&decoder->queues_mutex);

    if (gst_app_src_push_buffer(decoder->appsrc, buffer) != GST_FLOW_OK) {
        SPICE_DEBUG("GStreamer error: unable to push frame of size %u",
                    frame->size);
        stream_dropped_frame_on_playback(decoder->base.stream);
    }
    return TRUE;
}