/* Drop any pending display timer and re-evaluate which frame to show next. */
static void spice_gst_decoder_reschedule(VideoDecoder *video_decoder)
{
    SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;
    guint pending = decoder->timer_id;

    if (pending != 0) {
        decoder->timer_id = 0;
        g_source_remove(pending);
    }
    schedule_frame(decoder);
}
/* GStreamer thread
 *
 * We cannot use GStreamer's signals because they are not always run in
 * the main context. So use a callback (lower overhead) and have it pull
 * the sample to avoid a race with free_pipeline(). This means queuing the
 * decoded frames outside GStreamer. So while we're at it, also schedule
 * the frame display ourselves in schedule_frame().
 */
/* appsink "new-sample" callback: moves the decoded frame matching the
 * sample's PTS from decoding_queue to display_queue (dropping any older
 * frames the pipeline skipped), then asks the main context to display it.
 * Always returns GST_FLOW_OK so the pipeline keeps running. */
static GstFlowReturn new_sample(GstAppSink *gstappsink, gpointer video_decoder)
{
    SpiceGstDecoder *decoder = video_decoder;
    GstSample *sample = gst_app_sink_pull_sample(decoder->appsink);
    /* buffer is only valid while we hold a reference on sample */
    GstBuffer *buffer = sample ? gst_sample_get_buffer(sample) : NULL;
    if (sample) {
        /* queues_mutex protects both decoding_queue and display_queue,
         * which are shared with the main context. */
        g_mutex_lock(&decoder->queues_mutex);

        /* gst_app_sink_pull_sample() sometimes returns the same buffer twice
         * or buffers that have a modified, and thus unrecognizable, PTS.
         * Blindly removing frames from the decoding_queue until we find a
         * match would only empty the queue, resulting in later buffers not
         * finding a match either, etc. So check the buffer has a matching
         * frame first.
         */
        SpiceGstFrame *gstframe;
        GList *l = g_queue_peek_head_link(decoder->decoding_queue);
        while (l) {
            gstframe = l->data;
            if (gstframe->timestamp == GST_BUFFER_PTS(buffer)) {
                /* The frame is now ready for display */
                gstframe->sample = sample;
                g_queue_push_tail(decoder->display_queue, gstframe);

                /* Now that we know there is a match, remove it and the older
                 * frames from the decoding queue.
                 */
                while ((gstframe = g_queue_pop_head(decoder->decoding_queue))) {
                    if (gstframe->timestamp == GST_BUFFER_PTS(buffer)) {
                        break;
                    }
                    /* The GStreamer pipeline dropped the corresponding
                     * buffer.
                     */
                    SPICE_DEBUG("the GStreamer pipeline dropped a frame");
                    free_gst_frame(gstframe);
                }
                break;
            }
            l = l->next;
        }
        if (!l) {
            /* No queued frame matched this PTS: release the sample here
             * since no display-queue entry took ownership of it. */
            spice_warning("got an unexpected decoded buffer!");
            gst_sample_unref(sample);
        }
        g_mutex_unlock(&decoder->queues_mutex);
        schedule_frame(decoder);
    } else {
        spice_warning("GStreamer error: could not pull sample");
    }
    return GST_FLOW_OK;
}
void preroll_video_frames(unsigned int n_frames) { IDeckLinkMutableVideoFrame *frame; for (unsigned int i = 0; i < n_frames; i++) { if (deckLinkOutput->CreateVideoFrame(norms[norm].w, norms[norm].h, 2*norms[norm].w, bpf, bmdFrameFlagDefault, &frame) != S_OK) { throw std::runtime_error("Failed to create frame"); } schedule_frame(frame); } }
/* main context */
/* Timer/idle callback: pops the next ready frame off display_queue, maps
 * its decoded buffer and hands the pixels to the stream for display, then
 * frees the frame and schedules the next one. Always returns
 * G_SOURCE_REMOVE since schedule_frame() re-arms the timer itself. */
static gboolean display_frame(gpointer video_decoder)
{
    SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;
    SpiceGstFrame *gstframe;
    GstCaps *caps;
    gint width, height;
    GstStructure *s;
    GstBuffer *buffer;
    GstMapInfo mapinfo;

    /* queues_mutex guards display_queue, shared with the GStreamer thread;
     * timer_id is cleared since this callback is firing now. */
    g_mutex_lock(&decoder->queues_mutex);
    decoder->timer_id = 0;
    gstframe = g_queue_pop_head(decoder->display_queue);
    g_mutex_unlock(&decoder->queues_mutex);
    /* If the queue is empty we don't even need to reschedule */
    g_return_val_if_fail(gstframe, G_SOURCE_REMOVE);

    if (!gstframe->sample) {
        spice_warning("got a frame without a sample!");
        goto error;
    }

    /* The frame dimensions come from the sample's caps, not the stream. */
    caps = gst_sample_get_caps(gstframe->sample);
    if (!caps) {
        spice_warning("GStreamer error: could not get the caps of the sample");
        goto error;
    }

    s = gst_caps_get_structure(caps, 0);
    if (!gst_structure_get_int(s, "width", &width) ||
        !gst_structure_get_int(s, "height", &height)) {
        spice_warning("GStreamer error: could not get the size of the frame");
        goto error;
    }

    buffer = gst_sample_get_buffer(gstframe->sample);
    if (!gst_buffer_map(buffer, &mapinfo, GST_MAP_READ)) {
        spice_warning("GStreamer error: could not map the buffer");
        goto error;
    }

    stream_display_frame(decoder->base.stream, gstframe->frame,
                         width, height, mapinfo.data);
    gst_buffer_unmap(buffer, &mapinfo);

error:
    /* Common cleanup for both success and failure paths: release the
     * frame and line up the next one. */
    free_gst_frame(gstframe);
    schedule_frame(decoder);
    return G_SOURCE_REMOVE;
}
/* DeckLink callbacks */
/* Completion callback: warn on stderr about any abnormal completion
 * result, then immediately re-queue the completed frame for output. */
virtual HRESULT ScheduledFrameCompleted(IDeckLinkVideoFrame *frame,
                                        BMDOutputFrameCompletionResult result)
{
    const char *warning = NULL;

    switch (result) {
    case bmdOutputFrameDisplayedLate:
        warning = "WARNING: Decklink displayed frame late (running too slow!)\r\n";
        break;
    case bmdOutputFrameDropped:
        warning = "WARNING: Decklink dropped frame\r\n";
        break;
    case bmdOutputFrameFlushed:
        warning = "WARNING: Decklink flushed frame\r\n";
        break;
    default:
        break;
    }
    if (warning) {
        fprintf(stderr, "%s", warning);
    }

    schedule_frame((IDeckLinkMutableVideoFrame *) frame);
    return S_OK;
}
void preroll_video_frames(unsigned int n_frames) { IDeckLinkMutableVideoFrame *frame; IDeckLinkVideoFrameAncillary *anc; for (unsigned int i = 0; i < n_frames; i++) { if (deckLinkOutput->CreateVideoFrame(norms[norm].w, norms[norm].h, 2*norms[norm].w, bpf, bmdFrameFlagDefault, &frame) != S_OK) { throw std::runtime_error("Failed to create frame"); } if (deckLinkOutput->CreateAncillaryData(bpf, &anc) != S_OK) { throw std::runtime_error("failed to set frame ancillary data"); } if (frame->SetAncillaryData(anc) != S_OK) { throw std::runtime_error("failed to set frame ancillary data"); } schedule_frame(frame); } }
/* Entry point: bridge frames from the capture interface ("wlan0") to the
 * output interface ("lo"), pacing delivery with poll() using the delay
 * suggested by ttn_frame(). Runs forever. */
int main(void)
{
    int input, output;
    create_sockets(&input, &output);
    bind_sockets("wlan0", input, "lo", output);

    /* main program part.. */
    frame_t work;
    struct pollfd pollinput = { .fd = input, .events = POLLIN };

    for (;;) {
        printf("%40d frames in buffer.\r", count_frames());

        /* Ingest one frame, if any arrived on the capture socket. */
        work.length = read(input, work.data.buffer, BIGGEST_FRAME);
        if (work.length > 0) {
            translate_frame(&work);
            schedule_frame(&work);
            queue_frame(&work);
        }

        /* Emit whatever is due, then sleep until the next frame's time
         * or until more input arrives. */
        dole_frame(output);
        int mswait = ttn_frame();
        poll(&pollinput, 1, mswait);
    }

    return 0;
}