/* fakesink "handoff" callback: as soon as the fakesink sees a buffer,
 * post an EOS message on the pipeline (the user data) so that a bus
 * watch elsewhere can shut the pipeline down.
 *
 * Fix: the original passed the raw gpointer `data` to
 * gst_element_post_message() while already having the cast `pipeline`
 * variable — use the typed pointer consistently. */
static void
fakesink_hand_off (GstElement * fakesink, GstBuffer * buf, GstPad * pad,
    gpointer data)
{
  GstElement *pipeline = (GstElement *) data;

  gst_element_post_message (pipeline,
      gst_message_new_eos (GST_OBJECT (pipeline)));
}
/* Callback that advances a (global) uri_list playlist on the (global)
 * playbin-style `pipeline`: while more URIs remain, set the next one;
 * otherwise post EOS so the application can stop.
 *
 * Fix: the original declaration had no return type (implicit int is
 * invalid since C99, and the function returns nothing) — declare it
 * `void`. Also brace the else branch.
 *
 * NOTE(review): `++count` assumes uri_list holds at least num_uri + 1
 * entries (index num_uri is reached when count == num_uri - 1) — the
 * array presumably includes an initial URI at index 0 set elsewhere;
 * confirm against the caller. */
void
set_next_uri (GstElement * obj, gpointer userdata)
{
  if (count < num_uri) {
    g_message ("Setting uri %s", uri_list[++count]);
    g_object_set (G_OBJECT (pipeline), "uri", uri_list[count], NULL);
  } else {
    gst_element_post_message (pipeline,
        gst_message_new_eos (GST_OBJECT (pipeline)));
  }
}
/* Pad event handler for the DeckLink sink's video pad.
 * On EOS it records the event's seqnum, marks the video side as
 * finished and posts an EOS message on the bus carrying that seqnum;
 * all events (including EOS) are then forwarded to the default
 * handler. */
static gboolean
gst_decklink_sink_videosink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  gboolean res;
  GstDecklinkSink *decklinksink;

  decklinksink = GST_DECKLINK_SINK (parent);

  GST_DEBUG_OBJECT (pad, "event: %" GST_PTR_FORMAT, event);

  switch (GST_EVENT_TYPE (event)) {
      /* FIXME: this makes no sense, template caps don't contain v210 */
#if 0
      /* Dead code (compiled out): would have selected 10-bit vs 8-bit
       * YUV pixel format from the negotiated caps. Note it references
       * `ret`, `format`, `width`, `height` which are not declared here,
       * so it would not compile if re-enabled as-is. */
    case GST_EVENT_CAPS:{
      GstCaps *caps;
      gst_event_parse_caps (event, &caps);
      ret = gst_video_format_parse_caps (caps, &format, &width, &height);
      if (ret) {
        if (format == GST_VIDEO_FORMAT_v210) {
          decklinksink->pixel_format = bmdFormat10BitYUV;
        } else {
          decklinksink->pixel_format = bmdFormat8BitYUV;
        }
      }
      break;
    }
#endif
    case GST_EVENT_EOS:
      /* FIXME: EOS aggregation with audio pad looks wrong */
      decklinksink->video_eos = TRUE;
      decklinksink->video_seqnum = gst_event_get_seqnum (event);
      {
        GstMessage *message;

        /* propagate the event's seqnum into the bus message so the
         * application can correlate the EOS message with the event */
        message = gst_message_new_eos (GST_OBJECT_CAST (decklinksink));
        gst_message_set_seqnum (message, decklinksink->video_seqnum);
        gst_element_post_message (GST_ELEMENT_CAST (decklinksink), message);
      }
      res = gst_pad_event_default (pad, parent, event);
      break;
    default:
      res = gst_pad_event_default (pad, parent, event);
      break;
  }

  return res;
}
/* Shuts down the decoding pipeline belonging to `ih`.
 * Order matters here: first post EOS on the bus so the gstreamer
 * thread's main loop exits, then join that thread, and only then tear
 * down the message source, the pipeline and the loop. */
static void gstreamer_close_file(struct input_handle* ih) {
  if (ih->bin) {
    GstBus *bus = gst_element_get_bus(ih->bin);
    /* EOS on the bus makes the gstreamer_loop thread quit its loop */
    gst_bus_post(bus, gst_message_new_eos(NULL));
    g_object_unref(bus);
  }
  /* NOTE(review): joined unconditionally — presumably gstreamer_loop
   * was always started for an open handle; confirm against open path */
  g_thread_join(ih->gstreamer_loop);
  if (ih->message_source)
    g_source_destroy(ih->message_source);
  if (ih->bin) {
    /* cleanup */
    gst_element_set_state(ih->bin, GST_STATE_NULL);
    g_object_unref(ih->bin);
    ih->bin = NULL;
    g_main_loop_unref(ih->loop);
  }
}
static int gstreamer_open_file(struct input_handle* ih, const char* filename) { GTimeVal beg, end; ih->filename = filename; ih->quit_pipeline = TRUE; ih->main_loop_quit = FALSE; ih->ready = FALSE; ih->bin = NULL; ih->message_source = NULL; ih->gstreamer_loop = g_thread_create((GThreadFunc) gstreamer_loop, ih, TRUE, NULL); g_get_current_time(&beg); while (!ih->ready) { g_thread_yield(); g_get_current_time(&end); if (end.tv_usec + end.tv_sec * G_USEC_PER_SEC - beg.tv_usec - beg.tv_sec * G_USEC_PER_SEC > 1 * G_USEC_PER_SEC) { break; } } if (!ih->quit_pipeline) { if (!query_data(ih)) { ih->quit_pipeline = TRUE; } } if (ih->quit_pipeline) { if (ih->bin) { GstBus *bus = gst_element_get_bus(ih->bin); gst_bus_post(bus, gst_message_new_eos(NULL)); g_object_unref(bus); } g_thread_join(ih->gstreamer_loop); if (ih->message_source) g_source_destroy(ih->message_source); if (ih->bin) { /* cleanup */ gst_element_set_state(ih->bin, GST_STATE_NULL); g_object_unref(ih->bin); ih->bin = NULL; g_main_loop_unref(ih->loop); } return 1; } else { return 0; } }
/* DeckLink driver callback asking us to render more audio samples.
 * Feeds queued audio (from the GstAdapter) to the hardware scheduler,
 * or posts EOS on the bus once the adapter runs dry after audio_eos.
 *
 * NOTE(review): the `preroll` parameter is ignored; the stop branch
 * calls BeginAudioPreroll() instead — looks suspicious, confirm
 * against the DeckLink SDK usage.
 * NOTE(review): n / 4 and samplesWritten * 4 presumably assume
 * 16-bit stereo (4 bytes per frame) — verify against the caps. */
HRESULT
Output::RenderAudioSamples (bool preroll)
{
  uint32_t samplesWritten;
  // guint64 samplesToWrite;

  if (decklinksink->stop) {
    GST_DEBUG ("decklinksink->stop set TRUE!");
    decklinksink->output->BeginAudioPreroll ();
    // running = true;
  } else {
    gconstpointer data;
    int n;

    /* audio_adapter / audio_cond are shared with the streaming thread */
    g_mutex_lock (&decklinksink->audio_mutex);
    n = gst_adapter_available (decklinksink->audio_adapter);
    if (n > 0) {
      data = gst_adapter_map (decklinksink->audio_adapter, n);

      decklinksink->output->ScheduleAudioSamples ((void *) data, n / 4,
          0, 0, &samplesWritten);

      gst_adapter_unmap (decklinksink->audio_adapter);
      /* only flush what the hardware actually accepted */
      gst_adapter_flush (decklinksink->audio_adapter, samplesWritten * 4);
      GST_DEBUG ("wrote %d samples, %d available", samplesWritten, n / 4);

      /* wake the streaming thread waiting for adapter space */
      g_cond_signal (&decklinksink->audio_cond);
    } else {
      /* adapter drained: if upstream already sent EOS, tell the app */
      if (decklinksink->audio_eos) {
        GstMessage *message;

        message = gst_message_new_eos (GST_OBJECT_CAST (decklinksink));
        gst_message_set_seqnum (message, decklinksink->audio_seqnum);
        gst_element_post_message (GST_ELEMENT_CAST (decklinksink), message);
      }
    }
    g_mutex_unlock (&decklinksink->audio_mutex);
  }

  GST_DEBUG ("RenderAudioSamples");

  return S_OK;
}
/* fakesink "handoff"-style callback (GStreamer 0.10 buffer API):
 * pushes each incoming buffer's PCM data into the Last.fm fingerprint
 * extractor; once the extractor reports it has enough data, posts EOS
 * on the pipeline's bus and sets ma->quit so later buffers are
 * ignored.
 * NOTE(review): buffersamples/bufferpos/i/j/fill are declared but
 * never used here — apparently leftovers from a sibling callback. */
static void
Lastfmfp_cb_have_data(GstElement *element, GstBuffer *buffer, GstPad *pad, LastfmfpAudio *ma)
{
    gint buffersamples;
    gint bufferpos;
    gint i;
    gint j;
    gint fill;

    // if data continues to flow/EOS is not yet processed
    if (ma->quit)
        return;

    // exit on empty buffer
    if (buffer->size <= 0)
        return;

    /* raw 16-bit samples straight out of the buffer */
    ma->data_in = (short*)GST_BUFFER_DATA(buffer);
    //ma->num_samples = (size_t)(GST_BUFFER_OFFSET_END (buffer) - GST_BUFFER_OFFSET (buffer));
    ma->num_samples = (size_t)(GST_BUFFER_SIZE (buffer) / sizeof(guint16));

    //printf("caps: %s\n", gst_caps_to_string(GST_BUFFER_CAPS(buffer)));
    //printf(" offset : %llu size: %llu \n", (unsigned long long)GST_BUFFER_OFFSET (buffer), (unsigned long long)GST_BUFFER_OFFSET_END (buffer));
    //GST_LOG ("caps are %" GST_PTR_FORMAT, GST_BUFFER_CAPS(buffer));
    //extractor.process(const short* pPCM, size_t num_samples, bool end_of_stream = false);
    //printf("data: %d %d %d %d %d %d %d %d %d %d %d %d \n", ma->data_in[0], ma->data_in[1], ma->data_in[2], ma->data_in[3], ma->data_in[4], ma->data_in[5], ma->data_in[6], ma->data_in[7], ma->data_in[8], ma->data_in[9], ma->data_in[10], ma->data_in[11]);

    /* process() returns true once the extractor has seen enough audio */
    if (ma->extractor->process(ma->data_in, ma->num_samples, false)) //TODO check parameters
    {
        //stop the gstreamer loop to free all and return fpid
        GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(ma->pipeline));
        GstMessage* eosmsg = gst_message_new_eos(GST_OBJECT(ma->pipeline));
        gst_bus_post(bus, eosmsg);
        g_print("libLastfmfp: EOS Message sent\n");
        gst_object_unref(bus);
        ma->quit = TRUE;
        return;
    }

    return;
}
/* Test helper: posts a fixed sequence of messages on the (global)
 * test_bus — five application messages (msg_id 0..4), one element
 * message (msg_id 0), one error message, two application messages
 * (msg_id 0..1) and finally one EOS — sleeping interval_usecs after
 * each post. */
static void
send_5app_1el_1err_2app_1eos_messages (guint interval_usecs)
{
  GstMessage *msg;
  GstStructure *st;
  gint idx;

  /* five application messages, msg_id 0..4 */
  for (idx = 0; idx < 5; idx++) {
    st = gst_structure_new ("test_message", "msg_id", G_TYPE_INT, idx, NULL);
    msg = gst_message_new_application (NULL, st);
    GST_LOG ("posting application message");
    gst_bus_post (test_bus, msg);
    g_usleep (interval_usecs);
  }

  /* one element message, msg_id 0 */
  st = gst_structure_new ("test_message", "msg_id", G_TYPE_INT, 0, NULL);
  msg = gst_message_new_element (NULL, st);
  GST_LOG ("posting element message");
  gst_bus_post (test_bus, msg);
  g_usleep (interval_usecs);

  /* one error message */
  msg = gst_message_new_error (NULL, NULL, "debug string");
  GST_LOG ("posting error message");
  gst_bus_post (test_bus, msg);
  g_usleep (interval_usecs);

  /* two application messages, msg_id 0..1 */
  for (idx = 0; idx < 2; idx++) {
    st = gst_structure_new ("test_message", "msg_id", G_TYPE_INT, idx, NULL);
    msg = gst_message_new_application (NULL, st);
    GST_LOG ("posting application message");
    gst_bus_post (test_bus, msg);
    g_usleep (interval_usecs);
  }

  /* one EOS message */
  msg = gst_message_new_eos (NULL);
  GST_LOG ("posting EOS message");
  gst_bus_post (test_bus, msg);
  g_usleep (interval_usecs);
}
/* Aborts an in-progress decode: if the pipeline exists and is not
 * already in the NULL state, posts EOS on its bus (which stops the
 * decode loop) and flags the result as invalid. The decoding mutex
 * guards the bus post and the invalidate flag against the decode
 * thread. */
void mirageaudio_canceldecode(MirageAudio *ma)
{
    GstState current;

    if (!GST_IS_ELEMENT(ma->pipeline))
        return;

    /* wait up to 100 ms for a settled state */
    gst_element_get_state(ma->pipeline, &current, NULL, 100*GST_MSECOND);
    if (current == GST_STATE_NULL)
        return;

    g_mutex_lock(ma->decoding_mutex);

    GstBus *pipeline_bus = gst_pipeline_get_bus(GST_PIPELINE(ma->pipeline));
    GstMessage *eos = gst_message_new_eos(GST_OBJECT(ma->pipeline));
    gst_bus_post(pipeline_bus, eos);
    g_print("libmirageaudio: EOS Message sent\n");
    gst_object_unref(pipeline_bus);

    /* tell the consumer the partial result must be discarded */
    ma->invalidate = TRUE;

    g_mutex_unlock(ma->decoding_mutex);
}
/* Ruby binding initializer: wraps a newly created GStreamer EOS
 * message (with `src` as its source object) into the receiving Ruby
 * object. Always returns nil, per initializer convention. */
static VALUE
eos_initialize(VALUE self, VALUE src)
{
    GstMessage *message = gst_message_new_eos(RVAL2GST_OBJ(src));

    G_INITIALIZE(self, message);
    return Qnil;
}
/* fakesink "handoff"-style callback (GStreamer 1.x buffer API):
 * resamples each incoming float buffer via libsamplerate, windows the
 * resampled audio into winsize-sample frames, runs FFTW on each frame
 * and accumulates the power spectrum into ma->out (one column per
 * hop). Once ma->hops frames are done, posts EOS on the pipeline's
 * bus and sets ma->quit so further buffers are ignored.
 *
 * Fix: the "Logic ERROR" g_print string literal was broken across a
 * raw newline in the source (invalid C) — reconstructed as a single
 * escaped literal. */
static void
mirageaudio_cb_have_data(GstElement *element, GstBuffer *buffer, GstPad *pad, MirageAudio *ma)
{
    gint buffersamples;
    gint bufferpos;
    gint i;
    gint j;
    gint fill;
    GstMapInfo map;

    // if data continues to flow/EOS is not yet processed
    if (ma->quit)
        return;

    // exit on empty buffer
    if (gst_buffer_get_size (buffer) <= 0)
        return;

    if (!gst_buffer_map (buffer, &map, GST_MAP_READ))
        return;

    /* hand the mapped float samples to the resampler */
    ma->src_data.data_in = (float*)map.data;
    ma->src_data.input_frames = map.size/sizeof(float);

    do {
        /* set end of input flag if necessary (enough seconds consumed) */
        ma->cursample += ma->src_data.input_frames;
        if (ma->cursample >= ma->seconds * ma->filerate) {
            ma->src_data.end_of_input = 1;
        }

        // resampling
        int err = src_process(ma->src_state, &ma->src_data);
        if (err != 0) {
            g_print("libmirageaudio: SRC Error - %s\n", src_strerror(err));
        }

        // return if no output
        if (ma->src_data.output_frames_gen == 0) {
            gst_buffer_unmap (buffer, &map);
            return;
        }

        buffersamples = ma->src_data.output_frames_gen;
        bufferpos = 0;

        // FFTW
        // If buffer does not get filled
        if (ma->fftwsamples + buffersamples < ma->winsize) {
            memcpy(ma->fftw+ma->fftwsamples, ma->src_data.data_out,
                    buffersamples*sizeof(float));
            ma->fftwsamples += buffersamples;

        // If buffer gets filled.
        } else {
            do {
                // prepare FFTW: top up the window to exactly winsize samples
                fill = ma->winsize - ma->fftwsamples;
                if (fill <= 0)
                    g_print("libmirageaudio: Logic ERROR! fill <= 0\n");
                memcpy(ma->fftw+ma->fftwsamples, ma->src_data.data_out+bufferpos,
                        fill*sizeof(float));
                /* zero-pad the second half of the FFT input */
                memset(ma->fftw+ma->winsize, 0, ma->winsize*sizeof(float));
                /* apply the analysis window and rescale to 16-bit range */
                for (i = 0; i < ma->winsize; i++) {
                    ma->fftw[i] = ma->fftw[i] * ma->window[i] * 32768.0f;
                }

                // Execute FFTW
                fftwf_execute(ma->fftwplan);

                // Powerspectrum: |X[j]|^2 from the packed real/imag layout
                ma->out[ma->curhop] = powf(ma->fftw[0], 2);
                for (j = 1; j < ma->winsize/2; j++) {
                    ma->out[j*ma->hops + ma->curhop] =
                            powf(ma->fftw[j*2], 2) +
                            powf(ma->fftw[ma->fftwsize-j*2], 2);
                }
                ma->out[(ma->winsize/2)*ma->hops + ma->curhop] =
                        powf(ma->fftw[ma->winsize], 2);

                ma->fftwsamples = 0;
                buffersamples -= fill;
                bufferpos += fill;
                ma->curhop++;

                /* collected all hops: stop the pipeline via EOS */
                if (ma->curhop == ma->hops) {
                    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(ma->pipeline));
                    GstMessage* eosmsg = gst_message_new_eos(GST_OBJECT(ma->pipeline));
                    gst_bus_post(bus, eosmsg);
                    g_print("libmirageaudio: EOS Message sent\n");
                    gst_object_unref(bus);
                    ma->quit = TRUE;
                    gst_buffer_unmap (buffer, &map);
                    return;
                }
            } while (buffersamples >= ma->winsize);

            /* stash the leftover tail for the next window */
            if (buffersamples > 0) {
                memcpy(ma->fftw, ma->src_data.data_out+bufferpos,
                        buffersamples*sizeof(float));
                ma->fftwsamples = buffersamples;
            }
        }

        /* advance past what the resampler consumed */
        ma->src_data.data_in += ma->src_data.input_frames_used;
        ma->src_data.input_frames -= ma->src_data.input_frames_used;
    } while (ma->src_data.input_frames > 0);

    gst_buffer_unmap (buffer, &map);
    return;
}
/* Factory: builds an EOS message originating from `source` and wraps
 * it in an EosMessagePtr. The wrapper takes ownership of the freshly
 * created message (wrap(..., false) — no extra ref is added). */
EosMessagePtr EosMessage::create(const ObjectPtr & source)
{
    GstMessage *message = gst_message_new_eos(source);
    return EosMessagePtr::wrap(message, false);
}
/* Task body for the qtmoovrecover element: validates the three file
 * path properties, opens the recovery/broken/fixed files, parses the
 * recovery data, writes the repaired file, posts EOS on success, and
 * releases every resource on the shared `end:` cleanup path before
 * stopping the task. */
static void
gst_qt_moov_recover_run (void *data)
{
  FILE *moovrec = NULL;
  FILE *mdatinput = NULL;
  FILE *output = NULL;
  MdatRecovFile *mdat_recov = NULL;
  MoovRecovFile *moov_recov = NULL;
  GstQTMoovRecover *qtmr = GST_QT_MOOV_RECOVER_CAST (data);
  GError *err = NULL;

  GST_LOG_OBJECT (qtmr, "Starting task");
  GST_DEBUG_OBJECT (qtmr, "Validating properties");
  GST_OBJECT_LOCK (qtmr);
  /* validate properties — each failure unlocks before erroring out */
  if (qtmr->broken_input == NULL) {
    GST_OBJECT_UNLOCK (qtmr);
    GST_ELEMENT_ERROR (qtmr, RESOURCE, SETTINGS,
        ("Please set broken-input property"), (NULL));
    goto end;
  }
  if (qtmr->recovery_input == NULL) {
    GST_OBJECT_UNLOCK (qtmr);
    GST_ELEMENT_ERROR (qtmr, RESOURCE, SETTINGS,
        ("Please set recovery-input property"), (NULL));
    goto end;
  }
  if (qtmr->fixed_output == NULL) {
    GST_OBJECT_UNLOCK (qtmr);
    GST_ELEMENT_ERROR (qtmr, RESOURCE, SETTINGS,
        ("Please set fixed-output property"), (NULL));
    goto end;
  }
  GST_DEBUG_OBJECT (qtmr, "Opening input/output files");
  /* open files */
  moovrec = g_fopen (qtmr->recovery_input, "rb");
  if (moovrec == NULL) {
    GST_OBJECT_UNLOCK (qtmr);
    GST_ELEMENT_ERROR (qtmr, RESOURCE, OPEN_READ,
        ("Failed to open recovery-input file"), (NULL));
    goto end;
  }
  mdatinput = g_fopen (qtmr->broken_input, "rb");
  if (mdatinput == NULL) {
    GST_OBJECT_UNLOCK (qtmr);
    GST_ELEMENT_ERROR (qtmr, RESOURCE, OPEN_READ,
        ("Failed to open broken-input file"), (NULL));
    goto end;
  }
  output = g_fopen (qtmr->fixed_output, "wb+");
  if (output == NULL) {
    GST_OBJECT_UNLOCK (qtmr);
    GST_ELEMENT_ERROR (qtmr, RESOURCE, OPEN_READ_WRITE,
        ("Failed to open fixed-output file"), (NULL));
    goto end;
  }
  GST_OBJECT_UNLOCK (qtmr);

  GST_DEBUG_OBJECT (qtmr, "Parsing input files");
  /* now create our structures */
  /* NOTE(review): mdatinput is NULLed right after the call —
   * presumably mdat_recov_file_create takes ownership of the FILE*
   * even on failure; confirm against its contract */
  mdat_recov = mdat_recov_file_create (mdatinput, qtmr->faststart_mode, &err);
  mdatinput = NULL;
  if (mdat_recov == NULL) {
    GST_ELEMENT_ERROR (qtmr, RESOURCE, FAILED,
        ("Broken file could not be parsed correctly"), (NULL));
    goto end;
  }
  /* same ownership-transfer pattern for moovrec */
  moov_recov = moov_recov_file_create (moovrec, &err);
  moovrec = NULL;
  if (moov_recov == NULL) {
    GST_ELEMENT_ERROR (qtmr, RESOURCE, FAILED,
        ("Recovery file could not be parsed correctly"), (NULL));
    goto end;
  }
  /* now parse the buffers data from moovrec */
  if (!moov_recov_parse_buffers (moov_recov, mdat_recov, &err)) {
    goto end;
  }

  GST_DEBUG_OBJECT (qtmr, "Writing fixed file to output");
  if (!moov_recov_write_file (moov_recov, mdat_recov, output, &err)) {
    goto end;
  }

  /* here means success */
  GST_DEBUG_OBJECT (qtmr, "Finished successfully, posting EOS");
  gst_element_post_message (GST_ELEMENT_CAST (qtmr),
      gst_message_new_eos (GST_OBJECT_CAST (qtmr)));

end:
  GST_LOG_OBJECT (qtmr, "Finalizing task");
  /* any GError set by the helpers above is reported here, once */
  if (err) {
    GST_ELEMENT_ERROR (qtmr, RESOURCE, FAILED, ("%s", err->message), (NULL));
    g_error_free (err);
  }
  if (moov_recov)
    moov_recov_file_free (moov_recov);
  if (moovrec)
    fclose (moovrec);
  if (mdat_recov)
    mdat_recov_file_free (mdat_recov);
  if (mdatinput)
    fclose (mdatinput);
  if (output)
    fclose (output);
  GST_LOG_OBJECT (qtmr, "Leaving task");
  gst_task_stop (qtmr->task);
}