Example #1
static GstFlowReturn
gst_jpeg_dec_post_error_or_warning (GstJpegDec * dec)
{
  GstFlowReturn ret;
  int max_errors;

  ++dec->error_count;
  max_errors = g_atomic_int_get (&dec->max_errors);

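  /* Error tolerance: max_errors < 0 ignores decode errors, 0 selects "automatic"
   * mode, and a positive value errors out once error_count reaches the limit. */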
  if (max_errors < 0) {
    ret = GST_FLOW_OK;
  } else if (max_errors == 0) {
    /* FIXME: do something more clever in "automatic mode" */
    if (gst_video_decoder_get_packetized (GST_VIDEO_DECODER (dec))) {
      ret = (dec->error_count < 3) ? GST_FLOW_OK : GST_FLOW_ERROR;
    } else {
      ret = GST_FLOW_ERROR;
    }
  } else {
    ret = (dec->error_count < max_errors) ? GST_FLOW_OK : GST_FLOW_ERROR;
  }

  GST_INFO_OBJECT (dec, "decoding error %d/%d (%s)", dec->error_count,
      max_errors, (ret == GST_FLOW_OK) ? "ignoring error" : "erroring out");

  gst_element_message_full (GST_ELEMENT (dec),
      (ret == GST_FLOW_OK) ? GST_MESSAGE_WARNING : GST_MESSAGE_ERROR,
      GST_STREAM_ERROR, GST_STREAM_ERROR_DECODE,
      g_strdup (_("Failed to decode JPEG image")), dec->error_msg,
      __FILE__, dec->error_func, dec->error_line);

  dec->error_msg = NULL;
  gst_jpeg_dec_clear_error (dec);
  return ret;
}
Example #2
static inline void post_message(MpegTSDemuxer *demuxer, const char* message,
                                GstMessageType type, GQuark domain, int code)
{
#ifdef DEBUG_OUTPUT
    g_print ("MpegTS post_message: %s\n", message);
#endif
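    /* gst_element_message_full() takes ownership of the message text, hence the g_strdup(). */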
    gst_element_message_full(GST_ELEMENT(demuxer), type, domain, code,
                             g_strdup(message), NULL, ("mpegtsdemuxer.c"), ("mpegts_demuxer_message"), 0);
}
Example #3
static inline void post_error(MpegTSDemuxer *demuxer, const char* description, int result, int code)
{
    char* error_string = g_strdup_printf("%s: %d (%s)", description, result,
                                         avelement_error_to_string(AVELEMENT(demuxer), result));

#ifdef DEBUG_OUTPUT
    g_print ("MpegTS post_error: %s\n", error_string);
#endif

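    /* error_string is consumed (freed) by gst_element_message_full(), so it is not freed here. */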
    gst_element_message_full(GST_ELEMENT(demuxer), GST_MESSAGE_ERROR, GST_STREAM_ERROR, code,
                             error_string, NULL, ("mpegtsdemuxer.c"), ("mpegts_demuxer_error"), 0);
}
static gboolean videodecoder_configure_sourcepad(VideoDecoder *decoder)
{
    BaseDecoder *base = BASEDECODER(decoder);

    if (GST_PAD_CAPS(base->srcpad) == NULL ||
        decoder->width != base->context->width ||
        decoder->height != base->context->height)
    {
        decoder->width = base->context->width;
        decoder->height = base->context->height;

        decoder->discont = (GST_PAD_CAPS(base->srcpad) != NULL);

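        // Derive the YV12 plane offsets and sizes from the decoder frame strides.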
        decoder->u_offset = base->frame->linesize[0] * decoder->height;
        decoder->uv_blocksize = base->frame->linesize[1] * decoder->height / 2;

        decoder->v_offset = decoder->u_offset + decoder->uv_blocksize;
        decoder->frame_size = (base->frame->linesize[0] + base->frame->linesize[1]) * decoder->height;

        GstCaps *src_caps = gst_caps_new_simple("video/x-raw-yuv",
                                                "format", GST_TYPE_FOURCC, GST_STR_FOURCC("YV12"),
                                                "width", G_TYPE_INT, decoder->width,
                                                "height", G_TYPE_INT, decoder->height,
                                                "stride-y", G_TYPE_INT, base->frame->linesize[0],
                                                "stride-u", G_TYPE_INT, base->frame->linesize[1],
                                                "stride-v", G_TYPE_INT, base->frame->linesize[2],
                                                "offset-y", G_TYPE_INT, 0,
                                                "offset-u", G_TYPE_INT, decoder->u_offset,
                                                "offset-v", G_TYPE_INT, decoder->v_offset,
                                                "framerate", GST_TYPE_FRACTION, 2997, 100,
                                                NULL);


        if (!gst_pad_set_caps (base->srcpad, src_caps))
        {
            gst_element_message_full(GST_ELEMENT(decoder), GST_MESSAGE_ERROR, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
                                     g_strdup("Failed to set caps on the sourcepad"), NULL,
                                     ("videodecoder.c"), ("videodecoder_configure"), 0);
            gst_caps_unref(src_caps);
            return FALSE;
        }
        gst_caps_unref(src_caps);
    }

    return TRUE;
}
static gboolean
dvb_base_bin_uri_set_uri (GstURIHandler * handler, const gchar * uri,
    GError ** error)
{
  DvbBaseBin *dvbbasebin = GST_DVB_BASE_BIN (handler);
  GError *err = NULL;
  gchar *location;

  location = gst_uri_get_location (uri);

  if (location == NULL)
    goto no_location;

  /* FIXME: here is where we parse channels.conf */
  if (!set_properties_for_channel (GST_ELEMENT (dvbbasebin), location, &err))
    goto set_properties_failed;

  g_free (location);
  return TRUE;

post_error_and_exit:
  {
    gst_element_message_full (GST_ELEMENT (dvbbasebin), GST_MESSAGE_ERROR,
        err->domain, err->code, g_strdup (err->message), NULL, __FILE__,
        GST_FUNCTION, __LINE__);
    g_propagate_error (error, err);
    return FALSE;
  }
no_location:
  {
    g_set_error (&err, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
        "No details to DVB URI");
    goto post_error_and_exit;
  }
set_properties_failed:
  {
    g_free (location);
    if (!err)
      g_set_error (&err, GST_URI_ERROR, GST_URI_ERROR_BAD_REFERENCE,
          "Could not find information for channel");
    goto post_error_and_exit;
  }
}
Example #6
static void
byzanz_encoder_gstreamer_need_data (GstAppSrc *src, guint length, gpointer data)
{
  ByzanzEncoder *encoder = data;
  ByzanzEncoderGStreamer *gst = data;
  GstBuffer *buffer;
  cairo_t *cr;
  cairo_surface_t *surface;
  cairo_region_t *region;
  GError *error = NULL;
  guint64 msecs;
  int i, num_rects;

  if (!byzanz_deserialize (encoder->input_stream, &msecs, &surface, &region, encoder->cancellable, &error)) {
    gst_element_message_full (GST_ELEMENT (src), GST_MESSAGE_ERROR,
        error->domain, error->code, g_strdup (error->message), NULL, __FILE__, GST_FUNCTION, __LINE__);
    g_error_free (error);
    return;
  }

  if (surface == NULL) {
    gst_app_src_end_of_stream (gst->src);
    if (gst->audiosrc)
      gst_element_send_event (gst->audiosrc, gst_event_new_eos ());
    return;
  }

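  /* A buffer pushed earlier may still hold a reference to gst->surface; if so,
   * copy the surface before drawing so in-flight data is not modified. */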
  if (cairo_surface_get_reference_count (gst->surface) > 1) {
    cairo_surface_t *copy = cairo_image_surface_create (CAIRO_FORMAT_RGB24,
        cairo_image_surface_get_width (gst->surface), cairo_image_surface_get_height (gst->surface));
    
    cr = cairo_create (copy);
    cairo_set_source_surface (cr, gst->surface, 0, 0);
    cairo_paint (cr);
    cairo_destroy (cr);

    cairo_surface_destroy (gst->surface);
    gst->surface = copy;
  }
  cr = cairo_create (gst->surface);
  cairo_set_source_surface (cr, surface, 0, 0);

  num_rects = cairo_region_num_rectangles (region);
  for (i = 0; i < num_rects; i++) {
    cairo_rectangle_int_t rect;
    cairo_region_get_rectangle (region, i, &rect);
    cairo_rectangle (cr, rect.x, rect.y,
                     rect.width, rect.height);
  }

  cairo_fill (cr);
  cairo_destroy (cr);

  /* create a buffer and send it */
  /* FIXME: stride just works? */
  cairo_surface_reference (gst->surface);
  buffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
                                        cairo_image_surface_get_data (gst->surface),
                                        cairo_image_surface_get_stride (gst->surface) * cairo_image_surface_get_height (gst->surface),
                                        0,
                                        cairo_image_surface_get_stride (gst->surface) * cairo_image_surface_get_height (gst->surface),
                                        gst->surface,
                                        (GDestroyNotify) cairo_surface_destroy);
  GST_BUFFER_TIMESTAMP (buffer) = msecs * GST_MSECOND;
  gst_app_src_push_buffer (gst->src, buffer);
}
/***********************************************************************************
 * chain
 ***********************************************************************************/
static GstFlowReturn videodecoder_chain(GstPad *pad, GstBuffer *buf)
{
    VideoDecoder  *decoder = VIDEODECODER(GST_PAD_PARENT(pad));
    BaseDecoder   *base = BASEDECODER(decoder);
    GstFlowReturn  result = GST_FLOW_OK;
    int            num_dec = NO_DATA_USED;

    if (base->is_flushing)  // Reject buffers in flushing state.
    {
        result = GST_FLOW_WRONG_STATE;
        goto _exit;
    }

    if (!base->is_initialized && !videodecoder_configure(decoder, GST_PAD_CAPS(pad)))
    {
        result = GST_FLOW_ERROR;
        goto _exit;
    }

    if (!base->is_hls)
    {
        if (av_new_packet(&decoder->packet, GST_BUFFER_SIZE(buf)) == 0)
        {
            memcpy(decoder->packet.data, GST_BUFFER_DATA(buf), GST_BUFFER_SIZE(buf));
            if (GST_BUFFER_TIMESTAMP_IS_VALID(buf))
                base->context->reordered_opaque = GST_BUFFER_TIMESTAMP(buf);
            else
                base->context->reordered_opaque = AV_NOPTS_VALUE;
            num_dec = avcodec_decode_video2(base->context, base->frame, &decoder->frame_finished, &decoder->packet);
            av_free_packet(&decoder->packet);
        }
        else
        {
            result = GST_FLOW_ERROR;
            goto _exit;
        }
    }
    else
    {
        av_init_packet(&decoder->packet);
        decoder->packet.data = GST_BUFFER_DATA(buf);
        decoder->packet.size = GST_BUFFER_SIZE(buf);
        if (GST_BUFFER_TIMESTAMP_IS_VALID(buf))
            base->context->reordered_opaque = GST_BUFFER_TIMESTAMP(buf);
        else
            base->context->reordered_opaque = AV_NOPTS_VALUE;

        num_dec = avcodec_decode_video2(base->context, base->frame, &decoder->frame_finished, &decoder->packet);
    }

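    // A decoder error is not treated as fatal here: log it (in debug builds),
    // drop the buffer and return GST_FLOW_OK.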
    if (num_dec < 0)
    {
        //        basedecoder_flush(base);
#ifdef DEBUG_OUTPUT
        g_print ("videodecoder_chain error: %s\n", avelement_error_to_string(AVELEMENT(decoder), num_dec));
#endif
        goto _exit;
    }

    if (decoder->frame_finished > 0)
    {
        if (!videodecoder_configure_sourcepad(decoder))
            result = GST_FLOW_ERROR;
        else
        {
            GstBuffer *outbuf = NULL;
            result = gst_pad_alloc_buffer_and_set_caps(base->srcpad, base->context->frame_number,
                                                       decoder->frame_size, GST_PAD_CAPS(base->srcpad), &outbuf);
            if (result != GST_FLOW_OK)
            {
                if (result != GST_FLOW_WRONG_STATE)
                {
                    gst_element_message_full(GST_ELEMENT(decoder), GST_MESSAGE_ERROR,
                                             GST_STREAM_ERROR, GST_STREAM_ERROR_DECODE,
                                             ("Decoded video buffer allocation failed"), NULL,
                                             ("videodecoder.c"), ("videodecoder_chain"), 0);
                }
            }
            else
            {
                if (base->frame->reordered_opaque != AV_NOPTS_VALUE)
                {
                    GST_BUFFER_TIMESTAMP(outbuf) = base->frame->reordered_opaque;
                    GST_BUFFER_DURATION(outbuf) = GST_BUFFER_DURATION(buf); // Duration for video usually same
                }
                GST_BUFFER_SIZE(outbuf) = decoder->frame_size;

                // Copy image by parts from different arrays.
                memcpy(GST_BUFFER_DATA(outbuf),                     base->frame->data[0], decoder->u_offset);
                memcpy(GST_BUFFER_DATA(outbuf) + decoder->u_offset, base->frame->data[1], decoder->uv_blocksize);
                memcpy(GST_BUFFER_DATA(outbuf) + decoder->v_offset, base->frame->data[2], decoder->uv_blocksize);

                GST_BUFFER_OFFSET_END(outbuf) = GST_BUFFER_OFFSET_NONE;

                if (decoder->discont || GST_BUFFER_IS_DISCONT(buf))
                {
#ifdef DEBUG_OUTPUT
                    g_print("Video discont: frame size=%dx%d\n", base->context->width, base->context->height);
#endif
                    GST_BUFFER_FLAG_SET(outbuf, GST_BUFFER_FLAG_DISCONT);
                    decoder->discont = FALSE;
                }


#ifdef VERBOSE_DEBUG
                g_print("videodecoder: pushing buffer ts=%.4f sec", (double)GST_BUFFER_TIMESTAMP(outbuf)/GST_SECOND);
#endif
                result = gst_pad_push(base->srcpad, outbuf);
#ifdef VERBOSE_DEBUG
                g_print(" done, res=%s\n", gst_flow_get_name(result));
#endif
            }
        }
    }

_exit:
// INLINE - gst_buffer_unref()
    gst_buffer_unref(buf);
    return result;
}
static gboolean
rb_mtp_src_start (GstBaseSrc *basesrc)
{
	RBMTPSrc *src = RB_MTP_SRC (basesrc);

	/* download the file, if we haven't already */
	if (src->tempfile == NULL) {
		g_mutex_lock (&src->download_mutex);
		src->download_done = FALSE;
		rb_mtp_thread_download_track (src->device_thread,
					      src->track_id,
					      "",
					      (RBMtpDownloadCallback)download_cb,
					      g_object_ref (src),
					      g_object_unref);

		while (src->download_done == FALSE) {
			g_cond_wait (&src->download_cond, &src->download_mutex);
		}
		g_mutex_unlock (&src->download_mutex);
		rb_debug ("download finished");

		if (src->download_error) {
			int code;
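			/* map RBMtpThreadError codes onto GstResourceError codes */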
			switch (src->download_error->code) {
			case RB_MTP_THREAD_ERROR_NO_SPACE:
				code = GST_RESOURCE_ERROR_NO_SPACE_LEFT;
				break;

			case RB_MTP_THREAD_ERROR_TEMPFILE:
				code = GST_RESOURCE_ERROR_OPEN_WRITE;
				break;

			default:
			case RB_MTP_THREAD_ERROR_GET_TRACK:
				code = GST_RESOURCE_ERROR_READ;
				break;

			}

			GST_WARNING_OBJECT (src, "error: %s", src->download_error->message);
			gst_element_message_full (GST_ELEMENT (src),
						  GST_MESSAGE_ERROR,
						  GST_RESOURCE_ERROR, code,
						  g_strdup (src->download_error->message), NULL,
						  __FILE__, GST_FUNCTION, __LINE__);
			return FALSE;
		}
	}

	/* open file - maybe do this in create after waiting for it to finish downloading */
	src->fd = open (src->tempfile, O_RDONLY, 0);
	if (src->fd < 0) {
		switch (errno) {
		case ENOENT:
			GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND, (NULL),
					   ("Could not find temporary file"));
			break;
		default:
			GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
					   ("Could not open temporary file for reading"));
		}
		return FALSE;
	}

	src->read_position = 0;

	return TRUE;
}
Example #9
/***********************************************************************************
 * Seek implementation
 ***********************************************************************************/
static gboolean progress_buffer_perform_push_seek(ProgressBuffer *element, GstPad *pad, GstEvent *event)
{
    GstFormat    format;
    gdouble      rate;
    GstSeekFlags flags;
    GstSeekType  start_type, stop_type;
    gint64       position;
    GstSegment   segment;

    gst_event_parse_seek(event, &rate, &format, &flags, &start_type, &position, &stop_type, NULL);

    if (format != GST_FORMAT_BYTES || start_type != GST_SEEK_TYPE_SET)
        return FALSE;

    if (stop_type != GST_SEEK_TYPE_NONE)
    {
        gst_element_message_full(GST_ELEMENT(element),
            GST_MESSAGE_WARNING,
            GST_CORE_ERROR,
            GST_CORE_ERROR_SEEK, g_strdup("stop_type != GST_SEEK_TYPE_NONE. Seeking to stop is not supported."), NULL,
            ("progressbuffer.c"), ("progress_buffer_perform_push_seek"), 0);
        return FALSE;
    }

    if (flags & GST_SEEK_FLAG_FLUSH)
        gst_pad_push_event(pad, gst_event_new_flush_start());

    // Signal the task to stop if it's waiting.
    g_mutex_lock(&element->lock);
    element->srcresult = GST_FLOW_FLUSHING;
    g_cond_signal(&element->add_cond);
    g_mutex_unlock(&element->lock);

    GST_PAD_STREAM_LOCK(pad); // Wait for task to stop

    g_mutex_lock(&element->lock);
    element->srcresult = GST_FLOW_OK;

#ifdef ENABLE_SOURCE_SEEKING
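    // An "instant" seek stays within data that is already cached or expected to
    // arrive shortly (estimated from the measured bandwidth), so no upstream seek is needed.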
    element->instant_seek = (position >= element->sink_segment.start &&
                             (position - (gint64)element->sink_segment.position) <= element->bandwidth * element->wait_tolerance);

    if (element->instant_seek)
    {
        cache_set_read_position(element->cache, position - element->cache_read_offset);
        gst_segment_init(&segment, GST_FORMAT_BYTES);
        segment.rate = rate;
        segment.start = position;
        segment.stop = element->sink_segment.stop;
        segment.position = position;
        progress_buffer_set_pending_event(element, gst_event_new_segment(&segment));
    }
    else
    {
        // Clear any pending events, since we are doing a seek.
        reset_eos(element, TRUE);
    }
#else
    cache_set_read_position(element->cache, position - element->cache_read_offset);
    gst_segment_init(&segment, GST_FORMAT_BYTES);
    segment.rate = rate;
    segment.start = position;
    segment.stop = element->sink_segment.stop;
    segment.position = position;
    progress_buffer_set_pending_event(element, gst_event_new_segment(&segment));
#endif

    g_mutex_unlock(&element->lock);

#ifdef ENABLE_SOURCE_SEEKING
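    // If the target is not instantly reachable, forward the seek upstream;
    // if upstream refuses it, fall back to seeking within the existing cache.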
    if (!element->instant_seek)
    {
        element->is_source_seeking = TRUE;
        if (!gst_pad_push_event(element->sinkpad, gst_event_new_seek(rate, GST_FORMAT_BYTES, flags, GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_NONE, 0)))
        {
            element->instant_seek = TRUE;
            cache_set_read_position(element->cache, position - element->cache_read_offset);
            gst_segment_init(&segment, GST_FORMAT_BYTES);
            segment.rate = rate;
            segment.start = position;
            segment.stop = element->sink_segment.stop;
            segment.position = position;
            progress_buffer_set_pending_event(element, gst_event_new_segment(&segment));
        }
        element->is_source_seeking = FALSE;
    }
#endif

    if (flags & GST_SEEK_FLAG_FLUSH)
        gst_pad_push_event(pad, gst_event_new_flush_stop(TRUE));

    gst_pad_start_task(element->srcpad, progress_buffer_loop, element, NULL);
    GST_PAD_STREAM_UNLOCK(pad);

// INLINE - gst_event_unref()
    gst_event_unref(event);
    return TRUE;
}
Example #10
/**
 * progress_buffer_enqueue_item()
 *
 * Adds an item to the queue. Must be called in a locked context. The item may be an event or a data buffer.
 */
static GstFlowReturn progress_buffer_enqueue_item(ProgressBuffer *element, GstMiniObject *item)
{
    gboolean signal = FALSE;

    if (GST_IS_BUFFER (item))
    {
        gdouble elapsed;
        // update sink segment position
        element->sink_segment.position = GST_BUFFER_OFFSET(GST_BUFFER(item)) + gst_buffer_get_size (GST_BUFFER(item));

        if(element->sink_segment.stop < element->sink_segment.position) // This must never happen.
            return  GST_FLOW_ERROR;

        cache_write_buffer(element->cache, GST_BUFFER(item));

        elapsed = g_timer_elapsed(element->bandwidth_timer, NULL);
        element->subtotal += gst_buffer_get_size (GST_BUFFER(item));

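        // Refresh the bandwidth estimate roughly once per second.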
        if (elapsed > 1.0)
        {
            element->bandwidth = element->subtotal/elapsed;
            element->subtotal = 0;
            g_timer_start(element->bandwidth_timer);
        }

        // send buffer progress position up (used to track buffer fill, etc.)
        signal = send_position_message(element, signal);
    }
    else if (GST_IS_EVENT (item))
    {
        GstEvent *event = GST_EVENT_CAST (item);

        switch (GST_EVENT_TYPE (event))
        {
            case GST_EVENT_EOS:
                element->eos_status.eos = TRUE;
                if (element->sink_segment.position < element->sink_segment.stop)
                    element->sink_segment.stop = element->sink_segment.position;

                progress_buffer_set_pending_event(element, NULL);

                signal = send_position_message(element, TRUE);
                gst_event_unref(event); // INLINE - gst_event_unref()
                break;

            case GST_EVENT_SEGMENT:
            {
                GstSegment segment;

                element->unexpected = FALSE;

                gst_event_copy_segment (event, &segment);

                if (segment.format != GST_FORMAT_BYTES)
                {
                    gst_element_message_full(GST_ELEMENT(element), GST_MESSAGE_ERROR, GST_STREAM_ERROR, GST_STREAM_ERROR_FORMAT,
                                             g_strdup("GST_FORMAT_BYTES buffers expected."), NULL,
                                             ("progressbuffer.c"), ("progress_buffer_enqueue_item"), 0);
                    gst_event_unref(event); // INLINE - gst_event_unref()
                    return GST_FLOW_ERROR;
                 }

                if (segment.stop - segment.start <= 0)
                {
                    gst_element_message_full(GST_ELEMENT(element), GST_MESSAGE_ERROR, GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE,
                                             g_strdup("Only limited content is supported by progressbuffer."), NULL,
                                             ("progressbuffer.c"), ("progress_buffer_enqueue_item"), 0);
                    gst_event_unref(event); // INLINE - gst_event_unref()
                    return GST_FLOW_ERROR;
                }

                if ((segment.flags & GST_SEGMENT_FLAG_UPDATE) == GST_SEGMENT_FLAG_UPDATE) // Updating segments create new cache.
                {
                    if (element->cache)
                        destroy_cache(element->cache);

                    element->cache = create_cache();
                    if (!element->cache)
                    {
                        gst_element_message_full(GST_ELEMENT(element), GST_MESSAGE_ERROR, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_OPEN_READ_WRITE,
                                                 g_strdup("Couldn't create backing cache"), NULL,
                                                 ("progressbuffer.c"), ("progress_buffer_enqueue_item"), 0);
                        gst_event_unref(event); // INLINE - gst_event_unref()
                        return GST_FLOW_ERROR;
                    }
                }
                else
                {
                    cache_set_write_position(element->cache, 0);
                    cache_set_read_position(element->cache, 0);
                    element->cache_read_offset = segment.start;
                }

                gst_segment_copy_into (&segment, &element->sink_segment);
                progress_buffer_set_pending_event(element, event);
                element->instant_seek = TRUE;

                signal = send_position_message(element, TRUE);
                break;
            }

            default:
                gst_event_unref(event); // INLINE - gst_event_unref()
                break;
        }
    }

    if (signal)
        g_cond_signal(&element->add_cond);

    return GST_FLOW_OK;
}
Example #11
static void
rb_mtp_sink_handle_message (GstBin *bin, GstMessage *message)
{
    /* when we get an EOS message from the fdsink, close the fd and upload the
     * file to the device.
     */
    if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_EOS) {
        int fd;
        struct stat stat_buf;

        RBMTPSink *sink = RB_MTP_SINK (bin);

        /* fill in the file size and close the fd */
        g_object_get (sink->fdsink, "fd", &fd, NULL);
        fstat (fd, &stat_buf);
        sink->track->filesize = stat_buf.st_size;
        close (fd);

        rb_debug ("handling EOS from fdsink; file size is %" G_GUINT64_FORMAT, sink->track->filesize);

        /* we can just block waiting for mtp thread operations to finish here
         * as we're on a streaming thread.
         */
        g_mutex_lock (sink->upload_mutex);

        if (sink->folder_path != NULL) {
            /* find or create the target folder.
             * if this fails, we just upload to the default music folder
             * rather than giving up entirely.
             */
            sink->got_folder = FALSE;
            rb_mtp_thread_create_folder (sink->device_thread,
                                         (const char **)sink->folder_path,
                                         (RBMtpCreateFolderCallback) folder_callback,
                                         g_object_ref (sink),
                                         g_object_unref);
            while (sink->got_folder == FALSE) {
                g_cond_wait (sink->upload_cond, sink->upload_mutex);
            }
        }

        /* and upload the file */
        sink->upload_done = FALSE;
        rb_mtp_thread_upload_track (sink->device_thread,
                                    sink->track,
                                    sink->tempfile,
                                    (RBMtpUploadCallback) upload_callback,
                                    g_object_ref (sink),
                                    g_object_unref);

        while (sink->upload_done == FALSE) {
            g_cond_wait (sink->upload_cond, sink->upload_mutex);
        }
        g_mutex_unlock (sink->upload_mutex);

        /* post error message if the upload failed - this should get there before
         * this EOS message does, so it should work OK.
         */
        if (sink->upload_error != NULL) {
            int code;

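            /* map RBMtpThreadError codes onto GstResourceError codes */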
            switch (sink->upload_error->code) {
            case RB_MTP_THREAD_ERROR_NO_SPACE:
                code = GST_RESOURCE_ERROR_NO_SPACE_LEFT;
                break;

            default:
            case RB_MTP_THREAD_ERROR_SEND_TRACK:
                code = GST_RESOURCE_ERROR_WRITE;
                break;
            }

            GST_WARNING_OBJECT (sink, "error: %s", sink->upload_error->message);
            gst_element_message_full (GST_ELEMENT (sink),
                                      GST_MESSAGE_ERROR,
                                      GST_RESOURCE_ERROR, code,
                                      g_strdup (sink->upload_error->message), NULL,
                                      __FILE__, GST_FUNCTION, __LINE__);
        }
    }

    GST_BIN_CLASS (parent_class)->handle_message (bin, message);
}
/**
 * hls_progress_buffer_sink_event()
 *
 * Receives events from the sink pad (currently fed by javasource). When an event comes in,
 * we get at the HLSProgressBuffer* element associated with the pad and handle the event there.
 */
static gboolean hls_progress_buffer_sink_event(GstPad *pad, GstEvent *event)
{
    HLSProgressBuffer *element = HLS_PROGRESS_BUFFER(GST_PAD_PARENT(pad));
    gboolean ret = FALSE;

    switch (GST_EVENT_TYPE (event))
    {
    case GST_EVENT_NEWSEGMENT:
        {
            gboolean update;
            GstFormat format;
            gdouble rate, arate;
            gint64 start, stop, time;

            g_mutex_lock(element->lock);
            if (element->srcresult != GST_FLOW_OK)
            {
                // INLINE - gst_event_unref()
                gst_event_unref(event);
                g_mutex_unlock(element->lock);
                return TRUE;
            }
            g_mutex_unlock(element->lock);

            if (element->is_eos)
            {
                element->is_eos = FALSE;
                element->srcresult = GST_FLOW_OK;
                if (gst_pad_is_linked(element->srcpad))
                    gst_pad_start_task(element->srcpad, hls_progress_buffer_loop, element);
            }

            // In HLS mode, javasource sets 'time' to the correct position in time units, even if the segment is in byte units.
            // Not perfect, but it works.
            gst_event_parse_new_segment_full(event, &update, &rate, &arate, &format, &start, &stop, &time);
            // INLINE - gst_event_unref()
            gst_event_unref(event);
            ret = TRUE;

            if (stop - start <= 0)
            {
                gst_element_message_full(GST_ELEMENT(element), GST_MESSAGE_ERROR, GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE,
                                         g_strdup("Only limited content is supported by hlsprogressbuffer."), NULL,
                                         ("hlsprogressbuffer.c"), ("hls_progress_buffer_sink_event"), 0);
                return TRUE;
            }

            if (element->send_new_segment)
            {
                event = gst_event_new_new_segment(update, rate, GST_FORMAT_TIME, 0, -1, time);
                element->send_new_segment = FALSE;
                ret = gst_pad_push_event(element->srcpad, event);
            }

            // Get and prepare next write segment
            g_mutex_lock(element->lock);
            element->cache_write_index = (element->cache_write_index + 1) % NUM_OF_CACHED_SEGMENTS;
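            // Block until the next cache slot becomes writable again.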

            while (element->srcresult == GST_FLOW_OK && !element->cache_write_ready[element->cache_write_index])
            {
                g_mutex_unlock(element->lock);
                send_hls_full_message(element);
                g_mutex_lock(element->lock);
                g_cond_wait(element->del_cond, element->lock);
                if (element->srcresult != GST_FLOW_OK)
                {
                    g_mutex_unlock(element->lock);
                    return TRUE;
                }
            }
            element->cache_size[element->cache_write_index] = stop;
            element->cache_write_ready[element->cache_write_index] = FALSE;
            cache_set_write_position(element->cache[element->cache_write_index], 0);
            cache_set_read_position(element->cache[element->cache_write_index], 0);

            g_mutex_unlock(element->lock);

            send_hls_resume_message(element); // Send resume message for each segment
        }
        break;
    case GST_EVENT_FLUSH_START:
        g_mutex_lock(element->lock);
        element->is_flushing = TRUE;
        g_mutex_unlock(element->lock);

        ret = gst_pad_push_event(element->srcpad, event);
        hls_progress_buffer_flush_data(element);

        if (gst_pad_is_linked(element->srcpad))
            gst_pad_pause_task(element->srcpad);

        break;
    case GST_EVENT_FLUSH_STOP:
        ret = gst_pad_push_event(element->srcpad, event);

        g_mutex_lock(element->lock);

        element->send_new_segment = TRUE;
        element->is_flushing = FALSE;
        element->srcresult = GST_FLOW_OK;
        
        if (!element->is_eos && gst_pad_is_linked(element->srcpad))
            gst_pad_start_task(element->srcpad, hls_progress_buffer_loop, element);

        g_mutex_unlock(element->lock);

        break;
    case GST_EVENT_EOS:
        send_hls_eos_message(element); // Just in case we stall

        g_mutex_lock(element->lock);
        element->is_eos = TRUE;
        g_cond_signal(element->add_cond);
        g_mutex_unlock(element->lock);
        // INLINE - gst_event_unref()
        gst_event_unref(event);
        ret = TRUE;

        break;
    default:
        ret = gst_pad_push_event(element->srcpad, event);
        break;
    }

    return ret;
}