Example #1
/* returns TRUE on success, with byte_offset set to the offset of the
 * wavpack chunk containing the sample requested. start_sample will be
 * set to the first sample in the chunk starting at byte_offset.
 * Scanning from the last known header offset to the wanted position
 * when seeking forward isn't very clever, but seems fast enough in
 * practice and has the nice side effect of populating our index
 * table */
static gboolean
gst_wavpack_parse_scan_to_find_sample (GstWavpackParse * parse,
    gint64 sample, gint64 * byte_offset, gint64 * start_sample)
{
  GstWavpackParseIndexEntry *entry;
  GstFlowReturn ret;
  gint64 off = 0;

  /* first, check if we have to scan at all */
  entry = gst_wavpack_parse_index_get_entry_from_sample (parse, sample);
  if (entry) {
    *byte_offset = entry->byte_offset;
    *start_sample = entry->sample_offset;
    GST_LOG_OBJECT (parse, "Found index entry: sample %" G_GINT64_FORMAT
        " @ offset %" G_GINT64_FORMAT, entry->sample_offset,
        entry->byte_offset);
    return TRUE;
  }

  GST_LOG_OBJECT (parse, "No matching entry in index, scanning file ...");

  /* if we have an index, we can start scanning from the last known offset
   * in there, after all we know our wanted sample is not in the index */
  if (parse->entries) {
    GstWavpackParseIndexEntry *entry;

    entry = gst_wavpack_parse_index_get_last_entry (parse);
    off = entry->byte_offset;
  }

  /* now scan forward until we find the chunk we're looking for or hit EOS */
  do {
    WavpackHeader header;
    GstBuffer *buf;

    buf = gst_wavpack_parse_pull_buffer (parse, off, sizeof (WavpackHeader),
        &ret);

    if (buf == NULL)
      break;

    gst_wavpack_read_header (&header, GST_BUFFER_DATA (buf));
    gst_buffer_unref (buf);

    /* only blocks with INITIAL_BLOCK set carry the index/sample info we
     * want; in any case advance off, otherwise we would re-read the same
     * header forever */
    if (header.flags & INITIAL_BLOCK) {
      gst_wavpack_parse_index_append_entry (parse, off, header.block_index,
          header.block_samples);

      if (header.block_index <= sample &&
          sample < (header.block_index + header.block_samples)) {
        *byte_offset = off;
        *start_sample = header.block_index;
        return TRUE;
      }
    }

    off += header.ckSize + 8;
  } while (1);

  GST_DEBUG_OBJECT (parse, "scan failed: %s (off=0x%08" G_GINT64_MODIFIER "x)",
      gst_flow_get_name (ret), off);

  return FALSE;
}
Example #2
static GstCaps *
gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
    GstCaps * filter)
{
  GstCaps *template_caps;
  GstCaps *allowed_caps;
  GstCaps *caps, *icaps;
  gboolean append_unrestricted;
  guint i;

  allowed_caps =
      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);

  if (allowed_caps == NULL)
    return NULL;

  template_caps =
      gst_static_pad_template_get_caps (&gst_rtp_h264_pay_sink_template);

  if (gst_caps_is_any (allowed_caps)) {
    caps = gst_caps_ref (template_caps);
    goto done;
  }

  if (gst_caps_is_empty (allowed_caps)) {
    caps = gst_caps_ref (allowed_caps);
    goto done;
  }

  caps = gst_caps_new_empty ();

  append_unrestricted = FALSE;
  for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
    GstStructure *s = gst_caps_get_structure (allowed_caps, i);
    GstStructure *new_s = gst_structure_new_empty ("video/x-h264");
    const gchar *profile_level_id;

    profile_level_id = gst_structure_get_string (s, "profile-level-id");

    if (profile_level_id && strlen (profile_level_id) == 6) {
      const gchar *profile;
      const gchar *level;
      long int spsint;
      guint8 sps[3];

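      /* per RFC 6184, profile-level-id is the base16 encoding of the three
       * SPS bytes profile_idc, profile-iop (constraint flags) and level_idc */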
      spsint = strtol (profile_level_id, NULL, 16);
      sps[0] = spsint >> 16;
      sps[1] = spsint >> 8;
      sps[2] = spsint;

      profile = gst_codec_utils_h264_get_profile (sps, 3);
      level = gst_codec_utils_h264_get_level (sps, 3);

      if (profile && level) {
        GST_LOG_OBJECT (payload, "In caps, have profile %s and level %s",
            profile, level);

        if (!strcmp (profile, "constrained-baseline"))
          gst_structure_set (new_s, "profile", G_TYPE_STRING, profile, NULL);
        else {
          GValue val = { 0, };
          GValue profiles = { 0, };

          g_value_init (&profiles, GST_TYPE_LIST);
          g_value_init (&val, G_TYPE_STRING);

          g_value_set_static_string (&val, profile);
          gst_value_list_append_value (&profiles, &val);

          g_value_set_static_string (&val, "constrained-baseline");
          gst_value_list_append_value (&profiles, &val);

          gst_structure_take_value (new_s, "profile", &profiles);
        }

        if (!strcmp (level, "1"))
          gst_structure_set (new_s, "level", G_TYPE_STRING, level, NULL);
        else {
          GValue levels = { 0, };
          GValue val = { 0, };
          int j;

          g_value_init (&levels, GST_TYPE_LIST);
          g_value_init (&val, G_TYPE_STRING);

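          /* advertise every level up to and including the signalled one:
           * a receiver that handles level X also handles all lower levels */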
          for (j = 0; j < G_N_ELEMENTS (all_levels); j++) {
            g_value_set_static_string (&val, all_levels[j]);
            gst_value_list_prepend_value (&levels, &val);
            if (!strcmp (level, all_levels[j]))
              break;
          }
          gst_structure_take_value (new_s, "level", &levels);
        }
      } else {
        /* Invalid profile-level-id means baseline */

        gst_structure_set (new_s,
            "profile", G_TYPE_STRING, "constrained-baseline", NULL);
      }
    } else {
      /* No profile-level-id means baseline or unrestricted */

      gst_structure_set (new_s,
          "profile", G_TYPE_STRING, "constrained-baseline", NULL);
      append_unrestricted = TRUE;
    }

    caps = gst_caps_merge_structure (caps, new_s);
  }

  if (append_unrestricted) {
    caps =
        gst_caps_merge_structure (caps, gst_structure_new ("video/x-h264",
            NULL, NULL));
  }

  /* intersect with our template caps and any downstream filter */
  icaps = gst_caps_intersect (caps, template_caps);
  gst_caps_unref (caps);
  caps = icaps;

done:
  if (filter) {
    GstCaps *tmp;

    tmp = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = tmp;
  }

  gst_caps_unref (template_caps);
  gst_caps_unref (allowed_caps);

  GST_LOG_OBJECT (payload, "returning caps %" GST_PTR_FORMAT, caps);

  return caps;
}
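Example #3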
static gboolean
gst_soup_http_client_sink_start (GstBaseSink * sink)
{
  GstSoupHttpClientSink *souphttpsink = GST_SOUP_HTTP_CLIENT_SINK (sink);

  if (souphttpsink->prop_session) {
    souphttpsink->session = souphttpsink->prop_session;
  } else {
    GSource *source;
    GError *error = NULL;

    souphttpsink->context = g_main_context_new ();

    /* set up idle source to signal when the main loop is running and
     * it's safe for ::stop() to call g_main_loop_quit() */
    source = g_idle_source_new ();
    g_source_set_callback (source, thread_ready_idle_cb, sink, NULL);
    g_source_attach (source, souphttpsink->context);
    g_source_unref (source);

    souphttpsink->loop = g_main_loop_new (souphttpsink->context, TRUE);

    g_mutex_lock (&souphttpsink->mutex);

    souphttpsink->thread = g_thread_try_new ("souphttpclientsink-thread",
        thread_func, souphttpsink, &error);

    if (error != NULL) {
      GST_DEBUG_OBJECT (souphttpsink, "failed to start thread, %s",
          error->message);
      g_error_free (error);
      g_mutex_unlock (&souphttpsink->mutex);
      return FALSE;
    }

    GST_LOG_OBJECT (souphttpsink, "waiting for main loop thread to start up");
    g_cond_wait (&souphttpsink->cond, &souphttpsink->mutex);
    g_mutex_unlock (&souphttpsink->mutex);
    GST_LOG_OBJECT (souphttpsink, "main loop thread running");

    if (souphttpsink->proxy == NULL) {
      souphttpsink->session =
          soup_session_async_new_with_options (SOUP_SESSION_ASYNC_CONTEXT,
          souphttpsink->context, SOUP_SESSION_USER_AGENT,
          souphttpsink->user_agent, SOUP_SESSION_TIMEOUT, souphttpsink->timeout,
          NULL);
    } else {
      souphttpsink->session =
          soup_session_async_new_with_options (SOUP_SESSION_ASYNC_CONTEXT,
          souphttpsink->context, SOUP_SESSION_USER_AGENT,
          souphttpsink->user_agent, SOUP_SESSION_TIMEOUT, souphttpsink->timeout,
          SOUP_SESSION_PROXY_URI, souphttpsink->proxy, NULL);
    }

    g_signal_connect (souphttpsink->session, "authenticate",
        G_CALLBACK (authenticate), souphttpsink);
  }

  /* Set up logging */
  gst_soup_util_log_setup (souphttpsink->session, souphttpsink->log_level,
      GST_ELEMENT (souphttpsink));

  return TRUE;
}
Example #4
static void
gst_gnome_vfs_src_received_headers_callback (gconstpointer in,
    gsize in_size, gpointer out, gsize out_size, gpointer callback_data)
{
  GList *i;
  gint icy_metaint;
  GstGnomeVFSSrc *src = GST_GNOME_VFS_SRC (callback_data);
  GnomeVFSModuleCallbackReceivedHeadersIn *in_args =
      (GnomeVFSModuleCallbackReceivedHeadersIn *) in;

  /* This is only used for internet radio stuff right now */
  if (!src->iradio_mode)
    return;

  GST_DEBUG_OBJECT (src, "receiving internet radio metadata\n");

  /* FIXME: Could we use "Accept-Ranges: bytes"
   * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.5
   * to enable pull-mode?
   */

  for (i = in_args->headers; i; i = i->next) {
    char *data = (char *) i->data;
    char *key = data;
    char *value = strchr (data, ':');

    if (!value)
      continue;

    value++;
    g_strstrip (value);
    if (!strlen (value))
      continue;

    GST_LOG_OBJECT (src, "data %s", data);

    /* Icecast stuff */
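    /* icy-metaint announces how many bytes of stream data sit between two
     * interleaved metadata blocks; expose it as caps so that a downstream
     * element can strip the metadata out again */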
    if (strncmp (data, "icy-metaint:", 12) == 0) {      /* ugh */
      if (sscanf (data + 12, "%d", &icy_metaint) == 1) {
        if (icy_metaint > 0) {
          GstCaps *icy_caps;

          icy_caps = gst_caps_new_simple ("application/x-icy",
              "metadata-interval", G_TYPE_INT, icy_metaint, NULL);
          gst_pad_set_caps (GST_BASE_SRC_PAD (src), icy_caps);
          gst_caps_unref (icy_caps);
        }
      }
      continue;
    }

    if (!strncmp (data, "icy-", 4))
      key = data + 4;
    else
      continue;

    GST_DEBUG_OBJECT (src, "key: %s", key);
    if (!strncmp (key, "name", 4)) {
      g_free (src->iradio_name);
      src->iradio_name = gst_gnome_vfs_src_unicodify (value);
      if (src->iradio_name)
        g_object_notify (G_OBJECT (src), "iradio-name");
    } else if (!strncmp (key, "genre", 5)) {
      g_free (src->iradio_genre);
      src->iradio_genre = gst_gnome_vfs_src_unicodify (value);
      if (src->iradio_genre)
        g_object_notify (G_OBJECT (src), "iradio-genre");
    } else if (!strncmp (key, "url", 3)) {
      g_free (src->iradio_url);
      src->iradio_url = gst_gnome_vfs_src_unicodify (value);
      if (src->iradio_url)
        g_object_notify (G_OBJECT (src), "iradio-url");
    }
  }
}
Example #5
static gboolean
gst_raw_parse_handle_seek_pull (GstRawParse * rp, GstEvent * event)
{
  gdouble rate;
  GstFormat format;
  GstSeekFlags flags;
  GstSeekType start_type, stop_type;
  gint64 start, stop;
  gint64 last_stop;
  gboolean ret = FALSE;
  gboolean flush;
  GstSegment seeksegment;

  if (event) {
    gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
        &stop_type, &stop);

    /* convert input offsets to time */
    ret = gst_raw_parse_convert (rp, format, start, GST_FORMAT_TIME, &start);
    ret &= gst_raw_parse_convert (rp, format, stop, GST_FORMAT_TIME, &stop);
    if (!ret)
      goto convert_failed;

    GST_DEBUG_OBJECT (rp, "converted start - stop to time");

    format = GST_FORMAT_TIME;

    gst_event_unref (event);
  } else {
    format = GST_FORMAT_TIME;
    flags = 0;
  }

  flush = ((flags & GST_SEEK_FLAG_FLUSH) != 0);

  /* start flushing up and downstream so that the loop function pauses and we
   * can acquire the STREAM_LOCK. */
  if (flush) {
    GST_LOG_OBJECT (rp, "flushing");
    gst_pad_push_event (rp->sinkpad, gst_event_new_flush_start ());
    gst_pad_push_event (rp->srcpad, gst_event_new_flush_start ());
  } else {
    GST_LOG_OBJECT (rp, "pause task");
    gst_pad_pause_task (rp->sinkpad);
  }

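  /* the loop function is now paused (or flushing), so taking the
   * STREAM_LOCK cannot deadlock and we can safely update the segment */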
  GST_PAD_STREAM_LOCK (rp->sinkpad);

  memcpy (&seeksegment, &rp->segment, sizeof (GstSegment));

  if (event) {
    /* configure the seek values */
    gst_segment_do_seek (&seeksegment, rate, format, flags,
        start_type, start, stop_type, stop, NULL);
  }

  /* get the desired position */
  last_stop = seeksegment.position;

  GST_LOG_OBJECT (rp, "seeking to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (last_stop));

  /* convert the desired position to bytes */
  ret =
      gst_raw_parse_convert (rp, format, last_stop, GST_FORMAT_BYTES,
      &last_stop);

  /* prepare for streaming */
  if (flush) {
    GST_LOG_OBJECT (rp, "stop flush");
    gst_pad_push_event (rp->sinkpad, gst_event_new_flush_stop (TRUE));
    gst_pad_push_event (rp->srcpad, gst_event_new_flush_stop (TRUE));
  }

  if (ret) {
    /* seek done */

    /* Seek on a frame boundary */
    last_stop -= last_stop % rp->framesize;

    rp->offset = last_stop;
    rp->n_frames = last_stop / rp->framesize;

    GST_LOG_OBJECT (rp, "seeking to bytes %" G_GINT64_FORMAT, last_stop);

    memcpy (&rp->segment, &seeksegment, sizeof (GstSegment));

    if (rp->segment.flags & GST_SEEK_FLAG_SEGMENT) {
      gst_element_post_message (GST_ELEMENT_CAST (rp),
          gst_message_new_segment_start (GST_OBJECT_CAST (rp),
              rp->segment.format, rp->segment.position));
    }

    /* for deriving a stop position for the playback segment from the seek
     * segment, we must take the duration when the stop is not set */
    if ((stop = rp->segment.stop) == -1)
      stop = rp->segment.duration;

    GST_DEBUG_OBJECT (rp, "preparing newsegment from %" G_GINT64_FORMAT
        " to %" G_GINT64_FORMAT, rp->segment.start, stop);

    /* now replace the old segment so that we send it in the stream thread the
     * next time it is scheduled. */
    if (rp->start_segment)
      gst_event_unref (rp->start_segment);
    rp->start_segment = gst_event_new_segment (&rp->segment);
  }
  rp->discont = TRUE;

  GST_LOG_OBJECT (rp, "start streaming");
  gst_pad_start_task (rp->sinkpad, (GstTaskFunction) gst_raw_parse_loop, rp,
      NULL);

  GST_PAD_STREAM_UNLOCK (rp->sinkpad);

  return ret;

  /* ERRORS */
convert_failed:
  {
    GST_DEBUG_OBJECT (rp, "Seek failed: couldn't convert to byte positions");
    return FALSE;
  }
}
Example #6
static GstFlowReturn
gst_tcp_server_src_create (GstPushSrc * psrc, GstBuffer ** outbuf)
{
  GstTCPServerSrc *src;
  GstFlowReturn ret = GST_FLOW_OK;
  gssize rret, avail;
  gsize read;
  GError *err = NULL;
  GstMapInfo map;

  src = GST_TCP_SERVER_SRC (psrc);

  if (!GST_OBJECT_FLAG_IS_SET (src, GST_TCP_SERVER_SRC_OPEN))
    goto wrong_state;

  if (!src->client_socket) {
    /* wait on server socket for connections */
    src->client_socket =
        g_socket_accept (src->server_socket, src->cancellable, &err);
    if (!src->client_socket)
      goto accept_error;
    /* now read from the socket. */
  }

  /* if we have a client, wait for read */
  GST_LOG_OBJECT (src, "asked for a buffer");

  /* check how much data is available to read */
  avail = g_socket_get_available_bytes (src->client_socket);
  if (avail < 0) {
    goto get_available_error;
  } else if (avail == 0) {
    GIOCondition condition;

    if (!g_socket_condition_wait (src->client_socket,
            G_IO_IN | G_IO_PRI | G_IO_ERR | G_IO_HUP, src->cancellable, &err))
      goto select_error;

    condition =
        g_socket_condition_check (src->client_socket,
        G_IO_IN | G_IO_PRI | G_IO_ERR | G_IO_HUP);

    if ((condition & G_IO_ERR)) {
      GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
          ("Socket in error state"));
      *outbuf = NULL;
      ret = GST_FLOW_ERROR;
      goto done;
    } else if ((condition & G_IO_HUP)) {
      GST_DEBUG_OBJECT (src, "Connection closed");
      *outbuf = NULL;
      ret = GST_FLOW_EOS;
      goto done;
    }
    avail = g_socket_get_available_bytes (src->client_socket);
    if (avail < 0)
      goto get_available_error;
  }

  if (avail > 0) {
    read = MIN (avail, MAX_READ_SIZE);
    *outbuf = gst_buffer_new_and_alloc (read);
    gst_buffer_map (*outbuf, &map, GST_MAP_READWRITE);
    rret =
        g_socket_receive (src->client_socket, (gchar *) map.data, read,
        src->cancellable, &err);
  } else {
    /* Connection closed */
    rret = 0;
    *outbuf = NULL;
    read = 0;
  }

  if (rret == 0) {
    GST_DEBUG_OBJECT (src, "Connection closed");
    ret = GST_FLOW_EOS;
    if (*outbuf) {
      gst_buffer_unmap (*outbuf, &map);
      gst_buffer_unref (*outbuf);
    }
    *outbuf = NULL;
  } else if (rret < 0) {
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
      ret = GST_FLOW_FLUSHING;
      GST_DEBUG_OBJECT (src, "Cancelled reading from socket");
    } else {
      ret = GST_FLOW_ERROR;
      GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
          ("Failed to read from socket: %s", err->message));
    }
    gst_buffer_unmap (*outbuf, &map);
    gst_buffer_unref (*outbuf);
    *outbuf = NULL;
  } else {
    ret = GST_FLOW_OK;
    gst_buffer_unmap (*outbuf, &map);
    gst_buffer_resize (*outbuf, 0, rret);

    GST_LOG_OBJECT (src,
        "Returning buffer from _get of size %" G_GSIZE_FORMAT ", ts %"
        GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT
        ", offset %" G_GINT64_FORMAT ", offset_end %" G_GINT64_FORMAT,
        gst_buffer_get_size (*outbuf),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*outbuf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (*outbuf)),
        GST_BUFFER_OFFSET (*outbuf), GST_BUFFER_OFFSET_END (*outbuf));
  }
  g_clear_error (&err);

done:
  return ret;

wrong_state:
  {
    GST_DEBUG_OBJECT (src, "connection to closed, cannot read data");
    return GST_FLOW_FLUSHING;
  }
accept_error:
  {
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
      GST_DEBUG_OBJECT (src, "Cancelled accepting of client");
    } else {
      GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
          ("Failed to accept client: %s", err->message));
    }
    g_clear_error (&err);
    return GST_FLOW_ERROR;
  }
select_error:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("Select failed: %s", err->message));
    g_clear_error (&err);
    return GST_FLOW_ERROR;
  }
get_available_error:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("Failed to get available bytes from socket"));
    return GST_FLOW_ERROR;
  }
}
Example #7
static GstFlowReturn
gst_spectrum_transform_ip (GstBaseTransform * trans, GstBuffer * buffer)
{
  GstSpectrum *spectrum = GST_SPECTRUM (trans);
  guint rate = GST_AUDIO_FILTER_RATE (spectrum);
  guint channels = GST_AUDIO_FILTER_CHANNELS (spectrum);
  guint bps = GST_AUDIO_FILTER_BPS (spectrum);
  guint bpf = GST_AUDIO_FILTER_BPF (spectrum);
  guint output_channels = spectrum->multi_channel ? channels : 1;
  guint c;
  gfloat max_value = (1UL << ((bps << 3) - 1)) - 1;
  guint bands = spectrum->bands;
  guint nfft = 2 * bands - 2;
  guint input_pos;
  gfloat *input;
  GstMapInfo map;
  const guint8 *data;
  gsize size;
  guint fft_todo, msg_todo, block_size;
  gboolean have_full_interval;
  GstSpectrumChannel *cd;
  GstSpectrumInputData input_data;

  g_mutex_lock (&spectrum->lock);
  gst_buffer_map (buffer, &map, GST_MAP_READ);
  data = map.data;
  size = map.size;

  GST_LOG_OBJECT (spectrum, "input size: %" G_GSIZE_FORMAT " bytes", size);

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG_OBJECT (spectrum, "Discontinuity detected -- flushing");
    gst_spectrum_flush (spectrum);
  }

  /* If we don't have a FFT context yet (or it was reset due to parameter
   * changes) get one and allocate memory for everything
   */
  if (spectrum->channel_data == NULL) {
    GST_DEBUG_OBJECT (spectrum, "allocating for bands %u", bands);

    gst_spectrum_alloc_channel_data (spectrum);

    /* number of sample frames we process before posting a message
     * interval is in ns */
    spectrum->frames_per_interval =
        gst_util_uint64_scale (spectrum->interval, rate, GST_SECOND);
    spectrum->frames_todo = spectrum->frames_per_interval;
    /* rounding error for frames_per_interval in ns,
     * aggregated it in accumulated_error */
    spectrum->error_per_interval = (spectrum->interval * rate) % GST_SECOND;
    if (spectrum->frames_per_interval == 0)
      spectrum->frames_per_interval = 1;

    GST_INFO_OBJECT (spectrum, "interval %" GST_TIME_FORMAT ", fpi %"
        G_GUINT64_FORMAT ", error %" GST_TIME_FORMAT,
        GST_TIME_ARGS (spectrum->interval), spectrum->frames_per_interval,
        GST_TIME_ARGS (spectrum->error_per_interval));

    spectrum->input_pos = 0;

    gst_spectrum_flush (spectrum);
  }

  if (spectrum->num_frames == 0)
    spectrum->message_ts = GST_BUFFER_TIMESTAMP (buffer);

  input_pos = spectrum->input_pos;
  input_data = spectrum->input_data;

  while (size >= bpf) {
    /* run input_data for a chunk of data */
    fft_todo = nfft - (spectrum->num_frames % nfft);
    msg_todo = spectrum->frames_todo - spectrum->num_frames;
    GST_LOG_OBJECT (spectrum,
        "message frames todo: %u, fft frames todo: %u, input frames %"
        G_GSIZE_FORMAT, msg_todo, fft_todo, (size / bpf));
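    /* process whichever limit is reached first: the frames remaining in
     * this buffer, the frames until the FFT window is full, or the frames
     * until the next message is due */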
    block_size = msg_todo;
    if (block_size > (size / bpf))
      block_size = (size / bpf);
    if (block_size > fft_todo)
      block_size = fft_todo;

    for (c = 0; c < output_channels; c++) {
      cd = &spectrum->channel_data[c];
      input = cd->input;
      /* Move the current frames into our ringbuffers */
      input_data (data + c * bps, input, block_size, channels, max_value,
          input_pos, nfft);
    }
    data += block_size * bpf;
    size -= block_size * bpf;
    input_pos = (input_pos + block_size) % nfft;
    spectrum->num_frames += block_size;

    have_full_interval = (spectrum->num_frames == spectrum->frames_todo);

    GST_LOG_OBJECT (spectrum,
        "size: %" G_GSIZE_FORMAT ", do-fft = %d, do-message = %d", size,
        (spectrum->num_frames % nfft == 0), have_full_interval);

    /* If we have enough frames for an FFT or we have all frames required for
     * the interval and we haven't run a FFT, then run an FFT */
    if ((spectrum->num_frames % nfft == 0) ||
        (have_full_interval && !spectrum->num_fft)) {
      for (c = 0; c < output_channels; c++) {
        cd = &spectrum->channel_data[c];
        gst_spectrum_run_fft (spectrum, cd, input_pos);
      }
      spectrum->num_fft++;
    }

    /* Do we have the FFTs for one interval? */
    if (have_full_interval) {
      GST_DEBUG_OBJECT (spectrum, "nfft: %u frames: %" G_GUINT64_FORMAT
          " fpi: %" G_GUINT64_FORMAT " error: %" GST_TIME_FORMAT, nfft,
          spectrum->num_frames, spectrum->frames_per_interval,
          GST_TIME_ARGS (spectrum->accumulated_error));

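      /* distribute the rounding error of frames_per_interval: the error is
       * accumulated in units of 1/GST_SECOND frames, so each time it reaches
       * GST_SECOND this interval gets one extra frame */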
      spectrum->frames_todo = spectrum->frames_per_interval;
      if (spectrum->accumulated_error >= GST_SECOND) {
        spectrum->accumulated_error -= GST_SECOND;
        spectrum->frames_todo++;
      }
      spectrum->accumulated_error += spectrum->error_per_interval;

      if (spectrum->post_messages) {
        GstMessage *m;

        for (c = 0; c < output_channels; c++) {
          cd = &spectrum->channel_data[c];
          gst_spectrum_prepare_message_data (spectrum, cd);
        }

        m = gst_spectrum_message_new (spectrum, spectrum->message_ts,
            spectrum->interval);

        gst_element_post_message (GST_ELEMENT (spectrum), m);
      }

      if (GST_CLOCK_TIME_IS_VALID (spectrum->message_ts))
        spectrum->message_ts +=
            gst_util_uint64_scale (spectrum->num_frames, GST_SECOND, rate);

      for (c = 0; c < output_channels; c++) {
        cd = &spectrum->channel_data[c];
        gst_spectrum_reset_message_data (spectrum, cd);
      }
      spectrum->num_frames = 0;
      spectrum->num_fft = 0;
    }
  }

  spectrum->input_pos = input_pos;

  gst_buffer_unmap (buffer, &map);
  g_mutex_unlock (&spectrum->lock);

  g_assert (size == 0);

  return GST_FLOW_OK;
}
Example #8
static GstFlowReturn
vorbis_handle_data_packet (GstVorbisDec * vd, ogg_packet * packet,
                           GstClockTime timestamp, GstClockTime duration)
{
#ifdef USE_TREMOLO
    vorbis_sample_t *pcm;
#else
    vorbis_sample_t **pcm;
#endif
    guint sample_count;
    GstBuffer *out = NULL;
    GstFlowReturn result;
    gint size;

    if (G_UNLIKELY (!vd->initialized)) {
        result = vorbis_dec_handle_header_caps (vd);
        if (result != GST_FLOW_OK)
            goto not_initialized;
    }

    /* normal data packet */
    /* FIXME, we can skip decoding if the packet is outside of the
     * segment, this is however not very trivial as we need a previous
     * packet to decode the current one so we must be careful not to
     * throw away too much. For now we decode everything and clip right
     * before pushing data. */

#ifdef USE_TREMOLO
    if (G_UNLIKELY (vorbis_dsp_synthesis (&vd->vd, packet, 1)))
        goto could_not_read;
#else
    if (G_UNLIKELY (vorbis_synthesis (&vd->vb, packet)))
        goto could_not_read;

    if (G_UNLIKELY (vorbis_synthesis_blockin (&vd->vd, &vd->vb) < 0))
        goto not_accepted;
#endif

    /* assume all goes well here */
    result = GST_FLOW_OK;

    /* count samples ready for reading */
#ifdef USE_TREMOLO
    if ((sample_count = vorbis_dsp_pcmout (&vd->vd, NULL, 0)) == 0)
#else
    if ((sample_count = vorbis_synthesis_pcmout (&vd->vd, NULL)) == 0)
#endif
        goto done;

    size = sample_count * vd->vi.channels * vd->width;
    GST_LOG_OBJECT (vd, "%d samples ready for reading, size %d", sample_count,
                    size);

    /* alloc buffer for it */
    result =
        gst_pad_alloc_buffer_and_set_caps (GST_AUDIO_DECODER_SRC_PAD (vd),
                                           GST_BUFFER_OFFSET_NONE, size,
                                           GST_PAD_CAPS (GST_AUDIO_DECODER_SRC_PAD (vd)), &out);
    if (G_UNLIKELY (result != GST_FLOW_OK))
        goto done;

    /* get samples ready for reading now, should be sample_count */
#ifdef USE_TREMOLO
    pcm = GST_BUFFER_DATA (out);
    if (G_UNLIKELY (vorbis_dsp_pcmout (&vd->vd, pcm, sample_count) !=
                    sample_count))
#else
    if (G_UNLIKELY (vorbis_synthesis_pcmout (&vd->vd, &pcm) != sample_count))
#endif
        goto wrong_samples;

#ifndef USE_TREMOLO
    /* copy samples in buffer */
    vd->copy_samples ((vorbis_sample_t *) GST_BUFFER_DATA (out), pcm,
                      sample_count, vd->vi.channels, vd->width);
#endif

    GST_LOG_OBJECT (vd, "setting output size to %d", size);
    GST_BUFFER_SIZE (out) = size;

done:
    /* whether or not data produced, consume one frame and advance time */
    result = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (vd), out, 1);

#ifdef USE_TREMOLO
    vorbis_dsp_read (&vd->vd, sample_count);
#else
    vorbis_synthesis_read (&vd->vd, sample_count);
#endif

    return result;

    /* ERRORS */
not_initialized:
    {
        GST_ELEMENT_ERROR (GST_ELEMENT (vd), STREAM, DECODE,
                           (NULL), ("no header sent yet"));
        return GST_FLOW_NOT_NEGOTIATED;
    }
could_not_read:
    {
        GST_ELEMENT_ERROR (GST_ELEMENT (vd), STREAM, DECODE,
                           (NULL), ("couldn't read data packet"));
        return GST_FLOW_ERROR;
    }
not_accepted:
    {
        GST_ELEMENT_ERROR (GST_ELEMENT (vd), STREAM, DECODE,
                           (NULL), ("vorbis decoder did not accept data packet"));
        return GST_FLOW_ERROR;
    }
wrong_samples:
    {
        gst_buffer_unref (out);
        GST_ELEMENT_ERROR (GST_ELEMENT (vd), STREAM, DECODE,
                           (NULL), ("vorbis decoder reported wrong number of samples"));
        return GST_FLOW_ERROR;
    }
}
Example #9
static GstFlowReturn
vorbis_dec_handle_frame (GstAudioDecoder * dec, GstBuffer * buffer)
{
    ogg_packet *packet;
    ogg_packet_wrapper packet_wrapper;
    GstFlowReturn result = GST_FLOW_OK;
    GstVorbisDec *vd = GST_VORBIS_DEC (dec);

    /* no draining etc */
    if (G_UNLIKELY (!buffer))
        return GST_FLOW_OK;

    /* make ogg_packet out of the buffer */
    gst_ogg_packet_wrapper_from_buffer (&packet_wrapper, buffer);
    packet = gst_ogg_packet_from_wrapper (&packet_wrapper);
    /* set some more stuff */
    packet->granulepos = -1;
    packet->packetno = 0;         /* we don't care */
    /* EOS does not matter, it is used in vorbis to implement clipping the last
     * block of samples based on the granulepos. We clip based on segments. */
    packet->e_o_s = 0;

    GST_LOG_OBJECT (vd, "decode buffer of size %ld", packet->bytes);

    /* error out on empty header packets, but just skip empty data packets */
    if (G_UNLIKELY (packet->bytes == 0)) {
        if (vd->initialized)
            goto empty_buffer;
        else
            goto empty_header;
    }

    /* switch depending on packet type */
    if ((gst_ogg_packet_data (packet))[0] & 1) {
        if (vd->initialized) {
            GST_WARNING_OBJECT (vd, "Already initialized, so ignoring header packet");
            goto done;
        }
        result = vorbis_handle_header_packet (vd, packet);
        /* consume the header packet/frame */
        gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (vd), NULL, 1);
    } else {
        GstClockTime timestamp, duration;

        timestamp = GST_BUFFER_TIMESTAMP (buffer);
        duration = GST_BUFFER_DURATION (buffer);

        result = vorbis_handle_data_packet (vd, packet, timestamp, duration);
    }

done:
    return result;

empty_buffer:
    {
        /* don't error out here, just ignore the buffer, it's invalid for vorbis
         * but not fatal. */
        GST_WARNING_OBJECT (vd, "empty buffer received, ignoring");
        result = GST_FLOW_OK;
        goto done;
    }

    /* ERRORS */
empty_header:
    {
        GST_ELEMENT_ERROR (vd, STREAM, DECODE, (NULL), ("empty header received"));
        result = GST_FLOW_ERROR;
        goto done;
    }
}
Example #10
static GstFlowReturn
fs_funnel_chain (GstPad * pad, GstBuffer * buffer)
{
  GstFlowReturn res;
  FsFunnel *funnel = FS_FUNNEL (gst_pad_get_parent (pad));
  FsFunnelPadPrivate *priv = gst_pad_get_element_private (pad);
  GstEvent *event = NULL;
  GstClockTime newts;
  GstCaps *padcaps;

  GST_DEBUG_OBJECT (funnel, "received buffer %p", buffer);

  GST_OBJECT_LOCK (funnel);
  if (priv->segment.format == GST_FORMAT_UNDEFINED) {
    GST_WARNING_OBJECT (funnel, "Got buffer without segment,"
        " setting segment [0,inf[");
    gst_segment_set_newsegment_full (&priv->segment, FALSE, 1.0, 1.0,
        GST_FORMAT_TIME, 0, -1, 0);
  }

  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (buffer)))
    gst_segment_set_last_stop (&priv->segment, priv->segment.format,
        GST_BUFFER_TIMESTAMP (buffer));

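  /* rewrite the timestamp as running time so that buffers arriving from
   * different sink pads line up on the single segment we push downstream */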
  newts = gst_segment_to_running_time (&priv->segment,
      priv->segment.format, GST_BUFFER_TIMESTAMP (buffer));
  if (newts != GST_BUFFER_TIMESTAMP (buffer)) {
    buffer = gst_buffer_make_metadata_writable (buffer);
    GST_BUFFER_TIMESTAMP (buffer) = newts;
  }

  if (!funnel->has_segment)
  {
    event = gst_event_new_new_segment_full (FALSE, 1.0, 1.0, GST_FORMAT_TIME,
        0, -1, 0);
    funnel->has_segment = TRUE;
  }
  GST_OBJECT_UNLOCK (funnel);

  if (event) {
    if (!gst_pad_push_event (funnel->srcpad, event))
      GST_WARNING_OBJECT (funnel, "Could not push out newsegment event");
  }


  GST_OBJECT_LOCK (pad);
  padcaps = GST_PAD_CAPS (funnel->srcpad);
  GST_OBJECT_UNLOCK (pad);

  if (GST_BUFFER_CAPS (buffer) && GST_BUFFER_CAPS (buffer) != padcaps) {
    if (!gst_pad_set_caps (funnel->srcpad, GST_BUFFER_CAPS (buffer))) {
      res = GST_FLOW_NOT_NEGOTIATED;
      goto out;
    }
  }

  res = gst_pad_push (funnel->srcpad, buffer);

  GST_LOG_OBJECT (funnel, "handled buffer %s", gst_flow_get_name (res));

 out:
  gst_object_unref (funnel);

  return res;
}
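Example #11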
static GstFlowReturn gst_openh264dec_handle_frame(GstVideoDecoder *decoder, GstVideoCodecFrame *frame)
{
    GstOpenh264Dec *openh264dec = GST_OPENH264DEC(decoder);
    GstMapInfo map_info;
    GstVideoCodecState *state;
    SBufferInfo dst_buf_info;
    DECODING_STATE ret;
    guint8 *yuvdata[3];
    GstFlowReturn flow_status;
    GstVideoFrame video_frame;
    guint actual_width, actual_height;
    guint i;
    guint8 *p;
    guint row_stride, component_width, component_height, src_width, row;

    if (frame) {
        if (!gst_buffer_map(frame->input_buffer, &map_info, GST_MAP_READ)) {
            GST_ERROR_OBJECT(openh264dec, "Cannot map input buffer!");
            return GST_FLOW_ERROR;
        }

        GST_LOG_OBJECT(openh264dec, "handle frame, %d", map_info.size > 4 ? map_info.data[4] & 0x1f : -1);

        memset (&dst_buf_info, 0, sizeof (SBufferInfo));
        ret = openh264dec->priv->decoder->DecodeFrame2(map_info.data, map_info.size, yuvdata, &dst_buf_info);

        if (ret == dsNoParamSets) {
            GST_DEBUG_OBJECT(openh264dec, "Requesting a key unit");
            gst_pad_push_event(GST_VIDEO_DECODER_SINK_PAD(decoder),
                gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
        }

        if (ret != dsErrorFree && ret != dsNoParamSets) {
            GST_DEBUG_OBJECT(openh264dec, "Requesting a key unit");
            gst_pad_push_event(GST_VIDEO_DECODER_SINK_PAD(decoder),
                               gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
            GST_LOG_OBJECT(openh264dec, "error decoding nal, return code: %d", ret);
        }

        gst_buffer_unmap(frame->input_buffer, &map_info);
        gst_video_codec_frame_unref (frame);
        frame = NULL;
    } else {
        memset (&dst_buf_info, 0, sizeof (SBufferInfo));
        ret = openh264dec->priv->decoder->DecodeFrame2(NULL, 0, yuvdata, &dst_buf_info);
        if (ret != dsErrorFree)
            return GST_FLOW_EOS;
    }

    /* FIXME: openh264 has no way for us to get a connection
     * between the input and output frames, we just have to
     * guess based on the input. Fortunately openh264 can
     * only do baseline profile. */
    frame = gst_video_decoder_get_oldest_frame (decoder);
    if (!frame) {
      /* Can only happen in finish() */
      return GST_FLOW_EOS;
    }

    /* No output available yet */
    if (dst_buf_info.iBufferStatus != 1) {
        return (frame ? GST_FLOW_OK : GST_FLOW_EOS);
    }

    actual_width  = dst_buf_info.UsrData.sSystemBuffer.iWidth;
    actual_height = dst_buf_info.UsrData.sSystemBuffer.iHeight;

    if (!gst_pad_has_current_caps (GST_VIDEO_DECODER_SRC_PAD (openh264dec))
            || actual_width != openh264dec->priv->width
            || actual_height != openh264dec->priv->height) {
        state = gst_video_decoder_set_output_state(decoder,
            GST_VIDEO_FORMAT_I420,
            actual_width,
            actual_height,
            openh264dec->priv->input_state);
        openh264dec->priv->width = actual_width;
        openh264dec->priv->height = actual_height;

        if (!gst_video_decoder_negotiate(decoder)) {
            GST_ERROR_OBJECT(openh264dec, "Failed to negotiate with downstream elements");
            return GST_FLOW_NOT_NEGOTIATED;
        }
    } else {
        state = gst_video_decoder_get_output_state(decoder);
    }

    flow_status = gst_video_decoder_allocate_output_frame(decoder, frame);
    if (flow_status != GST_FLOW_OK) {
        gst_video_codec_state_unref (state);
        return flow_status;
    }

    if (!gst_video_frame_map(&video_frame, &state->info, frame->output_buffer, GST_MAP_WRITE)) {
        GST_ERROR_OBJECT(openh264dec, "Cannot map output buffer!");
        gst_video_codec_state_unref (state);
        return GST_FLOW_ERROR;
    }

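    /* copy the decoded picture plane by plane and row by row, since the
     * decoder's strides may differ from those of the negotiated output */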
    for (i = 0; i < 3; i++) {
        p = GST_VIDEO_FRAME_COMP_DATA(&video_frame, i);
        row_stride = GST_VIDEO_FRAME_COMP_STRIDE(&video_frame, i);
        component_width = GST_VIDEO_FRAME_COMP_WIDTH(&video_frame, i);
        component_height = GST_VIDEO_FRAME_COMP_HEIGHT(&video_frame, i);
        src_width = i < 1 ? dst_buf_info.UsrData.sSystemBuffer.iStride[0] : dst_buf_info.UsrData.sSystemBuffer.iStride[1];
        for (row = 0; row < component_height; row++) {
            memcpy(p, yuvdata[i], component_width);
            p += row_stride;
            yuvdata[i] += src_width;
        }
    }
    gst_video_codec_state_unref (state);
    gst_video_frame_unmap(&video_frame);

    return gst_video_decoder_finish_frame(decoder, frame);
}
Example #12
static gboolean
rsn_audiomunge_sink_event (GstPad * pad, GstEvent * event)
{
  gboolean ret = FALSE;
  RsnAudioMunge *munge = RSN_AUDIOMUNGE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      rsn_audiomunge_reset (munge);
      ret = gst_pad_push_event (munge->srcpad, event);
      break;
    case GST_EVENT_NEWSEGMENT:
    {
      GstSegment *segment;
      gboolean update;
      GstFormat format;
      gdouble rate, arate;
      gint64 start, stop, time;

      gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
          &start, &stop, &time);

      /* we need TIME format */
      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      /* now configure the values */
      segment = &munge->sink_segment;

      gst_segment_set_newsegment_full (segment, update,
          rate, arate, format, start, stop, time);

      /*
       * FIXME:
       * If this is a segment update and accum >= threshold,
       * or we're in a still frame and there's been no audio received,
       * then we need to generate some audio data.
       *
       * If caused by a segment start update (time advancing in a gap) adjust
       * the new-segment and send the buffer.
       *
       * Otherwise, send the buffer before the newsegment, so that it appears
       * in the closing segment.
       */
      if (!update) {
        GST_DEBUG_OBJECT (munge,
            "Sending newsegment: update %d start %" GST_TIME_FORMAT " stop %"
            GST_TIME_FORMAT " accum now %" GST_TIME_FORMAT, update,
            GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
            GST_TIME_ARGS (segment->accum));

        ret = gst_pad_push_event (munge->srcpad, event);
      }

      if (!munge->have_audio) {
        if ((update && segment->accum >= AUDIO_FILL_THRESHOLD)
            || munge->in_still) {
          GST_DEBUG_OBJECT (munge,
              "Sending audio fill with ts %" GST_TIME_FORMAT ": accum = %"
              GST_TIME_FORMAT " still-state=%d", GST_TIME_ARGS (segment->start),
              GST_TIME_ARGS (segment->accum), munge->in_still);

          /* Just generate a 200ms silence buffer for now. FIXME: Fill the gap */
          if (rsn_audiomunge_make_audio (munge, segment->start,
                  GST_SECOND / 5) == GST_FLOW_OK)
            munge->have_audio = TRUE;
        } else {
          GST_LOG_OBJECT (munge, "Not sending audio fill buffer: "
              "Not segment update, or segment accum below thresh: accum = %"
              GST_TIME_FORMAT, GST_TIME_ARGS (segment->accum));
        }
      }

      if (update) {
        GST_DEBUG_OBJECT (munge,
            "Sending newsegment: update %d start %" GST_TIME_FORMAT " stop %"
            GST_TIME_FORMAT " accum now %" GST_TIME_FORMAT, update,
            GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
            GST_TIME_ARGS (segment->accum));

        ret = gst_pad_push_event (munge->srcpad, event);
      }

      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      gboolean in_still;

      if (gst_video_event_parse_still_frame (event, &in_still)) {
        /* Remember the still-frame state, so we can generate a pre-roll
         * buffer when a new-segment arrives */
        munge->in_still = in_still;
        GST_INFO_OBJECT (munge, "AUDIO MUNGE: still-state now %d",
            munge->in_still);
      }

      ret = gst_pad_push_event (munge->srcpad, event);
      break;
    }
    default:
      ret = gst_pad_push_event (munge->srcpad, event);
      break;
  }

  gst_object_unref (munge);
  return ret;

newseg_wrong_format:

  GST_DEBUG_OBJECT (munge, "received non TIME newsegment");
  gst_event_unref (event);
  gst_object_unref (munge);
  return FALSE;
}
Example #13
static GstFlowReturn
gst_wavpack_parse_push_buffer (GstWavpackParse * wvparse, GstBuffer * buf,
    WavpackHeader * header)
{
  GstFlowReturn ret;

  wvparse->current_offset += header->ckSize + 8;
  wvparse->segment.last_stop = header->block_index;

  if (wvparse->need_newsegment) {
    if (gst_wavpack_parse_send_newsegment (wvparse, FALSE))
      wvparse->need_newsegment = FALSE;
  }

  /* send any queued events */
  if (wvparse->queued_events) {
    GList *l;

    for (l = wvparse->queued_events; l != NULL; l = l->next) {
      gst_pad_push_event (wvparse->srcpad, GST_EVENT (l->data));
    }
    g_list_free (wvparse->queued_events);
    wvparse->queued_events = NULL;
  }

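  /* WavPack streams with more than two channels are split into several
   * sub-blocks sharing the same block_index; join them and only push once
   * the sub-block carrying the FINAL_BLOCK flag has arrived */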
  if (wvparse->pending_buffer == NULL) {
    wvparse->pending_buffer = buf;
    wvparse->pending_offset = header->block_index;
  } else if (wvparse->pending_offset == header->block_index) {
    wvparse->pending_buffer = gst_buffer_join (wvparse->pending_buffer, buf);
  } else {
    GST_ERROR ("Got incomplete block, dropping");
    gst_buffer_unref (wvparse->pending_buffer);
    wvparse->pending_buffer = buf;
    wvparse->pending_offset = header->block_index;
  }

  if (!(header->flags & FINAL_BLOCK))
    return GST_FLOW_OK;

  buf = wvparse->pending_buffer;
  wvparse->pending_buffer = NULL;

  GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale_int (header->block_index,
      GST_SECOND, wvparse->samplerate);
  GST_BUFFER_DURATION (buf) = gst_util_uint64_scale_int (header->block_samples,
      GST_SECOND, wvparse->samplerate);
  GST_BUFFER_OFFSET (buf) = header->block_index;
  GST_BUFFER_OFFSET_END (buf) = header->block_index + header->block_samples;

  if (wvparse->discont || wvparse->next_block_index != header->block_index) {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    wvparse->discont = FALSE;
  }

  wvparse->next_block_index = header->block_index + header->block_samples;

  gst_buffer_set_caps (buf, GST_PAD_CAPS (wvparse->srcpad));

  GST_LOG_OBJECT (wvparse, "Pushing buffer with time %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  ret = gst_pad_push (wvparse->srcpad, buf);

  wvparse->segment.last_stop = wvparse->next_block_index;

  return ret;
}
Example #14
static gboolean
gst_wavpack_parse_create_src_pad (GstWavpackParse * wvparse, GstBuffer * buf,
    WavpackHeader * header)
{
  GstWavpackMetadata meta;
  GstCaps *caps = NULL;
  guchar *bufptr;

  g_assert (wvparse->srcpad == NULL);

  bufptr = GST_BUFFER_DATA (buf) + sizeof (WavpackHeader);

  while (gst_wavpack_read_metadata (&meta, GST_BUFFER_DATA (buf), &bufptr)) {
    switch (meta.id) {
      case ID_WVC_BITSTREAM:{
        caps = gst_caps_new_simple ("audio/x-wavpack-correction",
            "framed", G_TYPE_BOOLEAN, TRUE, NULL);
        wvparse->srcpad =
            gst_pad_new_from_template (gst_element_class_get_pad_template
            (GST_ELEMENT_GET_CLASS (wvparse), "wvcsrc"), "wvcsrc");
        break;
      }
      case ID_WV_BITSTREAM:
      case ID_WVX_BITSTREAM:{
        WavpackStreamReader *stream_reader = gst_wavpack_stream_reader_new ();

        WavpackContext *wpc;

        gchar error_msg[80];

        read_id rid;

        gint channel_mask;

        rid.buffer = GST_BUFFER_DATA (buf);
        rid.length = GST_BUFFER_SIZE (buf);
        rid.position = 0;

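        /* open the first block with a custom stream reader just to probe
         * sample rate, channel count and bit depth for the src caps */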
        wpc =
            WavpackOpenFileInputEx (stream_reader, &rid, NULL, error_msg, 0, 0);

        if (!wpc)
          return FALSE;

        wvparse->samplerate = WavpackGetSampleRate (wpc);
        wvparse->channels = WavpackGetNumChannels (wpc);
        wvparse->total_samples =
            (header->total_samples ==
            0xffffffff) ? G_GINT64_CONSTANT (-1) : header->total_samples;

        caps = gst_caps_new_simple ("audio/x-wavpack",
            "width", G_TYPE_INT, WavpackGetBitsPerSample (wpc),
            "channels", G_TYPE_INT, wvparse->channels,
            "rate", G_TYPE_INT, wvparse->samplerate,
            "framed", G_TYPE_BOOLEAN, TRUE, NULL);
#ifdef WAVPACK_OLD_API
        channel_mask = wpc->config.channel_mask;
#else
        channel_mask = WavpackGetChannelMask (wpc);
#endif
        if (channel_mask == 0)
          channel_mask =
              gst_wavpack_get_default_channel_mask (wvparse->channels);

        if (channel_mask != 0) {
          if (!gst_wavpack_set_channel_layout (caps, channel_mask)) {
            GST_WARNING_OBJECT (wvparse, "Failed to set channel layout");
            gst_caps_unref (caps);
            caps = NULL;
            WavpackCloseFile (wpc);
            g_free (stream_reader);
            break;
          }
        }

        wvparse->srcpad =
            gst_pad_new_from_template (gst_element_class_get_pad_template
            (GST_ELEMENT_GET_CLASS (wvparse), "src"), "src");
        WavpackCloseFile (wpc);
        g_free (stream_reader);
        break;
      }
      default:{
        GST_LOG_OBJECT (wvparse, "unhandled ID: 0x%02x", meta.id);
        break;
      }
    }
    if (caps != NULL)
      break;
  }

  if (caps == NULL || wvparse->srcpad == NULL)
    return FALSE;

  GST_DEBUG_OBJECT (wvparse, "Added src pad with caps %" GST_PTR_FORMAT, caps);

  gst_pad_set_query_function (wvparse->srcpad,
      GST_DEBUG_FUNCPTR (gst_wavpack_parse_src_query));
  gst_pad_set_query_type_function (wvparse->srcpad,
      GST_DEBUG_FUNCPTR (gst_wavpack_parse_get_src_query_types));
  gst_pad_set_event_function (wvparse->srcpad,
      GST_DEBUG_FUNCPTR (gst_wavpack_parse_src_event));

  gst_pad_set_caps (wvparse->srcpad, caps);
  gst_caps_unref (caps);
  gst_pad_use_fixed_caps (wvparse->srcpad);

  gst_object_ref (wvparse->srcpad);
  gst_pad_set_active (wvparse->srcpad, TRUE);
  gst_element_add_pad (GST_ELEMENT (wvparse), wvparse->srcpad);
  gst_element_no_more_pads (GST_ELEMENT (wvparse));

  return TRUE;
}
Example #15
GstFlowReturn
gst_vdp_video_yuv_transform (GstBaseTransform * trans, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (trans);
  GstVdpDevice *device;
  VdpVideoSurface surface;

  device = GST_VDP_VIDEO_BUFFER (inbuf)->device;
  surface = GST_VDP_VIDEO_BUFFER (inbuf)->surface;

  switch (video_yuv->format) {
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
    {
      VdpStatus status;
      guint8 *data[3];
      guint32 stride[3];

      data[0] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          0, video_yuv->width, video_yuv->height);
      data[1] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          2, video_yuv->width, video_yuv->height);
      data[2] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          1, video_yuv->width, video_yuv->height);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          0, video_yuv->width);
      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          2, video_yuv->width);
      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          1, video_yuv->width);

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
    {
      VdpStatus status;
      guint8 *data[3];
      guint32 stride[3];

      data[0] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          0, video_yuv->width, video_yuv->height);
      data[1] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          2, video_yuv->width, video_yuv->height);
      data[2] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          1, video_yuv->width, video_yuv->height);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          0, video_yuv->width);
      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          2, video_yuv->width);
      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          1, video_yuv->width);

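      /* VDP_YCBCR_FORMAT_YV12 writes the planes in Y, V, U order; since
       * data[1]/data[2] point at the V/U component offsets of the I420
       * buffer, the planes land in the right places */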
      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
    {
      VdpStatus status;
      guint8 *data[2];
      guint32 stride[2];

      data[0] = GST_BUFFER_DATA (outbuf);
      data[1] = GST_BUFFER_DATA (outbuf) + video_yuv->width * video_yuv->height;

      stride[0] = video_yuv->width;
      stride[1] = video_yuv->width;

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_NV12, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
    {
      VdpStatus status;
      guint8 *data[1];
      guint32 stride[1];

      data[0] = GST_BUFFER_DATA (outbuf);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_UYVY,
          0, video_yuv->width);

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_UYVY, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
    {
      VdpStatus status;
      guint8 *data[1];
      guint32 stride[1];

      data[0] = GST_BUFFER_DATA (outbuf);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YUY2,
          0, video_yuv->width);

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_YUYV, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    default:
      break;
  }

  gst_buffer_copy_metadata (outbuf, inbuf,
      GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);

  GST_LOG_OBJECT (video_yuv, "Pushing buffer with ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));

  return GST_FLOW_OK;
}
Example #16
static void
src_task_loop (GstPad * pad)
{
  GstDtlsEnc *self = GST_DTLS_ENC (GST_PAD_PARENT (pad));
  GstFlowReturn ret;
  GstBuffer *buffer;
  gboolean check_connection_timeout = FALSE;

  GST_TRACE_OBJECT (self, "src loop: acquiring lock");
  g_mutex_lock (&self->queue_lock);
  GST_TRACE_OBJECT (self, "src loop: acquired lock");

  if (self->flushing) {
    GST_LOG_OBJECT (self, "src task loop entered on inactive pad");
    GST_TRACE_OBJECT (self, "src loop: releasing lock");
    g_mutex_unlock (&self->queue_lock);
    return;
  }

  while (g_queue_is_empty (&self->queue)) {
    GST_TRACE_OBJECT (self, "src loop: queue empty, waiting for add");
    g_cond_wait (&self->queue_cond_add, &self->queue_lock);
    GST_TRACE_OBJECT (self, "src loop: add signaled");

    if (self->flushing) {
      GST_LOG_OBJECT (self, "pad inactive, task returning");
      GST_TRACE_OBJECT (self, "src loop: releasing lock");
      g_mutex_unlock (&self->queue_lock);
      return;
    }
  }
  GST_TRACE_OBJECT (self, "src loop: queue has element");

  buffer = g_queue_pop_head (&self->queue);

  GST_TRACE_OBJECT (self, "src loop: releasing lock");
  g_mutex_unlock (&self->queue_lock);

  if (self->send_initial_events) {
    GstSegment segment;
    gchar s_id[32];
    GstCaps *caps;

    self->send_initial_events = FALSE;

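    /* very first output: stream-start, caps and segment events are
     * mandatory before any buffer can be pushed */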
    g_snprintf (s_id, sizeof (s_id), "dtlsenc-%08x", g_random_int ());
    gst_pad_push_event (self->src, gst_event_new_stream_start (s_id));
    caps = gst_caps_new_empty_simple ("application/x-dtls");
    gst_pad_push_event (self->src, gst_event_new_caps (caps));
    gst_caps_unref (caps);
    gst_segment_init (&segment, GST_FORMAT_BYTES);
    gst_pad_push_event (self->src, gst_event_new_segment (&segment));
    check_connection_timeout = TRUE;
  }

  GST_TRACE_OBJECT (self, "src loop: releasing lock");

  ret = gst_pad_push (self->src, buffer);
  if (check_connection_timeout)
    gst_dtls_connection_check_timeout (self->connection);

  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_WARNING_OBJECT (self, "failed to push buffer on src pad: %s",
        gst_flow_get_name (ret));
  }
}
Example #17
static GstBuffer *
gst_rtp_vraw_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
  GstRtpVRawDepay *rtpvrawdepay;
  guint8 *payload, *p0, *yp, *up, *vp, *headers;
  guint32 timestamp;
  guint cont, ystride, uvstride, pgroup, payload_len;
  gint width, height, xinc, yinc;
  GstRTPBuffer rtp = { NULL };
  GstVideoFrame *frame;
  gboolean marker;
  GstBuffer *outbuf = NULL;

  rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);

  timestamp = gst_rtp_buffer_get_timestamp (&rtp);

  if (timestamp != rtpvrawdepay->timestamp || rtpvrawdepay->outbuf == NULL) {
    GstBuffer *new_buffer;
    GstFlowReturn ret;

    GST_LOG_OBJECT (depayload, "new frame with timestamp %u", timestamp);
    /* new timestamp, flush old buffer and create new output buffer */
    if (rtpvrawdepay->outbuf) {
      gst_video_frame_unmap (&rtpvrawdepay->frame);
      gst_rtp_base_depayload_push (depayload, rtpvrawdepay->outbuf);
      rtpvrawdepay->outbuf = NULL;
    }

    if (gst_pad_check_reconfigure (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload))) {
      GstCaps *caps;

      caps =
          gst_pad_get_current_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
      gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
          &rtpvrawdepay->vinfo);
      gst_caps_unref (caps);
    }

    ret =
        gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &new_buffer, NULL);

    if (G_UNLIKELY (ret != GST_FLOW_OK))
      goto alloc_failed;

    /* clear timestamp from alloc... */
    GST_BUFFER_PTS (new_buffer) = -1;

    if (!gst_video_frame_map (&rtpvrawdepay->frame, &rtpvrawdepay->vinfo,
            new_buffer, GST_MAP_WRITE | GST_VIDEO_FRAME_MAP_FLAG_NO_REF)) {
      gst_buffer_unref (new_buffer);
      goto invalid_frame;
    }

    rtpvrawdepay->outbuf = new_buffer;
    rtpvrawdepay->timestamp = timestamp;
  }

  frame = &rtpvrawdepay->frame;

  g_assert (frame->buffer != NULL);

  /* get pointer and strides of the planes */
  p0 = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  yp = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  up = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
  vp = GST_VIDEO_FRAME_COMP_DATA (frame, 2);

  ystride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  uvstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1);

  pgroup = rtpvrawdepay->pgroup;
  width = GST_VIDEO_INFO_WIDTH (&rtpvrawdepay->vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&rtpvrawdepay->vinfo);
  xinc = rtpvrawdepay->xinc;
  yinc = rtpvrawdepay->yinc;

  payload = gst_rtp_buffer_get_payload (&rtp);
  payload_len = gst_rtp_buffer_get_payload_len (&rtp);

  if (payload_len < 3)
    goto short_packet;

  /* skip extended seqnum */
  payload += 2;
  payload_len -= 2;

  /* remember header position */
  headers = payload;

  /* find data start */
  do {
    if (payload_len < 6)
      goto short_packet;

    cont = payload[4] & 0x80;

    payload += 6;
    payload_len -= 6;
  } while (cont);

  while (TRUE) {
    guint length, line, offs, plen;
    guint8 *datap;

    /* stop when we run out of data */
    if (payload_len == 0)
      break;

    /* read length, line, offset and the continuation flag. This is safe
     * because we already iterated over the headers above. */
    length = (headers[0] << 8) | headers[1];
    line = ((headers[2] & 0x7f) << 8) | headers[3];
    offs = ((headers[4] & 0x7f) << 8) | headers[5];
    cont = headers[4] & 0x80;
    headers += 6;

    /* length must be a multiple of pgroup */
    if (length % pgroup != 0)
      goto wrong_length;

    if (length > payload_len)
      length = payload_len;

    /* sanity check */
    if (line > (height - yinc)) {
      GST_WARNING_OBJECT (depayload, "skipping line %d: out of range", line);
      goto next;
    }
    if (offs > (width - xinc)) {
      GST_WARNING_OBJECT (depayload, "skipping offset %d: out of range", offs);
      goto next;
    }

    /* calculate the maximum number of bytes we can use per line */
    if (offs + ((length / pgroup) * xinc) > width) {
      plen = ((width - offs) * pgroup) / xinc;
      GST_WARNING_OBJECT (depayload, "clipping length %d, offset %d, plen %d",
          length, offs, plen);
    } else
      plen = length;

    GST_LOG_OBJECT (depayload,
        "writing length %u/%u, line %u, offset %u, remaining %u", plen, length,
        line, offs, payload_len);

    switch (GST_VIDEO_INFO_FORMAT (&rtpvrawdepay->vinfo)) {
      case GST_VIDEO_FORMAT_RGB:
      case GST_VIDEO_FORMAT_RGBA:
      case GST_VIDEO_FORMAT_BGR:
      case GST_VIDEO_FORMAT_BGRA:
      case GST_VIDEO_FORMAT_UYVY:
      case GST_VIDEO_FORMAT_UYVP:
        /* samples are packed just like gstreamer packs them */
        offs /= xinc;
        datap = p0 + (line * ystride) + (offs * pgroup);

        memcpy (datap, payload, plen);
        break;
      case GST_VIDEO_FORMAT_AYUV:
      {
        gint i;
        guint8 *p;

        datap = p0 + (line * ystride) + (offs * 4);
        p = payload;

        /* samples are packed in order Cb-Y-Cr for both interlaced and
         * progressive frames */
        for (i = 0; i < plen; i += pgroup) {
          *datap++ = 0;
          *datap++ = p[1];
          *datap++ = p[0];
          *datap++ = p[2];
          p += pgroup;
        }
        break;
      }
      case GST_VIDEO_FORMAT_I420:
      {
        gint i;
        guint uvoff;
        guint8 *yd1p, *yd2p, *udp, *vdp, *p;

        yd1p = yp + (line * ystride) + (offs);
        yd2p = yd1p + ystride;
        uvoff = (line / yinc * uvstride) + (offs / xinc);

        udp = up + uvoff;
        vdp = vp + uvoff;
        p = payload;

        /* line 0/1: Y00-Y01-Y10-Y11-Cb00-Cr00 Y02-Y03-Y12-Y13-Cb01-Cr01 ...  */
        for (i = 0; i < plen; i += pgroup) {
          *yd1p++ = p[0];
          *yd1p++ = p[1];
          *yd2p++ = p[2];
          *yd2p++ = p[3];
          *udp++ = p[4];
          *vdp++ = p[5];
          p += pgroup;
        }
        break;
      }
      case GST_VIDEO_FORMAT_Y41B:
      {
        gint i;
        guint uvoff;
        guint8 *ydp, *udp, *vdp, *p;

        ydp = yp + (line * ystride) + (offs);
        uvoff = (line / yinc * uvstride) + (offs / xinc);

        udp = up + uvoff;
        vdp = vp + uvoff;
        p = payload;

        /* Samples are packed in order Cb0-Y0-Y1-Cr0-Y2-Y3 for both interlaced
         * and progressive scan lines */
        for (i = 0; i < plen; i += pgroup) {
          *udp++ = p[0];
          *ydp++ = p[1];
          *ydp++ = p[2];
          *vdp++ = p[3];
          *ydp++ = p[4];
          *ydp++ = p[5];
          p += pgroup;
        }
        break;
      }
      default:
        goto unknown_sampling;
    }

  next:
    if (!cont)
      break;

    payload += length;
    payload_len -= length;
  }

  marker = gst_rtp_buffer_get_marker (&rtp);
  gst_rtp_buffer_unmap (&rtp);

  if (marker) {
    GST_LOG_OBJECT (depayload, "marker, flushing frame");
    gst_video_frame_unmap (&rtpvrawdepay->frame);
    outbuf = rtpvrawdepay->outbuf;
    rtpvrawdepay->outbuf = NULL;
    rtpvrawdepay->timestamp = -1;
  }
  return outbuf;

  /* ERRORS */
unknown_sampling:
  {
    GST_ELEMENT_ERROR (depayload, STREAM, FORMAT,
        (NULL), ("unimplemented sampling"));
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
alloc_failed:
  {
    GST_WARNING_OBJECT (depayload, "failed to alloc output buffer");
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
invalid_frame:
  {
    GST_ERROR_OBJECT (depayload, "could not map video frame");
    /* the rtp buffer is still mapped on this path, unmap it like the
     * other error paths do */
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
wrong_length:
  {
    GST_WARNING_OBJECT (depayload, "length not multiple of pgroup");
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
short_packet:
  {
    GST_WARNING_OBJECT (depayload, "short packet");
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
}
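The 6-byte line headers walked in the do/while loop above follow RFC 4175:
a 16-bit length, an F (field) bit plus a 15-bit line number, and a C
(continuation) bit plus a 15-bit offset, all big-endian. A minimal
stand-alone sketch of that layout (the struct and parser below are
illustrative, not part of the element):

#include <glib.h>

typedef struct
{
  guint length;                 /* payload bytes for this line segment */
  guint line;                   /* scan line number */
  guint offs;                   /* pixel offset within the line */
  gboolean field;               /* F bit: second field of an interlaced frame */
  gboolean cont;                /* C bit: another line header follows */
} RawLineHeader;

static gboolean
parse_line_header (const guint8 * p, gsize avail, RawLineHeader * h)
{
  if (avail < 6)
    return FALSE;               /* short packet */

  h->length = (p[0] << 8) | p[1];
  h->field = (p[2] & 0x80) != 0;
  h->line = ((p[2] & 0x7f) << 8) | p[3];
  h->cont = (p[4] & 0x80) != 0;
  h->offs = ((p[4] & 0x7f) << 8) | p[5];
  return TRUE;
}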
Exemplo n.º 18
0
static GstFlowReturn
gst_dirac_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  int off;
  guint32 next_header;
  GstMapInfo map;
  guint8 *data;
  gsize size;
  gboolean have_picture = FALSE;
  int offset;
  guint framesize = 0;

  gst_buffer_map (frame->buffer, &map, GST_MAP_READ);
  data = map.data;
  size = map.size;

  if (G_UNLIKELY (size < 13)) {
    *skipsize = 1;
    goto out;
  }

  GST_DEBUG ("%" G_GSIZE_FORMAT ": %02x %02x %02x %02x", size, data[0], data[1],
      data[2], data[3]);

  if (GST_READ_UINT32_BE (data) != 0x42424344) {
    GstByteReader reader;

    gst_byte_reader_init (&reader, data, size);
    off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
        0x42424344, 0, size);

    if (off < 0) {
      *skipsize = size - 3;
      goto out;
    }

    GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);

    GST_DEBUG ("skipping %d", off);
    *skipsize = off;
    goto out;
  }

  /* have sync, parse chunks */

  offset = 0;
  while (!have_picture) {
    GST_DEBUG ("offset %d:", offset);

    if (offset + 13 >= size) {
      framesize = offset + 13;
      goto out;
    }

    GST_DEBUG ("chunk type %02x", data[offset + 4]);

    if (GST_READ_UINT32_BE (data + offset) != 0x42424344) {
      GST_DEBUG ("bad header");
      *skipsize = 3;
      goto out;
    }

    next_header = GST_READ_UINT32_BE (data + offset + 5);
    GST_DEBUG ("next_header %d", next_header);
    if (next_header == 0)
      next_header = 13;

    if (SCHRO_PARSE_CODE_IS_PICTURE (data[offset + 4])) {
      have_picture = TRUE;
    }

    offset += next_header;
    if (offset >= size) {
      framesize = offset;
      goto out;
    }
  }

  framesize = offset;
  GST_DEBUG ("framesize %d", framesize);

  g_assert (framesize <= size);

  if (data[4] == SCHRO_PARSE_CODE_SEQUENCE_HEADER) {
    GstCaps *caps;
    GstDiracParse *diracparse = GST_DIRAC_PARSE (parse);
    DiracSequenceHeader sequence_header;
    int ret;

    ret = dirac_sequence_header_parse (&sequence_header, data + 13, size - 13);
    if (ret) {
      memcpy (&diracparse->sequence_header, &sequence_header,
          sizeof (sequence_header));
      caps = gst_caps_new_simple ("video/x-dirac",
          "width", G_TYPE_INT, sequence_header.width,
          "height", G_TYPE_INT, sequence_header.height,
          "framerate", GST_TYPE_FRACTION,
          sequence_header.frame_rate_numerator,
          sequence_header.frame_rate_denominator,
          "pixel-aspect-ratio", GST_TYPE_FRACTION,
          sequence_header.aspect_ratio_numerator,
          sequence_header.aspect_ratio_denominator,
          "interlace-mode", G_TYPE_STRING,
          sequence_header.interlaced ? "interleaved" : "progressive",
          "profile", G_TYPE_STRING, get_profile_name (sequence_header.profile),
          "level", G_TYPE_STRING, get_level_name (sequence_header.level), NULL);
      gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
      gst_caps_unref (caps);

      gst_base_parse_set_frame_rate (parse,
          sequence_header.frame_rate_numerator,
          sequence_header.frame_rate_denominator, 0, 0);
    }
  }

  gst_buffer_unmap (frame->buffer, &map);

  gst_base_parse_set_min_frame_size (parse, 13);

  return gst_base_parse_finish_frame (parse, frame, framesize);

out:
  gst_buffer_unmap (frame->buffer, &map);
  if (framesize)
    gst_base_parse_set_min_frame_size (parse, framesize);
  return GST_FLOW_OK;
}
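The magic constants above come from the 13-byte Dirac parse-info header:
a 4-byte "BBCD" prefix (0x42424344), one parse-code byte, and two
big-endian 32-bit offsets. A small sketch, assuming only the layout the
parser itself relies on:

#include <gst/gst.h>

typedef struct
{
  guint8 parse_code;            /* data[4], tested with SCHRO_PARSE_CODE_* */
  guint32 next_parse_offset;    /* data[5..8], 0 means "use minimum of 13" */
  guint32 prev_parse_offset;    /* data[9..12] */
} DiracParseInfo;

static gboolean
parse_info_read (const guint8 * data, gsize size, DiracParseInfo * pi)
{
  if (size < 13 || GST_READ_UINT32_BE (data) != 0x42424344)
    return FALSE;

  pi->parse_code = data[4];
  pi->next_parse_offset = GST_READ_UINT32_BE (data + 5);
  pi->prev_parse_offset = GST_READ_UINT32_BE (data + 9);
  return TRUE;
}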
Exemplo n.º 19
0
static void
gst_aspect_ratio_transform_structure (GstAspectRatioCrop * aspect_ratio_crop,
    GstStructure * structure, GstStructure ** new_structure,
    gboolean set_videocrop)
{
  gdouble incoming_ar;
  gdouble requested_ar;
  gint width, height;
  gint cropvalue;
  gint par_d, par_n;

  /* Check if we need to change the aspect ratio */
  if (aspect_ratio_crop->ar_num < 1) {
    GST_DEBUG_OBJECT (aspect_ratio_crop, "No cropping requested");
    goto beach;
  }

  /* get the information from the caps */
  if (!gst_structure_get_int (structure, "width", &width) ||
      !gst_structure_get_int (structure, "height", &height))
    goto beach;

  if (!gst_structure_get_fraction (structure, "pixel-aspect-ratio",
          &par_n, &par_d)) {
    par_d = par_n = 1;
  }

  incoming_ar = ((gdouble) (width * par_n)) / (height * par_d);
  GST_LOG_OBJECT (aspect_ratio_crop,
      "incoming caps width(%d), height(%d), par (%d/%d) : ar = %f", width,
      height, par_n, par_d, incoming_ar);

  requested_ar =
      (gdouble) aspect_ratio_crop->ar_num / aspect_ratio_crop->ar_denom;

  /* check if the original aspect-ratio is the aspect-ratio that we want */
  if (requested_ar == incoming_ar) {
    GST_DEBUG_OBJECT (aspect_ratio_crop,
        "Input video already has the correct aspect ratio (%.3f == %.3f)",
        incoming_ar, requested_ar);
    goto beach;
  } else if (requested_ar > incoming_ar) {
    /* fix aspect ratio with cropping on top and bottom */
    cropvalue =
        ((((double) aspect_ratio_crop->ar_denom /
                (double) (aspect_ratio_crop->ar_num)) * ((double) par_n /
                (double) par_d) * width) - height) / 2;
    if (cropvalue < 0) {
      cropvalue *= -1;
    }
    if (cropvalue >= (height / 2))
      goto crop_failed;
    if (set_videocrop) {
      gst_aspect_ratio_crop_set_cropping (aspect_ratio_crop, cropvalue, 0,
          cropvalue, 0);
    }
    if (new_structure) {
      *new_structure = gst_structure_copy (structure);
      gst_structure_set (*new_structure,
          "height", G_TYPE_INT, (int) (height - (cropvalue * 2)), NULL);
    }
  } else {
    /* fix aspect ratio with cropping on left and right */
    cropvalue =
        ((((double) aspect_ratio_crop->ar_num /
                (double) (aspect_ratio_crop->ar_denom)) * ((double) par_d /
                (double) par_n) * height) - width) / 2;
    if (cropvalue < 0) {
      cropvalue *= -1;
    }
    if (cropvalue >= (width / 2))
      goto crop_failed;
    if (set_videocrop) {
      gst_aspect_ratio_crop_set_cropping (aspect_ratio_crop, 0, cropvalue,
          0, cropvalue);
    }
    if (new_structure) {
      *new_structure = gst_structure_copy (structure);
      gst_structure_set (*new_structure,
          "width", G_TYPE_INT, (int) (width - (cropvalue * 2)), NULL);
    }
  }

  return;

crop_failed:
  GST_WARNING_OBJECT (aspect_ratio_crop,
      "can't crop to aspect ratio requested");
  goto beach;
beach:
  if (set_videocrop) {
    gst_aspect_ratio_crop_set_cropping (aspect_ratio_crop, 0, 0, 0, 0);
  }

  if (new_structure) {
    *new_structure = gst_structure_copy (structure);
  }
}
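A worked instance of the left/right-crop branch above, assuming a
1920x1080 input with square pixels (par 1/1) and a requested 4:3 aspect
ratio, so requested_ar (1.333) < incoming_ar (1.778):

#include <stdio.h>

int
main (void)
{
  int width = 1920, height = 1080;
  int par_n = 1, par_d = 1;
  int ar_num = 4, ar_denom = 3;

  /* (4/3 * 1/1 * 1080 - 1920) / 2 = (1440 - 1920) / 2 = -240 */
  int cropvalue = ((((double) ar_num / (double) ar_denom) *
          ((double) par_d / (double) par_n) * height) - width) / 2;

  if (cropvalue < 0)
    cropvalue = -cropvalue;     /* 240 */

  /* 1920 - 2 * 240 = 1440, and 1440:1080 is exactly 4:3 */
  printf ("crop %d px on each side -> width %d\n", cropvalue,
      width - 2 * cropvalue);
  return 0;
}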
Exemplo n.º 20
0
static GstFlowReturn
gst_jasper_enc_get_data (GstJasperEnc * enc, guint8 * data, GstBuffer ** outbuf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  jas_stream_t *stream = NULL;
  gint i;
  guint size, boxsize;

  g_return_val_if_fail (outbuf != NULL, GST_FLOW_ERROR);

  *outbuf = NULL;

  boxsize = (enc->mode == GST_JP2ENC_MODE_J2C) ? 8 : 0;

  if (!(stream = jas_stream_memopen (NULL, 0)))
    goto fail_stream;

  for (i = 0; i < enc->channels; ++i) {
    gint x, y, cwidth, cheight, inc, stride, cmpt;
    guint8 *row_pix, *in_pix;
    glong *tb;

    cmpt = i;
    inc = enc->inc[i];
    stride = enc->stride[i];
    cheight = enc->cheight[cmpt];
    cwidth = enc->cwidth[cmpt];

    GST_LOG_OBJECT (enc,
        "write component %d<=%d, size %dx%d, offset %d, inc %d, stride %d",
        i, cmpt, cwidth, cheight, enc->offset[i], inc, stride);

    row_pix = data + enc->offset[i];

    for (y = 0; y < cheight; y++) {
      in_pix = row_pix;
      tb = enc->buf;
      for (x = 0; x < cwidth; x++) {
        *tb = *in_pix;
        in_pix += inc;
        tb++;
      }
      if (jas_image_writecmpt2 (enc->image, cmpt, 0, y, cwidth, 1, enc->buf))
        goto fail_image;
      row_pix += stride;
    }
  }

  GST_LOG_OBJECT (enc, "all components written");

  if (jas_image_encode (enc->image, stream, enc->fmt, ""))
    goto fail_encode;

  GST_LOG_OBJECT (enc, "image encoded");

  size = jas_stream_length (stream);
  ret = gst_pad_alloc_buffer_and_set_caps (enc->srcpad,
      GST_BUFFER_OFFSET_NONE, size + boxsize, GST_PAD_CAPS (enc->srcpad),
      outbuf);

  if (ret != GST_FLOW_OK)
    goto no_buffer;

  data = GST_BUFFER_DATA (*outbuf);
  if (jas_stream_flush (stream) ||
      jas_stream_rewind (stream) < 0 ||
      jas_stream_read (stream, data + boxsize, size) < size)
    goto fail_image_out;

  if (boxsize) {
    /* write atom prefix */
    GST_WRITE_UINT32_BE (data, size + 8);
    GST_WRITE_UINT32_LE (data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
  }

done:
  if (stream)
    jas_stream_close (stream);

  return ret;

  /* ERRORS */
fail_stream:
  {
    GST_DEBUG_OBJECT (enc, "Failed to create inputstream.");
    goto fail;
  }
fail_encode:
  {
    GST_DEBUG_OBJECT (enc, "Failed to encode image.");
    goto fail;
  }
fail_image:
  {
    GST_DEBUG_OBJECT (enc, "Failed to process input image.");
    goto fail;
  }
fail_image_out:
  {
    GST_DEBUG_OBJECT (enc, "Failed to process encoded image.");
    goto fail;
  }
fail:
  {
    if (*outbuf)
      gst_buffer_unref (*outbuf);
    *outbuf = NULL;
    GST_ELEMENT_ERROR (enc, STREAM, ENCODE, (NULL), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }
no_buffer:
  {
    GST_DEBUG_OBJECT (enc, "Failed to create outbuffer - %s",
        gst_flow_get_name (ret));
    goto done;
  }
}
Exemplo n.º 21
0
/**
 * gst_bus_timed_pop_filtered:
 * @bus: a #GstBus to pop from
 * @timeout: a timeout in nanoseconds, or GST_CLOCK_TIME_NONE to wait forever
 * @types: message types to take into account, GST_MESSAGE_ANY for any type
 *
 * Get a message from the bus whose type matches the message type mask @types,
 * waiting up to the specified timeout (and discarding any messages that do not
 * match the mask provided).
 *
 * If @timeout is 0, this function behaves like gst_bus_pop_filtered(). If
 * @timeout is #GST_CLOCK_TIME_NONE, this function will block forever until a
 * matching message is posted on the bus.
 *
 * Returns: (transfer full) (nullable): a #GstMessage matching the
 *     filter in @types, or %NULL if no matching message was found on
 *     the bus until the timeout expired. The message is taken from
 *     the bus and needs to be unreffed with gst_message_unref() after
 *     usage.
 *
 * MT safe.
 */
GstMessage *
gst_bus_timed_pop_filtered (GstBus * bus, GstClockTime timeout,
    GstMessageType types)
{
  GstMessage *message;
  GTimeVal now, then;
  gboolean first_round = TRUE;
  GstClockTime elapsed = 0;

  g_return_val_if_fail (GST_IS_BUS (bus), NULL);
  g_return_val_if_fail (types != 0, NULL);
  g_return_val_if_fail (timeout == 0 || bus->priv->poll != NULL, NULL);

  g_mutex_lock (&bus->priv->queue_lock);

  while (TRUE) {
    gint ret;

    GST_LOG_OBJECT (bus, "have %d messages",
        gst_atomic_queue_length (bus->priv->queue));

    while ((message = gst_atomic_queue_pop (bus->priv->queue))) {
      if (bus->priv->poll)
        gst_poll_read_control (bus->priv->poll);

      GST_DEBUG_OBJECT (bus, "got message %p, %s from %s, type mask is %u",
          message, GST_MESSAGE_TYPE_NAME (message),
          GST_MESSAGE_SRC_NAME (message), (guint) types);
      if ((GST_MESSAGE_TYPE (message) & types) != 0) {
        /* Extra check to ensure extended types don't get matched unless
         * asked for */
        if ((!GST_MESSAGE_TYPE_IS_EXTENDED (message))
            || (types & GST_MESSAGE_EXTENDED)) {
          /* exit the loop, we have a message */
          goto beach;
        }
      }

      GST_DEBUG_OBJECT (bus, "discarding message, does not match mask");
      gst_message_unref (message);
      message = NULL;
    }

    /* no need to wait, exit loop */
    if (timeout == 0)
      break;

    else if (timeout != GST_CLOCK_TIME_NONE) {
      if (first_round) {
        g_get_current_time (&then);
        first_round = FALSE;
      } else {
        g_get_current_time (&now);

        elapsed = GST_TIMEVAL_TO_TIME (now) - GST_TIMEVAL_TO_TIME (then);

        if (elapsed > timeout)
          break;
      }
    }

    /* only here in timeout case */
    g_assert (bus->priv->poll);
    g_mutex_unlock (&bus->priv->queue_lock);
    ret = gst_poll_wait (bus->priv->poll, timeout - elapsed);
    g_mutex_lock (&bus->priv->queue_lock);

    if (ret == 0) {
      GST_INFO_OBJECT (bus, "timed out, breaking loop");
      break;
    } else {
      GST_INFO_OBJECT (bus, "we got woken up, recheck for message");
    }
  }

beach:

  g_mutex_unlock (&bus->priv->queue_lock);

  return message;
}
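A typical caller of this function blocks until the pipeline posts an
error or reaches EOS; the pipeline description below is only an
illustrative assumption:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);
  pipeline = gst_parse_launch ("videotestsrc num-buffers=100 ! autovideosink",
      NULL);
  if (pipeline == NULL)
    return -1;

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  bus = gst_element_get_bus (pipeline);
  /* wait forever, discarding everything except errors and EOS */
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  if (msg != NULL)
    gst_message_unref (msg);    /* transfer full: caller must unref */

  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}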
Exemplo n.º 22
0
/* Called by the idle function in the gl thread */
void
_do_convert (GstGLContext * context, GstGLColorConvert * convert)
{
  guint in_width, in_height, out_width, out_height;
  struct ConvertInfo *c_info = &convert->priv->convert_info;
  GstMapInfo out_info[GST_VIDEO_MAX_PLANES], in_info[GST_VIDEO_MAX_PLANES];
  gboolean res = TRUE;
  gint i, j = 0;

  out_width = GST_VIDEO_INFO_WIDTH (&convert->out_info);
  out_height = GST_VIDEO_INFO_HEIGHT (&convert->out_info);
  in_width = GST_VIDEO_INFO_WIDTH (&convert->in_info);
  in_height = GST_VIDEO_INFO_HEIGHT (&convert->in_info);

  convert->outbuf = NULL;

  if (!_init_convert (convert)) {
    convert->priv->result = FALSE;
    return;
  }

  convert->outbuf = gst_buffer_new ();
  if (!gst_gl_memory_setup_buffer (convert->context, &convert->out_info,
          convert->outbuf)) {
    convert->priv->result = FALSE;
    return;
  }

  gst_buffer_add_video_meta_full (convert->outbuf, 0,
      GST_VIDEO_INFO_FORMAT (&convert->out_info),
      GST_VIDEO_INFO_WIDTH (&convert->out_info),
      GST_VIDEO_INFO_HEIGHT (&convert->out_info),
      GST_VIDEO_INFO_N_PLANES (&convert->out_info),
      convert->out_info.offset, convert->out_info.stride);

  for (i = 0; i < c_info->in_n_textures; i++) {
    convert->priv->in_tex[i] =
        (GstGLMemory *) gst_buffer_peek_memory (convert->inbuf, i);
    if (!gst_is_gl_memory ((GstMemory *) convert->priv->in_tex[i])) {
      GST_ERROR_OBJECT (convert, "input must be GstGLMemory");
      res = FALSE;
      goto out;
    }
    if (!gst_memory_map ((GstMemory *) convert->priv->in_tex[i], &in_info[i],
            GST_MAP_READ | GST_MAP_GL)) {
      GST_ERROR_OBJECT (convert, "failed to map input memory %p",
          convert->priv->in_tex[i]);
      res = FALSE;
      goto out;
    }
  }

  for (j = 0; j < c_info->out_n_textures; j++) {
    GstGLMemory *out_tex =
        (GstGLMemory *) gst_buffer_peek_memory (convert->outbuf, j);
    gint mem_width, mem_height;

    if (!gst_is_gl_memory ((GstMemory *) out_tex)) {
      GST_ERROR_OBJECT (convert, "output must be GstGLMemory");
      res = FALSE;
      goto out;
    }

    mem_width = gst_gl_memory_get_texture_width (out_tex);
    mem_height = gst_gl_memory_get_texture_height (out_tex);

    if (out_tex->tex_type == GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE
        || out_tex->tex_type == GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE_ALPHA
        || out_width != mem_width || out_height != mem_height) {
      /* Luminance formats are not color renderable */
      /* rendering to a framebuffer only renders the intersection of all
       * the attachments, i.e. the smallest attachment size */
      GstVideoInfo temp_info;

      gst_video_info_set_format (&temp_info, GST_VIDEO_FORMAT_RGBA, out_width,
          out_height);

      if (!convert->priv->out_tex[j])
        convert->priv->out_tex[j] =
            (GstGLMemory *) gst_gl_memory_alloc (context, &temp_info, 0);
    } else {
      convert->priv->out_tex[j] = out_tex;
    }

    if (!gst_memory_map ((GstMemory *) convert->priv->out_tex[j], &out_info[j],
            GST_MAP_WRITE | GST_MAP_GL)) {
      GST_ERROR_OBJECT (convert, "failed to map output memory %p",
          convert->priv->out_tex[i]);
      res = FALSE;
      goto out;
    }
  }

  GST_LOG_OBJECT (convert, "converting to textures:%p,%p,%p,%p "
      "dimensions:%ux%u, from textures:%p,%p,%p,%p dimensions:%ux%u",
      convert->priv->out_tex[0], convert->priv->out_tex[1],
      convert->priv->out_tex[2], convert->priv->out_tex[3], out_width,
      out_height, convert->priv->in_tex[0], convert->priv->in_tex[1],
      convert->priv->in_tex[2], convert->priv->in_tex[3], in_width, in_height);

  if (!_do_convert_draw (context, convert))
    res = FALSE;

out:
  for (j--; j >= 0; j--) {
    GstGLMemory *out_tex =
        (GstGLMemory *) gst_buffer_peek_memory (convert->outbuf, j);
    gint mem_width, mem_height;

    gst_memory_unmap ((GstMemory *) convert->priv->out_tex[j], &out_info[j]);

    mem_width = gst_gl_memory_get_texture_width (out_tex);
    mem_height = gst_gl_memory_get_texture_height (out_tex);

    if (out_tex->tex_type == GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE
        || out_tex->tex_type == GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE_ALPHA
        || out_width != mem_width || out_height != mem_height) {
      GstMapInfo to_info, from_info;

      if (!gst_memory_map ((GstMemory *) convert->priv->out_tex[j], &from_info,
              GST_MAP_READ | GST_MAP_GL)) {
        gst_gl_context_set_error (convert->context, "Failed to map "
            "intermediate memory");
        res = FALSE;
        continue;
      }
      if (!gst_memory_map ((GstMemory *) out_tex, &to_info,
              GST_MAP_WRITE | GST_MAP_GL)) {
        gst_gl_context_set_error (convert->context, "Failed to map "
            "intermediate memory");
        res = FALSE;
        continue;
      }
      gst_gl_memory_copy_into_texture (convert->priv->out_tex[j],
          out_tex->tex_id, out_tex->tex_type, mem_width, mem_height,
          GST_VIDEO_INFO_PLANE_STRIDE (&out_tex->info, out_tex->plane), FALSE);
      gst_memory_unmap ((GstMemory *) convert->priv->out_tex[j], &from_info);
      gst_memory_unmap ((GstMemory *) out_tex, &to_info);
    } else {
      convert->priv->out_tex[j] = NULL;
    }
  }

  /* YV12 the same as I420 except planes 1+2 swapped */
  if (GST_VIDEO_INFO_FORMAT (&convert->out_info) == GST_VIDEO_FORMAT_YV12) {
    GstMemory *mem1 = gst_buffer_get_memory (convert->outbuf, 1);
    GstMemory *mem2 = gst_buffer_get_memory (convert->outbuf, 2);

    gst_buffer_replace_memory (convert->outbuf, 1, mem2);
    gst_buffer_replace_memory (convert->outbuf, 2, mem1);
  }

  for (i--; i >= 0; i--) {
    gst_memory_unmap ((GstMemory *) convert->priv->in_tex[i], &in_info[i]);
  }

  if (!res) {
    gst_buffer_unref (convert->outbuf);
    convert->outbuf = NULL;
  }

  convert->priv->result = res;
  return;
}
Exemplo n.º 23
0
static void
gst_raw_parse_loop (GstElement * element)
{
  GstRawParse *rp = GST_RAW_PARSE (element);
  GstRawParseClass *rp_class = GST_RAW_PARSE_GET_CLASS (rp);
  GstFlowReturn ret;
  GstBuffer *buffer;
  gint size;

  if (!gst_raw_parse_set_src_caps (rp))
    goto no_caps;

  if (rp->start_segment) {
    GST_DEBUG_OBJECT (rp, "sending start segment");
    gst_pad_push_event (rp->srcpad, rp->start_segment);
    rp->start_segment = NULL;
  }

  if (rp_class->multiple_frames_per_buffer && rp->framesize < 4096)
    size = 4096 - (4096 % rp->framesize);
  else
    size = rp->framesize;

  if (rp->segment.rate >= 0) {
    if (rp->offset + size > rp->upstream_length) {
      GstFormat fmt = GST_FORMAT_BYTES;

      if (!gst_pad_peer_query_duration (rp->sinkpad, fmt, &rp->upstream_length)) {
        GST_WARNING_OBJECT (rp,
            "Could not get upstream duration, trying to pull frame by frame");
        size = rp->framesize;
      } else if (rp->upstream_length < rp->offset + rp->framesize) {
        ret = GST_FLOW_EOS;
        goto pause;
      } else if (rp->offset + size > rp->upstream_length) {
        size = rp->upstream_length - rp->offset;
        size -= size % rp->framesize;
      }
    }
  } else {
    if (rp->offset == 0) {
      ret = GST_FLOW_EOS;
      goto pause;
    } else if (rp->offset < size) {
      size -= rp->offset;
    }
    rp->offset -= size;
  }

  buffer = NULL;
  ret = gst_pad_pull_range (rp->sinkpad, rp->offset, size, &buffer);

  if (ret != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (rp, "pull_range (%" G_GINT64_FORMAT ", %u) "
        "failed, flow: %s", rp->offset, size, gst_flow_get_name (ret));
    buffer = NULL;
    goto pause;
  }

  if (gst_buffer_get_size (buffer) < size) {
    GST_DEBUG_OBJECT (rp, "Short read at offset %" G_GINT64_FORMAT
        ", got only %" G_GSIZE_FORMAT " of %u bytes", rp->offset,
        gst_buffer_get_size (buffer), size);

    if (size > rp->framesize) {
      gst_buffer_set_size (buffer, gst_buffer_get_size (buffer) -
          gst_buffer_get_size (buffer) % rp->framesize);
    } else {
      gst_buffer_unref (buffer);
      buffer = NULL;
      ret = GST_FLOW_EOS;
      goto pause;
    }
  }

  ret = gst_raw_parse_push_buffer (rp, buffer);
  if (ret != GST_FLOW_OK)
    goto pause;

  return;

  /* ERRORS */
no_caps:
  {
    GST_ERROR_OBJECT (rp, "could not negotiate caps");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto pause;
  }
pause:
  {
    const gchar *reason = gst_flow_get_name (ret);

    GST_LOG_OBJECT (rp, "pausing task, reason %s", reason);
    gst_pad_pause_task (rp->sinkpad);

    if (ret == GST_FLOW_EOS) {
      if (rp->segment.flags & GST_SEEK_FLAG_SEGMENT) {
        GstClockTime stop;

        GST_LOG_OBJECT (rp, "Sending segment done");

        if ((stop = rp->segment.stop) == -1)
          stop = rp->segment.duration;

        gst_element_post_message (GST_ELEMENT_CAST (rp),
            gst_message_new_segment_done (GST_OBJECT_CAST (rp),
                rp->segment.format, stop));
        gst_pad_push_event (rp->srcpad,
            gst_event_new_segment_done (rp->segment.format, stop));
      } else {
        GST_LOG_OBJECT (rp, "Sending EOS, at end of stream");
        gst_pad_push_event (rp->srcpad, gst_event_new_eos ());
      }
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      GST_ELEMENT_ERROR (rp, STREAM, FAILED,
          ("Internal data stream error."),
          ("stream stopped, reason %s", reason));
      gst_pad_push_event (rp->srcpad, gst_event_new_eos ());
    }
    return;
  }
}
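The read size chosen at the top of the loop is the largest whole number
of frames that fits into 4096 bytes, falling back to exactly one frame
otherwise; a minimal sketch of that choice:

#include <glib.h>

static guint
pull_size (guint framesize, gboolean multiple_frames_per_buffer)
{
  /* e.g. framesize 1000 -> 4096 - (4096 % 1000) = 4000, four frames */
  if (multiple_frames_per_buffer && framesize < 4096)
    return 4096 - (4096 % framesize);

  /* large frames (or one-frame-per-buffer subclasses) are pulled whole */
  return framesize;
}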
Exemplo n.º 24
0
static GstFlowReturn
gst_gio_base_src_create (GstBaseSrc * base_src, guint64 offset, guint size,
    GstBuffer ** buf_return)
{
  GstGioBaseSrc *src = GST_GIO_BASE_SRC (base_src);
  GstBuffer *buf;
  GstFlowReturn ret = GST_FLOW_OK;

  g_return_val_if_fail (G_IS_INPUT_STREAM (src->stream), GST_FLOW_ERROR);

  /* If we have the requested part in our cache take a subbuffer of that,
   * otherwise fill the cache again with at least 4096 bytes from the
   * requested offset and return a subbuffer of that.
   *
   * We need caching because every read/seek operation will need to go
   * over DBus if our backend is GVfs and this is painfully slow. */
  if (src->cache && offset >= GST_BUFFER_OFFSET (src->cache) &&
      offset + size <= GST_BUFFER_OFFSET_END (src->cache)) {

    GST_DEBUG_OBJECT (src, "Creating subbuffer from cached buffer: offset %"
        G_GUINT64_FORMAT " length %u", offset, size);

    buf = gst_buffer_create_sub (src->cache,
        offset - GST_BUFFER_OFFSET (src->cache), size);

    GST_BUFFER_OFFSET (buf) = offset;
    GST_BUFFER_OFFSET_END (buf) = offset + size;
    GST_BUFFER_SIZE (buf) = size;
  } else {
    guint cachesize = MAX (4096, size);
    gssize read, res;
    gboolean success, eos;
    GError *err = NULL;

    if (src->cache) {
      gst_buffer_unref (src->cache);
      src->cache = NULL;
    }

    if (G_UNLIKELY (offset != src->position)) {
      if (!GST_GIO_STREAM_IS_SEEKABLE (src->stream))
        return GST_FLOW_NOT_SUPPORTED;

      GST_DEBUG_OBJECT (src, "Seeking to position %" G_GUINT64_FORMAT, offset);
      ret = gst_gio_seek (src, G_SEEKABLE (src->stream), offset, src->cancel);

      if (ret == GST_FLOW_OK)
        src->position = offset;
      else
        return ret;
    }

    src->cache = gst_buffer_try_new_and_alloc (cachesize);
    if (G_UNLIKELY (src->cache == NULL)) {
      GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", cachesize);
      return GST_FLOW_ERROR;
    }

    GST_LOG_OBJECT (src, "Reading %u bytes from offset %" G_GUINT64_FORMAT,
        cachesize, offset);

    /* GIO sometimes returns fewer bytes than requested although
     * it's not at the end of file. SMB for example only
     * supports reads up to 64k. So we loop here until we get at
     * least the requested number of bytes or a read returns
     * nothing. */
    read = 0;
    while (size - read > 0 && (res =
            g_input_stream_read (G_INPUT_STREAM (src->stream),
                GST_BUFFER_DATA (src->cache) + read, cachesize - read,
                src->cancel, &err)) > 0) {
      read += res;
    }

    success = (read >= 0);
    eos = (cachesize > 0 && read == 0);

    if (!success && !gst_gio_error (src, "g_input_stream_read", &err, &ret)) {
      GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
          ("Could not read from stream: %s", err->message));
      g_clear_error (&err);
    }

    if (success && !eos) {
      src->position += read;
      GST_BUFFER_SIZE (src->cache) = read;

      GST_BUFFER_OFFSET (src->cache) = offset;
      GST_BUFFER_OFFSET_END (src->cache) = offset + read;

      GST_DEBUG_OBJECT (src, "Read successful");
      GST_DEBUG_OBJECT (src, "Creating subbuffer from new "
          "cached buffer: offset %" G_GUINT64_FORMAT " length %u", offset,
          size);

      buf = gst_buffer_create_sub (src->cache, 0, MIN (size, read));

      GST_BUFFER_OFFSET (buf) = offset;
      GST_BUFFER_OFFSET_END (buf) = offset + MIN (size, read);
      GST_BUFFER_SIZE (buf) = MIN (size, read);
    } else {
      GST_DEBUG_OBJECT (src, "Read not successful");
      gst_buffer_unref (src->cache);
      src->cache = NULL;
      buf = NULL;
    }

    if (eos)
      ret = GST_FLOW_UNEXPECTED;
  }

  *buf_return = buf;

  return ret;
}
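The short-read loop above can be lifted out into a small helper; a
minimal, stand-alone sketch of the same pattern:

#include <gio/gio.h>

/* keep calling g_input_stream_read() until the requested number of
 * bytes has arrived, the stream ended, or an error occurred */
static gssize
read_fully (GInputStream * stream, guint8 * data, gsize size,
    GCancellable * cancellable, GError ** err)
{
  gsize total = 0;

  while (total < size) {
    gssize res = g_input_stream_read (stream, data + total, size - total,
        cancellable, err);

    if (res < 0)
      return -1;                /* hard error, *err is set */
    if (res == 0)
      break;                    /* end of stream */
    total += res;
  }
  return total;
}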
Exemplo n.º 25
0
static GstFlowReturn
gst_speex_dec_parse_data (GstSpeexDec * dec, GstBuffer * buf)
{
  GstFlowReturn res = GST_FLOW_OK;
  gint i, fpp;
  SpeexBits *bits;
  GstMapInfo map;

  if (!dec->frame_duration)
    goto not_negotiated;

  if (G_LIKELY (gst_buffer_get_size (buf))) {
    /* send data to the bitstream */
    gst_buffer_map (buf, &map, GST_MAP_READ);
    speex_bits_read_from (&dec->bits, (gchar *) map.data, map.size);
    gst_buffer_unmap (buf, &map);

    fpp = dec->header->frames_per_packet;
    bits = &dec->bits;

    GST_DEBUG_OBJECT (dec, "received buffer of size %" G_GSIZE_FORMAT
        ", fpp %d, %d bits", map.size, fpp, speex_bits_remaining (bits));
  } else {
    /* FIXME ? actually consider how much concealment is needed */
    /* concealment data, pass NULL as the bits parameters */
    GST_DEBUG_OBJECT (dec, "creating concealment data");
    fpp = dec->header->frames_per_packet;
    bits = NULL;
  }

  /* now decode each frame, catering for unknown number of them (e.g. rtp) */
  for (i = 0; i < fpp; i++) {
    GstBuffer *outbuf;
    gboolean corrupted = FALSE;
    gint ret;

    GST_LOG_OBJECT (dec, "decoding frame %d/%d, %d bits remaining", i, fpp,
        bits ? speex_bits_remaining (bits) : -1);
#if 0
    res =
        gst_pad_alloc_buffer_and_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec),
        GST_BUFFER_OFFSET_NONE, dec->frame_size * dec->header->nb_channels * 2,
        GST_PAD_CAPS (GST_AUDIO_DECODER_SRC_PAD (dec)), &outbuf);

    if (res != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));
      return res;
    }
#endif
    /* FIXME, we can use a bufferpool because we have fixed size buffers. We
     * could also use an allocator */
    outbuf =
        gst_buffer_new_allocate (NULL,
        dec->frame_size * dec->header->nb_channels * 2, NULL);

    gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
    ret = speex_decode_int (dec->state, bits, (spx_int16_t *) map.data);

    if (ret == -1) {
      /* uh? end of stream */
      GST_WARNING_OBJECT (dec, "Unexpected end of stream found");
      corrupted = TRUE;
    } else if (ret == -2) {
      GST_WARNING_OBJECT (dec, "Decoding error: corrupted stream?");
      corrupted = TRUE;
    }

    if (bits && speex_bits_remaining (bits) < 0) {
      GST_WARNING_OBJECT (dec, "Decoding overflow: corrupted stream?");
      corrupted = TRUE;
    }
    if (dec->header->nb_channels == 2)
      speex_decode_stereo_int ((spx_int16_t *) map.data, dec->frame_size,
          dec->stereo);

    gst_buffer_unmap (outbuf, &map);

    if (!corrupted) {
      res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), outbuf, 1);
    } else {
      res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), NULL, 1);
      gst_buffer_unref (outbuf);
    }

    if (res != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));
      break;
    }
  }

  return res;

  /* ERRORS */
not_negotiated:
  {
    GST_ELEMENT_ERROR (dec, CORE, NEGOTIATION, (NULL),
        ("decoder not initialized"));
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
Exemplo n.º 26
0
static void
generate_coefficients (GstAudioChebBand * filter)
{
  if (GST_AUDIO_FILTER (filter)->format.rate == 0) {
    gdouble *a = g_new0 (gdouble, 1);

    a[0] = 1.0;
    gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
        (filter), a, 1, NULL, 0);
    GST_LOG_OBJECT (filter, "rate was not set yet");
    return;
  }

  if (filter->upper_frequency <= filter->lower_frequency) {
    gdouble *a = g_new0 (gdouble, 1);

    a[0] = (filter->mode == MODE_BAND_PASS) ? 0.0 : 1.0;
    gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
        (filter), a, 1, NULL, 0);

    GST_LOG_OBJECT (filter, "frequency band had no or negative dimension");
    return;
  }

  if (filter->upper_frequency > GST_AUDIO_FILTER (filter)->format.rate / 2) {
    filter->upper_frequency = GST_AUDIO_FILTER (filter)->format.rate / 2;
    GST_LOG_OBJECT (filter, "clipped upper frequency to nyquist frequency");
  }

  if (filter->lower_frequency < 0.0) {
    filter->lower_frequency = 0.0;
    GST_LOG_OBJECT (filter, "clipped lower frequency to 0.0");
  }

  /* Calculate coefficients for the chebyshev filter */
  {
    gint np = filter->poles;
    gdouble *a, *b;
    gint i, p;

    a = g_new0 (gdouble, np + 5);
    b = g_new0 (gdouble, np + 5);

    /* Calculate transfer function coefficients */
    a[4] = 1.0;
    b[4] = 1.0;

    for (p = 1; p <= np / 4; p++) {
      gdouble a0, a1, a2, a3, a4, b1, b2, b3, b4;
      gdouble *ta = g_new0 (gdouble, np + 5);
      gdouble *tb = g_new0 (gdouble, np + 5);

      generate_biquad_coefficients (filter, p, &a0, &a1, &a2, &a3, &a4, &b1,
          &b2, &b3, &b4);

      memcpy (ta, a, sizeof (gdouble) * (np + 5));
      memcpy (tb, b, sizeof (gdouble) * (np + 5));

      /* add the new coefficients for the new two poles
       * to the cascade by multiplication of the transfer
       * functions */
      for (i = 4; i < np + 5; i++) {
        a[i] =
            a0 * ta[i] + a1 * ta[i - 1] + a2 * ta[i - 2] + a3 * ta[i - 3] +
            a4 * ta[i - 4];
        b[i] =
            tb[i] - b1 * tb[i - 1] - b2 * tb[i - 2] - b3 * tb[i - 3] -
            b4 * tb[i - 4];
      }
      g_free (ta);
      g_free (tb);
    }

    /* Move coefficients to the beginning of the array
     * and multiply the b coefficients with -1 to move from
     * the transfer function's coefficients to the difference
     * equation's coefficients */
    b[4] = 0.0;
    for (i = 0; i <= np; i++) {
      a[i] = a[i + 4];
      b[i] = -b[i + 4];
    }

    /* Normalize to unity gain at frequency 0 and frequency
     * 0.5 for bandreject and unity gain at band center frequency
     * for bandpass */
    if (filter->mode == MODE_BAND_REJECT) {
      /* gain is sqrt(H(0)*H(0.5)) */

      gdouble gain1 =
          gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b, np + 1,
          1.0, 0.0);
      gdouble gain2 =
          gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b, np + 1,
          -1.0, 0.0);

      gain1 = sqrt (gain1 * gain2);

      for (i = 0; i <= np; i++) {
        a[i] /= gain1;
      }
    } else {
      /* gain is H(wc), wc = center frequency */

      gdouble w1 =
          2.0 * G_PI * (filter->lower_frequency /
          GST_AUDIO_FILTER (filter)->format.rate);
      gdouble w2 =
          2.0 * G_PI * (filter->upper_frequency /
          GST_AUDIO_FILTER (filter)->format.rate);
      gdouble w0 = (w2 + w1) / 2.0;
      gdouble zr = cos (w0), zi = sin (w0);
      gdouble gain =
          gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b, np + 1, zr,
          zi);

      for (i = 0; i <= np; i++) {
        a[i] /= gain;
      }
    }

    gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
        (filter), a, np + 1, b, np + 1);

    GST_LOG_OBJECT (filter,
        "Generated IIR coefficients for the Chebyshev filter");
    GST_LOG_OBJECT (filter,
        "mode: %s, type: %d, poles: %d, lower-frequency: %.2f Hz, upper-frequency: %.2f Hz, ripple: %.2f dB",
        (filter->mode == MODE_BAND_PASS) ? "band-pass" : "band-reject",
        filter->type, filter->poles, filter->lower_frequency,
        filter->upper_frequency, filter->ripple);

    GST_LOG_OBJECT (filter, "%.2f dB gain @ 0Hz",
        20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b,
                np + 1, 1.0, 0.0)));
    {
      gdouble w1 =
          2.0 * G_PI * (filter->lower_frequency /
          GST_AUDIO_FILTER (filter)->format.rate);
      gdouble w2 =
          2.0 * G_PI * (filter->upper_frequency /
          GST_AUDIO_FILTER (filter)->format.rate);
      gdouble w0 = (w2 + w1) / 2.0;
      gdouble zr, zi;

      zr = cos (w1);
      zi = sin (w1);
      GST_LOG_OBJECT (filter, "%.2f dB gain @ %dHz",
          20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1,
                  b, np + 1, zr, zi)), (int) filter->lower_frequency);
      zr = cos (w0);
      zi = sin (w0);
      GST_LOG_OBJECT (filter, "%.2f dB gain @ %dHz",
          20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1,
                  b, np + 1, zr, zi)),
          (int) ((filter->lower_frequency + filter->upper_frequency) / 2.0));
      zr = cos (w2);
      zi = sin (w2);
      GST_LOG_OBJECT (filter, "%.2f dB gain @ %dHz",
          20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1,
                  b, np + 1, zr, zi)), (int) filter->upper_frequency);
    }
    GST_LOG_OBJECT (filter, "%.2f dB gain @ %dHz",
        20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b,
                np + 1, -1.0, 0.0)),
        GST_AUDIO_FILTER (filter)->format.rate / 2);
  }
}
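The normalization steps evaluate the transfer function on the unit
circle at z = cos(w) + j*sin(w), with w = 2*pi*f/rate. The helper below
is only an assumed stand-in for the plugin-internal
gst_audio_fx_base_iir_filter_calculate_gain(), written against the
difference-equation coefficients produced above (b[0] zeroed, remaining
b sign-flipped):

#include <complex.h>

/* evaluate |H(z)| = |sum_k a[k] z^-k| / |1 - sum_k b[k] z^-k| at
 * z = zr + j*zi; with the sign flip above, b[] holds the feedback
 * coefficients of y[n] = sum a[k] x[n-k] + sum b[k] y[n-k], b[0] == 0 */
static double
iir_gain (const double *a, int na, const double *b, int nb,
    double zr, double zi)
{
  double complex z = zr + zi * I;
  double complex zk = 1.0;      /* z^0 */
  double complex num = 0.0, den = 0.0;
  int k, n = (na > nb) ? na : nb;

  for (k = 0; k < n; k++) {
    if (k < na)
      num += a[k] * zk;
    if (k < nb)
      den += b[k] * zk;
    zk /= z;                    /* step to z^-(k+1) */
  }
  return cabs (num) / cabs (1.0 - den);
}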
Exemplo n.º 27
0
static void
gst_camerabin_image_dispose (GstCameraBinImage * img)
{
  GST_DEBUG_OBJECT (img, "disposing");

  g_string_free (img->filename, TRUE);
  img->filename = NULL;

  if (img->elements) {
    g_list_free (img->elements);
    img->elements = NULL;
  }

  if (img->sink) {
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->sink), GST_OBJECT_REFCOUNT_VALUE (img->sink));
    gst_object_unref (img->sink);
    img->sink = NULL;
  }

  if (img->formatter) {
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->formatter),
        GST_OBJECT_REFCOUNT_VALUE (img->formatter));
    gst_object_unref (img->formatter);
    img->formatter = NULL;
  }

  if (img->app_formatter) {
    gst_object_sink (img->app_formatter);
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->app_formatter),
        GST_OBJECT_REFCOUNT_VALUE (img->app_formatter));
    gst_object_unref (img->app_formatter);
    img->app_formatter = NULL;
  }

  if (img->enc) {
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->enc), GST_OBJECT_REFCOUNT_VALUE (img->enc));
    gst_object_unref (img->enc);
    img->enc = NULL;
  }

  if (img->csp) {
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->csp), GST_OBJECT_REFCOUNT_VALUE (img->csp));
    gst_object_unref (img->csp);
    img->csp = NULL;
  }

  /* Note: if imagebin was never set to READY state, the bin
     never took ownership of the elements created by the
     application, so gst_object_sink is called on these
     elements here (they may still be floating and would not
     be unreffed properly without sinking them first)
   */
  if (img->app_enc) {
    gst_object_sink (img->app_enc);
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->app_enc),
        GST_OBJECT_REFCOUNT_VALUE (img->app_enc));
    gst_object_unref (img->app_enc);
    img->app_enc = NULL;
  }

  if (img->post) {
    gst_object_sink (img->post);
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->post), GST_OBJECT_REFCOUNT_VALUE (img->post));
    gst_object_unref (img->post);
    img->post = NULL;
  }

  G_OBJECT_CLASS (parent_class)->dispose ((GObject *) img);
}
Exemplo n.º 28
0
static gboolean
gst_wavenc_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstWavEnc *wavenc;
  GstStructure *structure;
  const gchar *name;
  gint chans, rate;
  GstCaps *ccaps;

  wavenc = GST_WAVENC (gst_pad_get_parent (pad));

  ccaps = gst_pad_get_current_caps (pad);
  if (wavenc->sent_header && ccaps && !gst_caps_can_intersect (caps, ccaps)) {
    gst_caps_unref (ccaps);
    GST_WARNING_OBJECT (wavenc, "cannot change format in middle of stream");
    goto fail;
  }
  if (ccaps)
    gst_caps_unref (ccaps);

  GST_DEBUG_OBJECT (wavenc, "got caps: %" GST_PTR_FORMAT, caps);

  structure = gst_caps_get_structure (caps, 0);
  name = gst_structure_get_name (structure);

  if (!gst_structure_get_int (structure, "channels", &chans) ||
      !gst_structure_get_int (structure, "rate", &rate)) {
    GST_WARNING_OBJECT (wavenc, "caps incomplete");
    goto fail;
  }

  if (strcmp (name, "audio/x-raw") == 0) {
    GstAudioInfo info;

    if (!gst_audio_info_from_caps (&info, caps))
      goto fail;

    if (GST_AUDIO_INFO_IS_INTEGER (&info))
      wavenc->format = GST_RIFF_WAVE_FORMAT_PCM;
    else if (GST_AUDIO_INFO_IS_FLOAT (&info))
      wavenc->format = GST_RIFF_WAVE_FORMAT_IEEE_FLOAT;
    else
      goto fail;

    wavenc->width = GST_AUDIO_INFO_WIDTH (&info);
  } else if (strcmp (name, "audio/x-alaw") == 0) {
    wavenc->format = GST_RIFF_WAVE_FORMAT_ALAW;
    wavenc->width = 8;
  } else if (strcmp (name, "audio/x-mulaw") == 0) {
    wavenc->format = GST_RIFF_WAVE_FORMAT_MULAW;
    wavenc->width = 8;
  } else {
    GST_WARNING_OBJECT (wavenc, "Unsupported format %s", name);
    goto fail;
  }

  wavenc->channels = chans;
  wavenc->rate = rate;

  GST_LOG_OBJECT (wavenc,
      "accepted caps: format=0x%04x chans=%u width=%u rate=%u",
      wavenc->format, wavenc->channels, wavenc->width, wavenc->rate);

  gst_object_unref (wavenc);
  return TRUE;

fail:
  gst_object_unref (wavenc);
  return FALSE;
}
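For illustration, caps that this setcaps handler would accept as PCM:
16-bit integer samples map to GST_RIFF_WAVE_FORMAT_PCM with width 16.
The snippet below is only a sketch:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstCaps *caps;
  gchar *s;

  gst_init (&argc, &argv);
  caps = gst_caps_from_string ("audio/x-raw, format=(string)S16LE, "
      "layout=(string)interleaved, rate=(int)44100, channels=(int)2");

  s = gst_caps_to_string (caps);
  g_print ("%s\n", s);
  g_free (s);

  gst_caps_unref (caps);
  return 0;
}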
Exemplo n.º 29
0
static gboolean
flx_decode_chunks (GstFlxDec * flxdec, gulong n_chunks, GstByteReader * reader,
    GstByteWriter * writer)
{
  gboolean ret = TRUE;

  while (n_chunks--) {
    GstByteReader chunk;
    guint32 size;
    guint16 type;

    if (!gst_byte_reader_get_uint32_le (reader, &size))
      goto parse_error;
    if (!gst_byte_reader_get_uint16_le (reader, &type))
      goto parse_error;
    GST_LOG_OBJECT (flxdec, "chunk has type 0x%02x size %d", type, size);

    if (!gst_byte_reader_get_sub_reader (reader, &chunk,
            size - FlxFrameChunkSize)) {
      GST_ERROR_OBJECT (flxdec, "Incorrect size in the chunk header");
      goto error;
    }

    switch (type) {
      case FLX_COLOR64:
        ret = flx_decode_color (flxdec, &chunk, writer, 2);
        break;

      case FLX_COLOR256:
        ret = flx_decode_color (flxdec, &chunk, writer, 0);
        break;

      case FLX_BRUN:
        ret = flx_decode_brun (flxdec, &chunk, writer);
        break;

      case FLX_LC:
        ret = flx_decode_delta_fli (flxdec, &chunk, writer);
        break;

      case FLX_SS2:
        ret = flx_decode_delta_flc (flxdec, &chunk, writer);
        break;

      case FLX_BLACK:
        ret = gst_byte_writer_fill (writer, 0, flxdec->size);
        break;

      case FLX_MINI:
        break;

      default:
        GST_WARNING ("Unimplemented chunk type: 0x%02x size: %d - skipping",
            type, size);
        break;
    }

    if (!ret)
      break;
  }

  return ret;

parse_error:
  GST_ERROR_OBJECT (flxdec, "Failed to decode chunk");
error:
  return FALSE;
}
Exemplo n.º 30
0
static void
gst_wavpack_parse_loop (GstElement * element)
{
  GstWavpackParse *parse = GST_WAVPACK_PARSE (element);
  GstFlowReturn flow_ret;
  WavpackHeader header = { {0,}, 0, };
  GstBuffer *buf = NULL;

  flow_ret = gst_wavpack_parse_resync_loop (parse, &header);

  if (flow_ret != GST_FLOW_OK)
    goto pause;

  GST_LOG_OBJECT (parse, "Read header at offset %" G_GINT64_FORMAT
      ": chunk size = %u+8", parse->current_offset, header.ckSize);

  buf = gst_wavpack_parse_pull_buffer (parse, parse->current_offset,
      header.ckSize + 8, &flow_ret);

  if (flow_ret != GST_FLOW_OK)
    goto pause;

  if (parse->srcpad == NULL) {
    if (!gst_wavpack_parse_create_src_pad (parse, buf, &header)) {
      GST_ERROR_OBJECT (parse, "Failed to create src pad");
      flow_ret = GST_FLOW_ERROR;
      goto pause;
    }
  }
  if (header.flags & INITIAL_BLOCK)
    gst_wavpack_parse_index_append_entry (parse, parse->current_offset,
        header.block_index, header.block_samples);

  flow_ret = gst_wavpack_parse_push_buffer (parse, buf, &header);
  if (flow_ret != GST_FLOW_OK)
    goto pause;

  return;

pause:
  {
    const gchar *reason = gst_flow_get_name (flow_ret);

    GST_LOG_OBJECT (parse, "pausing task, reason %s", reason);
    gst_pad_pause_task (parse->sinkpad);

    if (flow_ret == GST_FLOW_UNEXPECTED && parse->srcpad) {
      if (parse->segment.flags & GST_SEEK_FLAG_SEGMENT) {
        GstClockTime stop;

        GST_LOG_OBJECT (parse, "Sending segment done");

        if ((stop = parse->segment.stop) == -1)
          stop = parse->segment.duration;

        gst_element_post_message (GST_ELEMENT_CAST (parse),
            gst_message_new_segment_done (GST_OBJECT_CAST (parse),
                parse->segment.format, stop));
      } else {
        GST_LOG_OBJECT (parse, "Sending EOS, at end of stream");
        gst_pad_push_event (parse->srcpad, gst_event_new_eos ());
      }
    } else if (flow_ret == GST_FLOW_NOT_LINKED
        || flow_ret < GST_FLOW_UNEXPECTED) {
      GST_ELEMENT_ERROR (parse, STREAM, FAILED,
          (_("Internal data stream error.")), ("stream stopped, reason %s",
              reason));
      if (parse->srcpad)
        gst_pad_push_event (parse->srcpad, gst_event_new_eos ());
    }
    return;
  }
}