Example #1
/* The nav event handler handles nav events, but still forwards them, so you
 * should even be able to use the puzzle while navigating a DVD menu. We
 * return TRUE even when no one downstream handles the event.
 */
static gboolean
nav_event_handler (GstPad * pad, GstEvent * event)
{
  GstPuzzle *puzzle;
  GstVideofilter *filter;
  const gchar *type;
  gboolean result = FALSE;
  gdouble x, y;
  gint xpos = 0, ypos = 0;

  puzzle = GST_PUZZLE (gst_pad_get_parent (pad));
  filter = GST_VIDEOFILTER (puzzle);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NAVIGATION:
      /* translate the event */
      if (gst_structure_get_double (event->event_data.structure.structure,
              "pointer_x", &x) &&
          gst_structure_get_double (event->event_data.structure.structure,
              "pointer_y", &y)) {
        gint width, height;

        width = gst_videofilter_get_input_width (filter);
        height = gst_videofilter_get_input_height (filter);
        width = (width / puzzle->columns) & ~3;
        height = (height / puzzle->rows) & ~3;
        xpos = (int) x / width;
        ypos = (int) y / height;
        if (xpos >= 0 && xpos < puzzle->columns && ypos >= 0
            && ypos < puzzle->rows) {
          GstEvent *copy;
          guint lookup;

          lookup = puzzle->permutation[ypos * puzzle->columns + xpos];
          GST_DEBUG_OBJECT (puzzle, "translated %dx%d (%gx%g) to %dx%d (%gx%g)",
              xpos, ypos, x, y,
              lookup % puzzle->columns, lookup / puzzle->columns,
              x + ((gint) (lookup % puzzle->columns) - xpos) * width,
              y + ((gint) (lookup / puzzle->columns) - ypos) * height);
          x += ((gint) (lookup % puzzle->columns) - xpos) * width;
          y += ((gint) (lookup / puzzle->columns) - ypos) * height;
          copy = gst_event_copy (event);
          gst_structure_set (copy->event_data.structure.structure,
              "pointer_x", G_TYPE_DOUBLE, x,
              "pointer_y", G_TYPE_DOUBLE, y, NULL);
          gst_event_unref (event);
          event = copy;
        }
      }
      /* handle the event. NOTE: it has already been translated! */
      type = gst_structure_get_string (event->event_data.structure.structure,
          "event");
      if (g_str_equal (type, "key-press")) {
        const gchar *key =
            gst_structure_get_string (event->event_data.structure.structure,
            "key");

        if (g_str_equal (key, "space")) {
          if (gst_puzzle_is_solved (puzzle)) {
            gst_puzzle_shuffle (puzzle);
          } else {
            gst_puzzle_solve (puzzle);
          }
        } else {
          if (puzzle->solved)
            break;
          if (g_str_equal (key, "Left")) {
            gst_puzzle_move (puzzle, DIR_LEFT);
          } else if (g_str_equal (key, "Right")) {
            gst_puzzle_move (puzzle, DIR_RIGHT);
          } else if (g_str_equal (key, "Up")) {
            gst_puzzle_move (puzzle, DIR_UP);
          } else if (g_str_equal (key, "Down")) {
            gst_puzzle_move (puzzle, DIR_DOWN);
          }
        }
        puzzle->solved = gst_puzzle_is_solved (puzzle);
      } else if (g_str_equal (type, "mouse-button-press")) {
        gint button;

        if (gst_structure_get_int (event->event_data.structure.structure,
                "button", &button)) {
          if (button == 1) {
            if (xpos >= 0 && xpos < puzzle->columns && ypos >= 0
                && ypos < puzzle->rows && !puzzle->solved) {
              gst_puzzle_swap (puzzle, ypos * puzzle->columns + xpos);
              puzzle->solved = gst_puzzle_is_solved (puzzle);
            }
          } else if (button == 2) {
            if (puzzle->solved) {
              gst_puzzle_shuffle (puzzle);
            } else {
              gst_puzzle_solve (puzzle);
            }
            puzzle->solved = gst_puzzle_is_solved (puzzle);
          }
        }
      }
      /* FIXME: only return TRUE for events we handle? */
      result = TRUE;
      break;
    default:
      break;
  }
  return gst_pad_event_default (pad, event) || result;
}
Example #2
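/* Sink-pad event handler: drop any half-built stored frame on FLUSH_START,
 * fully reset the element on FLUSH_STOP, and forward every event downstream.
 * (The EOS field-draining code below is currently disabled.) */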
static gboolean
gst_interlace_sink_event (GstPad * pad, GstEvent * event)
{
  gboolean ret;
  GstInterlace *interlace;

  interlace = GST_INTERLACE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      GST_DEBUG_OBJECT (interlace, "handling FLUSH_START");
      if (interlace->stored_frame) {
        gst_buffer_unref (interlace->stored_frame);
        interlace->stored_frame = NULL;
      }
      ret = gst_pad_push_event (interlace->srcpad, event);
      break;
    case GST_EVENT_FLUSH_STOP:
      GST_DEBUG_OBJECT (interlace, "handling FLUSH_STOP");
      gst_interlace_reset (interlace);
      ret = gst_pad_push_event (interlace->srcpad, event);
      break;
    case GST_EVENT_EOS:
#if 0
      /* FIXME revive this when we output ONEFIELD and RFF buffers */
    {
      gint num_fields;
      const PulldownFormat *format = &formats[interlace->pattern];

      num_fields =
          format->n_fields[interlace->phase_index] -
          interlace->stored_fields_pushed;
      interlace->stored_fields_pushed = 0;

      /* on EOS we want to push as many sane frames as are left */
      while (num_fields > 1) {
        GstBuffer *output_buffer;

        /* make metadata writable before editing it */
        interlace->stored_frame =
            gst_buffer_make_metadata_writable (interlace->stored_frame);
        num_fields -= 2;

        gst_interlace_decorate_buffer (interlace, interlace->stored_frame,
            num_fields);

        /* ref output_buffer/stored frame because we want to keep it for now
         * and pushing gives away a ref */
        output_buffer = gst_buffer_ref (interlace->stored_frame);
        if (gst_pad_push (interlace->srcpad, output_buffer)) {
          GST_DEBUG_OBJECT (interlace, "Failed to push buffer %p",
              output_buffer);
          return FALSE;
        }
        output_buffer = NULL;

        if (num_fields <= 1) {
          gst_buffer_unref (interlace->stored_frame);
          interlace->stored_frame = NULL;
          break;
        }
      }

      /* increment the phase index */
      interlace->phase_index++;
      if (!format->n_fields[interlace->phase_index]) {
        interlace->phase_index = 0;
      }
    }
#endif

      ret = gst_pad_push_event (interlace->srcpad, event);
      break;
    default:
      ret = gst_pad_push_event (interlace->srcpad, event);
      break;
  }

  g_object_unref (interlace);

  return ret;
}
Example #3
static GstFlowReturn
gst_interlace_chain (GstPad * pad, GstBuffer * buffer)
{
  GstInterlace *interlace = GST_INTERLACE (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  gint num_fields = 0;
  int current_fields;
  const PulldownFormat *format;

  GST_DEBUG ("Received buffer at %u:%02u:%02u:%09u",
      (guint) (GST_BUFFER_TIMESTAMP (buffer) / (GST_SECOND * 60 * 60)),
      (guint) ((GST_BUFFER_TIMESTAMP (buffer) / (GST_SECOND * 60)) % 60),
      (guint) ((GST_BUFFER_TIMESTAMP (buffer) / GST_SECOND) % 60),
      (guint) (GST_BUFFER_TIMESTAMP (buffer) % GST_SECOND));

  GST_DEBUG ("duration %" GST_TIME_FORMAT " flags %04x %s %s %s",
      GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)),
      GST_BUFFER_FLAGS (buffer),
      (GST_BUFFER_FLAGS (buffer) & GST_VIDEO_BUFFER_TFF) ? "tff" : "",
      (GST_BUFFER_FLAGS (buffer) & GST_VIDEO_BUFFER_RFF) ? "rff" : "",
      (GST_BUFFER_FLAGS (buffer) & GST_VIDEO_BUFFER_ONEFIELD) ? "onefield" :
      "");

  if (GST_BUFFER_FLAGS (buffer) & GST_BUFFER_FLAG_DISCONT) {
    GST_DEBUG ("discont");

    if (interlace->stored_frame) {
      gst_buffer_unref (interlace->stored_frame);
    }
    interlace->stored_frame = NULL;
    interlace->stored_fields = 0;

    if (interlace->top_field_first) {
      interlace->field_index = 0;
    } else {
      interlace->field_index = 1;
    }
  }

  if (interlace->timebase == GST_CLOCK_TIME_NONE) {
    /* get the initial ts */
    interlace->timebase = GST_BUFFER_TIMESTAMP (buffer);
  }

  format = &formats[interlace->pattern];

  if (interlace->stored_fields == 0
      && interlace->phase_index == interlace->pattern_offset
      && GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (buffer))) {
    interlace->timebase = GST_BUFFER_TIMESTAMP (buffer);
    interlace->fields_since_timebase = 0;
  }

  if (!format->n_fields[interlace->phase_index]) {
    interlace->phase_index = 0;
  }

  current_fields = format->n_fields[interlace->phase_index];
  /* increment the phase index */
  interlace->phase_index++;
  GST_DEBUG ("incoming buffer assigned %d fields", current_fields);

  num_fields = interlace->stored_fields + current_fields;
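  /* Weave output frames until fewer than two fields remain: pair a leftover
   * stored field with one field from the incoming buffer, otherwise take two
   * (or, when allow_rff is set, three) fields straight from the incoming
   * buffer. */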
  while (num_fields >= 2) {
    GstBuffer *output_buffer;
    int n_output_fields;

    GST_DEBUG ("have %d fields, %d current, %d stored",
        num_fields, current_fields, interlace->stored_fields);

    if (interlace->stored_fields > 0) {
      GST_DEBUG ("1 field from stored, 1 from current");

      output_buffer = gst_buffer_new_and_alloc (GST_BUFFER_SIZE (buffer));
      /* take the first field from the stored frame */
      copy_field (interlace, output_buffer, interlace->stored_frame,
          interlace->field_index);
      interlace->stored_fields--;
      /* take the second field from the incoming buffer */
      copy_field (interlace, output_buffer, buffer, interlace->field_index ^ 1);
      current_fields--;
      n_output_fields = 2;
    } else {
      output_buffer =
          gst_buffer_make_metadata_writable (gst_buffer_ref (buffer));
      if (num_fields >= 3 && interlace->allow_rff) {
        GST_DEBUG ("3 fields from current");
        /* take both fields from incoming buffer */
        current_fields -= 3;
        n_output_fields = 3;
      } else {
        GST_DEBUG ("2 fields from current");
      /* take both fields from the incoming buffer */
        current_fields -= 2;
        n_output_fields = 2;
      }
    }
    num_fields -= n_output_fields;

    gst_interlace_decorate_buffer (interlace, output_buffer, n_output_fields);
    interlace->fields_since_timebase += n_output_fields;
    interlace->field_index ^= (n_output_fields & 1);

    GST_DEBUG_OBJECT (interlace, "output timestamp %" GST_TIME_FORMAT
        " duration %" GST_TIME_FORMAT " flags %04x %s %s %s",
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (output_buffer)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (output_buffer)),
        GST_BUFFER_FLAGS (output_buffer),
        (GST_BUFFER_FLAGS (output_buffer) & GST_VIDEO_BUFFER_TFF) ? "tff" : "",
        (GST_BUFFER_FLAGS (output_buffer) & GST_VIDEO_BUFFER_RFF) ? "rff" : "",
        (GST_BUFFER_FLAGS (output_buffer) & GST_VIDEO_BUFFER_ONEFIELD) ?
        "onefield" : "");

    ret = gst_pad_push (interlace->srcpad, output_buffer);
    if (ret != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (interlace, "Failed to push buffer %p", output_buffer);
      break;
    }
  }

  GST_DEBUG ("done.  %d fields remaining", current_fields);

  if (interlace->stored_frame) {
    gst_buffer_unref (interlace->stored_frame);
    interlace->stored_frame = NULL;
    interlace->stored_fields = 0;
  }

  if (current_fields > 0) {
    interlace->stored_frame = buffer;
    interlace->stored_fields = current_fields;
  } else {
    gst_buffer_unref (buffer);
  }

  gst_object_unref (interlace);

  return ret;
}
Example #4
static gboolean
gst_deinterlace2_setcaps (GstPad * pad, GstCaps * caps)
{
  gboolean res = TRUE;
  GstDeinterlace2 *self = GST_DEINTERLACE2 (gst_pad_get_parent (pad));
  GstPad *otherpad;
  GstStructure *structure;
  GstVideoFormat fmt;
  guint32 fourcc;
  GstCaps *othercaps;

  otherpad = (pad == self->srcpad) ? self->sinkpad : self->srcpad;

  structure = gst_caps_get_structure (caps, 0);

  res = gst_structure_get_int (structure, "width", &self->frame_width);
  res &= gst_structure_get_int (structure, "height", &self->frame_height);
  res &=
      gst_structure_get_fraction (structure, "framerate", &self->frame_rate_n,
      &self->frame_rate_d);
  res &= gst_structure_get_fourcc (structure, "format", &fourcc);
  /* TODO: get interlaced, field_layout, field_order */
  if (!res)
    goto invalid_caps;

  if (self->fields == GST_DEINTERLACE2_ALL) {
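    /* One output frame per field: the caps pushed to the other pad get the
     * framerate doubled (towards the source pad) or halved (towards the
     * sink pad). */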
    gint fps_n = self->frame_rate_n, fps_d = self->frame_rate_d;

    if (!gst_fraction_double (&fps_n, &fps_d, otherpad != self->srcpad))
      goto invalid_caps;

    othercaps = gst_caps_copy (caps);

    gst_caps_set_simple (othercaps, "framerate", GST_TYPE_FRACTION, fps_n,
        fps_d, NULL);
  } else {
    othercaps = gst_caps_ref (caps);
  }

  if (!gst_pad_set_caps (otherpad, othercaps))
    goto caps_not_accepted;
  gst_caps_unref (othercaps);

  /* TODO: introduce self->field_stride */
  self->field_height = self->frame_height / 2;

  fmt = gst_video_format_from_fourcc (fourcc);

  /* TODO: only true if fields are subbuffers of interlaced frames,
     change when the buffer-fields concept has landed */
  self->field_stride =
      gst_video_format_get_row_stride (fmt, 0, self->frame_width) * 2;
  self->output_stride =
      gst_video_format_get_row_stride (fmt, 0, self->frame_width);

  /* in bytes */
  self->line_length =
      gst_video_format_get_row_stride (fmt, 0, self->frame_width);
  self->frame_size =
      gst_video_format_get_size (fmt, self->frame_width, self->frame_height);

  if (self->fields == GST_DEINTERLACE2_ALL && otherpad == self->srcpad)
    self->field_duration =
        gst_util_uint64_scale (GST_SECOND, self->frame_rate_d,
        self->frame_rate_n);
  else
    self->field_duration =
        gst_util_uint64_scale (GST_SECOND, self->frame_rate_d,
        2 * self->frame_rate_n);

  GST_DEBUG_OBJECT (self, "Set caps: %" GST_PTR_FORMAT, caps);

done:

  gst_object_unref (self);
  return res;

invalid_caps:
  res = FALSE;
  GST_ERROR_OBJECT (pad, "Invalid caps: %" GST_PTR_FORMAT, caps);
  goto done;

caps_not_accepted:
  res = FALSE;
  GST_ERROR_OBJECT (pad, "Caps not accepted: %" GST_PTR_FORMAT, othercaps);
  gst_caps_unref (othercaps);
  goto done;
}
Example #5
static GstCaps *
gst_deinterlace2_getcaps (GstPad * pad)
{
  GstCaps *ret;
  GstDeinterlace2 *self = GST_DEINTERLACE2 (gst_pad_get_parent (pad));
  GstPad *otherpad;
  gint len;
  const GstCaps *ourcaps;
  GstCaps *peercaps;

  GST_OBJECT_LOCK (self);

  otherpad = (pad == self->srcpad) ? self->sinkpad : self->srcpad;

  ourcaps = gst_pad_get_pad_template_caps (pad);
  peercaps = gst_pad_peer_get_caps (otherpad);

  if (peercaps) {
    ret = gst_caps_intersect (ourcaps, peercaps);
    gst_caps_unref (peercaps);
  } else {
    ret = gst_caps_copy (ourcaps);
  }

  GST_OBJECT_UNLOCK (self);

  if (self->fields == GST_DEINTERLACE2_ALL) {
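    /* One output frame per field: rewrite the framerates coming from the
     * other side (doubled on the source pad, halved on the sink pad),
     * handling plain fractions, fraction ranges and lists of fractions. */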
    for (len = gst_caps_get_size (ret); len > 0; len--) {
      GstStructure *s = gst_caps_get_structure (ret, len - 1);
      const GValue *val;

      val = gst_structure_get_value (s, "framerate");
      if (!val)
        continue;

      if (G_VALUE_TYPE (val) == GST_TYPE_FRACTION) {
        gint n, d;

        n = gst_value_get_fraction_numerator (val);
        d = gst_value_get_fraction_denominator (val);

        if (!gst_fraction_double (&n, &d, pad != self->srcpad)) {
          goto error;
        }

        gst_structure_set (s, "framerate", GST_TYPE_FRACTION, n, d, NULL);
      } else if (G_VALUE_TYPE (val) == GST_TYPE_FRACTION_RANGE) {
        const GValue *min, *max;
        GValue nrange = { 0, };
        GValue nmin = { 0, };
        GValue nmax = { 0, };
        gint n, d;

        g_value_init (&nrange, GST_TYPE_FRACTION_RANGE);
        g_value_init (&nmin, GST_TYPE_FRACTION);
        g_value_init (&nmax, GST_TYPE_FRACTION);

        min = gst_value_get_fraction_range_min (val);
        max = gst_value_get_fraction_range_max (val);

        n = gst_value_get_fraction_numerator (min);
        d = gst_value_get_fraction_denominator (min);

        if (!gst_fraction_double (&n, &d, pad != self->srcpad)) {
          g_value_unset (&nrange);
          g_value_unset (&nmax);
          g_value_unset (&nmin);
          goto error;
        }

        gst_value_set_fraction (&nmin, n, d);

        n = gst_value_get_fraction_numerator (max);
        d = gst_value_get_fraction_denominator (max);

        if (!gst_fraction_double (&n, &d, pad != self->srcpad)) {
          g_value_unset (&nrange);
          g_value_unset (&nmax);
          g_value_unset (&nmin);
          goto error;
        }

        gst_value_set_fraction (&nmax, n, d);
        gst_value_set_fraction_range (&nrange, &nmin, &nmax);

        gst_structure_set_value (s, "framerate", &nrange);

        g_value_unset (&nmin);
        g_value_unset (&nmax);
        g_value_unset (&nrange);
      } else if (G_VALUE_TYPE (val) == GST_TYPE_LIST) {
        const GValue *lval;
        GValue nlist = { 0, };
        GValue nval = { 0, };
        gint i;

        g_value_init (&nlist, GST_TYPE_LIST);
        for (i = gst_value_list_get_size (val); i > 0; i--) {
          gint n, d;

          lval = gst_value_list_get_value (val, i - 1);

          if (G_VALUE_TYPE (lval) != GST_TYPE_FRACTION)
            continue;

          n = gst_value_get_fraction_numerator (lval);
          d = gst_value_get_fraction_denominator (lval);

          /* Double/Half the framerate but if this fails simply
           * skip this value from the list */
          if (!gst_fraction_double (&n, &d, pad != self->srcpad)) {
            continue;
          }

          g_value_init (&nval, GST_TYPE_FRACTION);

          gst_value_set_fraction (&nval, n, d);
          gst_value_list_append_value (&nlist, &nval);
          g_value_unset (&nval);
        }
        gst_structure_set_value (s, "framerate", &nlist);
        g_value_unset (&nlist);
      }
    }
  }

  GST_DEBUG_OBJECT (pad, "Returning caps %" GST_PTR_FORMAT, ret);

  return ret;

error:
  GST_ERROR_OBJECT (pad, "Unable to transform peer caps");
  gst_caps_unref (ret);
  return NULL;
}
Example #6
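/* Source-pad query handler: answers POSITION, DURATION and SEEKING queries,
 * converting the sample-based segment values to time with the current sample
 * rate; bails out early while the sample rate is still unknown and hands
 * everything else to the default query handler. */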
static gboolean
gst_musepackdec_src_query (GstPad * pad, GstQuery * query)
{
  GstMusepackDec *musepackdec = GST_MUSEPACK_DEC (gst_pad_get_parent (pad));
  GstFormat format;
  gboolean res = FALSE;
  gint samplerate;

  samplerate = g_atomic_int_get (&musepackdec->rate);

  if (samplerate == 0)
    goto done;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:{
      gint64 cur, cur_off;

      gst_query_parse_position (query, &format, NULL);

      GST_OBJECT_LOCK (musepackdec);
      cur_off = musepackdec->segment.last_stop;
      GST_OBJECT_UNLOCK (musepackdec);

      if (format == GST_FORMAT_TIME) {
        cur = gst_util_uint64_scale_int (cur_off, GST_SECOND, samplerate);
        gst_query_set_position (query, GST_FORMAT_TIME, cur);
        res = TRUE;
      } else if (format == GST_FORMAT_DEFAULT) {
        gst_query_set_position (query, GST_FORMAT_DEFAULT, cur_off);
        res = TRUE;
      }
      break;
    }
    case GST_QUERY_DURATION:{
      gint64 len, len_off;

      gst_query_parse_duration (query, &format, NULL);

      GST_OBJECT_LOCK (musepackdec);
      len_off = musepackdec->segment.duration;
      GST_OBJECT_UNLOCK (musepackdec);

      if (format == GST_FORMAT_TIME) {
        len = gst_util_uint64_scale_int (len_off, GST_SECOND, samplerate);
        gst_query_set_duration (query, GST_FORMAT_TIME, len);
        res = TRUE;
      } else if (format == GST_FORMAT_DEFAULT) {
        gst_query_set_duration (query, GST_FORMAT_DEFAULT, len_off);
        res = TRUE;
      }
      break;
    }
    case GST_QUERY_SEEKING:{
      GstFormat fmt;

      res = TRUE;
      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
      if (fmt == GST_FORMAT_TIME || fmt == GST_FORMAT_DEFAULT)
        gst_query_set_seeking (query, fmt, TRUE, 0, -1);
      else
        gst_query_set_seeking (query, fmt, FALSE, -1, -1);

      break;
    }
    default:
      res = gst_pad_query_default (pad, query);
      break;
  }

done:
  gst_object_unref (musepackdec);
  return res;
}
Example #7
static gboolean
theora_enc_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  GstTheoraEnc *enc = GST_THEORA_ENC (gst_pad_get_parent (pad));
  guint32 fourcc;
  const GValue *par;
  gint fps_n, fps_d;

  gst_structure_get_fourcc (structure, "format", &fourcc);
  gst_structure_get_int (structure, "width", &enc->width);
  gst_structure_get_int (structure, "height", &enc->height);
  gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d);
  par = gst_structure_get_value (structure, "pixel-aspect-ratio");

  th_info_clear (&enc->info);
  th_info_init (&enc->info);
  /* Theora has a divisible-by-sixteen restriction for the encoded video size but
   * we can define a picture area using pic_width/pic_height */
  enc->info.frame_width = GST_ROUND_UP_16 (enc->width);
  enc->info.frame_height = GST_ROUND_UP_16 (enc->height);
  enc->info.pic_width = enc->width;
  enc->info.pic_height = enc->height;
  switch (fourcc) {
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
      enc->info.pixel_fmt = TH_PF_420;
      break;
    case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
      enc->info.pixel_fmt = TH_PF_422;
      break;
    case GST_MAKE_FOURCC ('Y', '4', '4', '4'):
      enc->info.pixel_fmt = TH_PF_444;
      break;
    default:
      g_assert_not_reached ();
  }

  enc->info.fps_numerator = enc->fps_n = fps_n;
  enc->info.fps_denominator = enc->fps_d = fps_d;
  if (par) {
    enc->info.aspect_numerator = gst_value_get_fraction_numerator (par);
    enc->info.aspect_denominator = gst_value_get_fraction_denominator (par);
  } else {
    /* setting them to 0 indicates that the decoder can choose a good aspect
     * ratio, defaulting to 1/1 */
    enc->info.aspect_numerator = 0;
    enc->info.aspect_denominator = 0;
  }

  enc->info.colorspace = TH_CS_UNSPECIFIED;

  /* as done in theora */
  enc->info.keyframe_granule_shift = _ilog (enc->keyframe_force - 1);
  GST_DEBUG_OBJECT (enc,
      "keyframe_frequency_force is %d, granule shift is %d",
      enc->keyframe_force, enc->info.keyframe_granule_shift);

  theora_enc_reset (enc);
  enc->initialised = TRUE;

  gst_object_unref (enc);

  return TRUE;
}
Example #8
static gboolean
gst_base_video_parse_src_query (GstPad * pad, GstQuery * query)
{
  GstBaseVideoParse *base_parse;
  gboolean res = FALSE;

  base_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:
    {
      GstFormat format;
      gint64 time;
      gint64 value;

      gst_query_parse_position (query, &format, NULL);

      time = gst_util_uint64_scale (base_parse->presentation_frame_number,
          base_parse->state.fps_d * GST_SECOND, base_parse->state.fps_n);
      time += base_parse->segment.time;
      GST_DEBUG ("query position %" GST_TIME_FORMAT, GST_TIME_ARGS (time));
      res = gst_base_video_encoded_video_convert (&base_parse->state,
          GST_FORMAT_TIME, time, &format, &value);
      if (!res)
        goto error;

      gst_query_set_position (query, format, value);
      break;
    }
    case GST_QUERY_DURATION:
      res =
          gst_pad_query (GST_PAD_PEER (GST_BASE_VIDEO_CODEC_SINK_PAD
              (base_parse)), query);
      if (!res)
        goto error;
      break;
    case GST_QUERY_CONVERT:
    {
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      GST_WARNING ("query convert");

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
      res = gst_base_video_encoded_video_convert (&base_parse->state,
          src_fmt, src_val, &dest_fmt, &dest_val);
      if (!res)
        goto error;
      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
      break;
    }
    default:
      res = gst_pad_query_default (pad, query);
      break;
  }
done:
  gst_object_unref (base_parse);

  return res;
error:
  GST_DEBUG_OBJECT (base_parse, "query failed");
  goto done;
}
Example #9
static gboolean
gst_base_video_parse_src_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoParse *base_video_parse;
  gboolean res = FALSE;

  base_video_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      GstFormat format, tformat;
      gdouble rate;
      GstEvent *real_seek;
      GstSeekFlags flags;
      GstSeekType cur_type, stop_type;
      gint64 cur, stop;
      gint64 tcur, tstop;

      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type,
          &cur, &stop_type, &stop);
      gst_event_unref (event);

      tformat = GST_FORMAT_TIME;
      res = gst_base_video_encoded_video_convert (&base_video_parse->state,
          format, cur, &tformat, &tcur);
      if (!res)
        goto convert_error;
      res = gst_base_video_encoded_video_convert (&base_video_parse->state,
          format, stop, &tformat, &tstop);
      if (!res)
        goto convert_error;

      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
          flags, cur_type, tcur, stop_type, tstop);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_parse),
          real_seek);

      break;
    }
#if 0
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (base_video_parse);
      base_video_parse->proportion = proportion;
      base_video_parse->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (base_video_parse);

      GST_DEBUG_OBJECT (base_video_parse,
          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT,
          GST_TIME_ARGS (timestamp), diff);

      res = gst_pad_push_event (base_video_parse->sinkpad, event);
      break;
    }
#endif
    default:
      res =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_parse),
          event);
      break;
  }
done:
  gst_object_unref (base_video_parse);
  return res;

convert_error:
  GST_DEBUG_OBJECT (base_video_parse, "could not convert format");
  goto done;
}
Example #10
static gboolean
gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoDecoder *base_video_decoder;
  gboolean res = FALSE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:

      /* FIXME: do seek using bitrate in case upstream doesn't handle it */
      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);

      break;

    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      GstClockTime duration;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);
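      /* Track QoS feedback: when frames arrive late (diff > 0), push the
       * earliest-time threshold out by twice the reported diff plus one frame
       * duration so the decoder gets a chance to catch up. */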

      GST_OBJECT_LOCK (base_video_decoder);
      base_video_decoder->proportion = proportion;
      if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) {
        if (G_UNLIKELY (diff > 0)) {
          if (base_video_decoder->state.fps_n > 0)
            duration =
                gst_util_uint64_scale (GST_SECOND,
                base_video_decoder->state.fps_d,
                base_video_decoder->state.fps_n);
          else
            duration = 0;
          base_video_decoder->earliest_time = timestamp + 2 * diff + duration;
        } else {
          base_video_decoder->earliest_time = timestamp + diff;
        }
      } else {
        base_video_decoder->earliest_time = GST_CLOCK_TIME_NONE;
      }
      GST_OBJECT_UNLOCK (base_video_decoder);

      GST_DEBUG_OBJECT (base_video_decoder,
          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT ", %g",
          GST_TIME_ARGS (timestamp), diff, proportion);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);
      break;
    }

    default:
      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);
      break;
  }

  gst_object_unref (base_video_decoder);
  return res;
}
Example #11
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (!base_video_decoder->have_segment) {
    GstEvent *event;
    gboolean ret;

    GST_WARNING
        ("Received buffer without a new-segment. Assuming timestamps start from 0.");

    gst_segment_set_newsegment_full (&base_video_decoder->segment,
        FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0);
    base_video_decoder->have_segment = TRUE;

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
        GST_CLOCK_TIME_NONE, 0);

    ret =
        gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
        event);
    if (!ret) {
      GST_ERROR ("new segment event ret=%d", ret);
      return GST_FLOW_ERROR;
    }
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    gst_base_video_decoder_flush (base_video_decoder);
  }

  base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);
  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
  }

  if (!base_video_decoder->current_frame)
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);

  if (base_video_decoder->packetized) {
    base_video_decoder->current_frame->sink_buffer = buf;

    ret = gst_base_video_decoder_have_frame (base_video_decoder, TRUE, NULL);
  } else {

    gst_adapter_push (base_video_decoder->input_adapter, buf);

    ret = gst_base_video_decoder_drain (base_video_decoder, FALSE);
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
Example #12
static gboolean
gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoDecoder *base_video_decoder;
  gboolean res = FALSE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_EOS:
      if (!base_video_decoder->packetized)
        gst_base_video_decoder_drain (base_video_decoder, TRUE);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD
          (base_video_decoder), event);
      break;

    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      double rate;
      double applied_rate;
      GstFormat format;
      gint64 start;
      gint64 stop;
      gint64 position;
      GstSegment *segment = &base_video_decoder->segment;

      gst_event_parse_new_segment_full (event, &update, &rate,
          &applied_rate, &format, &start, &stop, &position);

      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      if (!update) {
        gst_base_video_decoder_flush (base_video_decoder);
      }

      base_video_decoder->timestamp_offset = start;

      gst_segment_set_newsegment_full (segment,
          update, rate, applied_rate, format, start, stop, position);
      base_video_decoder->have_segment = TRUE;

      GST_WARNING ("new segment: format %d rate %g start %" GST_TIME_FORMAT
          " stop %" GST_TIME_FORMAT
          " position %" GST_TIME_FORMAT
          " update %d",
          format, rate,
          GST_TIME_ARGS (segment->start),
          GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time), update);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD
          (base_video_decoder), event);
      break;
    }

    case GST_EVENT_FLUSH_STOP:
      gst_base_video_decoder_flush (base_video_decoder);
      gst_segment_init (&base_video_decoder->segment, GST_FORMAT_TIME);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD
          (base_video_decoder), event);
      break;

    default:
      res = gst_pad_event_default (pad, event);
      break;
  }

done:
  gst_object_unref (base_video_decoder);
  return res;

newseg_wrong_format:
  GST_DEBUG_OBJECT (base_video_decoder, "received non TIME newsegment");
  gst_event_unref (event);
  goto done;
}
Example #13
static gboolean
theora_parse_src_convert (GstPad * pad,
    GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value)
{
  gboolean res = TRUE;
  GstTheoraParse *parse;
  guint64 scale = 1;

  if (src_format == *dest_format) {
    *dest_value = src_value;
    return TRUE;
  }

  parse = GST_THEORA_PARSE (gst_pad_get_parent (pad));

  /* we need the info part before we can do anything */
  if (!parse->streamheader_received)
    goto no_header;

  switch (src_format) {
    case GST_FORMAT_BYTES:
      switch (*dest_format) {
        case GST_FORMAT_DEFAULT:
          *dest_value = gst_util_uint64_scale_int (src_value, 2,
              parse->info.pic_height * parse->info.pic_width * 3);
          break;
        case GST_FORMAT_TIME:
          /* seems like a rather silly conversion, implement me if you like */
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          scale = 3 * (parse->info.pic_width * parse->info.pic_height) / 2;
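          /* deliberate fall-through: the DEFAULT case below converts time to
           * frames and multiplies by the per-frame byte size set up above */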
        case GST_FORMAT_DEFAULT:
          *dest_value = scale * gst_util_uint64_scale (src_value,
              parse->info.fps_numerator,
              parse->info.fps_denominator * GST_SECOND);
          break;
        default:
          GST_DEBUG_OBJECT (parse, "cannot convert to format %s",
              gst_format_get_name (*dest_format));
          res = FALSE;
      }
      break;
    case GST_FORMAT_DEFAULT:
      switch (*dest_format) {
        case GST_FORMAT_TIME:
          *dest_value = gst_util_uint64_scale (src_value,
              GST_SECOND * parse->info.fps_denominator,
              parse->info.fps_numerator);
          break;
        case GST_FORMAT_BYTES:
          *dest_value = gst_util_uint64_scale_int (src_value,
              3 * parse->info.pic_width * parse->info.pic_height, 2);
          break;
        default:
          res = FALSE;
      }
      break;
    default:
      res = FALSE;
  }
done:
  gst_object_unref (parse);
  return res;

  /* ERRORS */
no_header:
  {
    GST_DEBUG_OBJECT (parse, "no header yet, cannot convert");
    res = FALSE;
    goto done;
  }
}
Example #14
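/* Chain function: gather raw 16-bit samples in an adapter and encode one
 * AMR-WB frame (L_FRAME16k samples) at a time, carrying the timestamp and
 * DISCONT flag over to the encoded output buffers. */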
static GstFlowReturn
gst_amrwbenc_chain (GstPad * pad, GstBuffer * buffer)
{
  GstAmrwbEnc *amrwbenc;
  GstFlowReturn ret = GST_FLOW_OK;
  const int buffer_size = sizeof (Word16) * L_FRAME16k;

  amrwbenc = GST_AMRWBENC (gst_pad_get_parent (pad));

  g_return_val_if_fail (amrwbenc->handle, GST_FLOW_WRONG_STATE);

  if (amrwbenc->rate == 0 || amrwbenc->channels == 0) {
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

  /* discontinuity clears adapter, FIXME, maybe we can set some
   * encoder flag to mask the discont. */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (amrwbenc->adapter);
    amrwbenc->ts = 0;
    amrwbenc->discont = TRUE;
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    amrwbenc->ts = GST_BUFFER_TIMESTAMP (buffer);

  ret = GST_FLOW_OK;
  gst_adapter_push (amrwbenc->adapter, buffer);

  /* Collect samples until we have enough for an output frame */
  while (gst_adapter_available (amrwbenc->adapter) >= buffer_size) {
    GstBuffer *out;
    guint8 *data;
    gint outsize;

    out = gst_buffer_new_and_alloc (buffer_size);
    GST_BUFFER_DURATION (out) = GST_SECOND * L_FRAME16k /
        (amrwbenc->rate * amrwbenc->channels);
    GST_BUFFER_TIMESTAMP (out) = amrwbenc->ts;
    if (amrwbenc->ts != -1) {
      amrwbenc->ts += GST_BUFFER_DURATION (out);
    }
    if (amrwbenc->discont) {
      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DISCONT);
      amrwbenc->discont = FALSE;
    }
    gst_buffer_set_caps (out, gst_pad_get_caps (amrwbenc->srcpad));

    data = (guint8 *) gst_adapter_peek (amrwbenc->adapter, buffer_size);

    /* encode */
    outsize =
        E_IF_encode (amrwbenc->handle, amrwbenc->bandmode, (Word16 *) data,
        (UWord8 *) GST_BUFFER_DATA (out), 0);

    gst_adapter_flush (amrwbenc->adapter, buffer_size);
    GST_BUFFER_SIZE (out) = outsize;

    /* play */
    if ((ret = gst_pad_push (amrwbenc->srcpad, out)) != GST_FLOW_OK)
      break;
  }

done:

  gst_object_unref (amrwbenc);
  return ret;

}
Example #15
static gboolean
gst_schro_dec_sink_event (GstPad *pad, GstEvent *event)
{
  GstSchroDec *dec;
  gboolean ret = FALSE;

  dec = GST_SCHRO_DEC (gst_pad_get_parent(pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      //GST_ERROR("unhandled flush start");
      ret = gst_pad_push_event (dec->srcpad, event);
      break;
    case GST_EVENT_FLUSH_STOP:
      //GST_ERROR("unhandled flush stop");
      gst_schro_dec_reset (dec);
      ret = gst_pad_push_event (dec->srcpad, event);
      break;
    case GST_EVENT_EOS:
      ret = gst_schro_dec_process_buffer (dec, NULL);
      ret = gst_pad_push_event (dec->srcpad, event);
      break;
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      GstFormat format;
      gdouble rate;
      gint64 start, stop, time;

      gst_event_parse_new_segment (event, &update, &rate, &format, &start,
          &stop, &time);

      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      if (rate <= 0.0)
        goto newseg_wrong_rate;

      GST_DEBUG ("newsegment %" GST_TIME_FORMAT " %" GST_TIME_FORMAT,
          GST_TIME_ARGS (start), GST_TIME_ARGS (time));
      gst_segment_set_newsegment (&dec->segment, update, rate, format,
          start, stop, time);

      ret = gst_pad_push_event (dec->srcpad, event);
      break;
    }
    default:
      ret = gst_pad_push_event (dec->srcpad, event);
      break;
  }
done:
  gst_object_unref (dec);
  return ret;

newseg_wrong_format:
  GST_DEBUG_OBJECT (dec, "received non TIME newsegment");
  gst_event_unref (event);
  goto done;

newseg_wrong_rate:
  GST_DEBUG_OBJECT (dec, "negative rates not supported");
  gst_event_unref (event);
  goto done;
}
Example #16
static gboolean
gst_base_video_parse_sink_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoParse *base_video_parse;
  gboolean ret = FALSE;

  base_video_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
          event);
      break;
    case GST_EVENT_FLUSH_STOP:
      gst_base_video_parse_reset (base_video_parse);
      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
          event);
      break;
    case GST_EVENT_EOS:
      if (gst_base_video_parse_push_all (base_video_parse,
              FALSE) == GST_FLOW_ERROR) {
        gst_event_unref (event);
        return FALSE;
      }

      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
          event);
      break;
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      GstFormat format;
      gdouble rate;
      gint64 start, stop, time;

      gst_event_parse_new_segment (event, &update, &rate, &format, &start,
          &stop, &time);

      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      if (rate <= 0.0)
        goto newseg_wrong_rate;

      GST_DEBUG ("newsegment %" GST_TIME_FORMAT " %" GST_TIME_FORMAT,
          GST_TIME_ARGS (start), GST_TIME_ARGS (time));
      gst_segment_set_newsegment (&base_video_parse->segment, update,
          rate, format, start, stop, time);

      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
          event);
      break;
    }
    default:
      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
          event);
      break;
  }
done:
  gst_object_unref (base_video_parse);
  return ret;

newseg_wrong_format:
  GST_DEBUG_OBJECT (base_video_parse, "received non TIME newsegment");
  gst_event_unref (event);
  goto done;

newseg_wrong_rate:
  GST_DEBUG_OBJECT (base_video_parse, "negative rates not supported");
  gst_event_unref (event);
  goto done;
}
Example #17
static gboolean
gst_wavenc_sink_setcaps (GstPad * pad, GstCaps * caps)
{
    GstWavEnc *wavenc;
    GstStructure *structure;
    const gchar *name;
    gint chans, rate, width;

    wavenc = GST_WAVENC (gst_pad_get_parent (pad));

    if (wavenc->sent_header) {
        GST_WARNING_OBJECT (wavenc, "cannot change format in middle of stream");
        goto fail;
    }

    GST_DEBUG_OBJECT (wavenc, "got caps: %" GST_PTR_FORMAT, caps);

    structure = gst_caps_get_structure (caps, 0);
    name = gst_structure_get_name (structure);

    if (!gst_structure_get_int (structure, "channels", &chans) ||
            !gst_structure_get_int (structure, "rate", &rate)) {
        GST_WARNING_OBJECT (wavenc, "caps incomplete");
        goto fail;
    }

    if (strcmp (name, "audio/x-raw-int") == 0) {
        if (!gst_structure_get_int (structure, "width", &width)) {
            GST_WARNING_OBJECT (wavenc, "caps incomplete");
            goto fail;
        }
        wavenc->format = GST_RIFF_WAVE_FORMAT_PCM;
        wavenc->width = width;
    } else if (strcmp (name, "audio/x-raw-float") == 0) {
        if (!gst_structure_get_int (structure, "width", &width)) {
            GST_WARNING_OBJECT (wavenc, "caps incomplete");
            goto fail;
        }
        wavenc->format = GST_RIFF_WAVE_FORMAT_IEEE_FLOAT;
        wavenc->width = width;
    } else if (strcmp (name, "audio/x-alaw") == 0) {
        wavenc->format = GST_RIFF_WAVE_FORMAT_ALAW;
        wavenc->width = 8;
    } else if (strcmp (name, "audio/x-mulaw") == 0) {
        wavenc->format = GST_RIFF_WAVE_FORMAT_MULAW;
        wavenc->width = 8;
    } else {
        GST_WARNING_OBJECT (wavenc, "Unsupported format %s", name);
        goto fail;
    }

    wavenc->channels = chans;
    wavenc->rate = rate;

    GST_LOG_OBJECT (wavenc,
                    "accepted caps: format=0x%04x chans=%u width=%u rate=%u",
                    wavenc->format, wavenc->channels, wavenc->width, wavenc->rate);

    gst_object_unref (wavenc);
    return TRUE;

fail:
    gst_object_unref (wavenc);
    return FALSE;
}
Example #18
static gboolean
gst_rtp_jitter_buffer_sink_event (GstPad * pad, GstEvent * event)
{
  gboolean ret = TRUE;
  GstRtpJitterBuffer *jitterbuffer;
  GstRtpJitterBufferPrivate *priv;

  jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
  priv = jitterbuffer->priv;

  GST_DEBUG_OBJECT (jitterbuffer, "received %s", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NEWSEGMENT:
    {
      GstFormat format;
      gdouble rate, arate;
      gint64 start, stop, time;
      gboolean update;

      gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
          &start, &stop, &time);

      /* we need time for now */
      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      GST_DEBUG_OBJECT (jitterbuffer,
          "newsegment: update %d, rate %g, arate %g, start %" GST_TIME_FORMAT
          ", stop %" GST_TIME_FORMAT ", time %" GST_TIME_FORMAT,
          update, rate, arate, GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
          GST_TIME_ARGS (time));

      /* now configure the values, we need these to time the release of the
       * buffers on the srcpad. */
      gst_segment_set_newsegment_full (&priv->segment, update,
          rate, arate, format, start, stop, time);

      /* FIXME, push SEGMENT in the queue. Sorting order might be difficult. */
      ret = gst_pad_push_event (priv->srcpad, event);
      break;
    }
    case GST_EVENT_FLUSH_START:
      gst_rtp_jitter_buffer_flush_start (jitterbuffer);
      ret = gst_pad_push_event (priv->srcpad, event);
      break;
    case GST_EVENT_FLUSH_STOP:
      ret = gst_pad_push_event (priv->srcpad, event);
      ret = gst_rtp_jitter_buffer_src_activate_push (priv->srcpad, TRUE);
      break;
    case GST_EVENT_EOS:
    {
      /* push EOS in queue. We always push it at the head */
      JBUF_LOCK (priv);
      /* check for flushing, we need to discard the event and return FALSE when
       * we are flushing */
      ret = priv->srcresult == GST_FLOW_OK;
      if (ret && !priv->eos) {
        GST_DEBUG_OBJECT (jitterbuffer, "queuing EOS");
        priv->eos = TRUE;
        JBUF_SIGNAL (priv);
      } else if (priv->eos) {
        GST_DEBUG_OBJECT (jitterbuffer, "dropping EOS, we are already EOS");
      } else {
        GST_DEBUG_OBJECT (jitterbuffer, "dropping EOS, reason %s",
            gst_flow_get_name (priv->srcresult));
      }
      JBUF_UNLOCK (priv);
      gst_event_unref (event);
      break;
    }
    default:
      ret = gst_pad_push_event (priv->srcpad, event);
      break;
  }

done:
  gst_object_unref (jitterbuffer);

  return ret;

  /* ERRORS */
newseg_wrong_format:
  {
    GST_DEBUG_OBJECT (jitterbuffer, "received non TIME newsegment");
    ret = FALSE;
    goto done;
  }
}
Example #19
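/* Chain function: collect SVG data in an adapter, look for a closing "</svg>"
 * tag, decode each complete document into a video frame, and push it with
 * timestamps derived from the configured framerate (sending a newsegment and
 * any pending events/tags before the first frame). */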
static GstFlowReturn
gst_rsvg_dec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (gst_pad_get_parent (pad));
  gboolean completed = FALSE;
  const guint8 *data;
  guint size;
  GstFlowReturn ret = GST_FLOW_OK;

  if (rsvg->timestamp_offset == GST_CLOCK_TIME_NONE) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
      rsvg->timestamp_offset = GST_BUFFER_TIMESTAMP (buffer);
    else
      rsvg->timestamp_offset = 0;
  }

  gst_adapter_push (rsvg->adapter, buffer);

  size = gst_adapter_available (rsvg->adapter);

  /* "<svg></svg>" */
  while (size >= 5 + 6 && ret == GST_FLOW_OK) {
    guint i;

    completed = FALSE;
    data = gst_adapter_peek (rsvg->adapter, size);
    for (i = size - 6; i >= 5; i--) {
      if (memcmp (data + i, "</svg>", 6) == 0) {
        completed = TRUE;
        size = i + 6;
        break;
      }
    }

    if (completed) {
      GstBuffer *outbuf = NULL;

      data = gst_adapter_peek (rsvg->adapter, size);

      ret = gst_rsvg_decode_image (rsvg, data, size, &outbuf);
      if (ret != GST_FLOW_OK)
        break;


      if (rsvg->fps_n != 0) {
        GST_BUFFER_TIMESTAMP (outbuf) =
            rsvg->timestamp_offset + gst_util_uint64_scale (rsvg->frame_count,
            rsvg->fps_d * GST_SECOND, rsvg->fps_n);
        GST_BUFFER_DURATION (outbuf) =
            gst_util_uint64_scale (GST_SECOND, rsvg->fps_d, rsvg->fps_n);
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = 0;
      }
      rsvg->frame_count++;

      if (rsvg->need_newsegment) {
        gst_pad_push_event (rsvg->srcpad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));
        rsvg->need_newsegment = FALSE;
      }

      if (rsvg->pending_events) {
        GList *l;

        for (l = rsvg->pending_events; l; l = l->next)
          gst_pad_push_event (rsvg->srcpad, l->data);
        g_list_free (rsvg->pending_events);
        rsvg->pending_events = NULL;
      }

      if (rsvg->pending_tags) {
        gst_element_found_tags (GST_ELEMENT_CAST (rsvg), rsvg->pending_tags);
        rsvg->pending_tags = NULL;
      }

      ret = gst_pad_push (rsvg->srcpad, outbuf);
      if (ret != GST_FLOW_OK)
        break;

      gst_adapter_flush (rsvg->adapter, size);
      size = gst_adapter_available (rsvg->adapter);
      continue;
    } else {
      break;
    }
  }

  gst_object_unref (rsvg);

  return ret;
}
Example #20
static GstFlowReturn
gst_rtp_jitter_buffer_chain (GstPad * pad, GstBuffer * buffer)
{
  GstRtpJitterBuffer *jitterbuffer;
  GstRtpJitterBufferPrivate *priv;
  guint16 seqnum;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime timestamp;
  guint64 latency_ts;
  gboolean tail;

  jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));

  if (!gst_rtp_buffer_validate (buffer))
    goto invalid_buffer;

  priv = jitterbuffer->priv;

  if (priv->last_pt != gst_rtp_buffer_get_payload_type (buffer)) {
    GstCaps *caps;

    priv->last_pt = gst_rtp_buffer_get_payload_type (buffer);
    /* reset clock-rate so that we get a new one */
    priv->clock_rate = -1;
    /* Try to get the clock-rate from the caps first if we can. If there are no
     * caps we must fire the signal to get the clock-rate. */
    if ((caps = GST_BUFFER_CAPS (buffer))) {
      gst_jitter_buffer_sink_parse_caps (jitterbuffer, caps);
    }
  }

  if (priv->clock_rate == -1) {
    guint8 pt;

    /* no clock rate given on the caps, try to get one with the signal */
    pt = gst_rtp_buffer_get_payload_type (buffer);

    gst_rtp_jitter_buffer_get_clock_rate (jitterbuffer, pt);
    if (priv->clock_rate == -1)
      goto not_negotiated;
  }

  /* take the timestamp of the buffer. This is the time when the packet was
   * received and is used to calculate jitter and clock skew. We will adjust
   * this timestamp with the smoothed value after processing it in the
   * jitterbuffer. */
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  /* bring to running time */
  timestamp = gst_segment_to_running_time (&priv->segment, GST_FORMAT_TIME,
      timestamp);

  seqnum = gst_rtp_buffer_get_seq (buffer);
  GST_DEBUG_OBJECT (jitterbuffer,
      "Received packet #%d at time %" GST_TIME_FORMAT, seqnum,
      GST_TIME_ARGS (timestamp));

  JBUF_LOCK_CHECK (priv, out_flushing);
  /* don't accept more data on EOS */
  if (priv->eos)
    goto have_eos;

  /* let's check if this buffer is too late, we can only accept packets with
   * bigger seqnum than the one we last pushed. */
  if (priv->last_popped_seqnum != -1) {
    gint gap;

    gap = gst_rtp_buffer_compare_seqnum (priv->last_popped_seqnum, seqnum);

    if (gap <= 0) {
      /* priv->last_popped_seqnum >= seqnum, this packet is too late or the
       * sender might have been restarted with different seqnum. */
      if (gap < -100) {
        GST_DEBUG_OBJECT (jitterbuffer, "reset: buffer too old %d", gap);
        priv->last_popped_seqnum = -1;
        priv->next_seqnum = -1;
      } else {
        goto too_late;
      }
    } else {
      /* priv->last_popped_seqnum < seqnum, this is a new packet */
      if (gap > 3000) {
        GST_DEBUG_OBJECT (jitterbuffer, "reset: too many dropped packets %d",
            gap);
        priv->last_popped_seqnum = -1;
        priv->next_seqnum = -1;
      }
    }
  }

  /* let's drop oldest packet if the queue is already full and drop-on-latency
   * is set. We can only do this when there actually is a latency. When no
   * latency is set, we just pump it in the queue and let the other end push it
   * out as fast as possible. */
  if (priv->latency_ms && priv->drop_on_latency) {

    latency_ts =
        gst_util_uint64_scale_int (priv->latency_ms, priv->clock_rate, 1000);

    if (rtp_jitter_buffer_get_ts_diff (priv->jbuf) >= latency_ts) {
      GstBuffer *old_buf;

      GST_DEBUG_OBJECT (jitterbuffer, "Queue full, dropping old packet #%d",
          seqnum);

      old_buf = rtp_jitter_buffer_pop (priv->jbuf);
      gst_buffer_unref (old_buf);
    }
  }

  /* now insert the packet into the queue in sorted order. This function returns
   * FALSE if a packet with the same seqnum was already in the queue, meaning we
   * have a duplicate. */
  if (!rtp_jitter_buffer_insert (priv->jbuf, buffer, timestamp,
          priv->clock_rate, &tail))
    goto duplicate;

  /* signal addition of new buffer when the _loop is waiting. */
  if (priv->waiting)
    JBUF_SIGNAL (priv);

  /* let's unschedule and unblock any waiting buffers. We only want to do this
   * when the tail buffer changed */
  if (priv->clock_id && tail) {
    GST_DEBUG_OBJECT (jitterbuffer,
        "Unscheduling waiting buffer, new tail buffer");
    gst_clock_id_unschedule (priv->clock_id);
  }

  GST_DEBUG_OBJECT (jitterbuffer, "Pushed packet #%d, now %d packets",
      seqnum, rtp_jitter_buffer_num_packets (priv->jbuf));

finished:
  JBUF_UNLOCK (priv);

  gst_object_unref (jitterbuffer);

  return ret;

  /* ERRORS */
invalid_buffer:
  {
    /* this is not fatal but should be filtered earlier */
    GST_ELEMENT_WARNING (jitterbuffer, STREAM, DECODE, (NULL),
        ("Received invalid RTP payload, dropping"));
    gst_buffer_unref (buffer);
    gst_object_unref (jitterbuffer);
    return GST_FLOW_OK;
  }
not_negotiated:
  {
    GST_WARNING_OBJECT (jitterbuffer, "No clock-rate in caps!");
    gst_buffer_unref (buffer);
    gst_object_unref (jitterbuffer);
    return GST_FLOW_OK;
  }
out_flushing:
  {
    ret = priv->srcresult;
    GST_DEBUG_OBJECT (jitterbuffer, "flushing %s", gst_flow_get_name (ret));
    gst_buffer_unref (buffer);
    goto finished;
  }
have_eos:
  {
    ret = GST_FLOW_UNEXPECTED;
    GST_WARNING_OBJECT (jitterbuffer, "we are EOS, refusing buffer");
    gst_buffer_unref (buffer);
    goto finished;
  }
too_late:
  {
    GST_WARNING_OBJECT (jitterbuffer, "Packet #%d too late as #%d was already"
        " popped, dropping", seqnum, priv->last_popped_seqnum);
    priv->num_late++;
    gst_buffer_unref (buffer);
    goto finished;
  }
duplicate:
  {
    GST_WARNING_OBJECT (jitterbuffer, "Duplicate packet #%d detected, dropping",
        seqnum);
    priv->num_duplicates++;
    gst_buffer_unref (buffer);
    goto finished;
  }
}
Example #21
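/* Chain function: when the OMX input port is tunneled, grab and process an
 * output buffer directly; otherwise gather samples in an adapter and hand
 * them to the OMX component one input-buffer-sized chunk at a time. */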
static GstFlowReturn
gst_goo_encpcm_chain (GstPad* pad, GstBuffer* buffer)
{

	GstGooEncPcm* self = GST_GOO_ENCPCM (gst_pad_get_parent (pad));
	guint omxbufsiz = GOO_PORT_GET_DEFINITION (self->inport)->nBufferSize;
	GstGooEncPcmPrivate* priv = GST_GOO_ENCPCM_GET_PRIVATE (self);
	GstFlowReturn ret = GST_FLOW_OK;
	OMX_BUFFERHEADERTYPE* omx_buffer = NULL;
	GstBuffer* gst_buffer = NULL;

	GST_DEBUG_OBJECT (self, "");

	if (self->rate == 0 || self->channels == 0 ||
	    self->component->cur_state != OMX_StateExecuting)
	{
		goto not_negotiated;
	}


	/* take latest timestamp, FIXME timestamp is the one of the
	 * first buffer in the adapter. */
	if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
	{
		self->ts = GST_BUFFER_TIMESTAMP (buffer);
	}


	if (goo_port_is_tunneled (self->inport))
	{
		GST_DEBUG_OBJECT (self, "Input Port is tunneled (DM)");

		GST_DEBUG_OBJECT (self, "Take the oxm buffer, process it and push it");
		ret = process_output_buffer(self,
																goo_port_grab_buffer (self->outport));
	}
	else
	{
		/* discontinuity clears adapter, FIXME, maybe we can set some
		 * encoder flag to mask the discont. */
		if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
		{
			gst_goo_adapter_clear (self->adapter);
			self->ts = 0;
		}

		GST_DEBUG_OBJECT (self, "");
		ret = GST_FLOW_OK;
		GST_DEBUG_OBJECT (self, "Pushing a GST buffer to adapter (%d)",
					GST_BUFFER_SIZE (buffer));
		gst_goo_adapter_push (self->adapter, buffer);

		/* Collect samples until we have enough for an output frame */
		while (gst_goo_adapter_available (self->adapter) >= omxbufsiz)
		{
			GST_DEBUG_OBJECT (self, "Popping an OMX buffer");
			OMX_BUFFERHEADERTYPE* omxbuf;
			omxbuf = goo_port_grab_buffer (self->inport);
			gst_goo_adapter_peek (self->adapter, omxbufsiz, omxbuf);
			omxbuf->nFilledLen = omxbufsiz;
			gst_goo_adapter_flush (self->adapter, omxbufsiz);
			goo_component_release_buffer (self->component, omxbuf);
		}
	}

	goto done;


	/* ERRORS */
not_negotiated:
	{
		GST_ELEMENT_ERROR (self, STREAM, TYPE_NOT_FOUND,
				   (NULL), ("unknown type"));
		/* clean up the pad reference and the unconsumed buffer */
		gst_buffer_unref (buffer);
		gst_object_unref (self);
		return GST_FLOW_NOT_NEGOTIATED;
	}

done:
  GST_DEBUG_OBJECT (self, "");
	gst_object_unref (self);
	gst_buffer_unref (buffer);
	return ret;
}
Example #22
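/* Mask-pad getcaps: start from what the video sink pad and source pad can
 * agree on, rewrite the structures to 8- and 16-bit grayscale at the current
 * width/height with framerate 0/1, then intersect with the mask peer's caps. */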
static GstCaps *
gst_shape_wipe_mask_sink_getcaps (GstPad * pad)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *ret, *tmp;
  guint i, n;

  if (GST_PAD_CAPS (pad))
    return gst_caps_copy (GST_PAD_CAPS (pad));

  tmp = gst_pad_peer_get_caps (self->video_sinkpad);
  if (tmp) {
    ret =
        gst_caps_intersect (tmp,
        gst_pad_get_pad_template_caps (self->video_sinkpad));
    gst_caps_unref (tmp);
  } else {
    ret = gst_caps_copy (gst_pad_get_pad_template_caps (self->video_sinkpad));
  }

  tmp = gst_pad_peer_get_caps (self->srcpad);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (ret, tmp);
    gst_caps_unref (ret);
    gst_caps_unref (tmp);
    ret = intersection;
  }

  n = gst_caps_get_size (ret);
  tmp = gst_caps_new_empty ();
  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (ret, i);
    GstStructure *t;

    gst_structure_set_name (s, "video/x-raw-gray");
    gst_structure_remove_fields (s, "format", "framerate", "bpp", "depth",
        "endianness", "red_mask", "green_mask", "blue_mask",
        "alpha_mask", NULL);

    if (self->width && self->height)
      gst_structure_set (s, "width", G_TYPE_INT, self->width, "height",
          G_TYPE_INT, self->height, NULL);

    gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);

    t = gst_structure_copy (s);

    gst_structure_set (s, "bpp", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16,
        "endianness", G_TYPE_INT, G_BYTE_ORDER, NULL);
    gst_structure_set (t, "bpp", G_TYPE_INT, 8, "depth", G_TYPE_INT, 8, NULL);

    gst_caps_append_structure (tmp, t);
  }
  gst_caps_append (ret, tmp);

  tmp = gst_pad_peer_get_caps (pad);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  gst_object_unref (self);

  GST_DEBUG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
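A getcaps callback such as this one takes effect only after it is installed on the pad; in GStreamer 0.10 that is done with gst_pad_set_getcaps_function. A minimal sketch, assuming a static pad template named mask_sink_template (an assumed name):

/* Sketch: install the getcaps handler on the mask sink pad at init time.
 * `mask_sink_template' is an assumed template name. */
self->mask_sinkpad =
    gst_pad_new_from_static_template (&mask_sink_template, "mask_sink");
gst_pad_set_getcaps_function (self->mask_sinkpad,
    GST_DEBUG_FUNCPTR (gst_shape_wipe_mask_sink_getcaps));
gst_element_add_pad (GST_ELEMENT (self), self->mask_sinkpad);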
Example No. 23
static gboolean
gst_deinterlace2_src_query (GstPad * pad, GstQuery * query)
{
  GstDeinterlace2 *self = GST_DEINTERLACE2 (gst_pad_get_parent (pad));
  gboolean res = FALSE;

  GST_LOG_OBJECT (self, "%s query", GST_QUERY_TYPE_NAME (query));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:
    {
      GstClockTime min, max;
      gboolean live;
      GstPad *peer;

      if ((peer = gst_pad_get_peer (self->sinkpad))) {
        if ((res = gst_pad_query (peer, query))) {
          GstClockTime latency;
          gint fields_required = 0;
          gint method_latency = 0;

          if (self->method) {
            fields_required =
                gst_deinterlace_method_get_fields_required (self->method);
            method_latency = gst_deinterlace_method_get_latency (self->method);
          }

          gst_query_parse_latency (query, &live, &min, &max);

          GST_DEBUG ("Peer latency: min %"
              GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (min), GST_TIME_ARGS (max));

          /* add our own latency */
          latency = (fields_required + method_latency) * self->field_duration;

          GST_DEBUG ("Our latency: min %" GST_TIME_FORMAT
              ", max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (latency), GST_TIME_ARGS (latency));

          min += latency;
          if (max != GST_CLOCK_TIME_NONE)
            max += latency;
          else
            max = latency;

          GST_DEBUG ("Calculated total latency : min %"
              GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (min), GST_TIME_ARGS (max));

          gst_query_set_latency (query, live, min, max);
        }
        gst_object_unref (peer);
      }
      break;
    }
    default:
      res = gst_pad_query_default (pad, query);
      break;
  }

  gst_object_unref (self);
  return res;
}
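From the application side, this handler is exercised by a LATENCY query sent to the element or the surrounding pipeline. A short sketch (GStreamer 0.10; `deinterlace' stands for an already-created element instance):

/* Sketch: issue a LATENCY query and inspect the advertised range. */
GstQuery *q = gst_query_new_latency ();

if (gst_element_query (deinterlace, q)) {
  gboolean live;
  GstClockTime min, max;

  gst_query_parse_latency (q, &live, &min, &max);
  GST_DEBUG ("live=%d min=%" GST_TIME_FORMAT " max=%" GST_TIME_FORMAT,
      live, GST_TIME_ARGS (min), GST_TIME_ARGS (max));
}
gst_query_unref (q);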
Example No. 24
static GstCaps *
gst_shape_wipe_src_getcaps (GstPad * pad)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *ret, *tmp;

  if (GST_PAD_CAPS (pad)) {
    gst_object_unref (self);
    return gst_caps_copy (GST_PAD_CAPS (pad));
  } else if (GST_PAD_CAPS (self->video_sinkpad)) {
    GstCaps *caps = gst_caps_copy (GST_PAD_CAPS (self->video_sinkpad));

    gst_object_unref (self);
    return caps;
  }

  tmp = gst_pad_peer_get_caps (self->video_sinkpad);
  if (tmp) {
    ret =
        gst_caps_intersect (tmp,
        gst_pad_get_pad_template_caps (self->video_sinkpad));
    gst_caps_unref (tmp);
  } else {
    ret = gst_caps_copy (gst_pad_get_pad_template_caps (self->video_sinkpad));
  }

  tmp = gst_pad_peer_get_caps (pad);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  if (self->height && self->width) {
    guint i, n;

    n = gst_caps_get_size (ret);
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (ret, i);

      gst_structure_set (s, "width", G_TYPE_INT, self->width, "height",
          G_TYPE_INT, self->height, NULL);
    }
  }

  tmp = gst_pad_peer_get_caps (self->mask_sinkpad);
  if (tmp) {
    GstCaps *intersection, *tmp2;
    guint i, n;

    tmp = gst_caps_make_writable (tmp);
    tmp2 = gst_caps_copy (gst_pad_get_pad_template_caps (self->mask_sinkpad));

    intersection = gst_caps_intersect (tmp, tmp2);
    gst_caps_unref (tmp);
    gst_caps_unref (tmp2);

    tmp = intersection;
    n = gst_caps_get_size (tmp);

    tmp2 = gst_caps_new_empty ();
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (tmp, i);
      GstStructure *c;

      gst_structure_remove_fields (s, "format", "bpp", "depth", "endianness",
          "framerate", "red_mask", "green_mask", "blue_mask", "alpha_mask",
          NULL);
      gst_structure_set_name (s, "video/x-raw-yuv");
      c = gst_structure_copy (s);

      gst_caps_append_structure (tmp2, c);
    }
    gst_caps_append (tmp, tmp2);

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  gst_object_unref (self);

  GST_DEBUG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
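Negotiation reaches a getcaps function like this through gst_pad_get_caps on the pad; a peer (or test code) can then intersect the result with the format it needs. A sketch with an arbitrary example caps string:

/* Sketch: check whether the src pad can produce a particular raw format. */
GstCaps *offered = gst_pad_get_caps (self->srcpad);
GstCaps *wanted =
    gst_caps_from_string ("video/x-raw-yuv, width=320, height=240");
GstCaps *common = gst_caps_intersect (offered, wanted);

if (gst_caps_is_empty (common))
  GST_DEBUG ("format not supported by the shapewipe src pad");

gst_caps_unref (common);
gst_caps_unref (wanted);
gst_caps_unref (offered);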
Example No. 25
static GstFlowReturn
gst_multipart_demux_chain (GstPad * pad, GstBuffer * buf)
{
  GstMultipartDemux *multipart;
  GstAdapter *adapter;
  GstClockTime timestamp;
  gint size = 1;
  GstFlowReturn res;

  multipart = GST_MULTIPART_DEMUX (gst_pad_get_parent (pad));
  adapter = multipart->adapter;

  res = GST_FLOW_OK;

  timestamp = GST_BUFFER_TIMESTAMP (buf);

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (adapter);
  }
  gst_adapter_push (adapter, buf);

  while (gst_adapter_available (adapter) > 0) {
    GstMultipartPad *srcpad;
    GstBuffer *outbuf;
    gboolean created;
    gint datalen;

    if (G_UNLIKELY (!multipart->header_completed)) {
      if ((size = multipart_parse_header (multipart)) < 0) {
        goto nodata;
      } else {
        gst_adapter_flush (adapter, size);
        multipart->header_completed = TRUE;
      }
    }
    if ((size = multipart_find_boundary (multipart, &datalen)) < 0) {
      goto nodata;
    }

    /* Invalidate header info */
    multipart->header_completed = FALSE;
    multipart->content_length = -1;

    if (G_UNLIKELY (datalen <= 0)) {
      GST_DEBUG_OBJECT (multipart, "skipping empty content.");
      gst_adapter_flush (adapter, size - datalen);
    } else {
      srcpad =
          gst_multipart_find_pad_by_mime (multipart,
          multipart->mime_type, &created);
      outbuf = gst_adapter_take_buffer (adapter, datalen);
      gst_adapter_flush (adapter, size - datalen);

      gst_buffer_set_caps (outbuf, GST_PAD_CAPS (srcpad->pad));
      if (created) {
        GstTagList *tags;

        /* Push new segment, first buffer has 0 timestamp */
        gst_pad_push_event (srcpad->pad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));

        tags =
            gst_tag_list_new_full (GST_TAG_CONTAINER_FORMAT, "Multipart", NULL);
        gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));

        GST_BUFFER_TIMESTAMP (outbuf) = 0;
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
      }
      GST_DEBUG_OBJECT (multipart,
          "pushing buffer with timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
      GST_DEBUG_OBJECT (multipart, "buffer has caps %" GST_PTR_FORMAT,
          GST_BUFFER_CAPS (outbuf));
      res = gst_pad_push (srcpad->pad, outbuf);
      res = gst_multipart_combine_flows (multipart, srcpad, res);
      if (res != GST_FLOW_OK)
        break;
    }
  }

nodata:
  gst_object_unref (multipart);

  if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
    return GST_FLOW_ERROR;
  if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
    return GST_FLOW_UNEXPECTED;

  return res;
}
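This chain function is typically fed by an HTTP source delivering multipart data (e.g. an MJPEG camera stream). A sketch of building such a pipeline from a description string; the URL is a placeholder and the downstream elements are one common choice:

/* Sketch: parse-launch a typical multipartdemux pipeline (placeholder URL). */
GError *err = NULL;
GstElement *pipeline = gst_parse_launch (
    "souphttpsrc location=http://example.com/stream.mjpeg ! "
    "multipartdemux ! jpegdec ! ffmpegcolorspace ! autovideosink", &err);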
Example No. 26
static void
output_loop (gpointer data)
{
    GstPad *pad;
    GOmxCore *gomx;
    GOmxPort *out_port;
    GstOmxBaseFilter2 *self;
    GstFlowReturn ret = GST_FLOW_OK;
    GstOmxBaseFilter2Class *bclass;

    pad = data;
    self = GST_OMX_BASE_FILTER2 (gst_pad_get_parent (pad));
    gomx = self->gomx;

    bclass = GST_OMX_BASE_FILTER2_GET_CLASS (self);

    GST_LOG_OBJECT (self, "begin");

    if (!self->ready)
    {
        g_error ("not ready");
        return;
    }

    out_port = (GOmxPort *)gst_pad_get_element_private(pad);

    if (G_LIKELY (out_port->enabled))
    {
        gpointer obj = g_omx_port_recv (out_port);

        if (G_UNLIKELY (!obj))
        {
            GST_WARNING_OBJECT (self, "null buffer: leaving");
            ret = GST_FLOW_WRONG_STATE;
            goto leave;
        }

        if (G_LIKELY (GST_IS_BUFFER (obj)))
        {
            if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (obj, GST_BUFFER_FLAG_IN_CAPS)))
            {
                GstCaps *caps = NULL;
                GstStructure *structure;
                GValue value = { 0 };

                caps = gst_pad_get_negotiated_caps (pad);
                caps = gst_caps_make_writable (caps);
                structure = gst_caps_get_structure (caps, 0);

                g_value_init (&value, GST_TYPE_BUFFER);
                gst_value_set_buffer (&value, obj);
                gst_buffer_unref (obj);
                gst_structure_set_value (structure, "codec_data", &value);
                g_value_unset (&value);

                gst_pad_set_caps (pad, caps);
            }
            else
            {
                GstBuffer *buf = GST_BUFFER (obj);
                ret = bclass->push_buffer (self, buf);
                GST_DEBUG_OBJECT (self, "ret=%s", gst_flow_get_name (ret));
                /* HACK: don't care if one of the output pads is not connected */
                ret = GST_FLOW_OK;
            }
        }
        else if (GST_IS_EVENT (obj))
        {
            GST_DEBUG_OBJECT (self, "got eos");
            gst_pad_push_event (pad, obj);
            ret = GST_FLOW_UNEXPECTED;
            goto leave;
        }

    }

leave:

    self->last_pad_push_return = ret;

    if (gomx->omx_error != OMX_ErrorNone)
    {
        GST_DEBUG_OBJECT (self, "omx_error=%s", g_omx_error_to_str (gomx->omx_error));
        ret = GST_FLOW_ERROR;
    }

    if (ret != GST_FLOW_OK)
    {
        GST_INFO_OBJECT (self, "pause task, reason:  %s",
                         gst_flow_get_name (ret));
        gst_pad_pause_task (pad);
    }

    GST_LOG_OBJECT (self, "end");

    gst_object_unref (self);
}
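A pad loop like output_loop runs in a streaming task on the source pad. It is typically started once the component is ready and stopped on shutdown, roughly as sketched below (assuming a single source pad field `srcpad' and a local `out_port'; the loop above expects the GOmxPort to be stored as the pad's element-private data):

/* Sketch: hand the port to the pad and start/stop the output task. */
gst_pad_set_element_private (self->srcpad, out_port);
gst_pad_start_task (self->srcpad, output_loop, self->srcpad);
/* ... later, when shutting down: */
gst_pad_stop_task (self->srcpad);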
Example No. 27
static gboolean
gst_interlace_setcaps (GstPad * pad, GstCaps * caps)
{
  GstInterlace *interlace;
  gboolean ret;
  int width, height;
  GstVideoFormat format;
  gboolean interlaced = TRUE;
  int fps_n, fps_d;
  GstPad *otherpad;
  GstCaps *othercaps;
  const PulldownFormat *pdformat;

  interlace = GST_INTERLACE (gst_pad_get_parent (pad));

  otherpad =
      (pad == interlace->srcpad) ? interlace->sinkpad : interlace->srcpad;

  ret = gst_video_format_parse_caps (caps, &format, &width, &height);
  gst_video_format_parse_caps_interlaced (caps, &interlaced);
  ret &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);

  if (!ret)
    goto error;

  othercaps = gst_caps_copy (caps);
  pdformat = &formats[interlace->pattern];

  if (pad == interlace->srcpad) {
    gst_caps_set_simple (othercaps, "interlaced", G_TYPE_BOOLEAN, FALSE, NULL);
    gst_caps_set_simple (othercaps, "framerate", GST_TYPE_FRACTION,
        fps_n * pdformat->ratio_d, fps_d * pdformat->ratio_n, NULL);
  } else {
    gst_caps_set_simple (othercaps, "interlaced", G_TYPE_BOOLEAN, TRUE, NULL);
    gst_caps_set_simple (othercaps, "framerate", GST_TYPE_FRACTION,
        fps_n * pdformat->ratio_n, fps_d * pdformat->ratio_d, NULL);
  }

  ret = gst_pad_set_caps (otherpad, othercaps);
  if (!ret)
    goto error;

  interlace->format = format;
  interlace->width = width;
  interlace->height = height;

  interlace->phase_index = interlace->pattern_offset;

  if (pad == interlace->sinkpad) {
    gst_caps_replace (&interlace->srccaps, othercaps);
    interlace->src_fps_n = fps_n * pdformat->ratio_n;
    interlace->src_fps_d = fps_d * pdformat->ratio_d;
  } else {
    gst_caps_replace (&interlace->srccaps, caps);
    interlace->src_fps_n = fps_n;
    interlace->src_fps_d = fps_d;
  }

error:
  g_object_unref (interlace);

  return ret;
}
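Because this setcaps handler serves both pads (it checks which pad it was called on), it would be installed on each of them in the element's init code, e.g. (a sketch using the 0.10 pad API):

/* Sketch: install the shared setcaps handler on both pads. */
gst_pad_set_setcaps_function (interlace->sinkpad,
    GST_DEBUG_FUNCPTR (gst_interlace_setcaps));
gst_pad_set_setcaps_function (interlace->srcpad,
    GST_DEBUG_FUNCPTR (gst_interlace_setcaps));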
Example No. 28
static gboolean
gst_schro_dec_src_event (GstPad *pad, GstEvent *event)
{
  GstSchroDec *dec;
  gboolean res = FALSE;

  dec = GST_SCHRO_DEC (gst_pad_get_parent(pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      GstFormat format, tformat;
      gdouble rate;
      GstEvent *real_seek;
      GstSeekFlags flags;
      GstSeekType cur_type, stop_type;
      gint64 cur, stop;
      gint64 tcur, tstop;

      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type,
          &cur, &stop_type, &stop);
      gst_event_unref (event);

      tformat = GST_FORMAT_TIME;
      res = gst_schro_dec_src_convert (pad, format, cur, &tformat, &tcur);
      if (!res) goto convert_error;
      res = gst_schro_dec_src_convert (pad, format, stop, &tformat, &tstop);
      if (!res) goto convert_error;

      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
          flags, cur_type, tcur, stop_type, tstop);

      res = gst_pad_push_event (dec->sinkpad, real_seek);

      break;
    }
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (dec);
      dec->proportion = proportion;
      dec->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (dec);

      GST_DEBUG_OBJECT (dec, "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT ", %g",
          GST_TIME_ARGS(timestamp), diff, proportion);

      res = gst_pad_push_event (dec->sinkpad, event);
      break;
    }
    default:
      res = gst_pad_push_event (dec->sinkpad, event);
      break;
  }
done:
  gst_object_unref (dec);
  return res;

convert_error:
  GST_DEBUG_OBJECT (dec, "could not convert format");
  goto done;
}
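The SEEK branch above is what ultimately runs when an application seeks a pipeline containing the decoder; the event travels upstream until this src-pad handler converts and forwards it. A sketch of triggering it (`pipeline' is an assumed, already-built pipeline):

/* Sketch: flushing time-format seek to the 10 second mark. */
gst_element_seek_simple (pipeline, GST_FORMAT_TIME,
    GST_SEEK_FLAG_FLUSH, 10 * GST_SECOND);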
Example No. 29
static gboolean
gst_swfdec_src_query (GstPad * pad, GstQuery * query)
{
  gboolean res = TRUE;
  GstSwfdec *swfdec;

  swfdec = GST_SWFDEC (gst_pad_get_parent (pad));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:
    {
      GstFormat format;
      gint64 value;

      gst_query_parse_position (query, &format, NULL);

      switch (format) {
        case GST_FORMAT_TIME:
          value = swfdec_render_get_frame_index (swfdec->decoder) *
              swfdec->interval;
          gst_query_set_position (query, GST_FORMAT_TIME, value);
          res = TRUE;
          break;
        default:
          res = FALSE;
          break;
      }
      break;
    }
    case GST_QUERY_DURATION:
    {
      GstFormat format;
      gint64 value;

      gst_query_parse_duration (query, &format, NULL);

      switch (format) {
        case GST_FORMAT_TIME:
        {
          int n_frames;
          int ret;

          res = FALSE;
          ret = swfdec_decoder_get_n_frames (swfdec->decoder, &n_frames);
          if (ret == SWF_OK) {
            value = n_frames * swfdec->interval;
            gst_query_set_duration (query, GST_FORMAT_TIME, value);
            res = TRUE;
          }
          break;
        }
        default:
          res = FALSE;
          break;
      }
      break;
    }
    default:
      res = FALSE;
      break;
  }

  gst_object_unref (swfdec);
  return res;
}
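These query handlers back the usual application-level position/duration calls. A sketch with the 0.10 signatures, where `element' stands for the swfdec instance or its pipeline:

/* Sketch: query current position and total duration in time format. */
GstFormat fmt = GST_FORMAT_TIME;
gint64 pos, dur;

if (gst_element_query_position (element, &fmt, &pos) &&
    gst_element_query_duration (element, &fmt, &dur))
  GST_DEBUG ("at %" GST_TIME_FORMAT " of %" GST_TIME_FORMAT,
      GST_TIME_ARGS (pos), GST_TIME_ARGS (dur));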
Example No. 30
static gboolean
gst_nsfdec_src_convert (GstPad * pad, GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value)
{
  gboolean res = TRUE;
  guint scale = 1;
  GstNsfDec *nsfdec;

  nsfdec = GST_NSFDEC (gst_pad_get_parent (pad));

  if (src_format == *dest_format) {
    *dest_value = src_value;
    gst_object_unref (nsfdec);
    return TRUE;
  }

  switch (src_format) {
    case GST_FORMAT_BYTES:
      switch (*dest_format) {
        case GST_FORMAT_DEFAULT:
          if (nsfdec->bps == 0) {
            res = FALSE;
            break;
          }
          *dest_value = src_value / nsfdec->bps;
          break;
        case GST_FORMAT_TIME:
        {
          gint byterate = nsfdec->bps * nsfdec->frequency;

          if (byterate == 0) {
            res = FALSE;
            break;
          }
          *dest_value =
              gst_util_uint64_scale_int (src_value, GST_SECOND, byterate);
          break;
        }
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_DEFAULT:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          *dest_value = src_value * nsfdec->bps;
          break;
        case GST_FORMAT_TIME:
          if (nsfdec->frequency == 0) {
            res = FALSE;
            break;
          }
          *dest_value =
              gst_util_uint64_scale_int (src_value, GST_SECOND,
              nsfdec->frequency);
          break;
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          scale = nsfdec->bps;
          /* fallthrough */
        case GST_FORMAT_DEFAULT:
          *dest_value =
              gst_util_uint64_scale_int (src_value, scale * nsfdec->frequency,
              GST_SECOND);
          break;
        default:
          res = FALSE;
      }
      break;
    default:
      res = FALSE;
  }

  gst_object_unref (nsfdec);

  return res;
}
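As a concrete illustration of the BYTES-to-TIME branch: with hypothetical values bps = 4 (bytes per sample frame) and frequency = 44100 Hz, the byte rate is 176400 bytes/s, so 441000 bytes correspond to 2.5 seconds. The same scaling primitive can be used stand-alone:

/* Sketch: bytes -> nanoseconds with assumed bps = 4 and frequency = 44100. */
guint64 bytes = 441000;
guint64 ns = gst_util_uint64_scale_int (bytes, GST_SECOND, 4 * 44100);
/* ns == 2500000000, i.e. 2.5 seconds */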