Example #1
static GstFlowReturn
gst_iir_equalizer_transform_ip (GstBaseTransform * btrans, GstBuffer * buf)
{
    GstAudioFilter *filter = GST_AUDIO_FILTER (btrans);
    GstIirEqualizer *equ = GST_IIR_EQUALIZER (btrans);
    GstClockTime timestamp;
    GstMapInfo map;
    gint channels = GST_AUDIO_FILTER_CHANNELS (filter);
    gboolean need_new_coefficients;

    if (G_UNLIKELY (channels < 1 || equ->process == NULL))
        return GST_FLOW_NOT_NEGOTIATED;

    BANDS_LOCK (equ);
    need_new_coefficients = equ->need_new_coefficients;
    BANDS_UNLOCK (equ);

    if (!need_new_coefficients && gst_base_transform_is_passthrough (btrans))
        return GST_FLOW_OK;

    timestamp = GST_BUFFER_TIMESTAMP (buf);
    timestamp =
        gst_segment_to_stream_time (&btrans->segment, GST_FORMAT_TIME, timestamp);

    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
        GstIirEqualizerBand **filters = equ->bands;
        guint f, nf = equ->freq_band_count;

        gst_object_sync_values (GST_OBJECT (equ), timestamp);

        /* sync values for bands too */
        /* FIXME: iterating equ->bands is not thread-safe here */
        for (f = 0; f < nf; f++) {
            gst_object_sync_values (GST_OBJECT (filters[f]), timestamp);
        }
    }

    BANDS_LOCK (equ);
    if (need_new_coefficients) {
        update_coefficients (equ);
        set_passthrough (equ);
    }
    BANDS_UNLOCK (equ);

    gst_buffer_map (buf, &map, GST_MAP_READWRITE);
    equ->process (equ, map.data, map.size, channels);
    gst_buffer_unmap (buf, &map);

    return GST_FLOW_OK;
}
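
The functions in this collection share one preamble: read the buffer timestamp, convert it to stream time against the transform's segment, and sync controlled properties before touching the data. A minimal sketch of that shared pattern (GStreamer 1.0 API; not taken verbatim from any element here):

#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>

/* Sync GstController-animated properties to the stream time of @buf.
 * Does nothing when the buffer carries no valid timestamp. */
static void
sync_controller_to_buffer (GstBaseTransform * trans, GstBuffer * buf)
{
  GstClockTime timestamp, stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (buf);
  stream_time =
      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (trans), stream_time);
}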
static void
gst_gdk_pixbuf_overlay_before_transform (GstBaseTransform * trans,
    GstBuffer * outbuf)
{
  GstClockTime stream_time;
  GstGdkPixbufOverlay *overlay = GST_GDK_PIXBUF_OVERLAY (trans);
  gboolean set_passthrough = FALSE;

  stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (outbuf));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (trans), stream_time);

  /* now properties have been sync'ed; maybe need to update composition */
  GST_OBJECT_LOCK (overlay);
  if (G_UNLIKELY (overlay->update_composition)) {
    gst_gdk_pixbuf_overlay_update_composition (overlay);
    overlay->update_composition = FALSE;
    set_passthrough = TRUE;
  }
  GST_OBJECT_UNLOCK (overlay);

  /* determine passthrough mode so the buffer is writable if needed
   * when passed into _transform_ip */
  if (G_UNLIKELY (set_passthrough))
    gst_base_transform_set_passthrough (trans, overlay->comp == NULL);
}
Example #3
/* GstBaseTransform vmethod implementations */
static GstFlowReturn
gst_audio_amplify_transform_ip (GstBaseTransform * base, GstBuffer * buf)
{
  GstAudioAmplify *filter = GST_AUDIO_AMPLIFY (base);
  guint num_samples;
  GstClockTime timestamp, stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (buf);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (G_OBJECT (filter), stream_time);

  num_samples =
      GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);

  if (gst_base_transform_is_passthrough (base) ||
      G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
    return GST_FLOW_OK;

  filter->process (filter, GST_BUFFER_DATA (buf), num_samples);

  return GST_FLOW_OK;
}
/* GstBaseTransform vmethod implementations */
static GstFlowReturn
gst_audio_invert_transform_ip (GstBaseTransform * base, GstBuffer * buf)
{
  GstAudioInvert *filter = GST_AUDIO_INVERT (base);
  guint num_samples;
  GstClockTime timestamp, stream_time;
  GstMapInfo map;

  timestamp = GST_BUFFER_TIMESTAMP (buf);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (filter), stream_time);

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
    return GST_FLOW_OK;

  gst_buffer_map (buf, &map, GST_MAP_READWRITE);
  num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);

  filter->process (filter, map.data, num_samples);

  gst_buffer_unmap (buf, &map);

  return GST_FLOW_OK;
}
static GstFlowReturn
gst_iir_equalizer_transform_ip (GstBaseTransform * btrans, GstBuffer * buf)
{
  GstAudioFilter *filter = GST_AUDIO_FILTER (btrans);

  GstIirEqualizer *equ = GST_IIR_EQUALIZER (btrans);

  GstClockTime timestamp;

  if (G_UNLIKELY (filter->format.channels < 1 || equ->process == NULL))
    return GST_FLOW_NOT_NEGOTIATED;

  if (equ->need_new_coefficients) {
    update_coefficients (equ);
    set_passthrough (equ);
  }

  if (gst_base_transform_is_passthrough (btrans))
    return GST_FLOW_OK;

  timestamp = GST_BUFFER_TIMESTAMP (buf);
  timestamp =
      gst_segment_to_stream_time (&btrans->segment, GST_FORMAT_TIME, timestamp);

  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (G_OBJECT (equ), timestamp);

  equ->process (equ, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf),
      filter->format.channels);

  return GST_FLOW_OK;
}
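
This is the 0.10 version of the equalizer function from Example #1. Besides the locking that Example #1 adds, the mechanical part of the 1.0 port is two API swaps; a sketch of just the replaced calls (compare Example #1 above):

/* 0.10 -> 1.0: the controller API takes a GstObject, not a GObject */
gst_object_sync_values (GST_OBJECT (equ), timestamp);

/* 0.10 -> 1.0: GST_BUFFER_DATA/GST_BUFFER_SIZE are gone; map the buffer */
GstMapInfo map;

gst_buffer_map (buf, &map, GST_MAP_READWRITE);
equ->process (equ, map.data, map.size, channels);
gst_buffer_unmap (buf, &map);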
Example #6
static GstFlowReturn
gst_ladspa_sink_type_render (GstBaseSink * base, GstBuffer * buf)
{
  GstLADSPASink *ladspa = GST_LADSPA_SINK (base);
  GstMapInfo info;

  if (ladspa->num_buffers_left == 0)
    goto eos;

  if (ladspa->num_buffers_left != -1)
    ladspa->num_buffers_left--;

  gst_object_sync_values (GST_OBJECT (ladspa), GST_BUFFER_TIMESTAMP (buf));

  gst_buffer_map (buf, &info, GST_MAP_READ);
  gst_ladspa_transform (&ladspa->ladspa, NULL,
      info.size / sizeof (LADSPA_Data) / ladspa->ladspa.klass->count.audio.in,
      info.data);
  gst_buffer_unmap (buf, &info);

  if (ladspa->num_buffers_left == 0)
    goto eos;

  return GST_FLOW_OK;

  /* ERRORS */
eos:
  {
    GST_DEBUG_OBJECT (ladspa, "we are EOS");
    return GST_FLOW_EOS;
  }
}
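
The render function above doubles as a num-buffers limiter: -1 means unlimited, 0 means EOS, and positive values count down (with a second check after processing so the final buffer is still rendered). The countdown, distilled into a standalone sketch:

/* Returns FALSE once the configured number of buffers has been consumed;
 * the caller should then return GST_FLOW_EOS. */
static gboolean
count_down_buffers (gint * num_buffers_left)
{
  if (*num_buffers_left == 0)
    return FALSE;
  if (*num_buffers_left != -1)
    (*num_buffers_left)--;
  return TRUE;
}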
Example #7
static GstFlowReturn
gst_quarktv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
                             GstVideoFrame * out_frame)
{
    GstQuarkTV *filter = GST_QUARKTV (vfilter);
    gint area;
    guint32 *src, *dest;
    GstClockTime timestamp;
    GstBuffer **planetable;
    gint planes, current_plane;

    timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
    timestamp =
        gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
                                    GST_FORMAT_TIME, timestamp);

    GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
                      GST_TIME_ARGS (timestamp));

    if (GST_CLOCK_TIME_IS_VALID (timestamp))
        gst_object_sync_values (GST_OBJECT (filter), timestamp);

    if (G_UNLIKELY (filter->planetable == NULL))
        return GST_FLOW_FLUSHING;

    src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
    dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

    GST_OBJECT_LOCK (filter);
    area = filter->area;
    planetable = filter->planetable;
    planes = filter->planes;
    current_plane = filter->current_plane;

    if (planetable[current_plane])
        gst_buffer_unref (planetable[current_plane]);
    planetable[current_plane] = gst_buffer_ref (in_frame->buffer);

    /* For each pixel */
    while (--area) {
        GstBuffer *rand;

        /* pick a random buffer */
        rand = planetable[(current_plane + (fastrand () >> 24)) % planes];

        /* Copy the pixel from the random buffer to dest, FIXME, slow */
        if (rand)
            gst_buffer_extract (rand, area * 4, &dest[area], 4);
        else
            dest[area] = src[area];
    }

    filter->current_plane--;
    if (filter->current_plane < 0)
        filter->current_plane = planes - 1;
    GST_OBJECT_UNLOCK (filter);

    return GST_FLOW_OK;
}
Example #8
static GstFlowReturn
gst_revtv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{
  GstRevTV *filter = GST_REVTV (trans);
  guint32 *src, *dest;
  gint width, height;
  guint32 *nsrc;
  gint y, x, R, G, B, yval;
  GstFlowReturn ret = GST_FLOW_OK;
  gint linespace, vscale;
  GstClockTime timestamp, stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (in);
  stream_time =
      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (G_OBJECT (filter), stream_time);

  src = (guint32 *) GST_BUFFER_DATA (in);
  dest = (guint32 *) GST_BUFFER_DATA (out);

  GST_OBJECT_LOCK (filter);
  width = filter->width;
  height = filter->height;

  /* Clear everything to black */
  memset (dest, 0, width * height * sizeof (guint32));

  linespace = filter->linespace;
  vscale = filter->vscale;

  /* draw the offset lines */
  for (y = 0; y < height; y += linespace) {
    for (x = 0; x < width; x++) {
      nsrc = src + (y * width) + x;

      /* Calc Y Value for curpix */
      R = ((*nsrc) & 0xff0000) >> (16 - 1);
      G = ((*nsrc) & 0xff00) >> (8 - 2);
      B = (*nsrc) & 0xff;

      yval = y - ((short) (R + G + B) / vscale);

      if (yval > 0) {
        dest[x + (yval * width)] = THE_COLOR;
      }
    }
  }
  GST_OBJECT_UNLOCK (filter);

  return ret;
}
Example #9
static GstFlowReturn
gst_quarktv_transform (GstBaseTransform * trans, GstBuffer * in,
    GstBuffer * out)
{
  GstQuarkTV *filter = GST_QUARKTV (trans);
  gint area;
  guint32 *src, *dest;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime timestamp;
  GstBuffer **planetable;
  gint planes, current_plane;

  timestamp = GST_BUFFER_TIMESTAMP (in);
  timestamp =
      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (G_OBJECT (filter), timestamp);

  if (G_UNLIKELY (filter->planetable == NULL))
    return GST_FLOW_WRONG_STATE;

  GST_OBJECT_LOCK (filter);
  area = filter->area;
  src = (guint32 *) GST_BUFFER_DATA (in);
  dest = (guint32 *) GST_BUFFER_DATA (out);
  planetable = filter->planetable;
  planes = filter->planes;
  current_plane = filter->current_plane;

  if (planetable[current_plane])
    gst_buffer_unref (planetable[current_plane]);
  planetable[current_plane] = gst_buffer_ref (in);

  /* For each pixel */
  while (--area) {
    GstBuffer *rand;

    /* pick a random buffer */
    rand = planetable[(current_plane + (fastrand () >> 24)) % planes];

    /* Copy the pixel from the random buffer to dest */
    dest[area] =
        (rand ? ((guint32 *) GST_BUFFER_DATA (rand))[area] : src[area]);
  }

  filter->current_plane--;
  if (filter->current_plane < 0)
    filter->current_plane = planes - 1;
  GST_OBJECT_UNLOCK (filter);

  return ret;
}
static void
gst_gdk_pixbuf_overlay_before_transform (GstBaseTransform * trans,
    GstBuffer * outbuf)
{
  GstClockTime stream_time;

  stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (outbuf));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (trans), stream_time);
}
/* start and stop are not symmetric -- start will open the device, but not start
 * capture. it's setcaps that will start capture, which is called via basesrc's
 * negotiate method. stop will both stop capture and close the device.
 */
static gboolean
gst_v4l2src_start (GstBaseSrc * src)
{
  GstV4l2Src *v4l2src = GST_V4L2SRC (src);

  v4l2src->offset = 0;

  /* activate settings for first frame */
  v4l2src->ctrl_time = 0;
  gst_object_sync_values (GST_OBJECT (src), v4l2src->ctrl_time);

  return TRUE;
}
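
gst_object_sync_values () only has an effect once a control source is bound to one of the object's properties. A hedged usage sketch with the 1.0 controller API ("volume" is an illustrative property name, not one of v4l2src's):

#include <gst/gst.h>
#include <gst/controller/gstinterpolationcontrolsource.h>
#include <gst/controller/gstdirectcontrolbinding.h>

/* Bind a linear interpolation control source to @element's "volume"
 * property so later gst_object_sync_values () calls update it. */
static void
attach_linear_controller (GstElement * element)
{
  GstControlSource *cs = gst_interpolation_control_source_new ();

  g_object_set (cs, "mode", GST_INTERPOLATION_MODE_LINEAR, NULL);
  gst_object_add_control_binding (GST_OBJECT (element),
      gst_direct_control_binding_new (GST_OBJECT (element), "volume", cs));
  gst_object_unref (cs);
}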
/* GstBaseTransform vmethod implementations */
static GstFlowReturn
gst_audio_echo_transform_ip (GstBaseTransform * base, GstBuffer * buf)
{
  GstAudioEcho *self = GST_AUDIO_ECHO (base);
  guint num_samples;
  GstClockTime timestamp, stream_time;
  GstMapInfo map;

  g_mutex_lock (&self->lock);
  timestamp = GST_BUFFER_TIMESTAMP (buf);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (self, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (self), stream_time);

  if (self->buffer == NULL) {
    guint bpf, rate;

    bpf = GST_AUDIO_FILTER_BPF (self);
    rate = GST_AUDIO_FILTER_RATE (self);

    self->delay_frames =
        MAX (gst_util_uint64_scale (self->delay, rate, GST_SECOND), 1);
    self->buffer_size_frames =
        MAX (gst_util_uint64_scale (self->max_delay, rate, GST_SECOND), 1);

    self->buffer_size = self->buffer_size_frames * bpf;
    self->buffer = g_try_malloc0 (self->buffer_size);
    self->buffer_pos = 0;

    if (self->buffer == NULL) {
      g_mutex_unlock (&self->lock);
      GST_ERROR_OBJECT (self, "Failed to allocate %u bytes", self->buffer_size);
      return GST_FLOW_ERROR;
    }
  }

  gst_buffer_map (buf, &map, GST_MAP_READWRITE);
  num_samples = map.size / GST_AUDIO_FILTER_BPS (self);

  self->process (self, map.data, num_samples);

  gst_buffer_unmap (buf, &map);
  g_mutex_unlock (&self->lock);

  return GST_FLOW_OK;
}
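
The delay-line sizing above uses gst_util_uint64_scale (), which computes val * num / denom without 64-bit overflow. The nanoseconds-to-frames conversion, isolated as a sketch:

/* Convert a GstClockTime delay into a frame count at @rate, minimum 1. */
static guint64
delay_to_frames (GstClockTime delay, gint rate)
{
  return MAX (gst_util_uint64_scale (delay, rate, GST_SECOND), 1);
}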
Example #13
static GstFlowReturn
gst_webvtt_enc_chain (GstPad * pad, GstBuffer * buf)
{
  GstWebvttEnc *webvttenc;
  GstBuffer *new_buffer;
  gchar *timing;
  GstFlowReturn ret;

  webvttenc = GST_WEBVTT_ENC (gst_pad_get_parent_element (pad));

  if (!webvttenc->pushed_header) {
    const char *header = "WEBVTT\n\n";

    new_buffer = gst_buffer_new_and_alloc (strlen (header));
    memcpy (GST_BUFFER_DATA (new_buffer), header, strlen (header));

    GST_BUFFER_TIMESTAMP (new_buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DURATION (new_buffer) = GST_CLOCK_TIME_NONE;

    ret = gst_pad_push (webvttenc->srcpad, new_buffer);
    if (ret != GST_FLOW_OK) {
      goto out;
    }

    webvttenc->pushed_header = TRUE;
  }

  gst_object_sync_values (G_OBJECT (webvttenc), GST_BUFFER_TIMESTAMP (buf));

  timing = gst_webvtt_enc_timeconvertion (webvttenc, buf);
  new_buffer =
      gst_buffer_new_and_alloc (strlen (timing) + GST_BUFFER_SIZE (buf) + 1);
  memcpy (GST_BUFFER_DATA (new_buffer), timing, strlen (timing));
  memcpy (GST_BUFFER_DATA (new_buffer) + strlen (timing), GST_BUFFER_DATA (buf),
      GST_BUFFER_SIZE (buf));
  memcpy (GST_BUFFER_DATA (new_buffer) + GST_BUFFER_SIZE (new_buffer) - 1,
      "\n", 1);
  g_free (timing);

  GST_BUFFER_TIMESTAMP (new_buffer) = GST_BUFFER_TIMESTAMP (buf);
  GST_BUFFER_DURATION (new_buffer) = GST_BUFFER_DURATION (buf);


  ret = gst_pad_push (webvttenc->srcpad, new_buffer);

out:
  gst_buffer_unref (buf);
  gst_object_unref (webvttenc);

  return ret;
}
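
On GStreamer 1.0 the header push above would wrap the static string with gst_buffer_new_wrapped () instead of _new_and_alloc () plus memcpy. A hedged equivalent:

#include <string.h>
#include <gst/gst.h>

/* Push the static WebVTT file header once (1.0-style sketch). */
static GstFlowReturn
push_webvtt_header (GstPad * srcpad)
{
  const gchar *header = "WEBVTT\n\n";
  GstBuffer *buf = gst_buffer_new_wrapped (g_strdup (header), strlen (header));

  return gst_pad_push (srcpad, buf);
}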
Example #14
static GstFlowReturn
gst_gaussianblur_transform_frame (GstVideoFilter * vfilter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  GstGaussianBlur *filter = GST_GAUSSIANBLUR (vfilter);
  GstClockTime timestamp;
  gint64 stream_time;
  gfloat sigma;
  guint8 *src, *dest;

  /* GstController: update the properties */
  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
  stream_time =
      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
      GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (filter), stream_time);

  GST_OBJECT_LOCK (filter);
  sigma = filter->sigma;
  GST_OBJECT_UNLOCK (filter);

  if (filter->cur_sigma != sigma) {
    g_free (filter->kernel);
    filter->kernel = NULL;
    g_free (filter->kernel_sum);
    filter->kernel_sum = NULL;
    filter->cur_sigma = sigma;
  }
  if (filter->kernel == NULL &&
      !make_gaussian_kernel (filter, filter->cur_sigma)) {
    GST_ELEMENT_ERROR (filter, RESOURCE, NO_SPACE_LEFT, ("Out of memory"),
        ("Failed to allocation gaussian kernel"));
    return GST_FLOW_ERROR;
  }

  /*
   * Perform gaussian smoothing on the image using the input standard
   * deviation.
   */
  src = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_COMP_DATA (out_frame, 0);
  gst_video_frame_copy (out_frame, in_frame);
  gaussian_smooth (filter, src, dest);

  return GST_FLOW_OK;
}
static GstFlowReturn
gst_srt_enc_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstSrtEnc *srtenc = GST_SRT_ENC (parent);
  GstClockTime ts, dur = GST_SECOND;
  GstBuffer *new_buffer;
  GstMapInfo map_info;
  GString *s;
  gsize buf_size;

  gst_object_sync_values (GST_OBJECT (srtenc), GST_BUFFER_PTS (buf));

  ts = GST_BUFFER_PTS (buf) + srtenc->timestamp;
  if (GST_BUFFER_DURATION_IS_VALID (buf))
    dur = GST_BUFFER_DURATION (buf) + srtenc->duration;
  else if (srtenc->duration > 0)
    dur = srtenc->duration;
  else
    dur = GST_SECOND;

  buf_size = gst_buffer_get_size (buf);
  s = g_string_sized_new (10 + 50 + buf_size + 2 + 1);

  /* stanza count */
  g_string_append_printf (s, "%d\n", srtenc->counter++);

  /* start_time --> end_time */
  gst_srt_enc_append_timestamp_to_string (ts, s);
  g_string_append_printf (s, " --> ");
  gst_srt_enc_append_timestamp_to_string (ts + dur, s);
  g_string_append_c (s, '\n');

  /* text */
  if (gst_buffer_map (buf, &map_info, GST_MAP_READ)) {
    g_string_append_len (s, (const gchar *) map_info.data, map_info.size);
    gst_buffer_unmap (buf, &map_info);
  }

  g_string_append (s, "\n\n");

  buf_size = s->len;
  new_buffer = gst_buffer_new_wrapped (g_string_free (s, FALSE), buf_size);

  GST_BUFFER_TIMESTAMP (new_buffer) = GST_BUFFER_TIMESTAMP (buf);
  GST_BUFFER_DURATION (new_buffer) = GST_BUFFER_DURATION (buf);

  gst_buffer_unref (buf);

  return gst_pad_push (srtenc->srcpad, new_buffer);
}
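
gst_srt_enc_append_timestamp_to_string () is not shown here; a hypothetical formatter with the same role would render a GstClockTime in SRT's "HH:MM:SS,mmm" notation:

/* Hypothetical sketch: append @ts to @s as an SRT timestamp. */
static void
append_srt_time (GString * s, GstClockTime ts)
{
  guint h = (guint) (ts / (GST_SECOND * 60 * 60));
  guint m = (guint) ((ts / (GST_SECOND * 60)) % 60);
  guint sec = (guint) ((ts / GST_SECOND) % 60);
  guint ms = (guint) ((ts % GST_SECOND) / GST_MSECOND);

  g_string_append_printf (s, "%02u:%02u:%02u,%03u", h, m, sec, ms);
}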
/* this function does the actual processing
 */
static GstFlowReturn
gst_freeverb_transform (GstBaseTransform * base, GstBuffer * inbuf,
                        GstBuffer * outbuf)
{
    GstFreeverb *filter = GST_FREEVERB (base);
    guint num_samples;
    GstClockTime timestamp;
    GstMapInfo inmap, outmap;

    timestamp = GST_BUFFER_TIMESTAMP (inbuf);
    timestamp =
        gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

    gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
    gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
    num_samples = outmap.size / (2 * GST_AUDIO_INFO_BPS (&filter->info));

    GST_DEBUG_OBJECT (filter, "processing %u samples at %" GST_TIME_FORMAT,
                      num_samples, GST_TIME_ARGS (timestamp));

    if (GST_CLOCK_TIME_IS_VALID (timestamp))
        gst_object_sync_values (GST_OBJECT (filter), timestamp);

    if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DISCONT))) {
        filter->drained = FALSE;
    }
    if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))) {
        if (filter->drained) {
            memset (outmap.data, 0, outmap.size);
        }
    } else {
        filter->drained = FALSE;
    }

    if (!filter->drained) {
        filter->drained =
            filter->process (filter, inmap.data, outmap.data, num_samples);
    }

    if (filter->drained) {
        GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
    }

    gst_buffer_unmap (inbuf, &inmap);
    gst_buffer_unmap (outbuf, &outmap);

    return GST_FLOW_OK;
}
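
Both freeverb versions special-case GAP buffers: once the reverb tail has drained, a GAP input produces a zeroed, GAP-flagged output with no processing. A simplified sketch of the idiom that zeroes on every GAP (ignoring the drained-tail tracking the element does):

/* Returns TRUE if @inbuf was a GAP buffer, in which case @outmap has been
 * zero-filled and the caller can skip its process () call. */
static gboolean
propagate_gap (GstBuffer * inbuf, GstBuffer * outbuf, GstMapInfo * outmap)
{
  if (!GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))
    return FALSE;

  GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
  memset (outmap->data, 0, outmap->size);
  return TRUE;
}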
Example #17
static void
gst_gamma_before_transform (GstBaseTransform * base, GstBuffer * outbuf)
{
  GstGamma *gamma = GST_GAMMA (base);
  GstClockTime timestamp, stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (outbuf);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (gamma, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (gamma), stream_time);
}
static void
gst_geometric_transform_before_transform (GstBaseTransform * trans,
    GstBuffer * outbuf)
{
  GstGeometricTransform *gt = GST_GEOMETRIC_TRANSFORM_CAST (trans);
  GstClockTime timestamp, stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (outbuf);
  stream_time =
      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (gt, "sync to %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (G_OBJECT (gt), stream_time);
}
Example #19
/* this function does the actual processing
 */
static GstFlowReturn
gst_plugin_template_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
{
  GstPluginTemplate *filter = GST_PLUGIN_TEMPLATE (base);

  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (outbuf)))
    gst_object_sync_values (G_OBJECT (filter), GST_BUFFER_TIMESTAMP (outbuf));

  if (filter->silent == FALSE)
    g_print ("I'm plugged, therefore I'm in.\n");
  
  /* FIXME: do something interesting here.  This simply copies the source
   * to the destination. */

  return GST_FLOW_OK;
}
Example #20
static void
gst_video_flip_before_transform (GstBaseTransform * trans, GstBuffer * in)
{
  GstVideoFlip *videoflip = GST_VIDEO_FLIP (trans);
  GstClockTime timestamp, stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (in);
  stream_time =
      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (videoflip, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (G_OBJECT (videoflip), stream_time);
}
Example #21
static void
gst_video_balance_before_transform (GstBaseTransform * base, GstBuffer * buf)
{
  GstVideoBalance *balance = GST_VIDEO_BALANCE (base);
  GstClockTime timestamp, stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (buf);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (balance, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (balance), stream_time);
}
Example #22
static GstFlowReturn
gst_stereo_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
{
  GstStereo *stereo = GST_STEREO (base);
  gint samples;
  gint i;
  gdouble avg, ldiff, rdiff, tmp;
  gdouble mul = stereo->stereo;
  gint16 *data;
  GstMapInfo info;

  if (!gst_buffer_map (outbuf, &info, GST_MAP_READWRITE))
    return GST_FLOW_ERROR;

  data = (gint16 *) info.data;
  samples = info.size / 2;

  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (outbuf)))
    gst_object_sync_values (GST_OBJECT (stereo), GST_BUFFER_TIMESTAMP (outbuf));

  if (stereo->active) {
    for (i = 0; i < samples / 2; i += 2) {
      avg = (data[i] + data[i + 1]) / 2;
      ldiff = data[i] - avg;
      rdiff = data[i + 1] - avg;

      tmp = avg + ldiff * mul;
      if (tmp < -32768)
        tmp = -32768;
      if (tmp > 32767)
        tmp = 32767;
      data[i] = tmp;

      tmp = avg + rdiff * mul;
      if (tmp < -32768)
        tmp = -32768;
      if (tmp > 32767)
        tmp = 32767;
      data[i + 1] = tmp;
    }
  }

  gst_buffer_unmap (outbuf, &info);

  return GST_FLOW_OK;
}
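
The four saturation branches per sample above are the standard gint16 clamp; GLib's CLAMP macro expresses the same arithmetic in one line. A hedged equivalent of the loop body, not the element's actual code:

/* Scale @diff around @avg by @mul and saturate to the gint16 range. */
static inline gint16
widen_sample (gdouble avg, gdouble diff, gdouble mul)
{
  return (gint16) CLAMP (avg + diff * mul, -32768.0, 32767.0);
}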
Example #23
/* start and stop are not symmetric -- start will open the device, but not start
 * capture. it's setcaps that will start capture, which is called via basesrc's
 * negotiate method. stop will both stop capture and close the device.
 */
static gboolean
gst_v4l2src_start (GstBaseSrc * src)
{
  GstV4l2Src *v4l2src = GST_V4L2SRC (src);

  v4l2src->offset = 0;
  v4l2src->renegotiation_adjust = 0;

  /* activate settings for first frame */
  v4l2src->ctrl_time = 0;
  gst_object_sync_values (GST_OBJECT (src), v4l2src->ctrl_time);

  v4l2src->has_bad_timestamp = FALSE;
  v4l2src->last_timestamp = 0;

  return TRUE;
}
Example #24
/* GstBaseTransform vmethod implementations */
static GstFlowReturn
gst_audio_echo_transform_ip (GstBaseTransform * base, GstBuffer * buf)
{
  GstAudioEcho *self = GST_AUDIO_ECHO (base);
  guint num_samples;
  GstClockTime timestamp, stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (buf);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (self, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (G_OBJECT (self), stream_time);

  num_samples =
      GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (self)->format.width / 8);

  if (self->buffer == NULL) {
    guint width, rate, channels;

    width = GST_AUDIO_FILTER (self)->format.width / 8;
    rate = GST_AUDIO_FILTER (self)->format.rate;
    channels = GST_AUDIO_FILTER (self)->format.channels;

    self->delay_frames =
        MAX (gst_util_uint64_scale (self->delay, rate, GST_SECOND), 1);
    self->buffer_size_frames =
        MAX (gst_util_uint64_scale (self->max_delay, rate, GST_SECOND), 1);

    self->buffer_size = self->buffer_size_frames * width * channels;
    self->buffer = g_try_malloc0 (self->buffer_size);
    self->buffer_pos = 0;

    if (self->buffer == NULL) {
      GST_ERROR_OBJECT (self, "Failed to allocate %u bytes", self->buffer_size);
      return GST_FLOW_ERROR;
    }
  }

  self->process (self, GST_BUFFER_DATA (buf), num_samples);

  return GST_FLOW_OK;
}
Example #25
static void
gst_smpte_alpha_before_transform (GstBaseTransform * trans, GstBuffer * buf)
{
  GstSMPTEAlpha *smpte = GST_SMPTE_ALPHA (trans);
  GstClockTime timestamp, stream_time;

  /* first sync the controller to the current stream_time of the buffer */
  timestamp = GST_BUFFER_TIMESTAMP (buf);
  stream_time =
      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (smpte, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (smpte), stream_time);
}
Example #26
/* GstBaseTransform vmethod implementations */
static GstFlowReturn
gst_audio_amplify_transform_ip (GstBaseTransform * base, GstBuffer * buf)
{
  GstAudioAmplify *filter = GST_AUDIO_AMPLIFY (base);
  guint num_samples =
      GST_BUFFER_SIZE (buf) / (GST_AUDIO_FILTER (filter)->format.width / 8);

  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (buf)))
    gst_object_sync_values (G_OBJECT (filter), GST_BUFFER_TIMESTAMP (buf));

  if (gst_base_transform_is_passthrough (base) ||
      G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
    return GST_FLOW_OK;

  filter->process (filter, GST_BUFFER_DATA (buf), num_samples);

  return GST_FLOW_OK;
}
Example #27
static GstFlowReturn
gauss_blur_process_frame (GstBaseTransform * btrans,
    GstBuffer * in_buf, GstBuffer * out_buf)
{
  GaussBlur *gb = GAUSS_BLUR (btrans);
  GstClockTime timestamp;
  gint64 stream_time;
  gfloat sigma;

  /* GstController: update the properties */
  timestamp = GST_BUFFER_TIMESTAMP (in_buf);
  stream_time =
      gst_segment_to_stream_time (&btrans->segment, GST_FORMAT_TIME, timestamp);
  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (G_OBJECT (gb), stream_time);

  GST_OBJECT_LOCK (gb);
  sigma = gb->sigma;
  GST_OBJECT_UNLOCK (gb);

  if (gb->cur_sigma != sigma) {
    g_free (gb->kernel);
    gb->kernel = NULL;
    g_free (gb->kernel_sum);
    gb->kernel_sum = NULL;
    gb->cur_sigma = sigma;
  }
  if (gb->kernel == NULL && !make_gaussian_kernel (gb, gb->cur_sigma)) {
    GST_ELEMENT_ERROR (btrans, RESOURCE, NO_SPACE_LEFT, ("Out of memory"),
        ("Failed to allocation gaussian kernel"));
    return GST_FLOW_ERROR;
  }

  /*
   * Perform gaussian smoothing on the image using the input standard
   * deviation.
   */
  memcpy (GST_BUFFER_DATA (out_buf), GST_BUFFER_DATA (in_buf),
      gb->height * gb->stride);
  gaussian_smooth (gb, GST_BUFFER_DATA (in_buf), GST_BUFFER_DATA (out_buf));

  return GST_FLOW_OK;
}
Example #28
/* this function does the actual processing
 */
static GstFlowReturn
gst_audio_panorama_transform (GstBaseTransform * base, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstAudioPanorama *filter = GST_AUDIO_PANORAMA (base);
  guint num_samples = GST_BUFFER_SIZE (outbuf) / (2 * filter->width);

  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (outbuf)))
    gst_object_sync_values (G_OBJECT (filter), GST_BUFFER_TIMESTAMP (outbuf));

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
    memset (GST_BUFFER_DATA (outbuf), 0, GST_BUFFER_SIZE (outbuf));
    return GST_FLOW_OK;
  }

  filter->process (filter, GST_BUFFER_DATA (inbuf),
      GST_BUFFER_DATA (outbuf), num_samples);

  return GST_FLOW_OK;
}
Example #29
static GstFlowReturn
gst_stereo_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
{
  GstStereo *stereo = GST_STEREO (base);
  gint16 *data = (gint16 *) GST_BUFFER_DATA (outbuf);
  gint samples = GST_BUFFER_SIZE (outbuf) / 2;
  gint i;
  gdouble avg, ldiff, rdiff, tmp;
  gdouble mul = stereo->stereo;

  if (!gst_buffer_is_writable (outbuf))
    return GST_FLOW_OK;

  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (outbuf)))
    gst_object_sync_values (GST_OBJECT (stereo), GST_BUFFER_TIMESTAMP (outbuf));

  if (stereo->active) {
    for (i = 0; i < samples / 2; i += 2) {
      avg = (data[i] + data[i + 1]) / 2;
      ldiff = data[i] - avg;
      rdiff = data[i + 1] - avg;

      tmp = avg + ldiff * mul;
      if (tmp < -32768)
        tmp = -32768;
      if (tmp > 32767)
        tmp = 32767;
      data[i] = tmp;

      tmp = avg + rdiff * mul;
      if (tmp < -32768)
        tmp = -32768;
      if (tmp > 32767)
        tmp = 32767;
      data[i + 1] = tmp;
    }
  }

  return GST_FLOW_OK;
}
Example #30
/* this function does the actual processing
 */
static GstFlowReturn
gst_freeverb_transform (GstBaseTransform * base, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstFreeverb *filter = GST_FREEVERB (base);
  guint num_samples = GST_BUFFER_SIZE (outbuf) / (2 * filter->width);
  GstClockTime timestamp;

  timestamp = GST_BUFFER_TIMESTAMP (inbuf);
  timestamp =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "processing %u samples at %" GST_TIME_FORMAT,
      num_samples, GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (G_OBJECT (filter), timestamp);

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DISCONT))) {
    filter->drained = FALSE;
  }
  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))) {
    if (filter->drained) {
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
      memset (GST_BUFFER_DATA (outbuf), 0, GST_BUFFER_SIZE (outbuf));
      return GST_FLOW_OK;
    }
  } else {
    filter->drained = FALSE;
  }

  filter->drained = filter->process (filter, GST_BUFFER_DATA (inbuf),
      GST_BUFFER_DATA (outbuf), num_samples);

  if (filter->drained) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
  }

  return GST_FLOW_OK;
}