Example No. 1
static void
gst_video_balance_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstVideoBalance *src;

  src = GST_VIDEO_BALANCE (object);

  switch (prop_id) {
    case PROP_CONTRAST:
      g_value_set_double (value, src->contrast);
      break;
    case PROP_BRIGHTNESS:
      g_value_set_double (value, src->brightness);
      break;
    case PROP_HUE:
      g_value_set_double (value, src->hue);
      break;
    case PROP_SATURATION:
      g_value_set_double (value, src->saturation);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
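The getter above simply mirrors the element's gdouble fields into the requested GValue. A minimal caller sketch, assuming balance points to an instantiated videobalance element and that the lowercase GObject property names correspond to the PROP_* ids above:

static void
read_balance_settings (GstElement * balance)
{
  gdouble contrast, hue;

  /* read the current contrast and hue through the standard GObject API */
  g_object_get (G_OBJECT (balance),
      "contrast", &contrast,
      "hue", &hue,
      NULL);

  g_print ("contrast=%f hue=%f\n", contrast, hue);
}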
Example No. 2
static void
gst_video_balance_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoBalance *src;

  src = GST_VIDEO_BALANCE (object);

  GST_DEBUG ("gst_video_balance_set_property");
  switch (prop_id) {
    case PROP_CONTRAST:
      src->contrast = g_value_get_double (value);
      break;
    case PROP_BRIGHTNESS:
      src->brightness = g_value_get_double (value);
      break;
    case PROP_HUE:
      src->hue = g_value_get_double (value);
      break;
    case PROP_SATURATION:
      src->saturation = g_value_get_double (value);
      break;
    default:
      break;
  }

  gst_video_balance_update_properties (src);
}
Example No. 3
static GstFlowReturn
gst_video_balance_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
{
  GstVideoBalance *videobalance;
  guint8 *data;
  guint size;

  videobalance = GST_VIDEO_BALANCE (base);

  /* if no change is needed, we are done */
  if (videobalance->passthru)
    goto done;

  data = GST_BUFFER_DATA (outbuf);
  size = GST_BUFFER_SIZE (outbuf);

  if (size < videobalance->size)
    goto wrong_size;

  gst_video_balance_planar411_ip (videobalance, data,
      videobalance->width, videobalance->height);

done:
  return GST_FLOW_OK;

  /* ERRORS */
wrong_size:
  {
    GST_ELEMENT_ERROR (videobalance, STREAM, FORMAT,
        (NULL), ("Invalid buffer size %d, expected %d", size,
            videobalance->size));
    return GST_FLOW_ERROR;
  }
}
Example No. 4
/* get notified of caps and plug in the correct process function */
static gboolean
gst_video_balance_set_caps (GstBaseTransform * base, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstVideoBalance *this;
  GstStructure *structure;
  gboolean res;

  this = GST_VIDEO_BALANCE (base);

  GST_DEBUG_OBJECT (this,
      "set_caps: in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);

  structure = gst_caps_get_structure (incaps, 0);

  res = gst_structure_get_int (structure, "width", &this->width);
  res &= gst_structure_get_int (structure, "height", &this->height);
  if (!res)
    goto done;

  this->size = GST_VIDEO_I420_SIZE (this->width, this->height);

done:
  return res;
}
Example No. 5
static void
gst_video_balance_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
  gdouble d;
  const gchar *label = NULL;

  GST_BASE_TRANSFORM_LOCK (balance);
  GST_OBJECT_LOCK (balance);
  switch (prop_id) {
    case PROP_CONTRAST:
      d = g_value_get_double (value);
      GST_DEBUG_OBJECT (balance, "Changing contrast from %lf to %lf",
          balance->contrast, d);
      if (d != balance->contrast)
        label = "CONTRAST";
      balance->contrast = d;
      break;
    case PROP_BRIGHTNESS:
      d = g_value_get_double (value);
      GST_DEBUG_OBJECT (balance, "Changing brightness from %lf to %lf",
          balance->brightness, d);
      if (d != balance->brightness)
        label = "BRIGHTNESS";
      balance->brightness = d;
      break;
    case PROP_HUE:
      d = g_value_get_double (value);
      GST_DEBUG_OBJECT (balance, "Changing hue from %lf to %lf", balance->hue,
          d);
      if (d != balance->hue)
        label = "HUE";
      balance->hue = d;
      break;
    case PROP_SATURATION:
      d = g_value_get_double (value);
      GST_DEBUG_OBJECT (balance, "Changing saturation from %lf to %lf",
          balance->saturation, d);
      if (d != balance->saturation)
        label = "SATURATION";
      balance->saturation = d;
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }

  gst_video_balance_update_properties (balance);
  GST_OBJECT_UNLOCK (balance);
  GST_BASE_TRANSFORM_UNLOCK (balance);

  if (label) {
    GstColorBalanceChannel *channel =
        gst_video_balance_find_channel (balance, label);
    gst_color_balance_value_changed (GST_COLOR_BALANCE (balance), channel,
        gst_color_balance_get_value (GST_COLOR_BALANCE (balance), channel));
  }
}
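This newer setter takes both the base-transform lock and the object lock before touching the fields, and afterwards notifies the matching GstColorBalance channel. A hedged sketch of how an application could observe those notifications through the channel's "value-changed" signal; the callback and helper names here are illustrative, and the interface typically comes from <gst/video/colorbalance.h>:

static void
on_value_changed (GstColorBalanceChannel * channel, gint value,
    gpointer user_data)
{
  g_print ("%s changed to %d\n", channel->label, value);
}

static void
watch_channels (GstColorBalance * cb)
{
  const GList *l;

  /* subscribe to every channel the element exposes */
  for (l = gst_color_balance_list_channels (cb); l != NULL; l = l->next)
    g_signal_connect (l->data, "value-changed",
        G_CALLBACK (on_value_changed), NULL);
}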
Example No. 6
static const GList *
gst_video_balance_colorbalance_list_channels (GstColorBalance * balance)
{
  GstVideoBalance *videobalance = GST_VIDEO_BALANCE (balance);

  g_return_val_if_fail (videobalance != NULL, NULL);
  g_return_val_if_fail (GST_IS_VIDEO_BALANCE (videobalance), NULL);

  return videobalance->channels;
}
Example No. 7
/* get notified of caps and plug in the correct process function */
static gboolean
gst_video_balance_set_caps (GstBaseTransform * base, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base);

  GST_DEBUG_OBJECT (videobalance,
      "in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);

  videobalance->process = NULL;

  if (!gst_video_format_parse_caps (incaps, &videobalance->format,
          &videobalance->width, &videobalance->height))
    goto invalid_caps;

  videobalance->size =
      gst_video_format_get_size (videobalance->format, videobalance->width,
      videobalance->height);

  switch (videobalance->format) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y41B:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y444:
      videobalance->process = gst_video_balance_planar_yuv;
      break;
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_YVYU:
      videobalance->process = gst_video_balance_packed_yuv;
      break;
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
      videobalance->process = gst_video_balance_packed_rgb;
      break;
    default:
      break;
  }

  return videobalance->process != NULL;

invalid_caps:
  GST_ERROR_OBJECT (videobalance, "Invalid caps: %" GST_PTR_FORMAT, incaps);
  return FALSE;
}
Example No. 8
/* get notified of caps and plug in the correct process function */
static gboolean
gst_video_balance_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);

  GST_DEBUG_OBJECT (videobalance,
      "in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);

  videobalance->process = NULL;

  switch (GST_VIDEO_INFO_FORMAT (in_info)) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y41B:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y444:
      videobalance->process = gst_video_balance_planar_yuv;
      break;
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_YVYU:
      videobalance->process = gst_video_balance_packed_yuv;
      break;
    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:
      videobalance->process = gst_video_balance_semiplanar_yuv;
      break;
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
      videobalance->process = gst_video_balance_packed_rgb;
      break;
    default:
      goto unknown_format;
      break;
  }

  return TRUE;

  /* ERRORS */
unknown_format:
  {
    GST_ERROR_OBJECT (videobalance, "unknown format %" GST_PTR_FORMAT, incaps);
    return FALSE;
  }
}
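In both caps handlers the element only stores a function pointer; the per-format handlers themselves are not part of these snippets. Their signature in the GstVideoFilter-based version can be inferred from Example No. 14 below. Roughly, as an assumption (the typedef name is made up here):

typedef void (*GstVideoBalanceProcessFunc) (GstVideoBalance * balance,
    GstVideoFrame * frame);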
Example No. 9
static void
gst_video_balance_before_transform (GstBaseTransform * base, GstBuffer * buf)
{
  GstVideoBalance *balance = GST_VIDEO_BALANCE (base);
  GstClockTime timestamp, stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (buf);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (balance, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (balance), stream_time);
}
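before_transform converts the buffer timestamp to stream time and calls gst_object_sync_values(), which is what makes the color properties controllable over time. A hedged sketch of binding a control source to "brightness" so that the sync call picks up interpolated values; this uses the GStreamer 1.x controller API (usually available via <gst/controller/controller.h>), balance stands for the videobalance element, and the chosen times and values are arbitrary:

static void
ramp_brightness (GstElement * balance)
{
  GstControlSource *cs = gst_interpolation_control_source_new ();

  g_object_set (cs, "mode", GST_INTERPOLATION_MODE_LINEAR, NULL);
  gst_object_add_control_binding (GST_OBJECT (balance),
      gst_direct_control_binding_new (GST_OBJECT (balance), "brightness", cs));

  /* control values are normalized to [0, 1] and mapped onto the property
   * range, so this ramps brightness from its minimum to its maximum over
   * the first five seconds of stream time */
  gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
      0 * GST_SECOND, 0.0);
  gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
      5 * GST_SECOND, 1.0);
}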
Example No. 10
static void
gst_video_balance_colorbalance_set_value (GstColorBalance * balance,
    GstColorBalanceChannel * channel, gint value)
{
  GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
  gdouble new_val;
  gboolean changed = FALSE;

  g_return_if_fail (vb != NULL);
  g_return_if_fail (GST_IS_VIDEO_BALANCE (vb));
  g_return_if_fail (GST_IS_VIDEO_FILTER (vb));
  g_return_if_fail (channel->label != NULL);

  GST_BASE_TRANSFORM_LOCK (vb);
  GST_OBJECT_LOCK (vb);
  if (!g_ascii_strcasecmp (channel->label, "HUE")) {
    new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
    changed = new_val != vb->hue;
    vb->hue = new_val;
  } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
    new_val = (value + 1000.0) * 2.0 / 2000.0;
    changed = new_val != vb->saturation;
    vb->saturation = new_val;
  } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
    new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
    changed = new_val != vb->brightness;
    vb->brightness = new_val;
  } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
    new_val = (value + 1000.0) * 2.0 / 2000.0;
    changed = new_val != vb->contrast;
    vb->contrast = new_val;
  }

  if (changed)
    gst_video_balance_update_properties (vb);
  GST_OBJECT_UNLOCK (vb);
  GST_BASE_TRANSFORM_UNLOCK (vb);

  if (changed) {
    gst_color_balance_value_changed (balance, channel,
        gst_color_balance_get_value (balance, channel));
  }
}
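The interface works with integers in [-1000, 1000] while the element stores doubles; hue and brightness end up in [-1.0, 1.0] and saturation and contrast in [0.0, 2.0]. A small hypothetical helper that just restates the arithmetic used above:

/* Map a GstColorBalance value in [-1000, 1000] onto the element's gdouble
 * range: [0.0, 2.0], or [-1.0, 1.0] when the property is zero-centered. */
static gdouble
balance_int_to_double (gint value, gboolean zero_centered)
{
  gdouble d = (value + 1000.0) * 2.0 / 2000.0;

  return zero_centered ? d - 1.0 : d;
}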
Example No. 11
static void
gst_video_balance_finalize (GObject * object)
{
  GList *channels = NULL;
  GstVideoBalance *balance;
  gint i;

  balance = GST_VIDEO_BALANCE (object);

  if (balance->tableu) {
    for (i = 0; i < 256; i++)
      g_free (balance->tableu[i]);
    g_free (balance->tableu);
    balance->tableu = NULL;
  }

  if (balance->tablev) {
    for (i = 0; i < 256; i++)
      g_free (balance->tablev[i]);
    g_free (balance->tablev);
    balance->tablev = NULL;
  }

  if (balance->tabley) {
    g_free (balance->tabley);
    balance->tabley = NULL;
  }

  channels = balance->channels;
  while (channels) {
    GstColorBalanceChannel *channel = channels->data;

    g_object_unref (channel);
    channels->data = NULL;
    channels = g_list_next (channels);
  }

  if (balance->channels)
    g_list_free (balance->channels);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
Example No. 12
static void
gst_video_balance_init (GTypeInstance * instance, gpointer g_class)
{
  GstVideoBalance *videobalance = GST_VIDEO_BALANCE (instance);
  char *channels[4] = { "HUE", "SATURATION",
    "BRIGHTNESS", "CONTRAST"
  };
  gint i;

  GST_DEBUG ("gst_video_balance_init");

  /* do stuff */
  videobalance->contrast = DEFAULT_PROP_CONTRAST;
  videobalance->brightness = DEFAULT_PROP_BRIGHTNESS;
  videobalance->hue = DEFAULT_PROP_HUE;
  videobalance->saturation = DEFAULT_PROP_SATURATION;

  gst_video_balance_update_properties (videobalance);

  videobalance->tabley = g_new (guint8, 256);
  videobalance->tableu = g_new (guint8 *, 256);
  videobalance->tablev = g_new (guint8 *, 256);
  for (i = 0; i < 256; i++) {
    videobalance->tableu[i] = g_new (guint8, 256);
    videobalance->tablev[i] = g_new (guint8, 256);
  }

  /* Generate the channels list */
  for (i = 0; i < (sizeof (channels) / sizeof (char *)); i++) {
    GstColorBalanceChannel *channel;

    channel = g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, NULL);
    channel->label = g_strdup (channels[i]);
    channel->min_value = -1000;
    channel->max_value = 1000;

    videobalance->channels = g_list_append (videobalance->channels, channel);
  }

}
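init allocates a 256-entry luma table and two 256x256 chroma tables, which gst_video_balance_update_properties() refills whenever a property changes. A rough sketch of how such tables are typically consumed in the planar YUV path; the actual gst_video_balance_planar_yuv is not included in these snippets, so the indexing below is an assumption:

/* Hypothetical per-pixel helper: one table lookup per sample instead of
 * floating point math on every pixel. */
static inline void
apply_tables (GstVideoBalance * vb, guint8 * y, guint8 * u, guint8 * v)
{
  guint8 u_in = *u, v_in = *v;

  *y = vb->tabley[*y];
  *u = vb->tableu[u_in][v_in];
  *v = vb->tablev[u_in][v_in];
}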
Example No. 13
static void
gst_video_balance_finalize (GObject * object)
{
  GList *channels = NULL;
  GstVideoBalance *balance = GST_VIDEO_BALANCE (object);

  g_free (balance->tableu[0]);

  channels = balance->channels;
  while (channels) {
    GstColorBalanceChannel *channel = channels->data;

    g_object_unref (channel);
    channels->data = NULL;
    channels = g_list_next (channels);
  }

  if (balance->channels)
    g_list_free (balance->channels);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
Example No. 14
static GstFlowReturn
gst_video_balance_transform_frame_ip (GstVideoFilter * vfilter,
    GstVideoFrame * frame)
{
  GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);

  if (!videobalance->process)
    goto not_negotiated;

  GST_OBJECT_LOCK (videobalance);
  videobalance->process (videobalance, frame);
  GST_OBJECT_UNLOCK (videobalance);

  return GST_FLOW_OK;

  /* ERRORS */
not_negotiated:
  {
    GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
Example No. 15
static GstFlowReturn
gst_video_balance_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
{
  GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base);
  guint8 *data;
  guint size;

  if (!videobalance->process)
    goto not_negotiated;

  /* if no change is needed, we are done */
  if (base->passthrough)
    goto done;

  data = GST_BUFFER_DATA (outbuf);
  size = GST_BUFFER_SIZE (outbuf);

  if (size != videobalance->size)
    goto wrong_size;

  GST_OBJECT_LOCK (videobalance);
  videobalance->process (videobalance, data);
  GST_OBJECT_UNLOCK (videobalance);

done:
  return GST_FLOW_OK;

  /* ERRORS */
wrong_size:
  {
    GST_ELEMENT_ERROR (videobalance, STREAM, FORMAT,
        (NULL), ("Invalid buffer size %d, expected %d", size,
            videobalance->size));
    return GST_FLOW_ERROR;
  }
not_negotiated:
  GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
  return GST_FLOW_NOT_NEGOTIATED;
}
Example No. 16
static gint
gst_video_balance_colorbalance_get_value (GstColorBalance * balance,
    GstColorBalanceChannel * channel)
{
  GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
  gint value = 0;

  g_return_val_if_fail (vb != NULL, 0);
  g_return_val_if_fail (GST_IS_VIDEO_BALANCE (vb), 0);
  g_return_val_if_fail (channel->label != NULL, 0);

  if (!g_ascii_strcasecmp (channel->label, "HUE")) {
    value = (vb->hue + 1) * 2000.0 / 2.0 - 1000.0;
  } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
    value = vb->saturation * 2000.0 / 2.0 - 1000.0;
  } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
    value = (vb->brightness + 1) * 2000.0 / 2.0 - 1000.0;
  } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
    value = vb->contrast * 2000.0 / 2.0 - 1000.0;
  }

  return value;
}
Example No. 17
static void
gst_video_balance_colorbalance_set_value (GstColorBalance * balance,
    GstColorBalanceChannel * channel, gint value)
{
  GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);

  g_return_if_fail (vb != NULL);
  g_return_if_fail (GST_IS_VIDEO_BALANCE (vb));
  g_return_if_fail (GST_IS_VIDEO_FILTER (vb));
  g_return_if_fail (channel->label != NULL);

  if (!g_ascii_strcasecmp (channel->label, "HUE")) {
    vb->hue = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
  } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
    vb->saturation = (value + 1000.0) * 2.0 / 2000.0;
  } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
    vb->brightness = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
  } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
    vb->contrast = (value + 1000.0) * 2.0 / 2000.0;
  }

  gst_video_balance_update_properties (vb);
}
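Applications normally reach this vfunc through the GstColorBalance interface rather than calling it directly. A minimal caller sketch, assuming element is a videobalance instance; the value 500 is arbitrary and falls in the [-1000, 1000] channel range declared in Example No. 12:

static void
bump_contrast (GstElement * element)
{
  GstColorBalance *cb = GST_COLOR_BALANCE (element);
  const GList *l;

  /* find the CONTRAST channel and raise it above its neutral point */
  for (l = gst_color_balance_list_channels (cb); l != NULL; l = l->next) {
    GstColorBalanceChannel *channel = l->data;

    if (g_ascii_strcasecmp (channel->label, "CONTRAST") == 0)
      gst_color_balance_set_value (cb, channel, 500);
  }
}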