static gboolean
gst_ffmpegdeinterlace_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstFFMpegDeinterlace *deinterlace =
      GST_FFMPEGDEINTERLACE (gst_pad_get_parent (pad));
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  AVCodecContext *ctx;

  if (!gst_structure_get_int (structure, "width", &deinterlace->width))
    return FALSE;
  if (!gst_structure_get_int (structure, "height", &deinterlace->height))
    return FALSE;

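  /* Probe the negotiated pixel format: let the caps->context helper fill a
   * throwaway AVCodecContext, with PIX_FMT_NB doubling as the "not found"
   * marker */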
  ctx = avcodec_alloc_context ();
  ctx->width = deinterlace->width;
  ctx->height = deinterlace->height;
  ctx->pix_fmt = PIX_FMT_NB;
  gst_ffmpeg_caps_with_codectype (CODEC_TYPE_VIDEO, caps, ctx);
  if (ctx->pix_fmt == PIX_FMT_NB) {
    av_free (ctx);
    return FALSE;
  }

  deinterlace->pixfmt = ctx->pix_fmt;

  av_free (ctx);

  deinterlace->to_size =
      avpicture_get_size (deinterlace->pixfmt, deinterlace->width,
      deinterlace->height);

  return gst_pad_set_caps (deinterlace->srcpad, caps);
}
static GstFlowReturn
gst_ffmpegdeinterlace_chain (GstPad * pad, GstBuffer * inbuf)
{
  GstFFMpegDeinterlace *deinterlace =
      GST_FFMPEGDEINTERLACE (gst_pad_get_parent (pad));
  GstBuffer *outbuf = NULL;
  GstFlowReturn result;

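  /* Ask the downstream peer for an output buffer big enough to hold one
   * deinterlaced picture */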
  result =
      gst_pad_alloc_buffer (deinterlace->srcpad, GST_BUFFER_OFFSET_NONE,
      deinterlace->to_size, GST_PAD_CAPS (deinterlace->srcpad), &outbuf);
  if (result == GST_FLOW_OK) {
    gst_ffmpeg_avpicture_fill (&deinterlace->from_frame,
        GST_BUFFER_DATA (inbuf), deinterlace->pixfmt, deinterlace->width,
        deinterlace->height);

    gst_ffmpeg_avpicture_fill (&deinterlace->to_frame, GST_BUFFER_DATA (outbuf),
        deinterlace->pixfmt, deinterlace->width, deinterlace->height);

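    /* Deinterlace straight from the wrapped input picture into the freshly
     * allocated output picture */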
    avpicture_deinterlace (&deinterlace->to_frame, &deinterlace->from_frame,
        deinterlace->pixfmt, deinterlace->width, deinterlace->height);

    gst_buffer_copy_metadata (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS);

    result = gst_pad_push (deinterlace->srcpad, outbuf);
  }

  gst_buffer_unref (inbuf);

  return result;
}
static void
gst_ffmpegdeinterlace_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstFFMpegDeinterlace *self;

  g_return_if_fail (GST_IS_FFMPEGDEINTERLACE (object));
  self = GST_FFMPEGDEINTERLACE (object);

  switch (prop_id) {
    case PROP_MODE:{
      gint new_mode;

      GST_OBJECT_LOCK (self);
      new_mode = g_value_get_enum (value);
      if (self->mode != new_mode && GST_PAD_CAPS (self->srcpad)) {
        self->reconfigure = TRUE;
        self->new_mode = new_mode;
      } else {
        self->mode = new_mode;
        gst_ffmpegdeinterlace_update_passthrough (self);
      }
      GST_OBJECT_UNLOCK (self);
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
  }
}
static GstFlowReturn
gst_ffmpegdeinterlace_chain (GstPad * pad, GstObject * parent,
    GstBuffer * inbuf)
{
  GstFFMpegDeinterlace *deinterlace = GST_FFMPEGDEINTERLACE (parent);
  GstBuffer *outbuf = NULL;
  GstFlowReturn result;
  GstMapInfo from_map, to_map;

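  /* A mode change requested from set_property is applied here, on the
   * streaming thread, by re-running caps negotiation */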
  GST_OBJECT_LOCK (deinterlace);
  if (deinterlace->reconfigure) {
    if (deinterlace->new_mode != -1)
      deinterlace->mode = deinterlace->new_mode;
    deinterlace->new_mode = -1;

    deinterlace->reconfigure = FALSE;
    GST_OBJECT_UNLOCK (deinterlace);
    if (gst_pad_has_current_caps (deinterlace->srcpad)) {
      GstCaps *caps;

      caps = gst_pad_get_current_caps (deinterlace->sinkpad);
      gst_ffmpegdeinterlace_sink_setcaps (deinterlace->sinkpad, caps);
      gst_caps_unref (caps);
    }
  } else {
    GST_OBJECT_UNLOCK (deinterlace);
  }

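  /* In passthrough mode the input buffer is forwarded untouched */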
  if (deinterlace->passthrough)
    return gst_pad_push (deinterlace->srcpad, inbuf);

  outbuf = gst_buffer_new_and_alloc (deinterlace->to_size);

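  /* Map the input for reading and the output for writing, wrapping both as
   * AVPictures so libav can access them */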
  gst_buffer_map (inbuf, &from_map, GST_MAP_READ);
  gst_ffmpeg_avpicture_fill (&deinterlace->from_frame, from_map.data,
      deinterlace->pixfmt, deinterlace->width, deinterlace->height);

  gst_buffer_map (outbuf, &to_map, GST_MAP_WRITE);
  gst_ffmpeg_avpicture_fill (&deinterlace->to_frame, to_map.data,
      deinterlace->pixfmt, deinterlace->width, deinterlace->height);

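  /* Let libav deinterlace from the input picture into the output picture */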
  avpicture_deinterlace (&deinterlace->to_frame, &deinterlace->from_frame,
      deinterlace->pixfmt, deinterlace->width, deinterlace->height);
  gst_buffer_unmap (outbuf, &to_map);
  gst_buffer_unmap (inbuf, &from_map);

  gst_buffer_copy_into (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);

  result = gst_pad_push (deinterlace->srcpad, outbuf);

  gst_buffer_unref (inbuf);

  return result;
}
static GstFlowReturn
gst_ffmpegdeinterlace_chain (GstPad * pad, GstBuffer * inbuf)
{
  GstFFMpegDeinterlace *deinterlace =
      GST_FFMPEGDEINTERLACE (gst_pad_get_parent (pad));
  GstBuffer *outbuf = NULL;
  GstFlowReturn result;

  GST_OBJECT_LOCK (deinterlace);
  if (deinterlace->reconfigure) {
    if (deinterlace->new_mode != -1)
      deinterlace->mode = deinterlace->new_mode;
    deinterlace->new_mode = -1;

    deinterlace->reconfigure = FALSE;
    GST_OBJECT_UNLOCK (deinterlace);
    if (GST_PAD_CAPS (deinterlace->srcpad))
      gst_ffmpegdeinterlace_sink_setcaps (deinterlace->sinkpad,
          GST_PAD_CAPS (deinterlace->sinkpad));
  } else {
    GST_OBJECT_UNLOCK (deinterlace);
  }

  if (deinterlace->passthrough)
    return gst_pad_push (deinterlace->srcpad, inbuf);

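  /* In the 0.10 API the output buffer is requested from the downstream peer
   * via gst_pad_alloc_buffer() */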
  result =
      gst_pad_alloc_buffer (deinterlace->srcpad, GST_BUFFER_OFFSET_NONE,
      deinterlace->to_size, GST_PAD_CAPS (deinterlace->srcpad), &outbuf);
  if (result == GST_FLOW_OK) {
    gst_ffmpeg_avpicture_fill (&deinterlace->from_frame,
        GST_BUFFER_DATA (inbuf), deinterlace->pixfmt, deinterlace->width,
        deinterlace->height);

    gst_ffmpeg_avpicture_fill (&deinterlace->to_frame, GST_BUFFER_DATA (outbuf),
        deinterlace->pixfmt, deinterlace->width, deinterlace->height);

    avpicture_deinterlace (&deinterlace->to_frame, &deinterlace->from_frame,
        deinterlace->pixfmt, deinterlace->width, deinterlace->height);

    gst_buffer_copy_metadata (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS);

    result = gst_pad_push (deinterlace->srcpad, outbuf);
  }

  gst_buffer_unref (inbuf);

  return result;
}
static void
gst_ffmpegdeinterlace_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstFFMpegDeinterlace *self;

  g_return_if_fail (GST_IS_FFMPEGDEINTERLACE (object));
  self = GST_FFMPEGDEINTERLACE (object);

  switch (prop_id) {
    case PROP_MODE:
      g_value_set_enum (value, self->mode);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
  }
}
static gboolean
gst_ffmpegdeinterlace_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstFFMpegDeinterlace *deinterlace =
      GST_FFMPEGDEINTERLACE (gst_pad_get_parent (pad));
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  AVCodecContext *ctx;
  GstCaps *src_caps;
  gboolean ret;

  if (!gst_structure_get_int (structure, "width", &deinterlace->width))
    return FALSE;
  if (!gst_structure_get_int (structure, "height", &deinterlace->height))
    return FALSE;

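  /* The optional "interlaced" caps field decides whether the element can run
   * in passthrough mode */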
  deinterlace->interlaced = FALSE;
  gst_structure_get_boolean (structure, "interlaced", &deinterlace->interlaced);
  gst_ffmpegdeinterlace_update_passthrough (deinterlace);

  ctx = avcodec_alloc_context ();
  ctx->width = deinterlace->width;
  ctx->height = deinterlace->height;
  ctx->pix_fmt = PIX_FMT_NB;
  gst_ffmpeg_caps_with_codectype (AVMEDIA_TYPE_VIDEO, caps, ctx);
  if (ctx->pix_fmt == PIX_FMT_NB) {
    av_free (ctx);
    return FALSE;
  }

  deinterlace->pixfmt = ctx->pix_fmt;

  av_free (ctx);

  deinterlace->to_size =
      avpicture_get_size (deinterlace->pixfmt, deinterlace->width,
      deinterlace->height);

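  /* Propagate the caps downstream, carrying the interlaced flag over to the
   * source pad */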
  src_caps = gst_caps_copy (caps);
  gst_caps_set_simple (src_caps, "interlaced", G_TYPE_BOOLEAN,
      deinterlace->interlaced, NULL);
  ret = gst_pad_set_caps (deinterlace->srcpad, src_caps);
  gst_caps_unref (src_caps);

  return ret;
}
static gboolean
gst_ffmpegdeinterlace_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstFFMpegDeinterlace *deinterlace =
      GST_FFMPEGDEINTERLACE (gst_pad_get_parent (pad));
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  AVCodecContext *ctx;
  GValue interlaced = { 0 };
  GstCaps *srcCaps;
  gboolean ret;

  if (!gst_structure_get_int (structure, "width", &deinterlace->width))
    return FALSE;
  if (!gst_structure_get_int (structure, "height", &deinterlace->height))
    return FALSE;

  ctx = avcodec_alloc_context ();
  ctx->width = deinterlace->width;
  ctx->height = deinterlace->height;
  ctx->pix_fmt = PIX_FMT_NB;
  gst_ffmpeg_caps_with_codectype (CODEC_TYPE_VIDEO, caps, ctx);
  if (ctx->pix_fmt == PIX_FMT_NB) {
    av_free (ctx);
    return FALSE;
  }

  deinterlace->pixfmt = ctx->pix_fmt;

  av_free (ctx);

  deinterlace->to_size =
      avpicture_get_size (deinterlace->pixfmt, deinterlace->width,
      deinterlace->height);

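  /* The output is progressive, so the source caps explicitly advertise
   * interlaced=FALSE */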
  srcCaps = gst_caps_copy (caps);
  g_value_init (&interlaced, G_TYPE_BOOLEAN);
  g_value_set_boolean (&interlaced, FALSE);
  gst_caps_set_value (srcCaps, "interlaced", &interlaced);
  g_value_unset (&interlaced);

  ret = gst_pad_set_caps (deinterlace->srcpad, srcCaps);
  gst_caps_unref (srcCaps);
  return ret;
}
static gboolean
gst_ffmpegdeinterlace_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstFFMpegDeinterlace *deinterlace = GST_FFMPEGDEINTERLACE (parent);
  gboolean ret = FALSE;

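  /* CAPS events are consumed here and turned into a setcaps call; all other
   * events are forwarded downstream */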
  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret = gst_ffmpegdeinterlace_sink_setcaps (pad, caps);
      gst_event_unref (event);
      break;
    }
    default:
      ret = gst_pad_push_event (deinterlace->srcpad, event);
      break;
  }

  return ret;
}