Example #1
static gboolean
gst_ffmpegscale_src_event (GstBaseTransform * trans, GstEvent * event)
{
    GstFFMpegScale *scale;
    GstStructure *structure;
    gdouble pointer;
    gboolean res;

    scale = GST_FFMPEGSCALE (trans);

    switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NAVIGATION:
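        /* navigation events travel upstream: map pointer coordinates from the
         * scaled output geometry back to the input geometry */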
        event = gst_event_make_writable (event);

        structure = gst_event_writable_structure (event);
        if (gst_structure_get_double (structure, "pointer_x", &pointer)) {
            gst_structure_set (structure,
                               "pointer_x", G_TYPE_DOUBLE,
                               pointer * scale->in_info.width / scale->out_info.width, NULL);
        }
        if (gst_structure_get_double (structure, "pointer_y", &pointer)) {
            gst_structure_set (structure,
                               "pointer_y", G_TYPE_DOUBLE,
                               pointer * scale->in_info.height / scale->out_info.height, NULL);
        }
        break;
    default:
        break;
    }

    res = GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);

    return res;
}
Example #2
static GstFlowReturn
gst_ffmpegscale_transform (GstBaseTransform * trans, GstBuffer * inbuf,
                           GstBuffer * outbuf)
{
    GstFFMpegScale *scale = GST_FFMPEGSCALE (trans);
    GstVideoFrame in_frame, out_frame;

    if (!gst_video_frame_map (&in_frame, &scale->in_info, inbuf, GST_MAP_READ))
        goto invalid_buffer;

    if (!gst_video_frame_map (&out_frame, &scale->out_info, outbuf,
                              GST_MAP_WRITE)) {
        /* don't leak the already-mapped input frame */
        gst_video_frame_unmap (&in_frame);
        goto invalid_buffer;
    }

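    /* let libswscale convert and scale directly between the mapped frames */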
    sws_scale (scale->ctx, (const guint8 **) in_frame.data, in_frame.info.stride,
               0, scale->in_info.height, (guint8 **) out_frame.data,
               out_frame.info.stride);

    gst_video_frame_unmap (&in_frame);
    gst_video_frame_unmap (&out_frame);

    return GST_FLOW_OK;

    /* ERRORS */
invalid_buffer:
    {
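        /* could not map the video frames; treat it as non-fatal and skip scaling */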
        return GST_FLOW_OK;
    }
}
static gboolean
gst_ffmpegscale_stop(GstBaseTransform* trans) {
    GstFFMpegScale* scale = GST_FFMPEGSCALE(trans);

    gst_ffmpegscale_reset(scale);

    return TRUE;
}
static void
gst_ffmpegscale_finalize(GObject* object) {
    GstFFMpegScale* scale = GST_FFMPEGSCALE(object);

    gst_ffmpegscale_reset(scale);

    G_OBJECT_CLASS(parent_class)->finalize(object);
}
static void
gst_ffmpegscale_finalize (GObject * object)
{
  GstFFMpegScale *scale = GST_FFMPEGSCALE (object);

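  /* release the libavcodec image resampling context, if any */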
  if (scale->res != NULL)
    img_resample_close (scale->res);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
Example #6
static void
gst_ffmpegscale_get_property (GObject * object, guint prop_id, GValue * value,
                              GParamSpec * pspec)
{
    GstFFMpegScale *scale = GST_FFMPEGSCALE (object);

    switch (prop_id) {
    case PROP_METHOD:
        g_value_set_enum (value, scale->method);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
        break;
    }
}
static gboolean
gst_ffmpegscale_set_caps (GstBaseTransform * trans, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstFFMpegScale *scale = GST_FFMPEGSCALE (trans);
  GstStructure *instructure = gst_caps_get_structure (incaps, 0);
  GstStructure *outstructure = gst_caps_get_structure (outcaps, 0);
  gint par_num, par_den;
  AVCodecContext *ctx;

  if (!gst_structure_get_int (instructure, "width", &scale->in_width))
    return FALSE;
  if (!gst_structure_get_int (instructure, "height", &scale->in_height))
    return FALSE;

  if (!gst_structure_get_int (outstructure, "width", &scale->out_width))
    return FALSE;
  if (!gst_structure_get_int (outstructure, "height", &scale->out_height))
    return FALSE;

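  /* adjust the output pixel-aspect-ratio so the display aspect ratio is preserved */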
  if (gst_structure_get_fraction (instructure, "pixel-aspect-ratio",
          &par_num, &par_den)) {
    gst_structure_set (outstructure,
        "pixel-aspect-ratio", GST_TYPE_FRACTION,
        par_num * scale->in_width / scale->out_width,
        par_den * scale->in_height / scale->out_height, NULL);
  }

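  /* probe the ffmpeg pixel format by filling a throw-away AVCodecContext
   * from the input caps */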
  ctx = avcodec_alloc_context ();
  ctx->width = scale->in_width;
  ctx->height = scale->in_height;
  ctx->pix_fmt = PIX_FMT_NB;
  gst_ffmpeg_caps_with_codectype (CODEC_TYPE_VIDEO, incaps, ctx);
  if (ctx->pix_fmt == PIX_FMT_NB) {
    av_free (ctx);
    return FALSE;
  }

  scale->pixfmt = ctx->pix_fmt;

  av_free (ctx);

  scale->res = img_resample_init (scale->out_width, scale->out_height,
      scale->in_width, scale->in_height);

  return TRUE;
}
static void
gst_ffmpegscale_set_property(GObject* object, guint prop_id,
                             const GValue* value, GParamSpec* pspec) {
    GstFFMpegScale* scale = GST_FFMPEGSCALE(object);

    switch (prop_id) {
    case PROP_METHOD:
        scale->method = g_value_get_enum(value);
        break;

    case PROP_ADD_BORDERS:
        scale->add_borders = g_value_get_boolean(value);
        break;

    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        break;
    }
}
static GstFlowReturn
gst_ffmpegscale_transform (GstBaseTransform * trans, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstFFMpegScale *scale = GST_FFMPEGSCALE (trans);
  AVPicture in_frame, out_frame;

  gst_buffer_copy_metadata (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS);

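  /* wrap the raw buffer data in AVPictures for libavcodec's image resampler */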
  gst_ffmpeg_avpicture_fill (&in_frame,
      GST_BUFFER_DATA (inbuf),
      scale->pixfmt, scale->in_width, scale->in_height);

  gst_ffmpeg_avpicture_fill (&out_frame,
      GST_BUFFER_DATA (outbuf),
      scale->pixfmt, scale->out_width, scale->out_height);

  img_resample (scale->res, &out_frame, &in_frame);

  return GST_FLOW_OK;
}
static gboolean
gst_ffmpegscale_handle_src_event(GstPad* pad, GstEvent* event) {
    GstFFMpegScale* scale;
    GstStructure* structure;
    gdouble pointer;
    gboolean res;

    scale = GST_FFMPEGSCALE(gst_pad_get_parent(pad));

    switch (GST_EVENT_TYPE(event)) {
    case GST_EVENT_NAVIGATION:
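        /* rescale upstream navigation pointer coordinates from output to
         * input dimensions */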
        event =
            GST_EVENT(gst_mini_object_make_writable(GST_MINI_OBJECT(event)));

        structure = (GstStructure*) gst_event_get_structure(event);

        if (gst_structure_get_double(structure, "pointer_x", &pointer)) {
            gst_structure_set(structure,
                              "pointer_x", G_TYPE_DOUBLE,
                              pointer * scale->in_width / scale->out_width, NULL);
        }

        if (gst_structure_get_double(structure, "pointer_y", &pointer)) {
            gst_structure_set(structure,
                              "pointer_y", G_TYPE_DOUBLE,
                              pointer * scale->in_height / scale->out_height, NULL);
        }

        break;

    default:
        break;
    }

    res = gst_pad_event_default(pad, event);

    gst_object_unref(scale);

    return res;
}
static GstFlowReturn
gst_ffmpegscale_transform(GstBaseTransform* trans, GstBuffer* inbuf,
                          GstBuffer* outbuf) {
    GstFFMpegScale* scale = GST_FFMPEGSCALE(trans);
    guint8* in_data[3] = { NULL, NULL, NULL };
    guint8* out_data[3] = { NULL, NULL, NULL };
    gint i;
    FFVSImage dest = { NULL, };
    FFVSImage dest_u = { NULL, };
    FFVSImage dest_v = { NULL, };

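    /* build per-plane data pointers from the precomputed plane offsets */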
    for (i = 0; i < 3; i++) {
        /* follow ffmpeg's offset convention: plane 0 is always present, the
         * other planes are only used when their offset is non-zero */
        if (!i || scale->in_offset[i]) {
            in_data[i] = GST_BUFFER_DATA(inbuf) + scale->in_offset[i];
        }

        if (!i || scale->out_offset[i]) {
            out_data[i] = GST_BUFFER_DATA(outbuf) + scale->out_offset[i];
        }
    }

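    /* pre-fill the letter-/pillar-box area with black for the output format */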
    if (scale->add_borders) {
        const guint8* black = get_black_for_format(scale->out_format);

        gst_video_scale_setup_vs_image(&dest, scale->out_format, 0,
                                       scale->out_width, scale->out_height, scale->borders_w,
                                       scale->borders_h, GST_BUFFER_DATA(outbuf));

        if (scale->out_format == GST_VIDEO_FORMAT_I420
                || scale->out_format == GST_VIDEO_FORMAT_YV12
                || scale->out_format == GST_VIDEO_FORMAT_Y444
                || scale->out_format == GST_VIDEO_FORMAT_Y42B
                || scale->out_format == GST_VIDEO_FORMAT_Y41B) {

            gst_video_scale_setup_vs_image(&dest_u, scale->out_format, 1,
                                           scale->out_width, scale->out_height, scale->borders_w,
                                           scale->borders_h, GST_BUFFER_DATA(outbuf));
            gst_video_scale_setup_vs_image(&dest_v, scale->out_format, 2,
                                           scale->out_width, scale->out_height, scale->borders_w,
                                           scale->borders_h, GST_BUFFER_DATA(outbuf));
        }

        switch (scale->out_format) {
        case GST_VIDEO_FORMAT_RGBx:
        case GST_VIDEO_FORMAT_xRGB:
        case GST_VIDEO_FORMAT_BGRx:
        case GST_VIDEO_FORMAT_xBGR:
        case GST_VIDEO_FORMAT_RGBA:
        case GST_VIDEO_FORMAT_ARGB:
        case GST_VIDEO_FORMAT_BGRA:
        case GST_VIDEO_FORMAT_ABGR:
        case GST_VIDEO_FORMAT_AYUV:
            vs_fill_borders_RGBA(&dest, black);
            break;

        case GST_VIDEO_FORMAT_ARGB64:
        case GST_VIDEO_FORMAT_AYUV64:
            vs_fill_borders_AYUV64(&dest, black);
            break;

        case GST_VIDEO_FORMAT_RGB:
        case GST_VIDEO_FORMAT_BGR:
        case GST_VIDEO_FORMAT_v308:
            vs_fill_borders_RGB(&dest, black);
            break;

        case GST_VIDEO_FORMAT_YUY2:
        case GST_VIDEO_FORMAT_YVYU:
            vs_fill_borders_YUYV(&dest, black);
            break;

        case GST_VIDEO_FORMAT_UYVY:
            vs_fill_borders_UYVY(&dest, black);
            break;

        case GST_VIDEO_FORMAT_GRAY8:
            vs_fill_borders_Y(&dest, black);
            break;

        case GST_VIDEO_FORMAT_GRAY16_LE:
        case GST_VIDEO_FORMAT_GRAY16_BE:
            vs_fill_borders_Y16(&dest, 0);
            break;

        case GST_VIDEO_FORMAT_I420:
        case GST_VIDEO_FORMAT_YV12:
        case GST_VIDEO_FORMAT_Y444:
        case GST_VIDEO_FORMAT_Y42B:
        case GST_VIDEO_FORMAT_Y41B:
            vs_fill_borders_Y(&dest, black);
            vs_fill_borders_Y(&dest_u, black + 1);
            vs_fill_borders_Y(&dest_v, black + 2);
            break;

        case GST_VIDEO_FORMAT_RGB16:
            vs_fill_borders_RGB565(&dest, black);
            break;

        case GST_VIDEO_FORMAT_RGB15:
            vs_fill_borders_RGB555(&dest, black);
            break;

        default:
            break;
        }
    }

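    /* scale the picture; when borders are added, out_offset already points at
     * the active area inside the borders */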
    sws_scale(scale->ctx, (const guint8**) in_data, scale->in_stride, 0,
              scale->in_height, out_data, scale->out_stride);

    return GST_FLOW_OK;
}
static gboolean
gst_ffmpegscale_set_caps(GstBaseTransform* trans, GstCaps* incaps,
                         GstCaps* outcaps) {
    GstFFMpegScale* scale = GST_FFMPEGSCALE(trans);
    guint mmx_flags, altivec_flags, sse_flags;
    gint swsflags;
    gboolean ok;

    g_return_val_if_fail(scale->method <
                         G_N_ELEMENTS(gst_ffmpegscale_method_flags), FALSE);

    if (scale->ctx) {
        sws_freeContext(scale->ctx);
        scale->ctx = NULL;
    }

    scale->borders_h = 0;
    scale->borders_w = 0;

    ok = gst_video_format_parse_caps(incaps, &scale->in_format, &scale->in_width,
                                     &scale->in_height);
    ok &= gst_video_format_parse_caps(outcaps, &scale->out_format, &scale->out_width,
                                      &scale->out_height);
    scale->in_pixfmt = gst_ffmpeg_caps_to_pixfmt(incaps);
    scale->out_pixfmt = gst_ffmpeg_caps_to_pixfmt(outcaps);

    if (!ok || scale->in_pixfmt == PIX_FMT_NONE ||
            scale->out_pixfmt == PIX_FMT_NONE ||
            scale->in_format == GST_VIDEO_FORMAT_UNKNOWN ||
            scale->out_format == GST_VIDEO_FORMAT_UNKNOWN) {
        goto refuse_caps;
    }

    GST_DEBUG_OBJECT(scale, "format %d => %d, from=%dx%d -> to=%dx%d", scale->in_format,
                     scale->out_format, scale->in_width, scale->in_height, scale->out_width,
                     scale->out_height);

    gst_ffmpegscale_fill_info(scale, scale->in_format, scale->in_width,
                              scale->in_height, scale->in_stride, scale->in_offset);
    gst_ffmpegscale_fill_info(scale, scale->out_format, scale->out_width,
                              scale->out_height, scale->out_stride, scale->out_offset);

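    /* when Orc is available, map its runtime CPU detection onto libswscale
     * CPU capability flags */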
#ifdef HAVE_ORC
    mmx_flags = orc_target_get_default_flags(orc_target_get_by_name("mmx"));
    altivec_flags =
        orc_target_get_default_flags(orc_target_get_by_name("altivec"));
    sse_flags = orc_target_get_default_flags(orc_target_get_by_name("sse"));

    swsflags = (mmx_flags & ORC_TARGET_MMX_MMX ? SWS_CPU_CAPS_MMX : 0)
               | (mmx_flags & ORC_TARGET_MMX_MMXEXT ? SWS_CPU_CAPS_MMX2 : 0)
               | (mmx_flags & ORC_TARGET_MMX_3DNOW ? SWS_CPU_CAPS_3DNOW : 0)
               | (altivec_flags & ORC_TARGET_ALTIVEC_ALTIVEC ? SWS_CPU_CAPS_ALTIVEC : 0)
               | (sse_flags & ORC_TARGET_SSE_SSE2 ? SWS_CPU_CAPS_SSE2 : 0);
#else
    mmx_flags = 0;
    altivec_flags = 0;
    sse_flags = 0;
    swsflags = 0;
#endif

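    /* with add-borders enabled, shrink one output dimension to keep the input
     * aspect ratio and remember the border size for the transform function */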
    if (scale->add_borders && scale->in_width > 0 && scale->in_height > 0 && scale->out_height > 0
            && scale->out_width > 0) {
        gfloat ratio = scale->in_width * 1.0f / scale->in_height;
        gint ratio_width = (gint)(scale->out_height * ratio);

        if (ratio_width > scale->out_width) {
            gint ratio_height = (gint)(scale->out_width / ratio);

            scale->ctx = sws_getContext(scale->in_width, scale->in_height,
                                        scale->in_pixfmt, scale->out_width, ratio_height, scale->out_pixfmt,
                                        swsflags | gst_ffmpegscale_method_flags[scale->method], NULL, NULL, NULL);

            if (ratio_height != scale->out_height) {
                gint rows = 0;
                scale->borders_h = scale->out_height - ratio_height;
                rows = scale->borders_h / 2;

                if (rows > 0) {
                    gst_video_format_add_top_border(scale->out_format, rows, scale->out_offset, scale->out_stride);
                }
            }
        } else {
            if (ratio_width != scale->out_width) {
                gint cols = 0;
                scale->borders_w = scale->out_width - ratio_width;
                cols = scale->borders_w / 2;

                if (cols > 0) {
                    int i = 0;

                    for (i = 0; i < 3; i++) {
                        scale->out_offset[i] += cols * scale->out_stride[i] / scale->out_width;
                    }
                }
            }

            scale->ctx = sws_getContext(scale->in_width, scale->in_height,
                                        scale->in_pixfmt, ratio_width, scale->out_height, scale->out_pixfmt,
                                        swsflags | gst_ffmpegscale_method_flags[scale->method], NULL, NULL, NULL);
        }

    } else {
        scale->ctx = sws_getContext(scale->in_width, scale->in_height,
                                    scale->in_pixfmt, scale->out_width, scale->out_height, scale->out_pixfmt,
                                    swsflags | gst_ffmpegscale_method_flags[scale->method], NULL, NULL, NULL);
    }

    if (!scale->ctx) {
        goto setup_failed;
    }

    return TRUE;

    /* ERRORS */
setup_failed: {
        GST_ELEMENT_ERROR(trans, LIBRARY, INIT, (NULL), (NULL));
        return FALSE;
    }
refuse_caps: {
        GST_DEBUG_OBJECT(trans, "refused caps %" GST_PTR_FORMAT, incaps);
        return FALSE;
    }
}
Example #13
static gboolean
gst_ffmpegscale_set_caps (GstBaseTransform * trans, GstCaps * incaps,
                          GstCaps * outcaps)
{
    GstFFMpegScale *scale = GST_FFMPEGSCALE (trans);
#ifdef HAVE_ORC
    guint mmx_flags, altivec_flags;
#endif
    gint swsflags;
    gboolean ok;

    g_return_val_if_fail (scale->method <
                          G_N_ELEMENTS (gst_ffmpegscale_method_flags), FALSE);

    if (scale->ctx) {
        sws_freeContext (scale->ctx);
        scale->ctx = NULL;
    }

    ok = gst_video_info_from_caps (&scale->in_info, incaps);
    ok &= gst_video_info_from_caps (&scale->out_info, outcaps);

    scale->in_pixfmt = gst_ffmpeg_caps_to_pixfmt (incaps);
    scale->out_pixfmt = gst_ffmpeg_caps_to_pixfmt (outcaps);

    if (!ok || scale->in_pixfmt == AV_PIX_FMT_NONE ||
            scale->out_pixfmt == AV_PIX_FMT_NONE ||
            GST_VIDEO_INFO_FORMAT (&scale->in_info) == GST_VIDEO_FORMAT_UNKNOWN ||
            GST_VIDEO_INFO_FORMAT (&scale->out_info) == GST_VIDEO_FORMAT_UNKNOWN)
        goto refuse_caps;

    GST_DEBUG_OBJECT (scale, "format %d => %d, from=%dx%d -> to=%dx%d",
                      GST_VIDEO_INFO_FORMAT (&scale->in_info),
                      GST_VIDEO_INFO_FORMAT (&scale->out_info),
                      GST_VIDEO_INFO_WIDTH (&scale->in_info),
                      GST_VIDEO_INFO_HEIGHT (&scale->in_info),
                      GST_VIDEO_INFO_WIDTH (&scale->out_info),
                      GST_VIDEO_INFO_HEIGHT (&scale->out_info));

#ifdef HAVE_ORC
    mmx_flags = orc_target_get_default_flags (orc_target_get_by_name ("mmx"));
    altivec_flags =
        orc_target_get_default_flags (orc_target_get_by_name ("altivec"));
    swsflags = (mmx_flags & ORC_TARGET_MMX_MMX ? SWS_CPU_CAPS_MMX : 0)
               | (mmx_flags & ORC_TARGET_MMX_MMXEXT ? SWS_CPU_CAPS_MMX2 : 0)
               | (mmx_flags & ORC_TARGET_MMX_3DNOW ? SWS_CPU_CAPS_3DNOW : 0)
               | (altivec_flags & ORC_TARGET_ALTIVEC_ALTIVEC ? SWS_CPU_CAPS_ALTIVEC : 0);
#else
    swsflags = 0;
#endif

    scale->ctx = sws_getContext (scale->in_info.width, scale->in_info.height,
                                 scale->in_pixfmt, scale->out_info.width, scale->out_info.height,
                                 scale->out_pixfmt, swsflags | gst_ffmpegscale_method_flags[scale->method],
                                 NULL, NULL, NULL);
    if (!scale->ctx)
        goto setup_failed;

    return TRUE;

    /* ERRORS */
setup_failed:
    {
        GST_ELEMENT_ERROR (trans, LIBRARY, INIT, (NULL), (NULL));
        return FALSE;
    }
refuse_caps:
    {
        GST_DEBUG_OBJECT (trans, "refused caps %" GST_PTR_FORMAT, incaps);
        return FALSE;
    }
}