Example #1
GstCaps *
gst_gl_mixer_update_caps (GstGLMixer * mix, GstCaps * caps)
{
    GstCaps *result = NULL;
    GstCaps *glcaps = gst_gl_mixer_set_caps_features (caps,
                      GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
#if GST_GL_HAVE_PLATFORM_EGL
    GstCaps *eglcaps = gst_gl_mixer_set_caps_features (caps,
                       GST_CAPS_FEATURE_MEMORY_EGL_IMAGE);
#endif
    GstCaps *uploadcaps = gst_gl_mixer_set_caps_features (caps,
                          GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META);
    GstCaps *raw_caps =
        gst_caps_from_string (GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS));

    result = gst_caps_new_empty ();

    result = gst_caps_merge (result, glcaps);
#if GST_GL_HAVE_PLATFORM_EGL
    result = gst_caps_merge (result, eglcaps);
#endif
    result = gst_caps_merge (result, uploadcaps);
    result = gst_caps_merge (result, raw_caps);

    result = gst_caps_merge (result, gst_gl_mixer_caps_remove_format_info (caps));

    GST_DEBUG_OBJECT (mix, "returning %" GST_PTR_FORMAT, result);

    return result;
}
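A note on Example #1, with a minimal hedged sketch (not taken from the file above) of the ownership rule it relies on: gst_caps_merge () consumes both of its arguments and returns the combined caps, which is why result is reassigned after every call. The helper name below is hypothetical; assumes GStreamer 1.x.

#include <gst/gst.h>

/* Hedged sketch: both input caps are owned by the merge result afterwards;
 * only the returned caps must be unreffed by the caller. */
static GstCaps *
merge_two_caps_sketch (void)
{
  GstCaps *result = gst_caps_new_empty ();

  result = gst_caps_merge (result,
      gst_caps_from_string ("video/x-raw, format=(string)RGBA"));
  result = gst_caps_merge (result,
      gst_caps_from_string ("video/x-raw, format=(string)I420"));

  return result;
}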
#define orc_memset memset
#else
#include <orc/orcfunctions.h>
#endif

GST_DEBUG_CATEGORY_STATIC (gst_compositor_debug);
#define GST_CAT_DEFAULT gst_compositor_debug

#define FORMATS " { AYUV, BGRA, ARGB, RGBA, ABGR, Y444, Y42B, YUY2, UYVY, "\
                "   YVYU, I420, YV12, NV12, NV21, Y41B, RGB, BGR, xRGB, xBGR, "\
                "   RGBx, BGRx } "

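The FORMATS list above is wrapped by GST_VIDEO_CAPS_MAKE in the pad templates below. As a hedged illustration of what that macro produces, here is a small standalone program (not part of the compositor source; names and the shortened format list are illustrative) that simply parses and prints the resulting caps string instead of restating the macro's expansion:

#include <gst/gst.h>
#include <gst/video/video.h>

int
main (int argc, char **argv)
{
  GstCaps *caps;
  gchar *str;

  gst_init (&argc, &argv);

  /* GST_VIDEO_CAPS_MAKE builds a "video/x-raw" caps string for the given
   * format list together with full width/height/framerate ranges. */
  caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("{ AYUV, BGRA, ARGB, RGBA }"));
  str = gst_caps_to_string (caps);
  g_print ("%s\n", str);

  g_free (str);
  gst_caps_unref (caps);
  return 0;
}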
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
        GST_PAD_SRC,
        GST_PAD_ALWAYS,
        GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
                                                                  );

static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%u",
        GST_PAD_SINK,
        GST_PAD_REQUEST,
        GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
                                                                   );

#define DEFAULT_PAD_XPOS   0
#define DEFAULT_PAD_YPOS   0
#define DEFAULT_PAD_WIDTH  0
#define DEFAULT_PAD_HEIGHT 0
#define DEFAULT_PAD_ALPHA  1.0
enum
{
Example #3
#include "gstmsdksystemmemory.h"
#include "gstmsdkcontextutil.h"
#include "gstmsdkvpputil.h"

#ifndef _WIN32
#include "gstmsdkallocator_libva.h"
#endif

GST_DEBUG_CATEGORY_EXTERN (gst_msdkvpp_debug);
#define GST_CAT_DEFAULT gst_msdkvpp_debug

static GstStaticPadTemplate gst_msdkvpp_sink_factory =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
        ("{ NV12, YV12, I420, YUY2, UYVY, BGRA, BGRx }")
        ", " "interlace-mode = (string){ progressive, interleaved, mixed }" ";"
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES (GST_CAPS_FEATURE_MEMORY_DMABUF,
            "{ NV12, BGRA, YUY2}")));

static GstStaticPadTemplate gst_msdkvpp_src_factory =
    GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_DMABUF,
            "{ BGRA, YUY2, NV12}") ";"
        GST_VIDEO_CAPS_MAKE ("{ NV12, YUY2, BGRA, BGRx }") ", "
        "interlace-mode = (string){ progressive, interleaved, mixed }" ";"));

enum
#define GST_PLUGIN_NAME "vaapiencode_mpeg2"
#define GST_PLUGIN_DESC "A VA-API based MPEG-2 video encoder"

GST_DEBUG_CATEGORY_STATIC (gst_vaapi_mpeg2_encode_debug);
#define GST_CAT_DEFAULT gst_vaapi_mpeg2_encode_debug

#define GST_CODEC_CAPS                          \
  "video/mpeg, mpegversion = (int) 2, "         \
  "systemstream = (boolean) false"

/* *INDENT-OFF* */
static const char gst_vaapiencode_mpeg2_sink_caps_str[] =
  GST_VAAPI_MAKE_ENC_SURFACE_CAPS ", "
  GST_CAPS_INTERLACED_FALSE "; "
  GST_VIDEO_CAPS_MAKE (GST_VIDEO_FORMATS_ALL) ", "
  GST_CAPS_INTERLACED_FALSE;
/* *INDENT-ON* */

/* *INDENT-OFF* */
static const char gst_vaapiencode_mpeg2_src_caps_str[] =
  GST_CODEC_CAPS;
/* *INDENT-ON* */

/* *INDENT-OFF* */
static GstStaticPadTemplate gst_vaapiencode_mpeg2_sink_factory =
  GST_STATIC_PAD_TEMPLATE ("sink",
      GST_PAD_SINK,
      GST_PAD_ALWAYS,
      GST_STATIC_CAPS (gst_vaapiencode_mpeg2_sink_caps_str));
/* *INDENT-ON* */
Example #5
    GstVideoCodecState * state);
static GstFlowReturn gst_pngdec_parse (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
static GstFlowReturn gst_pngdec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame);
static gboolean gst_pngdec_decide_allocation (GstVideoDecoder * decoder,
    GstQuery * query);

#define parent_class gst_pngdec_parent_class
G_DEFINE_TYPE (GstPngDec, gst_pngdec, GST_TYPE_VIDEO_DECODER);

static GstStaticPadTemplate gst_pngdec_src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
        ("{ RGBA, RGB, ARGB64, GRAY8, GRAY16_BE }"))
    );

static GstStaticPadTemplate gst_pngdec_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("image/png")
    );

static void
gst_pngdec_class_init (GstPngDecClass * klass)
{
  GstElementClass *element_class = (GstElementClass *) klass;
  GstVideoDecoderClass *vdec_class = (GstVideoDecoderClass *) klass;
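Example #5 is cut off before class_init registers the two templates declared above. The following is a minimal hedged sketch of that step, not part of the original file; it assumes GStreamer >= 1.8, where gst_element_class_add_static_pad_template () is available (older code would call gst_element_class_add_pad_template () with gst_static_pad_template_get ()).

/* Hedged sketch: registering the static pad templates from this example. */
static void
gst_pngdec_add_templates_sketch (GstElementClass * element_class)
{
  gst_element_class_add_static_pad_template (element_class,
      &gst_pngdec_src_pad_template);
  gst_element_class_add_static_pad_template (element_class,
      &gst_pngdec_sink_pad_template);
}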
Example #6
enum
{
  PROP_0,
  PROP_POSITION,
  PROP_BORDER
};

#define DEFAULT_POSITION 0.0
#define DEFAULT_BORDER 0.0

static GstStaticPadTemplate video_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("video_sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, ARGB, BGRA, ABGR, RGBA }")));

static GstStaticPadTemplate mask_sink_pad_template =
    GST_STATIC_PAD_TEMPLATE ("mask_sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw, "
        "format = (string) GRAY8, "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE ", " "framerate = 0/1 ; "
        "video/x-raw, " "format = (string) " GST_VIDEO_NE (GRAY16) ", "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE ", " "framerate = 0/1"));

static GstStaticPadTemplate src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
Example #7
    GstBuffer ** buf);
static GstCaps *gst_inter_video_src_fixate (GstBaseSrc * src, GstCaps * caps);

enum
{
  PROP_0,
  PROP_CHANNEL
};

/* pad templates */

static GstStaticPadTemplate gst_inter_video_src_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
    );


/* class initialization */

G_DEFINE_TYPE (GstInterVideoSrc, gst_inter_video_src, GST_TYPE_BASE_SRC);

static void
gst_inter_video_src_class_init (GstInterVideoSrcClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseSrcClass *base_src_class = GST_BASE_SRC_CLASS (klass);

  GST_DEBUG_CATEGORY_INIT (gst_inter_video_src_debug_category, "intervideosrc",
static const gchar *redisplay_fragment_shader_str_gles2 =
      "precision mediump float;                            \n"
      "varying vec2 v_texCoord;                            \n"
      "uniform sampler2D s_texture;                        \n"
      "void main()                                         \n"
      "{                                                   \n"
      "  gl_FragColor = texture2D( s_texture, v_texCoord );\n"
      "}                                                   \n";
/* *INDENT-ON* */
#endif

static GstStaticPadTemplate gst_glimage_sink_template =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (GST_GL_UPLOAD_FORMATS) "; "
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META,
            GST_GL_UPLOAD_FORMATS))
    );

enum
{
  ARG_0,
  ARG_DISPLAY,
  PROP_CLIENT_RESHAPE_CALLBACK,
  PROP_CLIENT_DRAW_CALLBACK,
  PROP_CLIENT_DATA,
  PROP_FORCE_ASPECT_RATIO,
  PROP_PIXEL_ASPECT_RATIO,
  PROP_OTHER_CONTEXT
#include <string.h>

#include "gstvp8utils.h"
#include "gstvp9enc.h"

GST_DEBUG_CATEGORY_STATIC (gst_vp9enc_debug);
#define GST_CAT_DEFAULT gst_vp9enc_debug


/* FIXME: Y42B and Y444 do not work yet it seems */
static GstStaticPadTemplate gst_vp9_enc_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    /*GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ I420, YV12, Y42B, Y444 }")) */
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ I420, YV12 }"))
    );

static GstStaticPadTemplate gst_vp9_enc_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-vp9, " "profile = (string) {0, 1, 2, 3}")
    );

#define parent_class gst_vp9_enc_parent_class
G_DEFINE_TYPE (GstVP9Enc, gst_vp9_enc, GST_TYPE_VPX_ENC);

static vpx_codec_iface_t *gst_vp9_enc_get_algo (GstVPXEnc * enc);
static gboolean gst_vp9_enc_enable_scaling (GstVPXEnc * enc);
static void gst_vp9_enc_set_image_format (GstVPXEnc * enc, vpx_image_t * image);
Example #10
#endif

#include <stdlib.h>
#include "gstspacescope.h"

#if G_BYTE_ORDER == G_BIG_ENDIAN
#define RGB_ORDER "xRGB"
#else
#define RGB_ORDER "BGRx"
#endif

static GstStaticPadTemplate gst_space_scope_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (RGB_ORDER))
    );

static GstStaticPadTemplate gst_space_scope_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw, "
        "format = (string) " GST_AUDIO_NE (S16) ", "
        "layout = (string) interleaved, "
        "rate = (int) [ 8000, 96000 ], "
        "channels = (int) 2, " "channel-mask = (bitmask) 0x3")
    );


GST_DEBUG_CATEGORY_STATIC (space_scope_debug);
static gboolean gst_fbdevsink_setcaps (GstBaseSink * bsink, GstCaps * caps);

static void gst_fbdevsink_finalize (GObject * object);
static void gst_fbdevsink_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_fbdevsink_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_fbdevsink_change_state (GstElement * element,
    GstStateChange transition);

#define VIDEO_CAPS "{ RGB, BGR, BGRx, xBGR, RGB, RGBx, xRGB, RGB15, RGB16 }"

static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (VIDEO_CAPS))
    );

#define parent_class gst_fbdevsink_parent_class
G_DEFINE_TYPE (GstFBDEVSink, gst_fbdevsink, GST_TYPE_VIDEO_SINK);

static void
gst_fbdevsink_init (GstFBDEVSink * fbdevsink)
{
  /* nothing to do here yet */
}

#if 0
static void
gst_fbdevsink_get_times (GstBaseSink * basesink, GstBuffer * buffer,
    GstClockTime * start, GstClockTime * end)
Example #12
    GST_CAPS_CODEC("video/x-wmv")
#if USE_VP8_DECODER
    GST_CAPS_CODEC("video/x-vp8")
#endif
#if USE_JPEG_DECODER
    GST_CAPS_CODEC("image/jpeg")
#endif
#if USE_VP9_DECODER
    GST_CAPS_CODEC("video/x-vp9")
#endif
    ;

static const char gst_vaapidecode_src_caps_str[] =
    GST_VAAPI_MAKE_SURFACE_CAPS ";"
    GST_VAAPI_MAKE_GLTEXUPLOAD_CAPS ";"
    GST_VIDEO_CAPS_MAKE("{ I420, YV12, NV12 }");

static GstStaticPadTemplate gst_vaapidecode_sink_factory =
    GST_STATIC_PAD_TEMPLATE(
        "sink",
        GST_PAD_SINK,
        GST_PAD_ALWAYS,
        GST_STATIC_CAPS(gst_vaapidecode_sink_caps_str));

static GstStaticPadTemplate gst_vaapidecode_src_factory =
    GST_STATIC_PAD_TEMPLATE(
        "src",
        GST_PAD_SRC,
        GST_PAD_ALWAYS,
        GST_STATIC_CAPS(gst_vaapidecode_src_caps_str));
Example #13
static void
gst_qt_quick2_video_sink_class_init (GstQtQuick2VideoSinkClass *klass)
{
    GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
    gobject_class->finalize = gst_qt_quick2_video_sink_finalize;
    gobject_class->set_property = gst_qt_quick2_video_sink_set_property;
    gobject_class->get_property = gst_qt_quick2_video_sink_get_property;

    GstElementClass *element_class = GST_ELEMENT_CLASS(klass);
    element_class->change_state = gst_qt_quick2_video_sink_change_state;

    GstBaseSinkClass *base_sink_class = GST_BASE_SINK_CLASS(klass);
    base_sink_class->set_caps = gst_qt_quick2_video_sink_set_caps;

    GstVideoSinkClass *video_sink_class = GST_VIDEO_SINK_CLASS(klass);
    video_sink_class->show_frame = gst_qt_quick2_video_sink_show_frame;

    GstQtQuick2VideoSinkClass *qtquick2_class = GST_QT_QUICK2_VIDEO_SINK_CLASS(klass);
    qtquick2_class->update_node = gst_qt_quick2_video_sink_update_node;

    /**
     * GstQtQuick2VideoSink::pixel-aspect-ratio
     *
     * The pixel aspect ratio of the display device.
     **/
    g_object_class_install_property(gobject_class, PROP_PIXEL_ASPECT_RATIO,
        g_param_spec_string("pixel-aspect-ratio", "Pixel aspect ratio",
                            "The pixel aspect ratio of the display device",
                            "1/1", static_cast<GParamFlags>(G_PARAM_READWRITE)));

    /**
     * GstQtQuick2VideoSink::force-aspect-ratio
     *
     * If set to TRUE, the sink will scale the video respecting its original aspect ratio
     * and any remaining space will be filled with black.
     * If set to FALSE, the sink will scale the video to fit the whole drawing area.
     **/
    g_object_class_install_property(gobject_class, PROP_FORCE_ASPECT_RATIO,
        g_param_spec_boolean("force-aspect-ratio", "Force aspect ratio",
                             "When enabled, scaling will respect original aspect ratio",
                             FALSE, static_cast<GParamFlags>(G_PARAM_READWRITE)));

    g_object_class_install_property(gobject_class, PROP_CONTRAST,
        g_param_spec_int("contrast", "Contrast", "The contrast of the video",
                         -100, 100, 0, static_cast<GParamFlags>(G_PARAM_READWRITE)));

    g_object_class_install_property(gobject_class, PROP_BRIGHTNESS,
        g_param_spec_int("brightness", "Brightness", "The brightness of the video",
                         -100, 100, 0, static_cast<GParamFlags>(G_PARAM_READWRITE)));

    g_object_class_install_property(gobject_class, PROP_HUE,
        g_param_spec_int("hue", "Hue", "The hue of the video",
                         -100, 100, 0, static_cast<GParamFlags>(G_PARAM_READWRITE)));

    g_object_class_install_property(gobject_class, PROP_SATURATION,
        g_param_spec_int("saturation", "Saturation", "The saturation of the video",
                         -100, 100, 0, static_cast<GParamFlags>(G_PARAM_READWRITE)));


    /**
     * GstQtQuick2VideoSink::update-node
     * @node: The QSGNode to update
     * @x: The x coordinate of the target area rectangle
     * @y: The y coordinate of the target area rectangle
     * @width: The width of the target area rectangle
     * @height: The height of the target area rectangle
     * @returns: The updated QSGNode
     *
     * This is an action signal that you can call from your QQuickItem subclass
     * inside its updatePaintNode function to render the video. It takes a QSGNode*
     * and the item's area rectangle as arguments. You should arrange to call
     * this function to repaint the surface whenever the ::update signal is
     * emitted.
     *
     * Note that the x, y, width and height arguments are actually qreal.
     * This means that on architectures like ARM they will be float instead
     * of double. You should cast the arguments to qreal if they are not
     * already when emitting this signal.
     */
    s_signals[ACTION_UPDATE_NODE] =
        g_signal_new("update-node", G_TYPE_FROM_CLASS(klass),
            static_cast<GSignalFlags>(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
            G_STRUCT_OFFSET(GstQtQuick2VideoSinkClass, update_node),
            NULL, NULL,
            qRealIsDouble() ?
              g_cclosure_user_marshal_POINTER__POINTER_DOUBLE_DOUBLE_DOUBLE_DOUBLE :
              g_cclosure_user_marshal_POINTER__POINTER_FLOAT_FLOAT_FLOAT_FLOAT,
            G_TYPE_POINTER, 5,
            G_TYPE_POINTER, G_TYPE_QREAL, G_TYPE_QREAL, G_TYPE_QREAL, G_TYPE_QREAL);

    /**
     * GstQtQuick2VideoSink::update
     *
     * This signal is emitted when the surface should be repainted. It should
     * be connected to QQuickItem::update().
     */
    s_signals[SIGNAL_UPDATE] =
        g_signal_new("update", G_TYPE_FROM_CLASS(klass),
            G_SIGNAL_RUN_LAST,
            0, NULL, NULL,
            g_cclosure_marshal_VOID__VOID,
            G_TYPE_NONE, 0);

    g_type_class_add_private (klass, sizeof (GstQtQuick2VideoSinkPrivate));

    static GstStaticPadTemplate sink_pad_template =
        GST_STATIC_PAD_TEMPLATE("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
            GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (CAPS_FORMATS))
        );

    gst_element_class_add_pad_template(
            element_class, gst_static_pad_template_get(&sink_pad_template));

    gst_element_class_set_details_simple(element_class,
        "QtQuick2 video sink", "Sink/Video",
        "A video sink that can draw on a QQuickItem",
        "George Kiagiadakis <*****@*****.**>");
}
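The signals and properties documented above are meant to be driven from the QQuickItem side. Below is a minimal hedged sketch, not part of the original file, of how client code might set a property and emit the "update-node" action signal; the function name, the sink pointer, old_node and the geometry values are placeholders, and in a real application the emit happens inside QQuickItem::updatePaintNode () in C++.

#include <gst/gst.h>

/* Hedged sketch: "sink" is assumed to point at a qtquick2videosink instance. */
static gpointer
repaint_sketch (GstElement * sink, gpointer old_node,
    gdouble x, gdouble y, gdouble width, gdouble height)
{
  gpointer new_node = NULL;

  /* Optional: configure scaling behaviour through the properties above. */
  g_object_set (sink, "force-aspect-ratio", TRUE, NULL);

  /* Action signal: returns the updated scene graph node.  The four geometry
   * arguments are declared as qreal, so cast appropriately on builds where
   * qreal is float (e.g. some ARM targets). */
  g_signal_emit_by_name (sink, "update-node", old_node,
      x, y, width, height, &new_node);

  return new_node;
}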
Example #14
static GstFlowReturn gst_vtenc_encode_frame (GstVTEnc * self, GstBuffer * buf);
static VTStatus gst_vtenc_enqueue_buffer (void *data, int a2, int a3, int a4,
    CMSampleBufferRef sbuf, int a6, int a7);
static gboolean gst_vtenc_buffer_is_keyframe (GstVTEnc * self,
    CMSampleBufferRef sbuf);

static GstVTEncFrame *gst_vtenc_frame_new (GstBuffer * buf,
    GstVideoInfo * videoinfo);
static void gst_vtenc_frame_free (GstVTEncFrame * frame);

static void gst_pixel_buffer_release_cb (void *releaseRefCon,
    const void *dataPtr, size_t dataSize, size_t numberOfPlanes,
    const void *planeAddresses[]);

static GstStaticCaps sink_caps =
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ NV12, I420 }"));

static void
gst_vtenc_base_init (GstVTEncClass * klass)
{
  const GstVTEncoderDetails *codec_details =
      GST_VTENC_CLASS_GET_CODEC_DETAILS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  const int min_width = 1, max_width = G_MAXINT;
  const int min_height = 1, max_height = G_MAXINT;
  const int min_fps_n = 0, max_fps_n = G_MAXINT;
  const int min_fps_d = 1, max_fps_d = 1;
  GstPadTemplate *sink_template, *src_template;
  GstCaps *src_caps;
  gchar *longname, *description;
Example #15
GstCaps *
gst_dshow_new_video_caps (GstVideoFormat video_format, const gchar * name,
    GstCapturePinMediaType * pin_mediatype)
{
  GstCaps *video_caps = NULL;
  GstStructure *video_structure = NULL;
  gint min_w, max_w;
  gint min_h, max_h;
  gint min_fr, max_fr;

  /* raw video format */
  switch (video_format) {
    case GST_VIDEO_FORMAT_BGR:
      video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGR"));
      break;
    case GST_VIDEO_FORMAT_I420:
      video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("I420"));
      break;
    case GST_VIDEO_FORMAT_YUY2:
      video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("YUY2"));
      break;
    case GST_VIDEO_FORMAT_UYVY:
      video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("UYVY"));
      break;
    case GST_VIDEO_FORMAT_BGRx:
      video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGRx"));
      break;
    case GST_VIDEO_FORMAT_BGR16:
      video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGR16"));
      break;
    case GST_VIDEO_FORMAT_BGR15:
      video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGR15"));
      break;
    case GST_VIDEO_FORMAT_GRAY8:
      video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("GRAY8"));
      break;
    default:
      break;
  }

  /* other video format */
  if (!video_caps) {
    if (g_ascii_strncasecmp (name, "video/x-dv, systemstream=FALSE", 31) == 0) {
      video_caps = gst_caps_new_simple ("video/x-dv",
          "systemstream", G_TYPE_BOOLEAN, FALSE,
          "format", G_TYPE_STRING, "dvsd",
          NULL);
    } else if (g_ascii_strncasecmp (name, "video/x-dv, systemstream=TRUE", 31) == 0) {
      video_caps = gst_caps_new_simple ("video/x-dv",
          "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
      return video_caps;
    } else if (g_ascii_strncasecmp (name, "image/jpeg", 10) == 0) {
      video_caps = gst_caps_new_simple ("image/jpeg", NULL);
    } else if (g_ascii_strncasecmp (name, "video/x-h264", 12) == 0) {
      video_caps = gst_caps_new_simple ("video/x-h264", NULL);
    }
  }

  if (!video_caps)
    return NULL;

  video_structure = gst_caps_get_structure (video_caps, 0);

  /* Hope GST_TYPE_INT_RANGE_STEP will exist in future GStreamer releases, */
  /* because then we could use: */
  /* "width", GST_TYPE_INT_RANGE_STEP, video_default->minWidth, video_default->maxWidth, video_default->granularityWidth */
  /* instead of: */
  /* "width", GST_TYPE_INT_RANGE, video_default->minWidth, video_default->maxWidth */

  /* For the framerate we do not need a step (granularity) because */
  /* "The IAMStreamConfig::SetFormat method will set the frame rate to the closest */
  /* value that the filter supports", as stated in the VIDEO_STREAM_CONFIG_CAPS DirectShow docs */

  min_w = pin_mediatype->vscc.MinOutputSize.cx;
  max_w = pin_mediatype->vscc.MaxOutputSize.cx;
  min_h = pin_mediatype->vscc.MinOutputSize.cy;
  max_h = pin_mediatype->vscc.MaxOutputSize.cy;
  min_fr = (gint) (10000000 / pin_mediatype->vscc.MaxFrameInterval);
  max_fr = (gint) (10000000 / pin_mediatype->vscc.MinFrameInterval);

  if (min_w == max_w)
    gst_structure_set (video_structure, "width", G_TYPE_INT, min_w, NULL);
  else
    gst_structure_set (video_structure,
        "width", GST_TYPE_INT_RANGE, min_w, max_w, NULL);

  if (min_h == max_h)
    gst_structure_set (video_structure, "height", G_TYPE_INT, min_h, NULL);
  else
    gst_structure_set (video_structure,
        "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);

  if (min_fr == max_fr)
    gst_structure_set (video_structure, "framerate",
        GST_TYPE_FRACTION, min_fr, 1, NULL);
  else
    gst_structure_set (video_structure, "framerate",
        GST_TYPE_FRACTION_RANGE, min_fr, 1, max_fr, 1, NULL);

  return video_caps;
}
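The comment in the function above wishes for a GST_TYPE_INT_RANGE_STEP. In GStreamer 1.x a stepped integer range can be expressed through the GValue API instead; here is a minimal hedged sketch, not from the original file, where the helper name and the 16..1920/16 bounds are purely illustrative stand-ins for the device's width granularity.

#include <gst/gst.h>

/* Hedged sketch: setting "width" as a stepped integer range via GValue. */
static void
set_stepped_width_sketch (GstStructure * video_structure)
{
  GValue range = G_VALUE_INIT;

  g_value_init (&range, GST_TYPE_INT_RANGE);
  /* 16..1920 in steps of 16; in practice these would come from the
   * device's VIDEO_STREAM_CONFIG_CAPS. */
  gst_value_set_int_range_step (&range, 16, 1920, 16);
  gst_structure_set_value (video_structure, "width", &range);
  g_value_unset (&range);
}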
Example #16
  return TRUE;

map_fail:
  {
    GST_ERROR_OBJECT (sink, "Could not map incoming video frame");
    return FALSE;
  }
}

static CoglGstRenderer rgb24_renderer =
{
  "RGB 24",
  COGL_GST_RGB24,
  0,
  GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGB, BGR }")),
  1, /* n_layers */
  cogl_gst_rgb_setup_pipeline,
  cogl_gst_rgb24_upload,
};

static CoglBool
cogl_gst_rgb32_upload (CoglGstVideoSink *sink,
                       GstBuffer *buffer)
{
  CoglGstVideoSinkPrivate *priv = sink->priv;
  CoglPixelFormat format;
  GstVideoFrame frame;

  if (priv->bgr)
    format = COGL_PIXEL_FORMAT_BGRA_8888;
#ifdef HAVE_CONFIG_H
#  include <config.h>
#endif

#include <gst/gst.h>

#include "gstopencvutils.h"
#include "gstcvequalizehist.h"

GST_DEBUG_CATEGORY_STATIC (gst_cv_equalize_hist_debug);
#define GST_CAT_DEFAULT gst_cv_equalize_hist_debug

static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY8")));

static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY8")));

G_DEFINE_TYPE (GstCvEqualizeHist, gst_cv_equalize_hist,
    GST_TYPE_OPENCV_VIDEO_FILTER);

static GstFlowReturn gst_cv_equalize_hist_transform (GstOpencvVideoFilter *
    filter, GstBuffer * buf, IplImage * img, GstBuffer * outbuf,
    IplImage * outimg);


static void
Example #18
#ifdef HAVE_CONFIG_H
#  include <config.h>
#endif

#include <gst/gst.h>

#include "gstopencvutils.h"
#include "gstcvsobel.h"

GST_DEBUG_CATEGORY_STATIC (gst_cv_sobel_debug);
#define GST_CAT_DEFAULT gst_cv_sobel_debug

static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY8"))
    );


#if G_BYTE_ORDER == G_BIG_ENDIAN
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY16_BE"))
    );
#else
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("GRAY16_LE"))
    );
Example #19
};

static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), "
        "format = (string) RGBA, "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE ", "
        "framerate = " GST_VIDEO_FPS_RANGE ","
        "texture-target = (string) 2D"
        "; "
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META,
            "RGBA")
        "; " GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS))
    );

static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%u",
    GST_PAD_SINK,
    GST_PAD_REQUEST,
    GST_STATIC_CAPS ("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), "
        "format = (string) RGBA, "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE ", "
        "framerate = " GST_VIDEO_FPS_RANGE ","
        "texture-target = (string) 2D"
        "; "
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META,
            "RGBA")
    GstBuffer * buffer);

enum
{
  PROP_0,
  PROP_CHANNEL
};

#define DEFAULT_CHANNEL ("default")

/* pad templates */
static GstStaticPadTemplate gst_inter_video_sink_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (GST_VIDEO_FORMATS_ALL))
    );


/* class initialization */
G_DEFINE_TYPE (GstInterVideoSink, gst_inter_video_sink, GST_TYPE_VIDEO_SINK);

static void
gst_inter_video_sink_class_init (GstInterVideoSinkClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseSinkClass *base_sink_class = GST_BASE_SINK_CLASS (klass);
  GstVideoSinkClass *video_sink_class = GST_VIDEO_SINK_CLASS (klass);

  GST_DEBUG_CATEGORY_INIT (gst_inter_video_sink_debug_category,
Example #21
#include "gstgoom.h"
#include "goom_core.h"

GST_DEBUG_CATEGORY_STATIC (goom2k1_debug);
#define GST_CAT_DEFAULT goom2k1_debug

#define DEFAULT_WIDTH  320
#define DEFAULT_HEIGHT 240
#define DEFAULT_FPS_N  25
#define DEFAULT_FPS_D  1

static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
#if G_BYTE_ORDER == G_BIG_ENDIAN
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("xRGB"))
#else
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("BGRx"))
#endif
    );

static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",    /* the name of the pads */
    GST_PAD_SINK,               /* type of the pad */
    GST_PAD_ALWAYS,             /* ALWAYS/SOMETIMES */
    GST_STATIC_CAPS ("audio/x-raw, "
        "format = (string) " GST_AUDIO_NE (S16) ", "
        "rate = (int) [ 8000, 96000 ], "
        "channels = (int) 1, "
        "layout = (string) interleaved; "
        "audio/x-raw, "
        "format = (string) " GST_AUDIO_NE (S16) ", "
Example #22
#endif
    GST_CAPS_CODEC("video/x-wmv")
#if USE_VP8_DECODER
    GST_CAPS_CODEC("video/x-vp8")
#endif
#if USE_VP9_DECODER
    GST_CAPS_CODEC("video/x-vp9")
#endif
    ;

static const char gst_vaapidecode_src_caps_str[] =
    GST_VAAPI_MAKE_SURFACE_CAPS ";"
#if (USE_GLX || USE_EGL)
    GST_VAAPI_MAKE_GLTEXUPLOAD_CAPS ";"
#endif
    GST_VIDEO_CAPS_MAKE("{ NV12, I420, YV12, P010_10LE }");

static GstStaticPadTemplate gst_vaapidecode_src_factory =
    GST_STATIC_PAD_TEMPLATE(
        "src",
        GST_PAD_SRC,
        GST_PAD_ALWAYS,
        GST_STATIC_CAPS(gst_vaapidecode_src_caps_str));
/* *INDENT-ON* */

typedef struct _GstVaapiDecoderMap GstVaapiDecoderMap;
struct _GstVaapiDecoderMap
{
  guint codec;
  guint rank;
  const gchar *name;
#define GST_VIDEO_SIZE_RANGE "(int) [ 1, 32767]"

/* FIXME: add v210 support
 * FIXME: add v216 support
 * FIXME: add UYVP support
 * FIXME: add A420 support
 * FIXME: add YUV9 support
 * FIXME: add YVU9 support
 * FIXME: add IYU1 support
 * FIXME: add r210 support
 */

#define GST_VIDEO_FORMATS GST_VIDEO_FORMATS_ALL

static GstStaticCaps gst_video_scale_format_caps =
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (GST_VIDEO_FORMATS) ";"
                     GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS));

#define GST_TYPE_VIDEO_SCALE_METHOD (gst_video_scale_method_get_type())
static GType
gst_video_scale_method_get_type (void)
{
    static GType video_scale_method_type = 0;

    static const GEnumValue video_scale_methods[] = {
        {GST_VIDEO_SCALE_NEAREST, "Nearest Neighbour", "nearest-neighbour"},
        {GST_VIDEO_SCALE_BILINEAR, "Bilinear (2-tap)", "bilinear"},
        {GST_VIDEO_SCALE_4TAP, "4-tap Sinc", "4-tap"},
        {GST_VIDEO_SCALE_LANCZOS, "Lanczos", "lanczos"},
        {GST_VIDEO_SCALE_BILINEAR2, "Bilinear (multi-tap)", "bilinear2"},
        {GST_VIDEO_SCALE_SINC, "Sinc (multi-tap)", "sinc"},
Example #24
 */

#ifdef HAVE_CONFIG_H
#  include <config.h>
#endif

#include "gstcvsobel.h"
#include <opencv2/imgproc/imgproc_c.h>

GST_DEBUG_CATEGORY_STATIC (gst_cv_sobel_debug);
#define GST_CAT_DEFAULT gst_cv_sobel_debug

static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
    );

static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("RGB"))
    );

/* Filter signals and args */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};
enum
Example #25
enum
{
  PROP_0,
  PROP_CONTRAST,
  PROP_BRIGHTNESS,
  PROP_HUE,
  PROP_SATURATION
};

static GstStaticPadTemplate gst_video_balance_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
            "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, "
            "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, "
            "I420, YV12, IYUV, Y41B, NV12, NV21 }"))
    );

static GstStaticPadTemplate gst_video_balance_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
            "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, "
            "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, "
            "I420, YV12, IYUV, Y41B, NV12, NV21 }"))
    );

static void gst_video_balance_colorbalance_init (GstColorBalanceInterface *
    iface);
Example #26
    GstQuery * query);

#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GRAY16 "GRAY16_LE"
#define YUV10 "Y444_10LE, I422_10LE, I420_10LE"
#else
#define GRAY16 "GRAY16_BE"
#define YUV10 "Y444_10BE, I422_10BE, I420_10BE"
#endif

static GstStaticPadTemplate gst_openjpeg_enc_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ ARGB64, ARGB, xRGB, "
            "AYUV64, " YUV10 ", "
            "AYUV, Y444, Y42B, I420, Y41B, YUV9, " "GRAY8, " GRAY16 " }"))
    );

static GstStaticPadTemplate gst_openjpeg_enc_src_template =
    GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("image/x-j2c, "
        "width = (int) [1, MAX], "
        "height = (int) [1, MAX], "
        "num-components = (int) [1, 4], "
        "colorspace = (string) { sRGB, sYUV, GRAY }; "
        "image/x-jpc, "
        "width = (int) [1, MAX], "
        "height = (int) [1, MAX], "
Example #27
		{0, NULL, NULL}
	};

	if (!type)
	{
		type = g_enum_register_static("ConvolutionKernelType", KernelType);
	}
	return type;
}


static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE (
	"sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE("{ RGB, GRAY8 }"))
);

static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE (
	"src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE("{ RGB, GRAY8 }"))
);

static void ConvolutionKernelParse(Convolution *this, const gchar *kernel)
{
	//Free existing Kernel.
	g_free(this->kernel.data);
	this->kernel.data = 0;
	this->kernel.width = 0;
Example #28
enum
{
  PROP_0,
  PROP_MODE
};

#define DEFAULT_MODE GST_DEINTERLACE_MODE_AUTO

/* pad templates */

static GstStaticPadTemplate gst_yadif_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{Y42B,I420,Y444}")
        ",interlace-mode=(string){interleaved,mixed,progressive}")
    );

static GstStaticPadTemplate gst_yadif_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{Y42B,I420,Y444}")
        ",interlace-mode=(string)progressive")
    );

#define GST_TYPE_DEINTERLACE_MODES (gst_deinterlace_modes_get_type ())
static GType
gst_deinterlace_modes_get_type (void)
{
Example #29
 * display it.
 * </refsect2>
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstgdiscreencapsrc.h"
#include <gst/video/video.h>

GST_DEBUG_CATEGORY_STATIC (gdiscreencapsrc_debug);

static GstStaticPadTemplate src_template =
GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("BGR")));

#define gst_gdiscreencapsrc_parent_class parent_class
G_DEFINE_TYPE (GstGDIScreenCapSrc, gst_gdiscreencapsrc, GST_TYPE_PUSH_SRC);

enum
{
  PROP_0,
  PROP_MONITOR,
  PROP_SHOW_CURSOR,
  PROP_X_POS,
  PROP_Y_POS,
  PROP_WIDTH,
  PROP_HEIGHT
};
Example #30
{
  ARG_0
};

static GstStaticPadTemplate y4mencode_src_factory =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-yuv4mpeg, " "y4mversion = (int) 2")
    );

static GstStaticPadTemplate y4mencode_sink_factory =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ IYUV, I420, Y42B, Y41B, Y444 }"))
    );


static void gst_y4m_encode_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_y4m_encode_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);

static void gst_y4m_encode_reset (GstY4mEncode * filter);

static gboolean gst_y4m_encode_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event);
static GstFlowReturn gst_y4m_encode_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buf);
static GstStateChangeReturn gst_y4m_encode_change_state (GstElement * element,