Code example #1
File: gstvdpsink.c  Project: LCW523/gst-plugins-bad
static gboolean
gst_vdp_sink_setcaps (GstBaseSink * bsink, GstCaps * caps)
{
  VdpSink *vdp_sink;
  GstCaps *allowed_caps;
  gboolean ret = TRUE;
  GstStructure *structure;
  GstCaps *intersection;
  gint new_width, new_height;
  const GValue *fps;

  vdp_sink = GST_VDP_SINK (bsink);

  GST_OBJECT_LOCK (vdp_sink);
  if (!vdp_sink->device) {
    /* don't leak the object lock on the early return */
    GST_OBJECT_UNLOCK (vdp_sink);
    return FALSE;
  }
  GST_OBJECT_UNLOCK (vdp_sink);

  allowed_caps = gst_pad_get_caps (GST_BASE_SINK_PAD (bsink));
  GST_DEBUG_OBJECT (vdp_sink,
      "sinkconnect possible caps %" GST_PTR_FORMAT " with given caps %"
      GST_PTR_FORMAT, allowed_caps, caps);

  /* We intersect those caps with our template to make sure they are correct */
  intersection = gst_caps_intersect (allowed_caps, caps);
  gst_caps_unref (allowed_caps);

  GST_DEBUG_OBJECT (vdp_sink, "intersection returned %" GST_PTR_FORMAT,
      intersection);
  if (gst_caps_is_empty (intersection)) {
    gst_caps_unref (intersection);
    return FALSE;
  }

  gst_caps_unref (intersection);

  structure = gst_caps_get_structure (caps, 0);

  ret &= gst_structure_get_int (structure, "width", &new_width);
  ret &= gst_structure_get_int (structure, "height", &new_height);
  fps = gst_structure_get_value (structure, "framerate");
  ret &= (fps != NULL);
  if (!ret)
    return FALSE;

  GST_VIDEO_SINK_WIDTH (vdp_sink) = new_width;
  GST_VIDEO_SINK_HEIGHT (vdp_sink) = new_height;
  vdp_sink->fps_n = gst_value_get_fraction_numerator (fps);
  vdp_sink->fps_d = gst_value_get_fraction_denominator (fps);

  gst_vdp_buffer_pool_set_caps (vdp_sink->bpool, caps);

  /* Notify application to set xwindow id now */
  g_mutex_lock (vdp_sink->flow_lock);
  if (!vdp_sink->window) {
    g_mutex_unlock (vdp_sink->flow_lock);
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (vdp_sink));
  } else {
    g_mutex_unlock (vdp_sink->flow_lock);
  }

  /* Creating our window and our image */
  if (GST_VIDEO_SINK_WIDTH (vdp_sink) <= 0
      || GST_VIDEO_SINK_HEIGHT (vdp_sink) <= 0) {
    GST_ELEMENT_ERROR (vdp_sink, CORE, NEGOTIATION, (NULL),
        ("Invalid image size."));
    return FALSE;
  }

  g_mutex_lock (vdp_sink->flow_lock);
  if (!vdp_sink->window) {
    vdp_sink->window = gst_vdp_sink_window_new (vdp_sink,
        GST_VIDEO_SINK_WIDTH (vdp_sink), GST_VIDEO_SINK_HEIGHT (vdp_sink));
  }
  g_mutex_unlock (vdp_sink->flow_lock);

  return TRUE;
}
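The setcaps handlers in this collection all read the frame rate the same way: take the first structure of the caps, fetch the "framerate" field, check that it holds a fraction, and split it with gst_value_get_fraction_numerator()/gst_value_get_fraction_denominator(). A minimal sketch of that shared pattern (GStreamer 0.10 style; the helper name parse_framerate is made up for illustration and is not part of any of the projects listed here):

static gboolean
parse_framerate (GstCaps * caps, gint * fps_n, gint * fps_d)
{
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  const GValue *fps = gst_structure_get_value (structure, "framerate");

  /* the field may be missing, or present but not a fraction */
  if (fps == NULL || !GST_VALUE_HOLDS_FRACTION (fps))
    return FALSE;

  *fps_n = gst_value_get_fraction_numerator (fps);
  *fps_d = gst_value_get_fraction_denominator (fps);
  return TRUE;
}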
Code example #2
static void
print_element_properties_info (GstElement * element)
{
  GParamSpec **property_specs;
  guint num_properties, i;
  gboolean readable;
  gboolean first_flag;

  property_specs = g_object_class_list_properties
      (G_OBJECT_GET_CLASS (element), &num_properties);
  n_print ("\n");
  n_print ("Element Properties:\n");

  for (i = 0; i < num_properties; i++) {
    GValue value = { 0, };
    GParamSpec *param = property_specs[i];

    readable = FALSE;

    g_value_init (&value, param->value_type);

    n_print ("  %-20s: %s\n", g_param_spec_get_name (param),
        g_param_spec_get_blurb (param));

    first_flag = TRUE;
    n_print ("%-23.23s flags: ", "");
    if (param->flags & G_PARAM_READABLE) {
      g_object_get_property (G_OBJECT (element), param->name, &value);
      readable = TRUE;
      if (!first_flag)
        g_print (", ");
      else
        first_flag = FALSE;
      g_print (_("readable"));
    }
    if (param->flags & G_PARAM_WRITABLE) {
      if (!first_flag)
        g_print (", ");
      else
        first_flag = FALSE;
      g_print (_("writable"));
    }
    if (param->flags & GST_PARAM_CONTROLLABLE) {
      if (!first_flag)
        g_print (", ");
      else
        first_flag = FALSE;
      g_print (_("controllable"));
    }
    n_print ("\n");

    switch (G_VALUE_TYPE (&value)) {
      case G_TYPE_STRING:
      {
        GParamSpecString *pstring = G_PARAM_SPEC_STRING (param);

        n_print ("%-23.23s String. ", "");

        if (pstring->default_value == NULL)
          g_print ("Default: null ");
        else
          g_print ("Default: \"%s\" ", pstring->default_value);

        if (readable) {
          const char *string_val = g_value_get_string (&value);

          if (string_val == NULL)
            g_print ("Current: null");
          else
            g_print ("Current: \"%s\"", string_val);
        }
        break;
      }
      case G_TYPE_BOOLEAN:
      {
        GParamSpecBoolean *pboolean = G_PARAM_SPEC_BOOLEAN (param);

        n_print ("%-23.23s Boolean. ", "");
        g_print ("Default: %s ", (pboolean->default_value ? "true" : "false"));
        if (readable)
          g_print ("Current: %s",
              (g_value_get_boolean (&value) ? "true" : "false"));
        break;
      }
      case G_TYPE_ULONG:
      {
        GParamSpecULong *pulong = G_PARAM_SPEC_ULONG (param);

        n_print ("%-23.23s Unsigned Long. ", "");
        g_print ("Range: %lu - %lu Default: %lu ",
            pulong->minimum, pulong->maximum, pulong->default_value);
        if (readable)
          g_print ("Current: %lu", g_value_get_ulong (&value));
        break;
      }
      case G_TYPE_LONG:
      {
        GParamSpecLong *plong = G_PARAM_SPEC_LONG (param);

        n_print ("%-23.23s Long. ", "");
        g_print ("Range: %ld - %ld Default: %ld ",
            plong->minimum, plong->maximum, plong->default_value);
        if (readable)
          g_print ("Current: %ld", g_value_get_long (&value));
        break;
      }
      case G_TYPE_UINT:
      {
        GParamSpecUInt *puint = G_PARAM_SPEC_UINT (param);

        n_print ("%-23.23s Unsigned Integer. ", "");
        g_print ("Range: %u - %u Default: %u ",
            puint->minimum, puint->maximum, puint->default_value);
        if (readable)
          g_print ("Current: %u", g_value_get_uint (&value));
        break;
      }
      case G_TYPE_INT:
      {
        GParamSpecInt *pint = G_PARAM_SPEC_INT (param);

        n_print ("%-23.23s Integer. ", "");
        g_print ("Range: %d - %d Default: %d ",
            pint->minimum, pint->maximum, pint->default_value);
        if (readable)
          g_print ("Current: %d", g_value_get_int (&value));
        break;
      }
      case G_TYPE_UINT64:
      {
        GParamSpecUInt64 *puint64 = G_PARAM_SPEC_UINT64 (param);

        n_print ("%-23.23s Unsigned Integer64. ", "");
        g_print ("Range: %" G_GUINT64_FORMAT " - %" G_GUINT64_FORMAT
            " Default: %" G_GUINT64_FORMAT " ",
            puint64->minimum, puint64->maximum, puint64->default_value);
        if (readable)
          g_print ("Current: %" G_GUINT64_FORMAT, g_value_get_uint64 (&value));
        break;
      }
      case G_TYPE_INT64:
      {
        GParamSpecInt64 *pint64 = G_PARAM_SPEC_INT64 (param);

        n_print ("%-23.23s Integer64. ", "");
        g_print ("Range: %" G_GINT64_FORMAT " - %" G_GINT64_FORMAT
            " Default: %" G_GINT64_FORMAT " ",
            pint64->minimum, pint64->maximum, pint64->default_value);
        if (readable)
          g_print ("Current: %" G_GINT64_FORMAT, g_value_get_int64 (&value));
        break;
      }
      case G_TYPE_FLOAT:
      {
        GParamSpecFloat *pfloat = G_PARAM_SPEC_FLOAT (param);

        n_print ("%-23.23s Float. ", "");
        g_print ("Range: %15.7g - %15.7g Default: %15.7g ",
            pfloat->minimum, pfloat->maximum, pfloat->default_value);
        if (readable)
          g_print ("Current: %15.7g", g_value_get_float (&value));
        break;
      }
      case G_TYPE_DOUBLE:
      {
        GParamSpecDouble *pdouble = G_PARAM_SPEC_DOUBLE (param);

        n_print ("%-23.23s Double. ", "");
        g_print ("Range: %15.7g - %15.7g Default: %15.7g ",
            pdouble->minimum, pdouble->maximum, pdouble->default_value);
        if (readable)
          g_print ("Current: %15.7g", g_value_get_double (&value));
        break;
      }
      default:
        if (param->value_type == GST_TYPE_CAPS) {
          const GstCaps *caps = gst_value_get_caps (&value);

          if (!caps)
            n_print ("%-23.23s Caps (NULL)", "");
          else {
            print_caps (caps, "                           ");
          }
        } else if (G_IS_PARAM_SPEC_ENUM (param)) {
          GParamSpecEnum *penum = G_PARAM_SPEC_ENUM (param);
          GEnumValue *values;
          guint j = 0;
          gint enum_value;
          const gchar *def_val_nick = "", *cur_val_nick = "";

          values = G_ENUM_CLASS (g_type_class_ref (param->value_type))->values;
          enum_value = g_value_get_enum (&value);

          while (values[j].value_name) {
            if (values[j].value == enum_value)
              cur_val_nick = values[j].value_nick;
            if (values[j].value == penum->default_value)
              def_val_nick = values[j].value_nick;
            j++;
          }

          n_print
              ("%-23.23s Enum \"%s\" Default: %d, \"%s\" Current: %d, \"%s\"",
              "", g_type_name (G_VALUE_TYPE (&value)), penum->default_value,
              def_val_nick, enum_value, cur_val_nick);

          j = 0;
          while (values[j].value_name) {
            g_print ("\n");
            if (_name)
              g_print ("%s", _name);
            g_print ("%-23.23s    (%d): %-16s - %s", "",
                values[j].value, values[j].value_nick, values[j].value_name);
            j++;
          }
          /* g_type_class_unref (ec); */
        } else if (G_IS_PARAM_SPEC_FLAGS (param)) {
          GParamSpecFlags *pflags = G_PARAM_SPEC_FLAGS (param);
          GFlagsValue *values;
          guint j = 0;
          gint flags_value;
          GString *cur_flags = NULL, *def_flags = NULL;

          values = G_FLAGS_CLASS (g_type_class_ref (param->value_type))->values;
          flags_value = g_value_get_flags (&value);

          while (values[j].value_name) {
            if (values[j].value & flags_value) {
              if (cur_flags) {
                g_string_append_printf (cur_flags, " | %s",
                    values[j].value_nick);
              } else {
                cur_flags = g_string_new (values[j].value_nick);
              }
            }
            if (values[j].value & pflags->default_value) {
              if (def_flags) {
                g_string_append_printf (def_flags, " | %s",
                    values[j].value_nick);
              } else {
                def_flags = g_string_new (values[j].value_nick);
              }
            }
            j++;
          }

          n_print
              ("%-23.23s Flags \"%s\" Default: 0x%08x, \"%s\" Current: 0x%08x, \"%s\"",
              "", g_type_name (G_VALUE_TYPE (&value)), pflags->default_value,
              (def_flags ? def_flags->str : "(none)"), flags_value,
              (cur_flags ? cur_flags->str : "(none)"));

          j = 0;
          while (values[j].value_name) {
            g_print ("\n");
            if (_name)
              g_print ("%s", _name);
            g_print ("%-23.23s    (0x%08x): %-16s - %s", "",
                values[j].value, values[j].value_nick, values[j].value_name);
            j++;
          }

          if (cur_flags)
            g_string_free (cur_flags, TRUE);
          if (def_flags)
            g_string_free (def_flags, TRUE);
        } else if (G_IS_PARAM_SPEC_OBJECT (param)) {
          n_print ("%-23.23s Object of type \"%s\"", "",
              g_type_name (param->value_type));
        } else if (G_IS_PARAM_SPEC_BOXED (param)) {
          n_print ("%-23.23s Boxed pointer of type \"%s\"", "",
              g_type_name (param->value_type));
        } else if (G_IS_PARAM_SPEC_POINTER (param)) {
          if (param->value_type != G_TYPE_POINTER) {
            n_print ("%-23.23s Pointer of type \"%s\".", "",
                g_type_name (param->value_type));
          } else {
            n_print ("%-23.23s Pointer.", "");
          }
        } else if (param->value_type == G_TYPE_VALUE_ARRAY) {
          n_print ("%-23.23s Array of GValues", "");
        } else if (GST_IS_PARAM_SPEC_FRACTION (param)) {
          GstParamSpecFraction *pfraction = GST_PARAM_SPEC_FRACTION (param);

          n_print ("%-23.23s Fraction. ", "");

          g_print ("Range: %d/%d - %d/%d Default: %d/%d ",
              pfraction->min_num, pfraction->min_den,
              pfraction->max_num, pfraction->max_den,
              pfraction->def_num, pfraction->def_den);
          if (readable)
            g_print ("Current: %d/%d",
                gst_value_get_fraction_numerator (&value),
                gst_value_get_fraction_denominator (&value));

        } else {
          n_print ("%-23.23s Unknown type %ld \"%s\"", "", param->value_type,
              g_type_name (param->value_type));
        }
        break;
    }
    if (!readable)
      g_print (" Write only\n");
    else
      g_print ("\n");

    g_value_reset (&value);
  }
  if (num_properties == 0)
    n_print ("  none\n");

  g_free (property_specs);
}
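print_element_properties_info() above only reads fraction-typed properties. Writing one from application code is easiest through an explicit GValue, as sketched below; the element variable and its "framerate" property are assumptions for illustration, not something gst-inspect itself does:

  GValue fraction = { 0, };

  g_value_init (&fraction, GST_TYPE_FRACTION);
  gst_value_set_fraction (&fraction, 30, 1);    /* 30/1 fps */
  g_object_set_property (G_OBJECT (element), "framerate", &fraction);
  g_value_unset (&fraction);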
Code example #3
File: gstvdpsink.c  Project: LCW523/gst-plugins-bad
/* This function calculates the pixel aspect ratio */
static GValue *
gst_vdp_sink_calculate_par (Display * display)
{
  static const gint par[][2] = {
    {1, 1},                     /* regular screen */
    {16, 15},                   /* PAL TV */
    {11, 10},                   /* 525 line Rec.601 video */
    {54, 59},                   /* 625 line Rec.601 video */
    {64, 45},                   /* 1280x1024 on 16:9 display */
    {5, 3},                     /* 1280x1024 on 4:3 display */
    {4, 3}                      /*  800x600 on 16:9 display */
  };
  gint screen_num;
  gint width, height;
  gint widthmm, heightmm;
  gint i;
  gint index;
  gdouble ratio;
  gdouble delta;
  GValue *par_value;

#define DELTA(idx) (ABS (ratio - ((gdouble) par[idx][0] / par[idx][1])))

  screen_num = DefaultScreen (display);
  width = DisplayWidth (display, screen_num);
  height = DisplayHeight (display, screen_num);
  widthmm = DisplayWidthMM (display, screen_num);
  heightmm = DisplayHeightMM (display, screen_num);

  /* first calculate the "real" ratio based on the X values;
   * which is the "physical" w/h divided by the w/h in pixels of the display */
  ratio = (gdouble) (widthmm * height)
      / (heightmm * width);

  /* DirectFB's X in 720x576 reports the physical dimensions wrong, so
   * override here */
  if (width == 720 && height == 576) {
    ratio = 4.0 * 576 / (3.0 * 720);
  }
  GST_DEBUG ("calculated pixel aspect ratio: %f", ratio);

  /* now find the one from par[][2] with the lowest delta to the real one */
  delta = DELTA (0);
  index = 0;

  for (i = 1; i < sizeof (par) / (sizeof (gint) * 2); ++i) {
    gdouble this_delta = DELTA (i);

    if (this_delta < delta) {
      index = i;
      delta = this_delta;
    }
  }

  GST_DEBUG ("Decided on index %d (%d/%d)", index,
      par[index][0], par[index][1]);

  par_value = g_new0 (GValue, 1);
  g_value_init (par_value, GST_TYPE_FRACTION);
  gst_value_set_fraction (par_value, par[index][0], par[index][1]);
  GST_DEBUG ("set X11 PAR to %d/%d",
      gst_value_get_fraction_numerator (par_value),
      gst_value_get_fraction_denominator (par_value));

  return par_value;
}
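gst_vdp_sink_calculate_par() hands back a heap-allocated GValue holding a GST_TYPE_FRACTION, so the caller owns both the value contents and the GValue itself. A sketch of how a caller might consume and release it (the caller shown here is hypothetical):

  GValue *par = gst_vdp_sink_calculate_par (display);
  gint par_n = gst_value_get_fraction_numerator (par);
  gint par_d = gst_value_get_fraction_denominator (par);

  /* ... use par_n/par_d, e.g. when fixating caps ... */

  g_value_unset (par);          /* clear the fraction */
  g_free (par);                 /* the GValue was allocated with g_new0() */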
Code example #4
File: gsty4mencode.c  Project: jwzl/ossbuild
static gboolean
gst_y4m_encode_setcaps (GstPad * pad, GstCaps * vscaps)
{
    GstY4mEncode *filter;
    GstStructure *structure;
    gboolean res;
    gint w, h;
    guint32 fourcc;
    const GValue *fps, *par, *interlaced;

    filter = GST_Y4M_ENCODE (GST_PAD_PARENT (pad));

    structure = gst_caps_get_structure (vscaps, 0);

    res = gst_structure_get_int (structure, "width", &w);
    res &= gst_structure_get_int (structure, "height", &h);
    res &= ((fps = gst_structure_get_value (structure, "framerate")) != NULL);
    res &= gst_structure_get_fourcc (structure, "format", &fourcc);

    switch (fourcc) {             /* Translate fourcc to Y4M colorspace code */
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
    case GST_MAKE_FOURCC ('I', 'Y', 'U', 'V'):
        filter->colorspace = "420";
        break;
    case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
        filter->colorspace = "422";
        break;
    case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
        filter->colorspace = "411";
        break;
    case GST_MAKE_FOURCC ('Y', '4', '4', '4'):
        filter->colorspace = "444";
        break;
    default:
        res = FALSE;
        break;
    }

    if (!res || w <= 0 || h <= 0 || !GST_VALUE_HOLDS_FRACTION (fps))
        return FALSE;

    /* optional interlaced info */
    interlaced = gst_structure_get_value (structure, "interlaced");

    /* optional par info */
    par = gst_structure_get_value (structure, "pixel-aspect-ratio");

    filter->width = w;
    filter->height = h;
    filter->fps_num = gst_value_get_fraction_numerator (fps);
    filter->fps_den = gst_value_get_fraction_denominator (fps);
    if ((par != NULL) && GST_VALUE_HOLDS_FRACTION (par)) {
        filter->par_num = gst_value_get_fraction_numerator (par);
        filter->par_den = gst_value_get_fraction_denominator (par);
    } else {                      /* indicates unknown */
        filter->par_num = 0;
        filter->par_den = 0;
    }
    if ((interlaced != NULL) && G_VALUE_HOLDS (interlaced, G_TYPE_BOOLEAN)) {
        filter->interlaced = g_value_get_boolean (interlaced);
    } else {
        /* assume progressive if no interlaced property in caps */
        filter->interlaced = FALSE;
    }
    /* the template caps will do for the src pad, should always accept */
    return gst_pad_set_caps (filter->srcpad,
                             gst_static_pad_template_get_caps (&y4mencode_src_factory));
}
Code example #5
bool ofGstVideoPlayer::allocate(){
	if(bIsAllocated){
		return true;
	}

	guint64 durationNanos = videoUtils.getDurationNanos();

	nFrames		  = 0;
	if(GstPad* pad = gst_element_get_static_pad(videoUtils.getSink(), "sink")){
#if GST_VERSION_MAJOR==0
		int width,height;
		if(gst_video_get_size(GST_PAD(pad), &width, &height)){
			if(!videoUtils.allocate(width,height,internalPixelFormat)) return false;
		}else{
			ofLogError("ofGstVideoPlayer") << "allocate(): couldn't query width and height";
			return false;
		}

		const GValue *framerate = gst_video_frame_rate(pad);
		fps_n=0;
		fps_d=0;
		if(framerate && GST_VALUE_HOLDS_FRACTION (framerate)){
			fps_n = gst_value_get_fraction_numerator (framerate);
			fps_d = gst_value_get_fraction_denominator (framerate);
			nFrames = (float)(durationNanos / (float)GST_SECOND) * (float)fps_n/(float)fps_d;
			ofLogVerbose("ofGstVideoPlayer") << "allocate(): framerate: " << fps_n << "/" << fps_d;
		}else{
			ofLogWarning("ofGstVideoPlayer") << "allocate(): cannot get framerate, frame seek won't work";
		}
		bIsAllocated = true;
#else
		if(GstCaps *caps = gst_pad_get_current_caps (GST_PAD (pad))){
			GstVideoInfo info;
			gst_video_info_init (&info);
			if (gst_video_info_from_caps (&info, caps)){
				ofPixelFormat format = ofGstVideoUtils::getOFFormat(GST_VIDEO_INFO_FORMAT(&info));
				if(format!=internalPixelFormat){
					ofLogVerbose("ofGstVideoPlayer") << "allocating as " << info.width << "x" << info.height << " " << info.finfo->description << " " << info.finfo->name;
					internalPixelFormat = format;
				}
				if(!videoUtils.allocate(info.width,info.height,format)) return false;
			}else{
				ofLogError("ofGstVideoPlayer") << "allocate(): couldn't query width and height";
				return false;
			}

			fps_n = info.fps_n;
			fps_d = info.fps_d;
			nFrames = (float)(durationNanos / (float)GST_SECOND) * (float)fps_n/(float)fps_d;
			gst_caps_unref(caps);
			bIsAllocated = true;
		}else{
			ofLogError("ofGstVideoPlayer") << "allocate(): cannot get pipeline caps";
			bIsAllocated = false;
		}
#endif
		gst_object_unref(GST_OBJECT(pad));
	}else{
		ofLogError("ofGstVideoPlayer") << "allocate(): cannot get sink pad";
		bIsAllocated = false;
	}
	return bIsAllocated;
}
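The frame-count estimate above goes through floating point. The same value can be computed with GStreamer's overflow-safe integer scaling helper; a small sketch reusing the function's own durationNanos, fps_n and fps_d variables (this is an alternative, not what ofGstVideoPlayer actually does):

  guint64 n_frames = 0;

  if (fps_n > 0 && fps_d > 0)
    n_frames = gst_util_uint64_scale (durationNanos, fps_n,
        (guint64) fps_d * GST_SECOND);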
Code example #6
File: gstdivxenc.c  Project: ChinnaSuhas/ossbuild
static gboolean
gst_divxenc_setcaps (GstPad * pad, GstCaps * caps)
{
  GstDivxEnc *divxenc;
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  gint w, h;
  const GValue *fps;
  guint32 fourcc;
  guint32 divx_cs;
  gint bitcnt = 0;
  gboolean ret = FALSE;

  divxenc = GST_DIVXENC (gst_pad_get_parent (pad));

  /* if there's something old around, remove it */
  gst_divxenc_unset (divxenc);

  gst_structure_get_int (structure, "width", &w);
  gst_structure_get_int (structure, "height", &h);
  gst_structure_get_fourcc (structure, "format", &fourcc);

  fps = gst_structure_get_value (structure, "framerate");
  if (fps != NULL && GST_VALUE_HOLDS_FRACTION (fps)) {
    divxenc->fps_n = gst_value_get_fraction_numerator (fps);
    divxenc->fps_d = gst_value_get_fraction_denominator (fps);
  } else {
    divxenc->fps_n = -1;
  }

  switch (fourcc) {
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
      divx_cs = GST_MAKE_FOURCC ('I', '4', '2', '0');
      break;
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
      divx_cs = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
      break;
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
      divx_cs = GST_MAKE_FOURCC ('Y', 'V', '1', '2');
      break;
    case GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'):
      divx_cs = GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U');
      break;
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
      divx_cs = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
      break;
    default:
      ret = FALSE;
      goto done;
  }

  divxenc->csp = divx_cs;
  divxenc->bitcnt = bitcnt;
  divxenc->width = w;
  divxenc->height = h;

  /* try it */
  if (gst_divxenc_setup (divxenc)) {
    GstCaps *new_caps = NULL;

    new_caps = gst_caps_new_simple ("video/x-divx",
        "divxversion", G_TYPE_INT, 5,
        "width", G_TYPE_INT, w,
        "height", G_TYPE_INT, h,
        "framerate", GST_TYPE_FRACTION, divxenc->fps_n, divxenc->fps_d, NULL);

    if (new_caps) {

      if (!gst_pad_set_caps (divxenc->srcpad, new_caps)) {
        gst_divxenc_unset (divxenc);
        ret = FALSE;
        goto done;
      }
      gst_caps_unref (new_caps);
      ret = TRUE;
      goto done;

    }

  }

  /* if we got here - it's not good */

  ret = FALSE;

done:
  gst_object_unref (divxenc);
  return ret;
}
Code example #7
File: gstdshowvideodec.cpp  Project: zsx/ossbuild
static gboolean
gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  gboolean ret = FALSE;
  HRESULT hres;
  GstStructure *s = gst_caps_get_structure (caps, 0);
  GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
  GstDshowVideoDecClass *klass =
      (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
  GstBuffer *extradata = NULL;
  const GValue *v = NULL;
  guint size = 0;
  GstCaps *caps_out;
  AM_MEDIA_TYPE output_mediatype, input_mediatype;
  VIDEOINFOHEADER *input_vheader = NULL, *output_vheader = NULL;
  CComPtr<IPin> output_pin;
  CComPtr<IPin> input_pin;
  IBaseFilter *srcfilter = NULL;
  IBaseFilter *sinkfilter = NULL;
  const GValue *fps, *par;

  /* read data */
  if (!gst_structure_get_int (s, "width", &vdec->width) ||
      !gst_structure_get_int (s, "height", &vdec->height)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("error getting video width or height from caps"), (NULL));
    goto end;
  }
  fps = gst_structure_get_value (s, "framerate");
  if (fps) {
    vdec->fps_n = gst_value_get_fraction_numerator (fps);
    vdec->fps_d = gst_value_get_fraction_denominator (fps);
  }
  else {
    /* Invent a sane default framerate; the timestamps matter
     * more anyway. */
    vdec->fps_n = 25;
    vdec->fps_d = 1;
  }

  par = gst_structure_get_value (s, "pixel-aspect-ratio");
  if (par) {
    vdec->par_n = gst_value_get_fraction_numerator (par);
    vdec->par_d = gst_value_get_fraction_denominator (par);
  }
  else {
    vdec->par_n = vdec->par_d = 1;
  }

  if ((v = gst_structure_get_value (s, "codec_data")))
    extradata = gst_value_get_buffer (v);

  /* define the input type format */
  memset (&input_mediatype, 0, sizeof (AM_MEDIA_TYPE));
  input_mediatype.majortype = klass->entry->input_majortype;
  input_mediatype.subtype = klass->entry->input_subtype;
  input_mediatype.bFixedSizeSamples = FALSE;
  input_mediatype.bTemporalCompression = TRUE;

  if (strstr (klass->entry->sinkcaps, "video/mpeg, mpegversion= (int) 1")) {
    size =
        sizeof (MPEG1VIDEOINFO) + (extradata ? GST_BUFFER_SIZE (extradata) -
        1 : 0);
    input_vheader = (VIDEOINFOHEADER *)g_malloc0 (size);

    input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
    if (extradata) {
      MPEG1VIDEOINFO *mpeg_info = (MPEG1VIDEOINFO *) input_vheader;

      memcpy (mpeg_info->bSequenceHeader,
          GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
      mpeg_info->cbSequenceHeader = GST_BUFFER_SIZE (extradata);
    }
    input_mediatype.formattype = FORMAT_MPEGVideo;
  } else {
    size =
        sizeof (VIDEOINFOHEADER) +
        (extradata ? GST_BUFFER_SIZE (extradata) : 0);
    input_vheader = (VIDEOINFOHEADER *)g_malloc0 (size);

    input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
    if (extradata) {            /* Codec data is appended after our header */
      memcpy (((guchar *) input_vheader) + sizeof (VIDEOINFOHEADER),
          GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
      input_vheader->bmiHeader.biSize += GST_BUFFER_SIZE (extradata);
    }
    input_mediatype.formattype = FORMAT_VideoInfo;
  }
  input_vheader->rcSource.top = input_vheader->rcSource.left = 0;
  input_vheader->rcSource.right = vdec->width;
  input_vheader->rcSource.bottom = vdec->height;
  input_vheader->rcTarget = input_vheader->rcSource;
  input_vheader->bmiHeader.biWidth = vdec->width;
  input_vheader->bmiHeader.biHeight = vdec->height;
  input_vheader->bmiHeader.biPlanes = 1;
  input_vheader->bmiHeader.biBitCount = 16;
  input_vheader->bmiHeader.biCompression = klass->entry->format;
  input_vheader->bmiHeader.biSizeImage =
      (vdec->width * vdec->height) * (input_vheader->bmiHeader.biBitCount / 8);

  input_mediatype.cbFormat = size;
  input_mediatype.pbFormat = (BYTE *) input_vheader;
  input_mediatype.lSampleSize = input_vheader->bmiHeader.biSizeImage;

  vdec->fakesrc->GetOutputPin()->SetMediaType(&input_mediatype);

  /* set the sample size for fakesrc filter to the output buffer size */
  vdec->fakesrc->GetOutputPin()->SetSampleSize(input_mediatype.lSampleSize);

  /* connect our fake src to decoder */
  hres = vdec->fakesrc->QueryInterface(IID_IBaseFilter,
      (void **) &srcfilter);
  if (FAILED (hres)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
      ("Can't QT fakesrc to IBaseFilter: %x", hres), (NULL));
    goto end;
  }

  output_pin = gst_dshow_get_pin_from_filter (srcfilter, PINDIR_OUTPUT);
  if (!output_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get output pin from our directshow fakesrc filter"), (NULL));
    goto end;
  }
  input_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_INPUT);
  if (!input_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get input pin from decoder filter"), (NULL));
    goto end;
  }

  hres = vdec->filtergraph->ConnectDirect (output_pin, input_pin, NULL);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't connect fakesrc with decoder (error=%x)", hres), (NULL));
    goto end;
  }

  /* get decoder output video format */
  if (!gst_dshowvideodec_get_filter_output_format (vdec,
          klass->entry->output_subtype, &output_vheader, &size)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get decoder output video format"), (NULL));
    goto end;
  }

  memset (&output_mediatype, 0, sizeof (AM_MEDIA_TYPE));
  output_mediatype.majortype = klass->entry->output_majortype;
  output_mediatype.subtype = klass->entry->output_subtype;
  output_mediatype.bFixedSizeSamples = TRUE;
  output_mediatype.bTemporalCompression = FALSE;
  output_mediatype.lSampleSize = output_vheader->bmiHeader.biSizeImage;
  output_mediatype.formattype = FORMAT_VideoInfo;
  output_mediatype.cbFormat = size;
  output_mediatype.pbFormat = (BYTE *) output_vheader;

  vdec->fakesink->SetMediaType (&output_mediatype);

  /* connect decoder to our fake sink */
  output_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT);
  if (!output_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get output pin from our decoder filter"), (NULL));
    goto end;
  }

  hres = vdec->fakesink->QueryInterface(IID_IBaseFilter,
      (void **) &sinkfilter);
  if (FAILED (hres)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
      ("Can't QT fakesink to IBaseFilter: %x", hres), (NULL));
    goto end;
  }

  input_pin = gst_dshow_get_pin_from_filter (sinkfilter, PINDIR_INPUT);
  if (!input_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get input pin from our directshow fakesink filter"), (NULL));
    goto end;
  }

  hres = vdec->filtergraph->ConnectDirect(output_pin, input_pin,
      &output_mediatype);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't connect decoder with fakesink (error=%x)", hres), (NULL));
    goto end;
  }

  /* negotiate output */
  caps_out = gst_caps_from_string (klass->entry->srccaps);
  gst_caps_set_simple (caps_out,
      "width", G_TYPE_INT, vdec->width,
      "height", G_TYPE_INT, vdec->height, NULL);

  if (vdec->fps_n && vdec->fps_d) {
      gst_caps_set_simple (caps_out,
          "framerate", GST_TYPE_FRACTION, vdec->fps_n, vdec->fps_d, NULL);
  }

  gst_caps_set_simple (caps_out, 
      "pixel-aspect-ratio", GST_TYPE_FRACTION, vdec->par_n, vdec->par_d, NULL);

  if (!gst_pad_set_caps (vdec->srcpad, caps_out)) {
    gst_caps_unref (caps_out);
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Failed to negotiate output"), (NULL));
    goto end;
  }
  gst_caps_unref (caps_out);

  hres = vdec->mediafilter->Run (-1);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't run the directshow graph (error=%d)", hres), (NULL));
    goto end;
  }

  ret = TRUE;
end:
  gst_object_unref (vdec);
  if (input_vheader)
    g_free (input_vheader);
  if (srcfilter)
    srcfilter->Release();
  if (sinkfilter)
    sinkfilter->Release();
  return ret;
}
Code example #8
File: gstaravis.c  Project: Will-W/aravis
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
	GstAravis* gst_aravis = GST_ARAVIS(src);
	GstStructure *structure;
	ArvPixelFormat pixel_format;
	int height, width;
	int bpp, depth;
	const GValue *frame_rate;
	const char *caps_string;
	unsigned int i;
	guint32 fourcc;

	GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

	arv_camera_stop_acquisition (gst_aravis->camera);

	if (gst_aravis->stream != NULL)
		g_object_unref (gst_aravis->stream);

	structure = gst_caps_get_structure (caps, 0);

	gst_structure_get_int (structure, "width", &width);
	gst_structure_get_int (structure, "height", &height);
	frame_rate = gst_structure_get_value (structure, "framerate");
	gst_structure_get_int (structure, "bpp", &bpp);
	gst_structure_get_int (structure, "depth", &depth);

	if (gst_structure_get_field_type (structure, "format") == G_TYPE_STRING) {
		const char *string;

		string = gst_structure_get_string (structure, "format");
		fourcc = GST_STR_FOURCC (string);
	} else if (gst_structure_get_field_type (structure, "format") == GST_TYPE_FOURCC) {
		gst_structure_get_fourcc (structure, "format", &fourcc);
	} else
		fourcc = 0;

	pixel_format = arv_pixel_format_from_gst_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

	arv_camera_set_region (gst_aravis->camera, gst_aravis->offset_x, gst_aravis->offset_y, width, height);
	arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
	arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

	if (frame_rate != NULL) {
		double dbl_frame_rate;

		dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
			(double) gst_value_get_fraction_denominator (frame_rate);

		GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
		arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

		if (dbl_frame_rate > 0.0)
			gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
							     3e6 / dbl_frame_rate);
		else
			gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
	} else
		gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

	GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %Ld µs", gst_aravis->buffer_timeout_us);

	GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

	if(gst_aravis->gain_auto) {
		arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Gain = continuous", gst_aravis->gain_auto);
	} else {
		if (gst_aravis->gain >= 0) {
			GST_DEBUG_OBJECT (gst_aravis, "Gain = %d", gst_aravis->gain);
			arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %d", arv_camera_get_gain (gst_aravis->camera));
	}

	if(gst_aravis->exposure_auto) {
		arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Exposure = contiuous", gst_aravis->exposure_auto);
	} else {
		if (gst_aravis->exposure_time_us > 0.0) {
			GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
			arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));
	}

	if (gst_aravis->fixed_caps != NULL)
		gst_caps_unref (gst_aravis->fixed_caps);

	caps_string = arv_pixel_format_to_gst_caps_string (pixel_format);
	if (caps_string != NULL) {
		GstStructure *structure;
		GstCaps *caps;

		caps = gst_caps_new_empty ();
		structure = gst_structure_from_string (caps_string, NULL);
		gst_structure_set (structure,
				   "width", G_TYPE_INT, width,
				   "height", G_TYPE_INT, height,
				   NULL);

		if (frame_rate != NULL)
			gst_structure_set_value (structure, "framerate", frame_rate);

		gst_caps_append_structure (caps, structure);

		gst_aravis->fixed_caps = caps;
	} else
		gst_aravis->fixed_caps = NULL;

	gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
	gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

	for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
		arv_stream_push_buffer (gst_aravis->stream,
					arv_buffer_new (gst_aravis->payload, NULL));

	GST_LOG_OBJECT (gst_aravis, "Start acquisition");
	arv_camera_start_acquisition (gst_aravis->camera);

	gst_aravis->timestamp_offset = 0;
	gst_aravis->last_timestamp = 0;

	return TRUE;
}
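gst_aravis_set_caps() turns the caps fraction into a double for arv_camera_set_frame_rate(). The reverse direction, turning a measured double rate back into a caps fraction, is what gst_util_double_to_fraction() is for; a sketch, assuming a GStreamer version that provides that helper and a writable GstStructure named structure:

  gint fps_n, fps_d;
  double actual_fps = arv_camera_get_frame_rate (gst_aravis->camera);

  gst_util_double_to_fraction (actual_fps, &fps_n, &fps_d);
  gst_structure_set (structure, "framerate", GST_TYPE_FRACTION,
      fps_n, fps_d, NULL);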
Code example #9
File: gstdspvenc.c  Project: EQ4/gst-dsp
static gboolean
sink_setcaps(GstPad *pad,
	     GstCaps *caps)
{
	GstDspVEnc *self;
	GstDspBase *base;
	GstStructure *in_struc;
	GstCaps *out_caps;
	GstStructure *out_struc;
	gint width = 0, height = 0;
	GstCaps *allowed_caps;
	gint tgt_level = -1;
	struct td_codec *codec;

	self = GST_DSP_VENC(GST_PAD_PARENT(pad));
	base = GST_DSP_BASE(self);
	codec = base->codec;

	if (!codec)
		return FALSE;

#ifdef DEBUG
	{
		gchar *str = gst_caps_to_string(caps);
		pr_info(self, "sink caps: %s", str);
		g_free(str);
	}
#endif

	in_struc = gst_caps_get_structure(caps, 0);

	out_caps = gst_caps_new_empty();

	switch (base->alg) {
	case GSTDSP_JPEGENC:
		out_struc = gst_structure_new("image/jpeg",
					      NULL);
		break;
	case GSTDSP_H263ENC:
		out_struc = gst_structure_new("video/x-h263",
					      "variant", G_TYPE_STRING, "itu",
					      NULL);
		break;
	case GSTDSP_MP4VENC:
		out_struc = gst_structure_new("video/mpeg",
					      "mpegversion", G_TYPE_INT, 4,
					      "systemstream", G_TYPE_BOOLEAN, FALSE,
					      NULL);
		break;
	case GSTDSP_H264ENC:
		out_struc = gst_structure_new("video/x-h264",
					      "alignment", G_TYPE_STRING, "au",
					      NULL);
		break;
	default:
		return FALSE;
	}

	if (gst_structure_get_int(in_struc, "width", &width))
		gst_structure_set(out_struc, "width", G_TYPE_INT, width, NULL);
	if (gst_structure_get_int(in_struc, "height", &height))
		gst_structure_set(out_struc, "height", G_TYPE_INT, height, NULL);
	gst_structure_get_fourcc(in_struc, "format", &self->color_format);

	switch (base->alg) {
	case GSTDSP_H263ENC:
	case GSTDSP_MP4VENC:
	case GSTDSP_H264ENC:
		base->output_buffer_size = width * height / 2;
		break;
	case GSTDSP_JPEGENC:
		if (width % 2 || height % 2)
			return FALSE;
		if (self->color_format == GST_MAKE_FOURCC('I', '4', '2', '0'))
			base->input_buffer_size = ROUND_UP(width, 16) * ROUND_UP(height, 16) * 3 / 2;
		else
			base->input_buffer_size = ROUND_UP(width, 16) * ROUND_UP(height, 16) * 2;
		base->output_buffer_size = width * height;
		if (self->quality < 10)
			base->output_buffer_size /= 10;
		else if (self->quality < 100)
			base->output_buffer_size /= (100 / self->quality);
		break;
	default:
		break;
	}

	if (base->node)
		goto skip_setup;

	switch (base->alg) {
	case GSTDSP_JPEGENC:
		du_port_alloc_buffers(base->ports[0], 1);
#if SN_API > 1
		du_port_alloc_buffers(base->ports[1], 2);
#else
		/* old versions of the sn can't handle 2 buffers */
		/*
		 * Some constrained pipelines might starve because of this. You
		 * might want to try enable-last-buffer=false on some sinks.
		 * TODO Is there any way around this?
		 */
		du_port_alloc_buffers(base->ports[1], 1);
#endif
		break;
	default:
		du_port_alloc_buffers(base->ports[0], 2);
		du_port_alloc_buffers(base->ports[1], 4);
		break;
	}

skip_setup:
	self->width = width;
	self->height = height;

	{
		const GValue *framerate = NULL;
		framerate = gst_structure_get_value(in_struc, "framerate");
		if (framerate) {
			gst_structure_set_value(out_struc, "framerate", framerate);
			/* calculate nearest integer */
			self->framerate = (gst_value_get_fraction_numerator(framerate) * 2 /
					   gst_value_get_fraction_denominator(framerate) + 1) / 2;
		}
	}

	/* see if downstream caps express something */
	allowed_caps = gst_pad_get_allowed_caps(base->srcpad);
	if (allowed_caps) {
		if (gst_caps_get_size(allowed_caps) > 0) {
			GstStructure *s;
			s = gst_caps_get_structure(allowed_caps, 0);
			gst_structure_get_int(s, "level", &tgt_level);
			if (base->alg == GSTDSP_H264ENC) {
				const char *stream_format;
				stream_format = gst_structure_get_string(s, "stream-format");
				if (stream_format && !strcmp(stream_format, "avc"))
					self->priv.h264.bytestream = false;
				else
					stream_format = "byte-stream";
				gst_structure_set(out_struc, "stream-format", G_TYPE_STRING, stream_format, NULL);
			}
		}
		gst_caps_unref(allowed_caps);
	}

	check_supported_levels(self, tgt_level);

	if (self->bitrate == 0)
		self->bitrate = self->max_bitrate;
	else if (self->bitrate > self->max_bitrate)
		self->bitrate = self->max_bitrate;

	gst_caps_append_structure(out_caps, out_struc);

#ifdef DEBUG
	{
		gchar *str = gst_caps_to_string(out_caps);
		pr_info(self, "src caps: %s", str);
		g_free(str);
	}
#endif

	if (!gst_pad_take_caps(base->srcpad, out_caps))
		return FALSE;

	if (base->node)
		return TRUE;

	base->node = create_node(self);
	if (!base->node) {
		pr_err(self, "dsp node creation failed");
		return FALSE;
	}

	if (codec->setup_params)
		codec->setup_params(base);

	if (!gstdsp_start(base)) {
		pr_err(self, "dsp start failed");
		return FALSE;
	}

	if (codec->send_params)
		codec->send_params(base, base->node);

	return TRUE;
}
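The expression (num * 2 / den + 1) / 2 above rounds the framerate fraction to the nearest integer. The same rounding can be written with GStreamer's scaling helper, which may read more clearly; a sketch, assuming gst_util_uint64_scale_round() is available in the targeted GStreamer version:

  gint fps_n = gst_value_get_fraction_numerator (framerate);
  gint fps_d = gst_value_get_fraction_denominator (framerate);

  /* nearest integer to fps_n / fps_d */
  guint framerate_int = gst_util_uint64_scale_round (fps_n, 1, fps_d);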
Code example #10
/* h264 dec has its own sink_setcaps for supporting nalu convert codec data */
static gboolean
sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstStructure *structure;
  GstOmxBaseVideoDec *self;
  GstOmxH264Dec *h264_self;
  GstOmxBaseFilter *omx_base;
  GOmxCore *gomx;
  OMX_PARAM_PORTDEFINITIONTYPE param;
  gint width = 0;
  gint height = 0;

  self = GST_OMX_BASE_VIDEODEC (GST_PAD_PARENT (pad));
  h264_self = GST_OMX_H264DEC (GST_PAD_PARENT (pad));
  omx_base = GST_OMX_BASE_FILTER (self);

  gomx = (GOmxCore *) omx_base->gomx;

  GST_INFO_OBJECT (self, "setcaps (sink)(h264): %" GST_PTR_FORMAT, caps);

  g_return_val_if_fail (gst_caps_get_size (caps) == 1, FALSE);

  structure = gst_caps_get_structure (caps, 0);

  gst_structure_get_int (structure, "width", &width);
  gst_structure_get_int (structure, "height", &height);

  {
    const GValue *framerate = NULL;
    framerate = gst_structure_get_value (structure, "framerate");
    if (framerate) {
      self->framerate_num = gst_value_get_fraction_numerator (framerate);
      self->framerate_denom = gst_value_get_fraction_denominator (framerate);
    }
  }

  G_OMX_INIT_PARAM (param);

  {
    const GValue *codec_data;
    GstBuffer *buffer;
    gboolean ret = FALSE;
    guint8 *buf_data = NULL;

    codec_data = gst_structure_get_value (structure, "codec_data");
    if (codec_data) {
      buffer = gst_value_get_buffer (codec_data);

      buf_data = GST_BUFFER_DATA(buffer);

      if (GST_BUFFER_SIZE(buffer) < 4) GST_ERROR("codec data size is less than 4!!"); //check this.

      if ((buf_data[0] == 0x00)&&(buf_data[1] == 0x00)&&
         ((buf_data[2] == 0x01)||((buf_data[2] == 0x00)&&(buf_data[3] == 0x01)))) {
           h264_self->h264Format = GSTOMX_H264_FORMAT_NALU;
           GST_INFO_OBJECT(self, "H264 format is NALU");
      } else {
         h264_self->h264Format = GSTOMX_H264_FORMAT_3GPP;
         GST_INFO_OBJECT(self, "H264 format is 3GPP");
      }


      /* if codec data is 3gpp format, convert nalu format */
      if(h264_self->h264Format == GSTOMX_H264_FORMAT_3GPP) {
        GstBuffer *nalu_dci = NULL;

        ret = convert_dci(h264_self, buffer, &nalu_dci);

        if (ret) {
          omx_base->codec_data = nalu_dci;
        } else {
          if (nalu_dci) { gst_buffer_unref (nalu_dci); }
          GST_ERROR_OBJECT(h264_self, "converting dci error.");
          omx_base->codec_data = buffer;
          gst_buffer_ref (buffer);
        }

      } else { /* not 3GPP format */
        omx_base->codec_data = buffer;
        gst_buffer_ref (buffer);
      }
      h264_self->h264Format = GSTOMX_H264_FORMAT_UNKNOWN;
    }
  }
  /* Input port configuration. */
  {
    param.nPortIndex = omx_base->in_port->port_index;
    OMX_GetParameter (gomx->omx_handle, OMX_IndexParamPortDefinition, &param);

    param.format.video.nFrameWidth = width;
    param.format.video.nFrameHeight = height;

    OMX_SetParameter (gomx->omx_handle, OMX_IndexParamPortDefinition, &param);
  }
  return gst_pad_set_caps (pad, caps);
}
Code example #11
static gboolean
gst_gdiscreencapsrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (bsrc);
  HWND capture;
  HDC device;
  GstStructure *structure;
  const GValue *framerate;
  gint red_mask, green_mask, blue_mask;
  gint bpp;

  structure = gst_caps_get_structure (caps, 0);

  gst_structure_get_int (structure, "red_mask", &red_mask);
  gst_structure_get_int (structure, "green_mask", &green_mask);
  gst_structure_get_int (structure, "blue_mask", &blue_mask);
  if (blue_mask != GST_VIDEO_BYTE1_MASK_24_INT ||
      green_mask != GST_VIDEO_BYTE2_MASK_24_INT ||
      red_mask != GST_VIDEO_BYTE3_MASK_24_INT) {
    GST_ERROR ("Wrong red_,green_,blue_ mask provided. "
        "We only support RGB and BGR");

    return FALSE;
  }

  gst_structure_get_int (structure, "bpp", &bpp);
  if (bpp != 24) {
    GST_ERROR ("Wrong bpp provided %d. We only support 24 bpp", bpp);
    return FALSE;
  }

  src->src_rect = src->screen_rect;
  if (src->capture_w && src->capture_h) {
    src->src_rect.left += src->capture_x;
    src->src_rect.top += src->capture_y;
    src->src_rect.right = src->src_rect.left + src->capture_w;
    src->src_rect.bottom = src->src_rect.top + src->capture_h;
  }

  framerate = gst_structure_get_value (structure, "framerate");
  if (framerate) {
    src->rate_numerator = gst_value_get_fraction_numerator (framerate);
    src->rate_denominator = gst_value_get_fraction_denominator (framerate);
  }

  src->info.bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
  src->info.bmiHeader.biWidth = src->src_rect.right - src->src_rect.left;
  src->info.bmiHeader.biHeight = src->src_rect.top - src->src_rect.bottom;
  src->info.bmiHeader.biPlanes = 1;
  src->info.bmiHeader.biBitCount = 24;
  src->info.bmiHeader.biCompression = BI_RGB;
  src->info.bmiHeader.biSizeImage = 0;
  src->info.bmiHeader.biXPelsPerMeter = 0;
  src->info.bmiHeader.biYPelsPerMeter = 0;
  src->info.bmiHeader.biClrUsed = 0;
  src->info.bmiHeader.biClrImportant = 0;

  /* Cleanup first */
  if (src->hBitmap != INVALID_HANDLE_VALUE)
    DeleteObject (src->hBitmap);

  if (src->memDC != INVALID_HANDLE_VALUE)
    DeleteDC (src->memDC);

  /* Allocate */
  capture = GetDesktopWindow ();
  device = GetDC (capture);
  src->hBitmap = CreateDIBSection (device, &(src->info), DIB_RGB_COLORS,
      (void **) &(src->dibMem), 0, 0);
  src->memDC = CreateCompatibleDC (device);
  SelectObject (src->memDC, src->hBitmap);
  ReleaseDC (capture, device);

  GST_DEBUG_OBJECT (src, "size %dx%d, %d/%d fps",
      (gint) src->info.bmiHeader.biWidth,
      (gint) (-src->info.bmiHeader.biHeight),
      src->rate_numerator, src->rate_denominator);

  return TRUE;
}
Code example #12
static gboolean
sink_setcaps (GstPad *pad,
              GstCaps *caps)
{
    GstOmxBaseVideoEnc *self;
    GstOmxBaseFilter *omx_base;
    GstQuery *query;
    GstVideoFormat format;
    gint width, height, rowstride;
    const GValue *framerate = NULL;

    self = GST_OMX_BASE_VIDEOENC (GST_PAD_PARENT (pad));
    omx_base = GST_OMX_BASE_FILTER (self);

    GST_INFO_OBJECT (omx_base, "setcaps (sink): %" GST_PTR_FORMAT, caps);

    g_return_val_if_fail (caps, FALSE);
    g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);

    framerate = gst_structure_get_value (
            gst_caps_get_structure (caps, 0), "framerate");

    if (framerate)
    {
        omx_base->duration = gst_util_uint64_scale_int(GST_SECOND,
                gst_value_get_fraction_denominator (framerate),
                gst_value_get_fraction_numerator (framerate));
        GST_DEBUG_OBJECT (self, "Nominal frame duration =%"GST_TIME_FORMAT,
                            GST_TIME_ARGS (omx_base->duration));
    }

    if (gst_video_format_parse_caps_strided (caps,
            &format, &width, &height, &rowstride))
    {
        /* Output port configuration: */
        OMX_PARAM_PORTDEFINITIONTYPE param;

        G_OMX_PORT_GET_DEFINITION (omx_base->in_port, &param);

        param.format.video.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar ;
        param.format.video.nFrameWidth  = width;
        param.format.video.nFrameHeight = height;
        if (!rowstride)
            rowstride = (width + 15) & 0xFFFFFFF0;
        param.format.video.nStride      = self->rowstride = rowstride;

        if (framerate)
        {
            self->framerate_num = gst_value_get_fraction_numerator (framerate);
            self->framerate_denom = gst_value_get_fraction_denominator (framerate);

            /* convert to Q.16 */
            param.format.video.xFramerate =
                (gst_value_get_fraction_numerator (framerate) << 16) /
                gst_value_get_fraction_denominator (framerate);
        }

        G_OMX_PORT_SET_DEFINITION (omx_base->out_port, &param);
    }


    return TRUE;
}
Code example #13
static gboolean
gst_egl_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstEGLSink *egl_sink;
  gint width;
  gint height;
  gint bufcount;
  gboolean ok;
  gint fps_n, fps_d;
  gint par_n, par_d;
  gint display_par_n, display_par_d;
  guint display_ratio_num, display_ratio_den;
  GstVideoFormat format;
  GstStructure *s;

  GST_DEBUG ("set caps with %" GST_PTR_FORMAT, caps);

  egl_sink = GST_EGL_SINK (bsink);

  if(egl_sink->set_caps_callback)
    return egl_sink->set_caps_callback(caps, egl_sink->client_data);
  
  s = gst_caps_get_structure (caps, 0);
  if(gst_structure_get_int (s, "num-buffers-required", &bufcount) &&
      bufcount > GST_GL_DISPLAY_MAX_BUFFER_COUNT) {
    GST_WARNING("num-buffers-required %d exceed max eglsink buffer count %d", bufcount, GST_GL_DISPLAY_MAX_BUFFER_COUNT);
    return FALSE;
  }
  
  ok = gst_video_format_parse_caps (caps, &format, &width, &height);
  if (!ok)
    return FALSE;

  ok &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);
  ok &= gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d);

  if (!ok)
    return FALSE;

  /* get display's PAR */
  if (egl_sink->par) {
    display_par_n = gst_value_get_fraction_numerator (egl_sink->par);
    display_par_d = gst_value_get_fraction_denominator (egl_sink->par);
  } else {
    display_par_n = 1;
    display_par_d = 1;
  }

  ok = gst_video_calculate_display_ratio (&display_ratio_num,
      &display_ratio_den, width, height, par_n, par_d, display_par_n,
      display_par_d);

  if (!ok)
    return FALSE;

  if (height % display_ratio_den == 0) {
    GST_DEBUG ("keeping video height");
    egl_sink->window_width = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    egl_sink->window_height = height;
  } else if (width % display_ratio_num == 0) {
    GST_DEBUG ("keeping video width");
    egl_sink->window_width = width;
    egl_sink->window_height = (guint)
        gst_util_uint64_scale_int (width, display_ratio_den, display_ratio_num);
  } else {
    GST_DEBUG ("approximating while keeping video height");
    egl_sink->window_width = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    egl_sink->window_height = height;
  }
  GST_DEBUG ("scaling to %dx%d",
      egl_sink->window_width, egl_sink->window_height);

  GST_VIDEO_SINK_WIDTH (egl_sink) = width;
  GST_VIDEO_SINK_HEIGHT (egl_sink) = height;
  egl_sink->fps_n = fps_n;
  egl_sink->fps_d = fps_d;
  egl_sink->par_n = par_n;
  egl_sink->par_d = par_d;

  if (!egl_sink->window_id && !egl_sink->new_window_id)
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (egl_sink));

  return TRUE;
}
Code example #14
static gboolean
gst_xvidenc_setcaps (GstPad * pad, GstCaps * vscaps)
{
  GstXvidEnc *xvidenc;
  GstStructure *structure;
  gint w, h;
  const GValue *fps, *par;
  gint xvid_cs = -1;

  xvidenc = GST_XVIDENC (GST_PAD_PARENT (pad));

  /* if there's something old around, remove it */
  if (xvidenc->handle) {
    gst_xvidenc_flush_buffers (xvidenc, TRUE);
    xvid_encore (xvidenc->handle, XVID_ENC_DESTROY, NULL, NULL);
    xvidenc->handle = NULL;
  }

  structure = gst_caps_get_structure (vscaps, 0);

  if (!gst_structure_get_int (structure, "width", &w) ||
      !gst_structure_get_int (structure, "height", &h)) {
    return FALSE;
  }

  fps = gst_structure_get_value (structure, "framerate");
  if (fps == NULL || !GST_VALUE_HOLDS_FRACTION (fps)) {
    GST_WARNING_OBJECT (pad, "no framerate specified, or not a GstFraction");
    return FALSE;
  }

  /* optional par info */
  par = gst_structure_get_value (structure, "pixel-aspect-ratio");

  xvid_cs = gst_xvid_structure_to_csp (structure);
  if (xvid_cs == -1) {
    gchar *sstr;

    sstr = gst_structure_to_string (structure);
    GST_DEBUG_OBJECT (xvidenc, "Did not find xvid colourspace for caps %s",
        sstr);
    g_free (sstr);
    return FALSE;
  }

  xvidenc->csp = xvid_cs;
  xvidenc->width = w;
  xvidenc->height = h;
  xvidenc->fbase = gst_value_get_fraction_numerator (fps);
  xvidenc->fincr = gst_value_get_fraction_denominator (fps);
  if ((par != NULL) && GST_VALUE_HOLDS_FRACTION (par)) {
    xvidenc->par_width = gst_value_get_fraction_numerator (par);
    xvidenc->par_height = gst_value_get_fraction_denominator (par);
  } else {
    xvidenc->par_width = 1;
    xvidenc->par_height = 1;
  }

  /* wipe xframe cache given possible change caps properties */
  g_free (xvidenc->xframe_cache);
  xvidenc->xframe_cache = NULL;

  if (gst_xvidenc_setup (xvidenc)) {
    gboolean ret = FALSE;
    GstCaps *new_caps = NULL, *allowed_caps;

    /* please downstream with preferred caps */
    allowed_caps = gst_pad_get_allowed_caps (xvidenc->srcpad);
    GST_DEBUG_OBJECT (xvidenc, "allowed caps: %" GST_PTR_FORMAT, allowed_caps);

    if (allowed_caps && !gst_caps_is_empty (allowed_caps)) {
      new_caps = gst_caps_copy_nth (allowed_caps, 0);
    } else {
      new_caps = gst_caps_new_simple ("video/x-xvid", NULL);
    }
    if (allowed_caps)
      gst_caps_unref (allowed_caps);

    gst_caps_set_simple (new_caps,
        "width", G_TYPE_INT, w, "height", G_TYPE_INT, h,
        "framerate", GST_TYPE_FRACTION, xvidenc->fbase, xvidenc->fincr,
        "pixel-aspect-ratio", GST_TYPE_FRACTION,
        xvidenc->par_width, xvidenc->par_height, NULL);
    /* just to be sure */
    gst_pad_fixate_caps (xvidenc->srcpad, new_caps);

    if (xvidenc->used_profile != 0) {
      switch (xvidenc->used_profile) {
        case XVID_PROFILE_S_L0:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
              "level", G_TYPE_STRING, "0", NULL);
          break;
        case XVID_PROFILE_S_L1:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
              "level", G_TYPE_STRING, "1", NULL);
          break;
        case XVID_PROFILE_S_L2:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
              "level", G_TYPE_STRING, "2", NULL);
          break;
        case XVID_PROFILE_S_L3:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
              "level", G_TYPE_STRING, "3", NULL);
          break;
        case XVID_PROFILE_S_L4a:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
              "level", G_TYPE_STRING, "4a", NULL);
          break;
        case XVID_PROFILE_S_L5:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
              "level", G_TYPE_STRING, "5", NULL);
          break;
        case XVID_PROFILE_S_L6:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
              "level", G_TYPE_STRING, "6", NULL);
          break;
        case XVID_PROFILE_ARTS_L1:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
              "advanced-real-time-simple", "level", G_TYPE_STRING, "1", NULL);
          break;
        case XVID_PROFILE_ARTS_L2:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
              "advanced-real-time-simple", "level", G_TYPE_STRING, "2", NULL);
          break;
        case XVID_PROFILE_ARTS_L3:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
              "advanced-real-time-simple", "level", G_TYPE_STRING, "3", NULL);
          break;
        case XVID_PROFILE_ARTS_L4:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
              "advanced-real-time-simple", "level", G_TYPE_STRING, "4", NULL);
          break;
        case XVID_PROFILE_AS_L0:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
              "advanced-simple", "level", G_TYPE_STRING, "0", NULL);
          break;
        case XVID_PROFILE_AS_L1:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
              "advanced-simple", "level", G_TYPE_STRING, "1", NULL);
          break;
        case XVID_PROFILE_AS_L2:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
              "advanced-simple", "level", G_TYPE_STRING, "2", NULL);
          break;
        case XVID_PROFILE_AS_L3:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
              "advanced-simple", "level", G_TYPE_STRING, "3", NULL);
          break;
        case XVID_PROFILE_AS_L4:
          gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
              "advanced-simple", "level", G_TYPE_STRING, "4", NULL);
          break;
        default:
          g_assert_not_reached ();
          break;
      }
    }

    /* src pad should accept anyway */
    ret = gst_pad_set_caps (xvidenc->srcpad, new_caps);
    gst_caps_unref (new_caps);

    if (!ret && xvidenc->handle) {
      xvid_encore (xvidenc->handle, XVID_ENC_DESTROY, NULL, NULL);
      xvidenc->handle = NULL;
    }
    return ret;

  } else                        /* setup did not work out */
    return FALSE;
}