Example #1
static void
gst_dshowvideosrc_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
  /* If there is no desired video size, default to the device's preferred video size */

  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  guint i = 0;
  gint res = -1;

  for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
    GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

    if (gst_caps_is_subset (caps, capstmp)) {
      res = i;
    }
    gst_caps_unref (capstmp);
  }

  if (res != -1) {
    GList *type_pin_mediatype = g_list_nth (src->pins_mediatypes, res);
    if (type_pin_mediatype) {
      GstCapturePinMediaType *pin_mediatype =
          (GstCapturePinMediaType *) type_pin_mediatype->data;
      gst_structure_fixate_field_nearest_int (structure, "width",
          pin_mediatype->defaultWidth);
      gst_structure_fixate_field_nearest_int (structure, "height",
          pin_mediatype->defaultHeight);
      gst_structure_fixate_field_nearest_fraction (structure, "framerate",
          pin_mediatype->defaultFPS, 1);
    }
  }
}
Example #2
static gboolean gst_imx_compositor_sink_query(GstImxBPAggregator *aggregator, GstImxBPAggregatorPad *pad, GstQuery *query)
{
	switch (GST_QUERY_TYPE(query))
	{
		case GST_QUERY_CAPS:
		{
			/* Custom caps query response. Take the sinkpad template caps,
			 * optionally filter them, and return them as the result.
			 * This ensures that the caps that the derived class supports
			 * for input data are actually used (by default, the aggregator
			 * base classes try to keep input and output caps equal) */

			GstCaps *filter, *caps;

			gst_query_parse_caps(query, &filter);
			caps = gst_pad_get_pad_template_caps(GST_PAD(pad));

			if (filter != NULL)
			{
				GstCaps *unfiltered_caps = gst_caps_make_writable(caps);
				caps = gst_caps_intersect(unfiltered_caps, filter);
				gst_caps_unref(unfiltered_caps);
			}

			GST_DEBUG_OBJECT(aggregator, "responding to CAPS query with caps %" GST_PTR_FORMAT, (gpointer)caps);

			gst_query_set_caps_result(query, caps);

			gst_caps_unref(caps);

			return TRUE;
		}

		case GST_QUERY_ACCEPT_CAPS:
		{
			/* Custom accept_caps query response. Simply check if
			 * the supplied caps are a valid subset of the sinkpad's
			 * template caps. This is done for the same reasons
			 * as the caps query response above. */

			GstCaps *accept_caps = NULL, *template_caps = NULL;
			gboolean ret;

			gst_query_parse_accept_caps(query, &accept_caps);
			template_caps = gst_pad_get_pad_template_caps(GST_PAD(pad));

			ret = gst_caps_is_subset(accept_caps, template_caps);
			GST_DEBUG_OBJECT(aggregator, "responding to ACCEPT_CAPS query with value %d  (acceptcaps: %" GST_PTR_FORMAT "  template caps %" GST_PTR_FORMAT ")", ret, (gpointer)accept_caps, (gpointer)template_caps);
			gst_query_set_accept_caps_result(query, ret);

			return TRUE;
		}

		default:
			return GST_IMXBP_AGGREGATOR_CLASS(gst_imx_compositor_parent_class)->sink_query(aggregator, pad, query);
	}
}
Example #3
static gboolean
gst_play_sink_convert_bin_acceptcaps (GstPad * pad, GstCaps * caps)
{
  GstCaps *allowed_caps;
  gboolean ret;

  allowed_caps = gst_pad_query_caps (pad, NULL);
  ret = gst_caps_is_subset (caps, allowed_caps);
  gst_caps_unref (allowed_caps);

  return ret;
}
Example #4
static gboolean
can_blend_caps (GstCaps * incaps)
{
  gboolean ret;
  GstCaps *caps;

  caps = gst_static_caps_get (&overlay_composition_caps);
  ret = gst_caps_is_subset (incaps, caps);
  gst_caps_unref (caps);

  return ret;
}
Example #5
static gboolean
gst_dvbsub_overlay_can_handle_caps (GstCaps * incaps)
{
  gboolean ret;
  GstCaps *caps;
  static GstStaticCaps static_caps = GST_STATIC_CAPS (DVBSUB_OVERLAY_CAPS);

  caps = gst_static_caps_get (&static_caps);
  ret = gst_caps_is_subset (incaps, caps);
  gst_caps_unref (caps);

  return ret;
}
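The GstStaticCaps pattern above assumes DVBSUB_OVERLAY_CAPS is a caps string macro defined elsewhere in that element. Below is a minimal, self-contained sketch of the same pattern, with a made-up caps string standing in for the real one:

#include <gst/gst.h>

/* Hypothetical caps string; the real element defines its own macro. */
#define EXAMPLE_OVERLAY_CAPS "video/x-raw, format = (string) { BGRA, ARGB }"

static GstStaticCaps example_static_caps = GST_STATIC_CAPS (EXAMPLE_OVERLAY_CAPS);

static gboolean
example_can_handle_caps (GstCaps * incaps)
{
  GstCaps *caps;
  gboolean ret;

  /* gst_static_caps_get() converts the string to a GstCaps on first use
   * and returns a reference that the caller must unref. */
  caps = gst_static_caps_get (&example_static_caps);
  ret = gst_caps_is_subset (incaps, caps);
  gst_caps_unref (caps);

  return ret;
}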
Example #6
gboolean
gst_media_descriptor_get_buffers (GstMediaDescriptor * self,
    GstPad * pad, GCompareFunc compare_func, GList ** bufs)
{
  GList *tmpstream, *tmpframe;
  gboolean check = (pad == NULL), ret = FALSE;
  GstCaps *pad_caps = pad ? gst_pad_get_current_caps (pad) : NULL;

  g_return_val_if_fail (GST_IS_MEDIA_DESCRIPTOR (self), FALSE);
  g_return_val_if_fail (self->filenode, FALSE);

  for (tmpstream = self->filenode->streams;
      tmpstream; tmpstream = tmpstream->next) {
    StreamNode *streamnode = (StreamNode *) tmpstream->data;

    if (pad && streamnode->pad == pad)
      check = TRUE;

    if (!streamnode->pad && pad_caps
        && gst_caps_is_subset (pad_caps, streamnode->caps)) {
      check = TRUE;
    }

    if (check) {
      ret = TRUE;
      for (tmpframe = streamnode->frames; tmpframe; tmpframe = tmpframe->next) {
        if (compare_func)
          *bufs =
              g_list_insert_sorted (*bufs,
              gst_buffer_ref (((FrameNode *) tmpframe->data)->buf),
              compare_func);
        else
          *bufs =
              g_list_prepend (*bufs,
              gst_buffer_ref (((FrameNode *) tmpframe->data)->buf));
      }

      if (pad != NULL)
        goto done;
    }
  }


done:

  if (compare_func == NULL)
    *bufs = g_list_reverse (*bufs);

  if (pad_caps)
    gst_caps_unref (pad_caps);
  return ret;
}
Example #7
/**
 * gst_element_factory_list_filter:
 * @list: (transfer none) (element-type Gst.ElementFactory): a #GList of
 *     #GstElementFactory to filter
 * @caps: a #GstCaps
 * @direction: a #GstPadDirection to filter on
 * @subsetonly: whether to filter on caps subsets or not.
 *
 * Filter out all the elementfactories in @list that can handle @caps in
 * the given direction.
 *
 * If @subsetonly is %TRUE, then only the elements whose pad templates
 * are a complete superset of @caps will be returned. Else any element
 * whose pad template caps can intersect with @caps will be returned.
 *
 * Returns: (transfer full) (element-type Gst.ElementFactory): a #GList of
 *     #GstElementFactory elements that match the given requisites.
 *     Use gst_plugin_feature_list_free() after usage.
 *
 * Since: 0.10.31
 */
GList *
gst_element_factory_list_filter (GList * list,
    const GstCaps * caps, GstPadDirection direction, gboolean subsetonly)
{
  GQueue results = G_QUEUE_INIT;

  GST_DEBUG ("finding factories");

  /* loop over all the factories */
  for (; list; list = list->next) {
    GstElementFactory *factory;
    const GList *templates;
    GList *walk;

    factory = (GstElementFactory *) list->data;

    GST_DEBUG ("Trying %s",
        gst_plugin_feature_get_name ((GstPluginFeature *) factory));

    /* get the templates from the element factory */
    templates = gst_element_factory_get_static_pad_templates (factory);
    for (walk = (GList *) templates; walk; walk = g_list_next (walk)) {
      GstStaticPadTemplate *templ = walk->data;

      /* we only care about the sink templates */
      if (templ->direction == direction) {
        GstCaps *tmpl_caps;

        /* try to intersect the caps with the caps of the template */
        tmpl_caps = gst_static_caps_get (&templ->static_caps);

        /* FIXME, intersect is not the right method, we ideally want to check
         * for a subset here */

        /* check if the intersection is empty */
        if ((subsetonly && gst_caps_is_subset (caps, tmpl_caps)) ||
            (!subsetonly && gst_caps_can_intersect (caps, tmpl_caps))) {
          /* non empty intersection, we can use this element */
          g_queue_push_tail (&results, gst_object_ref (factory));
          gst_caps_unref (tmpl_caps);
          break;
        }
        gst_caps_unref (tmpl_caps);
      }
    }
  }
  return results.head;
}
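A rough usage sketch for the function documented above, assuming gst_init() has already been called; the caps string and rank are only illustrative. gst_element_factory_list_get_elements() and gst_plugin_feature_list_free() are the companion helpers of the same API.

#include <gst/gst.h>

static void
example_list_h264_decoders (void)
{
  GList *factories, *filtered, *l;
  GstCaps *caps;

  /* All decoder factories with at least marginal rank. */
  factories = gst_element_factory_list_get_elements (
      GST_ELEMENT_FACTORY_TYPE_DECODER, GST_RANK_MARGINAL);

  caps = gst_caps_from_string ("video/x-h264");

  /* Keep only the factories whose sink templates can handle these caps. */
  filtered =
      gst_element_factory_list_filter (factories, caps, GST_PAD_SINK, FALSE);

  for (l = filtered; l != NULL; l = l->next)
    g_print ("%s\n",
        gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (l->data)));

  gst_caps_unref (caps);
  gst_plugin_feature_list_free (filtered);
  gst_plugin_feature_list_free (factories);
}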
Example #8
static void
check_caps (GstPad * pad, HandOffData * hod)
{
  GstCaps *caps, *expected_caps;
  gboolean is_subset = FALSE;

  expected_caps = gst_static_caps_get (&hod->expected_caps);
  caps = gst_pad_get_current_caps (pad);
  is_subset = gst_caps_is_subset (caps, expected_caps);
  GST_DEBUG ("expected caps: %" GST_PTR_FORMAT ", caps: %" GST_PTR_FORMAT
      ", is subset: %d", expected_caps, caps, is_subset);
  gst_caps_unref (expected_caps);
  gst_caps_unref (caps);

  fail_unless (is_subset);
}
Example #9
/* Probing functions */
gboolean
gst_v4l2_is_h264_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
  gboolean ret = FALSE;
  GstCaps *codec_caps;

  codec_caps = gst_static_caps_get (&src_template_caps);

  if (gst_caps_is_subset (sink_caps, gst_v4l2_object_get_raw_caps ())
      && gst_caps_can_intersect (src_caps, codec_caps))
    ret = TRUE;

  gst_caps_unref (codec_caps);

  return ret;
}
Example #10
static void
_caps_match (GstPad * sinkpad, const gchar * capsname)
{
  GstCaps *caps, *sinkcaps;
  gchar *name;

  caps = gst_caps_from_string (capsname);
  sinkcaps = gst_pad_query_caps (sinkpad, NULL);
  fail_unless (sinkcaps != NULL);
  name = gst_caps_to_string (sinkcaps);
  fail_unless (gst_caps_is_subset (sinkcaps, caps),
      "caps ('%s') are not a subset of ('%s')", name, capsname);
  g_free (name);
  gst_caps_unref (sinkcaps);
  gst_caps_unref (caps);
}
Example #11
static gboolean
gst_dshowvideosrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
    HRESULT hres;
    IPin *input_pin = NULL;
    IPin *output_pin = NULL;
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
    GstStructure *s = gst_caps_get_structure (caps, 0);
    OAFilterState ds_graph_state;
    GstCaps *current_caps;

    /* search for the negotiated caps in our caps list to get its index and the corresponding mediatype */
    if (gst_caps_is_subset (caps, src->caps)) {
        guint i = 0;
        gint res = -1;

        hres = src->media_control->GetState(0, &ds_graph_state);
        if(ds_graph_state == State_Running) {
            GST_INFO("Setting caps while DirectShow graph is already running");
            current_caps = gst_pad_get_current_caps(GST_BASE_SRC_PAD(src));

            if(gst_caps_is_equal(current_caps, caps)) {
                /* no need to set caps, just return */
                GST_INFO("Not resetting caps");
                gst_caps_unref(current_caps);
                return TRUE;
            }
            else {
                /* stop graph and disconnect filters so new caps can be set */
                GST_INFO("Different caps, stopping DirectShow graph");
                hres = src->media_control->Stop();
                hres = src->media_control->GetState(2000, &ds_graph_state);
                if(hres != S_OK) {
                    GST_ERROR("Could not stop DirectShow graph. Cannot renegoiate pins.");
                    goto error;
                }
                gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
                                               &input_pin);
                if (!input_pin) {
                    GST_ERROR ("Can't get input pin from our dshow fakesink");
                    goto error;
                }
                input_pin->ConnectedTo(&output_pin);
                hres = input_pin->Disconnect();
                hres = output_pin->Disconnect();
                input_pin->Release();
                output_pin->Release();
            }
            gst_caps_unref(current_caps);
        }

        for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
            GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

            if (gst_caps_is_subset (caps, capstmp)) {
                res = i;
            }
            gst_caps_unref (capstmp);
        }

        if (res != -1 && src->pins_mediatypes) {
            /* get the corresponding media type and build the dshow graph */
            GList *type_pin_mediatype = g_list_nth (src->pins_mediatypes, res);

            if (type_pin_mediatype) {
                GstCapturePinMediaType *pin_mediatype =
                    (GstCapturePinMediaType *) type_pin_mediatype->data;
                gchar *src_caps_string = NULL;
                const gchar *format_string = NULL;

                /* retrieve the desired video size */
                VIDEOINFOHEADER *video_info = NULL;
                gint width = 0;
                gint height = 0;
                gint numerator = 0;
                gint denominator = 0;
                gst_structure_get_int (s, "width", &width);
                gst_structure_get_int (s, "height", &height);
                gst_structure_get_fraction (s, "framerate", &numerator, &denominator);

                /* check that the desired video size is valid with respect to the granularity */
                /* This check can be removed once GST_TYPE_INT_RANGE_STEP exists */
                /* See remarks in the gst_dshow_new_video_caps function */
                if (pin_mediatype->granularityWidth != 0
                        && width % pin_mediatype->granularityWidth != 0)
                    g_warning ("your desired video size is not valid : %d mod %d !=0\n",
                               width, pin_mediatype->granularityWidth);
                if (pin_mediatype->granularityHeight != 0
                        && height % pin_mediatype->granularityHeight != 0)
                    g_warning ("your desired video size is not valid : %d mod %d !=0\n",
                               height, pin_mediatype->granularityHeight);

                /* update mediatype */
                video_info = (VIDEOINFOHEADER *) pin_mediatype->mediatype->pbFormat;
                video_info->bmiHeader.biWidth = width;
                video_info->bmiHeader.biHeight = height;
                video_info->AvgTimePerFrame =
                    (LONGLONG) (10000000 * denominator / (double) numerator);
                video_info->bmiHeader.biSizeImage = DIBSIZE (video_info->bmiHeader);
                pin_mediatype->mediatype->lSampleSize = DIBSIZE (video_info->bmiHeader);

                src->dshow_fakesink->gst_set_media_type (pin_mediatype->mediatype);
                src->dshow_fakesink->gst_set_buffer_callback (
                    (push_buffer_func) gst_dshowvideosrc_push_buffer, src);

                gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
                                               &input_pin);
                if (!input_pin) {
                    GST_ERROR ("Can't get input pin from our dshow fakesink");
                    goto error;
                }

                hres = src->filter_graph->ConnectDirect (pin_mediatype->capture_pin,
                        input_pin, pin_mediatype->mediatype);
                input_pin->Release ();

                if (hres != S_OK) {
                    GST_ERROR
                    ("Can't connect capture filter with fakesink filter (error=0x%x)",
                     hres);
                    goto error;
                }

                /* save the negotiated width and height */
                gst_structure_get_int (s, "width", &src->width);
                gst_structure_get_int (s, "height", &src->height);

                src->is_rgb = FALSE;
                format_string = gst_structure_get_string (s, "format");
                if(format_string) {
                    if(!strcmp(format_string, "BGR")) {
                        src->is_rgb = TRUE;
                    }
                    else {
                        src->is_rgb = FALSE;
                    }
                }

                hres = src->media_control->Run();

                hres = src->media_control->GetState(5000, &ds_graph_state);
                if(hres != S_OK || ds_graph_state != State_Running) {
                    GST_ERROR("Could not run graph");
                    goto error;
                }
            }
        }
    }

    return TRUE;

error:
    return FALSE;
}
Example #12
/* Note: lots of this code here is also in the videotestsrc.c unit test */
void  test_rgb_to_rgb()
{
  const struct
  {
    const gchar *pattern_name;
    gint pattern_enum;
    guint8 r_expected;
    guint8 g_expected;
    guint8 b_expected;
  } test_patterns[] = {
    {
    "white", 3, 0xff, 0xff, 0xff}, {
    "red", 4, 0xff, 0x00, 0x00}, {
    "green", 5, 0x00, 0xff, 0x00}, {
    "blue", 6, 0x00, 0x00, 0xff}, {
    "black", 2, 0x00, 0x00, 0x00}
  };
  GstElement *pipeline, *src, *filter1, *csp, *filter2, *sink;
  const GstCaps *template_caps;
  GstBuffer *buf = NULL;
  GstPad *srcpad;
  GList *conversions, *l;
  gint p;

  /* test check function */
  fail_unless (right_shift_colour (0x00ff0000, 0x11223344) == 0x22);

  pipeline = gst_pipeline_new ("pipeline");
  src = gst_check_setup_element ("videotestsrc");
  filter1 = gst_check_setup_element ("capsfilter");
  csp = gst_check_setup_element ("ffmpegcolorspace");
  filter2 = gst_element_factory_make ("capsfilter", "to_filter");
  sink = gst_check_setup_element ("fakesink");

  gst_bin_add_many (GST_BIN (pipeline), src, filter1, csp, filter2, sink, NULL);

  fail_unless (gst_element_link (src, filter1));
  fail_unless (gst_element_link (filter1, csp));
  fail_unless (gst_element_link (csp, filter2));
  fail_unless (gst_element_link (filter2, sink));

  srcpad = gst_element_get_pad (src, "src");
  template_caps = gst_pad_get_pad_template_caps (srcpad);
  gst_object_unref (srcpad);

  g_object_set (sink, "signal-handoffs", TRUE, NULL);
  g_signal_connect (sink, "preroll-handoff", G_CALLBACK (got_buf_cb), &buf);

  GST_LOG ("videotestsrc src template caps: %" GST_PTR_FORMAT, template_caps);

  conversions = create_rgb_conversions ();

  for (l = conversions; l != NULL; l = l->next) {
    RGBConversion *conv = (RGBConversion *) l->data;

    /* does videotestsrc support the from_caps? */
    if (!gst_caps_is_subset (conv->from_caps, template_caps)) {
      GST_DEBUG ("videotestsrc doesn't support from_caps %" GST_PTR_FORMAT,
          conv->from_caps);
      continue;
    }

    /* caps are supported, let's run some tests then ... */
    for (p = 0; p < G_N_ELEMENTS (test_patterns); ++p) {
      GstStateChangeReturn state_ret;
      RGBFormat *from = &conv->from_fmt;
      RGBFormat *to = &conv->to_fmt;

      /* trick compiler into thinking from is used, might throw warning
       * otherwise if the debugging system is disabled */
      fail_unless (from != NULL);

      gst_element_set_state (pipeline, GST_STATE_NULL);

      g_object_set (src, "pattern", test_patterns[p].pattern_enum, NULL);

      GST_INFO ("%5s %u/%u %08x %08x %08x %08x %u => "
          "%5s %u/%u %08x %08x %08x %08x %u, pattern=%s",
          from->nick, from->bpp, from->depth, from->red_mask,
          from->green_mask, from->blue_mask, from->alpha_mask,
          from->endianness, to->nick, to->bpp, to->depth, to->red_mask,
          to->green_mask, to->blue_mask, to->alpha_mask, to->endianness,
          test_patterns[p].pattern_name);

      /* now get videotestsrc to produce a buffer with the given caps */
      g_object_set (filter1, "caps", conv->from_caps, NULL);

      /* ... and force ffmpegcolorspace to convert to our target caps */
      g_object_set (filter2, "caps", conv->to_caps, NULL);

      state_ret = gst_element_set_state (pipeline, GST_STATE_PAUSED);
      if (state_ret == GST_STATE_CHANGE_FAILURE) {
        GstMessage *msg;
        GError *err = NULL;

        msg = gst_bus_poll (GST_ELEMENT_BUS (pipeline), GST_MESSAGE_ERROR, 0);
        fail_if (msg == NULL, "expected ERROR message on the bus");
        fail_unless (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR);
        gst_message_parse_error (msg, &err, NULL);
        fail_unless (err != NULL);
        if (msg->src == GST_OBJECT_CAST (src) &&
            err->code == GST_STREAM_ERROR_FORMAT) {
          GST_DEBUG ("ffmpegcolorspace does not support this conversion");
          gst_message_unref (msg);
          g_error_free (err);
          continue;
        }
        fail_unless (state_ret != GST_STATE_CHANGE_FAILURE,
            "pipeline _set_state() to PAUSED failed: %s", err->message);
      }

      state_ret = gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
      fail_unless (state_ret == GST_STATE_CHANGE_SUCCESS,
          "pipeline failed going to PAUSED state");

      state_ret = gst_element_set_state (pipeline, GST_STATE_NULL);
      fail_unless (state_ret == GST_STATE_CHANGE_SUCCESS);

      fail_unless (buf != NULL);

      /* check buffer caps */
      {
        GstStructure *s;
        gint v;

        fail_unless (GST_BUFFER_CAPS (buf) != NULL);
        s = gst_caps_get_structure (GST_BUFFER_CAPS (buf), 0);
        fail_unless (gst_structure_get_int (s, "bpp", &v));
        fail_unless_equals_int (v, to->bpp);
        fail_unless (gst_structure_get_int (s, "depth", &v));
        fail_unless_equals_int (v, to->depth);
        fail_unless (gst_structure_get_int (s, "red_mask", &v));
        fail_unless_equals_int (v, to->red_mask);
        fail_unless (gst_structure_get_int (s, "green_mask", &v));
        fail_unless_equals_int (v, to->green_mask);
        fail_unless (gst_structure_get_int (s, "blue_mask", &v));
        fail_unless_equals_int (v, to->blue_mask);
        /* there mustn't be an alpha_mask if there's no alpha component */
        if (to->depth == 32) {
          fail_unless (gst_structure_get_int (s, "alpha_mask", &v));
          fail_unless_equals_int (v, to->alpha_mask);
        } else {
          fail_unless (gst_structure_get_value (s, "alpha_mask") == NULL);
        }
      }

      /* now check the top-left pixel */
      check_rgb_buf (GST_BUFFER_DATA (buf), to->red_mask,
          to->green_mask, to->blue_mask, to->alpha_mask,
          test_patterns[p].r_expected, test_patterns[p].g_expected,
          test_patterns[p].b_expected, to->endianness, to->bpp, to->depth);

      gst_buffer_unref (buf);
      buf = NULL;
    }
  }

  g_list_foreach (conversions, (GFunc) rgb_conversion_free, NULL);
  g_list_free (conversions);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Example #13
static gboolean
gst_directsound_sink_acceptcaps (GstBaseSink * sink, GstQuery * query)
{
  GstDirectSoundSink *dsink = GST_DIRECTSOUND_SINK (sink);
  GstPad *pad;
  GstCaps *caps;
  GstCaps *pad_caps;
  GstStructure *st;
  gboolean ret = FALSE;
  GstAudioRingBufferSpec spec = { 0 };

  if (G_UNLIKELY (dsink == NULL))
    return FALSE;

  pad = sink->sinkpad;

  gst_query_parse_accept_caps (query, &caps);
  GST_DEBUG_OBJECT (pad, "caps %" GST_PTR_FORMAT, caps);

  pad_caps = gst_pad_query_caps (pad, NULL);
  if (pad_caps) {
    gboolean cret = gst_caps_is_subset (caps, pad_caps);
    gst_caps_unref (pad_caps);
    if (!cret) {
      GST_DEBUG_OBJECT (dsink,
          "Caps are not a subset of the pad caps, not accepting caps");
      goto done;
    }
  }

  /* If we've not got fixed caps, creating a stream might fail, so let's just
   * return from here with default acceptcaps behaviour */
  if (!gst_caps_is_fixed (caps)) {
    GST_DEBUG_OBJECT (dsink, "Caps are not fixed, not accepting caps");
    goto done;
  }

  spec.latency_time = GST_SECOND;
  if (!gst_audio_ring_buffer_parse_caps (&spec, caps)) {
    GST_DEBUG_OBJECT (dsink, "Failed to parse caps, not accepting");
    goto done;
  }

  /* Make sure input is framed (one frame per buffer) and can be payloaded */
  switch (spec.type) {
    case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_AC3:
    case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_DTS:
    {
      gboolean framed = FALSE, parsed = FALSE;
      st = gst_caps_get_structure (caps, 0);

      gst_structure_get_boolean (st, "framed", &framed);
      gst_structure_get_boolean (st, "parsed", &parsed);
      if ((!framed && !parsed) || gst_audio_iec61937_frame_size (&spec) <= 0) {
        GST_DEBUG_OBJECT (dsink, "Wrong AC3/DTS caps, not accepting");
        goto done;
      }
    }
    default:
      break;
  }
  ret = TRUE;
  GST_DEBUG_OBJECT (dsink, "Accepting caps");

done:
  gst_query_set_accept_caps_result (query, ret);
  return TRUE;
}
Example #14
static gboolean
gst_dshowvideosrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  HRESULT hres;
  IPin *input_pin = NULL;
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
  GstStructure *s = gst_caps_get_structure (caps, 0);
  GstCaps *current_caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (bsrc));

  if (current_caps) {
    if (gst_caps_is_equal (caps, current_caps)) {
      gst_caps_unref (current_caps);
      return TRUE;
    }
    gst_caps_unref (current_caps);
  }

  /* Same remark as in gstdshowaudiosrc. */
  gboolean was_running = src->is_running;
  if (was_running) {
    HRESULT hres = src->media_filter->Stop ();
    if (hres != S_OK) {
      GST_ERROR ("Can't STOP the directshow capture graph (error=0x%x)", hres);
      return FALSE;
    }
    src->is_running = FALSE;
  }

  /* search for the negotiated caps in our caps list to get its index and the corresponding mediatype */
  if (gst_caps_is_subset (caps, src->caps)) {
    guint i = 0;
    gint res = -1;

    for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
      GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

      if (gst_caps_is_subset (caps, capstmp)) {
        res = i;
      }
      gst_caps_unref (capstmp);
    }

    if (res != -1 && src->pins_mediatypes) {
      /* get the corresponding media type and build the dshow graph */
      GList *type_pin_mediatype = g_list_nth (src->pins_mediatypes, res);

      if (type_pin_mediatype) {
        GstCapturePinMediaType *pin_mediatype =
            (GstCapturePinMediaType *) type_pin_mediatype->data;
        gchar *caps_string = NULL;
        gchar *src_caps_string = NULL;

        /* retrieve the desired video size */
        VIDEOINFOHEADER *video_info = NULL;
        gint width = 0;
        gint height = 0;
        gint numerator = 0;
        gint denominator = 0;
        gst_structure_get_int (s, "width", &width);
        gst_structure_get_int (s, "height", &height);
        gst_structure_get_fraction (s, "framerate", &numerator, &denominator);

        /* check that the desired video size is valid with respect to the granularity */
        /* This check can be removed once GST_TYPE_INT_RANGE_STEP exists */
        /* See remarks in the gst_dshow_new_video_caps function */
        if (pin_mediatype->granularityWidth != 0
            && width % pin_mediatype->granularityWidth != 0)
          g_warning ("your desired video size is not valid : %d mod %d !=0\n",
              width, pin_mediatype->granularityWidth);
        if (pin_mediatype->granularityHeight != 0
            && height % pin_mediatype->granularityHeight != 0)
          g_warning ("your desired video size is not valid : %d mod %d !=0\n",
              height, pin_mediatype->granularityHeight);

        /* update mediatype */
        video_info = (VIDEOINFOHEADER *) pin_mediatype->mediatype->pbFormat;
        video_info->bmiHeader.biWidth = width;
        video_info->bmiHeader.biHeight = height;
        video_info->AvgTimePerFrame =
            (LONGLONG) (10000000 * denominator / (double) numerator);
        video_info->bmiHeader.biSizeImage = DIBSIZE (video_info->bmiHeader);
        pin_mediatype->mediatype->lSampleSize = DIBSIZE (video_info->bmiHeader);

        src->dshow_fakesink->gst_set_media_type (pin_mediatype->mediatype);
        src->dshow_fakesink->gst_set_buffer_callback (
            (push_buffer_func) gst_dshowvideosrc_push_buffer, src);

        gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
            &input_pin);
        if (!input_pin) {
          GST_ERROR ("Can't get input pin from our dshow fakesink");
          goto error;
        }

        if (gst_dshow_is_pin_connected (pin_mediatype->capture_pin)) {
          GST_DEBUG_OBJECT (src,
              "capture_pin already connected, disconnecting");
          src->filter_graph->Disconnect (pin_mediatype->capture_pin);
        }

        if (gst_dshow_is_pin_connected (input_pin)) {
          GST_DEBUG_OBJECT (src, "input_pin already connected, disconnecting");
          src->filter_graph->Disconnect (input_pin);
        }

        hres = src->filter_graph->ConnectDirect (pin_mediatype->capture_pin,
            input_pin, pin_mediatype->mediatype);
        input_pin->Release ();

        if (hres != S_OK) {
          GST_ERROR
              ("Can't connect capture filter with fakesink filter (error=0x%x)",
              hres);
          goto error;
        }

        /* save the negotiated width and height */
        gst_structure_get_int (s, "width", &src->width);
        gst_structure_get_int (s, "height", &src->height);

	GstVideoInfo info;
	gst_video_info_from_caps(&info, caps);
	switch (GST_VIDEO_INFO_FORMAT(&info)) {
          case GST_VIDEO_FORMAT_RGB:
          case GST_VIDEO_FORMAT_BGR:
	    src->is_rgb = TRUE;
	    break;
	default:
	  src->is_rgb = FALSE;
	  break;
	}
      }
    }
  }

  if (was_running) {
    HRESULT hres = src->media_filter->Run (0);
    if (hres != S_OK) {
      GST_ERROR ("Can't RUN the directshow capture graph (error=0x%x)", hres);
      return FALSE;
    }
    src->is_running = TRUE;
  }

  return TRUE;

error:
  return FALSE;
}
Example #15
static gboolean
gst_interleave_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstInterleave *self;

  g_return_val_if_fail (GST_IS_INTERLEAVE_PAD (pad), FALSE);

  self = GST_INTERLEAVE (gst_pad_get_parent (pad));

  /* First caps that are set on a sink pad are used as output caps */
  /* TODO: handle caps changes */
  if (self->sinkcaps && !gst_caps_is_subset (caps, self->sinkcaps)) {
    goto cannot_change_caps;
  } else {
    GstCaps *srccaps;

    GstStructure *s;

    gboolean res;

    s = gst_caps_get_structure (caps, 0);

    if (!gst_structure_get_int (s, "width", &self->width))
      goto no_width;

    if (!gst_structure_get_int (s, "rate", &self->rate))
      goto no_rate;

    gst_interleave_set_process_function (self);

    if (gst_structure_has_field (s, "channel-positions")) {
      const GValue *pos_array;

      pos_array = gst_structure_get_value (s, "channel-positions");
      if (GST_VALUE_HOLDS_ARRAY (pos_array)
          && gst_value_array_get_size (pos_array) == 1) {
        const GValue *pos = gst_value_array_get_value (pos_array, 0);

        GValue *apos = g_value_array_get_nth (self->input_channel_positions,
            GST_INTERLEAVE_PAD_CAST (pad)->channel);

        g_value_set_enum (apos, g_value_get_enum (pos));
      }
    }

    srccaps = gst_caps_copy (caps);
    s = gst_caps_get_structure (srccaps, 0);

    gst_structure_set (s, "channels", G_TYPE_INT, self->channels, NULL);
    gst_interleave_set_channel_positions (self, s);

    res = gst_pad_set_caps (self->src, srccaps);
    gst_caps_unref (srccaps);

    if (!res)
      goto src_did_not_accept;
  }

  if (!self->sinkcaps) {
    GstCaps *sinkcaps = gst_caps_copy (caps);

    GstStructure *s = gst_caps_get_structure (sinkcaps, 0);

    gst_structure_remove_field (s, "channel-positions");

    gst_caps_replace (&self->sinkcaps, sinkcaps);

    gst_caps_unref (sinkcaps);
  }

  gst_object_unref (self);

  return TRUE;

cannot_change_caps:
  {
    GST_DEBUG_OBJECT (self, "caps of %" GST_PTR_FORMAT " already set, can't "
        "change", self->sinkcaps);
    gst_object_unref (self);
    return FALSE;
  }
src_did_not_accept:
  {
    GST_DEBUG_OBJECT (self, "src did not accept setcaps()");
    gst_object_unref (self);
    return FALSE;
  }
no_width:
  {
    GST_WARNING_OBJECT (self, "caps did not have width: %" GST_PTR_FORMAT,
        caps);
    gst_object_unref (self);
    return FALSE;
  }
no_rate:
  {
    GST_WARNING_OBJECT (self, "caps did not have rate: %" GST_PTR_FORMAT, caps);
    gst_object_unref (self);
    return FALSE;
  }
}
Example #16
static gboolean
stereosplit_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
  GstGLStereoSplit *split = GST_GL_STEREOSPLIT (parent);

  GST_DEBUG_OBJECT (split, "sink query %s",
      gst_query_type_get_name (GST_QUERY_TYPE (query)));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONTEXT:
    {
      const gchar *context_type;
      GstContext *context, *old_context;
      gboolean ret;

      ret = gst_gl_handle_context_query ((GstElement *) split, query,
          &split->display, &split->other_context);
      if (split->display)
        gst_gl_display_filter_gl_api (split->display, SUPPORTED_GL_APIS);
      gst_query_parse_context_type (query, &context_type);

      if (g_strcmp0 (context_type, "gst.gl.local_context") == 0) {
        GstStructure *s;

        gst_query_parse_context (query, &old_context);

        if (old_context)
          context = gst_context_copy (old_context);
        else
          context = gst_context_new ("gst.gl.local_context", FALSE);

        s = gst_context_writable_structure (context);
        gst_structure_set (s, "context", GST_GL_TYPE_CONTEXT, split->context,
            NULL);
        gst_query_set_context (query, context);
        gst_context_unref (context);

        ret = split->context != NULL;
      }
      GST_LOG_OBJECT (split, "context query of type %s %i", context_type, ret);

      if (ret)
        return ret;

      return gst_pad_query_default (pad, parent, query);
    }
    case GST_QUERY_ALLOCATION:
    {
      return stereosplit_propose_allocation (split, query);
    }
    case GST_QUERY_ACCEPT_CAPS:
    {
      GstCaps *possible, *caps;
      gboolean allowed;

      gst_query_parse_accept_caps (query, &caps);

      if (!(possible = gst_pad_query_caps (split->sink_pad, caps)))
        return FALSE;

      allowed = gst_caps_is_subset (caps, possible);
      gst_caps_unref (possible);

      gst_query_set_accept_caps_result (query, allowed);
      return allowed;
    }
    case GST_QUERY_CAPS:
    {
      GstCaps *filter, *left, *right, *combined, *ret, *templ_caps;

      gst_query_parse_caps (query, &filter);

      /* Calculate what downstream can collectively support */
      if (!(left = gst_pad_peer_query_caps (split->left_pad, NULL)))
        return FALSE;
      if (!(right = gst_pad_peer_query_caps (split->right_pad, NULL)))
        return FALSE;

      /* Strip out multiview mode and flags that might break the
       * intersection, since we can convert.
       * We could keep downstream preferred flip/flopping and list
       * separated as preferred in the future which might
       * theoretically allow us an easier conversion, but it's not essential
       */
      left = strip_mview_fields (left, GST_VIDEO_MULTIVIEW_FLAGS_NONE);
      right = strip_mview_fields (right, GST_VIDEO_MULTIVIEW_FLAGS_NONE);

      combined = gst_caps_intersect (left, right);
      gst_caps_unref (left);
      gst_caps_unref (right);

      /* Intersect peer caps with our template formats */
      templ_caps = gst_pad_get_pad_template_caps (split->left_pad);
      ret =
          gst_caps_intersect_full (combined, templ_caps,
          GST_CAPS_INTERSECT_FIRST);
      gst_caps_unref (templ_caps);

      gst_caps_unref (combined);
      combined = ret;

      if (!combined || gst_caps_is_empty (combined)) {
        gst_caps_unref (combined);
        return FALSE;
      }

      /* Convert from the src pad caps to input formats we support */
      ret = stereosplit_transform_caps (split, GST_PAD_SRC, combined, filter);
      gst_caps_unref (combined);
      combined = ret;

      /* Intersect with the sink pad template then */
      templ_caps = gst_pad_get_pad_template_caps (split->sink_pad);
      ret =
          gst_caps_intersect_full (combined, templ_caps,
          GST_CAPS_INTERSECT_FIRST);
      gst_caps_unref (templ_caps);

      GST_LOG_OBJECT (split, "Returning sink pad caps %" GST_PTR_FORMAT, ret);

      gst_query_set_caps_result (query, ret);
      return !gst_caps_is_empty (ret);
    }
    default:
      return gst_pad_query_default (pad, parent, query);
  }
}
Example #17
bool Caps::isSubsetOf(const CapsPtr & superset) const
{
    return gst_caps_is_subset(object<GstCaps>(), superset);
}
Example #18
static gboolean
gst_interleave_sink_setcaps (GstInterleave * self, GstPad * pad,
    const GstCaps * caps, const GstAudioInfo * info)
{
  g_return_val_if_fail (GST_IS_INTERLEAVE_PAD (pad), FALSE);

  /* TODO: handle caps changes */
  if (self->sinkcaps && !gst_caps_is_subset (caps, self->sinkcaps)) {
    goto cannot_change_caps;
  } else {
    GstCaps *srccaps;
    GstStructure *s;
    gboolean res;

    self->width = GST_AUDIO_INFO_WIDTH (info);
    self->rate = GST_AUDIO_INFO_RATE (info);

    gst_interleave_set_process_function (self);

    srccaps = gst_caps_copy (caps);
    s = gst_caps_get_structure (srccaps, 0);

    gst_structure_remove_field (s, "channel-mask");

    gst_structure_set (s, "channels", G_TYPE_INT, self->channels, NULL);
    gst_interleave_set_channel_positions (self, s);

    gst_pad_set_active (self->src, TRUE);
    res = gst_pad_set_caps (self->src, srccaps);
    gst_caps_unref (srccaps);

    if (!res)
      goto src_did_not_accept;
  }

  if (!self->sinkcaps) {
    GstCaps *sinkcaps = gst_caps_copy (caps);
    GstStructure *s = gst_caps_get_structure (sinkcaps, 0);

    gst_structure_remove_field (s, "channel-mask");

    GST_DEBUG_OBJECT (self, "setting sinkcaps %" GST_PTR_FORMAT, sinkcaps);

    gst_caps_replace (&self->sinkcaps, sinkcaps);

    gst_caps_unref (sinkcaps);
  }

  return TRUE;

cannot_change_caps:
  {
    GST_WARNING_OBJECT (self, "caps of %" GST_PTR_FORMAT " already set, can't "
        "change", self->sinkcaps);
    return FALSE;
  }
src_did_not_accept:
  {
    GST_WARNING_OBJECT (self, "src did not accept setcaps()");
    return FALSE;
  }
}
Example #19
static gboolean
gst_dshowaudiosrc_prepare (GstAudioSrc * asrc, GstRingBufferSpec * spec)
{
  HRESULT hres;
  IGstDshowInterface *srcinterface = NULL;
  IPin *input_pin = NULL;
  GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (asrc);

  /* search for the negotiated caps in our caps list to get its index and the corresponding mediatype */
  if (gst_caps_is_subset (spec->caps, src->caps)) {
    guint i = 0;
    gint res = -1;

    for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
      GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

      if (gst_caps_is_subset (spec->caps, capstmp)) {
        res = i;
      }
      gst_caps_unref (capstmp);
    }

    if (res != -1 && src->pins_mediatypes) {
      /* get the corresponding media type and build the dshow graph */
      GstCapturePinMediaType *pin_mediatype = NULL;
      GList *type = g_list_nth (src->pins_mediatypes, res);

      if (type) {
        pin_mediatype = (GstCapturePinMediaType *) type->data;

        hres =
            IBaseFilter_QueryInterface (src->dshow_fakesink,
            &IID_IGstDshowInterface, (void **) &srcinterface);
        if (hres != S_OK || !srcinterface) {
          GST_CAT_ERROR (dshowaudiosrc_debug,
              "Can't get IGstDshowInterface interface from our dshow fakesink filter (error=%d)",
              hres);
          goto error;
        }

        IGstDshowInterface_gst_set_media_type (srcinterface,
            pin_mediatype->mediatype);
        IGstDshowInterface_gst_set_buffer_callback (srcinterface,
            (byte *) gst_dshowaudiosrc_push_buffer, (byte *) src);

        if (srcinterface) {
          IGstDshowInterface_Release (srcinterface);
        }

        gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
            &input_pin);
        if (!input_pin) {
          GST_CAT_ERROR (dshowaudiosrc_debug,
              "Can't get input pin from our directshow fakesink filter");
          goto error;
        }

        hres =
            IFilterGraph_ConnectDirect (src->filter_graph,
            pin_mediatype->capture_pin, input_pin, NULL);
        IPin_Release (input_pin);

        if (hres != S_OK) {
          GST_CAT_ERROR (dshowaudiosrc_debug,
              "Can't connect capture filter with fakesink filter (error=%d)",
              hres);
          goto error;
        }

        spec->segsize = spec->rate * spec->channels;
        spec->segtotal = 1;
      }
    }
  }

  return TRUE;

error:
  if (srcinterface) {
    IGstDshowInterface_Release (srcinterface);
  }

  return FALSE;
}
Example #20
/*
 * Method: subset?(caps)
 * caps: another Gst::Caps.
 *
 * Checks if all caps represented by self are also represented by the
 * given caps.
 * This method does not work reliably if optional properties for caps
 * are included on one caps and omitted on the other.
 *
 * Returns: whether self is a subset of the given caps.
 */
static VALUE
rg_subset_p (VALUE self, VALUE caps)
{
    return CBOOL2RVAL (gst_caps_is_subset (RGST_CAPS (self), RGST_CAPS (caps)));
}
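For reference, a small C sketch of the subset relation that the binding above exposes. The caps strings are made up, and both carry the same fields so the optional-property caveat from the comment does not apply.

#include <gst/gst.h>

static void
example_subset_check (void)
{
  /* Fixed caps whose values fall inside the ranges below. */
  GstCaps *sub = gst_caps_from_string ("audio/x-raw, rate=44100, channels=2");
  GstCaps *super =
      gst_caps_from_string ("audio/x-raw, rate=[ 8000, 96000 ], channels=[ 1, 2 ]");

  /* TRUE: every caps represented by sub is also represented by super. */
  g_assert (gst_caps_is_subset (sub, super));
  /* FALSE: super covers far more than the single fixed combination. */
  g_assert (!gst_caps_is_subset (super, sub));

  gst_caps_unref (sub);
  gst_caps_unref (super);
}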
Example #21
gboolean
gst_ks_video_device_set_caps (GstKsVideoDevice * self, GstCaps * caps)
{
  GstKsVideoDevicePrivate *priv = GST_KS_VIDEO_DEVICE_GET_PRIVATE (self);
  GList *cur;
  GstStructure *s;

  /* State to be committed on success */
  KsVideoMediaType *media_type = NULL;
  guint width, height, fps_n, fps_d;
  HANDLE pin_handle = INVALID_HANDLE_VALUE;

  /* Reset? */
  if (caps == NULL) {
    gst_ks_video_device_reset_caps (self);
    return TRUE;
  }

  /* Validate the caps */
  if (!gst_caps_is_subset (caps, priv->cached_caps)) {
    gchar *string_caps = gst_caps_to_string (caps);
    gchar *string_c_caps = gst_caps_to_string (priv->cached_caps);

    GST_ERROR ("caps (%s) is not a subset of device caps (%s)",
        string_caps, string_c_caps);

    g_free (string_caps);
    g_free (string_c_caps);

    goto error;
  }

  for (cur = priv->media_types; cur != NULL; cur = cur->next) {
    KsVideoMediaType *mt = cur->data;

    if (gst_caps_is_subset (caps, mt->translated_caps)) {
      media_type = ks_video_media_type_dup (mt);
      break;
    }
  }

  if (media_type == NULL)
    goto error;

  s = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_int (s, "width", &width) ||
      !gst_structure_get_int (s, "height", &height) ||
      !gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d)) {
    GST_ERROR ("Failed to get width/height/fps");
    goto error;
  }

  if (!ks_video_fixate_media_type (media_type->range,
          media_type->format, width, height, fps_n, fps_d))
    goto error;

  if (priv->cur_media_type != NULL) {
    if (media_type->format_size == priv->cur_media_type->format_size &&
        memcmp (media_type->format, priv->cur_media_type->format,
            priv->cur_media_type->format_size) == 0) {
      GST_DEBUG ("%s: re-using existing pin", G_STRFUNC);
      goto same_caps;
    } else {
      GST_DEBUG ("%s: re-creating pin", G_STRFUNC);
    }
  }

  gst_ks_video_device_close_current_pin (self);

  pin_handle = gst_ks_video_device_create_pin (self, media_type,
      &priv->num_requests);
  if (!ks_is_valid_handle (pin_handle)) {
    /* Re-create the old pin */
    if (priv->cur_media_type != NULL)
      priv->pin_handle = gst_ks_video_device_create_pin (self,
          priv->cur_media_type, &priv->num_requests);
    goto error;
  }

  /* Commit state: no turning back past this */
  gst_ks_video_device_reset_caps (self);

  priv->cur_media_type = media_type;
  priv->width = width;
  priv->height = height;
  priv->fps_n = fps_n;
  priv->fps_d = fps_d;

  if (gst_structure_has_name (s, "video/x-raw-rgb"))
    priv->rgb_swap_buf = g_malloc (media_type->sample_size / priv->height);
  else
    priv->rgb_swap_buf = NULL;

  priv->is_mjpeg = gst_structure_has_name (s, "image/jpeg");

  priv->pin_handle = pin_handle;

  priv->cur_fixed_caps = gst_caps_copy (caps);

  return TRUE;

error:
  {
    ks_video_media_type_free (media_type);
    return FALSE;
  }
same_caps:
  {
    ks_video_media_type_free (media_type);
    return TRUE;
  }
}
Example #22
static gboolean
gst_dshowaudiosrc_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
{
  HRESULT hres;
  IPin *input_pin = NULL;
  GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (asrc);
  GstCaps *current_caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (asrc));

  if (current_caps) {
    if (gst_caps_is_equal (spec->caps, current_caps)) {
      gst_caps_unref (current_caps);
      return TRUE;
    }
    gst_caps_unref (current_caps);
  }
  /* In 1.0, prepare() seems to be called in the PLAYING state. Most
     of the time you can't do much on a running graph. */

  gboolean was_running = src->is_running;
  if (was_running) {
    HRESULT hres = src->media_filter->Stop ();
    if (hres != S_OK) {
      GST_ERROR("Can't STOP the directshow capture graph for preparing (error=0x%x)", hres);
      return FALSE;
    }
    src->is_running = FALSE;
  }

  /* search for the negotiated caps in our caps list to get its index and the corresponding mediatype */
  if (gst_caps_is_subset (spec->caps, src->caps)) {
    guint i = 0;
    gint res = -1;

    for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
      GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

      if (gst_caps_is_subset (spec->caps, capstmp)) {
        res = i;
      }
      gst_caps_unref (capstmp);
    }

    if (res != -1 && src->pins_mediatypes) {
      /* get the corresponding media type and build the dshow graph */
      GstCapturePinMediaType *pin_mediatype = NULL;
      GList *type = g_list_nth (src->pins_mediatypes, res);

      if (type) {
        pin_mediatype = (GstCapturePinMediaType *) type->data;

        src->dshow_fakesink->gst_set_media_type (pin_mediatype->mediatype);
        src->dshow_fakesink->gst_set_buffer_callback (
            (push_buffer_func) gst_dshowaudiosrc_push_buffer, src);

        gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
            &input_pin);
        if (!input_pin) {
          GST_ERROR ("Can't get input pin from our directshow fakesink filter");
          goto error;
        }

        spec->segsize = (gint) (spec->info.bpf * spec->info.rate * spec->latency_time /
            GST_MSECOND);
        spec->segtotal = (gint) ((gfloat) spec->buffer_time /
            (gfloat) spec->latency_time + 0.5);
        if (!gst_dshow_configure_latency (pin_mediatype->capture_pin,
            spec->segsize))
        {
          GST_WARNING ("Could not change capture latency");
          spec->segsize = spec->info.rate * spec->info.channels;
          spec->segtotal = 2;
        };
        GST_INFO ("Configuring with segsize:%d segtotal:%d", spec->segsize, spec->segtotal);

        if (gst_dshow_is_pin_connected (pin_mediatype->capture_pin)) {
          GST_DEBUG_OBJECT (src,
              "capture_pin already connected, disconnecting");
          src->filter_graph->Disconnect (pin_mediatype->capture_pin);
        }

        if (gst_dshow_is_pin_connected (input_pin)) {
          GST_DEBUG_OBJECT (src, "input_pin already connected, disconnecting");
          src->filter_graph->Disconnect (input_pin);
        }

        hres = src->filter_graph->ConnectDirect (pin_mediatype->capture_pin,
            input_pin, NULL);
        input_pin->Release ();

        if (hres != S_OK) {
          GST_ERROR
              ("Can't connect capture filter with fakesink filter (error=0x%x)",
              hres);
          goto error;
        }

      }
    }
  }

  if (was_running) {
    HRESULT hres = src->media_filter->Run (0);
    if (hres != S_OK) {
      GST_ERROR("Can't RUN the directshow capture graph after prepare (error=0x%x)", hres);
      return FALSE;
    }

    src->is_running = TRUE;
  }

  return TRUE;

error:
  /* Don't restart the graph, we're out anyway. */
  return FALSE;
}