Code Example #1
static GstCaps *
gst_dshowvideosrc_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
    /* If no video size was requested, fall back to the device's preferred (default) video size */

    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
    GstStructure *structure = gst_caps_get_structure (caps, 0);
    guint i = 0;
    gint res = -1;

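    /* Look up the first entry in the device caps that the requested caps are a subset of */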
    for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
        GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

        if (gst_caps_is_subset (caps, capstmp)) {
            res = i;
        }
        gst_caps_unref (capstmp);
    }

    if (res != -1) {
        GList *type_pin_mediatype = g_list_nth (src->pins_mediatypes, res);
        if (type_pin_mediatype) {
            GstCapturePinMediaType *pin_mediatype =
                (GstCapturePinMediaType *) type_pin_mediatype->data;
            gst_structure_fixate_field_nearest_int (structure, "width",
                                                    pin_mediatype->defaultWidth);
            gst_structure_fixate_field_nearest_int (structure, "height",
                                                    pin_mediatype->defaultHeight);
            gst_structure_fixate_field_nearest_fraction (structure, "framerate",
                    pin_mediatype->defaultFPS, 1);
        }
    }

    return GST_BASE_SRC_CLASS (gst_dshowvideosrc_parent_class)->fixate (bsrc, caps);
}
Code Example #2
static void
gst_dshowvideosrc_set_property (GObject * object, guint prop_id,
                                const GValue * value, GParamSpec * pspec)
{
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (object);

    switch (prop_id) {
    case PROP_DEVICE:
    {
        if (src->device) {
            g_free (src->device);
            src->device = NULL;
        }
        if (g_value_get_string (value)) {
            src->device = g_strdup (g_value_get_string (value));
        }
        break;
    }
    case PROP_DEVICE_NAME:
    {
        if (src->device_name) {
            g_free (src->device_name);
            src->device_name = NULL;
        }
        if (g_value_get_string (value)) {
            src->device_name = g_strdup (g_value_get_string (value));
        }
        break;
    }
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
        break;
    }
}
Code Example #3
static GstStateChangeReturn
gst_dshowvideosrc_change_state (GstElement * element, GstStateChange transition)
{
    HRESULT hres = S_FALSE;
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (element);

    switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
        break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
        break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
        //if (src->media_filter)
        //  hres = src->media_filter->Run (0);
        //if (hres != S_OK) {
        //  GST_ERROR ("Can't RUN the directshow capture graph (error=0x%x)", hres);
        //  return GST_STATE_CHANGE_FAILURE;
        //}
        break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
        if (src->media_control)
            hres = src->media_control->Stop ();
        if (hres != S_OK) {
            GST_ERROR ("Can't STOP the directshow capture graph (error=%d)", hres);
            return GST_STATE_CHANGE_FAILURE;
        }
        break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
        break;
    case GST_STATE_CHANGE_READY_TO_NULL:
        break;
    }

    return GST_ELEMENT_CLASS (gst_dshowvideosrc_parent_class)->change_state (element, transition);
}
Code Example #4
static GstCaps *
gst_dshowvideosrc_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
{
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (basesrc);
  GstCaps *caps;

  if (src->caps) {
    caps = gst_caps_ref (src->caps);
  } else {
    caps = gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (src));
  }

  if (caps) {
    GstCaps *filtcaps;

    if (filter) {
      filtcaps = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    } else {
      filtcaps = gst_caps_ref (caps);
    }
    gst_caps_unref (caps);

    return filtcaps;
  }

  return NULL;
}
Code Example #5
static GstFlowReturn
gst_dshowvideosrc_create (GstPushSrc * psrc, GstBuffer ** buf)
{
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (psrc);

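    /* Wait until the capture callback delivers a buffer or unlock() requests a stop */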
    g_mutex_lock (&src->buffer_mutex);
    while (src->buffer == NULL && !src->stop_requested)
        g_cond_wait (&src->buffer_cond, &src->buffer_mutex);
    *buf = src->buffer;
    src->buffer = NULL;
    g_mutex_unlock (&src->buffer_mutex);

    if (src->stop_requested) {
        if (*buf != NULL) {
            gst_buffer_unref (*buf);
            *buf = NULL;
        }
        return GST_FLOW_FLUSHING;
    }

    GST_DEBUG ("dshowvideosrc_create => pts %" GST_TIME_FORMAT " duration %"
               GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buf)),
               GST_TIME_ARGS (GST_BUFFER_DURATION (*buf)));

    return GST_FLOW_OK;
}
Code Example #6
static gboolean
gst_dshowvideosrc_start (GstBaseSrc * bsrc)
{
  HRESULT hres = S_FALSE;
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);

  hres = CoCreateInstance (CLSID_FilterGraph, NULL, CLSCTX_INPROC,
      IID_IFilterGraph, (LPVOID *) & src->filter_graph);
  if (hres != S_OK || !src->filter_graph) {
    GST_ERROR
        ("Can't create an instance of the dshow graph manager (error=0x%x)",
        hres);
    goto error;
  }

  hres = src->filter_graph->QueryInterface (IID_IMediaFilter,
      (LPVOID *) & src->media_filter);
  if (hres != S_OK || !src->media_filter) {
    GST_ERROR
        ("Can't get IMediacontrol interface from the graph manager (error=0x%x)",
        hres);
    goto error;
  }

  src->dshow_fakesink = new CDshowFakeSink;
  src->dshow_fakesink->AddRef ();

  hres = src->filter_graph->AddFilter (src->video_cap_filter, L"capture");
  if (hres != S_OK) {
    GST_ERROR ("Can't add video capture filter to the graph (error=0x%x)",
        hres);
    goto error;
  }

  hres = src->filter_graph->AddFilter (src->dshow_fakesink, L"sink");
  if (hres != S_OK) {
    GST_ERROR ("Can't add our fakesink filter to the graph (error=0x%x)", hres);
    goto error;
  }

  return TRUE;

error:
  if (src->dshow_fakesink) {
    src->dshow_fakesink->Release ();
    src->dshow_fakesink = NULL;
  }

  if (src->media_filter) {
    src->media_filter->Release ();
    src->media_filter = NULL;
  }
  if (src->filter_graph) {
    src->filter_graph->Release ();
    src->filter_graph = NULL;
  }

  return FALSE;
}
Code Example #7
static gboolean
gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
    GstClockTime duration)
{
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (src_object);
  GstBuffer *buf = NULL;
  IPin *pPin = NULL;
  HRESULT hres = S_FALSE;
  AM_MEDIA_TYPE *pMediaType = NULL;

  if (!buffer || size == 0 || !src) {
    return FALSE;
  }

  /* create a new buffer and assign the clock time to it as the timestamp */
  buf = gst_buffer_new_and_alloc (size);

  GST_BUFFER_SIZE (buf) = size;

  GstClock *clock = gst_element_get_clock (GST_ELEMENT (src));
  GST_BUFFER_TIMESTAMP (buf) =
    GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (src)), gst_clock_get_time (clock));
  gst_object_unref (clock);

  GST_BUFFER_DURATION (buf) = duration;

  if (src->is_rgb) {
    /* For RGB the DirectShow decoder returns a bottom-up bitmap.
     * There is probably a way to get top-down video frames from
     * the decoder...
     */
    gint line = 0;
    gint stride = size / src->height;

    for (; line < src->height; line++) {
      memcpy (GST_BUFFER_DATA (buf) + (line * stride),
          buffer + (size - ((line + 1) * (stride))), stride);
    }
  } else {
    memcpy (GST_BUFFER_DATA (buf), buffer, size);
  }

  GST_DEBUG ("push_buffer => pts %" GST_TIME_FORMAT "duration %"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (duration));

  /* the negotiate() method already set caps on the source pad */
  gst_buffer_set_caps (buf, GST_PAD_CAPS (GST_BASE_SRC_PAD (src)));

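  /* Hand the buffer over to create(); any previous frame that was not yet consumed is dropped */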
  g_mutex_lock (src->buffer_mutex);
  if (src->buffer != NULL)
    gst_buffer_unref (src->buffer);
  src->buffer = buf;
  g_cond_signal (src->buffer_cond);
  g_mutex_unlock (src->buffer_mutex);

  return TRUE;
}
Code Example #8
static gboolean
gst_dshowvideosrc_unlock_stop (GstBaseSrc * bsrc)
{
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);

    src->stop_requested = FALSE;

    return TRUE;
}
Code Example #9
static gboolean
gst_dshowvideosrc_stop (GstBaseSrc * bsrc)
{
  IPin *input_pin = NULL, *output_pin = NULL;
  HRESULT hres = S_FALSE;
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);

  if (!src->filter_graph)
    return TRUE;

  /* disconnect filters */
  gst_dshow_get_pin_from_filter (src->video_cap_filter, PINDIR_OUTPUT,
      &output_pin);
  if (output_pin) {
    hres = src->filter_graph->Disconnect (output_pin);
    output_pin->Release ();
  }

  gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT, &input_pin);
  if (input_pin) {
    hres = src->filter_graph->Disconnect (input_pin);
    input_pin->Release ();
  }

  /* remove filters from the graph */
  src->filter_graph->RemoveFilter (src->video_cap_filter);
  src->filter_graph->RemoveFilter (src->dshow_fakesink);

  /* release our gstreamer dshow sink */
  src->dshow_fakesink->Release ();
  src->dshow_fakesink = NULL;

  /* release media filter interface */
  src->media_filter->Release ();
  src->media_filter = NULL;

  /* release the filter graph manager */
  src->filter_graph->Release ();
  src->filter_graph = NULL;

  /* reset caps */
  if (src->caps) {
    gst_caps_unref (src->caps);
    src->caps = NULL;
  }

  /* reset device id */
  if (src->device) {
    g_free (src->device);
    src->device = NULL;
  }
  
  return TRUE;
}
Code Example #10
static gboolean
gst_dshowvideosrc_unlock (GstBaseSrc * bsrc)
{
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);

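    /* Wake up create() so it can return GST_FLOW_FLUSHING */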
    g_mutex_lock (&src->buffer_mutex);
    src->stop_requested = TRUE;
    g_cond_signal (&src->buffer_cond);
    g_mutex_unlock (&src->buffer_mutex);

    return TRUE;
}
Code Example #11
static void
gst_dshowvideosrc_dispose (GObject * gobject)
{
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (gobject);

  if (src->device) {
    g_free (src->device);
    src->device = NULL;
  }

  if (src->device_name) {
    g_free (src->device_name);
    src->device_name = NULL;
  }

  if (src->caps) {
    gst_caps_unref (src->caps);
    src->caps = NULL;
  }

  if (src->pins_mediatypes) {
    gst_dshow_free_pins_mediatypes (src->pins_mediatypes);
    src->pins_mediatypes = NULL;
  }

  /* clean dshow */
  if (src->video_cap_filter) {
    src->video_cap_filter->Release ();
    src->video_cap_filter = NULL;
  }

  if (src->buffer_mutex) {
    g_mutex_free (src->buffer_mutex);
    src->buffer_mutex = NULL;
  }

  if (src->buffer_cond) {
    g_cond_free (src->buffer_cond);
    src->buffer_cond = NULL;
  }

  if (src->buffer) {
    gst_buffer_unref (src->buffer);
    src->buffer = NULL;
  }

  CoUninitialize ();

  G_OBJECT_CLASS (parent_class)->dispose (gobject);
}
Code Example #12
static GValueArray *
gst_dshowvideosrc_probe_get_values (GstPropertyProbe * probe,
    guint prop_id, const GParamSpec * pspec)
{
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (probe);
  GValueArray *array = NULL;

  switch (prop_id) {
    case PROP_DEVICE_NAME:
      array = gst_dshowvideosrc_get_device_name_values (src);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (probe, prop_id, pspec);
      break;
  }

  return array;
}
Code Example #13
static void
gst_dshowvideosrc_get_property (GObject * object, guint prop_id,
                                GValue * value, GParamSpec * pspec)
{
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (object);

    switch(prop_id) {
    case PROP_DEVICE:
        g_value_set_string(value, src->device);
        break;
    case PROP_DEVICE_NAME:
        g_value_set_string(value, src->device_name);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        break;
    }
}
Code Example #14
static GstStateChangeReturn
gst_dshowvideosrc_change_state (GstElement * element, GstStateChange transition)
{
  HRESULT hres = S_FALSE;
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      if (src->media_filter) {
        /* Setting this to TRUE because set_caps may be invoked before
           Run() returns. */
        src->is_running = TRUE;
        hres = src->media_filter->Run (0);
      }
      if (hres != S_OK) {
        GST_ERROR ("Can't RUN the directshow capture graph (error=0x%x)", hres);
        src->is_running = FALSE;
        return GST_STATE_CHANGE_FAILURE;
      }
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      if (src->media_filter)
        hres = src->media_filter->Stop ();
      if (hres != S_OK) {
        GST_ERROR ("Can't STOP the directshow capture graph (error=%d)", hres);
        return GST_STATE_CHANGE_FAILURE;
      }
      src->is_running = FALSE;
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
  }

  return GST_ELEMENT_CLASS(gst_dshowvideosrc_parent_class)->change_state(element, transition);
}
Code Example #15
static void
gst_dshowvideosrc_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (object);

  switch (prop_id) {
    case PROP_DEVICE:
    {
      const gchar *device = g_value_get_string (value);
      g_free (src->device);
      src->device = NULL;
      if (device && strlen (device) != 0) {
        src->device = g_value_dup_string (value);
      }
      break;
    }
    case PROP_DEVICE_NAME:
    {
      const gchar *device_name = g_value_get_string (value);
      g_free (src->device_name);
      src->device_name = NULL;
      if (device_name && strlen (device_name) != 0) {
        src->device_name = g_value_dup_string (value);
      }
      break;
    }
    case PROP_DEVICE_INDEX:
    {
      src->device_index = g_value_get_int (value);
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
Code Example #16
static gboolean
gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
                               GstClockTime duration)
{
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (src_object);
    GstBuffer *buf = NULL;
    GstMapInfo map;
    IPin *pPin = NULL;
    HRESULT hres = S_FALSE;
    AM_MEDIA_TYPE *pMediaType = NULL;

    if (!buffer || size == 0 || !src) {
        return FALSE;
    }

    /* create a new buffer and assign the clock time to it as the timestamp */
    buf = gst_buffer_new_and_alloc (size);

    gst_buffer_set_size(buf, size);

    GstClock *clock = gst_element_get_clock (GST_ELEMENT (src));
    GST_BUFFER_PTS (buf) =
        GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (src)), gst_clock_get_time (clock));
    //GST_BUFFER_DTS(buf) = GST_BUFFER_PTS (buf);
    GST_BUFFER_DTS(buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET(buf) = src->offset++;
    GST_BUFFER_OFFSET_END(buf) = src->offset;
    GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_LIVE);

    gst_object_unref (clock);

    GST_BUFFER_DURATION (buf) = duration;

    gst_buffer_map(buf, &map, GST_MAP_WRITE);

    if (src->is_rgb) {
        /* For RGB the DirectShow decoder returns a bottom-up bitmap.
         * There is probably a way to get top-down video frames from
         * the decoder...
         */
        gint line = 0;
        gint stride = size / src->height;

        for (; line < src->height; line++) {
            memcpy (map.data + (line * stride),
                    buffer + (size - ((line + 1) * (stride))), stride);
        }
    } else {
        memcpy (map.data, buffer, size);
    }

    gst_buffer_unmap(buf, &map);

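    /* Advance the running time and apply any controller-bound property values */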
    src->time += duration;
    gst_object_sync_values (GST_OBJECT (src), src->time);

    GST_DEBUG ("push_buffer => pts %" GST_TIME_FORMAT "duration %"
               GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
               GST_TIME_ARGS (duration));

    g_mutex_lock (&src->buffer_mutex);
    if (src->buffer != NULL)
        gst_buffer_unref (src->buffer);
    src->buffer = buf;
    g_cond_signal (&src->buffer_cond);
    g_mutex_unlock (&src->buffer_mutex);

    return TRUE;
}
Code Example #17
static gboolean
gst_dshowvideosrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
    HRESULT hres;
    IPin *input_pin = NULL;
    IPin *output_pin = NULL;
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
    GstStructure *s = gst_caps_get_structure (caps, 0);
    OAFilterState ds_graph_state;
    GstCaps *current_caps;

    /* search the negotiated caps in our caps list to get its index and the corresponding mediatype */
    if (gst_caps_is_subset (caps, src->caps)) {
        guint i = 0;
        gint res = -1;

        hres = src->media_control->GetState(0, &ds_graph_state);
        if(SUCCEEDED(hres) && ds_graph_state == State_Running) {
            GST_INFO("Setting caps while DirectShow graph is already running");
            current_caps = gst_pad_get_current_caps(GST_BASE_SRC_PAD(src));

            if(gst_caps_is_equal(current_caps, caps)) {
                /* no need to set caps, just return */
                GST_INFO("Not resetting caps");
                gst_caps_unref(current_caps);
                return TRUE;
            }
            else {
                /* stop graph and disconnect filters so new caps can be set */
                GST_INFO("Different caps, stopping DirectShow graph");
                hres = src->media_control->Stop();
                hres = src->media_control->GetState(2000, &ds_graph_state);
                if(hres != S_OK) {
                    GST_ERROR("Could not stop DirectShow graph. Cannot renegoiate pins.");
                    goto error;
                }
                gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
                                               &input_pin);
                if (!input_pin) {
                    GST_ERROR ("Can't get input pin from our dshow fakesink");
                    goto error;
                }
                hres = input_pin->ConnectedTo (&output_pin);
                if (SUCCEEDED (hres) && output_pin) {
                    input_pin->Disconnect ();
                    output_pin->Disconnect ();
                    output_pin->Release ();
                }
                input_pin->Release ();
            }
            gst_caps_unref(current_caps);
        }

        for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
            GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

            if (gst_caps_is_subset (caps, capstmp)) {
                res = i;
            }
            gst_caps_unref (capstmp);
        }

        if (res != -1 && src->pins_mediatypes) {
            /* get the corresponding media type and build the dshow graph */
            GList *type_pin_mediatype = g_list_nth (src->pins_mediatypes, res);

            if (type_pin_mediatype) {
                GstCapturePinMediaType *pin_mediatype =
                    (GstCapturePinMediaType *) type_pin_mediatype->data;
                gchar *src_caps_string = NULL;
                const gchar *format_string = NULL;

                /* retrieve the desired video size */
                VIDEOINFOHEADER *video_info = NULL;
                gint width = 0;
                gint height = 0;
                gint numerator = 0;
                gint denominator = 0;
                gst_structure_get_int (s, "width", &width);
                gst_structure_get_int (s, "height", &height);
                gst_structure_get_fraction (s, "framerate", &numerator, &denominator);

                /* check whether the desired video size respects the granularity constraints */
                /* This check will be removed when GST_TYPE_INT_RANGE_STEP exists */
                /* See remarks in the gst_dshow_new_video_caps function */
                if (pin_mediatype->granularityWidth != 0
                        && width % pin_mediatype->granularityWidth != 0)
                    g_warning ("your desired video size is not valid : %d mod %d !=0\n",
                               width, pin_mediatype->granularityWidth);
                if (pin_mediatype->granularityHeight != 0
                        && height % pin_mediatype->granularityHeight != 0)
                    g_warning ("your desired video size is not valid : %d mod %d !=0\n",
                               height, pin_mediatype->granularityHeight);

                /* update mediatype */
                video_info = (VIDEOINFOHEADER *) pin_mediatype->mediatype->pbFormat;
                video_info->bmiHeader.biWidth = width;
                video_info->bmiHeader.biHeight = height;
                video_info->AvgTimePerFrame =
                    (LONGLONG) (10000000 * denominator / (double) numerator);
                video_info->bmiHeader.biSizeImage = DIBSIZE (video_info->bmiHeader);
                pin_mediatype->mediatype->lSampleSize = DIBSIZE (video_info->bmiHeader);

                src->dshow_fakesink->gst_set_media_type (pin_mediatype->mediatype);
                src->dshow_fakesink->gst_set_buffer_callback (
                    (push_buffer_func) gst_dshowvideosrc_push_buffer, src);

                gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
                                               &input_pin);
                if (!input_pin) {
                    GST_ERROR ("Can't get input pin from our dshow fakesink");
                    goto error;
                }

                hres = src->filter_graph->ConnectDirect (pin_mediatype->capture_pin,
                        input_pin, pin_mediatype->mediatype);
                input_pin->Release ();

                if (hres != S_OK) {
                    GST_ERROR
                    ("Can't connect capture filter with fakesink filter (error=0x%x)",
                     hres);
                    goto error;
                }

                /* save the negotiated width and height */
                gst_structure_get_int (s, "width", &src->width);
                gst_structure_get_int (s, "height", &src->height);

                src->is_rgb = FALSE;
                format_string = gst_structure_get_string (s, "format");
                if(format_string) {
                    if(!strcmp(format_string, "BGR")) {
                        src->is_rgb = TRUE;
                    }
                    else {
                        src->is_rgb = FALSE;
                    }
                }

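                /* Start (or restart) the DirectShow graph with the newly connected media type */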
                hres = src->media_control->Run();

                hres = src->media_control->GetState(5000, &ds_graph_state);
                if(hres != S_OK || ds_graph_state != State_Running) {
                    GST_ERROR("Could not run graph");
                    goto error;
                }
            }
        }
    }

    return TRUE;

error:
    return FALSE;
}
Code Example #18
static gboolean
gst_dshowvideosrc_start (GstBaseSrc * bsrc)
{
    HRESULT hres = S_FALSE;
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);

    src->offset = 0;
    src->time = 0;
    gst_object_sync_values (GST_OBJECT (src), src->time);

    /*
    The filter graph is now created via the IGraphBuilder interface.
    Code added to build upstream filters, needed for USB Analog TV Tuners / DVD Maker, based on AMCap code,
    by Fabrice Costa <*****@*****.**>
    */

    hres =  CoCreateInstance(CLSID_FilterGraph, NULL,
                             CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (LPVOID *) & src->graph_builder );
    if (hres != S_OK || !src->graph_builder ) {
        GST_ERROR
        ("Can't create an instance of the dshow graph builder (error=0x%x)",
         hres);
        goto error;
    } else {
        /*graph builder is derived from IFilterGraph so we can assign it to the old src->filter_graph*/
        src->filter_graph = (IFilterGraph*) src->graph_builder;
    }

    /*adding capture graph builder to correctly create upstream filters, Analog TV, TV Tuner */

    hres = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL,
                            CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2,
                            (LPVOID *) & src->capture_builder);
    if ( hres != S_OK || !src->capture_builder ) {
        GST_ERROR
        ("Can't create an instance of the dshow capture graph builder manager (error=0x%x)",
         hres);
        goto error;
    } else {
        src->capture_builder->SetFiltergraph(src->graph_builder);
    }

    hres = src->filter_graph->QueryInterface (IID_IMediaControl,
            (LPVOID *) & src->media_control);
    if (hres != S_OK || !src->media_control) {
        GST_ERROR
        ("Can't get IMediacontrol interface from the graph manager (error=0x%x)",
         hres);
        goto error;
    }

    src->dshow_fakesink = new CDshowFakeSink;
    src->dshow_fakesink->AddRef ();

    hres = src->filter_graph->AddFilter (src->video_cap_filter, L"capture");
    if (hres != S_OK) {
        GST_ERROR ("Can't add video capture filter to the graph (error=0x%x)",
                   hres);
        goto error;
    }

    /* Finding interfaces really creates the upstream filters */

    hres = src->capture_builder->FindInterface(&PIN_CATEGORY_CAPTURE,
            &MEDIATYPE_Interleaved, src->video_cap_filter,
            IID_IAMVideoCompression, (LPVOID *)&src->pVC);

    if(hres != S_OK)
    {
        hres = src->capture_builder->FindInterface(&PIN_CATEGORY_CAPTURE,
                &MEDIATYPE_Video, src->video_cap_filter,
                IID_IAMVideoCompression, (LPVOID *)&src->pVC);
    }

    hres = src->capture_builder->FindInterface(&PIN_CATEGORY_CAPTURE,
            &MEDIATYPE_Interleaved,
            src->video_cap_filter, IID_IAMStreamConfig, (LPVOID *)&src->pVSC);
    if(hres != S_OK)
    {
        hres = src->capture_builder->FindInterface(&PIN_CATEGORY_CAPTURE,
                &MEDIATYPE_Video, src->video_cap_filter,
                IID_IAMStreamConfig, (LPVOID *)&src->pVSC);
        if (hres != S_OK) {
            // this means we can't set frame rate (non-DV only)
            GST_ERROR ("Error %x: Cannot find VCapture:IAMStreamConfig",	hres);
            goto error;
        }
    }

    hres = src->filter_graph->AddFilter (src->dshow_fakesink, L"sink");
    if (hres != S_OK) {
        GST_ERROR ("Can't add our fakesink filter to the graph (error=0x%x)", hres);
        goto error;
    }

    return TRUE;

error:
    if (src->dshow_fakesink) {
        src->dshow_fakesink->Release ();
        src->dshow_fakesink = NULL;
    }

    if (src->media_control) {
        src->media_control->Release ();
        src->media_control = NULL;
    }
    if (src->graph_builder) {
        src->graph_builder->Release ();
        src->graph_builder = NULL;
    }
    if (src->capture_builder) {
        src->capture_builder->Release ();
        src->capture_builder = NULL;
    }
    if (src->pVC) {
        src->pVC->Release ();
        src->pVC = NULL;
    }
    if (src->pVSC) {
        src->pVSC->Release ();
        src->pVSC = NULL;
    }

    return FALSE;
}
Code Example #19
static GstCaps *
gst_dshowvideosrc_get_caps (GstBaseSrc * basesrc, GstCaps *filter)
{
    HRESULT hres = S_OK;
    IBindCtx *lpbc = NULL;
    IMoniker *videom;
    DWORD dwEaten;
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (basesrc);
    gunichar2 *unidevice = NULL;

    if (src->caps) {
        return gst_caps_ref (src->caps);
    }

    if (!src->device) {
        src->device =
            gst_dshow_getdevice_from_devicename (&CLSID_VideoInputDeviceCategory,
                    &src->device_name);
        if (!src->device) {
            GST_ERROR ("No video device found.");
            return NULL;
        }
    }

    unidevice =
        g_utf8_to_utf16 (src->device, strlen (src->device), NULL, NULL, NULL);

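    /* Bind the device's moniker display name to obtain the DirectShow capture filter */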
    if (!src->video_cap_filter) {
        hres = CreateBindCtx (0, &lpbc);
        if (SUCCEEDED (hres)) {
            hres =
                MkParseDisplayName (lpbc, (LPCOLESTR) unidevice, &dwEaten, &videom);
            if (SUCCEEDED (hres)) {
                hres = videom->BindToObject (lpbc, NULL, IID_IBaseFilter,
                                             (LPVOID *) & src->video_cap_filter);
                videom->Release ();
            }
            lpbc->Release ();
        }
    }

    if (!src->caps) {
        src->caps = gst_caps_new_empty ();
    }

    if (src->video_cap_filter && gst_caps_is_empty (src->caps)) {
        /* get the capture pins supported types */
        IPin *capture_pin = NULL;
        IEnumPins *enumpins = NULL;
        HRESULT hres;

        hres = src->video_cap_filter->EnumPins (&enumpins);
        if (SUCCEEDED (hres)) {
            while (enumpins->Next (1, &capture_pin, NULL) == S_OK) {
                IKsPropertySet *pKs = NULL;
                hres =
                    capture_pin->QueryInterface (IID_IKsPropertySet, (LPVOID *) & pKs);
                if (SUCCEEDED (hres) && pKs) {
                    DWORD cbReturned;
                    GUID pin_category;
                    RPC_STATUS rpcstatus;

                    hres =
                        pKs->Get (AMPROPSETID_Pin,
                                  AMPROPERTY_PIN_CATEGORY, NULL, 0, &pin_category, sizeof (GUID),
                                  &cbReturned);

                    /* we only want capture pins */
                    if (UuidCompare (&pin_category, (UUID *) & PIN_CATEGORY_CAPTURE,
                                     &rpcstatus) == 0) {
                        {
                            GstCaps *caps =
                                gst_dshowvideosrc_getcaps_from_streamcaps (src, capture_pin);
                            if (caps) {
                                gst_caps_append (src->caps, caps);
                            } else {
                                caps = gst_dshowvideosrc_getcaps_from_enum_mediatypes (src, capture_pin);
                                if (caps)
                                    gst_caps_append (src->caps, caps);
                            }
                        }
                    }
                    pKs->Release ();
                }
                capture_pin->Release ();
            }
            enumpins->Release ();
        }
    }

    if (unidevice) {
        g_free (unidevice);
    }

    if (src->caps) {
        if (filter) {
            return gst_caps_intersect_full (filter, src->caps,
                                            GST_CAPS_INTERSECT_FIRST);
        } else {
            return gst_caps_ref (src->caps);
        }
    }

    return NULL;
}
Code Example #20
static gboolean
gst_dshowvideosrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  HRESULT hres;
  IPin *input_pin = NULL;
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
  GstStructure *s = gst_caps_get_structure (caps, 0);
  GstCaps *current_caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (bsrc));

  if (current_caps) {
    if (gst_caps_is_equal (caps, current_caps)) {
      gst_caps_unref (current_caps);
      return TRUE;
    }
    gst_caps_unref (current_caps);
  }

  /* Same remark as in gstdshowaudiosrc. */
  gboolean was_running = src->is_running;
  if (was_running) {
    HRESULT hres = src->media_filter->Stop ();
    if (hres != S_OK) {
      GST_ERROR ("Can't STOP the directshow capture graph (error=0x%x)", hres);
      return FALSE;
    }
    src->is_running = FALSE;
  }

  /* search the negotiated caps in our caps list to get its index and the corresponding mediatype */
  if (gst_caps_is_subset (caps, src->caps)) {
    guint i = 0;
    gint res = -1;

    for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
      GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

      if (gst_caps_is_subset (caps, capstmp)) {
        res = i;
      }
      gst_caps_unref (capstmp);
    }

    if (res != -1 && src->pins_mediatypes) {
      /* get the corresponding media type and build the dshow graph */
      GList *type_pin_mediatype = g_list_nth (src->pins_mediatypes, res);

      if (type_pin_mediatype) {
        GstCapturePinMediaType *pin_mediatype =
            (GstCapturePinMediaType *) type_pin_mediatype->data;
        gchar *caps_string = NULL;
        gchar *src_caps_string = NULL;

        /* retrieve the desired video size */
        VIDEOINFOHEADER *video_info = NULL;
        gint width = 0;
        gint height = 0;
        gint numerator = 0;
        gint denominator = 0;
        gst_structure_get_int (s, "width", &width);
        gst_structure_get_int (s, "height", &height);
        gst_structure_get_fraction (s, "framerate", &numerator, &denominator);

        /* check whether the desired video size respects the granularity constraints */
        /* This check will be removed when GST_TYPE_INT_RANGE_STEP exists */
        /* See remarks in the gst_dshow_new_video_caps function */
        if (pin_mediatype->granularityWidth != 0
            && width % pin_mediatype->granularityWidth != 0)
          g_warning ("your desired video size is not valid : %d mod %d !=0\n",
              width, pin_mediatype->granularityWidth);
        if (pin_mediatype->granularityHeight != 0
            && height % pin_mediatype->granularityHeight != 0)
          g_warning ("your desired video size is not valid : %d mod %d !=0\n",
              height, pin_mediatype->granularityHeight);

        /* update mediatype */
        video_info = (VIDEOINFOHEADER *) pin_mediatype->mediatype->pbFormat;
        video_info->bmiHeader.biWidth = width;
        video_info->bmiHeader.biHeight = height;
        video_info->AvgTimePerFrame =
            (LONGLONG) (10000000 * denominator / (double) numerator);
        video_info->bmiHeader.biSizeImage = DIBSIZE (video_info->bmiHeader);
        pin_mediatype->mediatype->lSampleSize = DIBSIZE (video_info->bmiHeader);

        src->dshow_fakesink->gst_set_media_type (pin_mediatype->mediatype);
        src->dshow_fakesink->gst_set_buffer_callback (
            (push_buffer_func) gst_dshowvideosrc_push_buffer, src);

        gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
            &input_pin);
        if (!input_pin) {
          GST_ERROR ("Can't get input pin from our dshow fakesink");
          goto error;
        }

        if (gst_dshow_is_pin_connected (pin_mediatype->capture_pin)) {
          GST_DEBUG_OBJECT (src,
              "capture_pin already connected, disconnecting");
          src->filter_graph->Disconnect (pin_mediatype->capture_pin);
        }

        if (gst_dshow_is_pin_connected (input_pin)) {
          GST_DEBUG_OBJECT (src, "input_pin already connected, disconnecting");
          src->filter_graph->Disconnect (input_pin);
        }

        hres = src->filter_graph->ConnectDirect (pin_mediatype->capture_pin,
            input_pin, pin_mediatype->mediatype);
        input_pin->Release ();

        if (hres != S_OK) {
          GST_ERROR
              ("Can't connect capture filter with fakesink filter (error=0x%x)",
              hres);
          goto error;
        }

        /* save the negotiated width and height */
        gst_structure_get_int (s, "width", &src->width);
        gst_structure_get_int (s, "height", &src->height);

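        /* Derive is_rgb from the negotiated raw video format */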
        GstVideoInfo info;
        gst_video_info_from_caps (&info, caps);
        switch (GST_VIDEO_INFO_FORMAT (&info)) {
          case GST_VIDEO_FORMAT_RGB:
          case GST_VIDEO_FORMAT_BGR:
            src->is_rgb = TRUE;
            break;
          default:
            src->is_rgb = FALSE;
            break;
        }
      }
    }
  }

  if (was_running) {
    HRESULT hres = src->media_filter->Run (0);
    if (hres != S_OK) {
      GST_ERROR ("Can't RUN the directshow capture graph (error=0x%x)", hres);
      return FALSE;
    }
    src->is_running = TRUE;
  }

  return TRUE;

error:
  return FALSE;
}
Code Example #21
static gboolean
gst_dshowvideosrc_start (GstBaseSrc * bsrc)
{
  HRESULT hres = S_FALSE;
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
  DshowDeviceEntry *device_entry;
  IMoniker *moniker = NULL;

  device_entry = gst_dshow_select_device (&CLSID_VideoInputDeviceCategory,
      src->device, src->device_name, src->device_index);
  if (device_entry == NULL) {
    GST_ELEMENT_ERROR (src, RESOURCE, FAILED, ("Failed to find device"), (NULL));
    return FALSE;
  }

  g_free (src->device);
  g_free (src->device_name);
  src->device = g_strdup (device_entry->device);
  src->device_name = g_strdup (device_entry->device_name);
  src->device_index = device_entry->device_index;
  moniker = device_entry->moniker;
  device_entry->moniker = NULL;
  gst_dshow_device_entry_free (device_entry);

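  /* Instantiate the capture filter from the device moniker and enumerate its supported caps */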
  src->video_cap_filter = gst_dshow_create_capture_filter (moniker);
  moniker->Release ();
  if (src->video_cap_filter == NULL) {
    GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
        ("Failed to create capture filter for device"), (NULL));
    return FALSE;
  }

  src->caps = gst_dshowvideosrc_getcaps_from_capture_filter (
      src->video_cap_filter, (GList**)&src->pins_mediatypes);
  if (gst_caps_is_empty (src->caps)) {
    GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
        ("Failed to get any caps from devce"), (NULL));
    return FALSE;
  }

  /*
  The filter graph is now created via the IGraphBuilder interface.
  Code added to build upstream filters, needed for USB Analog TV Tuners / DVD Maker, based on AMCap code,
  by Fabrice Costa <*****@*****.**>
  */

  hres =  CoCreateInstance(CLSID_FilterGraph, NULL,
    CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (LPVOID *) & src->graph_builder );
  if (hres != S_OK || !src->graph_builder ) {
    GST_ERROR
        ("Can't create an instance of the dshow graph builder (error=0x%x)",
        hres);
    goto error;
  } else {
    /* graph builder is derived from IFilterGraph so we can assign it to the old src->filter_graph */
    src->filter_graph = (IFilterGraph *) src->graph_builder;
  }
  
  /*adding capture graph builder to correctly create upstream filters, Analog TV, TV Tuner */

  hres = CoCreateInstance (CLSID_CaptureGraphBuilder2, NULL,
      CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2,
      (LPVOID *) & src->capture_builder);
  if (hres != S_OK || !src->capture_builder) {
    GST_ERROR
        ("Can't create an instance of the dshow capture graph builder manager (error=0x%x)",
        hres);
    goto error;
  } else {
    src->capture_builder->SetFiltergraph (src->graph_builder);
  }

  hres = src->filter_graph->QueryInterface (IID_IMediaFilter,
      (LPVOID *) & src->media_filter);
  if (hres != S_OK || !src->media_filter) {
    GST_ERROR
        ("Can't get IMediacontrol interface from the graph manager (error=0x%x)",
        hres);
    goto error;
  }

  src->dshow_fakesink = new CDshowFakeSink;
  src->dshow_fakesink->AddRef ();

  hres = src->filter_graph->AddFilter (src->video_cap_filter, L"capture");
  if (hres != S_OK) {
    GST_ERROR ("Can't add video capture filter to the graph (error=0x%x)",
        hres);
    goto error;
  }

  /* Finding interfaces really creates the upstream filters */

  hres = src->capture_builder->FindInterface (&PIN_CATEGORY_CAPTURE,
      &MEDIATYPE_Interleaved, src->video_cap_filter,
      IID_IAMVideoCompression, (LPVOID *) &src->pVC);

  if (hres != S_OK) {
    hres = src->capture_builder->FindInterface (&PIN_CATEGORY_CAPTURE,
        &MEDIATYPE_Video, src->video_cap_filter,
        IID_IAMVideoCompression, (LPVOID *) &src->pVC);
  }

  hres = src->capture_builder->FindInterface (&PIN_CATEGORY_CAPTURE,
      &MEDIATYPE_Interleaved,
      src->video_cap_filter, IID_IAMStreamConfig, (LPVOID *) &src->pVSC);
  if (hres != S_OK) {
    hres = src->capture_builder->FindInterface (&PIN_CATEGORY_CAPTURE,
        &MEDIATYPE_Video, src->video_cap_filter,
        IID_IAMStreamConfig, (LPVOID *) &src->pVSC);
    if (hres != S_OK) {
      /* this means we can't set frame rate (non-DV only) */
      GST_ERROR ("Error %x: Cannot find VCapture:IAMStreamConfig", hres);
      goto error;
    }
  }

  hres = src->filter_graph->AddFilter (src->dshow_fakesink, L"sink");
  if (hres != S_OK) {
    GST_ERROR ("Can't add our fakesink filter to the graph (error=0x%x)", hres);
    goto error;
  }

  return TRUE;

error:
  GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
     ("Failed to build filter graph"), (NULL));

  if (src->dshow_fakesink) {
    src->dshow_fakesink->Release ();
    src->dshow_fakesink = NULL;
  }

  if (src->media_filter) {
    src->media_filter->Release ();
    src->media_filter = NULL;
  }
  if (src->graph_builder) {
    src->graph_builder->Release ();
    src->graph_builder = NULL;
  }
  if (src->capture_builder) {
    src->capture_builder->Release ();
    src->capture_builder = NULL;
  }
  if (src->pVC) {
    src->pVC->Release ();
    src->pVC = NULL;
  }
  if (src->pVSC) {
    src->pVSC->Release ();
    src->pVSC = NULL;
  }

  return FALSE;
}