static gboolean
gst_dshowvideosrc_stop (GstBaseSrc * bsrc)
{
  IPin *input_pin = NULL, *output_pin = NULL;
  HRESULT hres = S_FALSE;
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);

  if (!src->filter_graph)
    return TRUE;

  /* disconnect filters */
  gst_dshow_get_pin_from_filter (src->video_cap_filter, PINDIR_OUTPUT,
      &output_pin);
  if (output_pin) {
    hres = src->filter_graph->Disconnect (output_pin);
    output_pin->Release ();
  }

  gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT, &input_pin);
  if (input_pin) {
    hres = src->filter_graph->Disconnect (input_pin);
    input_pin->Release ();
  }

  /* remove filters from the graph */
  src->filter_graph->RemoveFilter (src->video_cap_filter);
  src->filter_graph->RemoveFilter (src->dshow_fakesink);

  /* release our gstreamer dshow sink */
  src->dshow_fakesink->Release ();
  src->dshow_fakesink = NULL;

  /* release media filter interface */
  src->media_filter->Release ();
  src->media_filter = NULL;

  /* release the filter graph manager */
  src->filter_graph->Release ();
  src->filter_graph = NULL;

  /* reset caps */
  if (src->caps) {
    gst_caps_unref (src->caps);
    src->caps = NULL;
  }

  /* reset device id */
  if (src->device) {
    g_free (src->device);
    src->device = NULL;
  }
  
  return TRUE;
}
Example #2
static gboolean
gst_dshowvideodec_get_filter_output_format (GstDshowVideoDec * vdec,
    GUID * subtype, VIDEOINFOHEADER ** format, guint * size)
{
  IPin *output_pin = NULL;
  IEnumMediaTypes *enum_mediatypes = NULL;
  HRESULT hres;
  ULONG fetched;
  BOOL ret = FALSE;

  if (!vdec->decfilter)
    return FALSE;

  if (!gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT,
          &output_pin)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("failed getting ouput pin from the decoder"), (NULL));
    return FALSE;
  }

  hres = IPin_EnumMediaTypes (output_pin, &enum_mediatypes);
  if (hres == S_OK && enum_mediatypes) {
    AM_MEDIA_TYPE *mediatype = NULL;

    IEnumMediaTypes_Reset (enum_mediatypes);
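    /* walk the pin's preferred media types until one matches the requested
     * subtype and carries a VIDEOINFOHEADER format block */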
    while (hres =
        IEnumMediaTypes_Next (enum_mediatypes, 1, &mediatype, &fetched),
        hres == S_OK) {
      RPC_STATUS rpcstatus;

      if ((UuidCompare (&mediatype->subtype, subtype, &rpcstatus) == 0
              && rpcstatus == RPC_S_OK) &&
          (UuidCompare (&mediatype->formattype, &FORMAT_VideoInfo,
                  &rpcstatus) == 0 && rpcstatus == RPC_S_OK)) {
        *size = mediatype->cbFormat;
        *format = g_malloc0 (*size);
        memcpy (*format, mediatype->pbFormat, *size);
        ret = TRUE;
      }
      gst_dshow_free_mediatype (mediatype);
      if (ret)
        break;
    }
    IEnumMediaTypes_Release (enum_mediatypes);
  }
  if (output_pin) {
    IPin_Release (output_pin);
  }

  return ret;
}
Example #3
static gboolean
gst_dshowaudiosrc_unprepare (GstAudioSrc * asrc)
{
  IPin *input_pin = NULL, *output_pin = NULL;
  HRESULT hres = S_FALSE;
  GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (asrc);

  /* disconnect filters */
  gst_dshow_get_pin_from_filter (src->audio_cap_filter, PINDIR_OUTPUT,
      &output_pin);
  if (output_pin) {
    hres = IFilterGraph_Disconnect (src->filter_graph, output_pin);
    IPin_Release (output_pin);
  }

  gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT, &input_pin);
  if (input_pin) {
    hres = IFilterGraph_Disconnect (src->filter_graph, input_pin);
    IPin_Release (input_pin);
  }

  return TRUE;
}
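Example #4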
static gboolean
gst_dshowvideodec_get_filter_output_format (GstDshowVideoDec * vdec,
    const GUID subtype, VIDEOINFOHEADER ** format, guint * size)
{
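  /* CComPtr smart pointers release the held COM interfaces automatically
   * when they go out of scope */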
  CComPtr<IPin> output_pin;
  CComPtr<IEnumMediaTypes> enum_mediatypes;
  HRESULT hres;
  ULONG fetched;
  BOOL ret = FALSE;

  if (!vdec->decfilter)
    return FALSE;

  output_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT);
  if (!output_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("failed getting ouput pin from the decoder"), (NULL));
    return FALSE;
  }

  hres = output_pin->EnumMediaTypes (&enum_mediatypes);
  if (hres == S_OK && enum_mediatypes) {
    AM_MEDIA_TYPE *mediatype = NULL;

    enum_mediatypes->Reset();
    while (hres =
        enum_mediatypes->Next(1, &mediatype, &fetched),
        hres == S_OK) 
    {
      if (IsEqualGUID (mediatype->subtype, subtype) &&
          IsEqualGUID (mediatype->formattype, FORMAT_VideoInfo))
      {
        *size = mediatype->cbFormat;
        *format = (VIDEOINFOHEADER *)g_malloc0 (*size);
        memcpy (*format, mediatype->pbFormat, *size);
        ret = TRUE;
      }
      DeleteMediaType (mediatype);
      if (ret)
        break;
    }
  }

  return ret;
}
Example #5
static gboolean
gst_dshowaudiodec_get_filter_settings (GstDshowAudioDec * adec)
{
  CComPtr<IPin> output_pin;
  CComPtr<IEnumMediaTypes> enum_mediatypes;
  HRESULT hres;
  ULONG fetched;
  BOOL ret = FALSE;

  if (adec->decfilter == 0)
    return FALSE;

  output_pin = gst_dshow_get_pin_from_filter (adec->decfilter, PINDIR_OUTPUT);
  if (!output_pin) {
    GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
        ("failed getting ouput pin from the decoder"), (NULL));
    return FALSE;
  }

  hres = output_pin->EnumMediaTypes (&enum_mediatypes);
  if (hres == S_OK && enum_mediatypes) {
    AM_MEDIA_TYPE *mediatype = NULL;

    enum_mediatypes->Reset();
    while (!ret && enum_mediatypes->Next(1, &mediatype, &fetched) == S_OK) 
    {
      if (IsEqualGUID (mediatype->subtype, MEDIASUBTYPE_PCM) &&
          IsEqualGUID (mediatype->formattype, FORMAT_WaveFormatEx))
      {
        WAVEFORMATEX *audio_info = (WAVEFORMATEX *) mediatype->pbFormat;

        adec->channels = audio_info->nChannels;
        adec->depth = audio_info->wBitsPerSample;
        adec->rate = audio_info->nSamplesPerSec;
        ret = TRUE;
      }
      DeleteMediaType (mediatype);
    }
  }

  return ret;
}
Example #6
static GstCaps *
gst_dshowvideodec_src_getcaps (GstPad * pad)
{
  GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
  GstCaps *caps = NULL;

  if (!vdec->srccaps)
    vdec->srccaps = gst_caps_new_empty ();

  if (vdec->decfilter) {
    IPin *output_pin = NULL;
    IEnumMediaTypes *enum_mediatypes = NULL;
    HRESULT hres;
    ULONG fetched;

    if (!gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT,
            &output_pin)) {
      GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
          ("failed getting ouput pin from the decoder"), (NULL));
      goto beach;
    }

    hres = IPin_EnumMediaTypes (output_pin, &enum_mediatypes);
    if (hres == S_OK && enum_mediatypes) {
      AM_MEDIA_TYPE *mediatype = NULL;

      IEnumMediaTypes_Reset (enum_mediatypes);
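      /* collect each RGB24 output type that carries a VIDEOINFOHEADER and
       * expose it as a video/x-raw-rgb structure in the source caps */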
      while (hres =
          IEnumMediaTypes_Next (enum_mediatypes, 1, &mediatype, &fetched),
          hres == S_OK) {
        RPC_STATUS rpcstatus;
        VIDEOINFOHEADER *video_info;
        GstCaps *mediacaps = NULL;

        /* RGB24 */
        if ((UuidCompare (&mediatype->subtype, &MEDIASUBTYPE_RGB24,
                    &rpcstatus) == 0 && rpcstatus == RPC_S_OK)
            && (UuidCompare (&mediatype->formattype, &FORMAT_VideoInfo,
                    &rpcstatus) == 0 && rpcstatus == RPC_S_OK)) {
          video_info = (VIDEOINFOHEADER *) mediatype->pbFormat;

          /* ffmpegcolorspace handles RGB24 in BIG_ENDIAN */
          mediacaps = gst_caps_new_simple ("video/x-raw-rgb",
              "bpp", G_TYPE_INT, 24,
              "depth", G_TYPE_INT, 24,
              "width", G_TYPE_INT, video_info->bmiHeader.biWidth,
              "height", G_TYPE_INT, video_info->bmiHeader.biHeight,
              "framerate", GST_TYPE_FRACTION,
              (int) (10000000 / video_info->AvgTimePerFrame), 1, "endianness",
              G_TYPE_INT, G_BIG_ENDIAN, "red_mask", G_TYPE_INT, 255,
              "green_mask", G_TYPE_INT, 65280, "blue_mask", G_TYPE_INT,
              16711680, NULL);

          if (mediacaps) {
            vdec->mediatypes = g_list_append (vdec->mediatypes, mediatype);
            gst_caps_append (vdec->srccaps, mediacaps);
          } else {
            gst_dshow_free_mediatype (mediatype);
          }
        } else {
          gst_dshow_free_mediatype (mediatype);
        }

      }
      IEnumMediaTypes_Release (enum_mediatypes);
    }
    if (output_pin) {
      IPin_Release (output_pin);
    }
  }

  if (vdec->srccaps)
    caps = gst_caps_ref (vdec->srccaps);

beach:
  gst_object_unref (vdec);

  return caps;
}
Example #7
static gboolean
gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  gboolean ret = FALSE;
  HRESULT hres;
  GstStructure *s = gst_caps_get_structure (caps, 0);
  GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
  GstDshowVideoDecClass *klass =
      (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
  GstBuffer *extradata = NULL;
  const GValue *v = NULL;
  gint size = 0;
  GstCaps *caps_out;
  AM_MEDIA_TYPE output_mediatype, input_mediatype;
  VIDEOINFOHEADER *input_vheader = NULL, *output_vheader = NULL;
  IPin *output_pin = NULL, *input_pin = NULL;
  IGstDshowInterface *gstdshowinterface = NULL;
  const GValue *fps;

  /* read data */
  if (!gst_structure_get_int (s, "width", &vdec->width) ||
      !gst_structure_get_int (s, "height", &vdec->height)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("error getting video width or height from caps"), (NULL));
    goto end;
  }
  fps = gst_structure_get_value (s, "framerate");
  if (!fps) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("error getting video framerate from caps"), (NULL));
    goto end;
  }
  vdec->fps_n = gst_value_get_fraction_numerator (fps);
  vdec->fps_d = gst_value_get_fraction_denominator (fps);

  if ((v = gst_structure_get_value (s, "codec_data")))
    extradata = gst_value_get_buffer (v);

  /* define the input type format */
  memset (&input_mediatype, 0, sizeof (AM_MEDIA_TYPE));
  input_mediatype.majortype = klass->entry->input_majortype;
  input_mediatype.subtype = klass->entry->input_subtype;
  input_mediatype.bFixedSizeSamples = FALSE;
  input_mediatype.bTemporalCompression = TRUE;

  if (strstr (klass->entry->sinkcaps, "video/mpeg, mpegversion= (int) 1")) {
    size =
        sizeof (MPEG1VIDEOINFO) + (extradata ? GST_BUFFER_SIZE (extradata) -
        1 : 0);
    input_vheader = g_malloc0 (size);

    input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
    if (extradata) {
      MPEG1VIDEOINFO *mpeg_info = (MPEG1VIDEOINFO *) input_vheader;

      memcpy (mpeg_info->bSequenceHeader,
          GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
      mpeg_info->cbSequenceHeader = GST_BUFFER_SIZE (extradata);
    }
    input_mediatype.formattype = FORMAT_MPEGVideo;
  } else {
    size =
        sizeof (VIDEOINFOHEADER) +
        (extradata ? GST_BUFFER_SIZE (extradata) : 0);
    input_vheader = g_malloc0 (size);

    input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
    if (extradata) {            /* Codec data is appended after our header */
      memcpy (((guchar *) input_vheader) + sizeof (VIDEOINFOHEADER),
          GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
      input_vheader->bmiHeader.biSize += GST_BUFFER_SIZE (extradata);
    }
    input_mediatype.formattype = FORMAT_VideoInfo;
  }
  input_vheader->rcSource.top = input_vheader->rcSource.left = 0;
  input_vheader->rcSource.right = vdec->width;
  input_vheader->rcSource.bottom = vdec->height;
  input_vheader->rcTarget = input_vheader->rcSource;
  input_vheader->bmiHeader.biWidth = vdec->width;
  input_vheader->bmiHeader.biHeight = vdec->height;
  input_vheader->bmiHeader.biPlanes = 1;
  input_vheader->bmiHeader.biBitCount = 16;
  input_vheader->bmiHeader.biCompression = klass->entry->format;
  input_vheader->bmiHeader.biSizeImage =
      (vdec->width * vdec->height) * (input_vheader->bmiHeader.biBitCount / 8);

  input_mediatype.cbFormat = size;
  input_mediatype.pbFormat = (BYTE *) input_vheader;
  input_mediatype.lSampleSize = input_vheader->bmiHeader.biSizeImage;

  hres = IBaseFilter_QueryInterface (vdec->srcfilter, &IID_IGstDshowInterface,
      (void **) &gstdshowinterface);
  if (hres != S_OK || !gstdshowinterface) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get IGstDshowInterface interface from dshow fakesrc filter (error=%d)",
            hres), (NULL));
    goto end;
  }

  /* save a reference to IGstDshowInterface to use its processing functions */
  if (!vdec->gstdshowsrcfilter) {
    vdec->gstdshowsrcfilter = gstdshowinterface;
    IBaseFilter_AddRef (vdec->gstdshowsrcfilter);
  }

  IGstDshowInterface_gst_set_media_type (gstdshowinterface, &input_mediatype);
  IGstDshowInterface_Release (gstdshowinterface);
  gstdshowinterface = NULL;

  /* set the sample size for fakesrc filter to the output buffer size */
  IGstDshowInterface_gst_set_sample_size (vdec->gstdshowsrcfilter,
      input_mediatype.lSampleSize);

  /* connect our fake src to decoder */
  gst_dshow_get_pin_from_filter (vdec->srcfilter, PINDIR_OUTPUT, &output_pin);
  if (!output_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get output pin from our directshow fakesrc filter"), (NULL));
    goto end;
  }
  gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_INPUT, &input_pin);
  if (!input_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get input pin from decoder filter"), (NULL));
    goto end;
  }

  hres =
      IFilterGraph_ConnectDirect (vdec->filtergraph, output_pin, input_pin,
      NULL);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't connect fakesrc with decoder (error=%d)", hres), (NULL));
    goto end;
  }

  IPin_Release (input_pin);
  IPin_Release (output_pin);
  input_pin = NULL;
  output_pin = NULL;

  /* get decoder output video format */
  if (!gst_dshowvideodec_get_filter_output_format (vdec,
          &klass->entry->output_subtype, &output_vheader, &size)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get decoder output video format"), (NULL));
    goto end;
  }

  memset (&output_mediatype, 0, sizeof (AM_MEDIA_TYPE));
  output_mediatype.majortype = klass->entry->output_majortype;
  output_mediatype.subtype = klass->entry->output_subtype;
  output_mediatype.bFixedSizeSamples = TRUE;
  output_mediatype.bTemporalCompression = FALSE;
  output_mediatype.lSampleSize = output_vheader->bmiHeader.biSizeImage;
  output_mediatype.formattype = FORMAT_VideoInfo;
  output_mediatype.cbFormat = size;
  output_mediatype.pbFormat = (BYTE *) output_vheader;

  hres = IBaseFilter_QueryInterface (vdec->sinkfilter, &IID_IGstDshowInterface,
      (void **) &gstdshowinterface);
  if (hres != S_OK || !gstdshowinterface) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get IGstDshowInterface interface from dshow fakesink filter (error=%d)",
            hres), (NULL));
    goto end;
  }

  IGstDshowInterface_gst_set_media_type (gstdshowinterface, &output_mediatype);
  IGstDshowInterface_gst_set_buffer_callback (gstdshowinterface,
      gst_dshowvideodec_push_buffer, (byte *) vdec);
  IGstDshowInterface_Release (gstdshowinterface);
  gstdshowinterface = NULL;

  /* connect decoder to our fake sink */
  gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT, &output_pin);
  if (!output_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get output pin from our decoder filter"), (NULL));
    goto end;
  }

  gst_dshow_get_pin_from_filter (vdec->sinkfilter, PINDIR_INPUT, &input_pin);
  if (!input_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get input pin from our directshow fakesink filter"), (NULL));
    goto end;
  }

  hres =
      IFilterGraph_ConnectDirect (vdec->filtergraph, output_pin, input_pin,
      &output_mediatype);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't connect decoder with fakesink (error=%d)", hres), (NULL));
    goto end;
  }

  /* negotiate output */
  caps_out = gst_caps_from_string (klass->entry->srccaps);
  gst_caps_set_simple (caps_out,
      "width", G_TYPE_INT, vdec->width,
      "height", G_TYPE_INT, vdec->height,
      "framerate", GST_TYPE_FRACTION, vdec->fps_n, vdec->fps_d, NULL);
  if (!gst_pad_set_caps (vdec->srcpad, caps_out)) {
    gst_caps_unref (caps_out);
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Failed to negotiate output"), (NULL));
    goto end;
  }
  gst_caps_unref (caps_out);

  hres = IMediaFilter_Run (vdec->mediafilter, -1);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't run the directshow graph (error=%d)", hres), (NULL));
    goto end;
  }

  ret = TRUE;
end:
  gst_object_unref (vdec);
  if (input_vheader)
    g_free (input_vheader);
  if (gstdshowinterface)
    IGstDshowInterface_Release (gstdshowinterface);
  if (input_pin)
    IPin_Release (input_pin);
  if (output_pin)
    IPin_Release (output_pin);

  return ret;
}
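Example #8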
static GstCaps *
gst_dshowvideodec_src_getcaps (GstPad * pad)
{
  GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
  GstCaps *caps = NULL;

  if (!vdec->srccaps)
    vdec->srccaps = gst_caps_new_empty ();

  if (vdec->decfilter) {
    CComPtr<IPin> output_pin;
    CComPtr<IEnumMediaTypes> enum_mediatypes;
    HRESULT hres;
    ULONG fetched;

    output_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT);
    if (!output_pin) {
      GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
          ("failed getting ouput pin from the decoder"), (NULL));
      goto beach;
    }

    hres = output_pin->EnumMediaTypes (&enum_mediatypes);
    if (hres == S_OK && enum_mediatypes) {
      AM_MEDIA_TYPE *mediatype = NULL;

      enum_mediatypes->Reset();
      while (hres =
          enum_mediatypes->Next(1, &mediatype, &fetched),
          hres == S_OK) 
      {
        VIDEOINFOHEADER *video_info;
        GstCaps *mediacaps = NULL;

        /* RGB24 */
        if (IsEqualGUID (mediatype->subtype, MEDIASUBTYPE_RGB24) &&
            IsEqualGUID (mediatype->formattype, FORMAT_VideoInfo))
        {
          video_info = (VIDEOINFOHEADER *) mediatype->pbFormat;

          /* ffmpegcolorspace handles RGB24 in BIG_ENDIAN */
          mediacaps = gst_caps_new_simple ("video/x-raw-rgb",
              "bpp", G_TYPE_INT, 24,
              "depth", G_TYPE_INT, 24,
              "width", G_TYPE_INT, video_info->bmiHeader.biWidth,
              "height", G_TYPE_INT, video_info->bmiHeader.biHeight,
              "framerate", GST_TYPE_FRACTION,
              (int) (10000000 / video_info->AvgTimePerFrame), 1, "endianness",
              G_TYPE_INT, G_BIG_ENDIAN, "red_mask", G_TYPE_INT, 255,
              "green_mask", G_TYPE_INT, 65280, "blue_mask", G_TYPE_INT,
              16711680, NULL);

          if (mediacaps) {
            vdec->mediatypes = g_list_append (vdec->mediatypes, mediatype);
            gst_caps_append (vdec->srccaps, mediacaps);
          } else {
            DeleteMediaType (mediatype);
          }
        } else {
          DeleteMediaType (mediatype);
        }

      }
    }
  }

  if (vdec->srccaps)
    caps = gst_caps_ref (vdec->srccaps);

beach:
  gst_object_unref (vdec);

  return caps;
}
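Example #9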
static gboolean
gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  gboolean ret = FALSE;
  HRESULT hres;
  GstStructure *s = gst_caps_get_structure (caps, 0);
  GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
  GstDshowVideoDecClass *klass =
      (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
  GstBuffer *extradata = NULL;
  const GValue *v = NULL;
  guint size = 0;
  GstCaps *caps_out;
  AM_MEDIA_TYPE output_mediatype, input_mediatype;
  VIDEOINFOHEADER *input_vheader = NULL, *output_vheader = NULL;
  CComPtr<IPin> output_pin;
  CComPtr<IPin> input_pin;
  IBaseFilter *srcfilter = NULL;
  IBaseFilter *sinkfilter = NULL;
  const GValue *fps, *par;

  /* read data */
  if (!gst_structure_get_int (s, "width", &vdec->width) ||
      !gst_structure_get_int (s, "height", &vdec->height)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("error getting video width or height from caps"), (NULL));
    goto end;
  }
  fps = gst_structure_get_value (s, "framerate");
  if (fps) {
    vdec->fps_n = gst_value_get_fraction_numerator (fps);
    vdec->fps_d = gst_value_get_fraction_denominator (fps);
  }
  else {
    /* Invent a sane default framerate; the timestamps matter
     * more anyway. */
    vdec->fps_n = 25;
    vdec->fps_d = 1;
  }

  par = gst_structure_get_value (s, "pixel-aspect-ratio");
  if (par) {
    vdec->par_n = gst_value_get_fraction_numerator (par);
    vdec->par_d = gst_value_get_fraction_denominator (par);
  }
  else {
    vdec->par_n = vdec->par_d = 1;
  }

  if ((v = gst_structure_get_value (s, "codec_data")))
    extradata = gst_value_get_buffer (v);

  /* define the input type format */
  memset (&input_mediatype, 0, sizeof (AM_MEDIA_TYPE));
  input_mediatype.majortype = klass->entry->input_majortype;
  input_mediatype.subtype = klass->entry->input_subtype;
  input_mediatype.bFixedSizeSamples = FALSE;
  input_mediatype.bTemporalCompression = TRUE;

  if (strstr (klass->entry->sinkcaps, "video/mpeg, mpegversion= (int) 1")) {
    size =
        sizeof (MPEG1VIDEOINFO) + (extradata ? GST_BUFFER_SIZE (extradata) -
        1 : 0);
    input_vheader = (VIDEOINFOHEADER *)g_malloc0 (size);

    input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
    if (extradata) {
      MPEG1VIDEOINFO *mpeg_info = (MPEG1VIDEOINFO *) input_vheader;

      memcpy (mpeg_info->bSequenceHeader,
          GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
      mpeg_info->cbSequenceHeader = GST_BUFFER_SIZE (extradata);
    }
    input_mediatype.formattype = FORMAT_MPEGVideo;
  } else {
    size =
        sizeof (VIDEOINFOHEADER) +
        (extradata ? GST_BUFFER_SIZE (extradata) : 0);
    input_vheader = (VIDEOINFOHEADER *)g_malloc0 (size);

    input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
    if (extradata) {            /* Codec data is appended after our header */
      memcpy (((guchar *) input_vheader) + sizeof (VIDEOINFOHEADER),
          GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
      input_vheader->bmiHeader.biSize += GST_BUFFER_SIZE (extradata);
    }
    input_mediatype.formattype = FORMAT_VideoInfo;
  }
  input_vheader->rcSource.top = input_vheader->rcSource.left = 0;
  input_vheader->rcSource.right = vdec->width;
  input_vheader->rcSource.bottom = vdec->height;
  input_vheader->rcTarget = input_vheader->rcSource;
  input_vheader->bmiHeader.biWidth = vdec->width;
  input_vheader->bmiHeader.biHeight = vdec->height;
  input_vheader->bmiHeader.biPlanes = 1;
  input_vheader->bmiHeader.biBitCount = 16;
  input_vheader->bmiHeader.biCompression = klass->entry->format;
  input_vheader->bmiHeader.biSizeImage =
      (vdec->width * vdec->height) * (input_vheader->bmiHeader.biBitCount / 8);

  input_mediatype.cbFormat = size;
  input_mediatype.pbFormat = (BYTE *) input_vheader;
  input_mediatype.lSampleSize = input_vheader->bmiHeader.biSizeImage;

  vdec->fakesrc->GetOutputPin()->SetMediaType(&input_mediatype);

  /* set the sample size for fakesrc filter to the output buffer size */
  vdec->fakesrc->GetOutputPin()->SetSampleSize(input_mediatype.lSampleSize);

  /* connect our fake src to decoder */
  hres = vdec->fakesrc->QueryInterface(IID_IBaseFilter,
      (void **) &srcfilter);
  if (FAILED (hres)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
      ("Can't QT fakesrc to IBaseFilter: %x", hres), (NULL));
    goto end;
  }

  output_pin = gst_dshow_get_pin_from_filter (srcfilter, PINDIR_OUTPUT);
  if (!output_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get output pin from our directshow fakesrc filter"), (NULL));
    goto end;
  }
  input_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_INPUT);
  if (!input_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get input pin from decoder filter"), (NULL));
    goto end;
  }

  hres = vdec->filtergraph->ConnectDirect (output_pin, input_pin, NULL);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't connect fakesrc with decoder (error=%x)", hres), (NULL));
    goto end;
  }

  /* get decoder output video format */
  if (!gst_dshowvideodec_get_filter_output_format (vdec,
          klass->entry->output_subtype, &output_vheader, &size)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get decoder output video format"), (NULL));
    goto end;
  }

  memset (&output_mediatype, 0, sizeof (AM_MEDIA_TYPE));
  output_mediatype.majortype = klass->entry->output_majortype;
  output_mediatype.subtype = klass->entry->output_subtype;
  output_mediatype.bFixedSizeSamples = TRUE;
  output_mediatype.bTemporalCompression = FALSE;
  output_mediatype.lSampleSize = output_vheader->bmiHeader.biSizeImage;
  output_mediatype.formattype = FORMAT_VideoInfo;
  output_mediatype.cbFormat = size;
  output_mediatype.pbFormat = (BYTE *) output_vheader;

  vdec->fakesink->SetMediaType (&output_mediatype);

  /* connect decoder to our fake sink */
  output_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT);
  if (!output_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get output pin from our decoder filter"), (NULL));
    goto end;
  }

  hres = vdec->fakesink->QueryInterface(IID_IBaseFilter,
      (void **) &sinkfilter);
  if (FAILED (hres)) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
      ("Can't QT fakesink to IBaseFilter: %x", hres), (NULL));
    goto end;
  }

  input_pin = gst_dshow_get_pin_from_filter (sinkfilter, PINDIR_INPUT);
  if (!input_pin) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't get input pin from our directshow fakesink filter"), (NULL));
    goto end;
  }

  hres = vdec->filtergraph->ConnectDirect(output_pin, input_pin,
      &output_mediatype);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't connect decoder with fakesink (error=%x)", hres), (NULL));
    goto end;
  }

  /* negotiate output */
  caps_out = gst_caps_from_string (klass->entry->srccaps);
  gst_caps_set_simple (caps_out,
      "width", G_TYPE_INT, vdec->width,
      "height", G_TYPE_INT, vdec->height, NULL);

  if (vdec->fps_n && vdec->fps_d) {
      gst_caps_set_simple (caps_out,
          "framerate", GST_TYPE_FRACTION, vdec->fps_n, vdec->fps_d, NULL);
  }

  gst_caps_set_simple (caps_out, 
      "pixel-aspect-ratio", GST_TYPE_FRACTION, vdec->par_n, vdec->par_d, NULL);

  if (!gst_pad_set_caps (vdec->srcpad, caps_out)) {
    gst_caps_unref (caps_out);
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Failed to negotiate output"), (NULL));
    goto end;
  }
  gst_caps_unref (caps_out);

  hres = vdec->mediafilter->Run (-1);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
        ("Can't run the directshow graph (error=%d)", hres), (NULL));
    goto end;
  }

  ret = TRUE;
end:
  gst_object_unref (vdec);
  if (input_vheader)
    g_free (input_vheader);
  if (srcfilter)
    srcfilter->Release();
  if (sinkfilter)
    sinkfilter->Release();
  return ret;
}
Example #10
static gboolean
gst_dshowaudiosrc_prepare (GstAudioSrc * asrc, GstRingBufferSpec * spec)
{
  HRESULT hres;
  IGstDshowInterface *srcinterface = NULL;
  IPin *input_pin = NULL;
  GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (asrc);

  /* search the negotiated caps in our caps list to get its index and the corresponding mediatype */
  if (gst_caps_is_subset (spec->caps, src->caps)) {
    guint i = 0;
    gint res = -1;

    for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
      GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

      if (gst_caps_is_subset (spec->caps, capstmp)) {
        res = i;
      }
      gst_caps_unref (capstmp);
    }

    if (res != -1 && src->pins_mediatypes) {
      /*get the corresponding media type and build the dshow graph */
      GstCapturePinMediaType *pin_mediatype = NULL;
      GList *type = g_list_nth (src->pins_mediatypes, res);

      if (type) {
        pin_mediatype = (GstCapturePinMediaType *) type->data;

        hres =
            IBaseFilter_QueryInterface (src->dshow_fakesink,
            &IID_IGstDshowInterface, (void **) &srcinterface);
        if (hres != S_OK || !srcinterface) {
          GST_CAT_ERROR (dshowaudiosrc_debug,
              "Can't get IGstDshowInterface interface from our dshow fakesink filter (error=%d)",
              hres);
          goto error;
        }

        IGstDshowInterface_gst_set_media_type (srcinterface,
            pin_mediatype->mediatype);
        IGstDshowInterface_gst_set_buffer_callback (srcinterface,
            (byte *) gst_dshowaudiosrc_push_buffer, (byte *) src);

        if (srcinterface) {
          IGstDshowInterface_Release (srcinterface);
          srcinterface = NULL;
        }

        gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
            &input_pin);
        if (!input_pin) {
          GST_CAT_ERROR (dshowaudiosrc_debug,
              "Can't get input pin from our directshow fakesink filter");
          goto error;
        }

        hres =
            IFilterGraph_ConnectDirect (src->filter_graph,
            pin_mediatype->capture_pin, input_pin, NULL);
        IPin_Release (input_pin);

        if (hres != S_OK) {
          GST_CAT_ERROR (dshowaudiosrc_debug,
              "Can't connect capture filter with fakesink filter (error=%d)",
              hres);
          goto error;
        }

        spec->segsize = spec->rate * spec->channels;
        spec->segtotal = 1;
      }
    }
  }

  return TRUE;

error:
  if (srcinterface) {
    IGstDshowInterface_Release (srcinterface);
  }

  return FALSE;
}
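Example #11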
static gboolean
gst_dshowvideosrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
    HRESULT hres;
    IPin *input_pin = NULL;
    IPin *output_pin = NULL;
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
    GstStructure *s = gst_caps_get_structure (caps, 0);
    OAFilterState ds_graph_state;
    GstCaps *current_caps;

    /* search the negotiated caps in our caps list to get its index and the corresponding mediatype */
    if (gst_caps_is_subset (caps, src->caps)) {
        guint i = 0;
        gint res = -1;

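        /* query the current graph state without waiting (0 ms timeout) */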
        hres = src->media_control->GetState(0, &ds_graph_state);
        if(ds_graph_state == State_Running) {
            GST_INFO("Setting caps while DirectShow graph is already running");
            current_caps = gst_pad_get_current_caps(GST_BASE_SRC_PAD(src));

            if(gst_caps_is_equal(current_caps, caps)) {
                /* no need to set caps, just return */
                GST_INFO("Not resetting caps");
                gst_caps_unref(current_caps);
                return TRUE;
            }
            else {
                /* stop graph and disconnect filters so new caps can be set */
                GST_INFO("Different caps, stopping DirectShow graph");
                hres = src->media_control->Stop();
                hres = src->media_control->GetState(2000, &ds_graph_state);
                if(hres != S_OK) {
                    GST_ERROR("Could not stop DirectShow graph. Cannot renegoiate pins.");
                    goto error;
                }
                gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
                                               &input_pin);
                if (!input_pin) {
                    GST_ERROR ("Can't get input pin from our dshow fakesink");
                    goto error;
                }
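                /* ConnectedTo() returns the peer capture pin with an added
                 * reference, so both pins are released after disconnecting */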
                input_pin->ConnectedTo(&output_pin);
                hres = input_pin->Disconnect();
                hres = output_pin->Disconnect();
                input_pin->Release();
                output_pin->Release();
            }
            gst_caps_unref(current_caps);
        }

        for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
            GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

            if (gst_caps_is_subset (caps, capstmp)) {
                res = i;
            }
            gst_caps_unref (capstmp);
        }

        if (res != -1 && src->pins_mediatypes) {
            /* get the corresponding media type and build the dshow graph */
            GList *type_pin_mediatype = g_list_nth (src->pins_mediatypes, res);

            if (type_pin_mediatype) {
                GstCapturePinMediaType *pin_mediatype =
                    (GstCapturePinMediaType *) type_pin_mediatype->data;
                gchar *src_caps_string = NULL;
                const gchar *format_string = NULL;

                /* retrieve the desired video size */
                VIDEOINFOHEADER *video_info = NULL;
                gint width = 0;
                gint height = 0;
                gint numerator = 0;
                gint denominator = 0;
                gst_structure_get_int (s, "width", &width);
                gst_structure_get_int (s, "height", &height);
                gst_structure_get_fraction (s, "framerate", &numerator, &denominator);

                /* check that the desired video size respects the size granularity */
                /* This check will be removed when GST_TYPE_INT_RANGE_STEP exists */
                /* See remarks in gst_dshow_new_video_caps function */
                if (pin_mediatype->granularityWidth != 0
                        && width % pin_mediatype->granularityWidth != 0)
                    g_warning ("your desired video size is not valid : %d mod %d !=0\n",
                               width, pin_mediatype->granularityWidth);
                if (pin_mediatype->granularityHeight != 0
                        && height % pin_mediatype->granularityHeight != 0)
                    g_warning ("your desired video size is not valid : %d mod %d !=0\n",
                               height, pin_mediatype->granularityHeight);

                /* update mediatype */
                video_info = (VIDEOINFOHEADER *) pin_mediatype->mediatype->pbFormat;
                video_info->bmiHeader.biWidth = width;
                video_info->bmiHeader.biHeight = height;
                video_info->AvgTimePerFrame =
                    (LONGLONG) (10000000 * denominator / (double) numerator);
                video_info->bmiHeader.biSizeImage = DIBSIZE (video_info->bmiHeader);
                pin_mediatype->mediatype->lSampleSize = DIBSIZE (video_info->bmiHeader);

                src->dshow_fakesink->gst_set_media_type (pin_mediatype->mediatype);
                src->dshow_fakesink->gst_set_buffer_callback (
                    (push_buffer_func) gst_dshowvideosrc_push_buffer, src);

                gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
                                               &input_pin);
                if (!input_pin) {
                    GST_ERROR ("Can't get input pin from our dshow fakesink");
                    goto error;
                }

                hres = src->filter_graph->ConnectDirect (pin_mediatype->capture_pin,
                        input_pin, pin_mediatype->mediatype);
                input_pin->Release ();

                if (hres != S_OK) {
                    GST_ERROR
                    ("Can't connect capture filter with fakesink filter (error=0x%x)",
                     hres);
                    goto error;
                }

                /* save the negotiated width and height */
                gst_structure_get_int (s, "width", &src->width);
                gst_structure_get_int (s, "height", &src->height);

                src->is_rgb = FALSE;
                format_string = gst_structure_get_string (s, "format");
                if(format_string) {
                    if(!strcmp(format_string, "BGR")) {
                        src->is_rgb = TRUE;
                    }
                    else {
                        src->is_rgb = FALSE;
                    }
                }

                hres = src->media_control->Run();

                hres = src->media_control->GetState(5000, &ds_graph_state);
                if(hres != S_OK || ds_graph_state != State_Running) {
                    GST_ERROR("Could not run graph");
                    goto error;
                }
            }
        }
    }

    return TRUE;

error:
    return FALSE;
}
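Example #12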
static gboolean
gst_dshowaudiosrc_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
{
  HRESULT hres;
  IPin *input_pin = NULL;
  GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (asrc);
  GstCaps *current_caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (asrc));

  if (current_caps) {
    if (gst_caps_is_equal (spec->caps, current_caps)) {
      gst_caps_unref (current_caps);
      return TRUE;
    }
    gst_caps_unref (current_caps);
  }
  /* In 1.0, prepare() seems to be called in the PLAYING state. Most
     of the time you can't do much on a running graph. */

  gboolean was_running = src->is_running;
  if (was_running) {
    HRESULT hres = src->media_filter->Stop ();
    if (hres != S_OK) {
      GST_ERROR("Can't STOP the directshow capture graph for preparing (error=0x%x)", hres);
      return FALSE;
    }
    src->is_running = FALSE;
  }

  /* search the negotiated caps in our caps list to get its index and the corresponding mediatype */
  if (gst_caps_is_subset (spec->caps, src->caps)) {
    guint i = 0;
    gint res = -1;

    for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
      GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

      if (gst_caps_is_subset (spec->caps, capstmp)) {
        res = i;
      }
      gst_caps_unref (capstmp);
    }

    if (res != -1 && src->pins_mediatypes) {
      /*get the corresponding media type and build the dshow graph */
      GstCapturePinMediaType *pin_mediatype = NULL;
      GList *type = g_list_nth (src->pins_mediatypes, res);

      if (type) {
        pin_mediatype = (GstCapturePinMediaType *) type->data;

        src->dshow_fakesink->gst_set_media_type (pin_mediatype->mediatype);
        src->dshow_fakesink->gst_set_buffer_callback (
            (push_buffer_func) gst_dshowaudiosrc_push_buffer, src);

        gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
            &input_pin);
        if (!input_pin) {
          GST_ERROR ("Can't get input pin from our directshow fakesink filter");
          goto error;
        }

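        /* derive the ringbuffer segment size from the requested latency-time
         * and the segment count from buffer-time / latency-time */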
        spec->segsize = (gint) (spec->info.bpf * spec->info.rate * spec->latency_time /
            GST_MSECOND);
        spec->segtotal = (gint) ((gfloat) spec->buffer_time /
            (gfloat) spec->latency_time + 0.5);
        if (!gst_dshow_configure_latency (pin_mediatype->capture_pin,
            spec->segsize))
        {
          GST_WARNING ("Could not change capture latency");
          spec->segsize = spec->info.rate * spec->info.channels;
          spec->segtotal = 2;
        }
        GST_INFO ("Configuring with segsize:%d segtotal:%d", spec->segsize, spec->segtotal);

        if (gst_dshow_is_pin_connected (pin_mediatype->capture_pin)) {
          GST_DEBUG_OBJECT (src,
              "capture_pin already connected, disconnecting");
          src->filter_graph->Disconnect (pin_mediatype->capture_pin);
        }

        if (gst_dshow_is_pin_connected (input_pin)) {
          GST_DEBUG_OBJECT (src, "input_pin already connected, disconnecting");
          src->filter_graph->Disconnect (input_pin);
        }

        hres = src->filter_graph->ConnectDirect (pin_mediatype->capture_pin,
            input_pin, NULL);
        input_pin->Release ();

        if (hres != S_OK) {
          GST_ERROR
              ("Can't connect capture filter with fakesink filter (error=0x%x)",
              hres);
          goto error;
        }

      }
    }
  }

  if (was_running) {
    HRESULT hres = src->media_filter->Run (0);
    if (hres != S_OK) {
      GST_ERROR("Can't RUN the directshow capture graph after prepare (error=0x%x)", hres);
      return FALSE;
    }

    src->is_running = TRUE;
  }

  return TRUE;

error:
  /* Don't restart the graph, we're out anyway. */
  return FALSE;
}
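Example #13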
static gboolean
gst_dshowvideosrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  HRESULT hres;
  IPin *input_pin = NULL;
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
  GstStructure *s = gst_caps_get_structure (caps, 0);
  GstCaps *current_caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (bsrc));

  if (current_caps) {
    if (gst_caps_is_equal (caps, current_caps)) {
      gst_caps_unref (current_caps);
      return TRUE;
    }
    gst_caps_unref (current_caps);
  }

  /* Same remark as in gstdshowaudiosrc. */
  gboolean was_running = src->is_running;
  if (was_running) {
    HRESULT hres = src->media_filter->Stop ();
    if (hres != S_OK) {
      GST_ERROR ("Can't STOP the directshow capture graph (error=0x%x)", hres);
      return FALSE;
    }
    src->is_running = FALSE;
  }

  /* search the negotiated caps in our caps list to get its index and the corresponding mediatype */
  if (gst_caps_is_subset (caps, src->caps)) {
    guint i = 0;
    gint res = -1;

    for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
      GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

      if (gst_caps_is_subset (caps, capstmp)) {
        res = i;
      }
      gst_caps_unref (capstmp);
    }

    if (res != -1 && src->pins_mediatypes) {
      /* get the corresponding media type and build the dshow graph */
      GList *type_pin_mediatype = g_list_nth (src->pins_mediatypes, res);

      if (type_pin_mediatype) {
        GstCapturePinMediaType *pin_mediatype =
            (GstCapturePinMediaType *) type_pin_mediatype->data;
        gchar *caps_string = NULL;
        gchar *src_caps_string = NULL;

        /* retrieve the desired video size */
        VIDEOINFOHEADER *video_info = NULL;
        gint width = 0;
        gint height = 0;
        gint numerator = 0;
        gint denominator = 0;
        gst_structure_get_int (s, "width", &width);
        gst_structure_get_int (s, "height", &height);
        gst_structure_get_fraction (s, "framerate", &numerator, &denominator);

        /* check that the desired video size respects the size granularity */
        /* This check will be removed when GST_TYPE_INT_RANGE_STEP exists */
        /* See remarks in gst_dshow_new_video_caps function */
        if (pin_mediatype->granularityWidth != 0
            && width % pin_mediatype->granularityWidth != 0)
          g_warning ("your desired video size is not valid : %d mod %d !=0\n",
              width, pin_mediatype->granularityWidth);
        if (pin_mediatype->granularityHeight != 0
            && height % pin_mediatype->granularityHeight != 0)
          g_warning ("your desired video size is not valid : %d mod %d !=0\n",
              height, pin_mediatype->granularityHeight);

        /* update mediatype */
        video_info = (VIDEOINFOHEADER *) pin_mediatype->mediatype->pbFormat;
        video_info->bmiHeader.biWidth = width;
        video_info->bmiHeader.biHeight = height;
        video_info->AvgTimePerFrame =
            (LONGLONG) (10000000 * denominator / (double) numerator);
        video_info->bmiHeader.biSizeImage = DIBSIZE (video_info->bmiHeader);
        pin_mediatype->mediatype->lSampleSize = DIBSIZE (video_info->bmiHeader);

        src->dshow_fakesink->gst_set_media_type (pin_mediatype->mediatype);
        src->dshow_fakesink->gst_set_buffer_callback (
            (push_buffer_func) gst_dshowvideosrc_push_buffer, src);

        gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
            &input_pin);
        if (!input_pin) {
          GST_ERROR ("Can't get input pin from our dshow fakesink");
          goto error;
        }

        if (gst_dshow_is_pin_connected (pin_mediatype->capture_pin)) {
          GST_DEBUG_OBJECT (src,
              "capture_pin already connected, disconnecting");
          src->filter_graph->Disconnect (pin_mediatype->capture_pin);
        }

        if (gst_dshow_is_pin_connected (input_pin)) {
          GST_DEBUG_OBJECT (src, "input_pin already connected, disconnecting");
          src->filter_graph->Disconnect (input_pin);
        }

        hres = src->filter_graph->ConnectDirect (pin_mediatype->capture_pin,
            input_pin, pin_mediatype->mediatype);
        input_pin->Release ();

        if (hres != S_OK) {
          GST_ERROR
              ("Can't connect capture filter with fakesink filter (error=0x%x)",
              hres);
          goto error;
        }

        /* save the negotiated width and height */
        gst_structure_get_int (s, "width", &src->width);
        gst_structure_get_int (s, "height", &src->height);

        GstVideoInfo info;
        gst_video_info_from_caps (&info, caps);
        switch (GST_VIDEO_INFO_FORMAT (&info)) {
          case GST_VIDEO_FORMAT_RGB:
          case GST_VIDEO_FORMAT_BGR:
            src->is_rgb = TRUE;
            break;
          default:
            src->is_rgb = FALSE;
            break;
        }
      }
    }
  }

  if (was_running) {
    HRESULT hres = src->media_filter->Run (0);
    if (hres != S_OK) {
      GST_ERROR ("Can't RUN the directshow capture graph (error=0x%x)", hres);
      return FALSE;
    }
    src->is_running = TRUE;
  }

  return TRUE;

error:
  return FALSE;
}
Example #14
static gboolean
gst_dshowaudiodec_setup_graph (GstDshowAudioDec * adec, GstCaps *caps)
{
  gboolean ret = FALSE;
  GstDshowAudioDecClass *klass =
      (GstDshowAudioDecClass *) G_OBJECT_GET_CLASS (adec);
  HRESULT hres;
  GstCaps *outcaps;
  AM_MEDIA_TYPE *output_mediatype = NULL;
  AM_MEDIA_TYPE *input_mediatype = NULL;
  CComPtr<IPin> output_pin;
  CComPtr<IPin> input_pin;
  const AudioCodecEntry *codec_entry = klass->entry;
  CComQIPtr<IBaseFilter> srcfilter;
  CComQIPtr<IBaseFilter> sinkfilter;

  input_mediatype = dshowaudiodec_set_input_format (adec, caps);

  adec->fakesrc->GetOutputPin()->SetMediaType (input_mediatype);

  srcfilter = adec->fakesrc;

  /* connect our fake source to decoder */
  output_pin = gst_dshow_get_pin_from_filter (srcfilter, PINDIR_OUTPUT);
  if (!output_pin) {
    GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
        ("Can't get output pin from our directshow fakesrc filter"), (NULL));
    goto end;
  }
  input_pin = gst_dshow_get_pin_from_filter (adec->decfilter, PINDIR_INPUT);
  if (!input_pin) {
    GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
        ("Can't get input pin from decoder filter"), (NULL));
    goto end;
  }

  hres = adec->filtergraph->ConnectDirect (output_pin, input_pin,
      NULL);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
        ("Can't connect fakesrc with decoder (error=%x)", hres), (NULL));
    goto end;
  }

  output_mediatype = dshowaudiodec_set_output_format (adec);
  if (!output_mediatype) {
    GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
        ("Can't get audio output format from decoder"), (NULL));
    goto end;
  }

  adec->fakesink->SetMediaType(output_mediatype);

  outcaps = gst_caps_new_simple ("audio/x-raw-int",
      "width", G_TYPE_INT, adec->depth,
      "depth", G_TYPE_INT, adec->depth,
      "rate", G_TYPE_INT, adec->rate,
      "channels", G_TYPE_INT, adec->channels, 
      "signed", G_TYPE_BOOLEAN, TRUE,
      "endianness", G_TYPE_INT, G_LITTLE_ENDIAN,
      NULL);

  if (!gst_pad_set_caps (adec->srcpad, outcaps)) {
    gst_caps_unref (outcaps);
    GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
        ("Failed to negotiate output"), (NULL));
    goto end;
  }
  gst_caps_unref (outcaps);

  /* connect the decoder to our fake sink */
  output_pin = gst_dshow_get_pin_from_filter (adec->decfilter, PINDIR_OUTPUT);
  if (!output_pin) {
    GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
        ("Can't get output pin from our decoder filter"), (NULL));
    goto end;
  }

  sinkfilter = adec->fakesink;
  input_pin = gst_dshow_get_pin_from_filter (sinkfilter, PINDIR_INPUT);
  if (!input_pin) {
    GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
        ("Can't get input pin from our directshow fakesink filter"), (NULL));
    goto end;
  }

  hres = adec->filtergraph->ConnectDirect(output_pin, input_pin, NULL);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
        ("Can't connect decoder with fakesink (error=%x)", hres), (NULL));
    goto end;
  }

  hres = adec->mediafilter->Run (-1);
  if (hres != S_OK) {
    GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
        ("Can't run the directshow graph (error=%x)", hres), (NULL));
    goto end;
  }

  ret = TRUE;
  adec->setup = TRUE;
end:
  if (input_mediatype)
    dshowadec_free_mediatype (input_mediatype);
  if (output_mediatype)
    dshowadec_free_mediatype (output_mediatype);

  return ret;
}