Code example #1
File: gstgooencarmaac.c  Project: ceyusa/gst-goo
static gboolean
gst_goo_encarmaac_setcaps (GstPad * pad, GstCaps * caps)
{
	GstGooEncArmAac* self = GST_GOO_ENCARMAAC (gst_pad_get_parent (pad));
	GstStructure* structure = gst_caps_get_structure (caps, 0);

	gint channels, samplerate, width;
	gulong fmt = 0;
	gboolean result = FALSE;

	GstCaps* srccaps = NULL;

	if (!gst_caps_is_fixed (caps))
	{
		goto done;
	}

	omx_stop (self);

	if (!gst_structure_get_int (structure, "channels", &channels) ||
	    !gst_structure_get_int (structure, "rate", &samplerate))
	{
		goto done;
	}

	if (gst_structure_has_name (structure, "audio/x-raw-int"))
	{
		gst_structure_get_int (structure, "width", &width);
		switch (width)
		{
		case 16:
			fmt = 16;
			break;
		default:
			g_return_val_if_reached (FALSE);
		}
	}
	else
	{
		g_return_val_if_reached (FALSE);
	}

	/* 20 msec */
	gint outputframes = 0;
	g_object_get (self->component, "frames-buffer", &outputframes, NULL);
	self->duration = outputframes *
		gst_util_uint64_scale_int (1, GST_SECOND, 50);

	GST_OBJECT_LOCK (self);
	/* input params */
	{
		OMX_AUDIO_PARAM_PCMMODETYPE* param = NULL;
		param = GOO_TI_ARMAACENC_GET_INPUT_PORT_PARAM (self->component);
		param->nBitPerSample = fmt;
		param->nChannels = channels;
		param->nSamplingRate = samplerate;
	}

	/* output params */
	{
		OMX_AUDIO_PARAM_AACPROFILETYPE* param = NULL;
		param = GOO_TI_ARMAACENC_GET_OUTPUT_PORT_PARAM (self->component);

		param->nChannels = channels;
		param->nSampleRate = samplerate;
		if (channels == 1)
		{
			param->eChannelMode = OMX_AUDIO_ChannelModeMono;
		}
		if (channels == 2)
		{
			param->eChannelMode = OMX_AUDIO_ChannelModeStereo;
		}
	}
	GST_OBJECT_UNLOCK (self);

	omx_start (self);

	/* now create a caps for it all */
	srccaps = gst_caps_new_simple ("audio/mpeg",
				       "mpegversion", G_TYPE_INT, 4,
				       "channels", G_TYPE_INT, channels,
				       "rate", G_TYPE_INT, samplerate,
				       NULL);

	result = gst_pad_set_caps (self->srcpad, srccaps);
	gst_caps_unref (srccaps);

done:
	gst_object_unref (self);
	return result;
}
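
A setcaps handler with this signature is GStreamer 0.10 API; it is normally installed on the sink pad when the element instance is created. A minimal sketch of that registration (the instance-init function and the sinkpad field are assumptions, not part of the snippet above):

static void
gst_goo_encarmaac_init (GstGooEncArmAac* self)
{
	/* hypothetical init function; assumes the sink pad was already
	 * created from the element's sink pad template */
	gst_pad_set_setcaps_function (self->sinkpad,
			GST_DEBUG_FUNCPTR (gst_goo_encarmaac_setcaps));
}
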
Code example #2
File: GstShow.cpp  Project: bonfus/GstMadness
bool GstShow::init_pipeline(const int  xwinid)
{
    pipeline = gst_pipeline_new ("xvoverlay");

    
    //create base pipeline elements
    videosink = gst_element_factory_make("xvimagesink", NULL);

    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (videosink), xwinid);    
    
    mixer = gst_element_factory_make("videomixer", "mix");
    
    
    // Manually request and configure the mixer sink pads
    GstPadTemplate *mixer_sink_pad_template = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(mixer), "sink_%u");
    
    if(mixer_sink_pad_template == NULL) {
      g_printerr("Could not get mixer pad template.\n");
      gst_object_unref(pipeline);
      return false;
    }    
    
    GstPad* mixerpads[2];
    
    mixerpads[0] = gst_element_request_pad(mixer, mixer_sink_pad_template, NULL, NULL);
    mixerpads[1] = gst_element_request_pad(mixer, mixer_sink_pad_template, NULL, NULL);
    
    
    g_object_set(mixerpads[0], "xpos",   0, NULL);
    g_object_set(mixerpads[0], "ypos",   0, NULL);
    g_object_set(mixerpads[0], "alpha",1.0, NULL);
    
    g_object_set(mixerpads[1], "xpos", 640, NULL);
    g_object_set(mixerpads[1], "ypos",   0, NULL);
    g_object_set(mixerpads[1], "alpha",1.0, NULL);    
    
    
    gst_object_unref(mixerpads[0]);
    gst_object_unref(mixerpads[1]);
    
    // prepare queue and scale
    
    for (int i = 0; i<2; i++)
    {
        queue[i] = gst_element_factory_make("queue", NULL);
        scale[i] = gst_element_factory_make("videoscale", NULL);
        scalefilter[i] = gst_element_factory_make("capsfilter", NULL);
    
        GstCaps *caps =  gst_caps_new_simple("video/x-raw",
            "width", G_TYPE_INT, 640,
            "height", G_TYPE_INT, 480, 
            //"format", G_TYPE_STRING, "BGR",
            NULL);
        caps = gst_caps_fixate(caps);
        
        g_object_set(G_OBJECT(scalefilter[i]), "caps", caps, NULL);
        gst_caps_unref(caps);
    }
    
    
    
    gst_bin_add_many(GST_BIN(pipeline), queue[0], queue[1], scale[0], scale[1], 
                    scalefilter[0], scalefilter[1], mixer, videosink, NULL);
    
    
    
    return true;
}
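
Note that init_pipeline() only adds the elements to the bin; linking them is left to other code. A hedged sketch of the linking step that would typically follow (the mixer pad names "sink_0" and "sink_1" are assumptions based on the two pads requested from the "sink_%u" template above):

    for (int i = 0; i < 2; i++)
    {
        // queue -> videoscale -> capsfilter for each input branch
        if (!gst_element_link_many(queue[i], scale[i], scalefilter[i], NULL))
            return false;
        // then into the corresponding requested mixer pad (names assumed)
        if (!gst_element_link_pads(scalefilter[i], "src", mixer,
                                   i == 0 ? "sink_0" : "sink_1"))
            return false;
    }
    gst_element_link(mixer, videosink);
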
Code example #3
/* if element caps already in list, will make sure Transform elements have
 * priority and replace old ones */
static GList *
create_codec_cap_list (GstElementFactory *factory,
                       GstPadDirection direction,
                       GList *list,
                       GstCaps *rtp_caps)
{
  const GList *pads = gst_element_factory_get_static_pad_templates (factory);
  gint i;


  /* Let us look at each pad for stuff to add*/
  while (pads)
  {
    GstCaps *caps = NULL;
    GstStaticPadTemplate *padtemplate = NULL;

    padtemplate = (GstStaticPadTemplate *) (pads->data);
    pads = g_list_next (pads);

    if (padtemplate->direction != direction)
      continue;

    if (padtemplate->presence != GST_PAD_ALWAYS) {
      continue;
    }

    caps = gst_static_pad_template_get_caps (padtemplate);
    /*
      DEBUG ("%s caps are %s", gst_plugin_feature_get_name (GST_PLUGIN_FEATURE
      (factory)), gst_caps_to_string (caps));
    */

    /* skips caps ANY */
    if (!caps || gst_caps_is_any (caps))
    {
      goto done;
    }

    /* let us add one entry to the list per media type */
    for (i = 0; i < gst_caps_get_size (caps); i++)
    {
      CodecCap *entry = NULL;
      GList *found_item = NULL;
      GstStructure *structure = gst_caps_get_structure (caps, i);
      GstCaps *cur_caps = NULL;

      /* FIXME fix this in gstreamer! The rtpdepay element is bogus, it claims to
       * be a depayloader yet has application/x-rtp on both sides and does
       * absolutely nothing */
      /* Let's check if media caps are really media caps, this is to deal with
       * weird elements such as rtpdepay that say they're depayloaders but have
       * application/x-rtp on src and sink pads */
      const gchar *name = gst_structure_get_name (structure);
      if (g_ascii_strcasecmp (name, "application/x-rtp") == 0)
      {
        GST_DEBUG ("skipping %s : %s",
            gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)), name);
        continue;
      }

      cur_caps = gst_caps_new_full (gst_structure_copy (structure), NULL);

      /* let's check if this caps is already in the list, if so let's replace
       * that CodecCap list instead of creating a new one */
      /* we need to compare both media caps and rtp caps */
      found_item = g_list_find_custom (list, cur_caps,
          (GCompareFunc)compare_media_caps);
      if (found_item)
      {
        entry = (CodecCap *)found_item->data;
        /* if RTP caps exist and don't match nullify entry */
        if (rtp_caps && compare_rtp_caps (found_item->data, rtp_caps))
        {
          entry = NULL;
        }
      }

      if (!entry)
      {
        entry = g_slice_new0 (CodecCap);

        entry->caps = cur_caps;
        if (rtp_caps)
        {
          entry->rtp_caps = rtp_caps;
          gst_caps_ref (rtp_caps);
        }
        list = g_list_append (list, entry);
        entry->element_list1 = g_list_prepend (NULL,
          g_list_prepend (NULL, factory));
        gst_object_ref (factory);
      }
      else
      {
        entry->element_list1->data =
          g_list_append (entry->element_list1->data, factory);
        gst_object_ref (factory);

        if (rtp_caps) {
          if (entry->rtp_caps) {
            GstCaps *tmp = gst_caps_intersect (rtp_caps, entry->rtp_caps);
            gst_caps_unref (entry->rtp_caps);
            entry->rtp_caps = tmp;
          } else {
            entry->rtp_caps = gst_caps_ref (rtp_caps);
            /* This shouldn't happen: either we're looking at rtp elements
             * or we're not */
            g_assert_not_reached ();
          }
        }
        entry->caps = gst_caps_merge (cur_caps, entry->caps);
      }
    }
  done:
    if (caps != NULL) {
      gst_caps_unref (caps);
    }

  }

  return list;
}
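
The compare_media_caps() helper passed to g_list_find_custom() above is not shown in this snippet. Since g_list_find_custom() treats a return value of 0 as a match, a plausible sketch (assuming CodecCap keeps its media caps in the caps field) could look like:

static gint
compare_media_caps (gconstpointer a, gconstpointer b)
{
  const CodecCap *element = a;  /* current list entry */
  const GstCaps *caps = b;      /* caps being searched for */

  /* 0 signals a match to g_list_find_custom () */
  return gst_caps_is_equal (element->caps, caps) ? 0 : 1;
}
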
Code example #4
File: sprinkle2.c  Project: ChinnaSuhas/ossbuild
int
main (int argc, char *argv[])
{
  GstBus *bus;
  GstElement *filter, *convert, *sink;
  GstCaps *caps;
  gboolean res;
  SprinkleState *state;

  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, TRUE);

  pipeline = gst_pipeline_new ("pipeline");

  /* add the fixed part to the pipeline. Remember that we need a capsfilter
   * after adder so that multiple sources are not racing to negotiate
   * a format */
  adder = gst_element_factory_make ("adder", "adder");
  filter = gst_element_factory_make ("capsfilter", "filter");
  convert = gst_element_factory_make ("audioconvert", "convert");
  sink = gst_element_factory_make ("autoaudiosink", "sink");

  caps = gst_caps_new_simple ("audio/x-raw-int",
      "endianness", G_TYPE_INT, G_LITTLE_ENDIAN,
      "channels", G_TYPE_INT, 2,
      "width", G_TYPE_INT, 16,
      "depth", G_TYPE_INT, 16,
      "rate", G_TYPE_INT, 44100, "signed", G_TYPE_BOOLEAN, TRUE, NULL);
  g_object_set (filter, "caps", caps, NULL);
  gst_caps_unref (caps);

  gst_bin_add_many (GST_BIN (pipeline), adder, filter, convert, sink, NULL);

  res = gst_element_link_many (adder, filter, convert, sink, NULL);
  g_assert (res);

  /* setup message handling */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch_full (bus, G_PRIORITY_HIGH);
  g_signal_connect (bus, "message::error", (GCallback) message_received,
      pipeline);
  g_signal_connect (bus, "message::warning", (GCallback) message_received,
      pipeline);
  g_signal_connect (bus, "message::eos", (GCallback) eos_message_received,
      pipeline);

  /* we set the pipeline to PLAYING, the pipeline will not yet preroll because
   * there is no source providing data for it yet */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* and add the function that modifies the pipeline every 100ms */
  state = create_state ();
  g_timeout_add (100, (GSourceFunc) do_sprinkle, state);

  /* go to main loop */
  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  free_state (state);
  gst_object_unref (bus);
  gst_object_unref (pipeline);

  return 0;
}
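
create_state(), do_sprinkle() and the message handlers are defined elsewhere in sprinkle2.c. A hedged sketch of the kind of dynamic source attachment a callback like do_sprinkle() performs (0.10-era API to match the audio/x-raw-int caps above; the adder request pad name "sink%d" is an assumption):

/* hypothetical helper: attach one more test source to the running adder */
static void
add_sprinkle_source (void)
{
  GstElement *src = gst_element_factory_make ("audiotestsrc", NULL);
  GstPad *srcpad, *sinkpad;

  gst_bin_add (GST_BIN (pipeline), src);

  srcpad = gst_element_get_static_pad (src, "src");
  sinkpad = gst_element_get_request_pad (adder, "sink%d");
  gst_pad_link (srcpad, sinkpad);
  gst_object_unref (srcpad);
  /* a real implementation keeps sinkpad around to release it later */
  gst_object_unref (sinkpad);

  /* bring the new source up to the pipeline's state */
  gst_element_sync_state_with_parent (src);
}
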
Code example #5
static void
gst_gnome_vfs_src_received_headers_callback (gconstpointer in,
    gsize in_size, gpointer out, gsize out_size, gpointer callback_data)
{
  GList *i;
  gint icy_metaint;
  GstGnomeVFSSrc *src = GST_GNOME_VFS_SRC (callback_data);
  GnomeVFSModuleCallbackReceivedHeadersIn *in_args =
      (GnomeVFSModuleCallbackReceivedHeadersIn *) in;

  /* This is only used for internet radio stuff right now */
  if (!src->iradio_mode)
    return;

  GST_DEBUG_OBJECT (src, "receiving internet radio metadata\n");

  /* FIXME: Could we use "Accept-Ranges: bytes"
   * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.5
   * to enable pull-mode?
   */

  for (i = in_args->headers; i; i = i->next) {
    char *data = (char *) i->data;
    char *value = strchr (data, ':');
    char *key;

    if (!value)
      continue;

    value++;
    g_strstrip (value);
    if (!strlen (value))
      continue;

    GST_LOG_OBJECT (src, "data %s", data);

    /* Icecast stuff */
    if (strncmp (data, "icy-metaint:", 12) == 0) {      /* ugh */
      if (sscanf (data + 12, "%d", &icy_metaint) == 1) {
        if (icy_metaint > 0) {
          GstCaps *icy_caps;

          icy_caps = gst_caps_new_simple ("application/x-icy",
              "metadata-interval", G_TYPE_INT, icy_metaint, NULL);
          gst_pad_set_caps (GST_BASE_SRC_PAD (src), icy_caps);
          gst_caps_unref (icy_caps);
        }
      }
      continue;
    }

    if (!strncmp (data, "icy-", 4))
      key = data + 4;
    else
      continue;

    GST_DEBUG_OBJECT (src, "key: %s", key);
    if (!strncmp (key, "name", 4)) {
      g_free (src->iradio_name);
      src->iradio_name = gst_gnome_vfs_src_unicodify (value);
      if (src->iradio_name)
        g_object_notify (G_OBJECT (src), "iradio-name");
    } else if (!strncmp (key, "genre", 5)) {
      g_free (src->iradio_genre);
      src->iradio_genre = gst_gnome_vfs_src_unicodify (value);
      if (src->iradio_genre)
        g_object_notify (G_OBJECT (src), "iradio-genre");
    } else if (!strncmp (key, "url", 3)) {
      g_free (src->iradio_url);
      src->iradio_url = gst_gnome_vfs_src_unicodify (value);
      if (src->iradio_url)
        g_object_notify (G_OBJECT (src), "iradio-url");
    }
  }
}
Code example #6
int
main (int argc, char **argv)
{
  GstElement *source, *filter, *encoder, *conv, *resampler, *sink, *oggmux;
  GstCaps *caps;
  GstBus *bus;
  guint bus_watch_id;
  struct AudioMessage audio_message;
  int abort_send = 0;

  typedef void (*SignalHandlerPointer) (int);

  SignalHandlerPointer inthandler, termhandler;
  inthandler = signal (SIGINT, signalhandler);
  termhandler = signal (SIGTERM, signalhandler);

#ifdef DEBUG_RECORD_PURE_OGG
  dump_pure_ogg = getenv ("GNUNET_RECORD_PURE_OGG") ? 1 : 0;
#endif

#ifdef WINDOWS
  setmode (1, _O_BINARY);
#endif

  /* Initialisation */
  gst_init (&argc, &argv);

  GNUNET_assert (GNUNET_OK ==
		 GNUNET_log_setup ("gnunet-helper-audio-record",
				   "WARNING",
				   NULL));

  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
	      "Audio source starts\n");

  audio_message.header.type = htons (GNUNET_MESSAGE_TYPE_CONVERSATION_AUDIO);

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("audio-recorder");
  source   = gst_element_factory_make ("autoaudiosrc",  "audiosource");
  filter   = gst_element_factory_make ("capsfilter",    "filter");
  conv     = gst_element_factory_make ("audioconvert",  "converter");
  resampler= gst_element_factory_make ("audioresample", "resampler");
  encoder  = gst_element_factory_make ("opusenc",       "opus-encoder");
  oggmux   = gst_element_factory_make ("oggmux",        "ogg-muxer");
  sink     = gst_element_factory_make ("appsink",       "audio-output");

  if (!pipeline || !filter || !source || !conv || !resampler || !encoder || !oggmux || !sink)
  {
    GNUNET_log (GNUNET_ERROR_TYPE_ERROR,
        "One element could not be created. Exiting.\n");
    return -1;
  }

  g_signal_connect (source, "child-added", G_CALLBACK (source_child_added), NULL);

  /* Set up the pipeline */

  caps = gst_caps_new_simple ("audio/x-raw",
    "format", G_TYPE_STRING, "S16LE",
/*    "rate", G_TYPE_INT, SAMPLING_RATE,*/
    "channels", G_TYPE_INT, OPUS_CHANNELS,
/*    "layout", G_TYPE_STRING, "interleaved",*/
     NULL);
  g_object_set (G_OBJECT (filter),
      "caps", caps,
      NULL);
  gst_caps_unref (caps);

  g_object_set (G_OBJECT (encoder),
/*      "bitrate", 64000, */
/*      "bandwidth", OPUS_BANDWIDTH_FULLBAND, */
      "inband-fec", INBAND_FEC_MODE,
      "packet-loss-percentage", PACKET_LOSS_PERCENTAGE,
      "max-payload-size", MAX_PAYLOAD_SIZE,
      "audio", FALSE, /* VoIP, not audio */
      "frame-size", OPUS_FRAME_SIZE,
      NULL);

  g_object_set (G_OBJECT (oggmux),
      "max-delay", OGG_MAX_DELAY,
      "max-page-delay", OGG_MAX_PAGE_DELAY,
      NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, pipeline);
  gst_object_unref (bus);

  /* we add all elements into the pipeline */
  /* audiosource | converter | resampler | opus-encoder | audio-output */
  gst_bin_add_many (GST_BIN (pipeline), source, filter, conv, resampler, encoder,
      oggmux, sink, NULL);

  /* we link the elements together */
  gst_element_link_many (source, filter, conv, resampler, encoder, oggmux, sink, NULL);

  /* Set the pipeline to "playing" state*/
  GNUNET_log (GNUNET_ERROR_TYPE_INFO, "Now playing\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);


  GNUNET_log (GNUNET_ERROR_TYPE_INFO, "Running...\n");
  /* Iterate */
  while (!abort_send)
  {
    GstSample *s;
    GstBuffer *b;
    GstMapInfo m;
    size_t len, msg_size;
    const char *ptr;
    int phase;

    GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "pulling...\n");
    s = gst_app_sink_pull_sample (GST_APP_SINK (sink));
    if (NULL == s)
    {
      GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "pulled NULL\n");
      break;
    }
    GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "...pulled!\n");
    {
      const GstStructure *si;
      char *si_str;
      GstCaps *s_caps;
      char *caps_str;
      si = gst_sample_get_info (s);
      if (si)
      {
        si_str = gst_structure_to_string (si);
        if (si_str)
        {
          GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "Got sample %s\n", si_str);
          g_free (si_str);
        }
      }
      else
      GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "Got sample with no info\n");
      s_caps = gst_sample_get_caps (s);
      if (s_caps)
      {
        caps_str = gst_caps_to_string (s_caps);
        if (caps_str)
        {
          GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "Got sample with caps %s\n", caps_str);
          g_free (caps_str);
        }
      }
      else
        GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "Got sample with no caps\n");
    }
    b = gst_sample_get_buffer (s);
    if (NULL == b || !gst_buffer_map (b, &m, GST_MAP_READ))
    {
      GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "got NULL buffer %p or failed to map the buffer\n", b);
      gst_sample_unref (s);
      continue;
    }

    len = m.size;
    if (len > UINT16_MAX - sizeof (struct AudioMessage))
    {
      GNUNET_break (0);
      len = UINT16_MAX - sizeof (struct AudioMessage);
    }
    msg_size = sizeof (struct AudioMessage) + len;
    audio_message.header.size = htons ((uint16_t) msg_size);

    GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
        "Sending %u bytes of audio data\n", (unsigned int) msg_size);
    for (phase = 0; phase < 2; phase++)
    {
      size_t offset;
      size_t to_send;
      ssize_t ret;
      if (0 == phase)
      {
#ifdef DEBUG_RECORD_PURE_OGG
        if (dump_pure_ogg)
          continue;
#endif
        ptr = (const char *) &audio_message;
        to_send = sizeof (audio_message);
      }
      else
      {
        ptr = (const char *) m.data;
        to_send = len;
      }
      GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
          "Sending %u bytes on phase %d\n", (unsigned int) to_send, phase);
      for (offset = 0; offset < to_send; offset += ret)
      {
        ret = write (1, &ptr[offset], to_send - offset);
        if (0 >= ret)
        {
          if (-1 == ret)
            GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
                "Failed to write %u bytes at offset %u (total %u) in phase %d: %s\n",
                (unsigned int) to_send - offset, (unsigned int) offset,
                (unsigned int) (to_send + offset), phase, strerror (errno));
          abort_send = 1;
          break;
        }
      }
      if (abort_send)
        break;
    }
    gst_buffer_unmap (b, &m);
    gst_sample_unref (s);
  }

  signal (SIGINT, inthandler);
  signal (SIGTERM, termhandler);

  GNUNET_log (GNUNET_ERROR_TYPE_INFO, "Returned, stopping playback\n");
  quit ();

  GNUNET_log (GNUNET_ERROR_TYPE_INFO, "Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  pipeline = NULL;
  g_source_remove (bus_watch_id);

  return 0;
}
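
The sender above writes each frame to stdout as a fixed-size AudioMessage header followed by the Ogg payload. A hedged sketch of the matching read side (assuming the GNUnet header layout used above, with the 16-bit size field in network byte order, and <unistd.h>/<arpa/inet.h> available):

/* hypothetical receiver: read one header + payload frame from fd 0 */
static int
read_frame (char *payload, size_t max_len, size_t *payload_len)
{
  struct AudioMessage header;
  size_t off;
  ssize_t ret;

  /* read the fixed-size header first */
  for (off = 0; off < sizeof (header); off += ret)
  {
    ret = read (0, ((char *) &header) + off, sizeof (header) - off);
    if (0 >= ret)
      return -1;
  }
  *payload_len = ntohs (header.header.size) - sizeof (header);
  if (*payload_len > max_len)
    return -1;
  /* then the variable-size Ogg payload */
  for (off = 0; off < *payload_len; off += ret)
  {
    ret = read (0, payload + off, *payload_len - off);
    if (0 >= ret)
      return -1;
  }
  return 0;
}
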
Code example #7
File: cvcap_gstreamer.cpp  Project: alejotima/opencv
static CvCapture_GStreamer * icvCreateCapture_GStreamer(int type, const char *filename)
{
	CvCapture_GStreamer *capture = 0;
	CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

	__BEGIN__;

//	teststreamer(filename);

//	return 0;

	if(!isInited) {
		printf("gst_init\n");
		gst_init (NULL, NULL);

// according to the documentation this is the way to register a plugin now
// unfortunately, it has not propagated into my distribution yet...
// 		gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
// 			"opencv-appsink", "Element application sink",
// 			"0.1", appsink_plugin_init, "LGPL", "highgui", "opencv",
// 			"http://opencvlibrary.sourceforge.net/");

		isInited = true;
	}

	const char *sourcetypes[] = {"dv1394src", "v4lsrc", "v4l2src", "filesrc"};
//	printf("entered capturecreator %s\n", sourcetypes[type]);

	GstElement *source = gst_element_factory_make(sourcetypes[type], NULL);
	if(!source)
		return 0;

	if(type == CV_CAP_GSTREAMER_FILE)
		g_object_set(G_OBJECT(source), "location", filename, NULL);

	GstElement *colour = gst_element_factory_make("ffmpegcolorspace", NULL);

	GstElement *sink = gst_element_factory_make("opencv-appsink", NULL);
	GstCaps *caps = gst_caps_new_simple("video/x-raw-rgb", NULL);
	gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
//	gst_caps_unref(caps);
	gst_base_sink_set_sync(GST_BASE_SINK(sink), false);
//	g_signal_connect(sink, "new-buffer", G_CALLBACK(newbuffer), NULL);

	GstElement *decodebin = gst_element_factory_make("decodebin", NULL);
	g_signal_connect(decodebin, "new-decoded-pad", G_CALLBACK(icvNewPad), colour);

	GstElement *pipeline = gst_pipeline_new (NULL);

	gst_bin_add_many(GST_BIN(pipeline), source, decodebin, colour, sink, NULL);

//	printf("added many\n");

	switch(type) {
	case CV_CAP_GSTREAMER_V4L2: // default to 640x480, 30 fps
		caps = gst_caps_new_simple("video/x-raw-rgb",
					   "width", G_TYPE_INT, 640,
					   "height", G_TYPE_INT, 480,
					   "framerate", GST_TYPE_FRACTION, 30, 1,
					   NULL);
		if(!gst_element_link_filtered(source, decodebin, caps)) {
			CV_ERROR(CV_StsError, "GStreamer: cannot link v4l2src -> decodebin\n");
			gst_object_unref(pipeline);
			return 0;
		}
		gst_caps_unref(caps);
		break;
	case CV_CAP_GSTREAMER_V4L:
	case CV_CAP_GSTREAMER_1394:
	case CV_CAP_GSTREAMER_FILE:
		if(!gst_element_link(source, decodebin)) {
			CV_ERROR(CV_StsError, "GStreamer: cannot link filesrc -> decodebin\n");
			gst_object_unref(pipeline);
			return 0;
		}
		break;
	}

	if(!gst_element_link(colour, sink)) {
		CV_ERROR(CV_StsError, "GStreamer: cannot link colour -> sink\n");
		gst_object_unref(pipeline);
		return 0;
	}

//	printf("linked, pausing\n");

	if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED) ==
	   GST_STATE_CHANGE_FAILURE) {
		CV_WARN("GStreamer: unable to set pipeline to paused\n");
//		icvHandleMessage(capture);
//		cvReleaseCapture((CvCapture **)(void *)&capture);
		gst_object_unref(pipeline);
		return 0;
	}

//	printf("state now paused\n");

	// construct capture struct
	capture = (CvCapture_GStreamer *)cvAlloc(sizeof(CvCapture_GStreamer));
	memset(capture, 0, sizeof(CvCapture_GStreamer));
	capture->type = type;
	capture->pipeline = pipeline;
	capture->source = source;
	capture->decodebin = decodebin;
	capture->colour = colour;
	capture->appsink = sink;

	icvHandleMessage(capture);

	OPENCV_ASSERT(capture,
                      "cvCaptureFromFile_GStreamer( const char * )", "couldn't create capture");

//	GstClock *clock = gst_pipeline_get_clock(GST_PIPELINE(pipeline));
//	printf("clock %s\n", gst_object_get_name(GST_OBJECT(clock)));

	__END__;

	return capture;
}
Code example #8
File: pulsesrc.c  Project: ChinnaSuhas/ossbuild
/* This is essentially gst_base_src_negotiate_default() but the caps
 * are guaranteed to have a channel layout for > 2 channels
 */
static gboolean
gst_pulsesrc_negotiate (GstBaseSrc * basesrc)
{
  GstCaps *thiscaps;
  GstCaps *caps = NULL;
  GstCaps *peercaps = NULL;
  gboolean result = FALSE;

  /* first see what is possible on our source pad */
  thiscaps = gst_pad_get_caps_reffed (GST_BASE_SRC_PAD (basesrc));
  GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
  /* nothing or anything is allowed, we're done */
  if (thiscaps == NULL || gst_caps_is_any (thiscaps))
    goto no_nego_needed;

  /* get the peer caps */
  peercaps = gst_pad_peer_get_caps_reffed (GST_BASE_SRC_PAD (basesrc));
  GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
  if (peercaps) {
    /* get intersection */
    caps = gst_caps_intersect (thiscaps, peercaps);
    GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, caps);
    gst_caps_unref (thiscaps);
    gst_caps_unref (peercaps);
  } else {
    /* no peer, work with our own caps then */
    caps = thiscaps;
  }
  if (caps) {
    /* take first (and best, since they are sorted) possibility */
    caps = gst_caps_make_writable (caps);
    gst_caps_truncate (caps);

    /* now fixate */
    if (!gst_caps_is_empty (caps)) {
      gst_pad_fixate_caps (GST_BASE_SRC_PAD (basesrc), caps);
      GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);

      if (gst_caps_is_any (caps)) {
        /* hmm, still anything, so element can do anything and
         * nego is not needed */
        result = TRUE;
      } else if (gst_caps_is_fixed (caps)) {
        /* yay, fixed caps, use those then */
        result = gst_pulsesrc_create_stream (GST_PULSESRC_CAST (basesrc), caps);
        if (result)
          result = gst_pad_set_caps (GST_BASE_SRC_PAD (basesrc), caps);
      }
    }
    gst_caps_unref (caps);
  }
  return result;

no_nego_needed:
  {
    GST_DEBUG_OBJECT (basesrc, "no negotiation needed");
    if (thiscaps)
      gst_caps_unref (thiscaps);
    return TRUE;
  }
}
Code example #9
File: blitter.c  Project: sijisunny/gstreamer-imx
gboolean gst_imx_ipu_blitter_set_input_buffer(GstImxIpuBlitter *ipu_blitter, GstBuffer *input_buffer)
{
	GstImxPhysMemMeta *phys_mem_meta;

	g_assert(input_buffer != NULL);

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(input_buffer);

	/* Test if the input buffer uses DMA memory */
	if ((phys_mem_meta != NULL) && (phys_mem_meta->phys_addr != 0))
	{
		/* DMA memory present - the input buffer can be used as an actual input buffer */
		gst_imx_ipu_blitter_set_actual_input_buffer(ipu_blitter, gst_buffer_ref(input_buffer));

		GST_TRACE_OBJECT(ipu_blitter, "input buffer uses DMA memory - setting it as actual input buffer");
	}
	else
	{
		/* No DMA memory present; the input buffer needs to be copied to an internal
		 * temporary input buffer */

		GstBuffer *temp_input_buffer;
		GstFlowReturn flow_ret;

		GST_TRACE_OBJECT(ipu_blitter, "input buffer does not use DMA memory - need to copy it to an internal input DMA buffer");

		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			if (ipu_blitter->internal_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstCaps *caps = gst_video_info_to_caps(&(ipu_blitter->input_video_info));

				ipu_blitter->internal_bufferpool = gst_imx_ipu_blitter_create_bufferpool(
					ipu_blitter,
					caps,
					ipu_blitter->input_video_info.size,
					2, 0,
					NULL,
					NULL
				);

				gst_caps_unref(caps);

				if (ipu_blitter->internal_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(ipu_blitter, "failed to create internal bufferpool");
					return FALSE;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(ipu_blitter->internal_bufferpool))
				gst_buffer_pool_set_active(ipu_blitter->internal_bufferpool, TRUE);
		}

		/* Create the internal input buffer */
		flow_ret = gst_buffer_pool_acquire_buffer(ipu_blitter->internal_bufferpool, &temp_input_buffer, NULL);
		if (flow_ret != GST_FLOW_OK)
		{
			GST_ERROR_OBJECT(ipu_blitter, "error acquiring input frame buffer: %s", gst_pad_mode_get_name(flow_ret));
			return FALSE;
		}

		{
			GstVideoFrame input_frame, temp_input_frame;

			gst_video_frame_map(&input_frame, &(ipu_blitter->input_video_info), input_buffer, GST_MAP_READ);
			gst_video_frame_map(&temp_input_frame, &(ipu_blitter->input_video_info), temp_input_buffer, GST_MAP_WRITE);

			/* Copy the input buffer's pixels to the temp input frame
			 * The gst_video_frame_copy() makes sure stride and plane offset values from both
			 * frames are respected */
			gst_video_frame_copy(&temp_input_frame, &input_frame);

			gst_video_frame_unmap(&temp_input_frame);
			gst_video_frame_unmap(&input_frame);
		}

		/* Finally, set the temp input buffer as the actual input buffer */
		gst_imx_ipu_blitter_set_actual_input_buffer(ipu_blitter, temp_input_buffer);
	}

	/* Configure interlacing */
	ipu_blitter->priv->task.input.deinterlace.enable = 0;
	if (ipu_blitter->deinterlace_mode != GST_IMX_IPU_BLITTER_DEINTERLACE_NONE)
	{
		switch (ipu_blitter->input_video_info.interlace_mode)
		{
			case GST_VIDEO_INTERLACE_MODE_INTERLEAVED:
				GST_TRACE_OBJECT(ipu_blitter, "input stream uses interlacing -> deinterlacing enabled");
				ipu_blitter->priv->task.input.deinterlace.enable = 1;
				break;
			case GST_VIDEO_INTERLACE_MODE_MIXED:
			{
				GstVideoMeta *video_meta;

				GST_TRACE_OBJECT(ipu_blitter, "input stream uses mixed interlacing -> need to check video metadata deinterlacing flag");

				video_meta = gst_buffer_get_video_meta(input_buffer);
				if (video_meta != NULL)
				{
					if (video_meta->flags & GST_VIDEO_FRAME_FLAG_INTERLACED)
					{
						GST_TRACE_OBJECT(ipu_blitter, "frame has video metadata and deinterlacing flag");
						ipu_blitter->priv->task.input.deinterlace.enable = 1;
					}
					else
						GST_TRACE_OBJECT(ipu_blitter, "frame has video metadata but no deinterlacing flag");
				}
				else
					GST_TRACE_OBJECT(ipu_blitter, "frame has no video metadata -> no deinterlacing done");

				break;
			}
			case GST_VIDEO_INTERLACE_MODE_PROGRESSIVE:
			{
				GST_TRACE_OBJECT(ipu_blitter, "input stream is progressive -> no deinterlacing necessary");
				break;
			}
			case GST_VIDEO_INTERLACE_MODE_FIELDS:
				GST_FIXME_OBJECT(ipu_blitter, "2-fields deinterlacing not supported (yet)");
				break;
			default:
				break;
		}
	}

	return TRUE;
}
Code example #10
File: fs-rtp-substream.c  Project: zsx/ossbuild
gboolean
fs_rtp_sub_stream_set_codecbin_unlock (FsRtpSubStream *substream,
    FsCodec *codec,
    GstElement *codecbin,
    GError **error)
{
  GstCaps *caps = NULL;
  gchar *tmp;
  gboolean ret = FALSE;
  GstPad *pad;
  gboolean codec_changed = TRUE;

  FS_RTP_SUB_STREAM_LOCK (substream);

  if (substream->priv->stopped)
  {
    FS_RTP_SUB_STREAM_UNLOCK (substream);
    FS_RTP_SESSION_UNLOCK (substream->priv->session);
    gst_object_unref (codecbin);
    fs_codec_destroy (codec);
    fs_rtp_sub_stream_try_stop (substream);
    return TRUE;
  }
  substream->priv->modifying = TRUE;
  FS_RTP_SUB_STREAM_UNLOCK (substream);

  if (substream->codec)
  {
    if (!fs_codec_are_equal (codec, substream->codec))
      codec_changed = FALSE;
  }

  if (substream->priv->codecbin)
  {
    FsCodec *saved_codec = substream->codec;
    GstElement *old_codecbin;

    gst_element_set_locked_state (substream->priv->codecbin, TRUE);
    if (gst_element_set_state (substream->priv->codecbin, GST_STATE_NULL) !=
        GST_STATE_CHANGE_SUCCESS)
    {
      gst_element_set_locked_state (substream->priv->codecbin, FALSE);
      g_set_error (error, FS_ERROR, FS_ERROR_INTERNAL,
          "Could not set the codec bin for ssrc %u"
          " and payload type %d to the state NULL", substream->ssrc,
          substream->pt);
      FS_RTP_SUB_STREAM_LOCK (substream);
      substream->priv->modifying = FALSE;
      FS_RTP_SUB_STREAM_UNLOCK (substream);
      FS_RTP_SESSION_UNLOCK (substream->priv->session);
      gst_object_unref (codecbin);
      fs_codec_destroy (codec);
      fs_rtp_sub_stream_try_stop (substream);
      return FALSE;
    }

    old_codecbin = substream->priv->codecbin;
    substream->priv->codecbin = NULL;
    FS_RTP_SESSION_UNLOCK (substream->priv->session);

    gst_bin_remove (GST_BIN (substream->priv->conference), old_codecbin);

    FS_RTP_SESSION_LOCK (substream->priv->session);
    if (substream->codec == saved_codec)
    {
      fs_codec_destroy (substream->codec);
      substream->codec = NULL;
    }

    if (substream->priv->caps)
      gst_caps_unref (substream->priv->caps);
    substream->priv->caps = NULL;
  }

  FS_RTP_SESSION_UNLOCK (substream->priv->session);

  gst_object_ref (codecbin);

  if (!gst_bin_add (GST_BIN (substream->priv->conference), codecbin))
  {
    gst_object_unref (codecbin);
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
      "Could not add the codec bin to the conference");
    FS_RTP_SUB_STREAM_LOCK (substream);
    substream->priv->modifying = FALSE;
    FS_RTP_SUB_STREAM_UNLOCK (substream);
    fs_rtp_sub_stream_try_stop (substream);
    return FALSE;
  }

  if (gst_element_set_state (codecbin, GST_STATE_PLAYING) ==
      GST_STATE_CHANGE_FAILURE)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
      "Could not set the codec bin to the playing state");
    goto error;
  }

  if (!gst_element_link_pads (codecbin, "src",
      substream->priv->output_valve, "sink"))
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
      "Could not link the codec bin to the output_valve");
    goto error;
  }

  if (!gst_element_link_pads (substream->priv->capsfilter, "src",
          codecbin, "sink"))
  {
     g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
         "Could not link the receive capsfilter and the codecbin for pt %d",
         substream->pt);
    goto error;
  }

  caps = fs_codec_to_gst_caps (codec);
  tmp = gst_caps_to_string (caps);
  GST_DEBUG ("Setting caps %s on recv substream", tmp);
  g_free (tmp);
  g_object_set (substream->priv->capsfilter, "caps", caps, NULL);

  pad = gst_element_get_static_pad (codecbin, "sink");
  if (!pad)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_INTERNAL, "Could not get sink pad"
        " from codecbin");
    goto error;
  }

  /* This is a non-fatal error:
   * some codecs require config data to start, so we just ignore the failure
   */
  if (!gst_pad_set_caps (pad, caps))
  {
    ret = TRUE;
    gst_object_unref (pad);
    gst_caps_unref (caps);

    GST_DEBUG ("Could not set the caps on the codecbin, waiting on config-data"
        " for SSRC:%x pt:%d", substream->ssrc, substream->pt);

    /* We call this to drop all buffers until something comes up */
    fs_rtp_sub_stream_add_probe_locked (substream);
    goto error;
  }

  gst_object_unref (pad);

  FS_RTP_SESSION_LOCK (substream->priv->session);
  substream->priv->caps = caps;
  substream->priv->codecbin = codecbin;
  substream->codec = codec;
  FS_RTP_SUB_STREAM_LOCK (substream);
  substream->priv->modifying = FALSE;
  FS_RTP_SUB_STREAM_UNLOCK (substream);

  if (substream->priv->stream && !substream->priv->output_ghostpad)
  {
    if (!fs_rtp_sub_stream_add_output_ghostpad_unlock (substream, error))
      goto error;
  }
  else
  {
    FS_RTP_SESSION_UNLOCK (substream->priv->session);
    if (codec_changed)
      g_signal_emit (substream, signals[CODEC_CHANGED], 0);
  }

  gst_object_unref (codecbin);

  fs_rtp_sub_stream_try_stop (substream);

  return TRUE;

 error:
  FS_RTP_SUB_STREAM_LOCK (substream);
  substream->priv->modifying = FALSE;
  FS_RTP_SUB_STREAM_UNLOCK (substream);


  gst_element_set_locked_state (codecbin, TRUE);
  gst_element_set_state (codecbin, GST_STATE_NULL);
  gst_object_ref (codecbin);
  gst_bin_remove (GST_BIN (substream->priv->conference), codecbin);

  gst_object_unref (codecbin);
  fs_codec_destroy (codec);

  fs_rtp_sub_stream_try_stop (substream);

  return ret;
}
Code example #11
File: pulsesrc.c  Project: ChinnaSuhas/ossbuild
static gboolean
gst_pulsesrc_create_stream (GstPulseSrc * pulsesrc, GstCaps * caps)
{
  pa_channel_map channel_map;
  GstStructure *s;
  gboolean need_channel_layout = FALSE;
  GstRingBufferSpec spec;
  const gchar *name;

  memset (&spec, 0, sizeof (GstRingBufferSpec));
  spec.latency_time = GST_SECOND;
  if (!gst_ring_buffer_parse_caps (&spec, caps)) {
    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, SETTINGS,
        ("Can't parse caps."), (NULL));
    goto fail;
  }
  /* Keep the refcount of the caps at 1 to make them writable */
  gst_caps_unref (spec.caps);

  if (!gst_pulse_fill_sample_spec (&spec, &pulsesrc->sample_spec)) {
    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, SETTINGS,
        ("Invalid sample specification."), (NULL));
    goto fail;
  }

  pa_threaded_mainloop_lock (pulsesrc->mainloop);

  if (!pulsesrc->context) {
    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED, ("Bad context"), (NULL));
    goto unlock_and_fail;
  }

  s = gst_caps_get_structure (caps, 0);
  if (!gst_structure_has_field (s, "channel-layout") ||
      !gst_pulse_gst_to_channel_map (&channel_map, &spec)) {
    if (spec.channels == 1)
      pa_channel_map_init_mono (&channel_map);
    else if (spec.channels == 2)
      pa_channel_map_init_stereo (&channel_map);
    else
      need_channel_layout = TRUE;
  }

  name = "Record Stream";
  if (pulsesrc->proplist) {
    if (!(pulsesrc->stream = pa_stream_new_with_proplist (pulsesrc->context,
                name, &pulsesrc->sample_spec,
                (need_channel_layout) ? NULL : &channel_map,
                pulsesrc->proplist))) {
      GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
          ("Failed to create stream: %s",
              pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
      goto unlock_and_fail;
    }
  } else if (!(pulsesrc->stream = pa_stream_new (pulsesrc->context,
              name, &pulsesrc->sample_spec,
              (need_channel_layout) ? NULL : &channel_map))) {
    GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
        ("Failed to create stream: %s",
            pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
    goto unlock_and_fail;
  }

  if (need_channel_layout) {
    const pa_channel_map *m = pa_stream_get_channel_map (pulsesrc->stream);

    gst_pulse_channel_map_to_gst (m, &spec);
    caps = spec.caps;
  }

  GST_DEBUG_OBJECT (pulsesrc, "Caps are %" GST_PTR_FORMAT, caps);

  pa_stream_set_state_callback (pulsesrc->stream, gst_pulsesrc_stream_state_cb,
      pulsesrc);
  pa_stream_set_read_callback (pulsesrc->stream, gst_pulsesrc_stream_request_cb,
      pulsesrc);
  pa_stream_set_underflow_callback (pulsesrc->stream,
      gst_pulsesrc_stream_underflow_cb, pulsesrc);
  pa_stream_set_overflow_callback (pulsesrc->stream,
      gst_pulsesrc_stream_overflow_cb, pulsesrc);
  pa_stream_set_latency_update_callback (pulsesrc->stream,
      gst_pulsesrc_stream_latency_update_cb, pulsesrc);

  pa_threaded_mainloop_unlock (pulsesrc->mainloop);

  return TRUE;

unlock_and_fail:
  gst_pulsesrc_destroy_stream (pulsesrc);

  pa_threaded_mainloop_unlock (pulsesrc->mainloop);

fail:
  return FALSE;
}
Code example #12
File: gstdroidadec.c  Project: rss351/gst-droid
static gboolean
gst_droidadec_set_format (GstAudioDecoder * decoder, GstCaps * caps)
{
  GstDroidADec *dec = GST_DROIDADEC (decoder);
  GstStructure *str = gst_caps_get_structure (caps, 0);
  const GValue *value = gst_structure_get_value (str, "codec_data");
  GstBuffer *codec_data = value ? gst_value_get_buffer (value) : NULL;

  /*
   * destroying the droidmedia codec here will cause stagefright to call abort.
   * That is why we create it after we are sure that everything is correct
   */

  GST_DEBUG_OBJECT (dec, "set format %" GST_PTR_FORMAT, caps);

  if (dec->codec) {
    /* If we get a format change then we stop */
    GstCaps *current =
        gst_pad_get_current_caps (GST_AUDIO_DECODER_SINK_PAD (decoder));
    gboolean equal = gst_caps_is_equal_fixed (caps, current);
    gst_caps_unref (current);

    GST_DEBUG_OBJECT (dec, "new format is similar to old format? %d", equal);

    if (!equal) {
      GST_ELEMENT_ERROR (dec, LIBRARY, SETTINGS, (NULL),
          ("codec already configured"));
    }

    return equal;
  }

  dec->codec_type =
      gst_droid_codec_new_from_caps (caps, GST_DROID_CODEC_DECODER_AUDIO);
  if (!dec->codec_type) {
    GST_ELEMENT_ERROR (dec, LIBRARY, FAILED, (NULL),
        ("Unknown codec type for caps %" GST_PTR_FORMAT, caps));
    return FALSE;
  }

  if (!gst_structure_get_int (str, "channels", &dec->channels) ||
      !gst_structure_get_int (str, "rate", &dec->rate)) {
    GST_ELEMENT_ERROR (dec, STREAM, FORMAT, (NULL),
        ("Failed to parse caps %" GST_PTR_FORMAT, caps));
    return FALSE;
  }

  GST_INFO_OBJECT (dec, "configuring decoder. rate=%d, channels=%d", dec->rate,
      dec->channels);

  gst_buffer_replace (&dec->codec_data, codec_data);

  /* handle_frame will create the codec */
  dec->dirty = TRUE;

  dec->spf = gst_droid_codec_get_samples_per_frane (caps);

  GST_INFO_OBJECT (dec, "samples per frame: %d", dec->spf);

  return TRUE;
}
Code example #13
static GstFlowReturn
gst_pngdec_caps_create_and_set (GstPngDec * pngdec)
{
  GstFlowReturn ret = GST_FLOW_OK;
  gint bpc = 0, color_type;
  png_uint_32 width, height;
  GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;

  g_return_val_if_fail (GST_IS_PNGDEC (pngdec), GST_FLOW_ERROR);

  /* Get bits per channel */
  bpc = png_get_bit_depth (pngdec->png, pngdec->info);

  /* Get Color type */
  color_type = png_get_color_type (pngdec->png, pngdec->info);

  /* Add alpha channel if 16-bit depth, but not for GRAY images */
  if ((bpc > 8) && (color_type != PNG_COLOR_TYPE_GRAY)) {
    png_set_add_alpha (pngdec->png, 0xffff, PNG_FILLER_BEFORE);
    png_set_swap (pngdec->png);
  }
#if 0
  /* We used to have this HACK to reverse the outgoing bytes, but the problem
   * that originally required the hack seems to have been in videoconvert's
   * RGBA descriptions. It doesn't seem needed now that's fixed, but might
   * still be needed on big-endian systems, I'm not sure. J.S. 6/7/2007 */
  if (color_type == PNG_COLOR_TYPE_RGB_ALPHA)
    png_set_bgr (pngdec->png);
#endif

  /* Gray scale with alpha channel converted to RGB */
  if (color_type == PNG_COLOR_TYPE_GRAY_ALPHA) {
    GST_LOG_OBJECT (pngdec,
        "converting grayscale png with alpha channel to RGB");
    png_set_gray_to_rgb (pngdec->png);
  }

  /* Gray scale upscaled to 8 bits */
  if ((color_type == PNG_COLOR_TYPE_GRAY_ALPHA) ||
      (color_type == PNG_COLOR_TYPE_GRAY)) {
    if (bpc < 8) {              /* Convert to 8 bits */
      GST_LOG_OBJECT (pngdec, "converting grayscale image to 8 bits");
#if PNG_LIBPNG_VER < 10400
      png_set_gray_1_2_4_to_8 (pngdec->png);
#else
      png_set_expand_gray_1_2_4_to_8 (pngdec->png);
#endif
    }
  }

  /* Palette converted to RGB */
  if (color_type == PNG_COLOR_TYPE_PALETTE) {
    GST_LOG_OBJECT (pngdec, "converting palette png to RGB");
    png_set_palette_to_rgb (pngdec->png);
  }

  png_set_interlace_handling (pngdec->png);

  /* Update the info structure */
  png_read_update_info (pngdec->png, pngdec->info);

  /* Get IHDR header again after transformation settings */
  png_get_IHDR (pngdec->png, pngdec->info, &width, &height,
      &bpc, &pngdec->color_type, NULL, NULL, NULL);

  GST_LOG_OBJECT (pngdec, "this is a %dx%d PNG image", (gint) width,
      (gint) height);

  switch (pngdec->color_type) {
    case PNG_COLOR_TYPE_RGB:
      GST_LOG_OBJECT (pngdec, "we have no alpha channel, depth is 24 bits");
      if (bpc == 8)
        format = GST_VIDEO_FORMAT_RGB;
      break;
    case PNG_COLOR_TYPE_RGB_ALPHA:
      GST_LOG_OBJECT (pngdec,
          "we have an alpha channel, depth is 32 or 64 bits");
      if (bpc == 8)
        format = GST_VIDEO_FORMAT_RGBA;
      else if (bpc == 16)
        format = GST_VIDEO_FORMAT_ARGB64;
      break;
    case PNG_COLOR_TYPE_GRAY:
      GST_LOG_OBJECT (pngdec,
          "We have an gray image, depth is 8 or 16 (be) bits");
      if (bpc == 8)
        format = GST_VIDEO_FORMAT_GRAY8;
      else if (bpc == 16)
        format = GST_VIDEO_FORMAT_GRAY16_BE;
      break;
    default:
      break;
  }

  if (format == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_ELEMENT_ERROR (pngdec, STREAM, NOT_IMPLEMENTED, (NULL),
        ("pngdec does not support this color type"));
    ret = GST_FLOW_NOT_SUPPORTED;
    goto beach;
  }

  /* Check if output state changed */
  if (pngdec->output_state) {
    GstVideoInfo *info = &pngdec->output_state->info;

    if (width == GST_VIDEO_INFO_WIDTH (info) &&
        height == GST_VIDEO_INFO_HEIGHT (info) &&
        GST_VIDEO_INFO_FORMAT (info) == format) {
      goto beach;
    }
    gst_video_codec_state_unref (pngdec->output_state);
  }
#ifdef HAVE_LIBPNG_1_5
  if ((pngdec->color_type & PNG_COLOR_MASK_COLOR)
      && !(pngdec->color_type & PNG_COLOR_MASK_PALETTE)
      && png_get_valid (pngdec->png, pngdec->info, PNG_INFO_iCCP)) {
    png_charp icc_name;
    png_bytep icc_profile;
    int icc_compression_type;
    png_uint_32 icc_proflen = 0;
    png_uint_32 ret = png_get_iCCP (pngdec->png, pngdec->info, &icc_name,
        &icc_compression_type, &icc_profile, &icc_proflen);

    if ((ret & PNG_INFO_iCCP)) {
      gpointer gst_icc_prof = g_memdup (icc_profile, icc_proflen);
      GstBuffer *tagbuffer = NULL;
      GstSample *tagsample = NULL;
      GstTagList *taglist = NULL;
      GstStructure *info = NULL;
      GstCaps *caps;

      GST_DEBUG_OBJECT (pngdec, "extracted ICC profile '%s' length=%i",
          icc_name, (guint32) icc_proflen);

      tagbuffer = gst_buffer_new_wrapped (gst_icc_prof, icc_proflen);

      caps = gst_caps_new_empty_simple ("application/vnd.iccprofile");
      info = gst_structure_new_empty ("application/vnd.iccprofile");

      if (icc_name)
        gst_structure_set (info, "icc-name", G_TYPE_STRING, icc_name, NULL);

      tagsample = gst_sample_new (tagbuffer, caps, NULL, info);

      gst_buffer_unref (tagbuffer);
      gst_caps_unref (caps);

      taglist = gst_tag_list_new_empty ();
      gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, GST_TAG_ATTACHMENT,
          tagsample, NULL);
      gst_sample_unref (tagsample);

      gst_video_decoder_merge_tags (GST_VIDEO_DECODER (pngdec), taglist,
          GST_TAG_MERGE_APPEND);
      gst_tag_list_unref (taglist);
    }
  }
#endif

  pngdec->output_state =
      gst_video_decoder_set_output_state (GST_VIDEO_DECODER (pngdec), format,
      width, height, pngdec->input_state);
  gst_video_decoder_negotiate (GST_VIDEO_DECODER (pngdec));
  GST_DEBUG ("Final %d %d", GST_VIDEO_INFO_WIDTH (&pngdec->output_state->info),
      GST_VIDEO_INFO_HEIGHT (&pngdec->output_state->info));

beach:
  return ret;
}
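
The ICC profile merged above ends up in the stream's tag list as a GST_TAG_ATTACHMENT sample. A minimal sketch of how an application could read it back from a tag list it received (standard GStreamer 1.x calls; the taglist variable is assumed):

GstSample *sample;

if (gst_tag_list_get_sample_index (taglist, GST_TAG_ATTACHMENT, 0, &sample)) {
  GstCaps *caps = gst_sample_get_caps (sample);  /* application/vnd.iccprofile */
  GstBuffer *profile = gst_sample_get_buffer (sample);
  GstMapInfo map;

  if (caps != NULL && gst_buffer_map (profile, &map, GST_MAP_READ)) {
    /* map.data / map.size hold the raw ICC profile bytes */
    gst_buffer_unmap (profile, &map);
  }
  gst_sample_unref (sample);
}
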
Code example #14
static GstMultipartPad *
gst_multipart_find_pad_by_mime (GstMultipartDemux * demux, gchar * mime,
    gboolean * created)
{
  GSList *walk;

  walk = demux->srcpads;
  while (walk) {
    GstMultipartPad *pad = (GstMultipartPad *) walk->data;

    if (!strcmp (pad->mime, mime)) {
      if (created) {
        *created = FALSE;
      }
      return pad;
    }

    walk = walk->next;
  }
  /* pad not found, create it */
  {
    GstPad *pad;
    GstMultipartPad *mppad;
    gchar *name;
    const gchar *capsname;
    GstCaps *caps;

    mppad = g_new0 (GstMultipartPad, 1);

    GST_DEBUG_OBJECT (demux, "creating pad with mime: %s", mime);

    name = g_strdup_printf ("src_%d", demux->numpads);
    pad =
        gst_pad_new_from_static_template (&multipart_demux_src_template_factory,
        name);
    g_free (name);

    /* take the mime type, convert it to the caps name */
    capsname = gst_multipart_demux_get_gstname (demux, mime);
    caps = gst_caps_from_string (capsname);
    GST_DEBUG_OBJECT (demux, "caps for pad: %s", capsname);
    gst_pad_use_fixed_caps (pad);
    gst_pad_set_caps (pad, caps);
    gst_caps_unref (caps);

    mppad->pad = pad;
    mppad->mime = g_strdup (mime);
    mppad->last_ret = GST_FLOW_OK;

    demux->srcpads = g_slist_prepend (demux->srcpads, mppad);
    demux->numpads++;

    gst_pad_set_active (pad, TRUE);
    gst_element_add_pad (GST_ELEMENT_CAST (demux), pad);

    if (created) {
      *created = TRUE;
    }

    return mppad;
  }
}
Code example #15
File: audio-info.c  Project: PeterXu/gst-mobile
/**
 * gst_audio_info_to_caps:
 * @info: a #GstAudioInfo
 *
 * Convert the values of @info into a #GstCaps.
 *
 * Returns: (transfer full): the new #GstCaps containing the
 *          info of @info.
 */
GstCaps *
gst_audio_info_to_caps (const GstAudioInfo * info)
{
  GstCaps *caps;
  const gchar *format;
  const gchar *layout;
  GstAudioFlags flags;

  g_return_val_if_fail (info != NULL, NULL);
  g_return_val_if_fail (info->finfo != NULL, NULL);
  g_return_val_if_fail (info->finfo->format != GST_AUDIO_FORMAT_UNKNOWN, NULL);

  format = gst_audio_format_to_string (info->finfo->format);
  g_return_val_if_fail (format != NULL, NULL);

  if (info->layout == GST_AUDIO_LAYOUT_INTERLEAVED)
    layout = "interleaved";
  else if (info->layout == GST_AUDIO_LAYOUT_NON_INTERLEAVED)
    layout = "non-interleaved";
  else
    g_return_val_if_reached (NULL);

  flags = info->flags;
  if ((flags & GST_AUDIO_FLAG_UNPOSITIONED) && info->channels > 1
      && info->position[0] != GST_AUDIO_CHANNEL_POSITION_NONE) {
    flags &= ~GST_AUDIO_FLAG_UNPOSITIONED;
    g_warning ("Unpositioned audio channel position flag set but "
        "channel positions present");
  } else if (!(flags & GST_AUDIO_FLAG_UNPOSITIONED) && info->channels > 1
      && info->position[0] == GST_AUDIO_CHANNEL_POSITION_NONE) {
    flags |= GST_AUDIO_FLAG_UNPOSITIONED;
    g_warning ("Unpositioned audio channel position flag not set "
        "but no channel positions present");
  }

  caps = gst_caps_new_simple ("audio/x-raw",
      "format", G_TYPE_STRING, format,
      "layout", G_TYPE_STRING, layout,
      "rate", G_TYPE_INT, info->rate,
      "channels", G_TYPE_INT, info->channels, NULL);

  if (info->channels > 1
      || info->position[0] != GST_AUDIO_CHANNEL_POSITION_MONO) {
    guint64 channel_mask = 0;

    if ((flags & GST_AUDIO_FLAG_UNPOSITIONED)) {
      channel_mask = 0;
    } else {
      if (!gst_audio_channel_positions_to_mask (info->position, info->channels,
              TRUE, &channel_mask))
        goto invalid_channel_positions;
    }

    if (info->channels == 1
        && info->position[0] == GST_AUDIO_CHANNEL_POSITION_MONO) {
      /* Default mono special case */
    } else {
      gst_caps_set_simple (caps, "channel-mask", GST_TYPE_BITMASK, channel_mask,
          NULL);
    }
  }

  return caps;

invalid_channel_positions:
  {
    GST_ERROR ("Invalid channel positions");
    gst_caps_unref (caps);
    return NULL;
  }
}
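
A minimal usage sketch of gst_audio_info_to_caps() (the format, rate and channel values are arbitrary):

GstAudioInfo info;
GstCaps *caps;

gst_audio_info_init (&info);
/* interleaved S16LE stereo at 48 kHz, default channel positions */
gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16LE, 48000, 2, NULL);

/* yields: audio/x-raw, format=S16LE, layout=interleaved,
 *         rate=48000, channels=2, channel-mask=0x3 */
caps = gst_audio_info_to_caps (&info);
gst_caps_unref (caps);
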
Code example #16
File: gstdv1394src.c  Project: PeterXu/gst-mobile
static int
gst_dv1394src_iso_receive (raw1394handle_t handle, int channel, size_t len,
                           quadlet_t * data)
{
    GstDV1394Src *dv1394src = gst_dv1394src_from_raw1394handle (handle);

    if (len > 16) {
        /*
           the following code taken from kino-0.51 (Dan Dennedy/Charles Yates)
           Kindly relicensed under the LGPL. See the commit log for version 1.6 of
           this file in CVS.
         */
        unsigned char *p = (unsigned char *) &data[3];

        int section_type = p[0] >> 5;       /* section type is in bits 5 - 7 */
        int dif_sequence = p[1] >> 4;       /* dif sequence number is in bits 4 - 7 */
        int dif_block = p[2];

        /* if we are at the beginning of a frame,
           we set buf=frame, and alloc a new buffer for frame
         */
        if (section_type == 0 && dif_sequence == 0) {       // dif header
            if (!GST_PAD_CAPS (GST_BASE_SRC_PAD (dv1394src))) {
                GstCaps *caps;

                // figure format (NTSC/PAL)
                if (p[3] & 0x80) {
                    // PAL
                    dv1394src->frame_size = PAL_FRAMESIZE;
                    dv1394src->frame_rate = PAL_FRAMERATE;
                    GST_DEBUG ("PAL data");
                    caps = gst_caps_new_simple ("video/x-dv",
                                                "format", G_TYPE_STRING, "PAL",
                                                "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
                } else {
                    // NTSC (untested)
                    dv1394src->frame_size = NTSC_FRAMESIZE;
                    dv1394src->frame_rate = NTSC_FRAMERATE;
                    GST_DEBUG
                    ("NTSC data [untested] - please report success/failure to <*****@*****.**>");
                    caps = gst_caps_new_simple ("video/x-dv",
                                                "format", G_TYPE_STRING, "NTSC",
                                                "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
                }
                gst_pad_set_caps (GST_BASE_SRC_PAD (dv1394src), caps);
                gst_caps_unref (caps);
            }
            // drop last frame when not complete
            if (!dv1394src->drop_incomplete
                    || dv1394src->bytes_in_frame == dv1394src->frame_size) {
                dv1394src->buf = dv1394src->frame;
            } else {
                GST_INFO_OBJECT (GST_ELEMENT (dv1394src), "incomplete frame dropped");
                g_signal_emit (G_OBJECT (dv1394src),
                               gst_dv1394src_signals[SIGNAL_FRAME_DROPPED], 0);
                if (dv1394src->frame) {
                    gst_buffer_unref (dv1394src->frame);
                }
            }
            if ((dv1394src->frame_sequence + 1) % (dv1394src->skip +
                                                   dv1394src->consecutive) < dv1394src->consecutive) {
                GstBuffer *buf;
                gint64 i64;

                buf = gst_buffer_new_and_alloc (dv1394src->frame_size);

                /* fill in offset, duration, timestamp */
                GST_BUFFER_OFFSET (buf) = dv1394src->frame_sequence;
                dv1394src->frame = buf;
            }
            dv1394src->frame_sequence++;
            dv1394src->bytes_in_frame = 0;
        }

        if (dv1394src->frame != NULL) {
            guint8 *data = GST_BUFFER_DATA (dv1394src->frame);

            switch (section_type) {
            case 0:                /* 1 Header block */
                /* p[3] |= 0x80; // hack to force PAL data */
                memcpy (data + dif_sequence * 150 * 80, p, 480);
                break;

            case 1:                /* 2 Subcode blocks */
                memcpy (data + dif_sequence * 150 * 80 + (1 + dif_block) * 80, p,
                        480);
                break;

            case 2:                /* 3 VAUX blocks */
                memcpy (data + dif_sequence * 150 * 80 + (3 + dif_block) * 80, p,
                        480);
                break;

            case 3:                /* 9 Audio blocks interleaved with video */
                memcpy (data + dif_sequence * 150 * 80 + (6 + dif_block * 16) * 80, p,
                        480);
                break;

            case 4:                /* 135 Video blocks interleaved with audio */
                memcpy (data + dif_sequence * 150 * 80 + (7 + (dif_block / 15) +
                        dif_block) * 80, p, 480);
                break;

            default:               /* we can't handle any other data */
                break;
            }
            dv1394src->bytes_in_frame += 480;
        }
    }

    return 0;
}
Code example #17
File: resindvdbin.c  Project: PeterXu/gst-mobile
static void
demux_pad_added (GstElement * element, GstPad * pad, RsnDvdBin * dvdbin)
{
  gboolean skip_mq = FALSE;
  GstPad *mq_pad = NULL;
  GstPad *dest_pad = NULL;
  GstCaps *caps;
  GstStructure *s;

  GST_DEBUG_OBJECT (dvdbin, "New pad: %" GST_PTR_FORMAT, pad);

  caps = gst_pad_query_caps (pad, NULL);
  if (caps == NULL) {
    GST_WARNING_OBJECT (dvdbin, "NULL caps from pad %" GST_PTR_FORMAT, pad);
    return;
  }
  if (!gst_caps_is_fixed (caps)) {
    GST_WARNING_OBJECT (dvdbin, "Unfixed caps %" GST_PTR_FORMAT
        " on pad %" GST_PTR_FORMAT, caps, pad);
    gst_caps_unref (caps);
    return;
  }

  GST_DEBUG_OBJECT (dvdbin,
      "Pad %" GST_PTR_FORMAT " has caps: %" GST_PTR_FORMAT, pad, caps);

  s = gst_caps_get_structure (caps, 0);
  g_return_if_fail (s != NULL);

  if (can_sink_caps (dvdbin->pieces[DVD_ELEM_VIDPARSE], caps)) {
    GST_LOG_OBJECT (dvdbin, "Found video pad w/ caps %" GST_PTR_FORMAT, caps);
    dest_pad =
        gst_element_get_static_pad (dvdbin->pieces[DVD_ELEM_VIDPARSE], "sink");
  } else if (g_str_equal (gst_structure_get_name (s), "subpicture/x-dvd")) {
    GST_LOG_OBJECT (dvdbin, "Found subpicture pad w/ caps %" GST_PTR_FORMAT,
        caps);
    dest_pad =
        gst_element_get_request_pad (dvdbin->pieces[DVD_ELEM_SPU_SELECT],
        "sink_%u");
    skip_mq = TRUE;
  } else if (can_sink_caps (dvdbin->pieces[DVD_ELEM_AUDDEC], caps)) {
    GST_LOG_OBJECT (dvdbin, "Found audio pad w/ caps %" GST_PTR_FORMAT, caps);
    dest_pad =
        gst_element_get_request_pad (dvdbin->pieces[DVD_ELEM_AUD_SELECT],
        "sink_%u");
  } else {
    GstStructure *s;

    GST_DEBUG_OBJECT (dvdbin, "Ignoring unusable pad w/ caps %" GST_PTR_FORMAT,
        caps);
    gst_element_post_message (GST_ELEMENT_CAST (dvdbin),
        gst_missing_decoder_message_new (GST_ELEMENT_CAST (dvdbin), caps));

    s = gst_caps_get_structure (caps, 0);
    if (g_str_has_prefix (gst_structure_get_name (s), "video/")) {
      GST_ELEMENT_ERROR (dvdbin, STREAM, CODEC_NOT_FOUND, (NULL),
          ("No MPEG video decoder found"));
    } else {
      GST_ELEMENT_WARNING (dvdbin, STREAM, CODEC_NOT_FOUND, (NULL),
          ("No audio decoder found"));
    }
  }

  gst_caps_unref (caps);

  if (dest_pad == NULL) {
    GST_DEBUG_OBJECT (dvdbin, "Don't know how to handle pad. Ignoring");
    return;
  }

  if (skip_mq) {
    mq_pad = gst_object_ref (pad);
  } else {
    mq_pad = connect_thru_mq (dvdbin, pad);
    if (mq_pad == NULL)
      goto failed;
    GST_DEBUG_OBJECT (dvdbin, "Linking new pad %" GST_PTR_FORMAT
        " through multiqueue to %" GST_PTR_FORMAT, pad, dest_pad);
  }

  gst_pad_link (mq_pad, dest_pad);

  gst_object_unref (mq_pad);
  gst_object_unref (dest_pad);

  return;
failed:
  GST_ELEMENT_ERROR (dvdbin, CORE, FAILED, (NULL),
      ("Failed to handle new demuxer pad %s", GST_PAD_NAME (pad)));
  if (mq_pad)
    gst_object_unref (mq_pad);
  if (dest_pad)
    gst_object_unref (dest_pad);
  return;
}
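
connect_thru_mq() is defined elsewhere in resindvdbin.c. For illustration, here is a minimal sketch of what such a helper plausibly does, assuming the multiqueue lives in dvdbin->pieces[DVD_ELEM_MQUEUE]; both the sketch and that index name are assumptions, not the original code:

static GstPad *
connect_thru_mq_sketch (RsnDvdBin * dvdbin, GstPad * pad)
{
  GstPad *mq_sink, *mq_src;
  gchar *sink_name, *src_name;

  /* Request a multiqueue sink pad and link the demuxer pad to it. */
  mq_sink = gst_element_get_request_pad (dvdbin->pieces[DVD_ELEM_MQUEUE],
      "sink_%u");
  if (mq_sink == NULL)
    return NULL;
  if (gst_pad_link (pad, mq_sink) != GST_PAD_LINK_OK) {
    gst_object_unref (mq_sink);
    return NULL;
  }

  /* multiqueue source pads mirror the sink pad names: sink_N <-> src_N */
  sink_name = gst_pad_get_name (mq_sink);
  src_name = g_strdup_printf ("src_%s", sink_name + 5);
  mq_src = gst_element_get_static_pad (dvdbin->pieces[DVD_ELEM_MQUEUE],
      src_name);
  g_free (sink_name);
  g_free (src_name);
  gst_object_unref (mq_sink);

  return mq_src;
}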
Code example #18
File: videorate.c Project: kuailexs/symbiandump-mw1
void test_more()
{
  GstElement *videorate;
  GstBuffer *first, *second, *third, *outbuffer;
  GList *l;
  GstCaps *caps;
  GRand *rand;

  videorate = setup_videorate ();
  fail_unless (gst_element_set_state (videorate,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");
  assert_videorate_stats (videorate, "creation", 0, 0, 0, 0);

  rand = g_rand_new ();

  /* first buffer */
  first = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (first) = 0;
  /* it shouldn't matter what the offsets are, videorate produces perfect
     streams */
  GST_BUFFER_OFFSET (first) = g_rand_int (rand);
  GST_BUFFER_OFFSET_END (first) = g_rand_int (rand);
  memset (GST_BUFFER_DATA (first), 1, 4);
  caps = gst_caps_from_string (VIDEO_CAPS_STRING);
  gst_buffer_set_caps (first, caps);
  gst_caps_unref (caps);
  ASSERT_BUFFER_REFCOUNT (first, "first", 1);
  gst_buffer_ref (first);

  /* pushing gives away my reference ... */
  fail_unless (gst_pad_push (mysrcpad, first) == GST_FLOW_OK);
  /* ... and it is now stuck inside videorate */
  ASSERT_BUFFER_REFCOUNT (first, "first", 2);
  fail_unless_equals_int (g_list_length (buffers), 0);
  assert_videorate_stats (videorate, "first buffer", 1, 0, 0, 0);

  /* second buffer; in between the second and third output frames' timestamps */
  second = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (second) = GST_SECOND * 3 / 50;
  GST_BUFFER_OFFSET (second) = g_rand_int (rand);
  GST_BUFFER_OFFSET_END (second) = g_rand_int (rand);
  memset (GST_BUFFER_DATA (second), 2, 4);
  caps = gst_caps_from_string (VIDEO_CAPS_STRING);
  gst_buffer_set_caps (second, caps);
  gst_caps_unref (caps);
  ASSERT_BUFFER_REFCOUNT (second, "second", 1);
  gst_buffer_ref (second);

  /* pushing gives away one of my references ... */
  fail_unless (gst_pad_push (mysrcpad, second) == GST_FLOW_OK);
  /* ... and it is now stuck inside videorate */
  ASSERT_BUFFER_REFCOUNT (second, "second", 2);

  /* ... and the first one is pushed out, with timestamp 0 */
  fail_unless_equals_int (g_list_length (buffers), 1);
  assert_videorate_stats (videorate, "second buffer", 2, 1, 0, 0);
  ASSERT_BUFFER_REFCOUNT (first, "first", 2);

  outbuffer = buffers->data;
  fail_unless_equals_uint64 (GST_BUFFER_TIMESTAMP (outbuffer), 0);

  /* third buffer */
  third = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (third) = GST_SECOND * 12 / 50;
  GST_BUFFER_OFFSET (third) = g_rand_int (rand);
  GST_BUFFER_OFFSET_END (third) = g_rand_int (rand);
  memset (GST_BUFFER_DATA (third), 3, 4);
  caps = gst_caps_from_string (VIDEO_CAPS_STRING);
  gst_buffer_set_caps (third, caps);
  gst_caps_unref (caps);
  ASSERT_BUFFER_REFCOUNT (third, "third", 1);
  gst_buffer_ref (third);

  /* pushing gives away my reference ... */
  fail_unless (gst_pad_push (mysrcpad, third) == GST_FLOW_OK);
  /* ... and it is now stuck inside videorate */
  ASSERT_BUFFER_REFCOUNT (third, "third", 2);

  /* submitting the third buffer has triggered flushing of three more frames */
  assert_videorate_stats (videorate, "third buffer", 3, 4, 0, 2);

  /* check timestamp and source correctness */
  l = buffers;
  fail_unless_equals_uint64 (GST_BUFFER_TIMESTAMP (l->data), 0);
  fail_unless_equals_int (GST_BUFFER_DATA (l->data)[0], 1);
  fail_unless_equals_uint64 (GST_BUFFER_OFFSET (l->data), 0);
  fail_unless_equals_uint64 (GST_BUFFER_OFFSET_END (l->data), 1);

  l = g_list_next (l);
  fail_unless_equals_uint64 (GST_BUFFER_TIMESTAMP (l->data), GST_SECOND / 25);
  fail_unless_equals_int (GST_BUFFER_DATA (l->data)[0], 2);
  fail_unless_equals_uint64 (GST_BUFFER_OFFSET (l->data), 1);
  fail_unless_equals_uint64 (GST_BUFFER_OFFSET_END (l->data), 2);

  l = g_list_next (l);
  fail_unless_equals_uint64 (GST_BUFFER_TIMESTAMP (l->data),
      GST_SECOND * 2 / 25);
  fail_unless_equals_int (GST_BUFFER_DATA (l->data)[0], 2);
  fail_unless_equals_uint64 (GST_BUFFER_OFFSET (l->data), 2);
  fail_unless_equals_uint64 (GST_BUFFER_OFFSET_END (l->data), 3);

  l = g_list_next (l);
  fail_unless_equals_uint64 (GST_BUFFER_TIMESTAMP (l->data),
      GST_SECOND * 3 / 25);
  fail_unless_equals_int (GST_BUFFER_DATA (l->data)[0], 2);
  fail_unless_equals_uint64 (GST_BUFFER_OFFSET (l->data), 3);
  fail_unless_equals_uint64 (GST_BUFFER_OFFSET_END (l->data), 4);

  fail_unless_equals_int (g_list_length (buffers), 4);
  /* one ref held by us, plus one for each of the three output frames taken
     from the second */
  ASSERT_BUFFER_REFCOUNT (second, "second", 4);

  /* now send EOS */
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));

  /* submitting eos should flush out two more frames for tick 8 and 10 */
  /* FIXME: right now it only flushes out one, so out is 5 instead of 6 ! */
  assert_videorate_stats (videorate, "eos", 3, 5, 0, 2);
  fail_unless_equals_int (g_list_length (buffers), 5);

  /* cleanup */
  g_rand_free (rand);
  gst_buffer_unref (first);
  gst_buffer_unref (second);
  gst_buffer_unref (third);
  cleanup_videorate (videorate);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Code example #19
File: cvcap_gstreamer.cpp Project: alejotima/opencv
//
// decode buffer
//
static IplImage *icvRetrieveFrame_GStreamer(CvCapture_GStreamer *cap)
{
	if(!cap->buffer)
		return 0;

//	printf("getting buffercaps\n");

	GstCaps* caps = gst_buffer_get_caps(cap->buffer);

	assert(gst_caps_get_size(caps) == 1);

	GstStructure* structure = gst_caps_get_structure(caps, 0);

	gint bpp, endianness, redmask, greenmask, bluemask;

	if(!gst_structure_get_int(structure, "bpp", &bpp) ||
	   !gst_structure_get_int(structure, "endianness", &endianness) ||
	   !gst_structure_get_int(structure, "red_mask", &redmask) ||
	   !gst_structure_get_int(structure, "green_mask", &greenmask) ||
	   !gst_structure_get_int(structure, "blue_mask", &bluemask)) {
		printf("missing essential information in buffer caps, %s\n", gst_caps_to_string(caps));
		return 0;
	}

	printf("buffer has %d bpp, endianness %d, rgb %x %x %x, %s\n", bpp, endianness, redmask, greenmask, bluemask, gst_caps_to_string(caps));

	if(!redmask || !greenmask || !bluemask) {
		gst_caps_unref(caps);
		return 0;
	}

	if(!cap->frame) {
		gint height, width;

		if(!gst_structure_get_int(structure, "width", &width) ||
		   !gst_structure_get_int(structure, "height", &height))
			return 0;

//		printf("creating frame %dx%d\n", width, height);

		cap->frame = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 3);
	}

	gst_caps_unref(caps);

	unsigned char *data = GST_BUFFER_DATA(cap->buffer);

	printf("generating shifts\n");

	IplImage *frame = cap->frame;
	unsigned nbyte = bpp >> 3;
	unsigned redshift, blueshift, greenshift;
	unsigned mask = redmask;
	for(redshift = 0, mask = redmask; (mask & 1) == 0; mask >>= 1, redshift++)
		;
	for(greenshift = 0, mask = greenmask; (mask & 1) == 0; mask >>= 1, greenshift++)
		;
	for(blueshift = 0, mask = bluemask; (mask & 1) == 0; mask >>= 1, blueshift++)
		;

	printf("shifts: %u %u %u\n", redshift, greenshift, blueshift);

	for(int r = 0; r < frame->height; r++) {
		for(int c = 0; c < frame->width; c++, data += nbyte) {
			int at = r * frame->widthStep + c * 3;
			frame->imageData[at] = ((*((gint *)data)) & redmask) >> redshift;
			frame->imageData[at+1] = ((*((gint *)data)) & greenmask) >> greenshift;
			frame->imageData[at+2] = ((*((gint *)data)) & bluemask) >> blueshift;
		}
	}

//	printf("converted buffer\n");

	gst_buffer_unref(cap->buffer);
	cap->buffer = 0;

	return cap->frame;
}
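
The three loops above derive, for each colour mask, how far a pixel value must be shifted right so that the masked component lands in the low byte. A worked example with an illustrative mask value:

/* With red_mask = 0x00ff0000, (mask & 1) first becomes non-zero after 16
 * right shifts, so redshift = 16 and
 * (pixel & 0x00ff0000) >> 16 yields the 8-bit red component. */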
Code example #20
File: videorate.c Project: kuailexs/symbiandump-mw1
/* frames at 1, 0, 2 -> second one should be ignored */
void test_wrong_order_from_zero()
{
  GstElement *videorate;
  GstBuffer *first, *second, *third, *outbuffer;
  GstCaps *caps;

  videorate = setup_videorate ();
  fail_unless (gst_element_set_state (videorate,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");
  assert_videorate_stats (videorate, "start", 0, 0, 0, 0);

  /* first buffer */
  first = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (first) = GST_SECOND;
  memset (GST_BUFFER_DATA (first), 0, 4);
  caps = gst_caps_from_string (VIDEO_CAPS_STRING);
  gst_buffer_set_caps (first, caps);
  gst_caps_unref (caps);
  ASSERT_BUFFER_REFCOUNT (first, "first", 1);
  gst_buffer_ref (first);

  GST_DEBUG ("pushing first buffer");
  /* pushing gives away my reference ... */
  fail_unless (gst_pad_push (mysrcpad, first) == GST_FLOW_OK);
  /* ... and it is now stuck inside videorate */
  ASSERT_BUFFER_REFCOUNT (first, "first", 2);
  fail_unless_equals_int (g_list_length (buffers), 0);
  assert_videorate_stats (videorate, "first", 1, 0, 0, 0);

  /* second buffer */
  second = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (second) = 0;
  memset (GST_BUFFER_DATA (second), 0, 4);
  caps = gst_caps_from_string (VIDEO_CAPS_STRING);
  gst_buffer_set_caps (second, caps);
  gst_caps_unref (caps);
  ASSERT_BUFFER_REFCOUNT (second, "second", 1);
  gst_buffer_ref (second);

  /* pushing gives away my reference ... */
  fail_unless (gst_pad_push (mysrcpad, second) == GST_FLOW_OK);
  /* ... and it is now dropped because it is too old */
  ASSERT_BUFFER_REFCOUNT (second, "second", 1);
  fail_unless_equals_int (g_list_length (buffers), 0);

  /* ... and the first one is still there */
  assert_videorate_stats (videorate, "second", 2, 0, 1, 0);
  ASSERT_BUFFER_REFCOUNT (first, "first", 2);

  /* third buffer */
  third = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (third) = 2 * GST_SECOND;
  memset (GST_BUFFER_DATA (third), 0, 4);
  caps = gst_caps_from_string (VIDEO_CAPS_STRING);
  gst_buffer_set_caps (third, caps);
  gst_caps_unref (caps);
  ASSERT_BUFFER_REFCOUNT (third, "third", 1);
  gst_buffer_ref (third);

  /* pushing gives away my reference ... */
  fail_unless (gst_pad_push (mysrcpad, third) == GST_FLOW_OK);
  /* ... and it is now stuck inside videorate */
  ASSERT_BUFFER_REFCOUNT (third, "third", 2);

  /* and now the first one should be pushed once and dupped 24 + 13 times, to
   * reach the half point between 1 s (first) and 2 s (third) */
  fail_unless_equals_int (g_list_length (buffers), 38);
  ASSERT_BUFFER_REFCOUNT (first, "first", 39);
  ASSERT_BUFFER_REFCOUNT (second, "second", 1);
  ASSERT_BUFFER_REFCOUNT (third, "third", 2);
  assert_videorate_stats (videorate, "third", 3, 38, 1, 37);

  /* verify last buffer */
  outbuffer = g_list_last (buffers)->data;
  fail_unless (GST_IS_BUFFER (outbuffer));
  fail_unless_equals_uint64 (GST_BUFFER_TIMESTAMP (outbuffer),
      GST_SECOND * 37 / 25);

  /* cleanup */
  gst_buffer_unref (first);
  gst_buffer_unref (second);
  gst_buffer_unref (third);
  cleanup_videorate (videorate);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Code example #21
static gboolean
gst_xviddec_setcaps (GstPad * pad, GstCaps * caps)
{
    GstXvidDec *dec = GST_XVIDDEC (GST_PAD_PARENT (pad));
    GstStructure *structure;
    GstCaps *allowed_caps;
    const GValue *val;

    GST_LOG_OBJECT (dec, "caps %" GST_PTR_FORMAT, caps);

    /* if there's something old around, remove it */
    if (dec->handle) {
        gst_xviddec_unset (dec);
    }

    structure = gst_caps_get_structure (caps, 0);
    gst_structure_get_int (structure, "width", &dec->width);
    gst_structure_get_int (structure, "height", &dec->height);

    /* perhaps some fps info */
    val = gst_structure_get_value (structure, "framerate");
    if ((val != NULL) && GST_VALUE_HOLDS_FRACTION (val)) {
        dec->fps_n = gst_value_get_fraction_numerator (val);
        dec->fps_d = gst_value_get_fraction_denominator (val);
    } else {
        dec->fps_n = -1;
        dec->fps_d = 1;
    }

    /* perhaps some par info */
    val = gst_structure_get_value (structure, "pixel-aspect-ratio");
    if (val != NULL && GST_VALUE_HOLDS_FRACTION (val)) {
        dec->par_n = gst_value_get_fraction_numerator (val);
        dec->par_d = gst_value_get_fraction_denominator (val);
    } else {
        dec->par_n = 1;
        dec->par_d = 1;
    }

    /* we try to find the preferred/accept csp */
    allowed_caps = gst_pad_get_allowed_caps (dec->srcpad);
    if (!allowed_caps) {
        GST_DEBUG_OBJECT (dec, "... but no peer, using template caps");
        /* need to copy because get_allowed_caps returns a ref,
           and get_pad_template_caps doesn't */
        allowed_caps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad));
    }
    GST_LOG_OBJECT (dec, "allowed source caps %" GST_PTR_FORMAT, allowed_caps);

    /* pick the first one ... */
    structure = gst_caps_get_structure (allowed_caps, 0);
    val = gst_structure_get_value (structure, "format");
    if (val != NULL && G_VALUE_TYPE (val) == GST_TYPE_LIST) {
        GValue temp = { 0, };
        gst_value_init_and_copy (&temp, gst_value_list_get_value (val, 0));
        gst_structure_set_value (structure, "format", &temp);
        g_value_unset (&temp);
    }

    /* ... and use its info to get the csp */
    dec->csp = gst_xvid_structure_to_csp (structure);
    if (dec->csp == -1) {
        GST_WARNING_OBJECT (dec, "failed to decide on colorspace, using I420");
        dec->csp = XVID_CSP_I420;
    }

    dec->outbuf_size =
        gst_xvid_image_get_size (dec->csp, dec->width, dec->height);

    GST_LOG_OBJECT (dec, "csp=%d, outbuf_size=%d", dec->csp, dec->outbuf_size);

    gst_caps_unref (allowed_caps);

    /* now set up xvid ... */
    if (!gst_xviddec_setup (dec)) {
        GST_ELEMENT_ERROR (GST_ELEMENT (dec), LIBRARY, INIT, (NULL), (NULL));
        return FALSE;
    }

    return gst_xviddec_negotiate (dec, NULL);
}
Code example #22
File: videorate.c Project: kuailexs/symbiandump-mw1
/* send frames with 0, 1, 2, 0 seconds */
void test_wrong_order()
{
  GstElement *videorate;
  GstBuffer *first, *second, *third, *fourth, *outbuffer;
  GstCaps *caps;

  videorate = setup_videorate ();
  fail_unless (gst_element_set_state (videorate,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");
  assert_videorate_stats (videorate, "start", 0, 0, 0, 0);

  /* first buffer */
  first = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (first) = 0;
  memset (GST_BUFFER_DATA (first), 0, 4);
  caps = gst_caps_from_string (VIDEO_CAPS_STRING);
  gst_buffer_set_caps (first, caps);
  gst_caps_unref (caps);
  ASSERT_BUFFER_REFCOUNT (first, "first", 1);
  gst_buffer_ref (first);

  GST_DEBUG ("pushing first buffer");
  /* pushing gives away my reference ... */
  fail_unless (gst_pad_push (mysrcpad, first) == GST_FLOW_OK);
  /* ... and it is now stuck inside videorate */
  ASSERT_BUFFER_REFCOUNT (first, "first", 2);
  fail_unless_equals_int (g_list_length (buffers), 0);
  assert_videorate_stats (videorate, "first", 1, 0, 0, 0);

  /* second buffer */
  second = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (second) = GST_SECOND;
  memset (GST_BUFFER_DATA (second), 0, 4);
  caps = gst_caps_from_string (VIDEO_CAPS_STRING);
  gst_buffer_set_caps (second, caps);
  gst_caps_unref (caps);
  ASSERT_BUFFER_REFCOUNT (second, "second", 1);
  gst_buffer_ref (second);

  /* pushing gives away my reference ... */
  fail_unless (gst_pad_push (mysrcpad, second) == GST_FLOW_OK);
  /* ... and it is now stuck inside videorate */
  ASSERT_BUFFER_REFCOUNT (second, "second", 2);
  /* and it created 13 output buffers as copies of the first frame */
  fail_unless_equals_int (g_list_length (buffers), 13);
  assert_videorate_stats (videorate, "second", 2, 13, 0, 12);
  ASSERT_BUFFER_REFCOUNT (first, "first", 14);

  /* third buffer */
  third = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (third) = 2 * GST_SECOND;
  memset (GST_BUFFER_DATA (third), 0, 4);
  caps = gst_caps_from_string (VIDEO_CAPS_STRING);
  gst_buffer_set_caps (third, caps);
  gst_caps_unref (caps);
  ASSERT_BUFFER_REFCOUNT (third, "third", 1);
  gst_buffer_ref (third);

  /* pushing gives away my reference ... */
  fail_unless (gst_pad_push (mysrcpad, third) == GST_FLOW_OK);
  /* ... and it is now stuck inside videorate */
  ASSERT_BUFFER_REFCOUNT (third, "third", 2);

  /* submitting a frame with 2 seconds triggers output of 25 more frames */
  fail_unless_equals_int (g_list_length (buffers), 38);
  ASSERT_BUFFER_REFCOUNT (first, "first", 14);
  ASSERT_BUFFER_REFCOUNT (second, "second", 26);
  /* three frames submitted; two of them output as is, and 36 duplicated */
  assert_videorate_stats (videorate, "third", 3, 38, 0, 36);

  /* fourth buffer */
  fourth = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (fourth) = 0;
  memset (GST_BUFFER_DATA (fourth), 0, 4);
  caps = gst_caps_from_string (VIDEO_CAPS_STRING);
  gst_buffer_set_caps (fourth, caps);
  gst_caps_unref (caps);
  ASSERT_BUFFER_REFCOUNT (fourth, "fourth", 1);
  gst_buffer_ref (fourth);

  /* pushing gives away my reference ... */
  fail_unless (gst_pad_push (mysrcpad, fourth) == GST_FLOW_OK);
  /* ... and it is dropped */
  ASSERT_BUFFER_REFCOUNT (fourth, "fourth", 1);

  fail_unless_equals_int (g_list_length (buffers), 38);
  ASSERT_BUFFER_REFCOUNT (first, "first", 14);
  ASSERT_BUFFER_REFCOUNT (second, "second", 26);
  assert_videorate_stats (videorate, "fourth", 4, 38, 1, 36);

  /* verify last buffer */
  outbuffer = g_list_last (buffers)->data;
  fail_unless (GST_IS_BUFFER (outbuffer));
  fail_unless_equals_uint64 (GST_BUFFER_TIMESTAMP (outbuffer),
      GST_SECOND * 37 / 25);


  /* cleanup */
  gst_buffer_unref (first);
  gst_buffer_unref (second);
  gst_buffer_unref (third);
  gst_buffer_unref (fourth);
  cleanup_videorate (videorate);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Code example #23
File: gst-inspect.c Project: ChinnaSuhas/ossbuild
static void
print_plugin_automatic_install_info_codecs (GstElementFactory * factory)
{
  GstPadDirection direction;
  const gchar *type_name;
  const gchar *klass;
  const GList *static_templates, *l;
  GstCaps *caps = NULL;
  guint i, num;

  klass = gst_element_factory_get_klass (factory);
  g_return_if_fail (klass != NULL);

  if (strstr (klass, "Demuxer") ||
      strstr (klass, "Decoder") ||
      strstr (klass, "Depay") || strstr (klass, "Parser")) {
    type_name = "decoder";
    direction = GST_PAD_SINK;
  } else if (strstr (klass, "Muxer") ||
      strstr (klass, "Encoder") || strstr (klass, "Pay")) {
    type_name = "encoder";
    direction = GST_PAD_SRC;
  } else {
    return;
  }

  /* decoder/demuxer sink pads should always be static and there should only
   * be one, the same applies to encoders/muxers and source pads */
  static_templates = gst_element_factory_get_static_pad_templates (factory);
  for (l = static_templates; l != NULL; l = l->next) {
    GstStaticPadTemplate *tmpl = NULL;

    tmpl = (GstStaticPadTemplate *) l->data;
    if (tmpl->direction == direction) {
      caps = gst_static_pad_template_get_caps (tmpl);
      break;
    }
  }

  if (caps == NULL) {
    g_printerr ("Couldn't find static pad template for %s '%s'\n",
        type_name, GST_PLUGIN_FEATURE_NAME (factory));
    return;
  }

  caps = gst_caps_make_writable (caps);
  num = gst_caps_get_size (caps);
  for (i = 0; i < num; ++i) {
    GstStructure *s;
    gchar *s_str;

    s = gst_caps_get_structure (caps, i);
    /* remove fields that are almost always just MIN-MAX of some sort
     * in order to make the caps look less messy */
    gst_structure_remove_field (s, "pixel-aspect-ratio");
    gst_structure_remove_field (s, "framerate");
    gst_structure_remove_field (s, "channels");
    gst_structure_remove_field (s, "width");
    gst_structure_remove_field (s, "height");
    gst_structure_remove_field (s, "rate");
    gst_structure_remove_field (s, "depth");
    gst_structure_remove_field (s, "clock-rate");
    s_str = gst_structure_to_string (s);
    g_print ("%s-%s\n", type_name, s_str);
    g_free (s_str);
  }
  gst_caps_unref (caps);
}
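
Given the g_print() format above ("%s-%s\n" with the type name and the stripped caps structure), the function emits one line per caps structure. Illustrative examples of the shape of that output, not captured from a real run:

decoder-video/mpeg, mpegversion=(int)4
encoder-audio/x-vorbis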
Code example #24
File: videorate.c Project: kuailexs/symbiandump-mw1
/* This test pushes 2 buffers of the same dimensions (320x240), then 1 buffer
 * of differing dimensions (240x120), and then two more buffers of the
 * original dimensions (320x240), and checks that the buffers pushed out as a
 * result carry the correct caps.
 */
void test_changing_size()
{
  GstElement *videorate;
  GstBuffer *first;
  GstBuffer *second;
  GstBuffer *third;
  GstBuffer *fourth;
  GstBuffer *fifth;
  GstBuffer *outbuf;
  GstEvent *newsegment;
  GstCaps *caps, *caps_newsize;

  videorate = setup_videorate ();
  fail_unless (gst_element_set_state (videorate,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");

  newsegment = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1,
      0);
  fail_unless (gst_pad_push_event (mysrcpad, newsegment) == TRUE);

  first = gst_buffer_new_and_alloc (4);
  memset (GST_BUFFER_DATA (first), 0, 4);
  caps = gst_caps_from_string (VIDEO_CAPS_STRING);
  GST_BUFFER_TIMESTAMP (first) = 0;
  gst_buffer_set_caps (first, caps);

  GST_DEBUG ("pushing first buffer");
  fail_unless (gst_pad_push (mysrcpad, first) == GST_FLOW_OK);

  /* second buffer */
  second = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (second) = GST_SECOND / 25;
  memset (GST_BUFFER_DATA (second), 0, 4);
  gst_buffer_set_caps (second, caps);

  fail_unless (gst_pad_push (mysrcpad, second) == GST_FLOW_OK);
  fail_unless_equals_int (g_list_length (buffers), 1);
  outbuf = buffers->data;
  /* first buffer should be output here */
  fail_unless (gst_caps_is_equal (GST_BUFFER_CAPS (outbuf), caps));
  fail_unless (GST_BUFFER_TIMESTAMP (outbuf) == 0);

  /* third buffer with new size */
  third = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (third) = 2 * GST_SECOND / 25;
  memset (GST_BUFFER_DATA (third), 0, 4);
  caps_newsize = gst_caps_from_string (VIDEO_CAPS_NEWSIZE_STRING);
  gst_buffer_set_caps (third, caps_newsize);

  fail_unless (gst_pad_push (mysrcpad, third) == GST_FLOW_OK);
  /* new caps flushed the internal state, no new output yet */
  fail_unless_equals_int (g_list_length (buffers), 1);
  outbuf = g_list_last (buffers)->data;
  /* first buffer should be output here */
  fail_unless (gst_caps_is_equal (GST_BUFFER_CAPS (outbuf), caps));
  fail_unless (GST_BUFFER_TIMESTAMP (outbuf) == 0);

  /* fourth buffer with original size */
  fourth = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (fourth) = 3 * GST_SECOND / 25;
  memset (GST_BUFFER_DATA (fourth), 0, 4);
  gst_buffer_set_caps (fourth, caps);

  fail_unless (gst_pad_push (mysrcpad, fourth) == GST_FLOW_OK);
  fail_unless_equals_int (g_list_length (buffers), 1);

  /* fifth buffer with original size */
  fifth = gst_buffer_new_and_alloc (4);
  GST_BUFFER_TIMESTAMP (fifth) = 4 * GST_SECOND / 25;
  memset (GST_BUFFER_DATA (fifth), 0, 4);
  gst_buffer_set_caps (fifth, caps);

  fail_unless (gst_pad_push (mysrcpad, fifth) == GST_FLOW_OK);
  /* three more buffers are pushed out here, dups based on the fourth buffer */
  fail_unless_equals_int (g_list_length (buffers), 4);
  outbuf = g_list_last (buffers)->data;
  /* the fourth buffer should be output here */
  fail_unless (GST_BUFFER_TIMESTAMP (outbuf) == 3 * GST_SECOND / 25);
  fail_unless (gst_caps_is_equal (GST_BUFFER_CAPS (outbuf), caps));

  gst_caps_unref (caps);
  gst_caps_unref (caps_newsize);
  cleanup_videorate (videorate);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Code example #25
File: rsndec.c Project: ylatuya/gst-plugins-bad
static gboolean
rsndec_factory_filter (GstPluginFeature * feature, RsnDecFactoryFilterCtx * ctx)
{
  GstElementFactory *factory;
  guint rank;
  const gchar *klass;
  const GList *templates;
  GList *walk;
  gboolean can_sink = FALSE;

  /* we only care about element factories */
  if (!GST_IS_ELEMENT_FACTORY (feature))
    return FALSE;

  factory = GST_ELEMENT_FACTORY (feature);

  klass =
      gst_element_factory_get_metadata (factory, GST_ELEMENT_METADATA_KLASS);
  /* only decoders can play */
  if (strstr (klass, "Decoder") == NULL)
    return FALSE;

  /* only select elements with autoplugging rank */
  rank = gst_plugin_feature_get_rank (feature);
  if (rank < GST_RANK_MARGINAL)
    return FALSE;

  /* See if the element has a sink pad that can possibly sink this caps */

  /* get the templates from the element factory */
  templates = gst_element_factory_get_static_pad_templates (factory);
  for (walk = (GList *) templates; walk && !can_sink; walk = g_list_next (walk)) {
    GstStaticPadTemplate *templ = walk->data;

    /* we only care about the sink templates */
    if (templ->direction == GST_PAD_SINK) {
      GstCaps *intersect;
      GstCaps *tmpl_caps;

      /* try to intersect the caps with the caps of the template */
      tmpl_caps = gst_static_caps_get (&templ->static_caps);

      intersect = gst_caps_intersect (ctx->desired_caps, tmpl_caps);
      gst_caps_unref (tmpl_caps);

      /* check if the intersection is empty */
      if (!gst_caps_is_empty (intersect)) {
        /* non empty intersection, we can use this element */
        can_sink = TRUE;
        ctx->decoder_caps = gst_caps_merge (ctx->decoder_caps, intersect);
      } else
        gst_caps_unref (intersect);
    }
  }

  if (can_sink) {
    GST_DEBUG ("Found decoder element %s (%s)",
        gst_element_factory_get_metadata (factory,
            GST_ELEMENT_METADATA_LONGNAME),
        gst_plugin_feature_get_name (feature));
  }

  return can_sink;
}
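
A filter like this is typically driven through gst_registry_feature_filter(). Below is a minimal sketch of such a call site; the wrapper name is hypothetical, and the RsnDecFactoryFilterCtx field names are inferred from their use above:

static GList *
rsndec_find_decoders_sketch (GstCaps * desired_caps, GstCaps ** decoder_caps)
{
  RsnDecFactoryFilterCtx ctx;
  GList *factories;

  ctx.desired_caps = desired_caps;
  ctx.decoder_caps = gst_caps_new_empty ();

  /* Collect every feature accepted by the filter, then sort by rank. */
  factories = gst_registry_feature_filter (gst_registry_get (),
      (GstPluginFeatureFilter) rsndec_factory_filter, FALSE, &ctx);
  factories = g_list_sort (factories,
      gst_plugin_feature_rank_compare_func);

  *decoder_caps = ctx.decoder_caps;   /* caps the matched decoders can sink */
  return factories;   /* free with gst_plugin_feature_list_free () */
}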
Code example #26
File: gstjasperdec.c Project: LCW523/gst-plugins-bad
static GstFlowReturn
gst_jasper_dec_negotiate (GstJasperDec * dec, jas_image_t * image)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  gint width, height, channels;
  gint i, j;
  gboolean negotiate = FALSE;
  jas_clrspc_t clrspc;
  GstCaps *allowed_caps, *caps;

  width = jas_image_width (image);
  height = jas_image_height (image);
  channels = jas_image_numcmpts (image);

  GST_LOG_OBJECT (dec, "%d x %d, %d components", width, height, channels);

  /* jp2c bitstream has no real colour space info (kept in container),
   * so decoder may only pretend to know, where it really does not */
  if (!jas_clrspc_isunknown (dec->clrspc)) {
    clrspc = dec->clrspc;
    GST_DEBUG_OBJECT (dec, "forcing container supplied colour space %d",
        clrspc);
    jas_image_setclrspc (image, clrspc);
  } else
    clrspc = jas_image_clrspc (image);

  if (!width || !height || !channels || jas_clrspc_isunknown (clrspc))
    goto fail_image;

  if (dec->width != width || dec->height != height ||
      dec->channels != channels || dec->clrspc != clrspc)
    negotiate = TRUE;

  if (channels != 3)
    goto not_supported;

  for (i = 0; i < channels; i++) {
    gint cheight, cwidth, depth, sgnd;

    cheight = jas_image_cmptheight (image, i);
    cwidth = jas_image_cmptwidth (image, i);
    depth = jas_image_cmptprec (image, i);
    sgnd = jas_image_cmptsgnd (image, i);

    GST_LOG_OBJECT (dec, "image component %d, %dx%d, depth %d, sgnd %d", i,
        cwidth, cheight, depth, sgnd);

    if (depth != 8 || sgnd)
      goto not_supported;

    if (dec->cheight[i] != cheight || dec->cwidth[i] != cwidth) {
      dec->cheight[i] = cheight;
      dec->cwidth[i] = cwidth;
      negotiate = TRUE;
    }
  }

  if (!negotiate && dec->format != GST_VIDEO_FORMAT_UNKNOWN)
    goto done;

  /* clear and refresh to new state */
  flow_ret = GST_FLOW_NOT_NEGOTIATED;
  dec->format = GST_VIDEO_FORMAT_UNKNOWN;
  dec->width = width;
  dec->height = height;
  dec->channels = channels;

  /* retrieve allowed caps, and find the first one that reasonably maps
   * to the parameters of the colourspace */
  caps = gst_pad_get_allowed_caps (dec->srcpad);
  if (!caps) {
    GST_DEBUG_OBJECT (dec, "... but no peer, using template caps");
    /* need to copy because get_allowed_caps returns a ref,
       and get_pad_template_caps doesn't */
    caps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad));
  }
  /* avoid lists of fourcc, etc */
  allowed_caps = gst_caps_normalize (caps);
  gst_caps_unref (caps);
  caps = NULL;
  GST_LOG_OBJECT (dec, "allowed source caps %" GST_PTR_FORMAT, allowed_caps);

  for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
    GstVideoFormat format;
    gboolean ok;

    if (caps)
      gst_caps_unref (caps);
    caps = gst_caps_copy_nth (allowed_caps, i);
    /* sigh, ds and _parse_caps need fixed caps for parsing, fixate */
    gst_pad_fixate_caps (dec->srcpad, caps);
    GST_LOG_OBJECT (dec, "checking caps %" GST_PTR_FORMAT, caps);
    if (!gst_video_format_parse_caps (caps, &format, NULL, NULL))
      continue;
    if (gst_video_format_is_rgb (format) &&
        jas_clrspc_fam (clrspc) == JAS_CLRSPC_FAM_RGB) {
      GST_DEBUG_OBJECT (dec, "trying RGB");
      if ((dec->cmpt[0] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_R))) < 0 ||
          (dec->cmpt[1] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_G))) < 0 ||
          (dec->cmpt[2] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_B))) < 0) {
        GST_DEBUG_OBJECT (dec, "missing RGB color component");
        continue;
      }
    } else if (gst_video_format_is_yuv (format) &&
        jas_clrspc_fam (clrspc) == JAS_CLRSPC_FAM_YCBCR) {
      GST_DEBUG_OBJECT (dec, "trying YUV");
      if ((dec->cmpt[0] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_Y))) < 0 ||
          (dec->cmpt[1] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_CB))) < 0 ||
          (dec->cmpt[2] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_CR))) < 0) {
        GST_DEBUG_OBJECT (dec, "missing YUV color component");
        continue;
      }
    } else
      continue;
    /* match format with validity checks */
    ok = TRUE;
    for (j = 0; j < channels; j++) {
      gint cmpt;

      cmpt = dec->cmpt[j];
      if (dec->cwidth[cmpt] != gst_video_format_get_component_width (format, j,
              width) ||
          dec->cheight[cmpt] != gst_video_format_get_component_height (format,
              j, height))
        ok = FALSE;
    }
    /* commit to this format */
    if (ok) {
      dec->format = format;
      break;
    }
  }

  if (caps)
    gst_caps_unref (caps);
  gst_caps_unref (allowed_caps);

  if (dec->format != GST_VIDEO_FORMAT_UNKNOWN) {
    /* cache some video format properties */
    for (j = 0; j < channels; ++j) {
      dec->offset[j] = gst_video_format_get_component_offset (dec->format, j,
          dec->width, dec->height);
      dec->inc[j] = gst_video_format_get_pixel_stride (dec->format, j);
      dec->stride[j] = gst_video_format_get_row_stride (dec->format, j,
          dec->width);
    }
    dec->image_size = gst_video_format_get_size (dec->format, width, height);
    dec->alpha = gst_video_format_has_alpha (dec->format);

    if (dec->buf)
      g_free (dec->buf);
    dec->buf = g_new0 (glong, dec->width);

    caps = gst_video_format_new_caps (dec->format, dec->width, dec->height,
        dec->framerate_numerator, dec->framerate_denominator, 1, 1);

    GST_DEBUG_OBJECT (dec, "Set format to %d, size to %dx%d", dec->format,
        dec->width, dec->height);

    if (!gst_pad_set_caps (dec->srcpad, caps))
      flow_ret = GST_FLOW_NOT_NEGOTIATED;
    else
      flow_ret = GST_FLOW_OK;

    gst_caps_unref (caps);
  }

done:
  return flow_ret;

  /* ERRORS */
fail_image:
  {
    GST_DEBUG_OBJECT (dec, "Failed to process decoded image.");
    flow_ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
not_supported:
  {
    GST_DEBUG_OBJECT (dec, "Decoded image has unsupported colour space.");
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), ("Unsupported colorspace"));
    flow_ret = GST_FLOW_ERROR;
    goto done;
  }
}
Code example #27
/*
 * Creates a RTP pipeline for one test.
 * @param frame_data Pointer to the frame data that is passed through the pay/depayloaders.
 * @param frame_data_size Frame data size in bytes.
 * @param frame_count Frame count.
 * @param filtercaps Caps filters.
 * @param pay Payloader name.
 * @param depay Depayloader name.
 * @return
 * Returns pointer to the RTP pipeline.
 * The user must free the RTP pipeline when it's not used anymore.
 */
static rtp_pipeline *
rtp_pipeline_create (const guint8 * frame_data, int frame_data_size,
    int frame_count, const char *filtercaps, const char *pay, const char *depay)
{
  gchar *pipeline_name;
  rtp_pipeline *p;
  GstCaps *caps;

  /* Check parameters. */
  if (!frame_data || !pay || !depay) {
    return NULL;
  }

  /* Allocate memory for the RTP pipeline. */
  p = (rtp_pipeline *) malloc (sizeof (rtp_pipeline));
  if (p == NULL) {
    return NULL;
  }

  p->frame_data = frame_data;
  p->frame_data_size = frame_data_size;
  p->frame_count = frame_count;

  /* Create elements. */
  pipeline_name = g_strdup_printf ("%s-%s-pipeline", pay, depay);
  p->pipeline = gst_pipeline_new (pipeline_name);
  g_free (pipeline_name);
  p->appsrc = gst_element_factory_make ("appsrc", NULL);
  p->rtppay = gst_element_factory_make (pay, NULL);
  p->rtpdepay = gst_element_factory_make (depay, NULL);
  p->fakesink = gst_element_factory_make ("fakesink", NULL);

  /* Bail out if the pipeline or any of the elements could not be created. */
  if (!p->pipeline || !p->appsrc || !p->rtppay || !p->rtpdepay || !p->fakesink) {
    /* Release created elements. */
    RELEASE_ELEMENT (p->pipeline);
    RELEASE_ELEMENT (p->appsrc);
    RELEASE_ELEMENT (p->rtppay);
    RELEASE_ELEMENT (p->rtpdepay);
    RELEASE_ELEMENT (p->fakesink);

    /* Release allocated memory. */
    free (p);

    return NULL;
  }

  /* Set src properties. */
  caps = gst_caps_from_string (filtercaps);
  g_object_set (p->appsrc, "do-timestamp", TRUE, "caps", caps, NULL);
  gst_caps_unref (caps);

  /* Add elements to the pipeline. */
  gst_bin_add (GST_BIN (p->pipeline), p->appsrc);
  gst_bin_add (GST_BIN (p->pipeline), p->rtppay);
  gst_bin_add (GST_BIN (p->pipeline), p->rtpdepay);
  gst_bin_add (GST_BIN (p->pipeline), p->fakesink);

  /* Link elements. */
  gst_element_link (p->appsrc, p->rtppay);
  gst_element_link (p->rtppay, p->rtpdepay);
  gst_element_link (p->rtpdepay, p->fakesink);

  return p;
}
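
A typical call site for this helper might look as follows; the frame bytes, caps string, and teardown are illustrative assumptions (rtph263ppay/rtph263pdepay are the H.263+ payloader pair from gst-plugins-good):

static void
run_h263p_pipeline_sketch (void)
{
  /* Hypothetical frame bytes, for illustration only. */
  static const guint8 frame_data[] = { 0x00, 0x00, 0x80, 0x02 };
  rtp_pipeline *p;

  p = rtp_pipeline_create (frame_data, sizeof (frame_data), 1,
      "video/x-h263, variant=(string)itu, h263version=(string)h263p",
      "rtph263ppay", "rtph263pdepay");
  if (p == NULL)
    return;

  /* ... set the pipeline to PLAYING, push frames, verify output ... */

  gst_object_unref (p->pipeline);   /* then free the struct itself */
  free (p);
}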
Code example #28
File: gspipe.cpp Project: ljb2208/ljb_ros_ws
	bool GSPipe::init_stream() {
		if(!gst_is_initialized()) {
		  // Initialize gstreamer pipeline
		  ROS_DEBUG_STREAM( "Initializing gstreamer..." );
		  gst_init(NULL, NULL);
		}

		ROS_DEBUG_STREAM( "Gstreamer Version: " << gst_version_string() );

		GError *error = 0; // Assignment to zero is a gst requirement

		//pipeline_ = gst_parse_launch(gsconfig_.c_str(), &error);
		pipeline_ = gst_parse_launch(pipeline_str.c_str(), &error);
		if (pipeline_ == NULL) {
		  ROS_FATAL_STREAM( error->message );
		  return false;
		}

		sink_ = gst_element_factory_make("appsink",NULL);
		GstCaps * caps = image_encoding_ == sensor_msgs::image_encodings::RGB8 ?
			gst_caps_new_simple("video/x-raw-rgb", NULL) :
			gst_caps_new_simple("video/x-raw-gray", NULL);
		gst_app_sink_set_caps(GST_APP_SINK(sink_), caps);
		gst_caps_unref(caps);

		gst_base_sink_set_sync(
		        GST_BASE_SINK(sink_),
		        (sync_sink_) ? TRUE : FALSE);

		if(GST_IS_PIPELINE(pipeline_)) {
		  GstPad *outpad = gst_bin_find_unlinked_pad(GST_BIN(pipeline_), GST_PAD_SRC);
		  g_assert(outpad);

		  GstElement *outelement = gst_pad_get_parent_element(outpad);
		  g_assert(outelement);
		  gst_object_unref(outpad);

		  if(!gst_bin_add(GST_BIN(pipeline_), sink_)) {
			ROS_FATAL("gst_bin_add() failed");
			gst_object_unref(outelement);
			gst_object_unref(pipeline_);
			return false;
		  }

		  if(!gst_element_link(outelement, sink_)) {
			ROS_FATAL("GStreamer: cannot link outelement(\"%s\") -> sink\n", gst_element_get_name(outelement));
			gst_object_unref(outelement);
			gst_object_unref(pipeline_);
			return false;
		  }

		  gst_object_unref(outelement);
		} else {
		  GstElement* launchpipe = pipeline_;
		  pipeline_ = gst_pipeline_new(NULL);
		  g_assert(pipeline_);

		  gst_object_unparent(GST_OBJECT(launchpipe));

		  gst_bin_add_many(GST_BIN(pipeline_), launchpipe, sink_, NULL);

		  if(!gst_element_link(launchpipe, sink_)) {
			ROS_FATAL("GStreamer: cannot link launchpipe -> sink");
			gst_object_unref(pipeline_);
			return false;
		  }
		}

		gst_element_set_state(pipeline_, GST_STATE_PAUSED);

		if (gst_element_get_state(pipeline_, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
		  ROS_FATAL("Failed to PAUSE stream, check your gstreamer configuration.");
		  return false;
		} else {
		  ROS_DEBUG_STREAM("Stream is PAUSED.");
		}
		// Create ROS camera interface
		camera_pub_ = image_transport_.advertiseCamera("camera/image_raw", 1);

		return true;
	}
Code example #29
/* creates/returns a list of CodecCap based on given filter function and caps */
static GList *
get_plugins_filtered_from_caps (FilterFunc filter,
                                GstCaps *caps,
                                GstPadDirection direction)
{
  GList *walk, *result;
  GList *list = NULL;
  GstCaps *matched_caps = NULL;

  result = gst_registry_get_feature_list (gst_registry_get (),
          GST_TYPE_ELEMENT_FACTORY);

  result = g_list_sort (result, (GCompareFunc) compare_ranks);

  for (walk = result; walk; walk = walk->next)
  {
    GstElementFactory *factory = GST_ELEMENT_FACTORY (walk->data);

    /* Ignore unranked plugins */
    if (gst_plugin_feature_get_rank (GST_PLUGIN_FEATURE (factory)) ==
        GST_RANK_NONE)
      continue;

    if (!filter (factory))
      continue;

    if (caps && !check_caps_compatibility (factory, caps, &matched_caps))
      continue;

    if (!matched_caps)
    {
      list = create_codec_cap_list (factory, direction, list, NULL);
    }
    else
    {
      gint i;
      GPtrArray *capslist = g_ptr_array_new_with_free_func (
        (GDestroyNotify) gst_caps_unref);

      while (gst_caps_get_size (matched_caps) > 0)
      {
        GstCaps *stolencaps = gst_caps_new_full (
          gst_caps_steal_structure (matched_caps, 0), NULL);
        gboolean got_match = FALSE;

        for (i = 0; i < capslist->len; i++)
        {
          GstCaps *intersect = gst_caps_intersect (stolencaps,
              g_ptr_array_index (capslist, i));

          if (gst_caps_is_empty (intersect))
          {
            gst_caps_unref (intersect);
          }
          else
          {
            got_match = TRUE;
            gst_caps_unref (g_ptr_array_index (capslist, i));
            g_ptr_array_index (capslist, i) = intersect;
          }
        }

        if (got_match)
          gst_caps_unref (stolencaps);
        else
          g_ptr_array_add (capslist, stolencaps);

      }
      gst_caps_unref (matched_caps);
      matched_caps = NULL;  /* reset so a stale pointer is never reused for the next factory */

      for (i = 0; i < capslist->len; i++)
        list = create_codec_cap_list (factory, direction, list,
            g_ptr_array_index (capslist, i));
      g_ptr_array_unref (capslist);
    }
  }

  gst_plugin_feature_list_free (result);

  return list;
}
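
FilterFunc is the factory predicate applied above. For illustration, a trivial filter that keeps only factories whose klass metadata marks them as encoders; this is an assumed example matching the gboolean (*)(GstElementFactory *) shape implied by the filter (factory) call:

#include <string.h>

static gboolean
is_encoder_filter (GstElementFactory *factory)
{
  const gchar *klass = gst_element_factory_get_metadata (factory,
      GST_ELEMENT_METADATA_KLASS);

  /* Keep only factories whose klass string mentions "Encoder". */
  return klass != NULL && strstr (klass, "Encoder") != NULL;
}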
Code example #30
static GstFlowReturn webkitMediaPlayReadyDecryptTransformInPlace(GstBaseTransform* base, GstBuffer* buffer)
{
    WebKitMediaPlayReadyDecrypt* self = WEBKIT_MEDIA_PLAYREADY_DECRYPT(base);
    GstFlowReturn result = GST_FLOW_OK;
    GstMapInfo map;
    const GValue* value;
    guint sampleIndex = 0;
    int errorCode;
    uint32_t trackID = 0;
    GstPad* pad;
    GstCaps* caps;
    GstMapInfo boxMap;
    GstBuffer* box = nullptr;
    GstProtectionMeta* protectionMeta = 0;
    gboolean boxMapped = FALSE;
    gboolean bufferMapped = FALSE;

    GST_TRACE_OBJECT(self, "Processing buffer");
    g_mutex_lock(&self->mutex);
    GST_TRACE_OBJECT(self, "Mutex acquired, stream received: %s", self->streamReceived ? "yes":"no");

    // The key might not have been received yet. Wait for it.
    if (!self->streamReceived)
        g_cond_wait(&self->condition, &self->mutex);

    if (!self->streamReceived) {
        GST_DEBUG_OBJECT(self, "Condition signaled from state change transition. Aborting.");
        result = GST_FLOW_NOT_SUPPORTED;
        goto beach;
    }

    GST_TRACE_OBJECT(self, "Proceeding with decryption");
    protectionMeta = reinterpret_cast<GstProtectionMeta*>(gst_buffer_get_protection_meta(buffer));
    if (!protectionMeta || !buffer) {
        if (!protectionMeta)
            GST_ERROR_OBJECT(self, "Failed to get GstProtection metadata from buffer %p", buffer);

        if (!buffer)
            GST_ERROR_OBJECT(self, "Failed to get writable buffer");

        result = GST_FLOW_NOT_SUPPORTED;
        goto beach;
    }

    bufferMapped = gst_buffer_map(buffer, &map, static_cast<GstMapFlags>(GST_MAP_READWRITE));
    if (!bufferMapped) {
        GST_ERROR_OBJECT(self, "Failed to map buffer");
        result = GST_FLOW_NOT_SUPPORTED;
        goto beach;
    }

    pad = gst_element_get_static_pad(GST_ELEMENT(self), "src");
    caps = gst_pad_get_current_caps(pad);
    if (g_str_has_prefix(gst_structure_get_name(gst_caps_get_structure(caps, 0)), "video/"))
        trackID = 1;
    else
        trackID = 2;
    gst_caps_unref(caps);
    gst_object_unref(pad);

    if (!gst_structure_get_uint(protectionMeta->info, "sample-index", &sampleIndex)) {
        GST_ERROR_OBJECT(self, "failed to get sample-index");
        result = GST_FLOW_NOT_SUPPORTED;
        goto beach;
    }

    value = gst_structure_get_value(protectionMeta->info, "box");
    if (!value) {
        GST_ERROR_OBJECT(self, "Failed to get encryption box for sample");
        result = GST_FLOW_NOT_SUPPORTED;
        goto beach;
    }

    box = gst_value_get_buffer(value);
    boxMapped = gst_buffer_map(box, &boxMap, GST_MAP_READ);
    if (!boxMapped) {
        GST_ERROR_OBJECT(self, "Failed to map encryption box");
        result = GST_FLOW_NOT_SUPPORTED;
        goto beach;
    }

    GST_TRACE_OBJECT(self, "decrypt sample %u", sampleIndex);
    if ((errorCode = self->sessionMetaData->decrypt(static_cast<void*>(map.data), static_cast<uint32_t>(map.size),
        static_cast<void*>(boxMap.data), static_cast<uint32_t>(boxMap.size), static_cast<uint32_t>(sampleIndex), trackID))) {
        GST_WARNING_OBJECT(self, "ERROR - packet decryption failed [%d]", errorCode);
        GST_MEMDUMP_OBJECT(self, "box", boxMap.data, boxMap.size);
        result = GST_FLOW_ERROR;
        goto beach;
    }

beach:
    if (boxMapped)
        gst_buffer_unmap(box, &boxMap);

    if (bufferMapped)
        gst_buffer_unmap(buffer, &map);

    if (protectionMeta)
        gst_buffer_remove_meta(buffer, reinterpret_cast<GstMeta*>(protectionMeta));

    GST_TRACE_OBJECT(self, "Unlocking mutex");
    g_mutex_unlock(&self->mutex);
    return result;
}