Example #1
static void
gst_vp8_enc_set_stream_info (GstVPXEnc * enc, GstCaps * caps,
    GstVideoInfo * info)
{
  GstStructure *s;
  GstVideoEncoder *video_encoder;
  GstBuffer *stream_hdr, *vorbiscomment;
  const GstTagList *iface_tags;
  GValue array = { 0, };
  GValue value = { 0, };
  guint8 *data = NULL;
  GstMapInfo map;

  video_encoder = GST_VIDEO_ENCODER (enc);
  s = gst_caps_get_structure (caps, 0);

  /* put buffers in a fixed list */
  g_value_init (&array, GST_TYPE_ARRAY);
  g_value_init (&value, GST_TYPE_BUFFER);

  /* Create Ogg stream-info */
  stream_hdr = gst_buffer_new_and_alloc (26);
  gst_buffer_map (stream_hdr, &map, GST_MAP_WRITE);
  data = map.data;

  GST_WRITE_UINT8 (data, 0x4F);
  GST_WRITE_UINT32_BE (data + 1, 0x56503830);   /* "VP80" */
  GST_WRITE_UINT8 (data + 5, 0x01);     /* stream info header */
  GST_WRITE_UINT8 (data + 6, 1);        /* Major version 1 */
  GST_WRITE_UINT8 (data + 7, 0);        /* Minor version 0 */
  GST_WRITE_UINT16_BE (data + 8, GST_VIDEO_INFO_WIDTH (info));
  GST_WRITE_UINT16_BE (data + 10, GST_VIDEO_INFO_HEIGHT (info));
  GST_WRITE_UINT24_BE (data + 12, GST_VIDEO_INFO_PAR_N (info));
  GST_WRITE_UINT24_BE (data + 15, GST_VIDEO_INFO_PAR_D (info));
  GST_WRITE_UINT32_BE (data + 18, GST_VIDEO_INFO_FPS_N (info));
  GST_WRITE_UINT32_BE (data + 22, GST_VIDEO_INFO_FPS_D (info));

  gst_buffer_unmap (stream_hdr, &map);

  GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_HEADER);
  gst_value_set_buffer (&value, stream_hdr);
  gst_value_array_append_value (&array, &value);
  g_value_unset (&value);
  gst_buffer_unref (stream_hdr);

  iface_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (video_encoder));
  if (iface_tags) {
    vorbiscomment =
        gst_tag_list_to_vorbiscomment_buffer (iface_tags,
        (const guint8 *) "OVP80\2 ", 7,
        "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

    GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_HEADER);

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, vorbiscomment);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (vorbiscomment);
  }

  gst_structure_set_value (s, "streamheader", &array);
  g_value_unset (&array);

}
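/* Illustrative sketch only (not part of the original element): how a
 * downstream element might read back the 26-byte "OVP80" stream-info header
 * written above. The field offsets mirror the writes in
 * gst_vp8_enc_set_stream_info (); the helper name is hypothetical and the
 * read macros are the usual ones from <gst/gst.h>. */
static gboolean
example_parse_vp8_stream_info (const guint8 * data, gsize size,
    guint * width, guint * height)
{
  if (size < 26 || data[0] != 0x4F)     /* 'O' */
    return FALSE;
  if (GST_READ_UINT32_BE (data + 1) != 0x56503830)      /* "VP80" */
    return FALSE;
  if (data[5] != 0x01)          /* stream info header */
    return FALSE;

  *width = GST_READ_UINT16_BE (data + 8);
  *height = GST_READ_UINT16_BE (data + 10);
  return TRUE;
}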
static gboolean
gst_genicamsrc_start (GstBaseSrc * bsrc)
{
  GstGenicamSrc *src = GST_GENICAM_SRC (bsrc);
  GC_ERROR ret;
  uint32_t i, num_ifaces, num_devs;
  guint32 width, height, bpp, stride;
  GstVideoInfo vinfo;

  GST_DEBUG_OBJECT (src, "start");

  /* bind functions from CTI */
  if (!gst_genicamsrc_bind_functions (src)) {
    GST_ELEMENT_ERROR (src, LIBRARY, INIT,
        ("GenTL CTI could not be opened: %s", g_module_error ()), (NULL));
    return FALSE;
  }

  /* initialize library and print info */
  ret = GTL_GCInitLib ();
  HANDLE_GTL_ERROR ("GenTL Producer library could not be initialized");

  gst_genicam_print_gentl_impl_info (src);

  /* open GenTL, print info, and update interface list */
  ret = GTL_TLOpen (&src->hTL);
  HANDLE_GTL_ERROR ("System module failed to open");

  gst_genicam_print_system_info (src);

  ret = GTL_TLUpdateInterfaceList (src->hTL, NULL, src->timeout);
  HANDLE_GTL_ERROR ("Failed to update interface list within timeout");

  /* print info for all interfaces and open specified interface */
  ret = GTL_TLGetNumInterfaces (src->hTL, &num_ifaces);
  HANDLE_GTL_ERROR ("Failed to get number of interfaces");
  if (num_ifaces > 0) {
    GST_DEBUG_OBJECT (src, "Found %dGenTL interfaces", num_ifaces);
    for (i = 0; i < num_ifaces; ++i) {
      gst_genicam_print_interface_info (src, i);
    }
  } else {
    GST_ELEMENT_ERROR (src, LIBRARY, FAILED, ("No interfaces found"), (NULL));
    goto error;
  }

  if (!src->interface_id || src->interface_id[0] == 0) {
    size_t id_size;
    GST_DEBUG_OBJECT (src, "Trying to find interface ID at index %d",
        src->interface_index);

    ret = GTL_TLGetInterfaceID (src->hTL, src->interface_index, NULL, &id_size);
    HANDLE_GTL_ERROR ("Failed to get interface ID at specified index");
    if (src->interface_id) {
      g_free (src->interface_id);
    }
    src->interface_id = (gchar *) g_malloc (id_size);
    ret =
        GTL_TLGetInterfaceID (src->hTL, src->interface_index, src->interface_id,
        &id_size);
    HANDLE_GTL_ERROR ("Failed to get interface ID at specified index");
  }

  GST_DEBUG_OBJECT (src, "Trying to open interface '%s'", src->interface_id);
  ret = GTL_TLOpenInterface (src->hTL, src->interface_id, &src->hIF);
  HANDLE_GTL_ERROR ("Interface module failed to open");

  ret = GTL_IFUpdateDeviceList (src->hIF, NULL, src->timeout);
  HANDLE_GTL_ERROR ("Failed to update device list within timeout");

  /* print info for all devices and open specified device */
  ret = GTL_IFGetNumDevices (src->hIF, &num_devs);
  HANDLE_GTL_ERROR ("Failed to get number of devices");
  if (num_devs > 0) {
    for (i = 0; i < num_devs; ++i) {
      gst_genicam_print_device_info (src, i);
    }
  } else {
    GST_ELEMENT_ERROR (src, LIBRARY, FAILED,
        ("No devices found on interface"), (NULL));
    goto error;
  }

  if (!src->device_id || src->device_id[0] == 0) {
    size_t id_size;
    GST_DEBUG_OBJECT (src, "Trying to find device ID at index %d",
        src->device_index);

    ret = GTL_IFGetDeviceID (src->hIF, src->device_index, NULL, &id_size);
    HANDLE_GTL_ERROR ("Failed to get device ID at specified index");
    if (src->device_id) {
      g_free (src->device_id);
    }
    src->device_id = (gchar *) g_malloc (id_size);
    ret =
        GTL_IFGetDeviceID (src->hIF, src->device_index, src->device_id,
        &id_size);
    HANDLE_GTL_ERROR ("Failed to get device ID at specified index");
  }

  GST_DEBUG_OBJECT (src, "Trying to open device '%s'", src->device_id);
  ret =
      GTL_IFOpenDevice (src->hIF, src->device_id, DEVICE_ACCESS_CONTROL,
      &src->hDEV);
  HANDLE_GTL_ERROR ("Failed to open device");

  /* find and open specified data stream id */
  if (!src->stream_id || src->stream_id[0] == 0) {
    size_t id_size;
    GST_DEBUG_OBJECT (src, "Trying to find stream ID at index %d",
        src->stream_index);

    ret =
        GTL_DevGetDataStreamID (src->hDEV, src->stream_index, NULL, &id_size);
    HANDLE_GTL_ERROR ("Failed to get stream ID at specified index");
    if (src->stream_id) {
      g_free (src->stream_id);
    }
    src->stream_id = (gchar *) g_malloc (id_size);
    ret =
        GTL_DevGetDataStreamID (src->hDEV, src->stream_index, src->stream_id,
        &id_size);
    HANDLE_GTL_ERROR ("Failed to get stream ID at specified index");
  }

  GST_DEBUG_OBJECT (src, "Trying to open data stream '%s'", src->stream_id);
  ret = GTL_DevOpenDataStream (src->hDEV, src->stream_id, &src->hDS);
  HANDLE_GTL_ERROR ("Failed to open data stream");

  {
    uint32_t num_urls = 0;
    char url[2048];
    size_t url_len = sizeof (url);
    INFO_DATATYPE datatype;
    const uint32_t url_index = 0;

    ret = GTL_DevGetPort (src->hDEV, &src->hDevPort);
    HANDLE_GTL_ERROR ("Failed to get port on device");
    ret = GTL_GCGetNumPortURLs (src->hDevPort, &num_urls);
    HANDLE_GTL_ERROR ("Failed to get number of port URLs");

    GST_DEBUG_OBJECT (src, "Found %d port URLs", num_urls);

    GST_DEBUG_OBJECT (src, "Trying to get URL index %d", url_index);
    ret =
        GTL_GCGetPortURLInfo (src->hDevPort, url_index, URL_INFO_URL,
        &datatype, url, &url_len);
    HANDLE_GTL_ERROR ("Failed to get URL");
    GST_DEBUG_OBJECT (src, "Found URL '%s'", url);

    g_assert (url_len > 6);
    if (g_str_has_prefix (url, "file")) {
      GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
          ("file url not supported yet"), (NULL));
      goto error;
    } else if (g_str_has_prefix (url, "local")) {
      GError *err = NULL;
      GMatchInfo *matchInfo;
      GRegex *regex;
      gchar *filename, *addr_str, *len_str;
      uint64_t addr;
      size_t len;
      gchar *buf;

      regex =
          g_regex_new
          ("local:(?:///)?(?<filename>[^;]+);(?<address>[^;]+);(?<length>[^?]+)(?:[?]SchemaVersion=([^&]+))?",
          (GRegexCompileFlags) 0, (GRegexMatchFlags) 0, &err);
      if (!regex) {
        goto error;
      }
      g_regex_match (regex, url, (GRegexMatchFlags) 0, &matchInfo);
      filename = g_match_info_fetch_named (matchInfo, "filename");
      addr_str = g_match_info_fetch_named (matchInfo, "address");
      len_str = g_match_info_fetch_named (matchInfo, "length");
      if (!filename || !addr_str || !len_str) {
        GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
            ("Failed to parse local URL"), (NULL));
        goto error;
      }

      addr = g_ascii_strtoull (addr_str, NULL, 16);
      len = g_ascii_strtoull (len_str, NULL, 16);
      buf = (gchar *) g_malloc (len);
      ret = GTL_GCReadPort (src->hDevPort, addr, buf, &len);
      HANDLE_GTL_ERROR ("Failed to read XML from port");

      if (g_str_has_suffix (filename, "zip")) {
        gchar *zipfilepath;
        unzFile uf;
        unz_file_info64 fileinfo;
        gchar xmlfilename[2048];
        gchar *xml;

        zipfilepath = g_build_filename (g_get_tmp_dir (), filename, NULL);
        if (!g_file_set_contents (zipfilepath, buf, len, &err)) {
          GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
              ("Failed to write zipped XML to %s", zipfilepath), (NULL));
          goto error;
        }
        uf = unzOpen64 (zipfilepath);
        if (!uf) {
          GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
              ("Failed to open zipped XML %s", zipfilepath), (NULL));
          goto error;
        }
        //ret = unzGetGlobalInfo64(uf, &gi);
        ret =
            unzGetCurrentFileInfo64 (uf, &fileinfo, xmlfilename,
            sizeof (xmlfilename), NULL, 0, NULL, 0);
        if (ret != UNZ_OK) {
          GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
              ("Failed to query zip file %s", zipfilepath), (NULL));
          goto error;
        }

        ret = unzOpenCurrentFile (uf);
        if (ret != UNZ_OK) {
          GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
              ("Failed to extract file %s", xmlfilename), (NULL));
          goto error;
        }

        xml = (gchar *) g_malloc (fileinfo.uncompressed_size);
        if (!xml) {
          GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
              ("Failed to allocate memory to extract XML file"), (NULL));
          goto error;
        }

        ret = unzReadCurrentFile (uf, xml, fileinfo.uncompressed_size);
        if (ret != fileinfo.uncompressed_size) {
          GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
              ("Failed to extract XML file %s", xmlfilename), (NULL));
          goto error;
        }
        unzClose (uf);
        g_free (zipfilepath);

        zipfilepath = g_build_filename (g_get_tmp_dir (), xmlfilename, NULL);
        g_file_set_contents (zipfilepath, xml, fileinfo.uncompressed_size,
            &err);
        g_free (zipfilepath);

        g_free (xml);
        //GZlibDecompressor *decompress;
        //char *unzipped;
        //gsize outbuf_size, bytes_read, bytes_written;
        //GInputStream *zippedstream, *unzippedstream;
        //decompress = g_zlib_decompressor_new (G_ZLIB_COMPRESSOR_FORMAT_ZLIB);

        ////zippedstream = g_memory_input_stream_new_from_data(buf, len, g_free);
        ////unzippedstream = g_converter_input_stream_new (zippedstream, G_CONVERTER(decompress));
        ////g_input_stream_read_all (G_INPUT_STREAM(unzippedstream), 
        ////    g_converter_output_stream
        //outbuf_size = 10000000;
        //unzipped = (gchar*) g_malloc(outbuf_size);
        //g_converter_convert (G_CONVERTER (decompress), buf, len, unzipped, outbuf_size, G_CONVERTER_NO_FLAGS, &bytes_read, &bytes_written, &err);
        //GST_DEBUG_OBJECT (src, unzipped);
      }

      g_free (filename);
      g_free (addr_str);
      g_free (len_str);
      g_free (buf);
    } else if (g_str_has_prefix (url, "http")) {
      GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
          ("file url not supported yet"), (NULL));
      goto error;
    }
  }

  {
    // TODO: use Genicam node map for this
    guint32 val = 0;
    size_t datasize = 4;
    ret = GTL_GCReadPort (src->hDevPort, 0x30204, &val, &datasize);
    HANDLE_GTL_ERROR ("Failed to get width");
    width = GUINT32_FROM_BE (val);
    ret = GTL_GCReadPort (src->hDevPort, 0x30224, &val, &datasize);
    HANDLE_GTL_ERROR ("Failed to get height");
    height = GUINT32_FROM_BE (val);

    bpp = 8;
  }

  if (!gst_genicamsrc_prepare_buffers (src)) {
    GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY, ("Failed to prepare buffers"),
        (NULL));
    goto error;
  }

  {
    ret =
        GTL_GCRegisterEvent (src->hDS, EVENT_NEW_BUFFER, &src->hNewBufferEvent);
    HANDLE_GTL_ERROR ("Failed to register New Buffer event");
  }

  ret =
      GTL_DSStartAcquisition (src->hDS, ACQ_START_FLAGS_DEFAULT,
      GENTL_INFINITE);
  HANDLE_GTL_ERROR ("Failed to start stream acquisition");

  {
    // TODO: use Genicam node map for this
    guint32 val;
    size_t datasize;

    /* set AcquisitionMode to Continuous */
    val = GUINT32_TO_BE (2);
    datasize = sizeof (val);
    ret = GTL_GCWritePort (src->hDevPort, 0x40004, &val, &datasize);
    HANDLE_GTL_ERROR ("Failed to start device acquisition");

    /* send AcquisitionStart command */
    val = GUINT32_TO_BE (1);
    datasize = sizeof (val);
    ret = GTL_GCWritePort (src->hDevPort, 0x40024, &val, &datasize);
    HANDLE_GTL_ERROR ("Failed to start device acquisition");
  }

  /* create caps */
  if (src->caps) {
    gst_caps_unref (src->caps);
    src->caps = NULL;
  }

  gst_video_info_init (&vinfo);

  if (bpp <= 8) {
    gst_video_info_set_format (&vinfo, GST_VIDEO_FORMAT_GRAY8, width, height);
    src->caps = gst_video_info_to_caps (&vinfo);
  } else if (bpp > 8 && bpp <= 16) {
    GValue val = G_VALUE_INIT;
    GstStructure *s;

    if (G_BYTE_ORDER == G_LITTLE_ENDIAN) {
      gst_video_info_set_format (&vinfo, GST_VIDEO_FORMAT_GRAY16_LE, width,
          height);
    } else if (G_BYTE_ORDER == G_BIG_ENDIAN) {
      gst_video_info_set_format (&vinfo, GST_VIDEO_FORMAT_GRAY16_BE, width,
          height);
    }
    src->caps = gst_video_info_to_caps (&vinfo);

    /* set bpp, extra info for GRAY16 so elements can scale properly */
    s = gst_caps_get_structure (src->caps, 0);
    g_value_init (&val, G_TYPE_INT);
    g_value_set_int (&val, bpp);
    gst_structure_set_value (s, "bpp", &val);
    g_value_unset (&val);
  } else {
    GST_ELEMENT_ERROR (src, STREAM, WRONG_TYPE,
        ("Unknown or unsupported bit depth (%d).", bpp), (NULL));
    return FALSE;
  }

  src->height = vinfo.height;
  src->gst_stride = GST_VIDEO_INFO_COMP_STRIDE (&vinfo, 0);

  GST_DEBUG_OBJECT (src, "starting acquisition");
//TODO: start acquisition engine

  /* TODO: check timestamps on buffers vs start time */
  src->acq_start_time =
      gst_clock_get_time (gst_element_get_clock (GST_ELEMENT (src)));

  return TRUE;

error:
  if (src->hDS) {
    GTL_DSClose (src->hDS);
    src->hDS = NULL;
  }

  if (src->hDEV) {
    GTL_DevClose (src->hDEV);
    src->hDEV = NULL;
  }

  if (src->hIF) {
    GTL_IFClose (src->hIF);
    src->hIF = NULL;
  }

  if (src->hTL) {
    GTL_TLClose (src->hTL);
    src->hTL = NULL;
  }

  GTL_GCCloseLib ();

  return FALSE;
}
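/* Assumed shape of the HANDLE_GTL_ERROR macro used throughout
 * gst_genicamsrc_start () above; the real definition lives elsewhere in the
 * plugin, so treat this as an illustrative sketch only. It checks the GenTL
 * return code stored in 'ret', posts an element error with the given
 * message, and jumps to the shared cleanup label. */
#define HANDLE_GTL_ERROR(msg)                                   \
  G_STMT_START {                                                \
    if (ret != GC_ERR_SUCCESS) {                                \
      GST_ELEMENT_ERROR (src, LIBRARY, FAILED,                  \
          (msg ", GenTL error code: %d", (gint) ret), (NULL));  \
      goto error;                                               \
    }                                                           \
  } G_STMT_END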
Example #3
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
	GstAravis* gst_aravis = GST_ARAVIS(src);
	GstStructure *structure;
	ArvPixelFormat pixel_format;
	int height, width;
	const GValue *frame_rate;
	const char *caps_string;
	const char *format_string;
	unsigned int i;

	GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

	arv_camera_stop_acquisition (gst_aravis->camera);

	if (gst_aravis->stream != NULL)
		g_object_unref (gst_aravis->stream);

	structure = gst_caps_get_structure (caps, 0);

	gst_structure_get_int (structure, "width", &width);
	gst_structure_get_int (structure, "height", &height);
	frame_rate = gst_structure_get_value (structure, "framerate");
	format_string = gst_structure_get_string (structure, "format");

	pixel_format = arv_pixel_format_from_gst_caps (gst_structure_get_name (structure), format_string);

	arv_camera_set_region (gst_aravis->camera, gst_aravis->offset_x, gst_aravis->offset_y, width, height);
	arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
	arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

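	/* Use roughly three frame periods (3e6 µs / frame rate) as the buffer
	 * timeout, but never less than the default timeout. */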
	if (frame_rate != NULL) {
		double dbl_frame_rate;

		dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
			(double) gst_value_get_fraction_denominator (frame_rate);

		GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
		arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

		if (dbl_frame_rate > 0.0)
			gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
							     3e6 / dbl_frame_rate);
		else
			gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
	} else
		gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

	GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs", gst_aravis->buffer_timeout_us);

	GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

	if(gst_aravis->gain_auto) {
		arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Gain = continuous");
	} else {
		if (gst_aravis->gain >= 0) {
			GST_DEBUG_OBJECT (gst_aravis, "Gain = %g", gst_aravis->gain);
			arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %g", arv_camera_get_gain (gst_aravis->camera));
	}

	if(gst_aravis->exposure_auto) {
		arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Exposure = continuous");
	} else {
		if (gst_aravis->exposure_time_us > 0.0) {
			GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
			arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));
	}

	if (gst_aravis->fixed_caps != NULL)
		gst_caps_unref (gst_aravis->fixed_caps);

	caps_string = arv_pixel_format_to_gst_caps_string (pixel_format);
	if (caps_string != NULL) {
		GstStructure *structure;
		GstCaps *caps;

		caps = gst_caps_new_empty ();
		structure = gst_structure_from_string (caps_string, NULL);
		gst_structure_set (structure,
				   "width", G_TYPE_INT, width,
				   "height", G_TYPE_INT, height,
				   NULL);

		if (frame_rate != NULL)
			gst_structure_set_value (structure, "framerate", frame_rate);

		gst_caps_append_structure (caps, structure);

		gst_aravis->fixed_caps = caps;
	} else
		gst_aravis->fixed_caps = NULL;

	gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
	gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

	if (ARV_IS_GV_STREAM (gst_aravis->stream) && gst_aravis->packet_resend)
		g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_ALWAYS, NULL);
	else
		g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);

	for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
		arv_stream_push_buffer (gst_aravis->stream,
					arv_buffer_new (gst_aravis->payload, NULL));

	GST_LOG_OBJECT (gst_aravis, "Start acquisition");
	arv_camera_start_acquisition (gst_aravis->camera);

	gst_aravis->timestamp_offset = 0;
	gst_aravis->last_timestamp = 0;

	return TRUE;
}
Example #4
static void
gst_video_scale_fixate_caps (GstBaseTransform * base, GstPadDirection direction,
    GstCaps * caps, GstCaps * othercaps)
{
  GstStructure *ins, *outs;
  const GValue *from_par, *to_par;
  GValue fpar = { 0, }, tpar = {
  0,};

  g_return_if_fail (gst_caps_is_fixed (caps));

  GST_DEBUG_OBJECT (base, "trying to fixate othercaps %" GST_PTR_FORMAT
      " based on caps %" GST_PTR_FORMAT, othercaps, caps);

  ins = gst_caps_get_structure (caps, 0);
  outs = gst_caps_get_structure (othercaps, 0);

  from_par = gst_structure_get_value (ins, "pixel-aspect-ratio");
  to_par = gst_structure_get_value (outs, "pixel-aspect-ratio");

  /* If we're fixating from the sinkpad we always set the PAR and
   * assume that missing PAR on the sinkpad means 1/1 and
   * missing PAR on the srcpad means undefined
   */
  if (direction == GST_PAD_SINK) {
    if (!from_par) {
      g_value_init (&fpar, GST_TYPE_FRACTION);
      gst_value_set_fraction (&fpar, 1, 1);
      from_par = &fpar;
    }
    if (!to_par) {
      g_value_init (&tpar, GST_TYPE_FRACTION_RANGE);
      gst_value_set_fraction_range_full (&tpar, 1, G_MAXINT, G_MAXINT, 1);
      to_par = &tpar;
    }
  } else {
    if (!to_par) {
      g_value_init (&tpar, GST_TYPE_FRACTION);
      gst_value_set_fraction (&tpar, 1, 1);
      to_par = &tpar;

      gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
          NULL);
    }
    if (!from_par) {
      g_value_init (&fpar, GST_TYPE_FRACTION);
      gst_value_set_fraction (&fpar, 1, 1);
      from_par = &fpar;
    }
  }

  /* we have both PAR but they might not be fixated */
  {
    gint from_w, from_h, from_par_n, from_par_d, to_par_n, to_par_d;
    gint w = 0, h = 0;
    gint from_dar_n, from_dar_d;
    gint num, den;

    /* from_par should be fixed */
    g_return_if_fail (gst_value_is_fixed (from_par));

    from_par_n = gst_value_get_fraction_numerator (from_par);
    from_par_d = gst_value_get_fraction_denominator (from_par);

    gst_structure_get_int (ins, "width", &from_w);
    gst_structure_get_int (ins, "height", &from_h);

    gst_structure_get_int (outs, "width", &w);
    gst_structure_get_int (outs, "height", &h);

    /* if both width and height are already fixed, we can't do anything
     * about it anymore */
    if (w && h) {
      guint n, d;

      GST_DEBUG_OBJECT (base, "dimensions already set to %dx%d, not fixating",
          w, h);
      if (!gst_value_is_fixed (to_par)) {
        if (gst_video_calculate_display_ratio (&n, &d, from_w, from_h,
                from_par_n, from_par_d, w, h)) {
          GST_DEBUG_OBJECT (base, "fixating to_par to %dx%d", n, d);
          if (gst_structure_has_field (outs, "pixel-aspect-ratio"))
            gst_structure_fixate_field_nearest_fraction (outs,
                "pixel-aspect-ratio", n, d);
          else if (n != d)
            gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
                n, d, NULL);
        }
      }
      goto done;
    }

    /* Calculate input DAR */
    if (!gst_util_fraction_multiply (from_w, from_h, from_par_n, from_par_d,
            &from_dar_n, &from_dar_d)) {
      GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
          ("Error calculating the output scaled size - integer overflow"));
      goto done;
    }

    GST_DEBUG_OBJECT (base, "Input DAR is %d/%d", from_dar_n, from_dar_d);

    /* If either width or height is fixed, there's not much we
     * can do either except choosing a height or width and PAR
     * that matches the DAR as well as possible
     */
    if (h) {
      GstStructure *tmp;
      gint set_w, set_par_n, set_par_d;

      GST_DEBUG_OBJECT (base, "height is fixed (%d)", h);

      /* If the PAR is fixed too, there's not much to do
       * except choosing the width that is nearest to the
       * width with the same DAR */
      if (gst_value_is_fixed (to_par)) {
        to_par_n = gst_value_get_fraction_numerator (to_par);
        to_par_d = gst_value_get_fraction_denominator (to_par);

        GST_DEBUG_OBJECT (base, "PAR is fixed %d/%d", to_par_n, to_par_d);

        if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_d,
                to_par_n, &num, &den)) {
          GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
              ("Error calculating the output scaled size - integer overflow"));
          goto done;
        }

        w = (guint) gst_util_uint64_scale_int (h, num, den);
        gst_structure_fixate_field_nearest_int (outs, "width", w);

        goto done;
      }

      /* The PAR is not fixed and it's quite likely that we can set
       * an arbitrary PAR. */

      /* Check if we can keep the input width */
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "width", from_w);
      gst_structure_get_int (tmp, "width", &set_w);

      /* Might have failed but try to keep the DAR nonetheless by
       * adjusting the PAR */
      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, h, set_w,
              &to_par_n, &to_par_d)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        gst_structure_free (tmp);
        goto done;
      }

      if (!gst_structure_has_field (tmp, "pixel-aspect-ratio"))
        gst_structure_set_value (tmp, "pixel-aspect-ratio", to_par);
      gst_structure_fixate_field_nearest_fraction (tmp, "pixel-aspect-ratio",
          to_par_n, to_par_d);
      gst_structure_get_fraction (tmp, "pixel-aspect-ratio", &set_par_n,
          &set_par_d);
      gst_structure_free (tmp);

      /* Check if the adjusted PAR is accepted */
      if (set_par_n == to_par_n && set_par_d == to_par_d) {
        if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
            set_par_n != set_par_d)
          gst_structure_set (outs, "width", G_TYPE_INT, set_w,
              "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d,
              NULL);
        goto done;
      }

      /* Otherwise scale the width to the new PAR and check if the
       * adjusted width is accepted. If all that fails we can't keep
       * the DAR */
      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_par_d,
              set_par_n, &num, &den)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        goto done;
      }

      w = (guint) gst_util_uint64_scale_int (h, num, den);
      gst_structure_fixate_field_nearest_int (outs, "width", w);
      if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
          set_par_n != set_par_d)
        gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
            set_par_n, set_par_d, NULL);

      goto done;
    } else if (w) {
      GstStructure *tmp;
      gint set_h, set_par_n, set_par_d;

      GST_DEBUG_OBJECT (base, "width is fixed (%d)", w);

      /* If the PAR is fixed too, there's not much to do
       * except choosing the height that is nearest to the
       * height with the same DAR */
      if (gst_value_is_fixed (to_par)) {
        to_par_n = gst_value_get_fraction_numerator (to_par);
        to_par_d = gst_value_get_fraction_denominator (to_par);

        GST_DEBUG_OBJECT (base, "PAR is fixed %d/%d", to_par_n, to_par_d);

        if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_d,
                to_par_n, &num, &den)) {
          GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
              ("Error calculating the output scaled size - integer overflow"));
          goto done;
        }

        h = (guint) gst_util_uint64_scale_int (w, den, num);
        gst_structure_fixate_field_nearest_int (outs, "height", h);

        goto done;
      }

      /* The PAR is not fixed and it's quite likely that we can set
       * an arbitrary PAR. */

      /* Check if we can keep the input height */
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "height", from_h);
      gst_structure_get_int (tmp, "height", &set_h);

      /* Might have failed but try to keep the DAR nonetheless by
       * adjusting the PAR */
      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_h, w,
              &to_par_n, &to_par_d)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        gst_structure_free (tmp);
        goto done;
      }
      if (!gst_structure_has_field (tmp, "pixel-aspect-ratio"))
        gst_structure_set_value (tmp, "pixel-aspect-ratio", to_par);
      gst_structure_fixate_field_nearest_fraction (tmp, "pixel-aspect-ratio",
          to_par_n, to_par_d);
      gst_structure_get_fraction (tmp, "pixel-aspect-ratio", &set_par_n,
          &set_par_d);
      gst_structure_free (tmp);

      /* Check if the adjusted PAR is accepted */
      if (set_par_n == to_par_n && set_par_d == to_par_d) {
        if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
            set_par_n != set_par_d)
          gst_structure_set (outs, "height", G_TYPE_INT, set_h,
              "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d,
              NULL);
        goto done;
      }

      /* Otherwise scale the height to the new PAR and check if the
       * adjusted height is accepted. If all that fails we can't keep
       * the DAR */
      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_par_d,
              set_par_n, &num, &den)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        goto done;
      }

      h = (guint) gst_util_uint64_scale_int (w, den, num);
      gst_structure_fixate_field_nearest_int (outs, "height", h);
      if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
          set_par_n != set_par_d)
        gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
            set_par_n, set_par_d, NULL);

      goto done;
    } else if (gst_value_is_fixed (to_par)) {
      GstStructure *tmp;
      gint set_h, set_w, f_h, f_w;

      to_par_n = gst_value_get_fraction_numerator (to_par);
      to_par_d = gst_value_get_fraction_denominator (to_par);

      /* Calculate scale factor for the PAR change */
      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_n,
              to_par_d, &num, &den)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        goto done;
      }

      /* Try to keep the input height (because of interlacing) */
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "height", from_h);
      gst_structure_get_int (tmp, "height", &set_h);

      /* This might have failed but try to scale the width
       * to keep the DAR nonetheless */
      w = (guint) gst_util_uint64_scale_int (set_h, num, den);
      gst_structure_fixate_field_nearest_int (tmp, "width", w);
      gst_structure_get_int (tmp, "width", &set_w);
      gst_structure_free (tmp);

      /* We kept the DAR and the height is nearest to the original height */
      if (set_w == w) {
        gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
            G_TYPE_INT, set_h, NULL);
        goto done;
      }

      f_h = set_h;
      f_w = set_w;

      /* If the former failed, try to keep the input width at least */
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "width", from_w);
      gst_structure_get_int (tmp, "width", &set_w);

      /* This might have failed but try to scale the width
       * to keep the DAR nonetheless */
      h = (guint) gst_util_uint64_scale_int (set_w, den, num);
      gst_structure_fixate_field_nearest_int (tmp, "height", h);
      gst_structure_get_int (tmp, "height", &set_h);
      gst_structure_free (tmp);

      /* We kept the DAR and the width is nearest to the original width */
      if (set_h == h) {
        gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
            G_TYPE_INT, set_h, NULL);
        goto done;
      }

      /* If all this failed, keep the height that was nearest to the original
       * height and the nearest possible width. This changes the DAR but
       * there's not much else to do here.
       */
      gst_structure_set (outs, "width", G_TYPE_INT, f_w, "height", G_TYPE_INT,
          f_h, NULL);
      goto done;
    } else {
      GstStructure *tmp;
      gint set_h, set_w, set_par_n, set_par_d, tmp2;

      /* width, height and PAR are not fixed but passthrough is not possible */

      /* First try to keep the height and width as good as possible
       * and scale PAR */
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "height", from_h);
      gst_structure_get_int (tmp, "height", &set_h);
      gst_structure_fixate_field_nearest_int (tmp, "width", from_w);
      gst_structure_get_int (tmp, "width", &set_w);

      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_h, set_w,
              &to_par_n, &to_par_d)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        goto done;
      }

      if (!gst_structure_has_field (tmp, "pixel-aspect-ratio"))
        gst_structure_set_value (tmp, "pixel-aspect-ratio", to_par);
      gst_structure_fixate_field_nearest_fraction (tmp, "pixel-aspect-ratio",
          to_par_n, to_par_d);
      gst_structure_get_fraction (tmp, "pixel-aspect-ratio", &set_par_n,
          &set_par_d);
      gst_structure_free (tmp);

      if (set_par_n == to_par_n && set_par_d == to_par_d) {
        gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
            G_TYPE_INT, set_h, NULL);

        if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
            set_par_n != set_par_d)
          gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
              set_par_n, set_par_d, NULL);
        goto done;
      }

      /* Otherwise try to scale width to keep the DAR with the set
       * PAR and height */
      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_par_d,
              set_par_n, &num, &den)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        goto done;
      }

      w = (guint) gst_util_uint64_scale_int (set_h, num, den);
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "width", w);
      gst_structure_get_int (tmp, "width", &tmp2);
      gst_structure_free (tmp);

      if (tmp2 == w) {
        gst_structure_set (outs, "width", G_TYPE_INT, tmp2, "height",
            G_TYPE_INT, set_h, NULL);
        if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
            set_par_n != set_par_d)
          gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
              set_par_n, set_par_d, NULL);
        goto done;
      }

      /* ... or try the same with the height */
      h = (guint) gst_util_uint64_scale_int (set_w, den, num);
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "height", h);
      gst_structure_get_int (tmp, "height", &tmp2);
      gst_structure_free (tmp);

      if (tmp2 == h) {
        gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
            G_TYPE_INT, tmp2, NULL);
        if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
            set_par_n != set_par_d)
          gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
              set_par_n, set_par_d, NULL);
        goto done;
      }

      /* If all fails we can't keep the DAR and take the nearest values
       * for everything from the first try */
      gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
          G_TYPE_INT, set_h, NULL);
      if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
          set_par_n != set_par_d)
        gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
            set_par_n, set_par_d, NULL);
    }
  }

done:
  GST_DEBUG_OBJECT (base, "fixated othercaps to %" GST_PTR_FORMAT, othercaps);

  if (from_par == &fpar)
    g_value_unset (&fpar);
  if (to_par == &tpar)
    g_value_unset (&tpar);
}
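/* Worked example of the DAR math used above (illustrative only, the numbers
 * are hypothetical): a 720x576 input with a 16/15 pixel-aspect-ratio has a
 * display-aspect-ratio of 4/3, so with the output height fixed to 480 and an
 * output PAR of 1/1 the fixated width becomes 480 * 4/3 = 640. */
static void
example_fixate_width_from_dar (void)
{
  gint dar_n, dar_d, num, den;
  guint w;

  /* input DAR = (width / height) * (par_n / par_d) = (720/576) * (16/15) = 4/3 */
  gst_util_fraction_multiply (720, 576, 16, 15, &dar_n, &dar_d);

  /* scale factor for an output PAR of 1/1: DAR * (par_d / par_n) = 4/3 */
  gst_util_fraction_multiply (dar_n, dar_d, 1, 1, &num, &den);

  /* width = height * num / den = 480 * 4 / 3 = 640 */
  w = (guint) gst_util_uint64_scale_int (480, num, den);
  g_assert (w == 640);
}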
Example #5
/* 
 * Performs the face detection
 */
static GstFlowReturn
gst_facedetect_transform_ip (GstOpencvVideoFilter * base, GstBuffer * buf,
    IplImage * img)
{
  Gstfacedetect *filter;
  CvSeq *faces;
  int i;

  filter = GST_FACEDETECT (base);

  cvCvtColor (img, filter->cvGray, CV_RGB2GRAY);
  cvClearMemStorage (filter->cvStorage);

  if (filter->cvCascade) {
    GstMessage *msg = NULL;
    GValue facelist = { 0 };

    faces =
        cvHaarDetectObjects (filter->cvGray, filter->cvCascade,
        filter->cvStorage, filter->scale_factor, filter->min_neighbors,
        filter->flags, cvSize (filter->min_size_width, filter->min_size_height)
#if (CV_MAJOR_VERSION >= 2) && (CV_MINOR_VERSION >= 2)
        , cvSize (filter->min_size_width + 2, filter->min_size_height + 2)
#endif
        );

    if (faces && faces->total > 0) {
      msg = gst_facedetect_message_new (filter, buf);
      g_value_init (&facelist, GST_TYPE_LIST);
    }

    for (i = 0; i < (faces ? faces->total : 0); i++) {
      CvRect *r = (CvRect *) cvGetSeqElem (faces, i);
      GValue value = { 0 };

      GstStructure *s = gst_structure_new ("face",
          "x", G_TYPE_UINT, r->x,
          "y", G_TYPE_UINT, r->y,
          "width", G_TYPE_UINT, r->width,
          "height", G_TYPE_UINT, r->height, NULL);

      GstMessage *m = gst_message_new_element (GST_OBJECT (filter), s);

      g_value_init (&value, GST_TYPE_STRUCTURE);
      gst_value_set_structure (&value, s);
      gst_value_list_append_value (&facelist, &value);
      g_value_unset (&value);

      gst_element_post_message (GST_ELEMENT (filter), m);

      if (filter->display) {
        if (gst_buffer_is_writable (buf)) {
          CvPoint center;
          int radius;
          center.x = cvRound ((r->x + r->width * 0.5));
          center.y = cvRound ((r->y + r->height * 0.5));
          radius = cvRound ((r->width + r->height) * 0.25);
          cvCircle (img, center, radius, CV_RGB (255, 32, 32), 3, 8, 0);
        } else {
          GST_DEBUG_OBJECT (filter, "Buffer is not writable, not drawing "
              "circles for faces");
        }
      }

    }

    if (msg) {
      gst_structure_set_value (msg->structure, "faces", &facelist);
      g_value_unset (&facelist);
      gst_element_post_message (GST_ELEMENT (filter), msg);
    }
  }

  return GST_FLOW_OK;
}
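/* Illustrative sketch only (not part of the element): how an application
 * bus handler might read back the "faces" list posted above. Only the
 * "faces" field and the per-face x/y/width/height fields are taken from the
 * code above; a real handler would also match the structure name chosen by
 * gst_facedetect_message_new (), which is not shown in this excerpt. */
static void
example_handle_facedetect_message (GstMessage * msg)
{
  const GstStructure *s;
  const GValue *faces;
  guint i, n;

  if (GST_MESSAGE_TYPE (msg) != GST_MESSAGE_ELEMENT)
    return;

  s = gst_message_get_structure (msg);
  if (s == NULL || !gst_structure_has_field (s, "faces"))
    return;

  faces = gst_structure_get_value (s, "faces");
  n = gst_value_list_get_size (faces);
  for (i = 0; i < n; i++) {
    const GstStructure *face =
        gst_value_get_structure (gst_value_list_get_value (faces, i));
    guint x, y, w, h;

    if (gst_structure_get_uint (face, "x", &x) &&
        gst_structure_get_uint (face, "y", &y) &&
        gst_structure_get_uint (face, "width", &w) &&
        gst_structure_get_uint (face, "height", &h))
      g_print ("face at %u,%u size %ux%u\n", x, y, w, h);
  }
}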
static GstCaps *
gst_rtp_h263p_pay_sink_getcaps (GstBaseRTPPayload * payload, GstPad * pad)
{
  GstRtpH263PPay *rtph263ppay;
  GstCaps *caps = NULL;
  GstCaps *peercaps = NULL;
  GstCaps *intersect = NULL;
  guint i;

  rtph263ppay = GST_RTP_H263P_PAY (payload);

  peercaps = gst_pad_peer_get_caps (GST_BASE_RTP_PAYLOAD_SRCPAD (payload));
  if (!peercaps)
    return
        gst_caps_copy (gst_pad_get_pad_template_caps
        (GST_BASE_RTP_PAYLOAD_SINKPAD (payload)));

  intersect = gst_caps_intersect (peercaps,
      gst_pad_get_pad_template_caps (GST_BASE_RTP_PAYLOAD_SRCPAD (payload)));
  gst_caps_unref (peercaps);

  if (gst_caps_is_empty (intersect))
    return intersect;

  caps = gst_caps_new_empty ();
  for (i = 0; i < gst_caps_get_size (intersect); i++) {
    GstStructure *s = gst_caps_get_structure (intersect, i);
    const gchar *encoding_name = gst_structure_get_string (s, "encoding-name");

    if (!strcmp (encoding_name, "H263-2000")) {
      const gchar *profile_str = gst_structure_get_string (s, "profile");
      const gchar *level_str = gst_structure_get_string (s, "level");
      int profile = 0;
      int level = 0;

      if (profile_str && level_str) {
        gboolean i = FALSE, j = FALSE, l = FALSE, t = FALSE, f = FALSE,
            v = FALSE;
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            NULL);

        profile = atoi (profile_str);
        level = atoi (level_str);

        /* These profiles are defined in the H.263 Annex X */
        switch (profile) {
          case 0:
            /* The Baseline Profile (Profile 0) */
            break;
          case 1:
            /* H.320 Coding Efficiency Version 2 Backward-Compatibility Profile
             * (Profile 1)
             * Baseline + Annexes I, J, L.4 and T
             */
            i = j = l = t = TRUE;
            break;
          case 2:
            /* Version 1 Backward-Compatibility Profile (Profile 2)
             * Baseline + Annex F
             */
            i = j = l = t = f = TRUE;
            break;
          case 3:
            /* Version 2 Interactive and Streaming Wireless Profile
             * Baseline + Annexes I, J, T
             */
            i = j = t = TRUE;
            break;
          case 4:
            /* Version 3 Interactive and Streaming Wireless Profile (Profile 4)
             * Baseline + Annexes I, J, T, V, W.6.3.8,
             */
            /* Missing W.6.3.8 */
            i = j = t = v = TRUE;
            break;
          case 5:
            /* Conversational High Compression Profile (Profile 5)
             * Baseline + Annexes F, I, J, L.4, T, D, U
             */
            /* Missing D, U */
            f = i = j = l = t = TRUE;
            break;
          case 6:
            /* Conversational Internet Profile (Profile 6)
             * Baseline + Annexes F, I, J, L.4, T, D, U and
             * K with arbitrary slice ordering
             */
            /* Missing D, U, K with arbitrary slice ordering */
            f = i = j = l = t = TRUE;
            break;
          case 7:
            /* Conversational Interlace Profile (Profile 7)
             * Baseline + Annexes F, I, J, L.4, T, D, U,  W.6.3.11
             */
            /* Missing D, U, W.6.3.11 */
            f = i = j = l = t = TRUE;
            break;
          case 8:
            /* High Latency Profile (Profile 8)
             * Baseline + Annexes F, I, J, L.4, T, D, U, P.5, O.1.1 and
             * K with arbitrary slice ordering
             */
            /* Missing D, U, P.5, O.1.1 */
            f = i = j = l = t = TRUE;
            break;
        }


        if (f || i || j || t || l || v) {
          GValue list = { 0 };
          GValue vstr = { 0 };

          g_value_init (&list, GST_TYPE_LIST);
          g_value_init (&vstr, G_TYPE_STRING);

          g_value_set_static_string (&vstr, "h263");
          gst_value_list_append_value (&list, &vstr);
          g_value_set_static_string (&vstr, "h263p");
          gst_value_list_append_value (&list, &vstr);

          if (l || v) {
            g_value_set_static_string (&vstr, "h263pp");
            gst_value_list_append_value (&list, &vstr);
          }
          g_value_unset (&vstr);

          gst_structure_set_value (new_s, "h263version", &list);
          g_value_unset (&list);
        } else {
          gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
        }


        if (!f)
          gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!i)
          gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!j)
          gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!t)
          gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!l)
          gst_structure_set (new_s, "annex-l", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!v)
          gst_structure_set (new_s, "annex-v", G_TYPE_BOOLEAN, FALSE, NULL);


        if (level <= 10 || level == 45) {
          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          gst_caps_merge_structure (caps, new_s);
        } else if (level <= 20) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 40) {

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          gst_caps_merge_structure (caps, new_s);
        } else if (level <= 50) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 60) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 70) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 576,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 480,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          gst_caps_merge_structure (caps, s_copy);
        } else {
          gst_caps_merge_structure (caps, new_s);
        }

      } else {
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            "h263version", G_TYPE_STRING, "h263",
            NULL);

        GST_DEBUG_OBJECT (rtph263ppay, "No profile or level specified"
            " for H263-2000, defaulting to baseline H263");

        gst_caps_merge_structure (caps, new_s);
      }
    } else {
      gboolean f = FALSE, i = FALSE, j = FALSE, t = FALSE;
      /* FIXME: ffmpeg supports Annex K too, how do we express it?
       *   guint k;
       */
      const gchar *str;
      GstStructure *new_s = gst_structure_new ("video/x-h263",
          "variant", G_TYPE_STRING, "itu",
          NULL);
      gboolean added = FALSE;

      str = gst_structure_get_string (s, "f");
      if (str && !strcmp (str, "1"))
        f = TRUE;

      str = gst_structure_get_string (s, "i");
      if (str && !strcmp (str, "1"))
        i = TRUE;

      str = gst_structure_get_string (s, "j");
      if (str && !strcmp (str, "1"))
        j = TRUE;

      str = gst_structure_get_string (s, "t");
      if (str && !strcmp (str, "1"))
        t = TRUE;

      if (f || i || j || t) {
        GValue list = { 0 };
        GValue vstr = { 0 };

        g_value_init (&list, GST_TYPE_LIST);
        g_value_init (&vstr, G_TYPE_STRING);

        g_value_set_static_string (&vstr, "h263");
        gst_value_list_append_value (&list, &vstr);
        g_value_set_static_string (&vstr, "h263p");
        gst_value_list_append_value (&list, &vstr);
        g_value_unset (&vstr);

        gst_structure_set_value (new_s, "h263version", &list);
        g_value_unset (&list);
      } else {
        gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
      }

      if (!f)
        gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!i)
        gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!j)
        gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!t)
        gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);


      str = gst_structure_get_string (s, "custom");
      if (str) {
        unsigned int xmax, ymax, mpi;
        if (sscanf (str, "%u,%u,%u", &xmax, &ymax, &mpi) == 3) {
          if (!(xmax % 4) && !(ymax % 4) && mpi >= 1 && mpi <= 32) {
            caps_append (caps, new_s, xmax, ymax, mpi);
            added = TRUE;
          } else {
            GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI"
                " %u x %u at %u, ignoring", xmax, ymax, mpi);
          }
        } else {
          GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI: %s,"
              " ignoring", str);
        }
      }

      str = gst_structure_get_string (s, "16cif");
      if (str) {
        int mpi = atoi (str);
        caps_append (caps, new_s, 1408, 1152, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "4cif");
      if (str) {
        int mpi = atoi (str);
        caps_append (caps, new_s, 704, 576, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "cif");
      if (str) {
        int mpi = atoi (str);
        caps_append (caps, new_s, 352, 288, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "qcif");
      if (str) {
        int mpi = atoi (str);
        caps_append (caps, new_s, 176, 144, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "sqcif");
      if (str) {
        int mpi = atoi (str);
        caps_append (caps, new_s, 128, 96, mpi);
        added = TRUE;
      }

      if (added)
        gst_structure_free (new_s);
      else
        gst_caps_merge_structure (caps, new_s);
    }
  }

  gst_caps_unref (intersect);

  return caps;
}
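/* Assumed shape of the caps_append () helper used above (its definition is
 * not shown in this excerpt, so treat this as an illustrative sketch): copy
 * the prototype structure, limit width/height to the advertised maximum
 * picture size, cap the framerate at 30000/(1001 * MPI) per the minimum
 * picture interval, and merge the result into the caps. */
static void
caps_append (GstCaps * caps, GstStructure * in_s, guint x, guint y, guint mpi)
{
  GstStructure *s;

  if (!in_s)
    return;

  if (mpi < 1 || mpi > 32)
    return;

  s = gst_structure_copy (in_s);

  gst_structure_set (s,
      "width", GST_TYPE_INT_RANGE, 1, x,
      "height", GST_TYPE_INT_RANGE, 1, y,
      "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001 * mpi, NULL);

  gst_caps_merge_structure (caps, s);
}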
Example #7
void eServiceMP3Record::handleUridecNotifySource(GObject *object, GParamSpec *unused, gpointer user_data)
{
	GstElement *source = NULL;
	eServiceMP3Record *_this = (eServiceMP3Record*)user_data;
	g_object_get(object, "source", &source, NULL);
	if (source)
	{
		if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "ssl-strict") != 0)
		{
			g_object_set(G_OBJECT(source), "ssl-strict", FALSE, NULL);
		}
		if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "user-agent") != 0 && !_this->m_useragent.empty())
		{
			g_object_set(G_OBJECT(source), "user-agent", _this->m_useragent.c_str(), NULL);
		}
		if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "extra-headers") != 0 && !_this->m_extra_headers.empty())
		{
#if GST_VERSION_MAJOR < 1
			GstStructure *extras = gst_structure_empty_new("extras");
#else
			GstStructure *extras = gst_structure_new_empty("extras");
#endif
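			/* m_extra_headers is expected in "name=value&name=value&..." form */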
			size_t pos = 0;
			while (pos != std::string::npos)
			{
				std::string name, value;
				size_t start = pos;
				size_t len = std::string::npos;
				pos = _this->m_extra_headers.find('=', pos);
				if (pos != std::string::npos)
				{
					len = pos - start;
					pos++;
					name = _this->m_extra_headers.substr(start, len);
					start = pos;
					len = std::string::npos;
					pos = _this->m_extra_headers.find('&', pos);
					if (pos != std::string::npos)
					{
						len = pos - start;
						pos++;
					}
					value = _this->m_extra_headers.substr(start, len);
				}
				if (!name.empty() && !value.empty())
				{
					GValue header;
					eDebug("[eServiceMP3Record] handleUridecNotifySource setting extra-header '%s:%s'", name.c_str(), value.c_str());
					memset(&header, 0, sizeof(GValue));
					g_value_init(&header, G_TYPE_STRING);
					g_value_set_string(&header, value.c_str());
					gst_structure_set_value(extras, name.c_str(), &header);
					g_value_unset(&header);
				}
				else
				{
					eDebug("[eServiceMP3Record] handleUridecNotifySource invalid header format %s", _this->m_extra_headers.c_str());
					break;
				}
			}
			if (gst_structure_n_fields(extras) > 0)
			{
				g_object_set(G_OBJECT(source), "extra-headers", extras, NULL);
			}
			gst_structure_free(extras);
		}
		gst_object_unref(source);
	}
}
static GstCaps *
gst_video_crop_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * filter_caps)
{
  GstVideoCrop *vcrop;
  GstCaps *other_caps;
  gint dy, dx, i, left, right, bottom, top;
  gboolean w_dynamic, h_dynamic;

  vcrop = GST_VIDEO_CROP (trans);

  GST_OBJECT_LOCK (vcrop);

  GST_LOG_OBJECT (vcrop, "l=%d,r=%d,b=%d,t=%d",
      vcrop->prop_left, vcrop->prop_right, vcrop->prop_bottom, vcrop->prop_top);

  w_dynamic = (vcrop->prop_left == -1 || vcrop->prop_right == -1);
  h_dynamic = (vcrop->prop_top == -1 || vcrop->prop_bottom == -1);

  left = (vcrop->prop_left == -1) ? 0 : vcrop->prop_left;
  right = (vcrop->prop_right == -1) ? 0 : vcrop->prop_right;
  bottom = (vcrop->prop_bottom == -1) ? 0 : vcrop->prop_bottom;
  top = (vcrop->prop_top == -1) ? 0 : vcrop->prop_top;

  GST_OBJECT_UNLOCK (vcrop);

  if (direction == GST_PAD_SRC) {
    dx = left + right;
    dy = top + bottom;
  } else {
    dx = 0 - (left + right);
    dy = 0 - (top + bottom);
  }

  GST_LOG_OBJECT (vcrop, "transforming caps %" GST_PTR_FORMAT, caps);

  other_caps = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (caps); ++i) {
    const GValue *v;
    GstStructure *structure, *new_structure;
    GValue w_val = { 0, }, h_val = {
    0,};

    structure = gst_caps_get_structure (caps, i);

    v = gst_structure_get_value (structure, "width");
    if (!gst_video_crop_transform_dimension_value (v, dx, &w_val, direction,
            w_dynamic)) {
      GST_WARNING_OBJECT (vcrop, "could not tranform width value with dx=%d"
          ", caps structure=%" GST_PTR_FORMAT, dx, structure);
      continue;
    }

    v = gst_structure_get_value (structure, "height");
    if (!gst_video_crop_transform_dimension_value (v, dy, &h_val, direction,
            h_dynamic)) {
      g_value_unset (&w_val);
      GST_WARNING_OBJECT (vcrop, "could not tranform height value with dy=%d"
          ", caps structure=%" GST_PTR_FORMAT, dy, structure);
      continue;
    }

    new_structure = gst_structure_copy (structure);
    gst_structure_set_value (new_structure, "width", &w_val);
    gst_structure_set_value (new_structure, "height", &h_val);
    g_value_unset (&w_val);
    g_value_unset (&h_val);
    GST_LOG_OBJECT (vcrop, "transformed structure %2d: %" GST_PTR_FORMAT
        " => %" GST_PTR_FORMAT, i, structure, new_structure);
    gst_caps_append_structure (other_caps, new_structure);
  }

  if (!gst_caps_is_empty (other_caps) && filter_caps) {
    GstCaps *tmp = gst_caps_intersect_full (filter_caps, other_caps,
        GST_CAPS_INTERSECT_FIRST);
    gst_caps_replace (&other_caps, tmp);
    gst_caps_unref (tmp);
  }

  return other_caps;
}
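/* Minimal sketch (an assumption, not the element's actual helper) of what
 * gst_video_crop_transform_dimension_value () used above does for the
 * simplest case of a fixed integer dimension: shift it by the crop delta and
 * clamp it to a sane range. The real helper presumably also handles
 * GST_TYPE_INT_RANGE values and the dynamic (-1) crop properties. */
static gboolean
example_transform_fixed_dimension (const GValue * src_val, gint delta,
    GValue * dest_val)
{
  gint64 new_size;

  if (!G_VALUE_HOLDS_INT (src_val))
    return FALSE;

  new_size = (gint64) g_value_get_int (src_val) + delta;
  g_value_init (dest_val, G_TYPE_INT);
  g_value_set_int (dest_val, (gint) CLAMP (new_size, 1, G_MAXINT));
  return TRUE;
}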
GstCaps* convert_videoformatsdescription_to_caps (const std::vector<tcam::VideoFormatDescription>& descriptions)
{
    GstCaps* caps = gst_caps_new_empty();

    for (const auto& desc : descriptions)
    {
        if (desc.get_fourcc() == 0)
        {
            tcam_info("Format has empty fourcc. Ignoring");
            continue;
        }

        const char* caps_string = tcam_fourcc_to_gst_1_0_caps_string(desc.get_fourcc());

        if (caps_string == nullptr)
        {
            tcam_warning("Format has empty caps string. Ignoring %s",
                         tcam::fourcc_to_description(desc.get_fourcc()));
            continue;
        }

        // tcam_error("Found '%s' pixel format string", caps_string);

        std::vector<struct tcam_resolution_description> res = desc.get_resolutions();

        for (const auto& r : res)
        {
            int min_width = r.min_size.width;
            int min_height = r.min_size.height;

            int max_width = r.max_size.width;
            int max_height = r.max_size.height;

            if (r.type == TCAM_RESOLUTION_TYPE_RANGE)
            {
                std::vector<struct tcam_image_size> framesizes = tcam::get_standard_resolutions(r.min_size,
                                                                                                r.max_size);

                // check if min/max are already in the vector.
                // some devices return std resolutions as max
                if (r.min_size != framesizes.front())
                {
                    framesizes.insert(framesizes.begin(), r.min_size);
                }

                if (r.max_size != framesizes.back())
                {
                    framesizes.push_back(r.max_size);
                }

                for (const auto& reso : framesizes)
                {
                    GstStructure* structure = gst_structure_from_string (caps_string, NULL);

                    std::vector<double> framerates = desc.get_framerates(reso);

                    if (framerates.empty())
                    {
                        // tcam_log(TCAM_LOG_WARNING, "No available framerates. Ignoring format.");
                        continue;
                    }

                    GValue fps_list = G_VALUE_INIT;
                    g_value_init(&fps_list, GST_TYPE_LIST);

                    for (const auto& f : framerates)
                    {
                        int frame_rate_numerator;
                        int frame_rate_denominator;
                        gst_util_double_to_fraction(f,
                                                    &frame_rate_numerator,
                                                    &frame_rate_denominator);

                        if ((frame_rate_denominator == 0) || (frame_rate_numerator == 0))
                        {
                            continue;
                        }

                        GValue fraction = G_VALUE_INIT;
                        g_value_init(&fraction, GST_TYPE_FRACTION);
                        gst_value_set_fraction(&fraction, frame_rate_numerator, frame_rate_denominator);
                        gst_value_list_append_value(&fps_list, &fraction);
                        g_value_unset(&fraction);
                    }


                    gst_structure_set (structure,
                                       "width", G_TYPE_INT, reso.width,
                                       "height", G_TYPE_INT, reso.height,
                                       NULL);

                    gst_structure_take_value(structure, "framerate", &fps_list);
                    gst_caps_append_structure (caps, structure);

                }

                // finally also add the range to allow unusual settings like 1920x96@90fps
                GstStructure* structure = gst_structure_from_string (caps_string, NULL);

                GValue w = G_VALUE_INIT;
                g_value_init(&w, GST_TYPE_INT_RANGE);
                gst_value_set_int_range(&w, min_width, max_width);

                GValue h = G_VALUE_INIT;
                g_value_init(&h, GST_TYPE_INT_RANGE);
                gst_value_set_int_range(&h, min_height, max_height);

                std::vector<double> fps = desc.get_frame_rates(r);

                if (fps.empty())
                {
                    // GST_ERROR("Could not find any framerates for format");
                    continue;
                }

                int fps_min_num;
                int fps_min_den;
                int fps_max_num;
                int fps_max_den;
                gst_util_double_to_fraction(*std::min_element(fps.begin(), fps.end()),
                                            &fps_min_num,
                                            &fps_min_den);
                gst_util_double_to_fraction(*std::max_element(fps.begin(), fps.end()),
                                            &fps_max_num,
                                            &fps_max_den);

                GValue f = G_VALUE_INIT;
                g_value_init(&f, GST_TYPE_FRACTION_RANGE);

                gst_value_set_fraction_range_full(&f,
                                                  fps_min_num, fps_min_den,
                                                  fps_max_num, fps_max_den);

                gst_structure_set_value(structure, "width", &w);
                gst_structure_set_value(structure,"height", &h);
                gst_structure_set_value(structure,"framerate", &f);
                gst_caps_append_structure(caps, structure);
            }
            else
            {
                GstStructure* structure = gst_structure_from_string (caps_string, NULL);

                fill_structure_fixed_resolution(structure, desc, r);
                gst_caps_append_structure (caps, structure);
            }
        }

    }

    return caps;
}
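Caps built up this way are typically installed as a source pad template so that negotiation can pick from them later; a minimal usage sketch (the template name and the caller are assumptions, not part of the tcam code):

/* Hypothetical usage: expose generated caps as an always-available src
 * pad template; the template keeps its own reference to the caps. */
static void
add_src_template_sketch (GstElementClass * klass, GstCaps * caps)
{
    GstPadTemplate *templ;

    templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps);
    gst_element_class_add_pad_template (klass, templ);
    gst_caps_unref (caps);
}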
Example #10
0
static GstCaps *
gst_cv_laplace_transform_caps (GstBaseTransform * trans, GstPadDirection dir,
                               GstCaps * caps, GstCaps * filter)
{
    GstCaps *to, *ret;
    GstCaps *templ;
    GstStructure *structure;
    GstPad *other;
    gint i;

    to = gst_caps_new_empty ();

    for (i = 0; i < gst_caps_get_size (caps); i++) {
        const GValue *v;
        GValue list = { 0, };
        GValue val = { 0, };

        structure = gst_structure_copy (gst_caps_get_structure (caps, i));

        g_value_init (&list, GST_TYPE_LIST);

        g_value_init (&val, G_TYPE_STRING);
        g_value_set_string (&val, "GRAY8");
        gst_value_list_append_value (&list, &val);
        g_value_unset (&val);

        g_value_init (&val, G_TYPE_STRING);
#if G_BYTE_ORDER == G_BIG_ENDIAN
        g_value_set_string (&val, "GRAY16_BE");
#else
        g_value_set_string (&val, "GRAY16_LE");
#endif
        gst_value_list_append_value (&list, &val);
        g_value_unset (&val);

        v = gst_structure_get_value (structure, "format");

        gst_value_list_merge (&val, v, &list);
        gst_structure_set_value (structure, "format", &val);
        g_value_unset (&val);
        g_value_unset (&list);

        gst_structure_remove_field (structure, "colorimetry");
        gst_structure_remove_field (structure, "chroma-site");

        gst_caps_append_structure (to, structure);

    }

    /* filter against set allowed caps on the pad */
    other = (dir == GST_PAD_SINK) ? trans->srcpad : trans->sinkpad;
    templ = gst_pad_get_pad_template_caps (other);
    ret = gst_caps_intersect (to, templ);
    gst_caps_unref (to);
    gst_caps_unref (templ);

    if (ret && filter) {
        GstCaps *intersection;

        intersection =
            gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref (ret);
        ret = intersection;
    }

    return ret;

}
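For context, a transform_caps implementation like the one above is wired into the base transform vfunc table in class_init; a hedged sketch, taking an untyped class pointer so no class struct name has to be assumed:

/* Hypothetical class_init fragment: install the caps transform vfunc. */
static void
laplace_class_init_sketch (gpointer klass)
{
    GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);

    trans_class->transform_caps =
        GST_DEBUG_FUNCPTR (gst_cv_laplace_transform_caps);
}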
Example #11
0
static GstCaps *
gst_base_video_encoder_sink_getcaps (GstPad * pad)
{
  GstBaseVideoEncoder *base_video_encoder;
  const GstCaps *templ_caps;
  GstCaps *allowed;
  GstCaps *fcaps, *filter_caps;
  gint i, j;

  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));

  /* FIXME: Allow subclass to override this? */

  /* Allow downstream to specify width/height/framerate/PAR constraints
   * and forward them upstream for video converters to handle
   */
  templ_caps =
      gst_pad_get_pad_template_caps (GST_BASE_VIDEO_CODEC_SINK_PAD
      (base_video_encoder));
  allowed =
      gst_pad_get_allowed_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
      (base_video_encoder));
  if (!allowed || gst_caps_is_empty (allowed) || gst_caps_is_any (allowed)) {
    fcaps = gst_caps_copy (templ_caps);
    goto done;
  }

  GST_LOG_OBJECT (base_video_encoder, "template caps %" GST_PTR_FORMAT,
      templ_caps);
  GST_LOG_OBJECT (base_video_encoder, "allowed caps %" GST_PTR_FORMAT, allowed);

  filter_caps = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (templ_caps); i++) {
    GQuark q_name =
        gst_structure_get_name_id (gst_caps_get_structure (templ_caps, i));

    for (j = 0; j < gst_caps_get_size (allowed); j++) {
      const GstStructure *allowed_s = gst_caps_get_structure (allowed, j);
      const GValue *val;
      GstStructure *s;

      s = gst_structure_id_empty_new (q_name);
      if ((val = gst_structure_get_value (allowed_s, "width")))
        gst_structure_set_value (s, "width", val);
      if ((val = gst_structure_get_value (allowed_s, "height")))
        gst_structure_set_value (s, "height", val);
      if ((val = gst_structure_get_value (allowed_s, "framerate")))
        gst_structure_set_value (s, "framerate", val);
      if ((val = gst_structure_get_value (allowed_s, "pixel-aspect-ratio")))
        gst_structure_set_value (s, "pixel-aspect-ratio", val);

      gst_caps_merge_structure (filter_caps, s);
    }
  }

  fcaps = gst_caps_intersect (filter_caps, templ_caps);
  gst_caps_unref (filter_caps);

done:

  gst_caps_replace (&allowed, NULL);

  GST_LOG_OBJECT (base_video_encoder, "Returning caps %" GST_PTR_FORMAT, fcaps);

  return fcaps;
}
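Note that plain gst_caps_intersect does not guarantee that the first argument's ordering survives into the result; when downstream's preference order should be preserved, gst_caps_intersect_full with GST_CAPS_INTERSECT_FIRST makes that explicit, as several of the other examples here do. A small illustrative helper (a sketch, not part of the encoder base class):

/* Illustrative only: intersect while preserving the ordering of the
 * downstream caps, so downstream preference wins during negotiation. */
static GstCaps *
intersect_keep_downstream_order (GstCaps * downstream, GstCaps * ours)
{
  return gst_caps_intersect_full (downstream, ours, GST_CAPS_INTERSECT_FIRST);
}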
Example #12
0
static gboolean
gst_xviddec_setcaps (GstPad * pad, GstCaps * caps)
{
    GstXvidDec *dec = GST_XVIDDEC (GST_PAD_PARENT (pad));
    GstStructure *structure;
    GstCaps *allowed_caps;
    const GValue *val;

    GST_LOG_OBJECT (dec, "caps %" GST_PTR_FORMAT, caps);

    /* if there's something old around, remove it */
    if (dec->handle) {
        gst_xviddec_unset (dec);
    }

    structure = gst_caps_get_structure (caps, 0);
    gst_structure_get_int (structure, "width", &dec->width);
    gst_structure_get_int (structure, "height", &dec->height);

    /* perhaps some fps info */
    val = gst_structure_get_value (structure, "framerate");
    if ((val != NULL) && GST_VALUE_HOLDS_FRACTION (val)) {
        dec->fps_n = gst_value_get_fraction_numerator (val);
        dec->fps_d = gst_value_get_fraction_denominator (val);
    } else {
        dec->fps_n = -1;
        dec->fps_d = 1;
    }

    /* perhaps some par info */
    val = gst_structure_get_value (structure, "pixel-aspect-ratio");
    if (val != NULL && GST_VALUE_HOLDS_FRACTION (val)) {
        dec->par_n = gst_value_get_fraction_numerator (val);
        dec->par_d = gst_value_get_fraction_denominator (val);
    } else {
        dec->par_n = 1;
        dec->par_d = 1;
    }

    /* we try to find the preferred/accept csp */
    allowed_caps = gst_pad_get_allowed_caps (dec->srcpad);
    if (!allowed_caps) {
        GST_DEBUG_OBJECT (dec, "... but no peer, using template caps");
        /* need to copy because get_allowed_caps returns a ref,
           and get_pad_template_caps doesn't */
        allowed_caps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad));
    }
    GST_LOG_OBJECT (dec, "allowed source caps %" GST_PTR_FORMAT, allowed_caps);

    /* pick the first one ... */
    structure = gst_caps_get_structure (allowed_caps, 0);
    val = gst_structure_get_value (structure, "format");
    if (val != NULL && G_VALUE_TYPE (val) == GST_TYPE_LIST) {
        GValue temp = { 0, };
        gst_value_init_and_copy (&temp, gst_value_list_get_value (val, 0));
        gst_structure_set_value (structure, "format", &temp);
        g_value_unset (&temp);
    }

    /* ... and use its info to get the csp */
    dec->csp = gst_xvid_structure_to_csp (structure);
    if (dec->csp == -1) {
        GST_WARNING_OBJECT (dec, "failed to decide on colorspace, using I420");
        dec->csp = XVID_CSP_I420;
    }

    dec->outbuf_size =
        gst_xvid_image_get_size (dec->csp, dec->width, dec->height);

    GST_LOG_OBJECT (dec, "csp=%d, outbuf_size=%d", dec->csp, dec->outbuf_size);

    gst_caps_unref (allowed_caps);

    /* now set up xvid ... */
    if (!gst_xviddec_setup (dec)) {
        GST_ELEMENT_ERROR (GST_ELEMENT (dec), LIBRARY, INIT, (NULL), (NULL));
        return FALSE;
    }

    return gst_xviddec_negotiate (dec, NULL);
}
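Picking the first entry of the format list by hand, as done above, is essentially manual caps fixation; with the fixation helpers in newer core versions the same intent can be written more compactly (a hedged sketch; exact API availability depends on the GStreamer version targeted):

/* Hypothetical alternative: keep only the first structure and let core
 * fixate any fields that remain unfixed. Takes ownership of 'allowed'. */
static GstCaps *
pick_preferred_caps_sketch (GstCaps * allowed)
{
    GstCaps *caps = gst_caps_truncate (allowed);

    return gst_caps_fixate (caps);
}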
Example #13
0
GstStructure *payloadInfoToStructure(const PPayloadInfo &info, const QString &media)
{
	GstStructure *out = gst_structure_empty_new("application/x-rtp");

	{
		GValue gv;
		memset(&gv, 0, sizeof(GValue));
		g_value_init(&gv, G_TYPE_STRING);
		g_value_set_string(&gv, media.toLatin1().data());
		gst_structure_set_value(out, "media", &gv);
	}

	// payload id field required
	if(info.id == -1)
	{
		gst_structure_free(out);
		return 0;
	}

	{
		GValue gv;
		memset(&gv, 0, sizeof(GValue));
		g_value_init(&gv, G_TYPE_INT);
		g_value_set_int(&gv, info.id);
		gst_structure_set_value(out, "payload", &gv);
	}

	// name required for payload values 96 or greater
	if(info.id >= 96 && info.name.isEmpty())
	{
		gst_structure_free(out);
		return 0;
	}

	{
		GValue gv;
		memset(&gv, 0, sizeof(GValue));
		g_value_init(&gv, G_TYPE_STRING);
		g_value_set_string(&gv, info.name.toLatin1().data());
		gst_structure_set_value(out, "encoding-name", &gv);
	}

	if(info.clockrate != -1)
	{
		GValue gv;
		memset(&gv, 0, sizeof(GValue));
		g_value_init(&gv, G_TYPE_INT);
		g_value_set_int(&gv, info.clockrate);
		gst_structure_set_value(out, "clock-rate", &gv);
	}

	if(info.channels != -1)
	{
		GValue gv;
		memset(&gv, 0, sizeof(GValue));
		g_value_init(&gv, G_TYPE_STRING);
		g_value_set_string(&gv, QString::number(info.channels).toLatin1().data());
		gst_structure_set_value(out, "encoding-params", &gv);
	}

	foreach(const PPayloadInfo::Parameter &i, info.parameters)
	{
		QString value = i.value;

		// FIXME: is there a better way to detect when we should do this conversion?
		if(i.name == "configuration" && (info.name.toUpper() == "THEORA" || info.name.toUpper() == "VORBIS"))
		{
			QByteArray config = hexDecode(value);
			if(config.isEmpty())
			{
				gst_structure_free(out);
				return 0;
			}

			value = QString::fromLatin1(config.toBase64());
		}

		GValue gv;
		memset(&gv, 0, sizeof(GValue));
		g_value_init(&gv, G_TYPE_STRING);
		g_value_set_string(&gv, value.toLatin1().data());
		gst_structure_set_value(out, i.name.toLatin1().data(), &gv);
	}

	return out;
}
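The structure returned above is usually wrapped in caps before it is applied to an RTP pad; a tiny caller sketch (the helper name is made up, and ownership of the structure passes to the caps):

/* Hypothetical helper: wrap the payload structure in caps, or return NULL
 * if the payload info was incomplete. */
static GstCaps *
structure_to_rtp_caps_sketch (GstStructure * s)
{
	if (s == NULL)
		return NULL;

	return gst_caps_new_full (s, NULL);
}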
Example #14
0
static GstCaps *
gst_caps_setter_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * cfilter)
{
  GstCapsSetter *filter = GST_CAPS_SETTER (trans);
  GstCaps *ret = NULL, *filter_caps = NULL;
  GstStructure *structure, *merge;
  const gchar *name;
  gint i, j;

  GST_DEBUG_OBJECT (trans,
      "receiving caps: %" GST_PTR_FORMAT ", with filter: %" GST_PTR_FORMAT,
      caps, cfilter);

  /* pass filter caps upstream, or any if no filter */
  if (direction != GST_PAD_SINK) {
    if (!cfilter || gst_caps_is_empty (cfilter)) {
      return gst_caps_ref (GST_CAPS_ANY);
    } else {
      return gst_caps_ref (cfilter);
    }
  }

  ret = gst_caps_copy (caps);

  /* this function is always called with a simple caps */
  if (!GST_CAPS_IS_SIMPLE (ret))
    return ret;

  structure = gst_caps_get_structure (ret, 0);
  name = gst_structure_get_name (structure);

  GST_OBJECT_LOCK (filter);
  filter_caps = gst_caps_ref (filter->caps);
  GST_OBJECT_UNLOCK (filter);

  for (i = 0; i < gst_caps_get_size (filter_caps); ++i) {
    merge = gst_caps_get_structure (filter_caps, i);
    if (gst_structure_has_name (merge, name) || !filter->join) {

      if (!filter->join)
        gst_structure_set_name (structure, gst_structure_get_name (merge));

      if (filter->replace)
        gst_structure_remove_all_fields (structure);

      for (j = 0; j < gst_structure_n_fields (merge); ++j) {
        const gchar *fname;

        fname = gst_structure_nth_field_name (merge, j);
        gst_structure_set_value (structure, fname,
            gst_structure_get_value (merge, fname));
      }
    }
  }

  GST_DEBUG_OBJECT (trans, "returning caps: %" GST_PTR_FORMAT, ret);

  gst_caps_unref (filter_caps);

  return ret;
}
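The caps merged in above come from the element's caps property; setting such a property typically just parses a caps string and swaps it in under the object lock. A hedged sketch, not the element's actual property code:

/* Hypothetical property handling: replace the stored caps from a string. */
static gboolean
set_caps_property_sketch (GstCapsSetter * filter, const gchar * str)
{
  GstCaps *new_caps = gst_caps_from_string (str);

  if (new_caps == NULL)
    return FALSE;

  GST_OBJECT_LOCK (filter);
  gst_caps_replace (&filter->caps, new_caps);
  GST_OBJECT_UNLOCK (filter);

  gst_caps_unref (new_caps);
  return TRUE;
}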
static GstCaps *
gst_deinterlace2_getcaps (GstPad * pad)
{
  GstCaps *ret;
  GstDeinterlace2 *self = GST_DEINTERLACE2 (gst_pad_get_parent (pad));
  GstPad *otherpad;
  gint len;
  const GstCaps *ourcaps;
  GstCaps *peercaps;

  GST_OBJECT_LOCK (self);

  otherpad = (pad == self->srcpad) ? self->sinkpad : self->srcpad;

  ourcaps = gst_pad_get_pad_template_caps (pad);
  peercaps = gst_pad_peer_get_caps (otherpad);

  if (peercaps) {
    ret = gst_caps_intersect (ourcaps, peercaps);
    gst_caps_unref (peercaps);
  } else {
    ret = gst_caps_copy (ourcaps);
  }

  GST_OBJECT_UNLOCK (self);

  if (self->fields == GST_DEINTERLACE2_ALL) {
    for (len = gst_caps_get_size (ret); len > 0; len--) {
      GstStructure *s = gst_caps_get_structure (ret, len - 1);
      const GValue *val;

      val = gst_structure_get_value (s, "framerate");
      if (!val)
        continue;

      if (G_VALUE_TYPE (val) == GST_TYPE_FRACTION) {
        gint n, d;

        n = gst_value_get_fraction_numerator (val);
        d = gst_value_get_fraction_denominator (val);

        if (!gst_fraction_double (&n, &d, pad != self->srcpad)) {
          goto error;
        }

        gst_structure_set (s, "framerate", GST_TYPE_FRACTION, n, d, NULL);
      } else if (G_VALUE_TYPE (val) == GST_TYPE_FRACTION_RANGE) {
        const GValue *min, *max;
        GValue nrange = { 0, };
        GValue nmin = { 0, };
        GValue nmax = { 0, };
        gint n, d;

        g_value_init (&nrange, GST_TYPE_FRACTION_RANGE);
        g_value_init (&nmin, GST_TYPE_FRACTION);
        g_value_init (&nmax, GST_TYPE_FRACTION);

        min = gst_value_get_fraction_range_min (val);
        max = gst_value_get_fraction_range_max (val);

        n = gst_value_get_fraction_numerator (min);
        d = gst_value_get_fraction_denominator (min);

        if (!gst_fraction_double (&n, &d, pad != self->srcpad)) {
          g_value_unset (&nrange);
          g_value_unset (&nmax);
          g_value_unset (&nmin);
          goto error;
        }

        gst_value_set_fraction (&nmin, n, d);

        n = gst_value_get_fraction_numerator (max);
        d = gst_value_get_fraction_denominator (max);

        if (!gst_fraction_double (&n, &d, pad != self->srcpad)) {
          g_value_unset (&nrange);
          g_value_unset (&nmax);
          g_value_unset (&nmin);
          goto error;
        }

        gst_value_set_fraction (&nmax, n, d);
        gst_value_set_fraction_range (&nrange, &nmin, &nmax);

        gst_structure_set_value (s, "framerate", &nrange);

        g_value_unset (&nmin);
        g_value_unset (&nmax);
        g_value_unset (&nrange);
      } else if (G_VALUE_TYPE (val) == GST_TYPE_LIST) {
        const GValue *lval;
        GValue nlist = { 0, };
        GValue nval = { 0, };
        gint i;

        g_value_init (&nlist, GST_TYPE_LIST);
        for (i = gst_value_list_get_size (val); i > 0; i--) {
          gint n, d;

          /* list indices run 0..size-1 while the loop counter runs size..1 */
          lval = gst_value_list_get_value (val, i - 1);

          if (G_VALUE_TYPE (lval) != GST_TYPE_FRACTION)
            continue;

          n = gst_value_get_fraction_numerator (lval);
          d = gst_value_get_fraction_denominator (lval);

          /* Double/Half the framerate but if this fails simply
           * skip this value from the list */
          if (!gst_fraction_double (&n, &d, pad != self->srcpad)) {
            continue;
          }

          g_value_init (&nval, GST_TYPE_FRACTION);

          gst_value_set_fraction (&nval, n, d);
          gst_value_list_append_value (&nlist, &nval);
          g_value_unset (&nval);
        }
        gst_structure_set_value (s, "framerate", &nlist);
        g_value_unset (&nlist);
      }
    }
  }

  GST_DEBUG_OBJECT (pad, "Returning caps %" GST_PTR_FORMAT, ret);

  return ret;

error:
  GST_ERROR_OBJECT (pad, "Unable to transform peer caps");
  gst_caps_unref (ret);
  return NULL;
}
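gst_fraction_double is a file-local helper that is not shown here; conceptually it doubles or halves a framerate fraction depending on the pad direction. A hedged sketch built on the public fraction utilities (the real helper's overflow handling may differ):

/* Hypothetical sketch: double or halve a fraction in place, failing if the
 * multiplication cannot be represented. */
static gboolean
fraction_double_sketch (gint * n, gint * d, gboolean halve)
{
  if (halve)
    return gst_util_fraction_multiply (*n, *d, 1, 2, n, d);

  return gst_util_fraction_multiply (*n, *d, 2, 1, n, d);
}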
Example #16
0
/*
 * Performs the face detection
 */
static GstFlowReturn
gst_face_detect_transform_ip (GstOpencvVideoFilter * base, GstBuffer * buf,
                              IplImage * img)
{
    GstFaceDetect *filter = GST_FACE_DETECT (base);

    if (filter->cvFaceDetect) {
        GstMessage *msg = NULL;
        GstStructure *s;
        GValue facelist = { 0 };
        GValue facedata = { 0 };
        CvSeq *faces;
        CvSeq *mouth = NULL, *nose = NULL, *eyes = NULL;
        gint i;
        gboolean do_display = FALSE;
        gboolean post_msg = FALSE;

        if (filter->display) {
            if (gst_buffer_is_writable (buf)) {
                do_display = TRUE;
            } else {
                GST_LOG_OBJECT (filter, "Buffer is not writable, not drawing faces.");
            }
        }

        cvCvtColor (img, filter->cvGray, CV_RGB2GRAY);
        cvClearMemStorage (filter->cvStorage);

        faces = gst_face_detect_run_detector (filter, filter->cvFaceDetect,
                                              filter->min_size_width, filter->min_size_height);

        switch (filter->updates) {
        case GST_FACEDETECT_UPDATES_EVERY_FRAME:
            post_msg = TRUE;
            break;
        case GST_FACEDETECT_UPDATES_ON_CHANGE:
            if (faces && faces->total > 0) {
                if (!filter->face_detected)
                    post_msg = TRUE;
            } else {
                if (filter->face_detected) {
                    post_msg = TRUE;
                }
            }
            break;
        case GST_FACEDETECT_UPDATES_ON_FACE:
            if (faces && faces->total > 0) {
                post_msg = TRUE;
            } else {
                post_msg = FALSE;
            }
            break;
        case GST_FACEDETECT_UPDATES_NONE:
            post_msg = FALSE;
            break;
        default:
            post_msg = TRUE;
            break;
        }

        filter->face_detected = faces ? faces->total > 0 : FALSE;

        if (post_msg) {
            msg = gst_face_detect_message_new (filter, buf);
            g_value_init (&facelist, GST_TYPE_LIST);
        }

        for (i = 0; i < (faces ? faces->total : 0); i++) {
            CvRect *r = (CvRect *) cvGetSeqElem (faces, i);
            guint mw = filter->min_size_width / 8;
            guint mh = filter->min_size_height / 8;
            guint rnx = 0, rny = 0, rnw, rnh;
            guint rmx = 0, rmy = 0, rmw, rmh;
            guint rex = 0, rey = 0, rew, reh;
            gboolean have_nose, have_mouth, have_eyes;

            /* detect face features */

            if (filter->cvNoseDetect) {
                rnx = r->x + r->width / 4;
                rny = r->y + r->height / 4;
                rnw = r->width / 2;
                rnh = r->height / 2;
                cvSetImageROI (filter->cvGray, cvRect (rnx, rny, rnw, rnh));
                nose =
                    gst_face_detect_run_detector (filter, filter->cvNoseDetect, mw, mh);
                have_nose = (nose && nose->total);
                cvResetImageROI (filter->cvGray);
            } else {
                have_nose = FALSE;
            }

            if (filter->cvMouthDetect) {
                rmx = r->x;
                rmy = r->y + r->height / 2;
                rmw = r->width;
                rmh = r->height / 2;
                cvSetImageROI (filter->cvGray, cvRect (rmx, rmy, rmw, rmh));
                mouth =
                    gst_face_detect_run_detector (filter, filter->cvMouthDetect, mw,
                                                  mh);
                have_mouth = (mouth && mouth->total);
                cvResetImageROI (filter->cvGray);
            } else {
                have_mouth = FALSE;
            }

            if (filter->cvEyesDetect) {
                rex = r->x;
                rey = r->y;
                rew = r->width;
                reh = r->height / 2;
                cvSetImageROI (filter->cvGray, cvRect (rex, rey, rew, reh));
                eyes =
                    gst_face_detect_run_detector (filter, filter->cvEyesDetect, mw, mh);
                have_eyes = (eyes && eyes->total);
                cvResetImageROI (filter->cvGray);
            } else {
                have_eyes = FALSE;
            }

            GST_LOG_OBJECT (filter,
                            "%2d/%2d: x,y = %4u,%4u: w.h = %4u,%4u : features(e,n,m) = %d,%d,%d",
                            i, faces->total, r->x, r->y, r->width, r->height,
                            have_eyes, have_nose, have_mouth);
            if (post_msg) {
                s = gst_structure_new ("face",
                                       "x", G_TYPE_UINT, r->x,
                                       "y", G_TYPE_UINT, r->y,
                                       "width", G_TYPE_UINT, r->width,
                                       "height", G_TYPE_UINT, r->height, NULL);
                if (have_nose) {
                    CvRect *sr = (CvRect *) cvGetSeqElem (nose, 0);
                    GST_LOG_OBJECT (filter, "nose/%d: x,y = %4u,%4u: w.h = %4u,%4u",
                                    nose->total, rnx + sr->x, rny + sr->y, sr->width, sr->height);
                    gst_structure_set (s,
                                       "nose->x", G_TYPE_UINT, rnx + sr->x,
                                       "nose->y", G_TYPE_UINT, rny + sr->y,
                                       "nose->width", G_TYPE_UINT, sr->width,
                                       "nose->height", G_TYPE_UINT, sr->height, NULL);
                }
                if (have_mouth) {
                    CvRect *sr = (CvRect *) cvGetSeqElem (mouth, 0);
                    GST_LOG_OBJECT (filter, "mouth/%d: x,y = %4u,%4u: w.h = %4u,%4u",
                                    mouth->total, rmx + sr->x, rmy + sr->y, sr->width, sr->height);
                    gst_structure_set (s,
                                       "mouth->x", G_TYPE_UINT, rmx + sr->x,
                                       "mouth->y", G_TYPE_UINT, rmy + sr->y,
                                       "mouth->width", G_TYPE_UINT, sr->width,
                                       "mouth->height", G_TYPE_UINT, sr->height, NULL);
                }
                if (have_eyes) {
                    CvRect *sr = (CvRect *) cvGetSeqElem (eyes, 0);
                    GST_LOG_OBJECT (filter, "eyes/%d: x,y = %4u,%4u: w.h = %4u,%4u",
                                    eyes->total, rex + sr->x, rey + sr->y, sr->width, sr->height);
                    gst_structure_set (s,
                                       "eyes->x", G_TYPE_UINT, rex + sr->x,
                                       "eyes->y", G_TYPE_UINT, rey + sr->y,
                                       "eyes->width", G_TYPE_UINT, sr->width,
                                       "eyes->height", G_TYPE_UINT, sr->height, NULL);
                }

                g_value_init (&facedata, GST_TYPE_STRUCTURE);
                g_value_take_boxed (&facedata, s);
                gst_value_list_append_value (&facelist, &facedata);
                g_value_unset (&facedata);
                s = NULL;
            }

            if (do_display) {
                CvPoint center;
                CvSize axes;
                gdouble w, h;
                gint cb = 255 - ((i & 3) << 7);
                gint cg = 255 - ((i & 12) << 5);
                gint cr = 255 - ((i & 48) << 3);

                w = r->width / 2;
                h = r->height / 2;
                center.x = cvRound ((r->x + w));
                center.y = cvRound ((r->y + h));
                axes.width = w;
                axes.height = h * 1.25; /* tweak for face form */
                cvEllipse (img, center, axes, 0.0, 0.0, 360.0, CV_RGB (cr, cg, cb),
                           3, 8, 0);

                if (have_nose) {
                    CvRect *sr = (CvRect *) cvGetSeqElem (nose, 0);

                    w = sr->width / 2;
                    h = sr->height / 2;
                    center.x = cvRound ((rnx + sr->x + w));
                    center.y = cvRound ((rny + sr->y + h));
                    axes.width = w;
                    axes.height = h * 1.25;       /* tweak for nose form */
                    cvEllipse (img, center, axes, 0.0, 0.0, 360.0, CV_RGB (cr, cg, cb),
                               1, 8, 0);
                }
                if (have_mouth) {
                    CvRect *sr = (CvRect *) cvGetSeqElem (mouth, 0);

                    w = sr->width / 2;
                    h = sr->height / 2;
                    center.x = cvRound ((rmx + sr->x + w));
                    center.y = cvRound ((rmy + sr->y + h));
                    axes.width = w * 1.5; /* tweak for mouth form */
                    axes.height = h;
                    cvEllipse (img, center, axes, 0.0, 0.0, 360.0, CV_RGB (cr, cg, cb),
                               1, 8, 0);
                }
                if (have_eyes) {
                    CvRect *sr = (CvRect *) cvGetSeqElem (eyes, 0);

                    w = sr->width / 2;
                    h = sr->height / 2;
                    center.x = cvRound ((rex + sr->x + w));
                    center.y = cvRound ((rey + sr->y + h));
                    axes.width = w * 1.5; /* tweak for eyes form */
                    axes.height = h;
                    cvEllipse (img, center, axes, 0.0, 0.0, 360.0, CV_RGB (cr, cg, cb),
                               1, 8, 0);
                }
            }
            gst_buffer_add_video_region_of_interest_meta (buf, "face",
                    (guint) r->x, (guint) r->y, (guint) r->width, (guint) r->height);
        }

        if (post_msg) {
            gst_structure_set_value ((GstStructure *) gst_message_get_structure (msg),
                                     "faces", &facelist);
            g_value_unset (&facelist);
            gst_element_post_message (GST_ELEMENT (filter), msg);
        }
    }

    return GST_FLOW_OK;
}
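On the application side, the element message built above can be picked off the bus and its "faces" list walked entry by entry; a minimal sketch (the field names mirror what the element sets above, but treat the handler as a whole as an assumption rather than shipped API):

/* Hypothetical bus handler: print every face rectangle from the message. */
static void
handle_face_message_sketch (GstMessage * msg)
{
    const GstStructure *s = gst_message_get_structure (msg);
    const GValue *faces;
    guint i;

    if (s == NULL || !gst_structure_has_field (s, "faces"))
        return;

    faces = gst_structure_get_value (s, "faces");
    for (i = 0; i < gst_value_list_get_size (faces); i++) {
        const GstStructure *face =
            gst_value_get_structure (gst_value_list_get_value (faces, i));
        guint x, y, w, h;

        if (gst_structure_get_uint (face, "x", &x) &&
            gst_structure_get_uint (face, "y", &y) &&
            gst_structure_get_uint (face, "width", &w) &&
            gst_structure_get_uint (face, "height", &h))
            g_print ("face at %u,%u (%ux%u)\n", x, y, w, h);
    }
}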
Example #17
0
static GstCaps *
gst_v4lsrc_get_caps (GstBaseSrc * src)
{
    GstCaps *list;
    GstV4lSrc *v4lsrc = GST_V4LSRC (src);
    struct video_capability *vcap = &GST_V4LELEMENT (v4lsrc)->vcap;
    gint width = GST_V4LELEMENT (src)->vcap.minwidth;
    gint height = GST_V4LELEMENT (src)->vcap.minheight;
    gint i;
    gint fps_n, fps_d;
    GList *item;

    if (!GST_V4L_IS_OPEN (GST_V4LELEMENT (v4lsrc))) {
        return gst_v4lsrc_get_any_caps ();
    }

    if (!v4lsrc->autoprobe) {
        /* FIXME: query current caps and return those, with _any appended */
        return gst_v4lsrc_get_any_caps ();
    }

    if (!v4lsrc->colorspaces) {
        GST_DEBUG_OBJECT (v4lsrc, "Checking supported palettes");
        for (i = 0; all_palettes[i] != -1; i++) {
            /* try palette out */
            if (!gst_v4lsrc_try_capture (v4lsrc, width, height, all_palettes[i]))
                continue;
            GST_DEBUG_OBJECT (v4lsrc, "Added palette %d (%s) to supported list",
                              all_palettes[i], gst_v4lsrc_palette_name (all_palettes[i]));
            v4lsrc->colorspaces = g_list_append (v4lsrc->colorspaces,
                                                 GINT_TO_POINTER (all_palettes[i]));
        }
        GST_DEBUG_OBJECT (v4lsrc, "%d palette(s) supported",
                          g_list_length (v4lsrc->colorspaces));
        if (v4lsrc->autoprobe_fps) {
            GST_DEBUG_OBJECT (v4lsrc, "autoprobing framerates");
            v4lsrc->fps_list = gst_v4lsrc_get_fps_list (v4lsrc);
        }
    }


    if (!gst_v4lsrc_get_fps (v4lsrc, &fps_n, &fps_d)) {
        fps_n = 0;
        fps_d = 1;
    }

    list = gst_caps_new_empty ();
    for (item = v4lsrc->colorspaces; item != NULL; item = item->next) {
        GstCaps *one;

        one = gst_v4lsrc_palette_to_caps (GPOINTER_TO_INT (item->data));
        if (!one) {
            GST_WARNING_OBJECT (v4lsrc, "Palette %d gave no caps\n",
                                GPOINTER_TO_INT (item->data));
            continue;
        }

        GST_DEBUG_OBJECT (v4lsrc,
                          "Device reports w: %d-%d, h: %d-%d, fps: %d/%d for palette %d",
                          vcap->minwidth, vcap->maxwidth, vcap->minheight, vcap->maxheight,
                          fps_n, fps_d, GPOINTER_TO_INT (item->data));

        if (vcap->minwidth < vcap->maxwidth) {
            gst_caps_set_simple (one, "width", GST_TYPE_INT_RANGE, vcap->minwidth,
                                 vcap->maxwidth, NULL);
        } else {
            gst_caps_set_simple (one, "width", G_TYPE_INT, vcap->minwidth, NULL);
        }
        if (vcap->minheight < vcap->maxheight) {
            gst_caps_set_simple (one, "height", GST_TYPE_INT_RANGE, vcap->minheight,
                                 vcap->maxheight, NULL);
        } else {
            gst_caps_set_simple (one, "height", G_TYPE_INT, vcap->minheight, NULL);
        }

        if (v4lsrc->autoprobe_fps) {
            GstStructure *structure = gst_caps_get_structure (one, 0);

            if (v4lsrc->fps_list) {
                gst_structure_set_value (structure, "framerate", v4lsrc->fps_list);
            } else {
                gst_structure_set (structure, "framerate", GST_TYPE_FRACTION,
                                   fps_n, fps_d, NULL);
            }
        } else {
            gst_caps_set_simple (one, "framerate", GST_TYPE_FRACTION_RANGE,
                                 1, 1, 100, 1, NULL);
        }

        GST_DEBUG_OBJECT (v4lsrc, "caps: %" GST_PTR_FORMAT, one);
        gst_caps_append (list, one);
    }

    return list;
}
static void
output_loop (gpointer data)
{
    GstPad *pad;
    GOmxCore *gomx;
    GOmxPort *out_port;
    GstOmxBaseFilter2 *self;
    GstFlowReturn ret = GST_FLOW_OK;
    GstOmxBaseFilter2Class *bclass;

    pad = data;
    self = GST_OMX_BASE_FILTER2 (gst_pad_get_parent (pad));
    gomx = self->gomx;

    bclass = GST_OMX_BASE_FILTER2_GET_CLASS (self);

    GST_LOG_OBJECT (self, "begin");

    if (!self->ready)
    {
        g_error ("not ready");
        return;
    }

    out_port = (GOmxPort *)gst_pad_get_element_private(pad);

    if (G_LIKELY (out_port->enabled))
    {
        gpointer obj = g_omx_port_recv (out_port);

        if (G_UNLIKELY (!obj))
        {
            GST_WARNING_OBJECT (self, "null buffer: leaving");
            ret = GST_FLOW_WRONG_STATE;
            goto leave;
        }

        if (G_LIKELY (GST_IS_BUFFER (obj)))
        {
            if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (obj, GST_BUFFER_FLAG_IN_CAPS)))
            {
                GstCaps *caps = NULL;
                GstStructure *structure;
                GValue value = { 0 };

                caps = gst_pad_get_negotiated_caps (pad);
                caps = gst_caps_make_writable (caps);
                structure = gst_caps_get_structure (caps, 0);

                g_value_init (&value, GST_TYPE_BUFFER);
                gst_value_set_buffer (&value, obj);
                gst_buffer_unref (obj);
                gst_structure_set_value (structure, "codec_data", &value);
                g_value_unset (&value);

                gst_pad_set_caps (pad, caps);
            }
            else
            {
                GstBuffer *buf = GST_BUFFER (obj);
                ret = bclass->push_buffer (self, buf);
                GST_DEBUG_OBJECT (self, "ret=%s", gst_flow_get_name (ret));
                // HACK: don't care if one of the output pads is not connected
                ret = GST_FLOW_OK;
            }
        }
        else if (GST_IS_EVENT (obj))
        {
            GST_DEBUG_OBJECT (self, "got eos");
            gst_pad_push_event (pad, obj);
            ret = GST_FLOW_UNEXPECTED;
            goto leave;
        }

    }

leave:

    self->last_pad_push_return = ret;

    if (gomx->omx_error != OMX_ErrorNone)
    {
        GST_DEBUG_OBJECT (self, "omx_error=%s", g_omx_error_to_str (gomx->omx_error));
        ret = GST_FLOW_ERROR;
    }

    if (ret != GST_FLOW_OK)
    {
        GST_INFO_OBJECT (self, "pause task, reason:  %s",
                         gst_flow_get_name (ret));
        gst_pad_pause_task (pad);
    }

    GST_LOG_OBJECT (self, "end");

    gst_object_unref (self);
}
static GstFlowReturn
gst_slvideo_buffer_alloc (GstBaseSink * bsink, guint64 offset, guint size,
			  GstCaps * caps, GstBuffer ** buf)
{
	gint width, height;
	GstStructure *structure = NULL;
	GstSLVideo *slvideo;
	slvideo = GST_SLVIDEO(bsink);

	// caps == requested caps
	// we can ignore these and reverse-negotiate our preferred dimensions with
	// the peer if we like - we need to do this to obey dynamic resize requests
	// flowing in from the app.
	structure = gst_caps_get_structure (caps, 0);
	if (!gst_structure_get_int(structure, "width", &width) ||
	    !gst_structure_get_int(structure, "height", &height))
	{
		GST_WARNING_OBJECT (slvideo, "no width/height in caps %" GST_PTR_FORMAT, caps);
		return GST_FLOW_NOT_NEGOTIATED;
	}

	GstBuffer *newbuf = gst_buffer_new();
	bool made_bufferdata_ptr = false;
#define MAXDEPTHHACK 4
	
	GST_OBJECT_LOCK(slvideo);
	if (slvideo->resize_forced_always) // app is giving us a fixed size to work with
	{
		gint slwantwidth, slwantheight;
		slwantwidth = slvideo->resize_try_width;
		slwantheight = slvideo->resize_try_height;
	
		if (slwantwidth != width ||
		    slwantheight != height)
		{
			// don't like requested caps, we will issue our own suggestion - copy
			// the requested caps but substitute our own width and height and see
			// if our peer is happy with that.
		
			GstCaps *desired_caps;
			GstStructure *desired_struct;
			desired_caps = gst_caps_copy (caps);
			desired_struct = gst_caps_get_structure (desired_caps, 0);
			
			GValue value = {0};
			g_value_init(&value, G_TYPE_INT);
			g_value_set_int(&value, slwantwidth);
			gst_structure_set_value (desired_struct, "width", &value);
			g_value_unset(&value);
			g_value_init(&value, G_TYPE_INT);
			g_value_set_int(&value, slwantheight);
			gst_structure_set_value (desired_struct, "height", &value);
			
			if (gst_pad_peer_accept_caps (GST_VIDEO_SINK_PAD (slvideo),
							desired_caps))
			{
				// todo: re-use buffers from a pool?
				// todo: set MALLOCDATA to null, set DATA to point straight to shm?
				
				// peer likes our cap suggestion
				DEBUGMSG("peer loves us :)");
				GST_BUFFER_SIZE(newbuf) = slwantwidth * slwantheight * MAXDEPTHHACK;
				GST_BUFFER_MALLOCDATA(newbuf) = (guint8*)g_malloc(GST_BUFFER_SIZE(newbuf));
				GST_BUFFER_DATA(newbuf) = GST_BUFFER_MALLOCDATA(newbuf);
				gst_buffer_set_caps (GST_BUFFER_CAST(newbuf), desired_caps);

				made_bufferdata_ptr = true;
			} else {
				// peer hates our cap suggestion
				INFOMSG("peer hates us :(");
				gst_caps_unref(desired_caps);
			}
		}
	}

	GST_OBJECT_UNLOCK(slvideo);

	if (!made_bufferdata_ptr) // need to fallback to malloc at original size
	{
		GST_BUFFER_SIZE(newbuf) = width * height * MAXDEPTHHACK;
		GST_BUFFER_MALLOCDATA(newbuf) = (guint8*)g_malloc(GST_BUFFER_SIZE(newbuf));
		GST_BUFFER_DATA(newbuf) = GST_BUFFER_MALLOCDATA(newbuf);
		gst_buffer_set_caps (GST_BUFFER_CAST(newbuf), caps);
	}

	*buf = GST_BUFFER_CAST(newbuf);

	return GST_FLOW_OK;
}
Example #20
0
static GstCaps *
gst_smpte_alpha_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * from, GstCaps * filter)
{
  GstCaps *result, *tmp_caps, *tmpl_caps = NULL;
  gint i, j;

  tmp_caps = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (from); i++) {
    GstStructure *structure;
    const GValue *val, *lval;
    GValue list = { 0, };
    GValue aval = { 0, };
    const gchar *str;

    structure = gst_structure_copy (gst_caps_get_structure (from, i));
    /* we can convert between I420 and AYUV, so wherever one of them
     * appears in the format field, offer the other one as well */
    val = gst_structure_get_value (structure, "format");
    if (val && GST_VALUE_HOLDS_LIST (val)) {
      gboolean seen_ayuv = FALSE, seen_i420 = FALSE;

      g_value_init (&list, GST_TYPE_LIST);
      for (j = 0; j < gst_value_list_get_size (val); j++) {
        lval = gst_value_list_get_value (val, j);
        if ((str = g_value_get_string (lval))) {
          if (strcmp (str, "AYUV") == 0) {
            seen_ayuv = TRUE;
          } else if (strcmp (str, "I420") == 0) {
            seen_i420 = TRUE;
          }
        }
      }
      if (seen_ayuv && !seen_i420) {
        str = "I420";
      } else if (seen_i420 && !seen_ayuv) {
        str = "AYUV";
      } else
        str = NULL;
      if (str) {
        g_value_copy (val, &list);
        g_value_init (&aval, G_TYPE_STRING);
        g_value_set_string (&aval, str);
        gst_value_list_append_value (&list, &aval);
        g_value_reset (&aval);
        gst_structure_set_value (structure, "format", &list);
        g_value_unset (&list);
      }
    } else if (val && G_VALUE_HOLDS_STRING (val)) {
      if ((str = g_value_get_string (val)) &&
          ((strcmp (str, "AYUV") == 0) || (strcmp (str, "I420") == 0))) {
        g_value_init (&list, GST_TYPE_LIST);
        g_value_init (&aval, G_TYPE_STRING);
        g_value_set_string (&aval, "AYUV");
        gst_value_list_append_value (&list, &aval);
        g_value_reset (&aval);
        g_value_set_string (&aval, "I420");
        gst_value_list_append_value (&list, &aval);
        g_value_reset (&aval);
        gst_structure_set_value (structure, "format", &list);
        g_value_unset (&list);
      }
    } else {
      gst_structure_remove_field (structure, "format");
    }

    gst_structure_remove_field (structure, "colorimetry");
    gst_structure_remove_field (structure, "chroma-site");

    gst_caps_append_structure (tmp_caps, structure);
  }

  /* Get the appropriate template */
  if (direction == GST_PAD_SINK) {
    tmpl_caps =
        gst_static_pad_template_get_caps (&gst_smpte_alpha_src_template);
  } else if (direction == GST_PAD_SRC) {
    tmpl_caps =
        gst_static_pad_template_get_caps (&gst_smpte_alpha_sink_template);
  } else {
    g_assert_not_reached ();
  }

  /* Intersect with our template caps */
  result = gst_caps_intersect (tmp_caps, tmpl_caps);
  gst_caps_unref (tmpl_caps);
  gst_caps_unref (tmp_caps);

  result = gst_caps_simplify (result);

  GST_LOG_OBJECT (trans, "transformed %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT,
      from, result);

  if (filter) {
    GstCaps *intersection;

    GST_DEBUG_OBJECT (trans, "Using filter caps %" GST_PTR_FORMAT, filter);
    intersection =
        gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (result);
    result = intersection;
    GST_DEBUG_OBJECT (trans, "Intersection %" GST_PTR_FORMAT, result);
  }

  return result;
}
Example #21
0
static GstCaps *
create_sink_caps (const GstAmcCodecInfo * codec_info)
{
    GstCaps *ret;
    gint i;

    ret = gst_caps_new_empty ();

    for (i = 0; i < codec_info->n_supported_types; i++) {
        const GstAmcCodecType *type = &codec_info->supported_types[i];

        if (strcmp (type->mime, "audio/mpeg") == 0) {
            GstStructure *tmp;

            tmp = gst_structure_new ("audio/mpeg",
                                     "mpegversion", G_TYPE_INT, 1,
                                     "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
            ret = gst_caps_merge_structure (ret, tmp);
        } else if (strcmp (type->mime, "audio/3gpp") == 0) {
            GstStructure *tmp;

            tmp = gst_structure_new ("audio/AMR",
                                     "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
            ret = gst_caps_merge_structure (ret, tmp);
        } else if (strcmp (type->mime, "audio/amr-wb") == 0) {
            GstStructure *tmp;

            tmp = gst_structure_new ("audio/AMR-WB",
                                     "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
            ret = gst_caps_merge_structure (ret, tmp);
        } else if (strcmp (type->mime, "audio/mp4a-latm") == 0) {
            gint j;
            GstStructure *tmp, *tmp2;
            gboolean have_profile = FALSE;
            GValue va = { 0, };
            GValue v = { 0, };

            g_value_init (&va, GST_TYPE_LIST);
            g_value_init (&v, G_TYPE_STRING);
            g_value_set_string (&v, "raw");
            gst_value_list_append_value (&va, &v);
            g_value_set_string (&v, "adts");
            gst_value_list_append_value (&va, &v);
            g_value_unset (&v);

            tmp = gst_structure_new ("audio/mpeg",
                                     "mpegversion", G_TYPE_INT, 4,
                                     "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "framed", G_TYPE_BOOLEAN, TRUE, NULL);
            gst_structure_set_value (tmp, "stream-format", &va);
            g_value_unset (&va);

            for (j = 0; j < type->n_profile_levels; j++) {
                const gchar *profile;

                profile =
                    gst_amc_aac_profile_to_string (type->profile_levels[j].profile);

                if (!profile) {
                    GST_ERROR ("Unable to map AAC profile 0x%08x",
                               type->profile_levels[j].profile);
                    continue;
                }

                tmp2 = gst_structure_copy (tmp);
                gst_structure_set (tmp2, "profile", G_TYPE_STRING, profile, NULL);
                ret = gst_caps_merge_structure (ret, tmp2);

                have_profile = TRUE;
            }

            if (!have_profile) {
                ret = gst_caps_merge_structure (ret, tmp);
            } else {
                gst_structure_free (tmp);
            }
        } else if (strcmp (type->mime, "audio/g711-alaw") == 0) {
            GstStructure *tmp;

            tmp = gst_structure_new ("audio/x-alaw",
                                     "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
            ret = gst_caps_merge_structure (ret, tmp);
        } else if (strcmp (type->mime, "audio/g711-mlaw") == 0) {
            GstStructure *tmp;

            tmp = gst_structure_new ("audio/x-mulaw",
                                     "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
            ret = gst_caps_merge_structure (ret, tmp);
        } else if (strcmp (type->mime, "audio/vorbis") == 0) {
            GstStructure *tmp;

            tmp = gst_structure_new ("audio/x-vorbis",
                                     "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
            ret = gst_caps_merge_structure (ret, tmp);
        } else if (strcmp (type->mime, "audio/flac") == 0) {
            GstStructure *tmp;

            tmp = gst_structure_new ("audio/x-flac",
                                     "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "framed", G_TYPE_BOOLEAN, TRUE, NULL);
            ret = gst_caps_merge_structure (ret, tmp);
        } else if (strcmp (type->mime, "audio/mpeg-L2") == 0) {
            GstStructure *tmp;

            tmp = gst_structure_new ("audio/mpeg",
                                     "mpegversion", G_TYPE_INT, 1,
                                     "layer", G_TYPE_INT, 2,
                                     "rate", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                                     "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
            ret = gst_caps_merge_structure (ret, tmp);
        } else {
            GST_WARNING ("Unsupported mimetype '%s'", type->mime);
        }
    }

    return ret;
}
Example #22
0
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
    GstAravis* gst_aravis = GST_ARAVIS(src);
    GstStructure *structure;
    ArvPixelFormat pixel_format;
    int height, width;
    int bpp, depth;
    const GValue *frame_rate;
    const char *caps_string;
    unsigned int i;
    guint32 fourcc;

    GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

    arv_camera_stop_acquisition (gst_aravis->camera);

    if (gst_aravis->stream != NULL)
        g_object_unref (gst_aravis->stream);

    structure = gst_caps_get_structure (caps, 0);

    gst_structure_get_int (structure, "width", &width);
    gst_structure_get_int (structure, "height", &height);
    frame_rate = gst_structure_get_value (structure, "framerate");
    gst_structure_get_fourcc (structure, "format", &fourcc);
    gst_structure_get_int (structure, "bpp", &bpp);
    gst_structure_get_int (structure, "depth", &depth);

    pixel_format = arv_pixel_format_from_gst_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

    arv_camera_set_region (gst_aravis->camera, 0, 0, width, height);
    arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
    arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

    if (frame_rate != NULL) {
        double dbl_frame_rate;

        dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
                         (double) gst_value_get_fraction_denominator (frame_rate);

        GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
        arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

        if (dbl_frame_rate > 0.0)
            gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
                                                 3e6 / dbl_frame_rate);
        else
            gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
    } else
        gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

    GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs",
                      gst_aravis->buffer_timeout_us);

    GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

    GST_DEBUG_OBJECT (gst_aravis, "Gain       = %d", gst_aravis->gain);
    arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
    GST_DEBUG_OBJECT (gst_aravis, "Actual gain       = %d", arv_camera_get_gain (gst_aravis->camera));

    GST_DEBUG_OBJECT (gst_aravis, "Exposure   = %g µs", gst_aravis->exposure_time_us);
    arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
    GST_DEBUG_OBJECT (gst_aravis, "Actual exposure   = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));

    if (gst_aravis->fixed_caps != NULL)
        gst_caps_unref (gst_aravis->fixed_caps);

    caps_string = arv_pixel_format_to_gst_caps_string (pixel_format);
    if (caps_string != NULL) {
        GstStructure *structure;
        GstCaps *caps;

        caps = gst_caps_new_empty ();
        structure = gst_structure_from_string (caps_string, NULL);
        gst_structure_set (structure,
                           "width", G_TYPE_INT, width,
                           "height", G_TYPE_INT, height,
                           NULL);

        if (frame_rate != NULL)
            gst_structure_set_value (structure, "framerate", frame_rate);

        gst_caps_append_structure (caps, structure);

        gst_aravis->fixed_caps = caps;
    } else
        gst_aravis->fixed_caps = NULL;

    gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
    gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

    for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
        arv_stream_push_buffer (gst_aravis->stream,
                                arv_buffer_new (gst_aravis->payload, NULL));

    GST_LOG_OBJECT (gst_aravis, "Start acquisition");
    arv_camera_start_acquisition (gst_aravis->camera);

    gst_aravis->timestamp_offset = 0;
    gst_aravis->last_timestamp = 0;

    return TRUE;
}
Example #23
0
static void
output_loop (gpointer data)
{
    GstPad *pad;
    GOmxCore *gomx;
    GOmxPort *out_port;
    GstOmxBaseFilter *self;
    GstFlowReturn ret = GST_FLOW_OK;

    pad = data;
    self = GST_OMX_BASE_FILTER (gst_pad_get_parent (pad));
    gomx = self->gomx;

    GST_LOG_OBJECT (self, "begin");

    if (!self->ready)
    {
        g_error ("not ready");
        return;
    }

    out_port = self->out_port;

    if (G_LIKELY (out_port->enabled))
    {
        OMX_BUFFERHEADERTYPE *omx_buffer = NULL;

        GST_LOG_OBJECT (self, "request buffer");
        omx_buffer = g_omx_port_request_buffer (out_port);

        GST_LOG_OBJECT (self, "omx_buffer: %p", omx_buffer);

        if (G_UNLIKELY (!omx_buffer))
        {
            GST_WARNING_OBJECT (self, "null buffer: leaving");
            ret = GST_FLOW_WRONG_STATE;
            goto leave;
        }

        GST_DEBUG_OBJECT (self, "omx_buffer: size=%lu, len=%lu, flags=%lu, offset=%lu, timestamp=%lld",
                          omx_buffer->nAllocLen, omx_buffer->nFilledLen, omx_buffer->nFlags,
                          omx_buffer->nOffset, omx_buffer->nTimeStamp);

        if (G_LIKELY (omx_buffer->nFilledLen > 0))
        {
            GstBuffer *buf;

#if 1
            /** @todo remove this check */
            if (G_LIKELY (self->in_port->enabled))
            {
                GstCaps *caps = NULL;

                caps = gst_pad_get_negotiated_caps (self->srcpad);

                if (!caps)
                {
                    /** @todo We shouldn't be doing this. */
                    GST_WARNING_OBJECT (self, "faking settings changed notification");
                    if (gomx->settings_changed_cb)
                        gomx->settings_changed_cb (gomx);
                }
                else
                {
                    GST_LOG_OBJECT (self, "caps already fixed: %" GST_PTR_FORMAT, caps);
                    gst_caps_unref (caps);
                }
            }
#endif

            /* buf is always null when the output buffer pointer isn't shared. */
            buf = omx_buffer->pAppPrivate;

            /** @todo we need to move all the caps handling to one single
             * place, in the output loop probably. */
            /* 0x80 == OMX_BUFFERFLAG_CODECCONFIG: this buffer carries codec setup data */
            if (G_UNLIKELY (omx_buffer->nFlags & 0x80))
            {
                GstCaps *caps = NULL;
                GstStructure *structure;
                GValue value = { 0 };

                caps = gst_pad_get_negotiated_caps (self->srcpad);
                caps = gst_caps_make_writable (caps);
                structure = gst_caps_get_structure (caps, 0);

                g_value_init (&value, GST_TYPE_BUFFER);
                buf = gst_buffer_new_and_alloc (omx_buffer->nFilledLen);
                memcpy (GST_BUFFER_DATA (buf), omx_buffer->pBuffer + omx_buffer->nOffset, omx_buffer->nFilledLen);
                gst_value_set_buffer (&value, buf);
                gst_buffer_unref (buf);
                gst_structure_set_value (structure, "codec_data", &value);
                g_value_unset (&value);

                gst_pad_set_caps (self->srcpad, caps);
            }
            else if (buf && !(omx_buffer->nFlags & OMX_BUFFERFLAG_EOS))
            {
                GST_BUFFER_SIZE (buf) = omx_buffer->nFilledLen;
                if (self->use_timestamps)
                {
                    GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale_int (omx_buffer->nTimeStamp,
                                                                            GST_SECOND,
                                                                            OMX_TICKS_PER_SECOND);
                }

                omx_buffer->pAppPrivate = NULL;
                omx_buffer->pBuffer = NULL;

                ret = push_buffer (self, buf);

                gst_buffer_unref (buf);
            }
            else
            {
                /* This is only meant for the first OpenMAX buffers,
                 * which need to be pre-allocated. */
                /* Also for the very last one. */
                ret = gst_pad_alloc_buffer_and_set_caps (self->srcpad,
                                                         GST_BUFFER_OFFSET_NONE,
                                                         omx_buffer->nFilledLen,
                                                         GST_PAD_CAPS (self->srcpad),
                                                         &buf);

                if (G_LIKELY (buf))
                {
                    memcpy (GST_BUFFER_DATA (buf), omx_buffer->pBuffer + omx_buffer->nOffset, omx_buffer->nFilledLen);
                    if (self->use_timestamps)
                    {
                        GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale_int (omx_buffer->nTimeStamp,
                                                                                GST_SECOND,
                                                                                OMX_TICKS_PER_SECOND);
                    }

                    if (self->share_output_buffer)
                    {
                        GST_WARNING_OBJECT (self, "couldn't zero-copy");
                        /* If pAppPrivate is NULL, it means it was a dummy
                         * allocation, free it. */
                        if (!omx_buffer->pAppPrivate)
                        {
                            g_free (omx_buffer->pBuffer);
                            omx_buffer->pBuffer = NULL;
                        }
                    }

                    ret = push_buffer (self, buf);
                }
                else
                {
                    GST_WARNING_OBJECT (self, "couldn't allocate buffer of size %d",
                                        omx_buffer->nFilledLen);
                }
            }
        }
        else
        {
            GST_WARNING_OBJECT (self, "empty buffer");
        }

        if (G_UNLIKELY (omx_buffer->nFlags & OMX_BUFFERFLAG_EOS))
        {
            GST_DEBUG_OBJECT (self, "got eos");
            gst_pad_push_event (self->srcpad, gst_event_new_eos ());
            ret = GST_FLOW_UNEXPECTED;
            goto leave;
        }

        if (self->share_output_buffer &&
            !omx_buffer->pBuffer &&
            omx_buffer->nOffset == 0)
        {
            GstBuffer *buf;
            GstFlowReturn result;

            GST_LOG_OBJECT (self, "allocate buffer");
            result = gst_pad_alloc_buffer_and_set_caps (self->srcpad,
                                                        GST_BUFFER_OFFSET_NONE,
                                                        omx_buffer->nAllocLen,
                                                        GST_PAD_CAPS (self->srcpad),
                                                        &buf);

            if (G_LIKELY (result == GST_FLOW_OK))
            {
                gst_buffer_ref (buf);
                omx_buffer->pAppPrivate = buf;

                omx_buffer->pBuffer = GST_BUFFER_DATA (buf);
                omx_buffer->nAllocLen = GST_BUFFER_SIZE (buf);
            }
            else
            {
                GST_WARNING_OBJECT (self, "could not pad allocate buffer, using malloc");
                omx_buffer->pBuffer = g_malloc (omx_buffer->nAllocLen);
            }
        }

        if (self->share_output_buffer &&
            !omx_buffer->pBuffer)
        {
            GST_ERROR_OBJECT (self, "no input buffer to share");
        }

        omx_buffer->nFilledLen = 0;
        GST_LOG_OBJECT (self, "release_buffer");
        g_omx_port_release_buffer (out_port, omx_buffer);
    }

leave:

    self->last_pad_push_return = ret;

    if (gomx->omx_error != OMX_ErrorNone)
        ret = GST_FLOW_ERROR;

    if (ret != GST_FLOW_OK)
    {
        GST_INFO_OBJECT (self, "pause task, reason:  %s",
                         gst_flow_get_name (ret));
        gst_pad_pause_task (self->srcpad);
    }

    GST_LOG_OBJECT (self, "end");

    gst_object_unref (self);
}
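/* A minimal sketch, not taken from gst-openmax: output_loop above attaches the
 * codec configuration bytes to the negotiated src caps as a "codec_data"
 * buffer; a downstream 0.10-style element could read it back roughly like
 * this. The helper name is hypothetical. */
static GstBuffer *
get_codec_data_from_caps (GstCaps * caps)
{
    GstStructure *s;
    const GValue *v;

    if (!caps)
        return NULL;

    s = gst_caps_get_structure (caps, 0);
    v = gst_structure_get_value (s, "codec_data");
    if (!v)
        return NULL;

    /* the GValue holds a GstBuffer; ref it so the caller owns a reference */
    return gst_buffer_ref (gst_value_get_buffer (v));
}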
Example #24
0
static GstCaps *
gst_opus_enc_sink_getcaps (GstAudioEncoder * benc, GstCaps * filter)
{
  GstOpusEnc *enc;
  GstCaps *caps;
  GstCaps *peercaps = NULL;
  GstCaps *intersect = NULL;
  guint i;
  gboolean allow_multistream;

  enc = GST_OPUS_ENC (benc);

  GST_DEBUG_OBJECT (enc, "sink getcaps");

  peercaps = gst_pad_peer_query_caps (GST_AUDIO_ENCODER_SRC_PAD (benc), NULL);
  if (!peercaps) {
    GST_DEBUG_OBJECT (benc, "No peercaps, returning template sink caps");
    return
        gst_caps_copy (gst_pad_get_pad_template_caps
        (GST_AUDIO_ENCODER_SINK_PAD (benc)));
  }

  intersect = gst_caps_intersect (peercaps,
      gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (benc)));
  gst_caps_unref (peercaps);

  if (gst_caps_is_empty (intersect))
    return intersect;

  allow_multistream = FALSE;
  for (i = 0; i < gst_caps_get_size (intersect); i++) {
    GstStructure *s = gst_caps_get_structure (intersect, i);
    gboolean multistream;
    if (gst_structure_get_boolean (s, "multistream", &multistream)) {
      if (multistream) {
        allow_multistream = TRUE;
      }
    } else {
      allow_multistream = TRUE;
    }
  }

  gst_caps_unref (intersect);

  caps =
      gst_caps_copy (gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SINK_PAD
          (benc)));
  if (!allow_multistream) {
    GValue range = { 0 };
    g_value_init (&range, GST_TYPE_INT_RANGE);
    gst_value_set_int_range (&range, 1, 2);
    for (i = 0; i < gst_caps_get_size (caps); i++) {
      GstStructure *s = gst_caps_get_structure (caps, i);
      gst_structure_set_value (s, "channels", &range);
    }
    g_value_unset (&range);
  }

  if (filter) {
    GstCaps *tmp = gst_caps_intersect_full (caps, filter,
        GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = tmp;
  }

  GST_DEBUG_OBJECT (enc, "Returning caps: %" GST_PTR_FORMAT, caps);
  return caps;
}
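/* A minimal standalone sketch (hypothetical caps strings, not from opusenc) of
 * the final filter step above: gst_caps_intersect_full() with
 * GST_CAPS_INTERSECT_FIRST constrains the caps with the filter while keeping
 * the structure order of its first argument. */
static void
filter_intersect_example (void)
{
  GstCaps *caps = gst_caps_from_string ("audio/x-raw, channels=(int)[ 1, 2 ]");
  GstCaps *filter =
      gst_caps_from_string ("audio/x-raw, channels=(int)2, rate=(int)48000");
  GstCaps *res;

  res = gst_caps_intersect_full (caps, filter, GST_CAPS_INTERSECT_FIRST);
  /* res is now "audio/x-raw, channels=(int)2, rate=(int)48000" */

  gst_caps_unref (caps);
  gst_caps_unref (filter);
  gst_caps_unref (res);
}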
Example #25
0
static GstCaps *
gst_vp8_enc_get_caps (GstBaseVideoEncoder * base_video_encoder)
{
  GstCaps *caps;
  const GstVideoState *state;
  GstTagList *tags = NULL;
  const GstTagList *iface_tags;
  GstBuffer *stream_hdr, *vorbiscomment;
  guint8 *data;
  GstStructure *s;
  GValue array = { 0 };
  GValue value = { 0 };

  state = gst_base_video_encoder_get_state (base_video_encoder);

  caps = gst_caps_new_simple ("video/x-vp8",
      "width", G_TYPE_INT, state->width,
      "height", G_TYPE_INT, state->height,
      "framerate", GST_TYPE_FRACTION, state->fps_n,
      state->fps_d,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
      state->par_d, NULL);

  s = gst_caps_get_structure (caps, 0);

  /* put buffers in a fixed list */
  g_value_init (&array, GST_TYPE_ARRAY);
  g_value_init (&value, GST_TYPE_BUFFER);

  /* Create Ogg stream-info */
  stream_hdr = gst_buffer_new_and_alloc (26);
  data = GST_BUFFER_DATA (stream_hdr);

  GST_WRITE_UINT8 (data, 0x4F);
  GST_WRITE_UINT32_BE (data + 1, 0x56503830);   /* "VP80" */
  GST_WRITE_UINT8 (data + 5, 0x01);     /* stream info header */
  GST_WRITE_UINT8 (data + 6, 1);        /* Major version 1 */
  GST_WRITE_UINT8 (data + 7, 0);        /* Minor version 0 */
  GST_WRITE_UINT16_BE (data + 8, state->width);
  GST_WRITE_UINT16_BE (data + 10, state->height);
  GST_WRITE_UINT24_BE (data + 12, state->par_n);
  GST_WRITE_UINT24_BE (data + 15, state->par_d);
  GST_WRITE_UINT32_BE (data + 18, state->fps_n);
  GST_WRITE_UINT32_BE (data + 22, state->fps_d);

  GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
  gst_value_set_buffer (&value, stream_hdr);
  gst_value_array_append_value (&array, &value);
  g_value_unset (&value);
  gst_buffer_unref (stream_hdr);

  iface_tags =
      gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
  if (iface_tags) {
    vorbiscomment =
        gst_tag_list_to_vorbiscomment_buffer ((iface_tags) ? iface_tags : tags,
        (const guint8 *) "OVP80\2 ", 7,
        "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

    GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, vorbiscomment);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    gst_buffer_unref (vorbiscomment);
  }

  gst_structure_set_value (s, "streamheader", &array);
  g_value_unset (&array);

  return caps;
}
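/* A minimal sketch (hypothetical reader, not part of vp8enc) of how the
 * 26-byte "VP80" Ogg stream-info header written above can be parsed back with
 * the matching big-endian read macros. */
static gboolean
parse_vp8_stream_info (const guint8 * data, gsize size,
    guint * width, guint * height)
{
  if (size < 26)
    return FALSE;

  /* magic: 0x4F, "VP80", stream-info header type 0x01 */
  if (data[0] != 0x4F || GST_READ_UINT32_BE (data + 1) != 0x56503830 ||
      data[5] != 0x01)
    return FALSE;

  *width = GST_READ_UINT16_BE (data + 8);
  *height = GST_READ_UINT16_BE (data + 10);
  /* PAR sits at offsets 12/15 (24-bit BE), framerate at 18/22 (32-bit BE) */

  return TRUE;
}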
static GstStructure *gst_avdtp_sink_parse_sbc_caps(
			GstAvdtpSink *self, sbc_capabilities_t *sbc)
{
	GstStructure *structure;
	GValue *value;
	GValue *list;
	gboolean mono, stereo;

	structure = gst_structure_empty_new("audio/x-sbc");
	value = g_value_init(g_new0(GValue, 1), G_TYPE_STRING);

	/* mode */
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	if (sbc->channel_mode & BT_A2DP_CHANNEL_MODE_MONO) {
		g_value_set_static_string(value, "mono");
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->channel_mode & BT_A2DP_CHANNEL_MODE_STEREO) {
		g_value_set_static_string(value, "stereo");
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->channel_mode & BT_A2DP_CHANNEL_MODE_DUAL_CHANNEL) {
		g_value_set_static_string(value, "dual");
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->channel_mode & BT_A2DP_CHANNEL_MODE_JOINT_STEREO) {
		g_value_set_static_string(value, "joint");
		gst_value_list_prepend_value(list, value);
	}
	g_value_unset(value);
	if (list) {
		gst_structure_set_value(structure, "mode", list);
		g_free(list);
		list = NULL;
	}

	/* subbands */
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	value = g_value_init(value, G_TYPE_INT);
	if (sbc->subbands & BT_A2DP_SUBBANDS_4) {
		g_value_set_int(value, 4);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->subbands & BT_A2DP_SUBBANDS_8) {
		g_value_set_int(value, 8);
		gst_value_list_prepend_value(list, value);
	}
	g_value_unset(value);
	if (list) {
		gst_structure_set_value(structure, "subbands", list);
		g_free(list);
		list = NULL;
	}

	/* blocks */
	value = g_value_init(value, G_TYPE_INT);
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	if (sbc->block_length & BT_A2DP_BLOCK_LENGTH_16) {
		g_value_set_int(value, 16);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->block_length & BT_A2DP_BLOCK_LENGTH_12) {
		g_value_set_int(value, 12);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->block_length & BT_A2DP_BLOCK_LENGTH_8) {
		g_value_set_int(value, 8);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->block_length & BT_A2DP_BLOCK_LENGTH_4) {
		g_value_set_int(value, 4);
		gst_value_list_prepend_value(list, value);
	}
	g_value_unset(value);
	if (list) {
		gst_structure_set_value(structure, "blocks", list);
		g_free(list);
		list = NULL;
	}

	/* allocation */
	g_value_init(value, G_TYPE_STRING);
	list = g_value_init(g_new0(GValue,1), GST_TYPE_LIST);
	if (sbc->allocation_method & BT_A2DP_ALLOCATION_LOUDNESS) {
		g_value_set_static_string(value, "loudness");
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->allocation_method & BT_A2DP_ALLOCATION_SNR) {
		g_value_set_static_string(value, "snr");
		gst_value_list_prepend_value(list, value);
	}
	g_value_unset(value);
	if (list) {
		gst_structure_set_value(structure, "allocation", list);
		g_free(list);
		list = NULL;
	}

	/* rate */
	g_value_init(value, G_TYPE_INT);
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	if (sbc->frequency & BT_SBC_SAMPLING_FREQ_48000) {
		g_value_set_int(value, 48000);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->frequency & BT_SBC_SAMPLING_FREQ_44100) {
		g_value_set_int(value, 44100);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->frequency & BT_SBC_SAMPLING_FREQ_32000) {
		g_value_set_int(value, 32000);
		gst_value_list_prepend_value(list, value);
	}
	if (sbc->frequency & BT_SBC_SAMPLING_FREQ_16000) {
		g_value_set_int(value, 16000);
		gst_value_list_prepend_value(list, value);
	}
	g_value_unset(value);
	if (list) {
		gst_structure_set_value(structure, "rate", list);
		g_free(list);
		list = NULL;
	}

	/* bitpool */
	value = g_value_init(value, GST_TYPE_INT_RANGE);
	gst_value_set_int_range(value,
			MIN(sbc->min_bitpool, TEMPLATE_MAX_BITPOOL),
			MIN(sbc->max_bitpool, TEMPLATE_MAX_BITPOOL));
	gst_structure_set_value(structure, "bitpool", value);
	g_value_unset(value);

	/* channels */
	mono = FALSE;
	stereo = FALSE;
	if (sbc->channel_mode & BT_A2DP_CHANNEL_MODE_MONO)
		mono = TRUE;
	if ((sbc->channel_mode & BT_A2DP_CHANNEL_MODE_STEREO) ||
			(sbc->channel_mode &
			BT_A2DP_CHANNEL_MODE_DUAL_CHANNEL) ||
			(sbc->channel_mode &
			BT_A2DP_CHANNEL_MODE_JOINT_STEREO))
		stereo = TRUE;

	if (mono && stereo) {
		g_value_init(value, GST_TYPE_INT_RANGE);
		gst_value_set_int_range(value, 1, 2);
	} else {
		g_value_init(value, G_TYPE_INT);
		if (mono)
			g_value_set_int(value, 1);
		else if (stereo)
			g_value_set_int(value, 2);
		else {
			GST_ERROR_OBJECT(self,
				"Unexpected number of channels");
			g_value_set_int(value, 0);
		}
	}

	gst_structure_set_value(structure, "channels", value);
	g_free(value);

	return structure;
}
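/* A minimal sketch (hypothetical helper): the same kind of value list as above
 * built with stack-allocated GValues and g_value_unset(), the idiom the other
 * getcaps examples in this collection use instead of heap GValues + g_free(). */
static void
set_subbands_list (GstStructure * structure, gboolean has_4, gboolean has_8)
{
	GValue list = { 0 };
	GValue item = { 0 };

	g_value_init (&list, GST_TYPE_LIST);
	g_value_init (&item, G_TYPE_INT);

	if (has_4) {
		g_value_set_int (&item, 4);
		gst_value_list_append_value (&list, &item);
	}
	if (has_8) {
		g_value_set_int (&item, 8);
		gst_value_list_append_value (&list, &item);
	}
	g_value_unset (&item);

	gst_structure_set_value (structure, "subbands", &list);
	g_value_unset (&list);
}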
Example #27
0
static GstCaps *
gst_rtp_h263p_pay_sink_getcaps (GstRTPBasePayload * payload, GstPad * pad,
    GstCaps * filter)
{
  GstRtpH263PPay *rtph263ppay;
  GstCaps *caps = NULL, *templ;
  GstCaps *peercaps = NULL;
  GstCaps *intersect = NULL;
  guint i;

  rtph263ppay = GST_RTP_H263P_PAY (payload);

  peercaps =
      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);

  /* if we're just outputting to udpsink or fakesink or so, we should also
   * accept any input compatible with our sink template caps */
  if (!peercaps || gst_caps_is_any (peercaps)) {
    if (peercaps)
      gst_caps_unref (peercaps);
    caps =
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));
    goto done;
  }

  /* We basically need to differentiate two use-cases here: One where there's
   * a capsfilter after the payloader with caps created from an SDP; in this
   * case the filter caps are fixed and we want to signal to an encoder what
   * we want it to produce. The second case is simply payloader ! depayloader
   * where we are dealing with the depayloader's template caps. In this case
   * we should accept any input compatible with our sink template caps. */
  if (!gst_caps_is_fixed (peercaps)) {
    gst_caps_unref (peercaps);
    caps =
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));
    goto done;
  }

  templ = gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
  intersect = gst_caps_intersect (peercaps, templ);
  gst_caps_unref (peercaps);
  gst_caps_unref (templ);

  if (gst_caps_is_empty (intersect))
    return intersect;

  caps = gst_caps_new_empty ();
  for (i = 0; i < gst_caps_get_size (intersect); i++) {
    GstStructure *s = gst_caps_get_structure (intersect, i);
    const gchar *encoding_name = gst_structure_get_string (s, "encoding-name");

    if (!strcmp (encoding_name, "H263-2000")) {
      const gchar *profile_str = gst_structure_get_string (s, "profile");
      const gchar *level_str = gst_structure_get_string (s, "level");
      int profile = 0;
      int level = 0;

      if (profile_str && level_str) {
        gboolean i = FALSE, j = FALSE, l = FALSE, t = FALSE, f = FALSE,
            v = FALSE;
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            NULL);

        profile = atoi (profile_str);
        level = atoi (level_str);

        /* These profiles are defined in the H.263 Annex X */
        switch (profile) {
          case 0:
            /* The Baseline Profile (Profile 0) */
            break;
          case 1:
            /* H.320 Coding Efficiency Version 2 Backward-Compatibility Profile
             * (Profile 1)
             * Baseline + Annexes I, J, L.4 and T
             */
            i = j = l = t = TRUE;
            break;
          case 2:
            /* Version 1 Backward-Compatibility Profile (Profile 2)
             * Baseline + Annex F
             */
            i = j = l = t = f = TRUE;
            break;
          case 3:
            /* Version 2 Interactive and Streaming Wireless Profile
             * Baseline + Annexes I, J, T
             */
            i = j = t = TRUE;
            break;
          case 4:
            /* Version 3 Interactive and Streaming Wireless Profile (Profile 4)
             * Baseline + Annexes I, J, T, V, W.6.3.8,
             */
            /* Missing W.6.3.8 */
            i = j = t = v = TRUE;
            break;
          case 5:
            /* Conversational High Compression Profile (Profile 5)
             * Baseline + Annexes F, I, J, L.4, T, D, U
             */
            /* Missing D, U */
            f = i = j = l = t = TRUE;
            break;
          case 6:
            /* Conversational Internet Profile (Profile 6)
             * Baseline + Annexes F, I, J, L.4, T, D, U and
             * K with arbitrary slice ordering
             */
            /* Missing D, U, K with arbitrary slice ordering */
            f = i = j = l = t = TRUE;
            break;
          case 7:
            /* Conversational Interlace Profile (Profile 7)
             * Baseline + Annexes F, I, J, L.4, T, D, U,  W.6.3.11
             */
            /* Missing D, U, W.6.3.11 */
            f = i = j = l = t = TRUE;
            break;
          case 8:
            /* High Latency Profile (Profile 8)
             * Baseline + Annexes F, I, J, L.4, T, D, U, P.5, O.1.1 and
             * K with arbitrary slice ordering
             */
            /* Missing D, U, P.5, O.1.1 */
            f = i = j = l = t = TRUE;
            break;
        }


        if (f || i || j || t || l || v) {
          GValue list = { 0 };
          GValue vstr = { 0 };

          g_value_init (&list, GST_TYPE_LIST);
          g_value_init (&vstr, G_TYPE_STRING);

          g_value_set_static_string (&vstr, "h263");
          gst_value_list_append_value (&list, &vstr);
          g_value_set_static_string (&vstr, "h263p");
          gst_value_list_append_value (&list, &vstr);

          if (l || v) {
            g_value_set_static_string (&vstr, "h263pp");
            gst_value_list_append_value (&list, &vstr);
          }
          g_value_unset (&vstr);

          gst_structure_set_value (new_s, "h263version", &list);
          g_value_unset (&list);
        } else {
          gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
        }


        if (!f)
          gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!i)
          gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!j)
          gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!t)
          gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!l)
          gst_structure_set (new_s, "annex-l", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!v)
          gst_structure_set (new_s, "annex-v", G_TYPE_BOOLEAN, FALSE, NULL);


        if (level <= 10 || level == 45) {
          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          caps = gst_caps_merge_structure (caps, new_s);
        } else if (level <= 20) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 40) {

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, new_s);
        } else if (level <= 50) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 60) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 70) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 576,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 480,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else {
          caps = gst_caps_merge_structure (caps, new_s);
        }

      } else {
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            "h263version", G_TYPE_STRING, "h263",
            NULL);

        GST_DEBUG_OBJECT (rtph263ppay, "No profile or level specified"
            " for H263-2000, defaulting to baseline H263");

        caps = gst_caps_merge_structure (caps, new_s);
      }
    } else {
      /* FIXME: ffmpeg supports Annex K too, how do we express it?
       *   guint k;
       */
      const gchar *str;
      GstStructure *new_s = gst_structure_new ("video/x-h263",
          "variant", G_TYPE_STRING, "itu",
          NULL);
      gboolean added = FALSE;
      GValue list = { 0 };
      GValue vstr = { 0 };

      g_value_init (&list, GST_TYPE_LIST);
      g_value_init (&vstr, G_TYPE_STRING);

      g_value_set_static_string (&vstr, "h263");
      gst_value_list_append_value (&list, &vstr);
      g_value_set_static_string (&vstr, "h263p");
      gst_value_list_append_value (&list, &vstr);
      g_value_unset (&vstr);

      gst_structure_set_value (new_s, "h263version", &list);
      g_value_unset (&list);

      str = gst_structure_get_string (s, "f");
      if (str) {
        gst_structure_set (new_s, "annex-f",
            G_TYPE_BOOLEAN, !strcmp (str, "1"), NULL);
      }

      str = gst_structure_get_string (s, "i");
      if (str) {
        gst_structure_set (new_s, "annex-i",
            G_TYPE_BOOLEAN, !strcmp (str, "1"), NULL);
      }

      str = gst_structure_get_string (s, "j");
      if (str) {
        gst_structure_set (new_s, "annex-j",
            G_TYPE_BOOLEAN, !strcmp (str, "1"), NULL);
      }

      str = gst_structure_get_string (s, "t");
      if (str) {
        gst_structure_set (new_s, "annex-t",
            G_TYPE_BOOLEAN, !strcmp (str, "1"), NULL);
      }

      str = gst_structure_get_string (s, "custom");
      if (str) {
        unsigned int xmax, ymax, mpi;
        if (sscanf (str, "%u,%u,%u", &xmax, &ymax, &mpi) == 3) {
          /* custom picture sizes must be multiples of 4, MPI in [1, 32] */
          if (!(xmax % 4) && !(ymax % 4) && mpi >= 1 && mpi <= 32) {
            caps = caps_append (caps, new_s, xmax, ymax, mpi);
            added = TRUE;
          } else {
            GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI"
                " %u x %u at %u, ignoring", xmax, ymax, mpi);
          }
        } else {
          GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI: %s,"
              " ignoring", str);
        }
      }

      str = gst_structure_get_string (s, "16cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 1408, 1152, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "4cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 704, 576, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 352, 288, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "qcif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 176, 144, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "sqcif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 128, 96, mpi);
        added = TRUE;
      }

      if (added)
        gst_structure_free (new_s);
      else
        caps = gst_caps_merge_structure (caps, new_s);
    }
  }

  gst_caps_unref (intersect);

done:

  if (filter) {
    GstCaps *tmp;

    GST_DEBUG_OBJECT (payload, "Intersect %" GST_PTR_FORMAT " and filter %"
        GST_PTR_FORMAT, caps, filter);
    tmp = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = tmp;
  }

  return caps;
}
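/* caps_append() is a file-local helper that is not part of this excerpt. From
 * its call sites above it plausibly copies the structure, caps the picture
 * size, derives the maximum framerate from the MPI (minimum picture interval,
 * in units of 1/29.97 s) and merges the result. A sketch under those
 * assumptions, not the real implementation: */
static GstCaps *
caps_append_sketch (GstCaps * caps, GstStructure * in_s, gint x, gint y,
    gint mpi)
{
  GstStructure *s;

  if (!caps || !in_s || mpi <= 0)
    return caps;

  s = gst_structure_copy (in_s);
  gst_structure_set (s,
      "width", GST_TYPE_INT_RANGE, 1, x,
      "height", GST_TYPE_INT_RANGE, 1, y,
      "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001 * mpi, NULL);

  /* gst_caps_merge_structure() takes ownership of the structure */
  return gst_caps_merge_structure (caps, s);
}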
static GstStructure *gst_avdtp_sink_parse_mpeg_caps(
			GstAvdtpSink *self, mpeg_capabilities_t *mpeg)
{
	GstStructure *structure;
	GValue *value;
	GValue *list;
	gboolean valid_layer = FALSE;
	gboolean mono, stereo;

	if (!mpeg)
		return NULL;

	GST_LOG_OBJECT(self, "parsing mpeg caps");

	structure = gst_structure_empty_new("audio/mpeg");
	value = g_new0(GValue, 1);
	g_value_init(value, G_TYPE_INT);

	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	g_value_set_int(value, 1);
	gst_value_list_prepend_value(list, value);
	g_value_set_int(value, 2);
	gst_value_list_prepend_value(list, value);
	gst_structure_set_value(structure, "mpegversion", list);
	g_free(list);

	/* layer */
	GST_LOG_OBJECT(self, "setting mpeg layer");
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	if (mpeg->layer & BT_MPEG_LAYER_1) {
		g_value_set_int(value, 1);
		gst_value_list_prepend_value(list, value);
		valid_layer = TRUE;
	}
	if (mpeg->layer & BT_MPEG_LAYER_2) {
		g_value_set_int(value, 2);
		gst_value_list_prepend_value(list, value);
		valid_layer = TRUE;
	}
	if (mpeg->layer & BT_MPEG_LAYER_3) {
		g_value_set_int(value, 3);
		gst_value_list_prepend_value(list, value);
		valid_layer = TRUE;
	}
	if (list) {
		gst_structure_set_value(structure, "layer", list);
		g_free(list);
		list = NULL;
	}

	if (!valid_layer) {
		gst_structure_free(structure);
		g_free(value);
		return NULL;
	}

	/* rate */
	GST_LOG_OBJECT(self, "setting mpeg rate");
	list = g_value_init(g_new0(GValue, 1), GST_TYPE_LIST);
	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_48000) {
		g_value_set_int(value, 48000);
		gst_value_list_prepend_value(list, value);
	}
	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_44100) {
		g_value_set_int(value, 44100);
		gst_value_list_prepend_value(list, value);
	}
	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_32000) {
		g_value_set_int(value, 32000);
		gst_value_list_prepend_value(list, value);
	}
	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_24000) {
		g_value_set_int(value, 24000);
		gst_value_list_prepend_value(list, value);
	}
	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_22050) {
		g_value_set_int(value, 22050);
		gst_value_list_prepend_value(list, value);
	}
	if (mpeg->frequency & BT_MPEG_SAMPLING_FREQ_16000) {
		g_value_set_int(value, 16000);
		gst_value_list_prepend_value(list, value);
	}
	g_value_unset(value);
	if (list) {
		gst_structure_set_value(structure, "rate", list);
		g_free(list);
		list = NULL;
	}

	/* channels */
	GST_LOG_OBJECT(self, "setting mpeg channels");
	mono = FALSE;
	stereo = FALSE;
	if (mpeg->channel_mode & BT_A2DP_CHANNEL_MODE_MONO)
		mono = TRUE;
	if ((mpeg->channel_mode & BT_A2DP_CHANNEL_MODE_STEREO) ||
			(mpeg->channel_mode &
			BT_A2DP_CHANNEL_MODE_DUAL_CHANNEL) ||
			(mpeg->channel_mode &
			BT_A2DP_CHANNEL_MODE_JOINT_STEREO))
		stereo = TRUE;

	if (mono && stereo) {
		g_value_init(value, GST_TYPE_INT_RANGE);
		gst_value_set_int_range(value, 1, 2);
	} else {
		g_value_init(value, G_TYPE_INT);
		if (mono)
			g_value_set_int(value, 1);
		else if (stereo)
			g_value_set_int(value, 2);
		else {
			GST_ERROR_OBJECT(self,
				"Unexpected number of channels");
			g_value_set_int(value, 0);
		}
	}
	gst_structure_set_value(structure, "channels", value);
	g_free(value);

	return structure;
}
Example #29
0
/* Reads in buffers, parses them, reframes into one-buffer-per-ogg-page, submits
 * pages to output pad.
 */
static GstFlowReturn
gst_ogg_parse_chain (GstPad * pad, GstBuffer * buffer)
{
  GstOggParse *ogg;
  GstFlowReturn result = GST_FLOW_OK;
  gint ret = -1;
  guint32 serialno;
  GstBuffer *pagebuffer;
  GstClockTime buffertimestamp = GST_BUFFER_TIMESTAMP (buffer);

  ogg = GST_OGG_PARSE (GST_OBJECT_PARENT (pad));

  GST_LOG_OBJECT (ogg, "Chain function received buffer of size %d",
      GST_BUFFER_SIZE (buffer));

  gst_ogg_parse_submit_buffer (ogg, buffer);

  while (ret != 0 && result == GST_FLOW_OK) {
    ogg_page page;

    /* We use ogg_sync_pageseek() rather than ogg_sync_pageout() so that we can
     * track how many bytes the ogg layer discarded (in the case of sync errors,
     * etc.); this allows us to accurately track the current stream offset
     */
    ret = ogg_sync_pageseek (&ogg->sync, &page);
    if (ret == 0) {
      /* need more data, that's fine... */
      break;
    } else if (ret < 0) {
      /* discontinuity; track how many bytes we skipped (-ret) */
      ogg->offset -= ret;
    } else {
#ifndef GST_DISABLE_GST_DEBUG
      gint64 granule = ogg_page_granulepos (&page);
      int bos = ogg_page_bos (&page);
#endif
      guint64 startoffset = ogg->offset;

      GST_LOG_OBJECT (ogg, "Timestamping outgoing buffer as %" GST_TIME_FORMAT,
          GST_TIME_ARGS (buffertimestamp));
      /* Turn our page into a GstBuffer TODO: better timestamps? Requires format
       * parsing. */
      pagebuffer = gst_ogg_parse_buffer_from_page (&page, startoffset, FALSE,
          buffertimestamp);

      /* We read out 'ret' bytes, so we set the next offset appropriately */
      ogg->offset += ret;

      serialno = ogg_page_serialno (&page);

      GST_LOG_OBJECT (ogg,
          "processing ogg page (serial %08x, pageno %ld, "
          "granule pos %" G_GUINT64_FORMAT ", bos %d, offset %"
          G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT ")",
          serialno, ogg_page_pageno (&page),
          granule, bos, startoffset, ogg->offset);

      if (ogg_page_bos (&page)) {
        /* If we've seen this serialno before, this is technically an error;
         * we log it but accept it - the new stream replaces the previous one
         * with this serialno. We can do this since we're streaming and don't
         * support seeking...
         */
        GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno);

        if (stream != NULL) {
          GST_LOG_OBJECT (ogg, "Incorrect stream; repeats serial number %u "
              "at offset %lld", serialno, ogg->offset);
        }

        if (ogg->last_page_not_bos) {
          GST_LOG_OBJECT (ogg, "Deleting all referenced streams, found a new "
              "chain starting with serial %u", serialno);
          gst_ogg_parse_delete_all_streams (ogg);
        }

        stream = gst_ogg_parse_new_stream (ogg, serialno);

        ogg->last_page_not_bos = FALSE;

        gst_buffer_ref (pagebuffer);
        stream->headers = g_slist_append (stream->headers, pagebuffer);

        if (!ogg->in_headers) {
          GST_LOG_OBJECT (ogg, "Found start of new chain at offset %llu",
              startoffset);
          ogg->in_headers = 1;
        }

        /* For now, we just keep the header buffer in the stream->headers list;
         * it actually gets output once we've collected the entire set
         */
      } else {
        /* Non-BOS page. Four cases:
         *   - outside headers and this isn't a header: normal data
         *   - outside headers and this is a header: error!
         *   - inside headers and this is a header: append it
         *   - inside headers and this isn't a header: we've found the end of
         *     the headers, so flush the lot!
         *
         * Before that, we flag that the last page seen (this one) was not a
         * BOS page; that way we know that when we next see a BOS page it's a
         * new chain, and we can flush all existing streams.
         */
        page_type type;
        GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno);

        if (!stream) {
          GST_LOG_OBJECT (ogg, "Non-BOS page unexpectedly found at %lld",
              ogg->offset);
          goto failure;
        }

        ogg->last_page_not_bos = TRUE;

        type = gst_ogg_parse_is_header (ogg, stream, &page);

        if (type == PAGE_PENDING && ogg->in_headers) {
          gst_buffer_ref (pagebuffer);

          stream->unknown_pages = g_slist_append (stream->unknown_pages,
              pagebuffer);
        } else if (type == PAGE_HEADER) {
          if (!ogg->in_headers) {
            GST_LOG_OBJECT (ogg, "Header page unexpectedly found outside "
                "headers at offset %lld", ogg->offset);
            goto failure;
          } else {
            /* Append the header to the buffer list, after any unknown previous
             * pages
             */
            stream->headers = g_slist_concat (stream->headers,
                stream->unknown_pages);
            g_slist_free (stream->unknown_pages);
            gst_buffer_ref (pagebuffer);
            stream->headers = g_slist_append (stream->headers, pagebuffer);
          }
        } else {                /* PAGE_DATA, or PAGE_PENDING but outside headers */
          if (ogg->in_headers) {
            /* First non-header page... set caps, flush headers.
             *
             * First up, we build a single GValue list of all the pagebuffers
             * we're using for the headers, in order.
             * Then we set this on the caps structure. Then we can start pushing
             * buffers for the headers, and finally we send this non-header
             * page.
             */
            GstCaps *caps;
            GstStructure *structure;
            GValue array = { 0 };
            gint count = 0;
            gboolean found_pending_headers = FALSE;
            GSList *l;

            g_value_init (&array, GST_TYPE_ARRAY);

            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;

              if (g_slist_length (stream->headers) == 0) {
                GST_LOG_OBJECT (ogg, "No primary header found for stream %u",
                    stream->serialno);
                goto failure;
              }

              gst_ogg_parse_append_header (&array,
                  GST_BUFFER (stream->headers->data));
              count++;
            }

            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              int j;

              for (j = 1; j < g_slist_length (stream->headers); j++) {
                gst_ogg_parse_append_header (&array,
                    GST_BUFFER (g_slist_nth_data (stream->headers, j)));
                count++;
              }
            }

            caps = gst_pad_get_caps (ogg->srcpad);
            caps = gst_caps_make_writable (caps);

            structure = gst_caps_get_structure (caps, 0);
            gst_structure_set_value (structure, "streamheader", &array);

            gst_pad_set_caps (ogg->srcpad, caps);

            g_value_unset (&array);

            if (ogg->caps)
              gst_caps_unref (ogg->caps);
            ogg->caps = caps;

            GST_LOG_OBJECT (ogg, "Set \"streamheader\" caps with %d buffers "
                "(one per page)", count);

            /* Now, we do the same thing, but push buffers... */
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GstBuffer *buf = GST_BUFFER (stream->headers->data);

              gst_buffer_set_caps (buf, caps);

              result = gst_pad_push (ogg->srcpad, buf);
              if (result != GST_FLOW_OK)
                return result;
            }
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              int j;

              for (j = 1; j < g_slist_length (stream->headers); j++) {
                GstBuffer *buf =
                    GST_BUFFER (g_slist_nth_data (stream->headers, j));
                gst_buffer_set_caps (buf, caps);

                result = gst_pad_push (ogg->srcpad, buf);
                if (result != GST_FLOW_OK)
                  return result;
              }
            }

            ogg->in_headers = 0;

            /* And finally the pending data pages */
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GSList *k;

              if (stream->unknown_pages == NULL)
                continue;

              if (found_pending_headers) {
                GST_WARNING_OBJECT (ogg, "Incorrectly muxed headers found at "
                    "approximate offset %lld", ogg->offset);
              }
              found_pending_headers = TRUE;

              GST_LOG_OBJECT (ogg, "Pushing %d pending pages after headers",
                  g_slist_length (stream->unknown_pages) + 1);

              for (k = stream->unknown_pages; k != NULL; k = k->next) {
                GstBuffer *buf;

                buf = GST_BUFFER (k->data);
                gst_buffer_set_caps (buf, caps);
                result = gst_pad_push (ogg->srcpad, buf);
                if (result != GST_FLOW_OK)
                  return result;
              }
              g_slist_foreach (stream->unknown_pages,
                  (GFunc) gst_mini_object_unref, NULL);
              g_slist_free (stream->unknown_pages);
              stream->unknown_pages = NULL;
            }

            gst_buffer_set_caps (pagebuffer, caps);

            result = gst_pad_push (ogg->srcpad, GST_BUFFER (pagebuffer));
            if (result != GST_FLOW_OK)
              return result;
          } else {
            /* Normal data page, submit buffer */
            gst_buffer_set_caps (pagebuffer, ogg->caps);
            result = gst_pad_push (ogg->srcpad, GST_BUFFER (pagebuffer));
            if (result != GST_FLOW_OK)
              return result;
          }
        }
      }
    }
  }

  return result;

failure:
  gst_pad_push_event (GST_PAD (ogg->srcpad), gst_event_new_eos ());
  return GST_FLOW_ERROR;
}
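/* A minimal standalone sketch of the ogg_sync_pageseek() return convention the
 * chain function above relies on: ret < 0 means -ret bytes were skipped while
 * resynchronising, ret == 0 means more data is needed, ret > 0 means a whole
 * page of ret bytes was consumed. The helper is hypothetical and assumes plain
 * libogg (<ogg/ogg.h>), <stdint.h> and <string.h> are available. */
static int64_t
feed_and_track_offset (ogg_sync_state * sync, const char *data, long len,
    int64_t offset)
{
  ogg_page page;
  long ret;
  char *dst = ogg_sync_buffer (sync, len);

  memcpy (dst, data, len);
  ogg_sync_wrote (sync, len);

  while ((ret = ogg_sync_pageseek (sync, &page)) != 0) {
    if (ret < 0)
      offset -= ret;            /* bytes skipped before the next page */
    else
      offset += ret;            /* one complete page of 'ret' bytes */
  }

  return offset;                /* stream offset of the next unparsed byte */
}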
static void
output_loop (gpointer data)
{
  GstPad *pad;
  GOmxCore *gomx;
  GOmxPort *out_port;
  GstOmxBaseFilter *self;
  GstFlowReturn ret = GST_FLOW_OK;

  pad = data;
  self = GST_OMX_BASE_FILTER (gst_pad_get_parent (pad));
  gomx = self->gomx;

  GST_LOG_OBJECT (self, "begin");

  /* do not bother if we have been setup to bail out */
  if ((ret = g_atomic_int_get (&self->last_pad_push_return)) != GST_FLOW_OK)
    goto leave;

  if (!self->ready) {
    g_error ("not ready");
    return;
  }

  out_port = self->out_port;

  if (G_LIKELY (out_port->enabled)) {
    OMX_BUFFERHEADERTYPE *omx_buffer = NULL;

    GST_LOG_OBJECT (self, "request buffer");
    omx_buffer = g_omx_port_request_buffer (out_port);

    GST_LOG_OBJECT (self, "omx_buffer: %p", omx_buffer);

    if (G_UNLIKELY (!omx_buffer)) {
      GST_WARNING_OBJECT (self, "null buffer: leaving");
      ret = GST_FLOW_WRONG_STATE;
      goto leave;
    }

    log_buffer (self, omx_buffer);

    if (G_LIKELY (omx_buffer->nFilledLen > 0)) {
      GstBuffer *buf;

#if 1
            /** @todo remove this check */
      if (G_LIKELY (self->in_port->enabled)) {
        GstCaps *caps = NULL;

        caps = gst_pad_get_negotiated_caps (self->srcpad);

#ifdef ANDROID
        if (!caps || gomx->settings_changed) {
#else
        if (!caps) {
#endif
                    /** @todo We shouldn't be doing this. */
          GST_WARNING_OBJECT (self, "faking settings changed notification");
          if (gomx->settings_changed_cb)
            gomx->settings_changed_cb (gomx);
#ifdef ANDROID
          gomx->settings_changed = FALSE;
#endif
        } else {
          GST_LOG_OBJECT (self, "caps already fixed: %" GST_PTR_FORMAT, caps);
          gst_caps_unref (caps);
        }
      }
#endif

      /* buf is always null when the output buffer pointer isn't shared. */
      buf = omx_buffer->pAppPrivate;

            /** @todo we need to move all the caps handling to one single
             * place, in the output loop probably. */
      /* 0x80 is OMX_BUFFERFLAG_CODECCONFIG (codec configuration data) */
      if (G_UNLIKELY (omx_buffer->nFlags & 0x80)) {
        GstCaps *caps = NULL;
        GstStructure *structure;
        GValue value = { 0, { { 0 } } };

        caps = gst_pad_get_negotiated_caps (self->srcpad);
        caps = gst_caps_make_writable (caps);
        structure = gst_caps_get_structure (caps, 0);

        g_value_init (&value, GST_TYPE_BUFFER);
        buf = gst_buffer_new_and_alloc (omx_buffer->nFilledLen);
        memcpy (GST_BUFFER_DATA (buf),
            omx_buffer->pBuffer + omx_buffer->nOffset, omx_buffer->nFilledLen);
        gst_value_set_buffer (&value, buf);
        gst_buffer_unref (buf);
        gst_structure_set_value (structure, "codec_data", &value);
        g_value_unset (&value);

        gst_pad_set_caps (self->srcpad, caps);
      } else if (buf && !(omx_buffer->nFlags & OMX_BUFFERFLAG_EOS)) {
        GST_BUFFER_SIZE (buf) = omx_buffer->nFilledLen;
        if (self->use_timestamps) {
          GST_BUFFER_TIMESTAMP (buf) =
              gst_util_uint64_scale_int (omx_buffer->nTimeStamp, GST_SECOND,
              OMX_TICKS_PER_SECOND);
        }

        omx_buffer->pAppPrivate = NULL;
        omx_buffer->pBuffer = NULL;

        ret = push_buffer (self, buf);

        gst_buffer_unref (buf);
      } else {
        /* This is only meant for the first OpenMAX buffers,
         * which need to be pre-allocated, and for the very last one. */
        ret = gst_pad_alloc_buffer_and_set_caps (self->srcpad,
            GST_BUFFER_OFFSET_NONE,
            omx_buffer->nFilledLen, GST_PAD_CAPS (self->srcpad), &buf);

        if (G_LIKELY (buf)) {
          memcpy (GST_BUFFER_DATA (buf),
              omx_buffer->pBuffer + omx_buffer->nOffset,
              omx_buffer->nFilledLen);
          if (self->use_timestamps) {
            GST_BUFFER_TIMESTAMP (buf) =
                gst_util_uint64_scale_int (omx_buffer->nTimeStamp, GST_SECOND,
                OMX_TICKS_PER_SECOND);
          }

          if (self->share_output_buffer) {
            GST_WARNING_OBJECT (self, "couldn't zero-copy");
            /* If pAppPrivate is NULL, it means it was a dummy
             * allocation, free it. */
            if (!omx_buffer->pAppPrivate) {
              g_free (omx_buffer->pBuffer);
              omx_buffer->pBuffer = NULL;
            }
          }

          ret = push_buffer (self, buf);
        } else {
          GST_WARNING_OBJECT (self, "couldn't allocate buffer of size %lu",
              omx_buffer->nFilledLen);
        }
      }
    } else {
      GST_WARNING_OBJECT (self, "empty buffer");
    }

    if (self->share_output_buffer &&
        !omx_buffer->pBuffer && omx_buffer->nOffset == 0) {
      GstBuffer *buf;
      GstFlowReturn result;

      GST_LOG_OBJECT (self, "allocate buffer");
      result = gst_pad_alloc_buffer_and_set_caps (self->srcpad,
          GST_BUFFER_OFFSET_NONE,
          omx_buffer->nAllocLen, GST_PAD_CAPS (self->srcpad), &buf);

      if (G_LIKELY (result == GST_FLOW_OK)) {
        gst_buffer_ref (buf);
        omx_buffer->pAppPrivate = buf;

        omx_buffer->pBuffer = GST_BUFFER_DATA (buf);
        omx_buffer->nAllocLen = GST_BUFFER_SIZE (buf);
      } else {
        GST_WARNING_OBJECT (self,
            "could not pad allocate buffer, using malloc");
        omx_buffer->pBuffer = g_malloc (omx_buffer->nAllocLen);
      }
    }

    if (self->share_output_buffer && !omx_buffer->pBuffer) {
      GST_ERROR_OBJECT (self, "no input buffer to share");
    }

    if (G_UNLIKELY (omx_buffer->nFlags & OMX_BUFFERFLAG_EOS)) {
      GST_DEBUG_OBJECT (self, "got eos");
      gst_pad_push_event (self->srcpad, gst_event_new_eos ());
      omx_buffer->nFlags &= ~OMX_BUFFERFLAG_EOS;
      ret = GST_FLOW_UNEXPECTED;
    }

    omx_buffer->nFilledLen = 0;
    GST_LOG_OBJECT (self, "release_buffer");
    g_omx_port_release_buffer (out_port, omx_buffer);
  }

leave:

  self->last_pad_push_return = ret;

  if (gomx->omx_error != OMX_ErrorNone)
    ret = GST_FLOW_ERROR;

  if (ret != GST_FLOW_OK) {
    GST_INFO_OBJECT (self, "pause task, reason:  %s", gst_flow_get_name (ret));
    gst_pad_pause_task (self->srcpad);
  }

  GST_LOG_OBJECT (self, "end");

  gst_object_unref (self);
}

static GstFlowReturn
pad_chain (GstPad * pad, GstBuffer * buf)
{
  GOmxCore *gomx;
  GOmxPort *in_port;
  GstOmxBaseFilter *self;
  GstFlowReturn ret = GST_FLOW_OK;

  self = GST_OMX_BASE_FILTER (GST_OBJECT_PARENT (pad));

  gomx = self->gomx;

  GST_LOG_OBJECT (self, "begin");
  GST_LOG_OBJECT (self, "gst_buffer: size=%u", GST_BUFFER_SIZE (buf));

  GST_LOG_OBJECT (self, "state: %d", gomx->omx_state);

  if (G_UNLIKELY (gomx->omx_state == OMX_StateLoaded)) {
    g_mutex_lock (self->ready_lock);

    GST_INFO_OBJECT (self, "omx: prepare");

        /** @todo this should probably go after doing preparations. */
    if (self->omx_setup) {
      self->omx_setup (self);
    }

    setup_ports (self);

    g_omx_core_prepare (self->gomx);

    if (gomx->omx_state == OMX_StateIdle) {
      self->ready = TRUE;
      GST_INFO_OBJECT (self, "start srcpad task");
      gst_pad_start_task (self->srcpad, output_loop, self->srcpad);
    }

    g_mutex_unlock (self->ready_lock);

    if (gomx->omx_state != OMX_StateIdle)
      goto out_flushing;
  }

#ifdef ANDROID
  if (gomx->settings_changed) {
    GST_DEBUG_OBJECT (self, "settings changed called from streaming thread... Android");
    if (gomx->settings_changed_cb)
      gomx->settings_changed_cb (gomx);

    gomx->settings_changed = FALSE;
  }
#endif

  in_port = self->in_port;

  if (G_LIKELY (in_port->enabled)) {
    guint buffer_offset = 0;

    if (G_UNLIKELY (gomx->omx_state == OMX_StateIdle)) {
      GST_INFO_OBJECT (self, "omx: play");
      g_omx_core_start (gomx);

      if (gomx->omx_state != OMX_StateExecuting)
        goto out_flushing;

      /* send buffer with codec data flag */
            /** @todo move to util */
      if (self->codec_data) {
        OMX_BUFFERHEADERTYPE *omx_buffer;

        GST_LOG_OBJECT (self, "request buffer");
        omx_buffer = g_omx_port_request_buffer (in_port);

        if (G_LIKELY (omx_buffer)) {
          omx_buffer->nFlags |= 0x00000080;     /* OMX_BUFFERFLAG_CODECCONFIG */

          omx_buffer->nFilledLen = GST_BUFFER_SIZE (self->codec_data);
          memcpy (omx_buffer->pBuffer + omx_buffer->nOffset,
              GST_BUFFER_DATA (self->codec_data), omx_buffer->nFilledLen);

          GST_LOG_OBJECT (self, "release_buffer");
          g_omx_port_release_buffer (in_port, omx_buffer);
        }
      }
    }

    if (G_UNLIKELY (gomx->omx_state != OMX_StateExecuting)) {
      GST_ERROR_OBJECT (self, "Whoa! very wrong");
    }

    while (G_LIKELY (buffer_offset < GST_BUFFER_SIZE (buf))) {
      OMX_BUFFERHEADERTYPE *omx_buffer;

      if (self->last_pad_push_return != GST_FLOW_OK ||
          !(gomx->omx_state == OMX_StateExecuting ||
              gomx->omx_state == OMX_StatePause)) {
        goto out_flushing;
      }

      GST_LOG_OBJECT (self, "request buffer");
      omx_buffer = g_omx_port_request_buffer (in_port);

      GST_LOG_OBJECT (self, "omx_buffer: %p", omx_buffer);

      if (G_LIKELY (omx_buffer)) {
        log_buffer (self, omx_buffer);

        if (omx_buffer->nOffset == 0 && self->share_input_buffer) {
          {
            GstBuffer *old_buf;
            old_buf = omx_buffer->pAppPrivate;

            if (old_buf) {
              gst_buffer_unref (old_buf);
            } else if (omx_buffer->pBuffer) {
              g_free (omx_buffer->pBuffer);
            }
          }

          omx_buffer->pBuffer = GST_BUFFER_DATA (buf);
          omx_buffer->nAllocLen = GST_BUFFER_SIZE (buf);
          omx_buffer->nFilledLen = GST_BUFFER_SIZE (buf);
          omx_buffer->pAppPrivate = buf;
        } else {
          omx_buffer->nFilledLen = MIN (GST_BUFFER_SIZE (buf) - buffer_offset,
              omx_buffer->nAllocLen - omx_buffer->nOffset);
          memcpy (omx_buffer->pBuffer + omx_buffer->nOffset,
              GST_BUFFER_DATA (buf) + buffer_offset, omx_buffer->nFilledLen);
        }

        if (self->use_timestamps) {
          GstClockTime timestamp_offset = 0;

          if (buffer_offset && GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE) {
            timestamp_offset = gst_util_uint64_scale_int (buffer_offset,
                GST_BUFFER_DURATION (buf), GST_BUFFER_SIZE (buf));
          }

          omx_buffer->nTimeStamp =
              gst_util_uint64_scale_int (GST_BUFFER_TIMESTAMP (buf) +
              timestamp_offset, OMX_TICKS_PER_SECOND, GST_SECOND);
        }

        buffer_offset += omx_buffer->nFilledLen;
#ifdef ANDROID
        omx_buffer->nFlags |= OMX_BUFFERFLAG_ENDOFFRAME;
        log_buffer (self, omx_buffer);
#endif

        GST_LOG_OBJECT (self, "release_buffer");
                /** @todo untaint buffer */
        g_omx_port_release_buffer (in_port, omx_buffer);
      } else {
        GST_WARNING_OBJECT (self, "null buffer");
        ret = GST_FLOW_WRONG_STATE;
        goto out_flushing;
      }
    }
  } else {
    GST_WARNING_OBJECT (self, "done");
    ret = GST_FLOW_UNEXPECTED;
  }

  if (!self->share_input_buffer) {
    gst_buffer_unref (buf);
  }

leave:

  GST_LOG_OBJECT (self, "end");

  return ret;

  /* special conditions */
out_flushing:
  {
    const gchar *error_msg = NULL;

    if (gomx->omx_error) {
      error_msg = "Error from OpenMAX component";
    } else if (gomx->omx_state != OMX_StateExecuting &&
        gomx->omx_state != OMX_StatePause) {
      error_msg = "OpenMAX component in wrong state";
    }

    if (error_msg) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL), ("%s", error_msg));
      ret = GST_FLOW_ERROR;
    }

    gst_buffer_unref (buf);

    goto leave;
  }
}
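/* A minimal sketch of the timestamp scaling used in pad_chain() and
 * output_loop() above. It assumes the usual microsecond OMX tick
 * (OMX_TICKS_PER_SECOND is 1000000 in the reference OpenMAX IL headers). */
static void
omx_timestamp_roundtrip_example (void)
{
  GstClockTime gst_ts = 40 * GST_MSECOND;       /* one 25 fps frame */
  guint64 omx_ticks;
  GstClockTime back;

  /* GStreamer nanoseconds -> OMX ticks, as done when filling input buffers */
  omx_ticks = gst_util_uint64_scale_int (gst_ts,
      OMX_TICKS_PER_SECOND, GST_SECOND);        /* 40000 ticks */

  /* OMX ticks -> GStreamer nanoseconds, as done for output buffers */
  back = gst_util_uint64_scale_int (omx_ticks,
      GST_SECOND, OMX_TICKS_PER_SECOND);        /* 40000000 ns == 40 ms */

  (void) back;
}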