Example #1
static void
gst_nle_source_setup (GstNleSource * nlesrc)
{
  GstElement *rotate, *videorate, *videoscale, *colorspace, *vident, *cairooverlay, *colorspace2;
  GstElement *audiorate, *audioconvert, *audioresample, *aident;
  GstElement *a_capsfilter, *v_capsfilter, *last;
  GstPad *v_pad, *a_pad;
  GstCaps *v_caps, *a_caps;

  rotate = gst_element_factory_make ("flurotate", NULL);
  videorate = gst_element_factory_make ("videorate", NULL);
  nlesrc->videocrop = gst_element_factory_make ("videocrop", NULL);
  videoscale = gst_element_factory_make ("videoscale", NULL);
  colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
  v_capsfilter = gst_element_factory_make ("capsfilter", "video_capsfilter");
  nlesrc->textoverlay = gst_element_factory_make ("textoverlay", NULL);
  cairooverlay = gst_element_factory_make ("cairooverlay", "overlay");
  colorspace2 = gst_element_factory_make ("ffmpegcolorspace", NULL);

  vident = gst_element_factory_make ("identity", NULL);

  v_caps = gst_caps_new_simple ("video/x-raw-yuv",
      "format", GST_TYPE_FOURCC, GST_STR_FOURCC ("I420"),
      "width", G_TYPE_INT, (gint) nlesrc->width,
      "height", G_TYPE_INT, (gint) nlesrc->height,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
      "framerate", GST_TYPE_FRACTION,
      (gint) nlesrc->fps_n, (gint) nlesrc->fps_d, NULL);

  if (rotate) {
    gst_caps_set_simple (v_caps, "rotation", G_TYPE_INT, (gint) 0, NULL);
  } else {
    rotate = gst_element_factory_make ("identity", NULL); 
  }

  gst_pad_set_caps (nlesrc->video_srcpad, v_caps);

  g_object_set (videoscale, "add-borders", TRUE, NULL);
  g_object_set (vident, "single-segment", TRUE, NULL);
  g_object_set (v_capsfilter, "caps", v_caps, NULL);
  g_object_set (nlesrc->textoverlay, "valignment", 2, "halignment", 0,
      "auto-resize", TRUE, "wrap-mode", 0, "silent", !nlesrc->overlay_title,
      NULL);

  g_signal_connect (cairooverlay, "draw",
      G_CALLBACK (gst_nle_source_draw_overlay), nlesrc);

  /* As videorate can duplicate a lot of buffers, we want to put it last in
     this transformation bin */
  gst_bin_add_many (GST_BIN (nlesrc), rotate, nlesrc->videocrop,
      videoscale, colorspace, nlesrc->textoverlay, videorate, v_capsfilter,
      vident, NULL);
  /* cairooverlay forces a colorspace conversion to RGB that we want to avoid
   * when we are not rendering the watermark */
  if (nlesrc->watermark != NULL) {
    gst_bin_add_many (GST_BIN (nlesrc), cairooverlay, colorspace2, NULL);
  }

  gst_element_link_many (rotate, nlesrc->videocrop, videoscale, colorspace,
      nlesrc->textoverlay, NULL);

  if (nlesrc->watermark != NULL) {
    gst_element_link_many (nlesrc->textoverlay, cairooverlay, colorspace2, NULL);
    last = colorspace2;
  } else {
    last = nlesrc->textoverlay;
  }

  gst_element_link_many (last, videorate, v_capsfilter, vident, NULL);

  /* Ghost source and sink pads */
  v_pad = gst_element_get_pad (vident, "src");
  gst_ghost_pad_set_target (GST_GHOST_PAD (nlesrc->video_srcpad), v_pad);
  gst_object_unref (v_pad);

  v_pad = gst_element_get_pad (rotate, "sink");
  gst_ghost_pad_set_target (GST_GHOST_PAD (nlesrc->video_sinkpad), v_pad);
  gst_object_unref (v_pad);

  if (nlesrc->with_audio) {
    audiorate = gst_element_factory_make ("audiorate", NULL);
    audioconvert = gst_element_factory_make ("audioconvert", NULL);
    audioresample = gst_element_factory_make ("audioresample", NULL);
    a_capsfilter = gst_element_factory_make ("capsfilter", NULL);
    aident = gst_element_factory_make ("identity", NULL);

    gst_bin_add_many (GST_BIN (nlesrc), audioresample, audioconvert,
        audiorate, a_capsfilter, aident, NULL);
    gst_element_link_many (audioconvert, audioresample,
        audiorate, a_capsfilter, aident, NULL);

    a_caps = gst_nle_source_get_audio_caps (nlesrc);
    gst_pad_set_caps (nlesrc->audio_srcpad, a_caps);
    g_object_set (a_capsfilter, "caps", a_caps, NULL);

    g_object_set (aident, "single-segment", TRUE, NULL);

    /* Ghost sink and source pads */
    a_pad = gst_element_get_pad (aident, "src");
    gst_ghost_pad_set_target (GST_GHOST_PAD (nlesrc->audio_srcpad), a_pad);
    gst_object_unref (a_pad);

    a_pad = gst_element_get_pad (audioconvert, "sink");
    gst_ghost_pad_set_target (GST_GHOST_PAD (nlesrc->audio_sinkpad), a_pad);
    gst_object_unref (a_pad);
  }
  nlesrc->index = -1;
  nlesrc->accu_time = 0;
  nlesrc->video_srcpad_added = FALSE;
  nlesrc->audio_srcpad_added = FALSE;
}
Example #2
static guint32
value_to_fourcc(VALUE value)
{
    if (RVAL2CBOOL(rb_obj_is_kind_of(value, rb_cString))) {
        return GST_STR_FOURCC(RSTRING_PTR(value));
    } else {
        return NUM2UINT(value);
    }
}
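For reference, GST_STR_FOURCC simply packs the first four bytes of a string into a guint32, so it is interchangeable with GST_MAKE_FOURCC over the same four characters. A minimal standalone sketch, using only the public macros from <gst/gst.h>:

#include <gst/gst.h>
#include <stdio.h>

int
main (int argc, char *argv[])
{
  guint32 a, b;

  gst_init (&argc, &argv);

  a = GST_STR_FOURCC ("I420");               /* reads exactly four bytes */
  b = GST_MAKE_FOURCC ('I', '4', '2', '0');  /* same value, built from chars */
  g_assert (a == b);

  /* GST_FOURCC_FORMAT / GST_FOURCC_ARGS turn the value back into characters */
  printf ("0x%08x = %" GST_FOURCC_FORMAT "\n", a, GST_FOURCC_ARGS (a));

  return 0;
}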
Example #3
static GstCaps *
gst_smokeenc_getcaps (GstPad * pad)
{
  GstSmokeEnc *smokeenc = GST_SMOKEENC (gst_pad_get_parent (pad));
  GstPad *otherpad;
  GstCaps *result, *caps;
  const GstCaps *tcaps;
  const char *name;
  int i;
  GstStructure *structure = NULL;

  /* we want to proxy properties like width, height and framerate from the
     other end of the element */
  otherpad = (pad == smokeenc->srcpad) ? smokeenc->sinkpad : smokeenc->srcpad;

  /* get template caps, we always need this to filter the peer caps */
  tcaps = gst_pad_get_pad_template_caps (otherpad);

  /* get any constraints on the peer pad */
  caps = gst_pad_peer_get_caps (otherpad);

  if (caps == NULL)
    caps = gst_caps_copy (tcaps);
  else
    caps = gst_caps_make_writable (caps);

  /* intersect with the template */
  result = gst_caps_intersect (caps, tcaps);
  gst_caps_unref (caps);

  if (pad == smokeenc->srcpad) {
    name = "video/x-smoke";
  } else {
    name = "video/x-raw-yuv";
  }

  /* we can only copy width, height, framerate from one side to the other */
  for (i = 0; i < gst_caps_get_size (result); i++) {
    structure = gst_caps_get_structure (result, i);

    gst_structure_set_name (structure, name);
    gst_structure_remove_field (structure, "format");
    /* ... but for the sink pad, we only do I420 anyway, so add that */
    if (pad == smokeenc->sinkpad) {
      gst_structure_set (structure, "format", GST_TYPE_FOURCC,
          GST_STR_FOURCC ("I420"), NULL);
    }
  }

  gst_object_unref (smokeenc);

  return result;
}
Example #4
static gboolean extract_resolution (GstTcamWhitebalance* self)
{

    GstPad* pad  = GST_BASE_TRANSFORM_SINK_PAD(self);
    GstCaps* caps = gst_pad_get_current_caps(pad);
    GstStructure *structure = gst_caps_get_structure (caps, 0);

    g_return_val_if_fail(gst_structure_get_int(structure, "width", &self->image_size.width), FALSE);
    g_return_val_if_fail(gst_structure_get_int(structure, "height", &self->image_size.height), FALSE);

    guint fourcc = 0;   /* stays 0 when the format field is missing or not a string */

    if (gst_structure_get_field_type(structure, "format") == G_TYPE_STRING)
    {
        const char *string;
        string = gst_structure_get_string (structure, "format");
        fourcc = GST_STR_FOURCC (string);
    }
    gst_caps_unref (caps);

    if (fourcc == MAKE_FOURCC ('g','r','b','g'))
    {
        self->pattern = GR;
    }
    else if (fourcc == MAKE_FOURCC ('r', 'g', 'g', 'b'))
    {
        self->pattern = RG;
    }
    else if (fourcc == MAKE_FOURCC ('g', 'b', 'r', 'g'))
    {
        self->pattern = GB;
    }
    else if (fourcc == MAKE_FOURCC ('b', 'g', 'g', 'r'))
    {
        self->pattern = BG;
    }
    else
    {
        GST_ERROR("Unable to determine bayer pattern.");
        return FALSE;
    }

    // we only handle bayer 8 bit -> 1 byte
    int bytes_per_pixel = 1;
    self->expected_buffer_size = self->image_size.height * self->image_size.width * bytes_per_pixel;

    self->res = find_source(GST_ELEMENT(self));

    return TRUE;
}
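MAKE_FOURCC here is not a GStreamer symbol; it is presumably a project-local helper equivalent to GST_MAKE_FOURCC (the same macro reappears in the fixate_caps example further down). Under that assumption it would look like:

/* assumed project-local equivalent of GST_MAKE_FOURCC (little-endian packing) */
#define MAKE_FOURCC(a,b,c,d) \
    ((guint32)((a) | ((b) << 8) | ((c) << 16) | ((d) << 24)))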
Example #5
static GstCaps *
gst_smpte_alpha_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * from)
{
  GstCaps *to = gst_caps_copy (from);
  GstStructure *s;

  gst_caps_truncate (to);
  s = gst_caps_get_structure (to, 0);

  if (gst_structure_has_name (s, "video/x-raw-yuv")) {
    GValue list = { 0, };
    GValue val = { 0, };

    gst_structure_remove_field (s, "format");

    g_value_init (&list, GST_TYPE_LIST);
    g_value_init (&val, GST_TYPE_FOURCC);
    gst_value_set_fourcc (&val, GST_STR_FOURCC ("AYUV"));
    gst_value_list_append_value (&list, &val);
    g_value_reset (&val);
    gst_value_set_fourcc (&val, GST_STR_FOURCC ("I420"));
    gst_value_list_append_value (&list, &val);
    g_value_reset (&val);
    gst_value_set_fourcc (&val, GST_STR_FOURCC ("YV12"));
    gst_value_list_append_value (&list, &val);
    g_value_unset (&val);
    gst_structure_set_value (s, "format", &list);
    g_value_unset (&list);
  } else if (!gst_structure_has_name (s, "video/x-raw-rgb")) {
    gst_caps_unref (to);
    to = gst_caps_new_empty ();
  }

  return to;
}
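The hand-built AYUV/I420/YV12 list above can also be written in the 0.10 caps string syntax; a sketch of just the format field, illustrative and 0.10-only:

/* same three-entry fourcc list, expressed as a caps string */
GstCaps *formats = gst_caps_from_string (
    "video/x-raw-yuv, format=(fourcc){ AYUV, I420, YV12 }");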
Example #6
static gboolean
gst_dvdec_src_negotiate (GstDVDec * dvdec)
{
  GstCaps *othercaps;

  /* no PAR was specified in input, derive from encoded data */
  if (dvdec->need_par) {
    if (dvdec->PAL) {
      if (dvdec->wide) {
        dvdec->par_x = PAL_WIDE_PAR_X;
        dvdec->par_y = PAL_WIDE_PAR_Y;
      } else {
        dvdec->par_x = PAL_NORMAL_PAR_X;
        dvdec->par_y = PAL_NORMAL_PAR_Y;
      }
    } else {
      if (dvdec->wide) {
        dvdec->par_x = NTSC_WIDE_PAR_X;
        dvdec->par_y = NTSC_WIDE_PAR_Y;
      } else {
        dvdec->par_x = NTSC_NORMAL_PAR_X;
        dvdec->par_y = NTSC_NORMAL_PAR_Y;
      }
    }
    GST_DEBUG_OBJECT (dvdec, "Inferred PAR %d/%d from video format",
        dvdec->par_x, dvdec->par_y);
  }

  /* ignoring rgb, bgr0 for now */
  dvdec->bpp = 2;

  othercaps = gst_caps_new_simple ("video/x-raw-yuv",
      "format", GST_TYPE_FOURCC, GST_STR_FOURCC ("YUY2"),
      "width", G_TYPE_INT, 720,
      "height", G_TYPE_INT, dvdec->height,
      "framerate", GST_TYPE_FRACTION, dvdec->framerate_numerator,
      dvdec->framerate_denominator,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, dvdec->par_x,
      dvdec->par_y, NULL);

  gst_pad_set_caps (dvdec->srcpad, othercaps);
  gst_caps_unref (othercaps);

  dvdec->src_negotiated = TRUE;

  return TRUE;
}
static gboolean videodecoder_configure_sourcepad(VideoDecoder *decoder)
{
    BaseDecoder *base = BASEDECODER(decoder);

    if (GST_PAD_CAPS(base->srcpad) == NULL ||
        decoder->width != base->context->width ||
        decoder->height != base->context->height)
    {
        decoder->width = base->context->width;
        decoder->height = base->context->height;

        decoder->discont = (GST_PAD_CAPS(base->srcpad) != NULL);

        decoder->u_offset = base->frame->linesize[0] * decoder->height;
        decoder->uv_blocksize = base->frame->linesize[1] * decoder->height / 2;

        decoder->v_offset = decoder->u_offset + decoder->uv_blocksize;
        decoder->frame_size = (base->frame->linesize[0] + base->frame->linesize[1]) * decoder->height;

        GstCaps *src_caps = gst_caps_new_simple("video/x-raw-yuv",
                                                "format", GST_TYPE_FOURCC, GST_STR_FOURCC("YV12"),
                                                "width", G_TYPE_INT, decoder->width,
                                                "height", G_TYPE_INT, decoder->height,
                                                "stride-y", G_TYPE_INT, base->frame->linesize[0],
                                                "stride-u", G_TYPE_INT, base->frame->linesize[1],
                                                "stride-v", G_TYPE_INT, base->frame->linesize[2],
                                                "offset-y", G_TYPE_INT, 0,
                                                "offset-u", G_TYPE_INT, decoder->u_offset,
                                                "offset-v", G_TYPE_INT, decoder->v_offset,
                                                "framerate", GST_TYPE_FRACTION, 2997, 100,
                                                NULL);


        if (!gst_pad_set_caps (base->srcpad, src_caps))
        {
            gst_element_message_full(GST_ELEMENT(decoder), GST_MESSAGE_ERROR, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
                                     g_strdup("Failed to set caps on the sourcepad"), NULL,
                                     ("videodecoder.c"), ("videodecoder_configure"), 0);
            gst_caps_unref(src_caps);
            return FALSE;
        }
        gst_caps_unref(src_caps);
    }

    return TRUE;
}
Example #8
static guint
gst_midi_parse_chunk (GstMidiParse * midiparse, guint8 * data, guint size)
{
  guint32 type, length = 0;

  if (size < 8)
    goto short_chunk;

  length = GST_READ_UINT32_BE (data + 4);

  GST_DEBUG_OBJECT (midiparse, "have type %c%c%c%c, length %u",
      data[0], data[1], data[2], data[3], length);

  if (size < length + 8)
    goto short_chunk;

  type = GST_STR_FOURCC (data);

  switch (type) {
    case GST_MAKE_FOURCC ('M', 'T', 'h', 'd'):
      if (!parse_MThd (midiparse, data + 8, length))
        goto invalid_format;
      break;
    case GST_MAKE_FOURCC ('M', 'T', 'r', 'k'):
      if (!parse_MTrk (midiparse, data + 8, length))
        goto invalid_format;
      break;
    default:
      GST_LOG_OBJECT (midiparse, "ignore chunk");
      break;
  }

  return length + 8;

  /* ERRORS */
short_chunk:
  {
    GST_LOG_OBJECT (midiparse, "not enough data %u < %u", size, length + 8);
    return 0;
  }
invalid_format:
  {
    GST_ERROR_OBJECT (midiparse, "invalid format");
    return 0;
  }
}
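The framing this parser walks is the standard MIDI chunk header: a four-character type tag followed by a 32-bit big-endian payload length. A hypothetical buffer for illustration:

/* hypothetical "MThd" chunk: type + BE length + six payload bytes */
guint8 chunk[] = {
  'M', 'T', 'h', 'd',                  /* type, read via GST_STR_FOURCC (data) */
  0x00, 0x00, 0x00, 0x06,              /* length, read via GST_READ_UINT32_BE (data + 4) */
  0x00, 0x00, 0x00, 0x01, 0x00, 0x60   /* format 0, one track, 96 ticks per quarter note */
};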
Example #9
static gboolean
check_fourcc_list (const GValue * format_value)
{
    const GValue *fourcc_value;
    gboolean got_yv12 = FALSE;
    gboolean got_i420 = FALSE;
    gboolean got_yuy2 = FALSE;
    guint32 fourcc;
    gint i;

    /* the three list entries must cover I420, YUY2 and YV12 in some order */
    for (i = 0; i < 3; i++) {
        fourcc_value = gst_value_list_get_value (format_value, i);
        fail_unless (fourcc_value != NULL);
        fail_unless (GST_VALUE_HOLDS_FOURCC (fourcc_value));
        fourcc = gst_value_get_fourcc (fourcc_value);
        fail_unless (fourcc != 0);
        got_i420 = got_i420 || (fourcc == GST_STR_FOURCC ("I420"));
        got_yuy2 = got_yuy2 || (fourcc == GST_STR_FOURCC ("YUY2"));
        got_yv12 = got_yv12 || (fourcc == GST_STR_FOURCC ("YV12"));
    }

    return (got_i420 && got_yuy2 && got_yv12);
}
Example #10
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
	GstAravis* gst_aravis = GST_ARAVIS(src);
	GstStructure *structure;
	ArvPixelFormat pixel_format;
	int height, width;
	int bpp, depth;
	const GValue *frame_rate;
	const char *caps_string;
	unsigned int i;
	guint32 fourcc;

	GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

	arv_camera_stop_acquisition (gst_aravis->camera);

	if (gst_aravis->stream != NULL)
		g_object_unref (gst_aravis->stream);

	structure = gst_caps_get_structure (caps, 0);

	gst_structure_get_int (structure, "width", &width);
	gst_structure_get_int (structure, "height", &height);
	frame_rate = gst_structure_get_value (structure, "framerate");
	gst_structure_get_int (structure, "bpp", &bpp);
	gst_structure_get_int (structure, "depth", &depth);

	if (gst_structure_get_field_type (structure, "format") == G_TYPE_STRING) {
		const char *string;

		string = gst_structure_get_string (structure, "format");
		fourcc = GST_STR_FOURCC (string);
	} else if (gst_structure_get_field_type (structure, "format") == GST_TYPE_FOURCC) {
		gst_structure_get_fourcc (structure, "format", &fourcc);
	} else
		fourcc = 0;

	pixel_format = arv_pixel_format_from_gst_0_10_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

	arv_camera_set_region (gst_aravis->camera, gst_aravis->offset_x, gst_aravis->offset_y, width, height);
	arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
	arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

	if (frame_rate != NULL) {
		double dbl_frame_rate;

		dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
			(double) gst_value_get_fraction_denominator (frame_rate);

		GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
		arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

		if (dbl_frame_rate > 0.0)
			gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
							     3e6 / dbl_frame_rate);
		else
			gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
	} else
		gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

	GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs", gst_aravis->buffer_timeout_us);

	GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

	if(gst_aravis->gain_auto) {
		arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Gain = continuous");
	} else {
		if (gst_aravis->gain >= 0) {
			GST_DEBUG_OBJECT (gst_aravis, "Gain = %g", gst_aravis->gain);
			arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %g", arv_camera_get_gain (gst_aravis->camera));
	}

	if(gst_aravis->exposure_auto) {
		arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Exposure = continuous");
	} else {
		if (gst_aravis->exposure_time_us > 0.0) {
			GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
			arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));
	}

	if (gst_aravis->fixed_caps != NULL)
		gst_caps_unref (gst_aravis->fixed_caps);

	caps_string = arv_pixel_format_to_gst_0_10_caps_string (pixel_format);
	if (caps_string != NULL) {
		GstStructure *structure;
		GstCaps *caps;

		caps = gst_caps_new_empty ();
		structure = gst_structure_from_string (caps_string, NULL);
		gst_structure_set (structure,
				   "width", G_TYPE_INT, width,
				   "height", G_TYPE_INT, height,
				   NULL);

		if (frame_rate != NULL)
			gst_structure_set_value (structure, "framerate", frame_rate);

		gst_caps_append_structure (caps, structure);

		gst_aravis->fixed_caps = caps;
	} else
		gst_aravis->fixed_caps = NULL;

	gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
	gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

	if (ARV_IS_GV_STREAM (gst_aravis->stream) && gst_aravis->packet_resend)
		g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_ALWAYS, NULL);
	else
		g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);

	for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
		arv_stream_push_buffer (gst_aravis->stream,
					arv_buffer_new (gst_aravis->payload, NULL));

	GST_LOG_OBJECT (gst_aravis, "Start acquisition");
	arv_camera_start_acquisition (gst_aravis->camera);

	gst_aravis->timestamp_offset = 0;
	gst_aravis->last_timestamp = 0;

	return TRUE;
}
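This example, like extract_resolution and gst_tiswhitebalance_fixate_caps, has to cope with the 0.10-era ambiguity where the "format" field may hold either a string or a fourcc. That pattern factors out into a small helper; a sketch under a hypothetical name:

/* hypothetical helper: return the format field as a fourcc, or 0 if unusable */
static guint32
get_format_fourcc (const GstStructure * structure)
{
  guint32 fourcc = 0;

  if (gst_structure_get_field_type (structure, "format") == G_TYPE_STRING)
    fourcc = GST_STR_FOURCC (gst_structure_get_string (structure, "format"));
  else if (gst_structure_get_field_type (structure, "format") == GST_TYPE_FOURCC)
    gst_structure_get_fourcc (structure, "format", &fourcc);

  return fourcc;
}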
Example #11
static void gst_tiswhitebalance_fixate_caps (GstBaseTransform* base,
                                             GstPadDirection direction,
                                             GstCaps* incoming,
                                             GstCaps* outgoing)
{
    GstTisWhiteBalance* self = GST_TISWHITEBALANCE(base);

    GstStructure* ins;
    GstStructure* outs;
    gint width, height;
    g_return_if_fail (gst_caps_is_fixed (incoming));

    GST_DEBUG_OBJECT (base, "trying to fixate outgoing caps %" GST_PTR_FORMAT
                      " based on caps %" GST_PTR_FORMAT, outgoing, incoming);

    ins = gst_caps_get_structure (incoming, 0);
    outs = gst_caps_get_structure (outgoing, 0);

    if (gst_structure_get_int (ins, "width", &width))
    {
        if (gst_structure_has_field (outs, "width"))
        {
            gst_structure_fixate_field_nearest_int (outs, "width", width);
        }
        self->width = width;
    }

    if (gst_structure_get_int (ins, "height", &height))
    {
        if (gst_structure_has_field (outs, "height"))
        {
            gst_structure_fixate_field_nearest_int (outs, "height", height);
        }
        self->height = height;
    }

    const char* p = gst_structure_get_name (ins);
    guint fourcc;
    if (g_strcmp0(p, "video/x-raw-bayer") == 0)
    {
        if (gst_structure_get_field_type (ins, "format") == G_TYPE_STRING)
        {
            const char *string;
            string = gst_structure_get_string (ins, "format");
            fourcc = GST_STR_FOURCC (string);
        }
        else if (gst_structure_get_field_type (ins, "format") == GST_TYPE_FOURCC)
        {
            gst_structure_get_fourcc (ins, "format", &fourcc);
        }
        else
            fourcc = 0;

        if (fourcc == 0)
        {
            gst_debug_log (gst_tiswhitebalance_debug_category,
                           GST_LEVEL_ERROR,
                           "gst_tiswhitebalance",
                           "gst_tiswhitebalance_fixate_caps",
                           0,
                           NULL,
                           "Unable to determine video format.");
            return;
        }

        if (fourcc == MAKE_FOURCC ('g','r','b','g'))
        {
            self->pattern = GR;
        }
        else if (fourcc == MAKE_FOURCC ('r', 'g', 'g', 'b'))
        {
            self->pattern = RG;
        }
        else if (fourcc == MAKE_FOURCC ('g', 'b', 'r', 'g'))
        {
            self->pattern = GB;
        }
        else if (fourcc == MAKE_FOURCC ('b', 'g', 'g', 'r'))
        {
            self->pattern = BG;
        }
        else
        {
            gst_debug_log (gst_tiswhitebalance_debug_category,
                           GST_LEVEL_ERROR,
                           "gst_tiswhitebalance",
                           "gst_tiswhitebalance_fixate_caps",
                           0,
                           NULL,
                           "Unable to determine bayer pattern.");
            return;
        }

        gst_debug_log (gst_tiswhitebalance_debug_category,
                       GST_LEVEL_INFO,
                       "gst_tiswhitebalance",
                       "gst_tiswhitebalance_fixate_caps",
                       0,
                       NULL,
                       "Using bayer format %s for whitebalancing.", bayer_to_string(self->pattern));

    }
    else
    {
        gst_debug_log (gst_tiswhitebalance_debug_category,
                       GST_LEVEL_INFO,
                       "gst_tiswhitebalance",
                       "gst_tiswhitebalance_fixate_caps",
                       0,
                       NULL,
                       "Not a bayer format. White balance will be disabled.");
    }

}
Example #12
static gboolean
find_midi_chunk (GstMidiParse * midiparse, guint8 * data, guint size,
    guint * offset, guint * length)
{
  guint32 type;

  *length = 0;

  if (size < 8)
    goto short_chunk;

  type = GST_STR_FOURCC (data);

  if (type == GST_MAKE_FOURCC ('R', 'I', 'F', 'F')) {
    guint32 riff_len;

    GST_DEBUG_OBJECT (midiparse, "found RIFF");

    if (size < 12)
      goto short_chunk;

    if (GST_STR_FOURCC (data + 8) != GST_MAKE_FOURCC ('R', 'M', 'I', 'D'))
      goto invalid_format;

    riff_len = GST_READ_UINT32_LE (data + 4);

    if (size < riff_len)
      goto short_chunk;

    data += 12;
    size -= 12;
    *offset = 12;

    GST_DEBUG_OBJECT (midiparse, "found RIFF RMID of size %u", riff_len);

    while (TRUE) {
      guint32 chunk_type;
      guint32 chunk_len;

      if (riff_len < 8)
        goto short_chunk;

      chunk_type = GST_STR_FOURCC (data);
      chunk_len = GST_READ_UINT32_LE (data + 4);

      riff_len -= 8;
      if (riff_len < chunk_len)
        goto short_chunk;

      data += 8;
      size -= 8;
      *offset += 8;
      riff_len -= chunk_len;

      if (chunk_type == GST_MAKE_FOURCC ('d', 'a', 't', 'a')) {
        *length = chunk_len;
        break;
      }

      data += chunk_len;
      size -= chunk_len;
    }
  } else {
    *offset = 0;
    *length = size;
  }
  return TRUE;

  /* ERRORS */
short_chunk:
  {
    GST_LOG_OBJECT (midiparse, "not enough data %u < %u", size, *length + 8);
    return FALSE;
  }
invalid_format:
  {
    GST_ERROR_OBJECT (midiparse, "invalid format");
    return FALSE;
  }
}
Example #13
int PsychCreateNewMovieFile(char* moviefile, int width, int height, double framerate, char* movieoptions)
{
	PsychMovieWriterRecordType*             pwriterRec = NULL;
	int                                     moviehandle = 0;
	GError                                  *myErr = NULL;
	char*                                   poption;
	char                                    codecString[1000];
	char                                    launchString[10000];
	int                                     dummyInt;
	float                                   dummyFloat;
	char                                    myfourcc[5];
	psych_bool                              doAudio = FALSE;

	// Still capacity left?
	if (moviewritercount >= PSYCH_MAX_MOVIEWRITERDEVICES) PsychErrorExitMsg(PsychError_user, "Maximum number of movie writers exceeded. Please close some first!");

	// Find first free (i.e., NULL) slot and assign moviehandle:
	while ((pwriterRec = PsychGetMovieWriter(moviehandle, TRUE)) && pwriterRec->Movie) moviehandle++;

	if (firsttime) {
		// Make sure GStreamer is ready:
		PsychGSCheckInit("movie writing");
		firsttime = FALSE;
	}

	// Store movie filename:
	strcpy(pwriterRec->File, moviefile);

	// Store width, height:
	pwriterRec->height  = height;
	pwriterRec->width   = width;
	pwriterRec->eos     = FALSE;

	// If no movieoptions specified, create default string for default
	// codec selection and configuration:
	if (strlen(movieoptions) == 0) {
		// No options provided. Select default encoder with default settings:
		movieoptions = strdup("DEFAULTenc");
	} else if ((poption = strstr(movieoptions, ":CodecSettings="))) {
		// Replace ':' with a zero in movieoptions, so it gets null-terminated:
		movieoptions = poption;
		*movieoptions = 0;

		// Move after null-terminator:
		movieoptions++;

		// Replace the ':CodecSettings=' with the special keyword 'DEFAULTenc', so
		// the default video codec is chosen but the given settings override its
		// default parameters.
		strncpy(movieoptions, "DEFAULTenc    ", strlen("DEFAULTenc    "));

		if (strlen(movieoptions) == 0) PsychErrorExitMsg(PsychError_user, "Invalid (empty) :CodecSettings= parameter specified. Aborted.");
	} else if ((poption = strstr(movieoptions, ":CodecType="))) {
		// Replace ':' with a zero in movieoptions, so it gets null-terminated
		// and only points to the actual movie filename:
		movieoptions = poption;
		*movieoptions = 0;

		// Advance movieoptions to point to the actual codec spec string:
		movieoptions+= 11;

		if (strlen(movieoptions) == 0) PsychErrorExitMsg(PsychError_user, "Invalid (empty) :CodecType= parameter specified. Aborted.");
	}

	// Assign numeric 32-bit FOURCC equivalent code to select codec:
	// This is optional. We default to kH264CodecType:
	if ((poption = strstr(movieoptions, "CodecFOURCCId="))) {
		if (sscanf(poption, "CodecFOURCCId=%i", &dummyInt) == 1) {
			pwriterRec->CodecType = dummyInt;
			if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-INFO: Codec with FOURCC numeric id %i [%" GST_FOURCC_FORMAT "] requested for encoding of movie %i [%s].\n", dummyInt, GST_FOURCC_ARGS(dummyInt), moviehandle, moviefile);
			if (PsychPrefStateGet_Verbosity() > 1) printf("PTB-WARNING: Codec selection by FOURCC not yet supported. FOURCC code ignored!\n");            
		}
		else PsychErrorExitMsg(PsychError_user, "Invalid CodecFOURCCId= parameter provided in movieoptions parameter. Parse error!");
	}

	// Assign 4 character string FOURCC code to select codec:
	if ((poption = strstr(movieoptions, "CodecFOURCC="))) {
		if (sscanf(poption, "CodecFOURCC=%c%c%c%c", &myfourcc[0], &myfourcc[1], &myfourcc[2], &myfourcc[3]) == 4) {
			myfourcc[4] = 0;
			dummyInt = (int) GST_STR_FOURCC (myfourcc);
			pwriterRec->CodecType = dummyInt;
			if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-INFO: Codec with FOURCC numeric id %i [%" GST_FOURCC_FORMAT "] requested for encoding of movie %i [%s].\n", dummyInt, GST_FOURCC_ARGS(dummyInt), moviehandle, moviefile);
			if (PsychPrefStateGet_Verbosity() > 1) printf("PTB-WARNING: Codec selection by FOURCC not yet supported. FOURCC code ignored!\n");            
		}
		else PsychErrorExitMsg(PsychError_user, "Invalid CodecFOURCC= parameter provided in movieoptions parameter. Must be exactly 4 characters! Parse error!");
	}

	// Assign numeric encoding quality level:
	// This is optional. We default to "normal quality":
	if ((poption = strstr(movieoptions, "EncodingQuality="))) {
		if ((sscanf(poption, "EncodingQuality=%f", &dummyFloat) == 1) && (dummyFloat >= 0) && (dummyFloat <= 1)) {
			// Map floating point quality level between 0.0 and 1.0 to 10 discrete levels:
			if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-INFO: Encoding quality level %f selected for encoding of movie %i [%s].\n", dummyFloat, moviehandle, moviefile);

			// Rewrite "EncodingQuality=" string into "VideoQuality=" string, with proper
			// padding:      "EncodingQuality="
			// This way EncodingQuality in Quicktime lingo corresponds to
			// VideoQuality in GStreamer lingo:
			strncpy(poption, "   Videoquality=", strlen("   Videoquality="));
		}
		else PsychErrorExitMsg(PsychError_user, "Invalid EncodingQuality= parameter provided in movieoptions parameter. Parse error or out of valid 0 - 1 range!");
	}

	// Check for valid parameters. Also warn if some parameters are borderline for certain codecs:
	if ((framerate < 1) && (PsychPrefStateGet_Verbosity() > 1)) printf("PTB-WARNING:In CreateMovie: Negative or zero 'framerate' %f units for moviehandle %i provided! Sounds like trouble ahead.\n", (float) framerate, moviehandle);
	if (width < 1) PsychErrorExitMsg(PsychError_user, "In CreateMovie: Invalid zero or negative 'width' for video frame size provided!");
	if ((width < 4) && (PsychPrefStateGet_Verbosity() > 1)) printf("PTB-WARNING:In CreateMovie: 'width' of %i pixels for moviehandle %i provided! Some video codecs may malfunction with such a small width.\n", width, moviehandle);
	if ((width % 4 != 0) && (PsychPrefStateGet_Verbosity() > 1)) printf("PTB-WARNING:In CreateMovie: 'width' of %i pixels for moviehandle %i provided! Some video codecs may malfunction with a width which is not a multiple of 4 or 16.\n", width, moviehandle);
	if (height < 1) PsychErrorExitMsg(PsychError_user, "In CreateMovie: Invalid zero or negative 'height' for video frame size provided!");
	if ((height < 4) && (PsychPrefStateGet_Verbosity() > 1)) printf("PTB-WARNING:In CreateMovie: 'height' of %i pixels for moviehandle %i provided! Some video codecs may malfunction with such a small height.\n", height, moviehandle);

	// Full GStreamer launch line a la gst-launch command provided?
	if (strstr(movieoptions, "gst-launch")) {
		// Yes: We use movieoptions directly as launch line:
		movieoptions = strstr(movieoptions, "gst-launch");
        
		// Move string pointer behind the "gst-launch" word (plus a blank):
		movieoptions+= strlen("gst-launch ");
        
		// Can directly use this:
		sprintf(launchString, "%s", movieoptions);

		// With audio track?
		if (strstr(movieoptions, "name=ptbaudioappsrc")) doAudio = TRUE;
	}
	else {
		// No: Do our own parsing and setup:

		// Find the gst-launch style string for codecs and muxers:
		if (!PsychGetCodecLaunchLineFromString(movieoptions, &(codecString[0]))) {
			// No config for this format possible:
			if (PsychPrefStateGet_Verbosity() > 0) printf("PTB-ERROR:In CreateMovie: Creating movie file with handle %i [%s] failed: Could not find matching codec setup.\n", moviehandle, moviefile);
			goto bail;
		}
        
		// With audio track?
		if (strstr(movieoptions, "AddAudioTrack")) doAudio = TRUE;
        
		// Build final launch string:
		if (doAudio) {
			// Video and audio:
			sprintf(launchString, "appsrc name=ptbvideoappsrc do-timestamp=0 stream-type=0 max-bytes=0 block=1 is-live=0 emit-signals=0 ! capsfilter caps=\"video/x-raw-rgb, bpp=(int)32, depth=(int)32, endianess=(int)4321, red_mask=(int)16711680, green_mask=(int)65280, blue_mask=(int)255, width=(int)%i, height=(int)%i, framerate=%i/1 \" ! videorate ! ffmpegcolorspace ! %s ! filesink name=ptbfilesink async=0 location=%s ", width, height, ((int) (framerate + 0.5)), codecString, moviefile);
		} else {
			// Video only:
			sprintf(launchString, "appsrc name=ptbvideoappsrc do-timestamp=0 stream-type=0 max-bytes=0 block=1 is-live=0 emit-signals=0 ! capsfilter caps=\"video/x-raw-rgb, bpp=(int)32, depth=(int)32, endianess=(int)4321, red_mask=(int)16711680, green_mask=(int)65280, blue_mask=(int)255, width=(int)%i, height=(int)%i, framerate=%i/1 \" ! videorate ! ffmpegcolorspace ! %s ! filesink name=ptbfilesink async=0 location=%s ", width, height, ((int) (framerate + 0.5)), codecString, moviefile);
		}
	}
        
	// Create a movie file for the destination movie:
	if (PsychPrefStateGet_Verbosity() > 3) {
		printf("PTB-INFO: Movie writing pipeline gst-launch line (without the -e option required on the command line!) is:\n");
		printf("gst-launch %s\n", launchString);
	}

	// Build pipeline from launch string:
	pwriterRec->Movie = gst_parse_launch((const gchar*) launchString, &myErr);
	if ((NULL == pwriterRec->Movie) || myErr) {
		if (PsychPrefStateGet_Verbosity() > 0) {
			printf("PTB-ERROR: In CreateMovie: Creating movie file with handle %i [%s] failed: Could not build pipeline.\n", moviehandle, moviefile);
			printf("PTB-ERROR: Parameters were: %s\n", movieoptions);
			printf("PTB-ERROR: Launch string was: %s\n", launchString);
			printf("PTB-ERROR: GStreamer error message was: %s\n", (char*) myErr->message);

		      // Special tips for the challenged:
		      if (strstr(myErr->message, "property")) {
			      // Bailed due to unsupported x264enc parameter "speed-preset" or "profile". Can be solved by upgrading
			      // GStreamer or the OS or the VideoCodec= override:
			      printf("PTB-TIP: The reason this failed is because your GStreamer codec installation is too outdated.\n");
			      printf("PTB-TIP: Either upgrade your GStreamer (plugin) installation to a more recent version,\n");
			      printf("PTB-TIP: or upgrade your operating system (e.g., Ubuntu 10.10 'Maverick Meercat' and later are fine).\n");
			      printf("PTB-TIP: A recent GStreamer installation is required to use all features and get optimal performance.\n");
			      printf("PTB-TIP: As a workaround, you can manually specify all codec settings, leaving out the unsupported\n");
			      printf("PTB-TIP: option. See 'help VideoRecording' on how to do that.\n\n");
		      }
		}

		goto bail;
	}

	// Get handle to ptbvideoappsrc:
	pwriterRec->ptbvideoappsrc = gst_bin_get_by_name(GST_BIN(pwriterRec->Movie), (const gchar *) "ptbvideoappsrc");
	if (NULL == pwriterRec->ptbvideoappsrc) {
		if (PsychPrefStateGet_Verbosity() > 0) printf("PTB-ERROR: In CreateMovie: Creating movie file with handle %i [%s] failed: Could not find ptbvideoappsrc pipeline element.\n", moviehandle, moviefile);
		goto bail;
	}

	// Get handle to ptbaudioappsrc:
	pwriterRec->ptbaudioappsrc = gst_bin_get_by_name(GST_BIN(pwriterRec->Movie), (const gchar *) "ptbaudioappsrc");
	if (doAudio && (NULL == pwriterRec->ptbaudioappsrc)) {
		if (PsychPrefStateGet_Verbosity() > 0) printf("PTB-ERROR: In CreateMovie: Creating movie file with handle %i [%s] failed: Could not find ptbaudioappsrc pipeline element.\n", moviehandle, moviefile);
		goto bail;
	}

	pwriterRec->Context = g_main_loop_new (NULL, FALSE);
	pwriterRec->bus = gst_pipeline_get_bus (GST_PIPELINE(pwriterRec->Movie));
	gst_bus_add_watch(pwriterRec->bus, (GstBusFunc) PsychMovieBusCallback, pwriterRec);
	gst_object_unref(pwriterRec->bus);

	// Start the pipeline:
	if (!PsychMoviePipelineSetState(pwriterRec->Movie, GST_STATE_PLAYING, 10)) {
		if (PsychPrefStateGet_Verbosity() > 0) printf("PTB-ERROR: In CreateMovie: Creating movie file with handle %i [%s] failed:  Failed to start movie encoding pipeline!\n", moviehandle, moviefile);
		goto bail;
	}

	PsychGSProcessMovieContext(pwriterRec->Context, FALSE);

	// Increment count of open movie writers:
	moviewritercount++;
	
	if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-INFO: Moviehandle %i successfully opened for movie writing into file '%s'.\n", moviehandle, moviefile);

    // Should we dump the whole encoding pipeline graph to a file for visualization
    // with GraphViz? This can be controlled via PsychTweak('GStreamerDumpFilterGraph' dirname);
    if (getenv("GST_DEBUG_DUMP_DOT_DIR")) {
        // Dump complete encoding filter graph to a .dot file for later visualization with GraphViz:
        printf("PTB-DEBUG: Dumping movie encoder graph for movie %s to directory %s.\n", moviefile, getenv("GST_DEBUG_DUMP_DOT_DIR"));
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pwriterRec->Movie), GST_DEBUG_GRAPH_SHOW_ALL, "PsychMovieWritingGraph");
    }

	// Return new handle:
	return(moviehandle);

bail:
	if (pwriterRec->ptbvideoappsrc) gst_object_unref(GST_OBJECT(pwriterRec->ptbvideoappsrc));
	pwriterRec->ptbvideoappsrc = NULL;

	if (pwriterRec->ptbaudioappsrc) gst_object_unref(GST_OBJECT(pwriterRec->ptbaudioappsrc));
	pwriterRec->ptbaudioappsrc = NULL;

	if (pwriterRec->Movie) gst_object_unref(GST_OBJECT(pwriterRec->Movie));
	pwriterRec->Movie = NULL;

	if (pwriterRec->Context) g_main_loop_unref(pwriterRec->Context);
	pwriterRec->Context = NULL;

	// Return failure:
	return(-1);
}